Compare commits

..

22 Commits

Author SHA1 Message Date
Deluan 7cf98af9da Refactor included handling 2023-06-07 10:49:56 -04:00
Deluan 40f7170930 Add descriptions to included fields 2023-06-07 10:49:56 -04:00
Deluan 4a8f176f0d Add includes for /albums 2023-06-07 10:49:56 -04:00
Deluan 4507895d58 Add includes for /tracks 2023-06-07 10:49:56 -04:00
Deluan 90c2d7d1cf Add Albums endpoints 2023-06-07 10:49:56 -04:00
Deluan f62231f728 Fix pipeline 2023-06-07 10:49:55 -04:00
Deluan fd1e06049f Use redocly/cli to bundle/lint OpenAPI spec 2023-06-07 10:49:55 -04:00
Deluan 7dfe29af5e Build OpenAPI.yaml 2023-06-07 10:49:55 -04:00
Deluan 8e03d7d013 Add authorization to new API 2023-06-07 10:49:55 -04:00
Deluan 960415ed95 Update oapi-codegen 2023-06-07 10:49:55 -04:00
Deluan ea231fe265 Refactor 2023-06-07 10:49:55 -04:00
Deluan 09e52eba87 Change API name 2023-06-07 10:49:55 -04:00
Deluan 2650e4c27c Finish splitting openapi into multiple files 2023-06-07 10:49:55 -04:00
Deluan 3c0f23e3f2 Fix spec paths 2023-06-07 10:49:55 -04:00
Deluan 250b6dbb33 Start breaking OpenAPI spec 2023-06-07 10:49:55 -04:00
Deluan be945e010a go mod tidy 2023-06-07 10:49:55 -04:00
Deluan 9308127342 Add more tracks attributes 2023-06-07 10:49:55 -04:00
Deluan 141edb881e Add sort param 2023-06-07 10:49:55 -04:00
Deluan 20a1e3160b Add Album and relationships 2023-06-07 10:49:55 -04:00
Deluan d4c458d193 Add tests 2023-06-07 10:49:55 -04:00
Deluan ed87e703ff Build collection Links 2023-06-07 10:49:55 -04:00
Deluan dcb5725642 Add OpenAPI spec for new API, and wire up a new API handler (Based on Aura and JSONAPI) 2023-06-07 10:49:55 -04:00
1283 changed files with 56605 additions and 129388 deletions
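The commits above add a new JSON:API-style HTTP API (the last commit notes it is based on Aura and JSON:API), with album and track endpoints, a sort parameter, and include support for related resources. As a purely illustrative sketch — the real paths, parameter names, and authorization scheme are whatever api/openapi.yaml in this branch defines, not what is shown here — a request against the new endpoints might look like:

```sh
# Hypothetical request against the new JSON:API-style endpoints from these commits.
# The /api base path, the sort/include parameter names, and the bearer token are
# assumptions for illustration; api/openapi.yaml is the authoritative definition.
curl -s -H "Authorization: Bearer $TOKEN" \
  "http://localhost:4533/api/albums?sort=-name&include=tracks"
```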


@@ -2,7 +2,7 @@
# [Choice] Go version: 1, 1.15, 1.14
ARG VARIANT="1"
FROM mcr.microsoft.com/vscode/devcontainers/go:${VARIANT}
FROM mcr.microsoft.com/vscode/devcontainers/go:0-${VARIANT}
# [Option] Install Node.js
ARG INSTALL_NODE="true"
@@ -17,4 +17,4 @@ RUN apt-get update && export DEBIAN_FRONTEND=noninteractive \
# RUN go get -x <your-dependency-or-tool>
# [Optional] Uncomment this line to install global node packages.
# RUN su vscode -c "source /usr/local/share/nvm/nvm.sh && npm install -g <your-package-here>" 2>&1
# RUN su vscode -c "source /usr/local/share/nvm/nvm.sh && npm install -g <your-package-here>" 2>&1


@@ -4,10 +4,10 @@
"dockerfile": "Dockerfile",
"args": {
// Update the VARIANT arg to pick a version of Go: 1, 1.15, 1.14
"VARIANT": "1.25",
"VARIANT": "1.20",
// Options
"INSTALL_NODE": "true",
"NODE_VERSION": "v24"
"NODE_VERSION": "v18"
}
},
"workspaceMount": "",
@@ -18,37 +18,33 @@
"--volume=${localWorkspaceFolder}:/workspaces/${localWorkspaceFolderBasename}:Z"
],
// Set *default* container specific settings.json values on container create.
"customizations": {
"vscode": {
"settings": {
"terminal.integrated.shell.linux": "/bin/bash",
"go.useGoProxyToCheckForToolUpdates": false,
"go.useLanguageServer": true,
"go.gopath": "/go",
"go.goroot": "/usr/local/go",
"go.toolsGopath": "/go/bin",
"go.formatTool": "goimports",
"go.lintOnSave": "package",
"go.lintTool": "golangci-lint",
"editor.formatOnSave": true,
"[javascript]": {
"editor.defaultFormatter": "esbenp.prettier-vscode"
},
"[json]": {
"editor.defaultFormatter": "esbenp.prettier-vscode"
},
"[jsonc]": {
"editor.defaultFormatter": "vscode.json-language-features"
}
},
// Add the IDs of extensions you want installed when the container is created.
"extensions": [
"golang.Go",
"esbenp.prettier-vscode",
"tamasfe.even-better-toml"
]
"settings": {
"terminal.integrated.shell.linux": "/bin/bash",
"go.useGoProxyToCheckForToolUpdates": false,
"go.useLanguageServer": true,
"go.gopath": "/go",
"go.goroot": "/usr/local/go",
"go.toolsGopath": "/go/bin",
"go.formatTool": "goimports",
"go.lintOnSave": "package",
"go.lintTool": "golangci-lint",
"editor.formatOnSave": true,
"[javascript]": {
"editor.defaultFormatter": "esbenp.prettier-vscode"
},
"[json]": {
"editor.defaultFormatter": "esbenp.prettier-vscode"
},
"[jsonc]": {
"editor.defaultFormatter": "vscode.json-language-features"
}
},
// Add the IDs of extensions you want installed when the container is created.
"extensions": [
"golang.Go",
"esbenp.prettier-vscode",
"tamasfe.even-better-toml"
],
// Use 'forwardPorts' to make a list of ports inside the container available locally.
"forwardPorts": [
4533,


@@ -1,18 +1,10 @@
.DS_Store
ui/node_modules
ui/build
!ui/build/.gitkeep
Dockerfile
docker-compose*.yml
data
*.db
testDB
navidrome
navidrome.db
navidrome.toml
tmp
!tmp/taglib
dist
binaries
cache
music
!Dockerfile


@@ -1,23 +0,0 @@
name: 'Download TagLib'
description: 'Downloads and extracts the TagLib library, adding it to PKG_CONFIG_PATH'
inputs:
version:
description: 'Version of TagLib to download'
required: true
platform:
description: 'Platform to download TagLib for'
default: 'linux-amd64'
runs:
using: 'composite'
steps:
- name: Download TagLib
shell: bash
run: |
mkdir -p /tmp/taglib
cd /tmp
FILE=taglib-${{ inputs.platform }}.tar.gz
wget https://github.com/navidrome/cross-taglib/releases/download/v${{ inputs.version }}/${FILE}
tar -xzf ${FILE} -C taglib
PKG_CONFIG_PREFIX=/tmp/taglib
echo "PKG_CONFIG_PREFIX=${PKG_CONFIG_PREFIX}" >> $GITHUB_ENV
echo "PKG_CONFIG_PATH=${PKG_CONFIG_PATH}:${PKG_CONFIG_PREFIX}/lib/pkgconfig" >> $GITHUB_ENV


@@ -1,84 +0,0 @@
name: 'Prepare Docker Buildx environment'
description: 'Downloads and extracts the TagLib library, adding it to PKG_CONFIG_PATH'
inputs:
github_token:
description: 'GitHub token'
required: true
default: ''
hub_repository:
description: 'Docker Hub repository to push images to'
required: false
default: ''
hub_username:
description: 'Docker Hub username'
required: false
default: ''
hub_password:
description: 'Docker Hub password'
required: false
default: ''
outputs:
tags:
description: 'Docker image tags'
value: ${{ steps.meta.outputs.tags }}
labels:
description: 'Docker image labels'
value: ${{ steps.meta.outputs.labels }}
annotations:
description: 'Docker image annotations'
value: ${{ steps.meta.outputs.annotations }}
version:
description: 'Docker image version'
value: ${{ steps.meta.outputs.version }}
hub_repository:
description: 'Docker Hub repository'
value: ${{ env.DOCKER_HUB_REPO }}
hub_enabled:
description: 'Is Docker Hub enabled'
value: ${{ env.DOCKER_HUB_ENABLED }}
runs:
using: 'composite'
steps:
- name: Check Docker Hub configuration
shell: bash
run: |
if [ -z "${{inputs.hub_repository}}" ]; then
echo "DOCKER_HUB_REPO=none" >> $GITHUB_ENV
echo "DOCKER_HUB_ENABLED=false" >> $GITHUB_ENV
else
echo "DOCKER_HUB_REPO=${{inputs.hub_repository}}" >> $GITHUB_ENV
echo "DOCKER_HUB_ENABLED=true" >> $GITHUB_ENV
fi
- name: Login to Docker Hub
if: inputs.hub_username != '' && inputs.hub_password != ''
uses: docker/login-action@v3
with:
username: ${{ inputs.hub_username }}
password: ${{ inputs.hub_password }}
- name: Login to GitHub Container Registry
uses: docker/login-action@v3
with:
registry: ghcr.io
username: ${{ github.actor }}
password: ${{ inputs.github_token }}
- name: Set up Docker Buildx
id: buildx
uses: docker/setup-buildx-action@v3
- name: Extract metadata for Docker image
id: meta
uses: docker/metadata-action@v5
with:
labels: |
maintainer=deluan@navidrome.org
images: |
name=${{env.DOCKER_HUB_REPO}},enable=${{env.DOCKER_HUB_ENABLED}}
name=ghcr.io/${{ github.repository }}
tags: |
type=ref,event=pr
type=semver,pattern={{version}}
type=raw,value=develop,enable={{is_default_branch}}


@@ -10,13 +10,3 @@ updates:
schedule:
interval: weekly
open-pull-requests-limit: 10
- package-ecosystem: docker
directory: "/"
schedule:
interval: weekly
open-pull-requests-limit: 10
- package-ecosystem: github-actions
directory: "/.github/workflows"
schedule:
interval: weekly
open-pull-requests-limit: 10


@@ -1,38 +0,0 @@
### Description
<!-- Please provide a clear and concise description of what this PR does and why it is needed. -->
### Related Issues
<!-- List any related issues, e.g., "Fixes #123" or "Related to #456". -->
### Type of Change
- [ ] Bug fix
- [ ] New feature
- [ ] Documentation update
- [ ] Refactor
- [ ] Other (please describe):
### Checklist
Please review and check all that apply:
- [ ] My code follows the projects coding style
- [ ] I have tested the changes locally
- [ ] I have added or updated documentation as needed
- [ ] I have added tests that prove my fix/feature works (or explain why not)
- [ ] All existing and new tests pass
### How to Test
<!-- Describe the steps to test your changes. Include setup, commands, and expected results. -->
### Screenshots / Demos (if applicable)
<!-- Add screenshots, GIFs, or links to demos if your change includes UI updates or visual changes. -->
### Additional Notes
<!-- Anything else the maintainer should know? Potential side effects, breaking changes, or areas of concern? -->
<!--
**Tips for Contributors:**
- Be concise but thorough.
- If your PR is large, consider breaking it into smaller PRs.
- Tag the maintainer if you need a prompt review.
- Avoid force pushing to the branch after opening the PR, as it can complicate the review process.
-->

.github/workflows/pipeline.dockerfile

@@ -0,0 +1,38 @@
#####################################################
### Copy platform specific binary
FROM bash as copy-binary
ARG TARGETPLATFORM
RUN echo "Target Platform = ${TARGETPLATFORM}"
COPY dist .
RUN if [ "$TARGETPLATFORM" = "linux/amd64" ]; then cp navidrome_linux_amd64_linux_amd64_v1/navidrome /navidrome; fi
RUN if [ "$TARGETPLATFORM" = "linux/386" ]; then cp navidrome_linux_386_linux_386/navidrome /navidrome; fi
RUN if [ "$TARGETPLATFORM" = "linux/arm64" ]; then cp navidrome_linux_arm64_linux_arm64/navidrome /navidrome; fi
RUN if [ "$TARGETPLATFORM" = "linux/arm/v6" ]; then cp navidrome_linux_arm_linux_arm_6/navidrome /navidrome; fi
RUN if [ "$TARGETPLATFORM" = "linux/arm/v7" ]; then cp navidrome_linux_arm_linux_arm_7/navidrome /navidrome; fi
RUN chmod +x /navidrome
#####################################################
### Build Final Image
FROM alpine as release
LABEL maintainer="deluan@navidrome.org"
# Install ffmpeg and output build config
RUN apk add --no-cache ffmpeg
RUN ffmpeg -buildconf
COPY --from=copy-binary /navidrome /app/
VOLUME ["/data", "/music"]
ENV ND_MUSICFOLDER /music
ENV ND_DATAFOLDER /data
ENV ND_PORT 4533
ENV GODEBUG "asyncpreemptoff=1"
EXPOSE ${ND_PORT}
HEALTHCHECK CMD wget -O- http://localhost:${ND_PORT}/ping || exit 1
WORKDIR /app
ENTRYPOINT ["/app/navidrome"]
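This workflow Dockerfile only repackages prebuilt binaries from dist/ into the final image; it does not compile anything. A hedged sketch of how it would be invoked (docker buildx sets TARGETPLATFORM for each platform; the tag and platform list here are illustrative):

```sh
# Illustrative invocation of .github/workflows/pipeline.dockerfile. It assumes
# GoReleaser has already populated ./dist with the per-platform navidrome binaries.
docker buildx build \
  --platform linux/amd64,linux/arm64,linux/arm/v7 \
  -f .github/workflows/pipeline.dockerfile \
  -t deluan/navidrome:develop \
  .
```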


@@ -1,4 +1,4 @@
name: "Pipeline: Test, Lint, Build"
name: 'Pipeline: Test, Lint, Build'
on:
push:
branches:
@@ -9,117 +9,103 @@ on:
branches:
- master
concurrency:
group: ${{ startsWith(github.ref, 'refs/tags/v') && 'tag' || 'branch' }}-${{ github.ref }}
cancel-in-progress: true
env:
CROSS_TAGLIB_VERSION: "2.1.1-1"
IS_RELEASE: ${{ startsWith(github.ref, 'refs/tags/') && 'true' || 'false' }}
jobs:
git-version:
name: Get version info
runs-on: ubuntu-latest
outputs:
git_tag: ${{ steps.git-version.outputs.GIT_TAG }}
git_sha: ${{ steps.git-version.outputs.GIT_SHA }}
steps:
- uses: actions/checkout@v5
with:
fetch-depth: 0
fetch-tags: true
- name: Show git version info
run: |
echo "git describe (dirty): $(git describe --dirty --always --tags)"
echo "git describe --tags: $(git describe --tags `git rev-list --tags --max-count=1`)"
echo "git tag: $(git tag --sort=-committerdate | head -n 1)"
echo "github_ref: $GITHUB_REF"
echo "github_head_sha: ${{ github.event.pull_request.head.sha }}"
git tag -l
- name: Determine git current SHA and latest tag
id: git-version
run: |
GIT_TAG=$(git tag --sort=-committerdate | head -n 1)
if [ -n "$GIT_TAG" ]; then
if [[ "$GITHUB_REF" != refs/tags/* ]]; then
GIT_TAG=${GIT_TAG}-SNAPSHOT
fi
echo "GIT_TAG=$GIT_TAG" >> $GITHUB_OUTPUT
fi
GIT_SHA=$(git rev-parse --short HEAD)
PR_NUM=$(jq --raw-output .pull_request.number "$GITHUB_EVENT_PATH")
if [[ $PR_NUM != "null" ]]; then
GIT_SHA=$(echo "${{ github.event.pull_request.head.sha }}" | cut -c1-8)
GIT_SHA="pr-${PR_NUM}/${GIT_SHA}"
fi
echo "GIT_SHA=$GIT_SHA" >> $GITHUB_OUTPUT
echo "GIT_TAG=$GIT_TAG"
echo "GIT_SHA=$GIT_SHA"
go-lint:
name: Lint Go code
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v5
- name: Install taglib
run: sudo apt-get install libtag1-dev
- name: Download TagLib
uses: ./.github/actions/download-taglib
- name: Set up Go 1.20
uses: actions/setup-go@v3
with:
version: ${{ env.CROSS_TAGLIB_VERSION }}
go-version: 1.20.x
- uses: actions/checkout@v3
- name: golangci-lint
uses: golangci/golangci-lint-action@v8
uses: golangci/golangci-lint-action@v3
with:
version: latest
problem-matchers: true
github-token: ${{ secrets.GITHUB_TOKEN }}
args: --timeout 2m
- name: Run go goimports
run: go run golang.org/x/tools/cmd/goimports@latest -w `find . -name '*.go' | grep -v '_gen.go$' | grep -v '.pb.go$'`
- name: Install goimports
run: go install golang.org/x/tools/cmd/goimports
- run: goimports -w `find . -name '*.go' | grep -v '_gen.go$'`
- run: go mod tidy
- name: Verify no changes from goimports and go mod tidy
run: |
git status --porcelain
if [ -n "$(git status --porcelain)" ]; then
echo 'To fix this check, run "make format" and commit the changes'
echo 'To fix this check, run "goimports -w $(find . -name '*.go' | grep -v '_gen.go$') && go mod tidy"'
exit 1
fi
go:
name: Test Go code
runs-on: ubuntu-latest
steps:
- name: Check out code into the Go module directory
uses: actions/checkout@v5
- name: Download TagLib
uses: ./.github/actions/download-taglib
- uses: actions/setup-node@v3
with:
version: ${{ env.CROSS_TAGLIB_VERSION }}
node-version: 18
cache: 'npm'
cache-dependency-path: '**/package-lock.json'
- name: Build and Lint OpenAPI spec
run: make lintapi gen
- name: Verify no changes
run: |
git status --porcelain
if [ -n "$(git status --porcelain)" ]; then
echo 'Changes to OpenAPI spec caused changes to the code. Please review and commit the changes.'
exit 1
fi
- uses: actions/upload-artifact@v3
with:
name: openapi.yaml
path: api/openapi.yaml
go:
name: Test with Go ${{ matrix.go_version }}
runs-on: ubuntu-latest
strategy:
matrix:
go_version: [1.20.x,1.19.x]
steps:
- name: Install taglib
run: sudo apt-get install libtag1-dev
- name: Check out code into the Go module directory
uses: actions/checkout@v3
- name: Set up Go ${{ matrix.go_version }}
uses: actions/setup-go@v3
with:
go-version: ${{ matrix.go_version }}
cache: true
- name: Download dependencies
if: steps.cache-go.outputs.cache-hit != 'true'
continue-on-error: ${{contains(matrix.go_version, 'beta') || contains(matrix.go_version, 'rc')}}
run: go mod download
- name: Test
run: |
pkg-config --define-prefix --cflags --libs taglib # for debugging
go test -shuffle=on -tags netgo -race ./... -v
continue-on-error: ${{contains(matrix.go_version, 'beta') || contains(matrix.go_version, 'rc')}}
run: go test -shuffle=on -race -cover ./... -v
js:
name: Test JS code
name: Build JS bundle
runs-on: ubuntu-latest
env:
NODE_OPTIONS: "--max_old_space_size=4096"
NODE_OPTIONS: '--max_old_space_size=4096'
steps:
- uses: actions/checkout@v5
- uses: actions/setup-node@v6
- uses: actions/checkout@v3
- uses: actions/setup-node@v3
with:
node-version: 24
cache: "npm"
cache-dependency-path: "**/package-lock.json"
node-version: 18
cache: 'npm'
cache-dependency-path: '**/package-lock.json'
- name: npm install dependencies
run: |
@@ -141,291 +127,119 @@ jobs:
cd ui
npm run build
i18n-lint:
name: Lint i18n files
- uses: actions/upload-artifact@v3
with:
name: js-bundle
path: ui/build
binaries:
name: Build binaries
needs: [js, go, go-lint]
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v5
- run: |
set -e
for file in resources/i18n/*.json; do
echo "Validating $file"
if ! jq empty "$file" 2>error.log; then
error_message=$(cat error.log)
line_number=$(echo "$error_message" | grep -oP 'line \K[0-9]+')
echo "::error file=$file,line=$line_number::$error_message"
exit 1
fi
done
- run: ./.github/workflows/validate-translations.sh -v
check-push-enabled:
name: Check Docker configuration
runs-on: ubuntu-latest
outputs:
is_enabled: ${{ steps.check.outputs.is_enabled }}
steps:
- name: Check if Docker push is configured
id: check
run: echo "is_enabled=${{ secrets.DOCKER_HUB_USERNAME != '' }}" >> $GITHUB_OUTPUT
build:
name: Build
needs: [js, go, go-lint, i18n-lint, git-version, check-push-enabled]
strategy:
matrix:
platform: [ linux/amd64, linux/arm64, linux/arm/v5, linux/arm/v6, linux/arm/v7, linux/386, darwin/amd64, darwin/arm64, windows/amd64, windows/386 ]
runs-on: ubuntu-latest
env:
IS_LINUX: ${{ startsWith(matrix.platform, 'linux/') && 'true' || 'false' }}
IS_ARMV5: ${{ matrix.platform == 'linux/arm/v5' && 'true' || 'false' }}
IS_DOCKER_PUSH_CONFIGURED: ${{ needs.check-push-enabled.outputs.is_enabled == 'true' }}
DOCKER_BUILD_SUMMARY: false
GIT_SHA: ${{ needs.git-version.outputs.git_sha }}
GIT_TAG: ${{ needs.git-version.outputs.git_tag }}
steps:
- name: Sanitize platform name
id: set-platform
run: |
PLATFORM=$(echo ${{ matrix.platform }} | tr '/' '_')
echo "PLATFORM=$PLATFORM" >> $GITHUB_ENV
- uses: actions/checkout@v5
- name: Prepare Docker Buildx
uses: ./.github/actions/prepare-docker
id: docker
with:
github_token: ${{ secrets.GITHUB_TOKEN }}
hub_repository: ${{ vars.DOCKER_HUB_REPO }}
hub_username: ${{ secrets.DOCKER_HUB_USERNAME }}
hub_password: ${{ secrets.DOCKER_HUB_PASSWORD }}
- name: Build Binaries
uses: docker/build-push-action@v6
with:
context: .
file: Dockerfile
platforms: ${{ matrix.platform }}
outputs: |
type=local,dest=./output/${{ env.PLATFORM }}
target: binary
build-args: |
GIT_SHA=${{ env.GIT_SHA }}
GIT_TAG=${{ env.GIT_TAG }}
CROSS_TAGLIB_VERSION=${{ env.CROSS_TAGLIB_VERSION }}
- name: Upload Binaries
uses: actions/upload-artifact@v5
with:
name: navidrome-${{ env.PLATFORM }}
path: ./output
retention-days: 7
- name: Build and push image by digest
id: push-image
if: env.IS_LINUX == 'true' && env.IS_DOCKER_PUSH_CONFIGURED == 'true' && env.IS_ARMV5 == 'false'
uses: docker/build-push-action@v6
with:
context: .
file: Dockerfile
platforms: ${{ matrix.platform }}
labels: ${{ steps.docker.outputs.labels }}
build-args: |
GIT_SHA=${{ env.GIT_SHA }}
GIT_TAG=${{ env.GIT_TAG }}
CROSS_TAGLIB_VERSION=${{ env.CROSS_TAGLIB_VERSION }}
outputs: |
type=image,name=${{ steps.docker.outputs.hub_repository }},push-by-digest=true,name-canonical=true,push=${{ steps.docker.outputs.hub_enabled }}
type=image,name=ghcr.io/${{ github.repository }},push-by-digest=true,name-canonical=true,push=true
- name: Export digest
if: env.IS_LINUX == 'true' && env.IS_DOCKER_PUSH_CONFIGURED == 'true' && env.IS_ARMV5 == 'false'
run: |
mkdir -p /tmp/digests
digest="${{ steps.push-image.outputs.digest }}"
touch "/tmp/digests/${digest#sha256:}"
- name: Upload digest
uses: actions/upload-artifact@v5
if: env.IS_LINUX == 'true' && env.IS_DOCKER_PUSH_CONFIGURED == 'true' && env.IS_ARMV5 == 'false'
with:
name: digests-${{ env.PLATFORM }}
path: /tmp/digests/*
if-no-files-found: error
retention-days: 1
push-manifest:
name: Push Docker manifest
runs-on: ubuntu-latest
needs: [build, check-push-enabled]
if: needs.check-push-enabled.outputs.is_enabled == 'true'
env:
REGISTRY_IMAGE: ghcr.io/${{ github.repository }}
steps:
- uses: actions/checkout@v5
- name: Download digests
uses: actions/download-artifact@v6
with:
path: /tmp/digests
pattern: digests-*
merge-multiple: true
- name: Prepare Docker Buildx
uses: ./.github/actions/prepare-docker
id: docker
with:
github_token: ${{ secrets.GITHUB_TOKEN }}
hub_repository: ${{ vars.DOCKER_HUB_REPO }}
hub_username: ${{ secrets.DOCKER_HUB_USERNAME }}
hub_password: ${{ secrets.DOCKER_HUB_PASSWORD }}
- name: Create manifest list and push to ghcr.io
working-directory: /tmp/digests
run: |
docker buildx imagetools create $(jq -cr '.tags | map("-t " + .) | join(" ")' <<< "$DOCKER_METADATA_OUTPUT_JSON") \
$(printf '${{ env.REGISTRY_IMAGE }}@sha256:%s ' *)
- name: Create manifest list and push to Docker Hub
working-directory: /tmp/digests
if: vars.DOCKER_HUB_REPO != ''
run: |
docker buildx imagetools create $(jq -cr '.tags | map("-t " + .) | join(" ")' <<< "$DOCKER_METADATA_OUTPUT_JSON") \
$(printf '${{ vars.DOCKER_HUB_REPO }}@sha256:%s ' *)
- name: Inspect image in ghcr.io
run: |
docker buildx imagetools inspect ${{ env.REGISTRY_IMAGE }}:${{ steps.docker.outputs.version }}
- name: Inspect image in Docker Hub
if: vars.DOCKER_HUB_REPO != ''
run: |
docker buildx imagetools inspect ${{ vars.DOCKER_HUB_REPO }}:${{ steps.docker.outputs.version }}
- name: Delete unnecessary digest artifacts
env:
GH_TOKEN: ${{ github.token }}
run: |
for artifact in $(gh api repos/${{ github.repository }}/actions/artifacts | jq -r '.artifacts[] | select(.name | startswith("digests-")) | .id'); do
gh api --method DELETE repos/${{ github.repository }}/actions/artifacts/$artifact
done
msi:
name: Build Windows installers
needs: [build, git-version]
runs-on: ubuntu-24.04
steps:
- uses: actions/checkout@v5
- uses: actions/download-artifact@v6
with:
path: ./binaries
pattern: navidrome-windows*
merge-multiple: true
- name: Install Wix
run: sudo apt-get install -y wixl jq
- name: Build MSI
env:
GIT_TAG: ${{ needs.git-version.outputs.git_tag }}
run: |
rm -rf binaries/msi
sudo GIT_TAG=$GIT_TAG release/wix/build_msi.sh ${GITHUB_WORKSPACE} 386
sudo GIT_TAG=$GIT_TAG release/wix/build_msi.sh ${GITHUB_WORKSPACE} amd64
du -h binaries/msi/*.msi
- name: Upload MSI files
uses: actions/upload-artifact@v5
with:
name: navidrome-windows-installers
path: binaries/msi/*.msi
retention-days: 7
release:
name: Package/Release
needs: [build, msi]
runs-on: ubuntu-latest
outputs:
package_list: ${{ steps.set-package-list.outputs.package_list }}
steps:
- uses: actions/checkout@v5
- name: Checkout Code
uses: actions/checkout@v3
with:
fetch-depth: 0
fetch-tags: true
- uses: actions/download-artifact@v6
- uses: actions/download-artifact@v3
with:
path: ./binaries
pattern: navidrome-*
merge-multiple: true
name: js-bundle
path: ui/build
- run: ls -lR ./binaries
- name: Set RELEASE_FLAGS for snapshot releases
if: env.IS_RELEASE == 'false'
run: echo 'RELEASE_FLAGS=--skip=publish --snapshot' >> $GITHUB_ENV
- name: Run GoReleaser
uses: goreleaser/goreleaser-action@v6
with:
version: '~> v2'
args: "release --clean -f release/goreleaser.yml ${{ env.RELEASE_FLAGS }}"
- name: Config /github/workspace folder as trusted
uses: docker://deluan/ci-goreleaser:1.20.3-1
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
- name: Remove build artifacts
run: |
ls -l ./dist
rm ./dist/*.tar.gz ./dist/*.zip
- name: Upload all-packages artifact
uses: actions/upload-artifact@v5
with:
name: packages
path: dist/navidrome_0*
args: /bin/bash -c "git config --global --add safe.directory /github/workspace; git describe --dirty --always --tags"
- id: set-package-list
name: Export list of generated packages
run: |
cd dist
set +x
ITEMS=$(ls navidrome_0* | sed 's/^navidrome_0[^_]*_linux_//' | jq -R -s -c 'split("\n")[:-1]')
echo $ITEMS
echo "package_list=${ITEMS}" >> $GITHUB_OUTPUT
- name: Run GoReleaser - SNAPSHOT
if: startsWith(github.ref, 'refs/tags/') != true
uses: docker://deluan/ci-goreleaser:1.20.3-1
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
with:
args: goreleaser release --rm-dist --skip-publish --snapshot
upload-packages:
name: Upload Linux PKG
- name: Run GoReleaser - RELEASE
if: startsWith(github.ref, 'refs/tags/')
uses: docker://deluan/ci-goreleaser:1.20.3-1
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
with:
args: goreleaser release --rm-dist
- uses: actions/upload-artifact@v3
with:
name: binaries
path: |
dist
!dist/*.tar.gz
!dist/*.zip
docker:
name: Build and publish Docker images
needs: [binaries]
runs-on: ubuntu-latest
needs: [release]
strategy:
matrix:
item: ${{ fromJson(needs.release.outputs.package_list) }}
env:
DOCKER_IMAGE: ${{secrets.DOCKER_IMAGE}}
steps:
- name: Download all-packages artifact
uses: actions/download-artifact@v6
with:
name: packages
path: ./dist
- name: Set up QEMU
id: qemu
uses: docker/setup-qemu-action@v2
if: env.DOCKER_IMAGE != ''
- name: Upload all-packages artifact
uses: actions/upload-artifact@v5
with:
name: navidrome_linux_${{ matrix.item }}
path: dist/navidrome_0*_linux_${{ matrix.item }}
- name: Set up Docker Buildx
id: buildx
uses: docker/setup-buildx-action@v2
if: env.DOCKER_IMAGE != ''
# delete-artifacts:
# name: Delete unused artifacts
# runs-on: ubuntu-latest
# needs: [upload-packages]
# steps:
# - name: Delete all-packages artifact
# env:
# GH_TOKEN: ${{ github.token }}
# run: |
# for artifact in $(gh api repos/${{ github.repository }}/actions/artifacts | jq -r '.artifacts[] | select(.name | startswith("packages")) | .id'); do
# gh api --method DELETE repos/${{ github.repository }}/actions/artifacts/$artifact
# done
- uses: actions/checkout@v3
if: env.DOCKER_IMAGE != ''
- uses: actions/download-artifact@v3
if: env.DOCKER_IMAGE != ''
with:
name: binaries
path: dist
- name: Login to Docker Hub
if: env.DOCKER_IMAGE != ''
uses: docker/login-action@v2
with:
username: ${{ secrets.DOCKER_USERNAME }}
password: ${{ secrets.DOCKER_PASSWORD }}
- name: Login to GitHub Container Registry
if: env.DOCKER_IMAGE != ''
uses: docker/login-action@v2
with:
registry: ghcr.io
username: ${{ github.actor }}
password: ${{ secrets.GITHUB_TOKEN }}
- name: Extract metadata for Docker
if: env.DOCKER_IMAGE != ''
id: meta
uses: docker/metadata-action@v4
with:
labels: |
maintainer=deluan
images: |
name=${{secrets.DOCKER_IMAGE}},enable=${{env.GITHUB_REF_TYPE == 'tag' || github.ref == format('refs/heads/{0}', 'master')}}
name=ghcr.io/${{ github.repository }}
tags: |
type=ref,event=pr
type=semver,pattern={{version}}
type=raw,value=develop,enable={{is_default_branch}}
- name: Build and Push
if: env.DOCKER_IMAGE != ''
uses: docker/build-push-action@v4
with:
context: .
file: .github/workflows/pipeline.dockerfile
platforms: linux/amd64,linux/386,linux/arm/v6,linux/arm/v7,linux/arm64
push: true
tags: ${{ steps.meta.outputs.tags }}


@@ -12,9 +12,8 @@ jobs:
pull-requests: write
runs-on: ubuntu-latest
steps:
- uses: dessant/lock-threads@v5
- uses: dessant/lock-threads@v4.0.0
with:
process-only: 'issues, prs'
issue-inactive-days: 120
pr-inactive-days: 120
log-output: true
@@ -28,7 +27,7 @@ jobs:
This pull request has been automatically locked since there
has not been any recent activity after it was closed.
Please open a new issue for related bugs.
- uses: actions/stale@v9
- uses: actions/stale@v7
with:
operations-per-run: 999
days-before-issue-stale: 180


@@ -1,93 +0,0 @@
#!/bin/sh
set -e
I18N_DIR=resources/i18n
# Function to process JSON: remove empty attributes and sort
process_json() {
jq 'walk(if type == "object" then with_entries(select(.value != null and .value != "" and .value != [] and .value != {})) | to_entries | sort_by(.key) | from_entries else . end)' "$1"
}
# Function to check differences between local and remote translations
check_lang_diff() {
filename=${I18N_DIR}/"$1".json
url=$(curl -s -X POST https://poeditor.com/api/ \
-d api_token="${POEDITOR_APIKEY}" \
-d action="export" \
-d id="${POEDITOR_PROJECTID}" \
-d language="$1" \
-d type="key_value_json" | jq -r .item)
if [ -z "$url" ]; then
echo "Failed to export $1"
return 1
fi
curl -sSL "$url" > poeditor.json
process_json "$filename" > "$filename".tmp
process_json poeditor.json > poeditor.tmp
diff=$(diff -u "$filename".tmp poeditor.tmp) || true
if [ -n "$diff" ]; then
echo "$diff"
mv poeditor.json "$filename"
fi
rm -f poeditor.json poeditor.tmp "$filename".tmp
}
# Function to get the list of languages
get_language_list() {
response=$(curl -s -X POST https://api.poeditor.com/v2/languages/list \
-d api_token="${POEDITOR_APIKEY}" \
-d id="${POEDITOR_PROJECTID}")
echo $response
}
# Function to get the language name from the language code
get_language_name() {
lang_code="$1"
lang_list="$2"
lang_name=$(echo "$lang_list" | jq -r ".result.languages[] | select(.code == \"$lang_code\") | .name")
if [ -z "$lang_name" ]; then
echo "Error: Language code '$lang_code' not found" >&2
return 1
fi
echo "$lang_name"
}
# Function to get the language code from the file path
get_lang_code() {
filepath="$1"
# Extract just the filename
filename=$(basename "$filepath")
# Remove the extension
lang_code="${filename%.*}"
echo "$lang_code"
}
lang_list=$(get_language_list)
# Check differences for each language
for file in ${I18N_DIR}/*.json; do
code=$(get_lang_code "$file")
lang=$(jq -r .languageName < "$file")
lang_name=$(get_language_name "$code" "$lang_list")
echo "Downloading $lang_name - $lang ($code)"
check_lang_diff "$code"
done
# List changed languages to stderr
languages=""
for file in $(git diff --name-only --exit-code | grep json); do
lang_code=$(get_lang_code "$file")
lang_name=$(get_language_name "$lang_code" "$lang_list")
languages="${languages}$(echo "$lang_name" | tr -d '\n'), "
done
echo "${languages%??}" 1>&2


@@ -8,26 +8,21 @@ jobs:
runs-on: ubuntu-latest
if: ${{ github.repository_owner == 'navidrome' }}
steps:
- uses: actions/checkout@v5
- uses: actions/checkout@v3
- name: Get updated translations
id: poeditor
env:
POEDITOR_PROJECTID: ${{ secrets.POEDITOR_PROJECTID }}
POEDITOR_APIKEY: ${{ secrets.POEDITOR_APIKEY }}
run: |
.github/workflows/update-translations.sh 2> title.tmp
title=$(cat title.tmp)
echo "::set-output name=title::$title"
rm title.tmp
./update-translations.sh
- name: Show changes, if any
run: |
git status --porcelain
git diff
- name: Create Pull Request
uses: peter-evans/create-pull-request@v7
uses: peter-evans/create-pull-request@v4
with:
token: ${{ secrets.PAT }}
author: "navidrome-bot <navidrome-bot@navidrome.org>"
commit-message: "fix(ui): update ${{ steps.poeditor.outputs.title }} translations from POEditor"
title: "fix(ui): update ${{ steps.poeditor.outputs.title }} translations from POEditor"
commit-message: Update translations
title: Update translations from POEditor
branch: update-translations


@@ -1,236 +0,0 @@
#!/bin/bash
# validate-translations.sh
#
# This script validates the structure of JSON translation files by comparing them
# against the reference English translation file (ui/src/i18n/en.json).
#
# The script performs the following validations:
# 1. JSON syntax validation using jq
# 2. Structural validation - ensures all keys from English file are present
# 3. Reports missing keys (translation incomplete)
# 4. Reports extra keys (keys not in English reference, possibly deprecated)
# 5. Emits GitHub Actions annotations for CI/CD integration
#
# Usage:
# ./validate-translations.sh
#
# Environment Variables:
# EN_FILE - Path to reference English file (default: ui/src/i18n/en.json)
# TRANSLATION_DIR - Directory containing translation files (default: resources/i18n)
#
# Exit codes:
# 0 - All translations are valid
# 1 - One or more translations have structural issues
#
# GitHub Actions Integration:
# The script outputs GitHub Actions annotations using ::error and ::warning
# format that will be displayed in PR checks and workflow summaries.
# Script to validate JSON translation files structure against en.json
set -e
# Path to the reference English translation file
EN_FILE="${EN_FILE:-ui/src/i18n/en.json}"
TRANSLATION_DIR="${TRANSLATION_DIR:-resources/i18n}"
VERBOSE=false
# Parse command line arguments
while [[ $# -gt 0 ]]; do
case "$1" in
-v|--verbose)
VERBOSE=true
shift
;;
-h|--help)
echo "Usage: $0 [options]"
echo ""
echo "Validates JSON translation files structure against English reference file."
echo ""
echo "Options:"
echo " -h, --help Show this help message"
echo " -v, --verbose Show detailed output (default: only show errors)"
echo ""
echo "Environment Variables:"
echo " EN_FILE Path to reference English file (default: ui/src/i18n/en.json)"
echo " TRANSLATION_DIR Directory with translation files (default: resources/i18n)"
echo ""
echo "Examples:"
echo " $0 # Validate all translation files (quiet mode)"
echo " $0 -v # Validate with detailed output"
echo " EN_FILE=custom/en.json $0 # Use custom reference file"
echo " TRANSLATION_DIR=custom/i18n $0 # Use custom translations directory"
exit 0
;;
*)
echo "Unknown option: $1" >&2
echo "Use --help for usage information" >&2
exit 1
;;
esac
done
# Color codes for output
RED='\033[0;31m'
GREEN='\033[0;32m'
YELLOW='\033[1;33m'
NC='\033[0m' # No Color
if [[ "$VERBOSE" == "true" ]]; then
echo "Validating translation files structure against ${EN_FILE}..."
fi
# Check if English reference file exists
if [[ ! -f "$EN_FILE" ]]; then
echo "::error::Reference file $EN_FILE not found"
exit 1
fi
# Function to extract all JSON keys from a file, creating a flat list of dot-separated paths
extract_keys() {
local file="$1"
jq -r 'paths(scalars) as $p | $p | join(".")' "$file" 2>/dev/null | sort
}
# Function to extract all non-empty string keys (to identify structural issues)
extract_structure_keys() {
local file="$1"
# Get only keys where values are not empty strings
jq -r 'paths(scalars) as $p | select(getpath($p) != "") | $p | join(".")' "$file" 2>/dev/null | sort
}
# Function to validate a single translation file
validate_translation() {
local translation_file="$1"
local filename=$(basename "$translation_file")
local has_errors=false
local verbose=${2:-false}
if [[ "$verbose" == "true" ]]; then
echo "Validating $filename..."
fi
# First validate JSON syntax
if ! jq empty "$translation_file" 2>/dev/null; then
echo "::error file=$translation_file::Invalid JSON syntax"
echo -e "${RED}$filename has invalid JSON syntax${NC}"
return 1
fi
# Extract all keys from both files (for statistics)
local en_keys_file=$(mktemp)
local translation_keys_file=$(mktemp)
extract_keys "$EN_FILE" > "$en_keys_file"
extract_keys "$translation_file" > "$translation_keys_file"
# Extract only non-empty structure keys (to validate structural issues)
local en_structure_file=$(mktemp)
local translation_structure_file=$(mktemp)
extract_structure_keys "$EN_FILE" > "$en_structure_file"
extract_structure_keys "$translation_file" > "$translation_structure_file"
# Find structural issues: keys in translation not in English (misplaced)
local extra_keys=$(comm -13 "$en_keys_file" "$translation_keys_file")
# Find missing keys (for statistics only)
local missing_keys=$(comm -23 "$en_keys_file" "$translation_keys_file")
# Count keys for statistics
local total_en_keys=$(wc -l < "$en_keys_file")
local total_translation_keys=$(wc -l < "$translation_keys_file")
local missing_count=0
local extra_count=0
if [[ -n "$missing_keys" ]]; then
missing_count=$(echo "$missing_keys" | grep -c '^' || echo 0)
fi
if [[ -n "$extra_keys" ]]; then
extra_count=$(echo "$extra_keys" | grep -c '^' || echo 0)
has_errors=true
fi
# Report extra/misplaced keys (these are structural issues)
if [[ -n "$extra_keys" ]]; then
if [[ "$verbose" == "true" ]]; then
echo -e "${YELLOW}Misplaced keys in $filename ($extra_count):${NC}"
fi
while IFS= read -r key; do
# Try to find the line number
line=$(grep -n "\"$(echo "$key" | sed 's/.*\.//')" "$translation_file" | head -1 | cut -d: -f1)
line=${line:-1} # Default to line 1 if not found
echo "::error file=$translation_file,line=$line::Misplaced key: $key"
if [[ "$verbose" == "true" ]]; then
echo " + $key (line ~$line)"
fi
done <<< "$extra_keys"
fi
# Clean up temp files
rm -f "$en_keys_file" "$translation_keys_file" "$en_structure_file" "$translation_structure_file"
# Print statistics
if [[ "$verbose" == "true" ]]; then
echo " Keys: $total_translation_keys/$total_en_keys (Missing: $missing_count, Extra/Misplaced: $extra_count)"
if [[ "$has_errors" == "true" ]]; then
echo -e "${RED}$filename has structural issues${NC}"
else
echo -e "${GREEN}$filename structure is valid${NC}"
fi
elif [[ "$has_errors" == "true" ]]; then
echo -e "${RED}$filename has structural issues (Extra/Misplaced: $extra_count)${NC}"
fi
return $([[ "$has_errors" == "true" ]] && echo 1 || echo 0)
}
# Main validation loop
validation_failed=false
total_files=0
failed_files=0
valid_files=0
for translation_file in "$TRANSLATION_DIR"/*.json; do
if [[ -f "$translation_file" ]]; then
total_files=$((total_files + 1))
if ! validate_translation "$translation_file" "$VERBOSE"; then
validation_failed=true
failed_files=$((failed_files + 1))
else
valid_files=$((valid_files + 1))
fi
if [[ "$VERBOSE" == "true" ]]; then
echo "" # Add spacing between files
fi
fi
done
# Summary
if [[ "$VERBOSE" == "true" ]]; then
echo "========================================="
echo "Translation Validation Summary:"
echo " Total files: $total_files"
echo " Valid files: $valid_files"
echo " Files with structural issues: $failed_files"
echo "========================================="
fi
if [[ "$validation_failed" == "true" ]]; then
if [[ "$VERBOSE" == "true" ]]; then
echo -e "${RED}Translation validation failed - $failed_files file(s) have structural issues${NC}"
else
echo -e "${RED}Translation validation failed - $failed_files/$total_files file(s) have structural issues${NC}"
fi
exit 1
elif [[ "$VERBOSE" == "true" ]]; then
echo -e "${GREEN}All translation files are structurally valid${NC}"
fi
exit 0

.gitignore

@@ -5,30 +5,24 @@
/navidrome
/iTunes*.xml
/tmp
/bin
data/*
vendor/*/
wiki
TODO.md
var
navidrome.toml
!release/linux/navidrome.toml
master.zip
testDB
navidrome.db
cache/*
*.swp
embedded_gen.go
dist
music
*.db*
navidrome.db-shm
navidrome.db-wal
tags
.gitinfo
docker-compose.yml
!contrib/docker-compose.yml
binaries
navidrome-*
AGENTS.md
.github/prompts
.github/instructions
.github/git-commit-instructions.md
*.exe
*.test
*.wasm
/api/openapi.yaml


@@ -1,58 +1,35 @@
version: "2"
run:
build-tags:
- netgo
go: "1.19"
linters:
enable:
- asasalint
- asciicheck
- bidichk
- bodyclose
- copyloopvar
- dogsled
- durationcheck
- errcheck
- errorlint
- gocritic
- exportloopref
- gocyclo
- goprintffuncname
- gosec
- gosimple
- govet
- ineffassign
- misspell
- nakedret
- nilerr
- rowserrcheck
- unconvert
- whitespace
disable:
- staticcheck
settings:
gocritic:
disable-all: true
enabled-checks:
- deprecatedComment
gosec:
excludes:
- G501
- G401
- G505
- G115
govet:
enable:
- nilness
exclusions:
generated: lax
presets:
- comments
- common-false-positives
- legacy
- std-error-handling
paths:
- third_party$
- builtin$
- examples$
formatters:
exclusions:
generated: lax
paths:
- third_party$
- builtin$
- examples$
- typecheck
- unconvert
- unused
- whitespace
issues:
exclude-rules:
- linters:
- gosec
text: "(G501|G401|G505):"

.goreleaser.yml

@@ -0,0 +1,139 @@
# GoReleaser config
project_name: navidrome
builds:
- id: navidrome_linux_amd64
env:
- CGO_ENABLED=1
goos:
- linux
goarch:
- amd64
flags:
- -tags=netgo
ldflags:
- "-extldflags '-static -lz'"
- -s -w -X github.com/navidrome/navidrome/consts.gitSha={{.ShortCommit}} -X github.com/navidrome/navidrome/consts.gitTag={{.Version}}
- id: navidrome_linux_386
env:
- CGO_ENABLED=1
- PKG_CONFIG_PATH=/i386/lib/pkgconfig
goos:
- linux
goarch:
- "386"
flags:
- -tags=netgo
ldflags:
- "-extldflags '-static'"
- -s -w -X github.com/navidrome/navidrome/consts.gitSha={{.ShortCommit}} -X github.com/navidrome/navidrome/consts.gitTag={{.Version}}
- id: navidrome_linux_arm
env:
- CGO_ENABLED=1
- CC=arm-linux-gnueabi-gcc
- CXX=arm-linux-gnueabi-g++
- PKG_CONFIG_PATH=/arm/lib/pkgconfig
goos:
- linux
goarch:
- arm
goarm:
- "5"
- "6"
- "7"
flags:
- -tags=netgo
ldflags:
- "-extldflags '-static'"
- -s -w -X github.com/navidrome/navidrome/consts.gitSha={{.ShortCommit}} -X github.com/navidrome/navidrome/consts.gitTag={{.Version}}
- id: navidrome_linux_arm64
env:
- CGO_ENABLED=1
- CC=aarch64-linux-gnu-gcc
- CXX=aarch64-linux-gnu-g++
- PKG_CONFIG_PATH=/arm64/lib/pkgconfig
goos:
- linux
goarch:
- arm64
flags:
- -tags=netgo
ldflags:
- "-extldflags '-static'"
- -s -w -X github.com/navidrome/navidrome/consts.gitSha={{.ShortCommit}} -X github.com/navidrome/navidrome/consts.gitTag={{.Version}}
- id: navidrome_windows_386
env:
- CGO_ENABLED=1
- CC=i686-w64-mingw32-gcc
- CXX=i686-w64-mingw32-g++
- PKG_CONFIG_PATH=/mingw32/lib/pkgconfig
goos:
- windows
goarch:
- "386"
flags:
- -tags=netgo
ldflags:
- "-extldflags '-static'"
- -s -w -X github.com/navidrome/navidrome/consts.gitSha={{.ShortCommit}} -X github.com/navidrome/navidrome/consts.gitTag={{.Version}}
- id: navidrome_windows_amd64
env:
- CGO_ENABLED=1
- CC=x86_64-w64-mingw32-gcc
- CXX=x86_64-w64-mingw32-g++
- PKG_CONFIG_PATH=/mingw64/lib/pkgconfig
goos:
- windows
goarch:
- amd64
flags:
- -tags=netgo
ldflags:
- "-extldflags '-static'"
- -s -w -X github.com/navidrome/navidrome/consts.gitSha={{.ShortCommit}} -X github.com/navidrome/navidrome/consts.gitTag={{.Version}}
- id: navidrome_darwin_amd64
env:
- CGO_ENABLED=1
- CC=o64-clang
- CXX=o64-clang++
- PKG_CONFIG_PATH=/darwin/lib/pkgconfig
goos:
- darwin
goarch:
- amd64
flags:
- -tags=netgo
ldflags:
- -s -w -X github.com/navidrome/navidrome/consts.gitSha={{.ShortCommit}} -X github.com/navidrome/navidrome/consts.gitTag={{.Version}}
archives:
- format_overrides:
- goos: windows
format: zip
replacements:
darwin: macOS
linux: Linux
windows: Windows
386: i386
amd64: x86_64
checksum:
name_template: "{{ .ProjectName }}_checksums.txt"
snapshot:
name_template: "{{ .Tag }}-SNAPSHOT"
release:
draft: true
changelog:
# sort: asc
filters:
exclude:
- "^docs:"

.nvmrc

@@ -1 +1 @@
v24
v18


@@ -48,15 +48,14 @@ This improves the readability of the messages
It can be one of the following:
1. **feat**: Addition of a new feature
2. **fix**: Bug fix
3. **sec**: Fixing security issues
4. **docs**: Documentation Changes
5. **style**: Changes to styling
6. **refactor**: Refactoring of code
7. **perf**: Code that affects performance
8. **test**: Updating or improving the current tests
9. **build**: Changes to Build process
10. **revert**: Reverting to a previous commit
11. **chore** : updating grunt tasks etc
3. **docs**: Documentation Changes
4. **style**: Changes to styling
5. **refactor**: Refactoring of code
6. **perf**: Code that affects performance
7. **test**: Updating or improving the current tests
8. **build**: Changes to Build process
9. **revert**: Reverting to a previous commit
10. **chore** : updating grunt tasks etc
If there is a breaking change in your Pull Request, please add `BREAKING CHANGE` in the optional body section


@@ -1,147 +0,0 @@
FROM --platform=$BUILDPLATFORM ghcr.io/crazy-max/osxcross:14.5-debian AS osxcross
########################################################################################################################
### Build xx (original image: tonistiigi/xx)
FROM --platform=$BUILDPLATFORM public.ecr.aws/docker/library/alpine:3.19 AS xx-build
# v1.5.0
ENV XX_VERSION=b4e4c451c778822e6742bfc9d9a91d7c7d885c8a
RUN apk add -U --no-cache git
RUN git clone https://github.com/tonistiigi/xx && \
cd xx && \
git checkout ${XX_VERSION} && \
mkdir -p /out && \
cp src/xx-* /out/
RUN cd /out && \
ln -s xx-cc /out/xx-clang && \
ln -s xx-cc /out/xx-clang++ && \
ln -s xx-cc /out/xx-c++ && \
ln -s xx-apt /out/xx-apt-get
# xx mimics the original tonistiigi/xx image
FROM scratch AS xx
COPY --from=xx-build /out/ /usr/bin/
########################################################################################################################
### Get TagLib
FROM --platform=$BUILDPLATFORM public.ecr.aws/docker/library/alpine:3.19 AS taglib-build
ARG TARGETPLATFORM
ARG CROSS_TAGLIB_VERSION=2.1.1-1
ENV CROSS_TAGLIB_RELEASES_URL=https://github.com/navidrome/cross-taglib/releases/download/v${CROSS_TAGLIB_VERSION}/
# wget in busybox can't follow redirects
RUN <<EOT
apk add --no-cache wget
PLATFORM=$(echo ${TARGETPLATFORM} | tr '/' '-')
FILE=taglib-${PLATFORM}.tar.gz
DOWNLOAD_URL=${CROSS_TAGLIB_RELEASES_URL}${FILE}
wget ${DOWNLOAD_URL}
mkdir /taglib
tar -xzf ${FILE} -C /taglib
EOT
########################################################################################################################
### Build Navidrome UI
FROM --platform=$BUILDPLATFORM public.ecr.aws/docker/library/node:lts-alpine AS ui
WORKDIR /app
# Install node dependencies
COPY ui/package.json ui/package-lock.json ./
COPY ui/bin/ ./bin/
RUN npm ci
# Build bundle
COPY ui/ ./
RUN npm run build -- --outDir=/build
FROM scratch AS ui-bundle
COPY --from=ui /build /build
########################################################################################################################
### Build Navidrome binary
FROM --platform=$BUILDPLATFORM public.ecr.aws/docker/library/golang:1.25-bookworm AS base
RUN apt-get update && apt-get install -y clang lld
COPY --from=xx / /
WORKDIR /workspace
FROM --platform=$BUILDPLATFORM base AS build
# Install build dependencies for the target platform
ARG TARGETPLATFORM
RUN xx-apt install -y binutils gcc g++ libc6-dev zlib1g-dev
RUN xx-verify --setup
RUN --mount=type=bind,source=. \
--mount=type=cache,target=/root/.cache \
--mount=type=cache,target=/go/pkg/mod \
go mod download
ARG GIT_SHA
ARG GIT_TAG
RUN --mount=type=bind,source=. \
--mount=from=ui,source=/build,target=./ui/build,ro \
--mount=from=osxcross,src=/osxcross/SDK,target=/xx-sdk,ro \
--mount=type=cache,target=/root/.cache \
--mount=type=cache,target=/go/pkg/mod \
--mount=from=taglib-build,target=/taglib,src=/taglib,ro <<EOT
# Setup CGO cross-compilation environment
xx-go --wrap
export CGO_ENABLED=1
export PKG_CONFIG_PATH=/taglib/lib/pkgconfig
cat $(go env GOENV)
# Only Darwin (macOS) requires clang (default), Windows requires gcc, everything else can use any compiler.
# So let's use gcc for everything except Darwin.
if [ "$(xx-info os)" != "darwin" ]; then
export CC=$(xx-info)-gcc
export CXX=$(xx-info)-g++
export LD_EXTRA="-extldflags '-static -latomic'"
fi
if [ "$(xx-info os)" = "windows" ]; then
export EXT=".exe"
fi
go build -tags=netgo -ldflags="${LD_EXTRA} -w -s \
-X github.com/navidrome/navidrome/consts.gitSha=${GIT_SHA} \
-X github.com/navidrome/navidrome/consts.gitTag=${GIT_TAG}" \
-o /out/navidrome${EXT} .
EOT
# Verify if the binary was built for the correct platform and it is statically linked
RUN xx-verify --static /out/navidrome*
FROM scratch AS binary
COPY --from=build /out /
########################################################################################################################
### Build Final Image
FROM public.ecr.aws/docker/library/alpine:3.19 AS final
LABEL maintainer="deluan@navidrome.org"
LABEL org.opencontainers.image.source="https://github.com/navidrome/navidrome"
# Install ffmpeg and mpv
RUN apk add -U --no-cache ffmpeg mpv sqlite
# Copy navidrome binary
COPY --from=build /out/navidrome /app/
VOLUME ["/data", "/music"]
ENV ND_MUSICFOLDER=/music
ENV ND_DATAFOLDER=/data
ENV ND_CONFIGFILE=/data/navidrome.toml
ENV ND_PORT=4533
ENV GODEBUG="asyncpreemptoff=1"
RUN touch /.nddockerenv
EXPOSE ${ND_PORT}
WORKDIR /app
ENTRYPOINT ["/app/navidrome"]
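The binary stage of this multi-stage Dockerfile is what the Makefile's docker-build target extracts during cross-compilation. A hedged example of pulling a single platform's binary out of it:

```sh
# Illustrative single-platform cross-compile using the multi-stage Dockerfile above;
# the GIT_TAG/GIT_SHA values are placeholders.
docker buildx build \
  --platform linux/arm64 \
  --build-arg GIT_TAG=v0.0.0-SNAPSHOT \
  --build-arg GIT_SHA=local \
  --build-arg CROSS_TAGLIB_VERSION=2.1.1-1 \
  --output ./binaries --target binary .
```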

Makefile

@@ -3,118 +3,77 @@ NODE_VERSION=$(shell cat .nvmrc)
ifneq ("$(wildcard .git/HEAD)","")
GIT_SHA=$(shell git rev-parse --short HEAD)
GIT_TAG=$(shell git describe --tags `git rev-list --tags --max-count=1`)-SNAPSHOT
GIT_TAG=$(shell git describe --tags `git rev-list --tags --max-count=1`)
else
GIT_SHA=source_archive
GIT_TAG=$(patsubst navidrome-%,v%,$(notdir $(PWD)))-SNAPSHOT
GIT_TAG=$(patsubst navidrome-%,v%,$(notdir $(PWD)))
endif
SUPPORTED_PLATFORMS ?= linux/amd64,linux/arm64,linux/arm/v5,linux/arm/v6,linux/arm/v7,linux/386,darwin/amd64,darwin/arm64,windows/amd64,windows/386
IMAGE_PLATFORMS ?= $(shell echo $(SUPPORTED_PLATFORMS) | tr ',' '\n' | grep "linux" | grep -v "arm/v5" | tr '\n' ',' | sed 's/,$$//')
PLATFORMS ?= $(SUPPORTED_PLATFORMS)
DOCKER_TAG ?= deluan/navidrome:develop
CI_RELEASER_VERSION=1.20.3-1 ## https://github.com/navidrome/ci-goreleaser
# Taglib version to use in cross-compilation, from https://github.com/navidrome/cross-taglib
CROSS_TAGLIB_VERSION ?= 2.1.1-1
GOLANGCI_LINT_VERSION ?= v2.5.0
UI_SRC_FILES := $(shell find ui -type f -not -path "ui/build/*" -not -path "ui/node_modules/*")
setup: check_env download-deps install-golangci-lint setup-git ##@1_Run_First Install dependencies and prepare development environment
setup: check_env download-deps setup-git ##@1_Run_First Install dependencies and prepare development environment
@echo Downloading Node dependencies...
@(cd ./ui && npm ci)
.PHONY: setup
dev: check_env ##@Development Start Navidrome in development mode, with hot-reload for both frontend and backend
ND_ENABLEINSIGHTSCOLLECTOR="false" npx foreman -j Procfile.dev -p 4533 start
npx foreman -j Procfile.dev -p 4533 start
.PHONY: dev
server: check_go_env buildjs ##@Development Start the backend in development mode
@ND_ENABLEINSIGHTSCOLLECTOR="false" go tool reflex -d none -c reflex.conf
server: check_go_env ##@Development Start the backend in development mode
@go run github.com/cespare/reflex@latest -d none -c reflex.conf
.PHONY: server
stop: ##@Development Stop development servers (UI and backend)
@echo "Stopping development servers..."
@-pkill -f "vite"
@-pkill -f "go tool reflex.*reflex.conf"
@-pkill -f "go run.*netgo"
@echo "Development servers stopped."
.PHONY: stop
watch: ##@Development Start Go tests in watch mode (re-run when code changes)
go tool ginkgo watch -tags=netgo -notify ./...
go run github.com/onsi/ginkgo/v2/ginkgo@latest watch -notify ./...
.PHONY: watch
PKG ?= ./...
test: ##@Development Run Go tests. Use PKG variable to specify packages to test, e.g. make test PKG=./server
go test -tags netgo $(PKG)
test: ##@Development Run Go tests
go test -race -shuffle=on ./...
.PHONY: test
testall: test-race test-i18n test-js ##@Development Run Go and JS tests
testall: test ##@Development Run Go and JS tests, and validate OpenAPI spec
@(cd ./ui && npm test -- --watchAll=false)
.PHONY: testall
test-race: ##@Development Run Go tests with race detector
go test -tags netgo -race -shuffle=on ./...
.PHONY: test-race
test-js: ##@Development Run JS tests
@(cd ./ui && npm run test)
.PHONY: test-js
test-i18n: ##@Development Validate all translations files
./.github/workflows/validate-translations.sh
.PHONY: test-i18n
install-golangci-lint: ##@Development Install golangci-lint if not present
@INSTALL=false; \
if PATH=$$PATH:./bin which golangci-lint > /dev/null 2>&1; then \
CURRENT_VERSION=$$(PATH=$$PATH:./bin golangci-lint version 2>/dev/null | grep -oE '[0-9]+\.[0-9]+\.[0-9]+' | head -n1); \
REQUIRED_VERSION=$$(echo "$(GOLANGCI_LINT_VERSION)" | sed 's/^v//'); \
if [ "$$CURRENT_VERSION" != "$$REQUIRED_VERSION" ]; then \
echo "Found golangci-lint $$CURRENT_VERSION, but $$REQUIRED_VERSION is required. Reinstalling..."; \
rm -f ./bin/golangci-lint; \
INSTALL=true; \
fi; \
else \
INSTALL=true; \
fi; \
if [ "$$INSTALL" = "true" ]; then \
echo "Installing golangci-lint $(GOLANGCI_LINT_VERSION)..."; \
curl -sSfL https://raw.githubusercontent.com/golangci/golangci-lint/HEAD/install.sh | sh -s $(GOLANGCI_LINT_VERSION); \
fi
.PHONY: install-golangci-lint
lint: install-golangci-lint ##@Development Lint Go code
PATH=$$PATH:./bin golangci-lint run -v --timeout 5m
lint: ##@Development Lint Go code
go run github.com/golangci/golangci-lint/cmd/golangci-lint@latest run -v --timeout 5m
.PHONY: lint
lintall: lint ##@Development Lint Go and JS code
lintapi: api/openapi.yaml ##@Development Lint OpenAPI spec
npx @redocly/cli lint api/openapi.yaml
.PHONY: lintapi
lintall: lint lintapi ##@Development Lint Go and JS code
@(cd ./ui && npm run check-formatting) || (echo "\n\nPlease run 'npm run prettier' to fix formatting issues." && exit 1)
@(cd ./ui && npm run lint)
.PHONY: lintall
format: ##@Development Format code
@(cd ./ui && npm run prettier)
@go tool goimports -w `find . -name '*.go' | grep -v _gen.go$$ | grep -v .pb.go$$`
@go mod tidy
.PHONY: format
wire: check_go_env ##@Development Update Dependency Injection
go tool wire gen -tags=netgo ./...
gen: check_go_env api ##@Development Update Generated Code (wire, mocks, openapi, etc)
go run github.com/google/wire/cmd/wire@latest ./...
.PHONY: wire
api: check_go_env api/openapi.yaml
go generate ./server/api/...
.PHONY: api
spec_parts=$(shell find api -name '*.yml')
api/openapi.yaml: $(spec_parts)
@echo "Bundling OpenAPI spec..."
npx @redocly/cli bundle api/spec.yml -o api/openapi.yaml
snapshots: ##@Development Update (GoLang) Snapshot tests
UPDATE_SNAPSHOTS=true go tool ginkgo ./server/subsonic/responses/...
UPDATE_SNAPSHOTS=true go run github.com/onsi/ginkgo/v2/ginkgo@latest ./server/subsonic/...
.PHONY: snapshots
migration-sql: ##@Development Create an empty SQL migration file
@if [ -z "${name}" ]; then echo "Usage: make migration-sql name=name_of_migration_file"; exit 1; fi
go run github.com/pressly/goose/v3/cmd/goose@latest -dir db/migrations create ${name} sql
go run github.com/pressly/goose/v3/cmd/goose@latest -dir db/migration create ${name} sql
.PHONY: migration
migration-go: ##@Development Create an empty Go migration file
@if [ -z "${name}" ]; then echo "Usage: make migration-go name=name_of_migration_file"; exit 1; fi
go run github.com/pressly/goose/v3/cmd/goose@latest -dir db/migrations create ${name}
go run github.com/pressly/goose/v3/cmd/goose@latest -dir db/migration create ${name}
.PHONY: migration
setup-dev: setup
@@ -126,90 +85,45 @@ setup-git: ##@Development Setup Git hooks (pre-commit and pre-push)
@(cd .git/hooks && ln -sf ../../git/* .)
.PHONY: setup-git
build: check_go_env buildjs ##@Build Build the project
go build -ldflags="-X github.com/navidrome/navidrome/consts.gitSha=$(GIT_SHA) -X github.com/navidrome/navidrome/consts.gitTag=$(GIT_TAG)" -tags=netgo
.PHONY: build
buildall: deprecated build
buildall: buildjs build ##@Build Build the project, both frontend and backend
.PHONY: buildall
debug-build: check_go_env buildjs ##@Build Build the project (with remote debug on)
go build -gcflags="all=-N -l" -ldflags="-X github.com/navidrome/navidrome/consts.gitSha=$(GIT_SHA) -X github.com/navidrome/navidrome/consts.gitTag=$(GIT_TAG)" -tags=netgo
.PHONY: debug-build
build: warning-noui-build check_go_env ##@Build Build only backend
go build -ldflags="-X github.com/navidrome/navidrome/consts.gitSha=$(GIT_SHA) -X github.com/navidrome/navidrome/consts.gitTag=$(GIT_TAG)-SNAPSHOT" -tags=netgo
.PHONY: build
buildjs: check_node_env ui/build/index.html ##@Build Build only frontend
buildjs: check_node_env ##@Build Build only frontend
@(cd ./ui && npm run build)
.PHONY: buildjs
docker-buildjs: ##@Build Build only frontend using Docker
docker build --output "./ui" --target ui-bundle .
.PHONY: docker-buildjs
all: warning-noui-build ##@Cross_Compilation Build binaries for all supported platforms. It does not build the frontend
docker run -t -v $(PWD):/workspace -w /workspace deluan/ci-goreleaser:$(CI_RELEASER_VERSION) \
goreleaser release --rm-dist --skip-publish --snapshot
.PHONY: all
ui/build/index.html: $(UI_SRC_FILES)
@(cd ./ui && npm run build)
single: warning-noui-build ##@Cross_Compilation Build binaries for a single supported platforms. It does not build the frontend
@if [ -z "${GOOS}" -o -z "${GOARCH}" ]; then \
echo "Usage: GOOS=<os> GOARCH=<arch> make single"; \
echo "Options:"; \
grep -- "- id: navidrome_" .goreleaser.yml | sed 's/- id: navidrome_//g'; \
exit 1; \
fi
@echo "Building binaries for ${GOOS}/${GOARCH}"
docker run -t -v $(PWD):/workspace -e GOOS -e GOARCH -w /workspace deluan/ci-goreleaser:$(CI_RELEASER_VERSION) \
goreleaser build --rm-dist --snapshot --single-target --id navidrome_${GOOS}_${GOARCH}
.PHONY: single
docker-platforms: ##@Cross_Compilation List supported platforms
@echo "Supported platforms:"
@echo "$(SUPPORTED_PLATFORMS)" | tr ',' '\n' | sort | sed 's/^/ /'
@echo "\nUsage: make PLATFORMS=\"linux/amd64\" docker-build"
@echo " make IMAGE_PLATFORMS=\"linux/amd64\" docker-image"
.PHONY: docker-platforms
docker-build: ##@Cross_Compilation Cross-compile for any supported platform (check `make docker-platforms`)
docker buildx build \
--platform $(PLATFORMS) \
--build-arg GIT_TAG=${GIT_TAG} \
--build-arg GIT_SHA=${GIT_SHA} \
--build-arg CROSS_TAGLIB_VERSION=${CROSS_TAGLIB_VERSION} \
--output "./binaries" --target binary .
.PHONY: docker-build
docker-image: ##@Cross_Compilation Build Docker image, tagged as `deluan/navidrome:develop`, override with DOCKER_TAG var. Use IMAGE_PLATFORMS to specify target platforms
@echo $(IMAGE_PLATFORMS) | grep -q "windows" && echo "ERROR: Windows is not supported for Docker builds" && exit 1 || true
@echo $(IMAGE_PLATFORMS) | grep -q "darwin" && echo "ERROR: macOS is not supported for Docker builds" && exit 1 || true
@echo $(IMAGE_PLATFORMS) | grep -q "arm/v5" && echo "ERROR: Linux ARMv5 is not supported for Docker builds" && exit 1 || true
docker buildx build \
--platform $(IMAGE_PLATFORMS) \
--build-arg GIT_TAG=${GIT_TAG} \
--build-arg GIT_SHA=${GIT_SHA} \
--build-arg CROSS_TAGLIB_VERSION=${CROSS_TAGLIB_VERSION} \
--tag $(DOCKER_TAG) .
.PHONY: docker-image
docker-msi: ##@Cross_Compilation Build MSI installer for Windows
make docker-build PLATFORMS=windows/386,windows/amd64
DOCKER_CLI_HINTS=false docker build -q -t navidrome-msi-builder -f release/wix/msitools.dockerfile .
@rm -rf binaries/msi
docker run -it --rm -v $(PWD):/workspace -v $(PWD)/binaries:/workspace/binaries -e GIT_TAG=${GIT_TAG} \
navidrome-msi-builder sh -c "release/wix/build_msi.sh /workspace 386 && release/wix/build_msi.sh /workspace amd64"
@du -h binaries/msi/*.msi
.PHONY: docker-msi
run-docker: ##@Development Run a Navidrome Docker image. Usage: make run-docker tag=<tag>
@if [ -z "$(tag)" ]; then echo "Usage: make run-docker tag=<tag>"; exit 1; fi
@TAG_DIR="tmp/$$(echo '$(tag)' | tr '/:' '_')"; mkdir -p "$$TAG_DIR"; \
VOLUMES="-v $(PWD)/$$TAG_DIR:/data"; \
if [ -f navidrome.toml ]; then \
VOLUMES="$$VOLUMES -v $(PWD)/navidrome.toml:/data/navidrome.toml:ro"; \
MUSIC_FOLDER=$$(grep '^MusicFolder' navidrome.toml | head -n1 | sed 's/.*= *"//' | sed 's/".*//'); \
if [ -n "$$MUSIC_FOLDER" ] && [ -d "$$MUSIC_FOLDER" ]; then \
VOLUMES="$$VOLUMES -v $$MUSIC_FOLDER:/music:ro"; \
fi; \
fi; \
echo "Running: docker run --rm -p 4533:4533 $$VOLUMES $(tag)"; docker run --rm -p 4533:4533 $$VOLUMES $(tag)
.PHONY: run-docker
package: docker-build ##@Cross_Compilation Create binaries and packages for ALL supported platforms
@if [ -z `which goreleaser` ]; then echo "Please install goreleaser first: https://goreleaser.com/install/"; exit 1; fi
goreleaser release -f release/goreleaser.yml --clean --skip=publish --snapshot
.PHONY: package
warning-noui-build:
@echo "WARNING: This command does not build the frontend, it uses the latest built with 'make buildjs'"
.PHONY: warning-noui-build
get-music: ##@Development Download some free music from Navidrome's demo instance
mkdir -p music
( cd music; \
curl "https://demo.navidrome.org/rest/download?u=demo&p=demo&f=json&v=1.8.0&c=dev_download&id=2Y3qQA6zJC3ObbBrF9ZBoV" > brock.zip; \
curl "https://demo.navidrome.org/rest/download?u=demo&p=demo&f=json&v=1.8.0&c=dev_download&id=04HrSORpypcLGNUdQp37gn" > back_on_earth.zip; \
curl "https://demo.navidrome.org/rest/download?u=demo&p=demo&f=json&v=1.8.0&c=dev_download&id=5xcMPJdeEgNrGtnzYbzAqb" > ugress.zip; \
curl "https://demo.navidrome.org/rest/download?u=demo&p=demo&f=json&v=1.8.0&c=dev_download&id=1jjQMAZrG3lUsJ0YH6ZRS0" > voodoocuts.zip; \
curl "https://demo.navidrome.org/rest/download?u=demo&p=demo&f=json&v=1.8.0&c=dev_download&id=ec2093ec4801402f1e17cc462195cdbb" > brock.zip; \
curl "https://demo.navidrome.org/rest/download?u=demo&p=demo&f=json&v=1.8.0&c=dev_download&id=b376eeb4652d2498aa2b25ba0696725e" > back_on_earth.zip; \
curl "https://demo.navidrome.org/rest/download?u=demo&p=demo&f=json&v=1.8.0&c=dev_download&id=e49c609b542fc51899ee8b53aa858cb4" > ugress.zip; \
curl "https://demo.navidrome.org/rest/download?u=demo&p=demo&f=json&v=1.8.0&c=dev_download&id=350bcab3a4c1d93869e39ce496464f03" > voodoocuts.zip; \
for file in *.zip; do unzip -n $${file}; done )
@echo "Done. Remember to set your MusicFolder to ./music"
.PHONY: get-music
@@ -218,11 +132,6 @@ get-music: ##@Development Download some free music from Navidrome's demo instance
##########################################
#### Miscellaneous
clean:
@rm -rf ./binaries ./dist ./ui/build/*
@touch ./ui/build/.gitkeep
.PHONY: clean
release:
@if [[ ! "${V}" =~ ^[0-9]+\.[0-9]+\.[0-9]+.*$$ ]]; then echo "Usage: make release V=X.X.X"; exit 1; fi
go mod tidy
@@ -262,28 +171,6 @@ check_node_env:
pre-push: lintall testall
.PHONY: pre-push
deprecated:
@echo "WARNING: This target is deprecated and will be removed in future releases. Use 'make build' instead."
.PHONY: deprecated
# Generate Go code from plugins/api/api.proto
plugin-gen: check_go_env ##@Development Generate Go code from plugins protobuf files
go generate ./plugins/...
.PHONY: plugin-gen
plugin-examples: check_go_env ##@Development Build all example plugins
$(MAKE) -C plugins/examples clean all
.PHONY: plugin-examples
plugin-clean: check_go_env ##@Development Clean all plugins
$(MAKE) -C plugins/examples clean
$(MAKE) -C plugins/testdata clean
.PHONY: plugin-clean
plugin-tests: check_go_env ##@Development Build all test plugins
$(MAKE) -C plugins/testdata clean all
.PHONY: plugin-tests
.DEFAULT_GOAL := help
HELP_FUN = \

View File

@@ -1,2 +1,2 @@
JS: sh -c "cd ./ui && npm start"
GO: go tool reflex -d none -c reflex.conf
GO: go run github.com/cespare/reflex@latest -d none -c reflex.conf

View File

@@ -9,7 +9,6 @@
[![Dev Chat](https://img.shields.io/discord/671335427726114836?logo=discord&label=discord&style=flat-square)](https://discord.gg/xh7j7yF)
[![Subreddit](https://img.shields.io/reddit/subreddit-subscribers/navidrome?logo=reddit&label=/r/navidrome&style=flat-square)](https://www.reddit.com/r/navidrome/)
[![Contributor Covenant](https://img.shields.io/badge/Contributor%20Covenant-v2.0-ff69b4.svg?style=flat-square)](CODE_OF_CONDUCT.md)
[![Gurubase](https://img.shields.io/badge/Gurubase-Ask%20Navidrome%20Guru-006BFF?style=flat-square)](https://gurubase.io/g/navidrome)
Navidrome is an open source web-based music collection server and streamer. It gives you freedom to listen to your
music collection from any browser or mobile device. It's like your personal Spotify!
@@ -57,15 +56,6 @@ A share of the revenue helps fund the development of Navidrome at no additional
- **Transcoding** on the fly. Can be set per user/player. **Opus encoding is supported**
- Translated to **various languages**
## Translations
Navidrome uses [POEditor](https://poeditor.com/) for translations, and we are always looking
for [more contributors](https://www.navidrome.org/docs/developers/translations/)
<a href="https://poeditor.com/">
<img height="32" src="https://github.com/user-attachments/assets/c19b1d2b-01e1-4682-a007-12356c42147c">
</a>
## Documentation
All documentation can be found in the project's website: https://www.navidrome.org/docs.
Here are some useful direct links:

View File

@@ -1,278 +0,0 @@
package taglib
import (
"io/fs"
"os"
"time"
"github.com/djherbis/times"
"github.com/navidrome/navidrome/model"
"github.com/navidrome/navidrome/model/metadata"
"github.com/navidrome/navidrome/utils/gg"
. "github.com/onsi/ginkgo/v2"
. "github.com/onsi/gomega"
)
type testFileInfo struct {
fs.FileInfo
}
func (t testFileInfo) BirthTime() time.Time {
if ts := times.Get(t.FileInfo); ts.HasBirthTime() {
return ts.BirthTime()
}
return t.FileInfo.ModTime()
}
var _ = Describe("Extractor", func() {
toP := func(name, sortName, mbid string) model.Participant {
return model.Participant{
Artist: model.Artist{Name: name, SortArtistName: sortName, MbzArtistID: mbid},
}
}
roles := []struct {
model.Role
model.ParticipantList
}{
{model.RoleComposer, model.ParticipantList{
toP("coma a", "a, coma", "bf13b584-f27c-43db-8f42-32898d33d4e2"),
toP("comb", "comb", "924039a2-09c6-4d29-9b4f-50cc54447d36"),
}},
{model.RoleLyricist, model.ParticipantList{
toP("la a", "a, la", "c84f648f-68a6-40a2-a0cb-d135b25da3c2"),
toP("lb", "lb", "0a7c582d-143a-4540-b4e9-77200835af65"),
}},
{model.RoleArranger, model.ParticipantList{
toP("aa", "", "4605a1d4-8d15-42a3-bd00-9c20e42f71e6"),
toP("ab", "", "002f0ff8-77bf-42cc-8216-61a9c43dc145"),
}},
{model.RoleConductor, model.ParticipantList{
toP("cona", "", "af86879b-2141-42af-bad2-389a4dc91489"),
toP("conb", "", "3dfa3c70-d7d3-4b97-b953-c298dd305e12"),
}},
{model.RoleDirector, model.ParticipantList{
toP("dia", "", "f943187f-73de-4794-be47-88c66f0fd0f4"),
toP("dib", "", "bceb75da-1853-4b3d-b399-b27f0cafc389"),
}},
{model.RoleEngineer, model.ParticipantList{
toP("ea", "", "f634bf6d-d66a-425d-888a-28ad39392759"),
toP("eb", "", "243d64ae-d514-44e1-901a-b918d692baee"),
}},
{model.RoleProducer, model.ParticipantList{
toP("pra", "", "d971c8d7-999c-4a5f-ac31-719721ab35d6"),
toP("prb", "", "f0a09070-9324-434f-a599-6d25ded87b69"),
}},
{model.RoleRemixer, model.ParticipantList{
toP("ra", "", "c7dc6095-9534-4c72-87cc-aea0103462cf"),
toP("rb", "", "8ebeef51-c08c-4736-992f-c37870becedd"),
}},
{model.RoleDJMixer, model.ParticipantList{
toP("dja", "", "d063f13b-7589-4efc-ab7f-c60e6db17247"),
toP("djb", "", "3636670c-385f-4212-89c8-0ff51d6bc456"),
}},
{model.RoleMixer, model.ParticipantList{
toP("ma", "", "53fb5a2d-7016-427e-a563-d91819a5f35a"),
toP("mb", "", "64c13e65-f0da-4ab9-a300-71ee53b0376a"),
}},
}
var e *extractor
parseTestFile := func(path string) *model.MediaFile {
mds, err := e.Parse(path)
Expect(err).ToNot(HaveOccurred())
info, ok := mds[path]
Expect(ok).To(BeTrue())
fileInfo, err := os.Stat(path)
Expect(err).ToNot(HaveOccurred())
info.FileInfo = testFileInfo{FileInfo: fileInfo}
metadata := metadata.New(path, info)
mf := metadata.ToMediaFile(1, "folderID")
return &mf
}
BeforeEach(func() {
e = &extractor{}
})
Describe("ReplayGain", func() {
DescribeTable("test replaygain end-to-end", func(file string, trackGain, trackPeak, albumGain, albumPeak *float64) {
mf := parseTestFile("tests/fixtures/" + file)
Expect(mf.RGTrackGain).To(Equal(trackGain))
Expect(mf.RGTrackPeak).To(Equal(trackPeak))
Expect(mf.RGAlbumGain).To(Equal(albumGain))
Expect(mf.RGAlbumPeak).To(Equal(albumPeak))
},
Entry("mp3 with no replaygain", "no_replaygain.mp3", nil, nil, nil, nil),
Entry("mp3 with no zero replaygain", "zero_replaygain.mp3", gg.P(0.0), gg.P(1.0), gg.P(0.0), gg.P(1.0)),
)
})
Describe("lyrics", func() {
makeLyrics := func(code, secondLine string) model.Lyrics {
return model.Lyrics{
DisplayArtist: "",
DisplayTitle: "",
Lang: code,
Line: []model.Line{
{Start: gg.P(int64(0)), Value: "This is"},
{Start: gg.P(int64(2500)), Value: secondLine},
},
Offset: nil,
Synced: true,
}
}
It("should fetch both synced and unsynced lyrics in mixed flac", func() {
mf := parseTestFile("tests/fixtures/mixed-lyrics.flac")
lyrics, err := mf.StructuredLyrics()
Expect(err).ToNot(HaveOccurred())
Expect(lyrics).To(HaveLen(2))
Expect(lyrics[0].Synced).To(BeTrue())
Expect(lyrics[1].Synced).To(BeFalse())
})
It("should handle mp3 with uslt and sylt", func() {
mf := parseTestFile("tests/fixtures/test.mp3")
lyrics, err := mf.StructuredLyrics()
Expect(err).ToNot(HaveOccurred())
Expect(lyrics).To(HaveLen(4))
engSylt := makeLyrics("eng", "English SYLT")
engUslt := makeLyrics("eng", "English")
unsSylt := makeLyrics("xxx", "unspecified SYLT")
unsUslt := makeLyrics("xxx", "unspecified")
// Why is the order inconsistent between runs? Nobody knows
Expect(lyrics).To(Or(
Equal(model.LyricList{engSylt, engUslt, unsSylt, unsUslt}),
Equal(model.LyricList{unsSylt, unsUslt, engSylt, engUslt}),
))
})
DescribeTable("format-specific lyrics", func(file string, isId3 bool) {
mf := parseTestFile("tests/fixtures/" + file)
lyrics, err := mf.StructuredLyrics()
Expect(err).To(Not(HaveOccurred()))
Expect(lyrics).To(HaveLen(2))
unspec := makeLyrics("xxx", "unspecified")
eng := makeLyrics("xxx", "English")
if isId3 {
eng.Lang = "eng"
}
Expect(lyrics).To(Or(
Equal(model.LyricList{unspec, eng}),
Equal(model.LyricList{eng, unspec})))
},
Entry("flac", "test.flac", false),
Entry("m4a", "test.m4a", false),
Entry("ogg", "test.ogg", false),
Entry("wma", "test.wma", false),
Entry("wv", "test.wv", false),
Entry("wav", "test.wav", true),
Entry("aiff", "test.aiff", true),
)
})
Describe("Participants", func() {
DescribeTable("test tags consistent across formats", func(format string) {
mf := parseTestFile("tests/fixtures/test." + format)
for _, data := range roles {
role := data.Role
artists := data.ParticipantList
actual := mf.Participants[role]
Expect(actual).To(HaveLen(len(artists)))
for i := range artists {
actualArtist := actual[i]
expectedArtist := artists[i]
Expect(actualArtist.Name).To(Equal(expectedArtist.Name))
Expect(actualArtist.SortArtistName).To(Equal(expectedArtist.SortArtistName))
Expect(actualArtist.MbzArtistID).To(Equal(expectedArtist.MbzArtistID))
}
}
if format != "m4a" {
performers := mf.Participants[model.RolePerformer]
Expect(performers).To(HaveLen(8))
rules := map[string][]string{
"pgaa": {"2fd0b311-9fa8-4ff9-be5d-f6f3d16b835e", "Guitar"},
"pgbb": {"223d030b-bf97-4c2a-ad26-b7f7bbe25c93", "Guitar", ""},
"pvaa": {"cb195f72-448f-41c8-b962-3f3c13d09d38", "Vocals"},
"pvbb": {"60a1f832-8ca2-49f6-8660-84d57f07b520", "Vocals", "Flute"},
"pfaa": {"51fb40c-0305-4bf9-a11b-2ee615277725", "", "Flute"},
}
for name, rule := range rules {
mbid := rule[0]
for i := 1; i < len(rule); i++ {
found := false
for _, mapped := range performers {
if mapped.Name == name && mapped.MbzArtistID == mbid && mapped.SubRole == rule[i] {
found = true
break
}
}
Expect(found).To(BeTrue(), "Could not find matching artist")
}
}
}
},
Entry("FLAC format", "flac"),
Entry("M4a format", "m4a"),
Entry("OGG format", "ogg"),
Entry("WV format", "wv"),
Entry("MP3 format", "mp3"),
Entry("WAV format", "wav"),
Entry("AIFF format", "aiff"),
)
It("should parse wma", func() {
mf := parseTestFile("tests/fixtures/test.wma")
for _, data := range roles {
role := data.Role
artists := data.ParticipantList
actual := mf.Participants[role]
// WMA has no Arranger role
if role == model.RoleArranger {
Expect(actual).To(HaveLen(0))
continue
}
Expect(actual).To(HaveLen(len(artists)), role.String())
// For some bizarre reason, the order is inverted. We also don't get
// sort names or MBIDs
for i := range artists {
idx := len(artists) - 1 - i
actualArtist := actual[i]
expectedArtist := artists[idx]
Expect(actualArtist.Name).To(Equal(expectedArtist.Name))
}
}
})
})
})

View File

@@ -1,178 +0,0 @@
package taglib
import (
"io/fs"
"path/filepath"
"strconv"
"strings"
"time"
"github.com/navidrome/navidrome/conf"
"github.com/navidrome/navidrome/core/storage/local"
"github.com/navidrome/navidrome/log"
"github.com/navidrome/navidrome/model/metadata"
)
type extractor struct {
baseDir string
}
func (e extractor) Parse(files ...string) (map[string]metadata.Info, error) {
results := make(map[string]metadata.Info)
for _, path := range files {
props, err := e.extractMetadata(path)
if err != nil {
continue
}
results[path] = *props
}
return results, nil
}
func (e extractor) Version() string {
return Version()
}
func (e extractor) extractMetadata(filePath string) (*metadata.Info, error) {
fullPath := filepath.Join(e.baseDir, filePath)
tags, err := Read(fullPath)
if err != nil {
log.Warn("extractor: Error reading metadata from file. Skipping", "filePath", fullPath, err)
return nil, err
}
// Parse audio properties
ap := metadata.AudioProperties{}
ap.BitRate = parseProp(tags, "__bitrate")
ap.Channels = parseProp(tags, "__channels")
ap.SampleRate = parseProp(tags, "__samplerate")
ap.BitDepth = parseProp(tags, "__bitspersample")
length := parseProp(tags, "__lengthinmilliseconds")
ap.Duration = (time.Millisecond * time.Duration(length)).Round(time.Millisecond * 10)
// Extract basic tags
parseBasicTag(tags, "__title", "title")
parseBasicTag(tags, "__artist", "artist")
parseBasicTag(tags, "__album", "album")
parseBasicTag(tags, "__comment", "comment")
parseBasicTag(tags, "__genre", "genre")
parseBasicTag(tags, "__year", "year")
parseBasicTag(tags, "__track", "tracknumber")
// Parse track/disc totals
parseTuple := func(prop string) {
tagName := prop + "number"
tagTotal := prop + "total"
if value, ok := tags[tagName]; ok && len(value) > 0 {
parts := strings.Split(value[0], "/")
tags[tagName] = []string{parts[0]}
if len(parts) == 2 {
tags[tagTotal] = []string{parts[1]}
}
}
}
parseTuple("track")
parseTuple("disc")
// Adjust some ID3 tags
parseLyrics(tags)
parseTIPL(tags)
delete(tags, "tmcl") // TMCL is already parsed by TagLib
return &metadata.Info{
Tags: tags,
AudioProperties: ap,
HasPicture: tags["has_picture"] != nil && len(tags["has_picture"]) > 0 && tags["has_picture"][0] == "true",
}, nil
}
// parseLyrics makes sure lyrics tags have a language (unspecified lyrics get the "xxx" code)
func parseLyrics(tags map[string][]string) {
lyrics := tags["lyrics"]
if len(lyrics) > 0 {
tags["lyrics:xxx"] = lyrics
delete(tags, "lyrics")
}
}
// These are the only roles we support, based on Picard's tag map:
// https://picard-docs.musicbrainz.org/downloads/MusicBrainz_Picard_Tag_Map.html
var tiplMapping = map[string]string{
"arranger": "arranger",
"engineer": "engineer",
"producer": "producer",
"mix": "mixer",
"DJ-mix": "djmixer",
}
// parseProp parses an integer property from the tags map and returns its value.
// It also deletes the property from the tags map after parsing.
func parseProp(tags map[string][]string, prop string) int {
if value, ok := tags[prop]; ok && len(value) > 0 {
v, _ := strconv.Atoi(value[0])
delete(tags, prop)
return v
}
return 0
}
// parseBasicTag checks if a basic tag (like __title, __artist, etc.) exists in the tags map.
// If it does, it moves the value to a more appropriate tag name (like title, artist, etc.),
// and deletes the basic tag from the map. If the target tag already exists, it ignores the basic tag.
func parseBasicTag(tags map[string][]string, basicName string, tagName string) {
basicValue := tags[basicName]
if len(basicValue) == 0 {
return
}
delete(tags, basicName)
if len(tags[tagName]) == 0 {
tags[tagName] = basicValue
}
}
// parseTIPL parses the ID3v2.4 TIPL frame string, which is received from TagLib in the format:
//
// "arranger Andrew Powell engineer Chris Blair engineer Pat Stapley producer Eric Woolfson".
//
// and breaks it down into a map of roles and names, e.g.:
//
// {"arranger": ["Andrew Powell"], "engineer": ["Chris Blair", "Pat Stapley"], "producer": ["Eric Woolfson"]}.
func parseTIPL(tags map[string][]string) {
tipl := tags["tipl"]
if len(tipl) == 0 {
return
}
addRole := func(currentRole string, currentValue []string) {
if currentRole != "" && len(currentValue) > 0 {
role := tiplMapping[currentRole]
tags[role] = append(tags[role], strings.Join(currentValue, " "))
}
}
var currentRole string
var currentValue []string
for _, part := range strings.Split(tipl[0], " ") {
if _, ok := tiplMapping[part]; ok {
addRole(currentRole, currentValue)
currentRole = part
currentValue = nil
continue
}
currentValue = append(currentValue, part)
}
addRole(currentRole, currentValue)
delete(tags, "tipl")
}
var _ local.Extractor = (*extractor)(nil)
func init() {
local.RegisterExtractor("taglib", func(_ fs.FS, baseDir string) local.Extractor {
// ignores fs, as taglib extractor only works with local files
return &extractor{baseDir}
})
conf.AddHook(func() {
log.Debug("TagLib version", "version", Version())
})
}
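As a side note, the track/disc handling above normalizes combined "number/total" values into separate number and total tags. Below is a minimal, self-contained sketch of that splitting step, not the extractor's actual code; splitTuple is a hypothetical stand-in for the parseTuple closure.

package main

import (
	"fmt"
	"strings"
)

// splitTuple mimics the parseTuple step above: a "3/10" style value is split
// into the plain number ("3") and, when present, a separate total ("10").
func splitTuple(tags map[string][]string, prop string) {
	name, total := prop+"number", prop+"total"
	if v, ok := tags[name]; ok && len(v) > 0 {
		parts := strings.SplitN(v[0], "/", 2)
		tags[name] = []string{parts[0]}
		if len(parts) == 2 {
			tags[total] = []string{parts[1]}
		}
	}
}

func main() {
	tags := map[string][]string{"tracknumber": {"3/10"}, "discnumber": {"1"}}
	splitTuple(tags, "track")
	splitTuple(tags, "disc")
	fmt.Println(tags) // map[discnumber:[1] tracknumber:[3] tracktotal:[10]]
}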

View File

@@ -1,296 +0,0 @@
package taglib
import (
"io/fs"
"os"
"strings"
"github.com/navidrome/navidrome/utils"
. "github.com/onsi/ginkgo/v2"
. "github.com/onsi/gomega"
)
var _ = Describe("Extractor", func() {
var e *extractor
BeforeEach(func() {
e = &extractor{}
})
Describe("Parse", func() {
It("correctly parses metadata from all files in folder", func() {
mds, err := e.Parse(
"tests/fixtures/test.mp3",
"tests/fixtures/test.ogg",
)
Expect(err).NotTo(HaveOccurred())
Expect(mds).To(HaveLen(2))
// Test MP3
m := mds["tests/fixtures/test.mp3"]
Expect(m.Tags).To(HaveKeyWithValue("title", []string{"Song"}))
Expect(m.Tags).To(HaveKeyWithValue("album", []string{"Album"}))
Expect(m.Tags).To(HaveKeyWithValue("artist", []string{"Artist"}))
Expect(m.Tags).To(HaveKeyWithValue("albumartist", []string{"Album Artist"}))
Expect(m.HasPicture).To(BeTrue())
Expect(m.AudioProperties.Duration.String()).To(Equal("1.02s"))
Expect(m.AudioProperties.BitRate).To(Equal(192))
Expect(m.AudioProperties.Channels).To(Equal(2))
Expect(m.AudioProperties.SampleRate).To(Equal(44100))
Expect(m.Tags).To(Or(
HaveKeyWithValue("compilation", []string{"1"}),
HaveKeyWithValue("tcmp", []string{"1"})),
)
Expect(m.Tags).To(HaveKeyWithValue("genre", []string{"Rock"}))
Expect(m.Tags).To(HaveKeyWithValue("date", []string{"2014-05-21"}))
Expect(m.Tags).To(HaveKeyWithValue("originaldate", []string{"1996-11-21"}))
Expect(m.Tags).To(HaveKeyWithValue("releasedate", []string{"2020-12-31"}))
Expect(m.Tags).To(HaveKeyWithValue("discnumber", []string{"1"}))
Expect(m.Tags).To(HaveKeyWithValue("disctotal", []string{"2"}))
Expect(m.Tags).To(HaveKeyWithValue("comment", []string{"Comment1\nComment2"}))
Expect(m.Tags).To(HaveKeyWithValue("bpm", []string{"123"}))
Expect(m.Tags).To(HaveKeyWithValue("replaygain_album_gain", []string{"+3.21518 dB"}))
Expect(m.Tags).To(HaveKeyWithValue("replaygain_album_peak", []string{"0.9125"}))
Expect(m.Tags).To(HaveKeyWithValue("replaygain_track_gain", []string{"-1.48 dB"}))
Expect(m.Tags).To(HaveKeyWithValue("replaygain_track_peak", []string{"0.4512"}))
Expect(m.Tags).To(HaveKeyWithValue("tracknumber", []string{"2"}))
Expect(m.Tags).To(HaveKeyWithValue("tracktotal", []string{"10"}))
Expect(m.Tags).ToNot(HaveKey("lyrics"))
Expect(m.Tags).To(Or(HaveKeyWithValue("lyrics:eng", []string{
"[00:00.00]This is\n[00:02.50]English SYLT\n",
"[00:00.00]This is\n[00:02.50]English",
}), HaveKeyWithValue("lyrics:eng", []string{
"[00:00.00]This is\n[00:02.50]English",
"[00:00.00]This is\n[00:02.50]English SYLT\n",
})))
Expect(m.Tags).To(Or(HaveKeyWithValue("lyrics:xxx", []string{
"[00:00.00]This is\n[00:02.50]unspecified SYLT\n",
"[00:00.00]This is\n[00:02.50]unspecified",
}), HaveKeyWithValue("lyrics:xxx", []string{
"[00:00.00]This is\n[00:02.50]unspecified",
"[00:00.00]This is\n[00:02.50]unspecified SYLT\n",
})))
// Test OGG
m = mds["tests/fixtures/test.ogg"]
Expect(err).To(BeNil())
Expect(m.Tags).To(HaveKeyWithValue("fbpm", []string{"141.7"}))
// TagLib 1.12 returns 18, previous versions return 39.
// See https://github.com/taglib/taglib/commit/2f238921824741b2cfe6fbfbfc9701d9827ab06b
Expect(m.AudioProperties.BitRate).To(BeElementOf(18, 19, 39, 40, 43, 49))
Expect(m.AudioProperties.Channels).To(BeElementOf(2))
Expect(m.AudioProperties.SampleRate).To(BeElementOf(8000))
Expect(m.AudioProperties.SampleRate).To(BeElementOf(8000))
Expect(m.HasPicture).To(BeTrue())
})
DescribeTable("Format-Specific tests",
func(file, duration string, channels, samplerate, bitdepth int, albumGain, albumPeak, trackGain, trackPeak string, id3Lyrics bool, image bool) {
file = "tests/fixtures/" + file
mds, err := e.Parse(file)
Expect(err).NotTo(HaveOccurred())
Expect(mds).To(HaveLen(1))
m := mds[file]
Expect(m.HasPicture).To(Equal(image))
Expect(m.AudioProperties.Duration.String()).To(Equal(duration))
Expect(m.AudioProperties.Channels).To(Equal(channels))
Expect(m.AudioProperties.SampleRate).To(Equal(samplerate))
Expect(m.AudioProperties.BitDepth).To(Equal(bitdepth))
Expect(m.Tags).To(Or(
HaveKeyWithValue("replaygain_album_gain", []string{albumGain}),
HaveKeyWithValue("----:com.apple.itunes:replaygain_track_gain", []string{albumGain}),
))
Expect(m.Tags).To(Or(
HaveKeyWithValue("replaygain_album_peak", []string{albumPeak}),
HaveKeyWithValue("----:com.apple.itunes:replaygain_album_peak", []string{albumPeak}),
))
Expect(m.Tags).To(Or(
HaveKeyWithValue("replaygain_track_gain", []string{trackGain}),
HaveKeyWithValue("----:com.apple.itunes:replaygain_track_gain", []string{trackGain}),
))
Expect(m.Tags).To(Or(
HaveKeyWithValue("replaygain_track_peak", []string{trackPeak}),
HaveKeyWithValue("----:com.apple.itunes:replaygain_track_peak", []string{trackPeak}),
))
Expect(m.Tags).To(HaveKeyWithValue("title", []string{"Title"}))
Expect(m.Tags).To(HaveKeyWithValue("album", []string{"Album"}))
Expect(m.Tags).To(HaveKeyWithValue("artist", []string{"Artist"}))
Expect(m.Tags).To(HaveKeyWithValue("albumartist", []string{"Album Artist"}))
Expect(m.Tags).To(HaveKeyWithValue("genre", []string{"Rock"}))
Expect(m.Tags).To(HaveKeyWithValue("date", []string{"2014"}))
Expect(m.Tags).To(HaveKeyWithValue("bpm", []string{"123"}))
Expect(m.Tags).To(Or(
HaveKeyWithValue("tracknumber", []string{"3"}),
HaveKeyWithValue("tracknumber", []string{"3/10"}),
))
if !strings.HasSuffix(file, "test.wma") {
// TODO Not sure why this is not working for WMA
Expect(m.Tags).To(HaveKeyWithValue("tracktotal", []string{"10"}))
}
Expect(m.Tags).To(Or(
HaveKeyWithValue("discnumber", []string{"1"}),
HaveKeyWithValue("discnumber", []string{"1/2"}),
))
Expect(m.Tags).To(HaveKeyWithValue("disctotal", []string{"2"}))
// WMA does not have a "compilation" tag, but "wm/iscompilation"
Expect(m.Tags).To(Or(
HaveKeyWithValue("compilation", []string{"1"}),
HaveKeyWithValue("wm/iscompilation", []string{"1"})),
)
if id3Lyrics {
Expect(m.Tags).To(HaveKeyWithValue("lyrics:eng", []string{
"[00:00.00]This is\n[00:02.50]English",
}))
Expect(m.Tags).To(HaveKeyWithValue("lyrics:xxx", []string{
"[00:00.00]This is\n[00:02.50]unspecified",
}))
} else {
Expect(m.Tags).To(HaveKeyWithValue("lyrics:xxx", []string{
"[00:00.00]This is\n[00:02.50]unspecified",
"[00:00.00]This is\n[00:02.50]English",
}))
}
Expect(m.Tags).To(HaveKeyWithValue("comment", []string{"Comment1\nComment2"}))
},
// ffmpeg -f lavfi -i "sine=frequency=1200:duration=1" test.flac
Entry("correctly parses flac tags", "test.flac", "1s", 1, 44100, 16, "+4.06 dB", "0.12496948", "+4.06 dB", "0.12496948", false, true),
Entry("correctly parses m4a (aac) gain tags", "01 Invisible (RED) Edit Version.m4a", "1.04s", 2, 44100, 16, "0.37", "0.48", "0.37", "0.48", false, true),
Entry("correctly parses m4a (aac) gain tags (uppercase)", "test.m4a", "1.04s", 2, 44100, 16, "0.37", "0.48", "0.37", "0.48", false, true),
Entry("correctly parses ogg (vorbis) tags", "test.ogg", "1.04s", 2, 8000, 0, "+7.64 dB", "0.11772506", "+7.64 dB", "0.11772506", false, true),
// ffmpeg -f lavfi -i "sine=frequency=900:duration=1" test.wma
// Weird note: for the tag parsing to work, the lyrics are actually stored in the reverse order
Entry("correctly parses wma/asf tags", "test.wma", "1.02s", 1, 44100, 16, "3.27 dB", "0.132914", "3.27 dB", "0.132914", false, true),
// ffmpeg -f lavfi -i "sine=frequency=800:duration=1" test.wv
Entry("correctly parses wv (wavpak) tags", "test.wv", "1s", 1, 44100, 16, "3.43 dB", "0.125061", "3.43 dB", "0.125061", false, true),
// ffmpeg -f lavfi -i "sine=frequency=1000:duration=1" test.wav
Entry("correctly parses wav tags", "test.wav", "1s", 1, 44100, 16, "3.06 dB", "0.125056", "3.06 dB", "0.125056", true, true),
// ffmpeg -f lavfi -i "sine=frequency=1400:duration=1" test.aiff
Entry("correctly parses aiff tags", "test.aiff", "1s", 1, 44100, 16, "2.00 dB", "0.124972", "2.00 dB", "0.124972", true, true),
)
// Skip these tests when running as root
Context("Access Forbidden", func() {
var accessForbiddenFile string
var RegularUserContext = XContext
var isRegularUser = os.Getuid() != 0
if isRegularUser {
RegularUserContext = Context
}
// Only run permission tests if we are not root
RegularUserContext("when run without root privileges", func() {
BeforeEach(func() {
accessForbiddenFile = utils.TempFileName("access_forbidden-", ".mp3")
f, err := os.OpenFile(accessForbiddenFile, os.O_WRONLY|os.O_CREATE, 0222)
Expect(err).ToNot(HaveOccurred())
DeferCleanup(func() {
Expect(f.Close()).To(Succeed())
Expect(os.Remove(accessForbiddenFile)).To(Succeed())
})
})
It("correctly handle unreadable file due to insufficient read permission", func() {
_, err := e.extractMetadata(accessForbiddenFile)
Expect(err).To(MatchError(os.ErrPermission))
})
It("skips the file if it cannot be read", func() {
files := []string{
"tests/fixtures/test.mp3",
"tests/fixtures/test.ogg",
accessForbiddenFile,
}
mds, err := e.Parse(files...)
Expect(err).NotTo(HaveOccurred())
Expect(mds).To(HaveLen(2))
Expect(mds).ToNot(HaveKey(accessForbiddenFile))
})
})
})
})
Describe("Error Checking", func() {
It("returns a generic ErrPath if file does not exist", func() {
testFilePath := "tests/fixtures/NON_EXISTENT.ogg"
_, err := e.extractMetadata(testFilePath)
Expect(err).To(MatchError(fs.ErrNotExist))
})
It("does not throw a SIGSEGV error when reading a file with an invalid frame", func() {
// File has an empty TDAT frame
md, err := e.extractMetadata("tests/fixtures/invalid-files/test-invalid-frame.mp3")
Expect(err).ToNot(HaveOccurred())
Expect(md.Tags).To(HaveKeyWithValue("albumartist", []string{"Elvis Presley"}))
})
})
Describe("parseTIPL", func() {
var tags map[string][]string
BeforeEach(func() {
tags = make(map[string][]string)
})
Context("when the TIPL string is populated", func() {
It("correctly parses roles and names", func() {
tags["tipl"] = []string{"arranger Andrew Powell DJ-mix François Kevorkian DJ-mix Jane Doe engineer Chris Blair"}
parseTIPL(tags)
Expect(tags["arranger"]).To(ConsistOf("Andrew Powell"))
Expect(tags["engineer"]).To(ConsistOf("Chris Blair"))
Expect(tags["djmixer"]).To(ConsistOf("François Kevorkian", "Jane Doe"))
})
It("handles multiple names for a single role", func() {
tags["tipl"] = []string{"engineer Pat Stapley producer Eric Woolfson engineer Chris Blair"}
parseTIPL(tags)
Expect(tags["producer"]).To(ConsistOf("Eric Woolfson"))
Expect(tags["engineer"]).To(ConsistOf("Pat Stapley", "Chris Blair"))
})
It("discards roles without names", func() {
tags["tipl"] = []string{"engineer Pat Stapley producer engineer Chris Blair"}
parseTIPL(tags)
Expect(tags).ToNot(HaveKey("producer"))
Expect(tags["engineer"]).To(ConsistOf("Pat Stapley", "Chris Blair"))
})
})
Context("when the TIPL string is empty", func() {
It("does nothing", func() {
tags["tipl"] = []string{""}
parseTIPL(tags)
Expect(tags).To(BeEmpty())
})
})
Context("when the TIPL is not present", func() {
It("does nothing", func() {
parseTIPL(tags)
Expect(tags).To(BeEmpty())
})
})
})
})

View File

@@ -1,299 +0,0 @@
#include <stdlib.h>
#include <string.h>
#define TAGLIB_STATIC
#include <apeproperties.h>
#include <apetag.h>
#include <aifffile.h>
#include <asffile.h>
#include <dsffile.h>
#include <fileref.h>
#include <flacfile.h>
#include <id3v2tag.h>
#include <unsynchronizedlyricsframe.h>
#include <synchronizedlyricsframe.h>
#include <mp4file.h>
#include <mpegfile.h>
#include <opusfile.h>
#include <tpropertymap.h>
#include <vorbisfile.h>
#include <wavfile.h>
#include <wavpackfile.h>
#include "taglib_wrapper.h"
char has_cover(const TagLib::FileRef f);
static char TAGLIB_VERSION[16];
char* taglib_version() {
snprintf((char *)TAGLIB_VERSION, 16, "%d.%d.%d", TAGLIB_MAJOR_VERSION, TAGLIB_MINOR_VERSION, TAGLIB_PATCH_VERSION);
return (char *)TAGLIB_VERSION;
}
int taglib_read(const FILENAME_CHAR_T *filename, unsigned long id) {
TagLib::FileRef f(filename, true, TagLib::AudioProperties::Fast);
if (f.isNull()) {
return TAGLIB_ERR_PARSE;
}
if (!f.audioProperties()) {
return TAGLIB_ERR_AUDIO_PROPS;
}
// Add audio properties to the tags
const TagLib::AudioProperties *props(f.audioProperties());
goPutInt(id, (char *)"__lengthinmilliseconds", props->lengthInMilliseconds());
goPutInt(id, (char *)"__bitrate", props->bitrate());
goPutInt(id, (char *)"__channels", props->channels());
goPutInt(id, (char *)"__samplerate", props->sampleRate());
// Extract bits per sample for supported formats
int bitsPerSample = 0;
if (const auto* apeProperties{ dynamic_cast<const TagLib::APE::Properties*>(props) })
bitsPerSample = apeProperties->bitsPerSample();
else if (const auto* asfProperties{ dynamic_cast<const TagLib::ASF::Properties*>(props) })
bitsPerSample = asfProperties->bitsPerSample();
else if (const auto* flacProperties{ dynamic_cast<const TagLib::FLAC::Properties*>(props) })
bitsPerSample = flacProperties->bitsPerSample();
else if (const auto* mp4Properties{ dynamic_cast<const TagLib::MP4::Properties*>(props) })
bitsPerSample = mp4Properties->bitsPerSample();
else if (const auto* wavePackProperties{ dynamic_cast<const TagLib::WavPack::Properties*>(props) })
bitsPerSample = wavePackProperties->bitsPerSample();
else if (const auto* aiffProperties{ dynamic_cast<const TagLib::RIFF::AIFF::Properties*>(props) })
bitsPerSample = aiffProperties->bitsPerSample();
else if (const auto* wavProperties{ dynamic_cast<const TagLib::RIFF::WAV::Properties*>(props) })
bitsPerSample = wavProperties->bitsPerSample();
else if (const auto* dsfProperties{ dynamic_cast<const TagLib::DSF::Properties*>(props) })
bitsPerSample = dsfProperties->bitsPerSample();
if (bitsPerSample > 0) {
goPutInt(id, (char *)"__bitspersample", bitsPerSample);
}
// Send all properties to the Go map
TagLib::PropertyMap tags = f.file()->properties();
// Make sure at least the basic properties are extracted
TagLib::Tag *basic = f.file()->tag();
if (!basic->isEmpty()) {
if (!basic->title().isEmpty()) {
tags.insert("__title", basic->title());
}
if (!basic->artist().isEmpty()) {
tags.insert("__artist", basic->artist());
}
if (!basic->album().isEmpty()) {
tags.insert("__album", basic->album());
}
if (!basic->comment().isEmpty()) {
tags.insert("__comment", basic->comment());
}
if (!basic->genre().isEmpty()) {
tags.insert("__genre", basic->genre());
}
if (basic->year() > 0) {
tags.insert("__year", TagLib::String::number(basic->year()));
}
if (basic->track() > 0) {
tags.insert("__track", TagLib::String::number(basic->track()));
}
}
TagLib::ID3v2::Tag *id3Tags = NULL;
// Get some extended/non-standard ID3-only tags (ex: iTunes extended frames)
TagLib::MPEG::File *mp3File(dynamic_cast<TagLib::MPEG::File *>(f.file()));
if (mp3File != NULL) {
id3Tags = mp3File->ID3v2Tag();
}
if (id3Tags == NULL) {
TagLib::RIFF::WAV::File *wavFile(dynamic_cast<TagLib::RIFF::WAV::File *>(f.file()));
if (wavFile != NULL && wavFile->hasID3v2Tag()) {
id3Tags = wavFile->ID3v2Tag();
}
}
if (id3Tags == NULL) {
TagLib::RIFF::AIFF::File *aiffFile(dynamic_cast<TagLib::RIFF::AIFF::File *>(f.file()));
if (aiffFile && aiffFile->hasID3v2Tag()) {
id3Tags = aiffFile->tag();
}
}
// Yes, it is possible to have ID3v2 tags in FLAC. However, that can cause problems
// with many players, so they will not be parsed
if (id3Tags != NULL) {
const auto &frames = id3Tags->frameListMap();
for (const auto &kv: frames) {
if (kv.first == "USLT") {
for (const auto &tag: kv.second) {
TagLib::ID3v2::UnsynchronizedLyricsFrame *frame = dynamic_cast<TagLib::ID3v2::UnsynchronizedLyricsFrame *>(tag);
if (frame == NULL) continue;
tags.erase("LYRICS");
const auto bv = frame->language();
char language[4] = {'x', 'x', 'x', '\0'};
if (bv.size() == 3) {
strncpy(language, bv.data(), 3);
}
char *val = const_cast<char*>(frame->text().toCString(true));
goPutLyrics(id, language, val);
}
} else if (kv.first == "SYLT") {
for (const auto &tag: kv.second) {
TagLib::ID3v2::SynchronizedLyricsFrame *frame = dynamic_cast<TagLib::ID3v2::SynchronizedLyricsFrame *>(tag);
if (frame == NULL) continue;
const auto bv = frame->language();
char language[4] = {'x', 'x', 'x', '\0'};
if (bv.size() == 3) {
strncpy(language, bv.data(), 3);
}
const auto format = frame->timestampFormat();
if (format == TagLib::ID3v2::SynchronizedLyricsFrame::AbsoluteMilliseconds) {
for (const auto &line: frame->synchedText()) {
char *text = const_cast<char*>(line.text.toCString(true));
goPutLyricLine(id, language, text, line.time);
}
} else if (format == TagLib::ID3v2::SynchronizedLyricsFrame::AbsoluteMpegFrames) {
const int sampleRate = props->sampleRate();
if (sampleRate != 0) {
for (const auto &line: frame->synchedText()) {
const int timeInMs = (line.time * 1000) / sampleRate;
char *text = const_cast<char*>(line.text.toCString(true));
goPutLyricLine(id, language, text, timeInMs);
}
}
}
}
} else if (kv.first == "TIPL"){
if (!kv.second.isEmpty()) {
tags.insert(kv.first, kv.second.front()->toString());
}
}
}
}
// M4A may have some iTunes specific tags not captured by the PropertyMap interface
TagLib::MP4::File *m4afile(dynamic_cast<TagLib::MP4::File *>(f.file()));
if (m4afile != NULL) {
const auto itemListMap = m4afile->tag()->itemMap();
for (const auto item: itemListMap) {
char *key = const_cast<char*>(item.first.toCString(true));
for (const auto value: item.second.toStringList()) {
char *val = const_cast<char*>(value.toCString(true));
goPutM4AStr(id, key, val);
}
}
}
// WMA/ASF files may have additional tags not captured by the PropertyMap interface
TagLib::ASF::File *asfFile(dynamic_cast<TagLib::ASF::File *>(f.file()));
if (asfFile != NULL) {
const TagLib::ASF::Tag *asfTags{asfFile->tag()};
const auto itemListMap = asfTags->attributeListMap();
for (const auto item : itemListMap) {
char *key = const_cast<char*>(item.first.toCString(true));
for (auto j = item.second.begin();
j != item.second.end(); ++j) {
char *val = const_cast<char*>(j->toString().toCString(true));
goPutStr(id, key, val);
}
}
}
// Send all collected tags to the Go map
for (TagLib::PropertyMap::ConstIterator i = tags.begin(); i != tags.end();
++i) {
char *key = const_cast<char*>(i->first.toCString(true));
for (TagLib::StringList::ConstIterator j = i->second.begin();
j != i->second.end(); ++j) {
char *val = const_cast<char*>((*j).toCString(true));
goPutStr(id, key, val);
}
}
// Cover art has to be handled separately
if (has_cover(f)) {
goPutStr(id, (char *)"has_picture", (char *)"true");
}
return 0;
}
// Detect if the file has cover art. Returns 1 if the file has cover art, 0 otherwise.
char has_cover(const TagLib::FileRef f) {
char hasCover = 0;
// ----- MP3
if (TagLib::MPEG::File * mp3File{dynamic_cast<TagLib::MPEG::File *>(f.file())}) {
if (mp3File->ID3v2Tag()) {
const auto &frameListMap{mp3File->ID3v2Tag()->frameListMap()};
hasCover = !frameListMap["APIC"].isEmpty();
}
}
// ----- FLAC
else if (TagLib::FLAC::File * flacFile{dynamic_cast<TagLib::FLAC::File *>(f.file())}) {
hasCover = !flacFile->pictureList().isEmpty();
}
// ----- MP4
else if (TagLib::MP4::File * mp4File{dynamic_cast<TagLib::MP4::File *>(f.file())}) {
auto &coverItem{mp4File->tag()->itemMap()["covr"]};
TagLib::MP4::CoverArtList coverArtList{coverItem.toCoverArtList()};
hasCover = !coverArtList.isEmpty();
}
// ----- Ogg
else if (TagLib::Ogg::Vorbis::File * vorbisFile{dynamic_cast<TagLib::Ogg::Vorbis::File *>(f.file())}) {
hasCover = !vorbisFile->tag()->pictureList().isEmpty();
}
// ----- Opus
else if (TagLib::Ogg::Opus::File * opusFile{dynamic_cast<TagLib::Ogg::Opus::File *>(f.file())}) {
hasCover = !opusFile->tag()->pictureList().isEmpty();
}
// ----- WAV
else if (TagLib::RIFF::WAV::File * wavFile{ dynamic_cast<TagLib::RIFF::WAV::File*>(f.file()) }) {
if (wavFile->hasID3v2Tag()) {
const auto& frameListMap{ wavFile->ID3v2Tag()->frameListMap() };
hasCover = !frameListMap["APIC"].isEmpty();
}
}
// ----- AIFF
else if (TagLib::RIFF::AIFF::File * aiffFile{ dynamic_cast<TagLib::RIFF::AIFF::File *>(f.file())}) {
if (aiffFile->hasID3v2Tag()) {
const auto& frameListMap{ aiffFile->tag()->frameListMap() };
hasCover = !frameListMap["APIC"].isEmpty();
}
}
// ----- WMA
else if (TagLib::ASF::File * asfFile{dynamic_cast<TagLib::ASF::File *>(f.file())}) {
const TagLib::ASF::Tag *tag{ asfFile->tag() };
hasCover = tag && tag->attributeListMap().contains("WM/Picture");
}
// ----- DSF
else if (TagLib::DSF::File * dsffile{ dynamic_cast<TagLib::DSF::File *>(f.file())}) {
const TagLib::ID3v2::Tag *tag { dsffile->tag() };
hasCover = tag && !tag->frameListMap()["APIC"].isEmpty();
}
// ----- WAVPAK (APE tag)
else if (TagLib::WavPack::File * wvFile{dynamic_cast<TagLib::WavPack::File *>(f.file())}) {
if (wvFile->hasAPETag()) {
// This is the particular string that Picard uses
hasCover = !wvFile->APETag()->itemListMap()["COVER ART (FRONT)"].isEmpty();
}
}
return hasCover;
}

View File

@@ -1,157 +0,0 @@
package taglib
/*
#cgo !windows pkg-config: --define-prefix taglib
#cgo windows pkg-config: taglib
#cgo illumos LDFLAGS: -lstdc++ -lsendfile
#cgo linux darwin CXXFLAGS: -std=c++11
#cgo darwin LDFLAGS: -L/opt/homebrew/opt/taglib/lib
#include <stdio.h>
#include <stdlib.h>
#include <string.h>
#include "taglib_wrapper.h"
*/
import "C"
import (
"encoding/json"
"fmt"
"os"
"runtime/debug"
"strconv"
"strings"
"sync"
"sync/atomic"
"unsafe"
"github.com/navidrome/navidrome/log"
)
const iTunesKeyPrefix = "----:com.apple.itunes:"
func Version() string {
return C.GoString(C.taglib_version())
}
func Read(filename string) (tags map[string][]string, err error) {
// Do not crash on failures in the C code/library
debug.SetPanicOnFault(true)
defer func() {
if r := recover(); r != nil {
log.Error("extractor: recovered from panic when reading tags", "file", filename, "error", r)
err = fmt.Errorf("extractor: recovered from panic: %s", r)
}
}()
fp := getFilename(filename)
defer C.free(unsafe.Pointer(fp))
id, m, release := newMap()
defer release()
log.Trace("extractor: reading tags", "filename", filename, "map_id", id)
res := C.taglib_read(fp, C.ulong(id))
switch res {
case C.TAGLIB_ERR_PARSE:
// Check the additional case where the file is unreadable due to permissions
file, fileErr := os.OpenFile(filename, os.O_RDONLY, 0600)
defer file.Close()
if os.IsPermission(fileErr) {
return nil, fmt.Errorf("navidrome does not have permission: %w", fileErr)
} else if fileErr != nil {
return nil, fmt.Errorf("cannot parse file media file: %w", fileErr)
} else {
return nil, fmt.Errorf("cannot parse file media file")
}
case C.TAGLIB_ERR_AUDIO_PROPS:
return nil, fmt.Errorf("can't get audio properties from file")
}
if log.IsGreaterOrEqualTo(log.LevelDebug) {
j, _ := json.Marshal(m)
log.Trace("extractor: read tags", "tags", string(j), "filename", filename, "id", id)
} else {
log.Trace("extractor: read tags", "tags", m, "filename", filename, "id", id)
}
return m, nil
}
type tagMap map[string][]string
var allMaps sync.Map
var mapsNextID atomic.Uint32
func newMap() (uint32, tagMap, func()) {
id := mapsNextID.Add(1)
m := tagMap{}
allMaps.Store(id, m)
return id, m, func() {
allMaps.Delete(id)
}
}
func doPutTag(id C.ulong, key string, val *C.char) {
if key == "" {
return
}
r, _ := allMaps.Load(uint32(id))
m := r.(tagMap)
k := strings.ToLower(key)
v := strings.TrimSpace(C.GoString(val))
m[k] = append(m[k], v)
}
//export goPutM4AStr
func goPutM4AStr(id C.ulong, key *C.char, val *C.char) {
k := C.GoString(key)
// Special case for M4A: strip the iTunes prefix; keys left with no actual name are dropped by doPutTag
k = strings.TrimPrefix(k, iTunesKeyPrefix)
doPutTag(id, k, val)
}
//export goPutStr
func goPutStr(id C.ulong, key *C.char, val *C.char) {
doPutTag(id, C.GoString(key), val)
}
//export goPutInt
func goPutInt(id C.ulong, key *C.char, val C.int) {
valStr := strconv.Itoa(int(val))
vp := C.CString(valStr)
defer C.free(unsafe.Pointer(vp))
goPutStr(id, key, vp)
}
//export goPutLyrics
func goPutLyrics(id C.ulong, lang *C.char, val *C.char) {
doPutTag(id, "lyrics:"+C.GoString(lang), val)
}
//export goPutLyricLine
func goPutLyricLine(id C.ulong, lang *C.char, text *C.char, time C.int) {
language := C.GoString(lang)
line := C.GoString(text)
timeGo := int64(time)
ms := timeGo % 1000
timeGo /= 1000
sec := timeGo % 60
timeGo /= 60
minimum := timeGo % 60
formattedLine := fmt.Sprintf("[%02d:%02d.%02d]%s\n", minimum, sec, ms/10, line)
key := "lyrics:" + language
r, _ := allMaps.Load(uint32(id))
m := r.(tagMap)
k := strings.ToLower(key)
existing, ok := m[k]
if ok {
existing[0] += formattedLine
} else {
m[k] = []string{formattedLine}
}
}
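The millisecond arithmetic in goPutLyricLine is what produces the "[mm:ss.xx]" prefixes seen in the lyrics fixtures elsewhere in this diff. A small, self-contained sketch of the same breakdown; formatLRCTime is a hypothetical helper, not part of the package.

package main

import "fmt"

// formatLRCTime renders a millisecond offset as an LRC-style "[mm:ss.xx]" tag,
// splitting it into hundredths, seconds and minutes like goPutLyricLine does.
func formatLRCTime(ms int64) string {
	hundredths := (ms % 1000) / 10
	sec := (ms / 1000) % 60
	minutes := (ms / 1000 / 60) % 60
	return fmt.Sprintf("[%02d:%02d.%02d]", minutes, sec, hundredths)
}

func main() {
	fmt.Println(formatLRCTime(0) + "This is")          // [00:00.00]This is
	fmt.Println(formatLRCTime(2500) + "English SYLT")  // [00:02.50]English SYLT
}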

View File

@@ -1,24 +0,0 @@
#define TAGLIB_ERR_PARSE -1
#define TAGLIB_ERR_AUDIO_PROPS -2
#ifdef __cplusplus
extern "C" {
#endif
#ifdef WIN32
#define FILENAME_CHAR_T wchar_t
#else
#define FILENAME_CHAR_T char
#endif
extern void goPutM4AStr(unsigned long id, char *key, char *val);
extern void goPutStr(unsigned long id, char *key, char *val);
extern void goPutInt(unsigned long id, char *key, int val);
extern void goPutLyrics(unsigned long id, char *lang, char *val);
extern void goPutLyricLine(unsigned long id, char *lang, char *text, int time);
int taglib_read(const FILENAME_CHAR_T *filename, unsigned long id);
char* taglib_version();
#ifdef __cplusplus
}
#endif

28
api/parameters/_index.yml Normal file
View File

@@ -0,0 +1,28 @@
pageOffset:
$ref: './query/pageOffset.yml'
pageLimit:
$ref: './query/pageLimit.yml'
filterEquals:
$ref: './query/filterEquals.yml'
filterLessThan:
$ref: './query/filterLessThan.yml'
filterLessOrEqual:
$ref: './query/filterLessOrEqual.yml'
filterGreaterThan:
$ref: './query/filterGreaterThan.yml'
filterGreaterOrEqual:
$ref: './query/filterGreaterOrEqual.yml'
filterContains:
$ref: './query/filterContains.yml'
filterStartsWith:
$ref: './query/filterStartsWith.yml'
filterEndsWith:
$ref: './query/filterEndsWith.yml'
sort:
$ref: './query/sort.yml'
include:
$ref: './query/include.yml'
includeForTracks:
$ref: './query/includeForTracks.yml'
includeForAlbums:
$ref: './query/includeForAlbums.yml'

View File

@@ -0,0 +1,9 @@
name: filter[contains]
in: query
description: 'Filter by any property containing text. Usage: filter[contains]=property:value'
required: false
schema:
type: array
items:
type: string
pattern: '^\w+:\w+'

View File

@@ -0,0 +1,9 @@
name: filter[endsWith]
in: query
description: 'Filter by any property that ends with text. Usage: filter[endsWith]=property:value'
required: false
schema:
type: array
items:
type: string
pattern: '^\w+:\w+'

View File

@@ -0,0 +1,9 @@
name: filter[equals]
in: query
description: 'Filter by any property with an exact match. Usage: filter[equals]=property:value'
required: false
schema:
type: array
items:
type: string
pattern: '^\w+:\w+'

View File

@@ -0,0 +1,9 @@
name: filter[greaterOrEqual]
in: query
description: 'Filter by any numeric property greater than or equal to a value. Usage: filter[greaterOrEqual]=property:value'
required: false
schema:
type: array
items:
type: string
pattern: '^\w+:\d+'

View File

@@ -0,0 +1,9 @@
name: filter[greaterThan]
in: query
description: 'Filter by any numeric property greater than a value. Usage: filter[greaterThan]=property:value'
required: false
schema:
type: array
items:
type: string
pattern: '^\w+:\d+'

View File

@@ -0,0 +1,9 @@
name: filter[lessOrEqual]
in: query
description: 'Filter by any numeric property less than or equal to a value. Usage: filter[lessOrEqual]=property:value'
required: false
schema:
type: array
items:
type: string
pattern: '^\w+:\d+'

View File

@@ -0,0 +1,9 @@
name: filter[lessThan]
in: query
description: 'Filter by any numeric property less than a value. Usage: filter[lessThan]=property:value'
required: false
schema:
type: array
items:
type: string
pattern: '^\w+:\d+'

View File

@@ -0,0 +1,9 @@
name: filter[startsWith]
in: query
description: 'Filter by any property that starts with text. Usage: filter[startsWith]=property:value'
required: false
schema:
type: array
items:
type: string
pattern: '^\w+:\w+'

View File

@@ -0,0 +1,6 @@
name: include
in: query
description: Related resources to include in the response, separated by commas
required: false
schema:
type: string

View File

@@ -0,0 +1,13 @@
name: include
in: query
description: Related resources to include in the response, separated by commas
required: false
explode: false
schema:
type: array
x-go-type: includeSlice
items:
type: string
enum:
- track
- artist

View File

@@ -0,0 +1,12 @@
name: include
in: query
description: Related resources to include in the response, separated by commas
required: false
explode: false
schema:
type: array
x-go-type: includeSlice
items:
type: string
enum:
- artist

View File

@@ -0,0 +1,13 @@
name: include
in: query
description: Related resources to include in the response, separated by commas
required: false
explode: false
schema:
type: array
x-go-type: includeSlice
items:
type: string
enum:
- album
- artist

View File

@@ -0,0 +1,9 @@
name: page[limit]
in: query
description: The number of items per page
required: false
schema:
type: integer
format: int32
minimum: 0
default: 10

View File

@@ -0,0 +1,9 @@
name: page[offset]
in: query
description: The offset for pagination
required: false
schema:
type: integer
format: int32
minimum: 0
default: 0

View File

@@ -0,0 +1,6 @@
name: sort
in: query
description: Sort the results by one or more properties, separated by commas. Prefix the property with '-' for descending order.
required: false
schema:
type: string
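Taken together, the pagination, filter, sort and include parameters above describe the whole query-string surface of the collection endpoints. A hedged client-side sketch of assembling such a query; the property names and the /albums path are illustrative, not taken from the spec.

package main

import (
	"fmt"
	"net/url"
)

func main() {
	q := url.Values{}
	q.Set("page[limit]", "10")
	q.Set("page[offset]", "20")
	q.Add("filter[contains]", "title:love")      // must match ^\w+:\w+
	q.Add("filter[greaterOrEqual]", "year:1990") // must match ^\w+:\d+
	q.Set("sort", "-year,title")                 // '-' prefix sorts descending
	q.Set("include", "artist")

	// Encode percent-escapes the brackets, which is still a valid query string.
	fmt.Println("/albums?" + q.Encode())
}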

33
api/resources/album.yml Normal file
View File

@@ -0,0 +1,33 @@
get:
summary: Retrieve an individual album
operationId: getAlbum
parameters:
- $ref: '../parameters/query/includeForAlbum.yml'
- name: albumId
in: path
description: The unique identifier of the album
required: true
schema:
type: string
responses:
'200':
description: An album object
content:
application/vnd.api+json:
schema:
type: object
required: [data]
properties:
data:
$ref: '../schemas/Album.yml'
included:
description: Included resources, as requested by the `include` query parameter
type: array
items:
$ref: '../schemas/IncludedResource.yml'
'403':
$ref: '../responses/NotAuthorized.yml'
'404':
$ref: '../responses/NotFound.yml'
'500':
$ref: '../responses/InternalServerError.yml'

44
api/resources/albums.yml Normal file
View File

@@ -0,0 +1,44 @@
get:
summary: Retrieve a list of albums
operationId: getAlbums
parameters:
- $ref: '../parameters/query/pageLimit.yml'
- $ref: '../parameters/query/pageOffset.yml'
- $ref: '../parameters/query/filterEquals.yml'
- $ref: '../parameters/query/filterContains.yml'
- $ref: '../parameters/query/filterLessThan.yml'
- $ref: '../parameters/query/filterLessOrEqual.yml'
- $ref: '../parameters/query/filterGreaterThan.yml'
- $ref: '../parameters/query/filterGreaterOrEqual.yml'
- $ref: '../parameters/query/filterStartsWith.yml'
- $ref: '../parameters/query/filterEndsWith.yml'
- $ref: '../parameters/query/sort.yml'
- $ref: '../parameters/query/includeForAlbums.yml'
responses:
'200':
description: A list of albums
content:
application/vnd.api+json:
schema:
type: object
required: [data, links]
properties:
data:
type: array
items:
$ref: '../schemas/Album.yml'
links:
$ref: '../schemas/PaginationLinks.yml'
meta:
$ref: '../schemas/PaginationMeta.yml'
included:
description: Included resources, as requested by the `include` query parameter
type: array
items:
$ref: '../schemas/IncludedResource.yml'
'400':
$ref: '../responses/BadRequest.yml'
'403':
$ref: '../responses/NotAuthorized.yml'
'500':
$ref: '../responses/InternalServerError.yml'

28
api/resources/artist.yml Normal file
View File

@@ -0,0 +1,28 @@
get:
summary: Retrieve an individual artist
operationId: getArtist
parameters:
- $ref: '../parameters/query/include.yml'
- name: artistId
in: path
description: The unique identifier of the artist
required: true
schema:
type: string
responses:
'200':
description: An artist object
content:
application/vnd.api+json:
schema:
type: object
required: [data]
properties:
data:
$ref: '../schemas/Artist.yml'
'403':
$ref: '../responses/NotAuthorized.yml'
'404':
$ref: '../responses/NotFound.yml'
'500':
$ref: '../responses/InternalServerError.yml'

39
api/resources/artists.yml Normal file
View File

@@ -0,0 +1,39 @@
get:
summary: Retrieve a list of artists
operationId: getArtists
parameters:
- $ref: '../parameters/query/pageLimit.yml'
- $ref: '../parameters/query/pageOffset.yml'
- $ref: '../parameters/query/filterEquals.yml'
- $ref: '../parameters/query/filterContains.yml'
- $ref: '../parameters/query/filterLessThan.yml'
- $ref: '../parameters/query/filterLessOrEqual.yml'
- $ref: '../parameters/query/filterGreaterThan.yml'
- $ref: '../parameters/query/filterGreaterOrEqual.yml'
- $ref: '../parameters/query/filterStartsWith.yml'
- $ref: '../parameters/query/filterEndsWith.yml'
- $ref: '../parameters/query/sort.yml'
- $ref: '../parameters/query/include.yml'
responses:
'200':
description: A list of artists
content:
application/vnd.api+json:
schema:
type: object
required: [data, links]
properties:
data:
type: array
items:
$ref: '../schemas/Artist.yml'
links:
$ref: '../schemas/PaginationLinks.yml'
meta:
$ref: '../schemas/PaginationMeta.yml'
'400':
$ref: '../responses/BadRequest.yml'
'403':
$ref: '../responses/NotAuthorized.yml'
'500':
$ref: '../responses/InternalServerError.yml'

18
api/resources/server.yml Normal file
View File

@@ -0,0 +1,18 @@
get:
summary: Get server's global info
operationId: getServerInfo
responses:
'200':
description: The response's `data` key maps to a resource object representing the server.
content:
application/vnd.api+json:
schema:
type: object
required: [data]
properties:
data:
$ref: '../schemas/ServerInfo.yml'
'403':
$ref: '../responses/NotAuthorized.yml'
'500':
$ref: '../responses/InternalServerError.yml'

33
api/resources/track.yml Normal file
View File

@@ -0,0 +1,33 @@
get:
summary: Retrieve an individual track
operationId: getTrack
parameters:
- $ref: '../parameters/query/includeForTracks.yml'
- name: trackId
in: path
description: The unique identifier of the track
required: true
schema:
type: string
responses:
'200':
description: A track object
content:
application/vnd.api+json:
schema:
type: object
required: [data]
properties:
data:
$ref: '../schemas/Track.yml'
included:
description: Included resources, as requested by the `include` query parameter
type: array
items:
$ref: '../schemas/IncludedResource.yml'
'403':
$ref: '../responses/NotAuthorized.yml'
'404':
$ref: '../responses/NotFound.yml'
'500':
$ref: '../responses/InternalServerError.yml'

44
api/resources/tracks.yml Normal file
View File

@@ -0,0 +1,44 @@
get:
summary: Retrieve a list of tracks
operationId: getTracks
parameters:
- $ref: '../parameters/query/pageLimit.yml'
- $ref: '../parameters/query/pageOffset.yml'
- $ref: '../parameters/query/filterEquals.yml'
- $ref: '../parameters/query/filterContains.yml'
- $ref: '../parameters/query/filterLessThan.yml'
- $ref: '../parameters/query/filterLessOrEqual.yml'
- $ref: '../parameters/query/filterGreaterThan.yml'
- $ref: '../parameters/query/filterGreaterOrEqual.yml'
- $ref: '../parameters/query/filterStartsWith.yml'
- $ref: '../parameters/query/filterEndsWith.yml'
- $ref: '../parameters/query/sort.yml'
- $ref: '../parameters/query/includeForTracks.yml'
responses:
'200':
description: A list of tracks
content:
application/vnd.api+json:
schema:
type: object
required: [data, links]
properties:
data:
type: array
items:
$ref: '../schemas/Track.yml'
links:
$ref: '../schemas/PaginationLinks.yml'
meta:
$ref: '../schemas/PaginationMeta.yml'
included:
description: Included resources, as requested by the `include` query parameter
type: array
items:
$ref: '../schemas/IncludedResource.yml'
'400':
$ref: '../responses/BadRequest.yml'
'403':
$ref: '../responses/NotAuthorized.yml'
'500':
$ref: '../responses/InternalServerError.yml'
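For reference, a minimal sketch of consuming the list endpoint above from Go, decoding only the generic JSON:API envelope (data, links, included). The URL is a placeholder and the struct is a simplification of the referenced schemas, not a generated client type.

package main

import (
	"encoding/json"
	"fmt"
	"net/http"
)

// trackList mirrors the response shape: a data array of resource objects,
// pagination links, and an optional list of included resources.
type trackList struct {
	Data []struct {
		ID         string          `json:"id"`
		Type       string          `json:"type"`
		Attributes json.RawMessage `json:"attributes"`
	} `json:"data"`
	Links struct {
		Next string `json:"next"`
	} `json:"links"`
	Included []json.RawMessage `json:"included"`
}

func main() {
	// Placeholder address and path; authentication is omitted in this sketch.
	resp, err := http.Get("http://localhost:4533/api/tracks?page[limit]=10&include=album")
	if err != nil {
		panic(err)
	}
	defer resp.Body.Close()

	var tl trackList
	if err := json.NewDecoder(resp.Body).Decode(&tl); err != nil {
		panic(err)
	}
	fmt.Println("tracks on this page:", len(tl.Data), "next page:", tl.Links.Next)
}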

View File

@@ -0,0 +1,5 @@
description: Bad Request
content:
application/vnd.api+json:
schema:
$ref: '../schemas/ErrorList.yml'

View File

@@ -0,0 +1,5 @@
description: Internal Server Error
content:
application/vnd.api+json:
schema:
$ref: '../schemas/ErrorList.yml'

View File

@@ -0,0 +1,5 @@
description: Not Authorized
content:
application/vnd.api+json:
schema:
$ref: '../schemas/ErrorList.yml'

View File

@@ -0,0 +1,5 @@
description: Not Found
content:
application/vnd.api+json:
schema:
$ref: '../schemas/ErrorList.yml'

8
api/responses/_index.yml Normal file
View File

@@ -0,0 +1,8 @@
NotFound:
$ref: './NotFound.yml'
NotAuthorized:
$ref: './NotAuthorized.yml'
BadRequest:
$ref: './BadRequest.yml'
InternalServerError:
$ref: './InternalServerError.yml'

20
api/schemas/Album.yml Normal file
View File

@@ -0,0 +1,20 @@
allOf:
- $ref: './ResourceObject.yml'
- type: object
properties:
attributes:
$ref: './AlbumAttributes.yml'
relationships:
type: object
properties:
artists:
type: array
items:
$ref: './AlbumArtistRelationship.yml'
tracks:
type: array
items:
$ref: './AlbumTrackRelationship.yml'
required:
- artists
- tracks

View File

@@ -0,0 +1,9 @@
type: object
properties:
meta:
$ref: './ArtistMetaObject.yml'
data:
$ref: './ResourceObject.yml'
required:
- meta
- data

View File

@@ -0,0 +1,23 @@
type: object
properties:
title:
type: string
description: The title of the album
artist:
type: string
description: The artist of the album
releaseDate:
type: string
description: The release date of the album
tracktotal:
type: integer
description: The number of tracks on the album
disctotal:
type: integer
description: The number of discs in the album
genre:
type: string
description: The genre of the album
required:
- title
- artist

View File

@@ -0,0 +1,6 @@
type: object
properties:
data:
$ref: './ResourceObject.yml'
required:
- data

27
api/schemas/Artist.yml Normal file
View File

@@ -0,0 +1,27 @@
allOf:
- $ref: './ResourceObject.yml'
- type: object
properties:
attributes:
$ref: './ArtistAttributes.yml'
relationships:
type: object
properties:
tracks:
type: object
properties:
data:
type: array
items:
$ref: './ArtistTrackRelationship.yml'
required:
- data
albums:
type: object
properties:
data:
type: array
items:
$ref: './ArtistAlbumRelationship.yml'
required:
- data

View File

@@ -0,0 +1,9 @@
type: object
properties:
meta:
$ref: './ArtistMetaObject.yml'
data:
$ref: './ResourceObject.yml'
required:
- meta
- data

View File

@@ -0,0 +1,10 @@
type: object
properties:
name:
type: string
description: The name of the artist
bio:
type: string
description: A short biography of the artist
required:
- name

View File

@@ -0,0 +1,6 @@
type: object
properties:
role:
$ref: './ArtistRole.yml'
required:
- role

View File

@@ -0,0 +1,5 @@
type: string
enum:
- artist
- albumArtist
description: The role of an artist in a track or album

View File

@@ -0,0 +1,14 @@
type: object
properties:
meta:
type: object
properties:
role:
$ref: './ArtistRole.yml'
required:
- role
data:
$ref: './ResourceObject.yml'
required:
- meta
- data

View File

@@ -0,0 +1,7 @@
type: object
required: [errors]
properties:
errors:
type: array
items:
$ref: './ErrorObject.yml'

View File

@@ -0,0 +1,10 @@
type: object
properties:
id:
type: string
status:
type: string
title:
type: string
detail:
type: string

View File

@@ -0,0 +1,10 @@
oneOf:
- $ref: './Track.yml'
- $ref: './Album.yml'
- $ref: './Artist.yml'
discriminator:
propertyName: type
mapping:
track: './Track.yml'
album: './Album.yml'
artist: './Artist.yml'
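The discriminator above means every included resource carries a type of track, album or artist. A hedged sketch of dispatching on that field while decoding; the attribute structs are illustrative stubs, not generated types.

package main

import (
	"encoding/json"
	"fmt"
)

// resource is the part shared by Track, Album and Artist objects.
type resource struct {
	ID         string          `json:"id"`
	Type       string          `json:"type"` // "track", "album" or "artist"
	Attributes json.RawMessage `json:"attributes"`
}

func main() {
	raw := []byte(`[{"id":"1","type":"artist","attributes":{"name":"Some Artist"}}]`)

	var included []resource
	if err := json.Unmarshal(raw, &included); err != nil {
		panic(err)
	}
	for _, r := range included {
		switch r.Type {
		case "artist":
			var a struct {
				Name string `json:"name"`
			}
			_ = json.Unmarshal(r.Attributes, &a)
			fmt.Println("artist:", a.Name)
		case "album", "track":
			fmt.Println("other resource:", r.ID)
		}
	}
}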

View File

@@ -0,0 +1,14 @@
type: object
properties:
first:
type: string
format: uri
prev:
type: string
format: uri
next:
type: string
format: uri
last:
type: string
format: uri

View File

@@ -0,0 +1,14 @@
type: object
properties:
currentPage:
type: integer
format: int32
description: The current page in the collection
totalPages:
type: integer
format: int32
description: The total number of pages in the collection
totalItems:
type: integer
format: int32
description: The total number of items in the collection

View File

@@ -0,0 +1,18 @@
type: object
properties:
data:
oneOf:
- $ref: './Track.yml'
- $ref: './Album.yml'
- $ref: './Artist.yml'
- type: array
items:
$ref: './ResourceObject.yml'
included:
type: array
items:
$ref: './IncludedResource.yml'
links:
$ref: './PaginationLinks.yml'
meta:
$ref: './PaginationMeta.yml'
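
A minimal sketch, assuming hypothetical client-side types, of how the ResourceList envelope and the pagination objects defined above could be modelled in Go:

package api

import "encoding/json"

// PaginationLinks mirrors PaginationLinks.yml; each link is a URI string.
type PaginationLinks struct {
	First string `json:"first,omitempty"`
	Prev  string `json:"prev,omitempty"`
	Next  string `json:"next,omitempty"`
	Last  string `json:"last,omitempty"`
}

// PaginationMeta mirrors PaginationMeta.yml.
type PaginationMeta struct {
	CurrentPage int32 `json:"currentPage,omitempty"`
	TotalPages  int32 `json:"totalPages,omitempty"`
	TotalItems  int32 `json:"totalItems,omitempty"`
}

// ResourceList mirrors ResourceList.yml. Because "data" may hold either a
// single resource or an array, it is kept as raw JSON for the caller to
// decode; "included" entries carry the type discriminator.
type ResourceList struct {
	Data     json.RawMessage   `json:"data,omitempty"`
	Included []json.RawMessage `json:"included,omitempty"`
	Links    *PaginationLinks  `json:"links,omitempty"`
	Meta     *PaginationMeta   `json:"meta,omitempty"`
}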

View File

@@ -0,0 +1,8 @@
type: object
required: [id, type]
properties:
id:
type: string
description: The unique identifier for the resource
type:
$ref: './ResourceType.yml'

View File

@@ -0,0 +1,6 @@
type: string
description: The type of the resource
enum:
- album
- artist
- track

View File

@@ -0,0 +1,24 @@
type: object
required: [server, serverVersion, authRequired, features]
properties:
server:
type: string
description: The name of the server software.
example: "navidrome"
serverVersion:
type: string
description: The version number of the server.
example: "0.60.0"
authRequired:
type: boolean
    description: Whether authentication is required to access the server.
example: true
features:
type: array
description: A list of optional features the server supports.
items:
type: string
enum:
- albums
- artists
- images

8
api/schemas/Track.yml Normal file
View File

@@ -0,0 +1,8 @@
allOf:
- $ref: './ResourceObject.yml'
- type: object
properties:
attributes:
$ref: './TrackAttributes.yml'
relationships:
$ref: './TrackRelationships.yml'

View File

@@ -0,0 +1,9 @@
type: object
properties:
meta:
$ref: './ArtistMetaObject.yml'
data:
$ref: './ResourceObject.yml'
required:
- meta
- data

View File

@@ -0,0 +1,55 @@
type: object
required: [title, artist, album, albumartist, track, mimetype, duration, channels, bitrate, size]
properties:
title:
type: string
description: The title of the track
artist: # TODO: Remove
type: string
description: The name of the artist who performed the track
albumartist: # TODO: Remove
type: string
description: The primary artist of the album the track belongs to.
album: # TODO Remove
type: string
description: The name of the album the track belongs to
genre: # TODO Remove
type: string
description: The genre of the track.
track:
type: integer
description: The track number within the album.
disc:
type: integer
description: The disc number within a multi-disc album.
year:
type: integer
description: The release year of the track or album.
bpm:
type: integer
description: The beats per minute (BPM) of the track.
recording-mbid:
type: string
description: The MusicBrainz identifier for the recording of the track.
track-mbid:
type: string
description: The MusicBrainz identifier for the track.
comments:
type: string
description: Any additional comments or notes about the track.
mimetype:
type: string
description: The MIME type of the audio file.
duration:
type: number
format: float
description: The duration of the track in seconds
channels:
type: integer
description: The number of audio channels in the track.
bitrate:
type: integer
description: The bitrate of the audio file in kilobits per second (kbps).
size:
type: integer
description: The size of the audio file in bytes.

View File

@@ -0,0 +1,12 @@
type: object
properties:
artists:
type: array
items:
$ref: './TrackArtistRelationship.yml'
albums:
type: array
items:
$ref: './AlbumTrackRelationship.yml'
required:
- artists

46
api/schemas/_index.yml Normal file
View File

@@ -0,0 +1,46 @@
ServerInfo:
$ref: './ServerInfo.yml'
ResourceObject:
$ref: './ResourceObject.yml'
ResourceType:
$ref: './ResourceType.yml'
ResourceList:
$ref: './ResourceList.yml'
IncludedResource:
$ref: './IncludedResource.yml'
Track:
$ref: './Track.yml'
TrackAttributes:
$ref: './TrackAttributes.yml'
TrackRelationships:
$ref: './TrackRelationships.yml'
TrackArtistRelationship:
$ref: './TrackArtistRelationship.yml'
ArtistRole:
$ref: './ArtistRole.yml'
Artist:
$ref: './Artist.yml'
ArtistAttributes:
$ref: './ArtistAttributes.yml'
ArtistAlbumRelationship:
$ref: './ArtistAlbumRelationship.yml'
ArtistTrackRelationship:
$ref: './ArtistTrackRelationship.yml'
ArtistMetaObject:
$ref: './ArtistMetaObject.yml'
Album:
$ref: './Album.yml'
AlbumAttributes:
$ref: './AlbumAttributes.yml'
AlbumArtistRelationship:
$ref: './AlbumArtistRelationship.yml'
AlbumTrackRelationship:
$ref: './AlbumTrackRelationship.yml'
PaginationLinks:
$ref: './PaginationLinks.yml'
PaginationMeta:
$ref: './PaginationMeta.yml'
ErrorList:
$ref: './ErrorList.yml'
ErrorObject:
$ref: './ErrorObject.yml'

52
api/spec.yml Normal file
View File

@@ -0,0 +1,52 @@
openapi: 3.0.0
info:
version: 0.2.0
title: Navidrome API
description: >
This spec describes the Navidrome API, which allows users to browse and manage their music library via a JSON:API
based interface. The API provides endpoints for albums, tracks, artists, playlists and images, along with their
relationships. Clients can retrieve information about the items in the library, filter and sort results, and
perform actions such as creating and deleting playlists. With this API, developers can build music apps and
    services that integrate with the Navidrome music server, providing a seamless experience for users to access and
manage their music collection.
contact:
name: Navidrome
url: https://navidrome.org
license:
name: GNU General Public License v3.0
url: https://github.com/navidrome/navidrome/blob/master/LICENSE
servers:
- url: /api/v2
security:
- bearerAuth: []
paths:
/server:
$ref: './resources/server.yml'
/tracks:
$ref: './resources/tracks.yml'
/tracks/{trackId}:
$ref: './resources/track.yml'
/artists:
$ref: './resources/artists.yml'
/artists/{artistId}:
$ref: './resources/artist.yml'
/albums:
$ref: './resources/albums.yml'
/albums/{albumId}:
$ref: './resources/album.yml'
components:
parameters:
$ref: './parameters/_index.yml'
schemas:
$ref: './schemas/_index.yml'
responses:
$ref: './responses/_index.yml'
securitySchemes:
bearerAuth:
type: http
scheme: bearer
bearerFormat: JWT
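
To make the spec above concrete, a minimal sketch of an authenticated request against the /api/v2 base path. The host, port and token are placeholders; the bearer scheme and the application/vnd.api+json media type follow the securitySchemes and responses defined earlier:

package main

import (
	"fmt"
	"io"
	"net/http"
)

func main() {
	// Host, port and token below are placeholder values.
	req, err := http.NewRequest(http.MethodGet, "http://localhost:4533/api/v2/tracks", nil)
	if err != nil {
		panic(err)
	}
	req.Header.Set("Authorization", "Bearer <JWT>")
	req.Header.Set("Accept", "application/vnd.api+json")

	resp, err := http.DefaultClient.Do(req)
	if err != nil {
		panic(err)
	}
	defer resp.Body.Close()

	body, _ := io.ReadAll(resp.Body)
	fmt.Println(resp.Status)
	fmt.Println(string(body))
}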

View File

@@ -1,186 +0,0 @@
package cmd
import (
"context"
"fmt"
"os"
"strings"
"time"
"github.com/navidrome/navidrome/conf"
"github.com/navidrome/navidrome/db"
"github.com/navidrome/navidrome/log"
"github.com/spf13/cobra"
)
var (
backupCount int
backupDir string
force bool
restorePath string
)
func init() {
rootCmd.AddCommand(backupRoot)
backupCmd.Flags().StringVarP(&backupDir, "backup-dir", "d", "", "directory to manually make backup")
backupRoot.AddCommand(backupCmd)
pruneCmd.Flags().StringVarP(&backupDir, "backup-dir", "d", "", "directory holding Navidrome backups")
	pruneCmd.Flags().IntVarP(&backupCount, "keep-count", "k", -1, "specify the number of backups to keep. 0 removes ALL backups, and negative values use the default from configuration")
pruneCmd.Flags().BoolVarP(&force, "force", "f", false, "bypass warning when backup count is zero")
backupRoot.AddCommand(pruneCmd)
restoreCommand.Flags().StringVarP(&restorePath, "backup-file", "b", "", "path of backup database to restore")
restoreCommand.Flags().BoolVarP(&force, "force", "f", false, "bypass restore warning")
_ = restoreCommand.MarkFlagRequired("backup-file")
backupRoot.AddCommand(restoreCommand)
}
var (
backupRoot = &cobra.Command{
Use: "backup",
Aliases: []string{"bkp"},
Short: "Create, restore and prune database backups",
Long: "Create, restore and prune database backups",
}
backupCmd = &cobra.Command{
Use: "create",
Short: "Create a backup database",
Long: "Manually backup Navidrome database. This will ignore BackupCount",
Run: func(cmd *cobra.Command, _ []string) {
runBackup(cmd.Context())
},
}
pruneCmd = &cobra.Command{
Use: "prune",
Short: "Prune database backups",
Long: "Manually prune database backups according to backup rules",
Run: func(cmd *cobra.Command, _ []string) {
runPrune(cmd.Context())
},
}
restoreCommand = &cobra.Command{
Use: "restore",
Short: "Restore Navidrome database",
Long: "Restore Navidrome database from a backup. This must be done offline",
Run: func(cmd *cobra.Command, _ []string) {
runRestore(cmd.Context())
},
}
)
func runBackup(ctx context.Context) {
if backupDir != "" {
conf.Server.Backup.Path = backupDir
}
idx := strings.LastIndex(conf.Server.DbPath, "?")
var path string
if idx == -1 {
path = conf.Server.DbPath
} else {
path = conf.Server.DbPath[:idx]
}
if _, err := os.Stat(path); os.IsNotExist(err) {
log.Fatal("No existing database", "path", path)
return
}
start := time.Now()
path, err := db.Backup(ctx)
if err != nil {
log.Fatal("Error backing up database", "backup path", conf.Server.BasePath, err)
}
elapsed := time.Since(start)
log.Info("Backup complete", "elapsed", elapsed, "path", path)
}
func runPrune(ctx context.Context) {
if backupDir != "" {
conf.Server.Backup.Path = backupDir
}
if backupCount != -1 {
conf.Server.Backup.Count = backupCount
}
if conf.Server.Backup.Count == 0 && !force {
fmt.Println("Warning: pruning ALL backups")
fmt.Printf("Please enter YES (all caps) to continue: ")
var input string
_, err := fmt.Scanln(&input)
if input != "YES" || err != nil {
log.Warn("Prune cancelled")
return
}
}
idx := strings.LastIndex(conf.Server.DbPath, "?")
var path string
if idx == -1 {
path = conf.Server.DbPath
} else {
path = conf.Server.DbPath[:idx]
}
if _, err := os.Stat(path); os.IsNotExist(err) {
log.Fatal("No existing database", "path", path)
return
}
start := time.Now()
count, err := db.Prune(ctx)
if err != nil {
log.Fatal("Error pruning up database", "backup path", conf.Server.BasePath, err)
}
elapsed := time.Since(start)
log.Info("Prune complete", "elapsed", elapsed, "successfully pruned", count)
}
func runRestore(ctx context.Context) {
idx := strings.LastIndex(conf.Server.DbPath, "?")
var path string
if idx == -1 {
path = conf.Server.DbPath
} else {
path = conf.Server.DbPath[:idx]
}
if _, err := os.Stat(path); os.IsNotExist(err) {
log.Fatal("No existing database", "path", path)
return
}
if !force {
fmt.Println("Warning: restoring the Navidrome database should only be done offline, especially if your backup is very old.")
fmt.Printf("Please enter YES (all caps) to continue: ")
var input string
_, err := fmt.Scanln(&input)
if input != "YES" || err != nil {
log.Warn("Restore cancelled")
return
}
}
start := time.Now()
err := db.Restore(ctx, restorePath)
if err != nil {
log.Fatal("Error restoring database", "backup path", conf.Server.BasePath, err)
}
elapsed := time.Since(start)
log.Info("Restore complete", "elapsed", elapsed)
}

View File

@@ -1,17 +0,0 @@
package cmd
import (
"testing"
"github.com/navidrome/navidrome/log"
"github.com/navidrome/navidrome/tests"
. "github.com/onsi/ginkgo/v2"
. "github.com/onsi/gomega"
)
func TestCmd(t *testing.T) {
tests.Init(t, false)
log.SetLevel(log.LevelFatal)
RegisterFailHandler(Fail)
RunSpecs(t, "Cmd Suite")
}

View File

@@ -1,79 +0,0 @@
package cmd
import (
"encoding/json"
"fmt"
"strings"
"github.com/navidrome/navidrome/core"
"github.com/navidrome/navidrome/log"
"github.com/navidrome/navidrome/model"
"github.com/pelletier/go-toml/v2"
"github.com/spf13/cobra"
"gopkg.in/yaml.v3"
)
var (
format string
)
func init() {
inspectCmd.Flags().StringVarP(&format, "format", "f", "jsonindent", "output format (pretty, toml, yaml, json, jsonindent)")
rootCmd.AddCommand(inspectCmd)
}
var inspectCmd = &cobra.Command{
Use: "inspect [files to inspect]",
Short: "Inspect tags",
Long: "Show file tags as seen by Navidrome",
Args: cobra.MinimumNArgs(1),
Run: func(cmd *cobra.Command, args []string) {
runInspector(args)
},
}
var marshalers = map[string]func(interface{}) ([]byte, error){
"pretty": prettyMarshal,
"toml": toml.Marshal,
"yaml": yaml.Marshal,
"json": json.Marshal,
"jsonindent": func(v interface{}) ([]byte, error) {
return json.MarshalIndent(v, "", " ")
},
}
func prettyMarshal(v interface{}) ([]byte, error) {
out := v.([]core.InspectOutput)
var res strings.Builder
for i := range out {
res.WriteString(fmt.Sprintf("====================\nFile: %s\n\n", out[i].File))
t, _ := toml.Marshal(out[i].RawTags)
res.WriteString(fmt.Sprintf("Raw tags:\n%s\n\n", t))
t, _ = toml.Marshal(out[i].MappedTags)
res.WriteString(fmt.Sprintf("Mapped tags:\n%s\n\n", t))
}
return []byte(res.String()), nil
}
func runInspector(args []string) {
marshal := marshalers[format]
if marshal == nil {
log.Fatal("Invalid format", "format", format)
}
var out []core.InspectOutput
for _, filePath := range args {
if !model.IsAudioFile(filePath) {
log.Warn("Not an audio file", "file", filePath)
continue
}
output, err := core.Inspect(filePath, 1, "")
if err != nil {
log.Warn("Unable to process file", "file", filePath, "error", err)
continue
}
out = append(out, *output)
}
data, _ := marshal(out)
fmt.Println(string(data))
}

View File

@@ -2,12 +2,8 @@ package cmd
import (
"context"
"encoding/csv"
"encoding/json"
"errors"
"fmt"
"os"
"strconv"
"github.com/Masterminds/squirrel"
"github.com/navidrome/navidrome/core/auth"
@@ -19,57 +15,31 @@ import (
)
var (
playlistID string
outputFile string
userID string
outputFormat string
playlistID string
outputFile string
)
type displayPlaylist struct {
Id string `json:"id"`
Name string `json:"name"`
OwnerName string `json:"ownerName"`
OwnerId string `json:"ownerId"`
Public bool `json:"public"`
}
type displayPlaylists []displayPlaylist
func init() {
plsCmd.Flags().StringVarP(&playlistID, "playlist", "p", "", "playlist name or ID")
plsCmd.Flags().StringVarP(&outputFile, "output", "o", "", "output file (default stdout)")
_ = plsCmd.MarkFlagRequired("playlist")
rootCmd.AddCommand(plsCmd)
listCommand.Flags().StringVarP(&userID, "user", "u", "", "username or ID")
listCommand.Flags().StringVarP(&outputFormat, "format", "f", "csv", "output format [supported values: csv, json]")
plsCmd.AddCommand(listCommand)
}
var (
plsCmd = &cobra.Command{
Use: "pls",
Short: "Export playlists",
Long: "Export Navidrome playlists to M3U files",
Run: func(cmd *cobra.Command, args []string) {
runExporter()
},
}
listCommand = &cobra.Command{
Use: "list",
Short: "List playlists",
Run: func(cmd *cobra.Command, args []string) {
runList()
},
}
)
var plsCmd = &cobra.Command{
Use: "pls",
Short: "Export playlists",
Long: "Export Navidrome playlists to M3U files",
Run: func(cmd *cobra.Command, args []string) {
runExporter()
},
}
func runExporter() {
sqlDB := db.Db()
ds := persistence.New(sqlDB)
ctx := auth.WithAdminUser(context.Background(), ds)
playlist, err := ds.Playlist(ctx).GetWithTracks(playlistID, true, false)
playlist, err := ds.Playlist(ctx).GetWithTracks(playlistID, true)
if err != nil && !errors.Is(err, model.ErrNotFound) {
log.Fatal("Error retrieving playlist", "name", playlistID, err)
}
@@ -79,7 +49,7 @@ func runExporter() {
log.Fatal("Error retrieving playlist", "name", playlistID, err)
}
if len(playlists) > 0 {
playlist, err = ds.Playlist(ctx).GetWithTracks(playlists[0].ID, true, false)
playlist, err = ds.Playlist(ctx).GetWithTracks(playlists[0].ID, true)
if err != nil {
log.Fatal("Error retrieving playlist", "name", playlistID, err)
}
@@ -99,58 +69,3 @@ func runExporter() {
log.Fatal("Error writing to the output file", "file", outputFile, err)
}
}
func runList() {
if outputFormat != "csv" && outputFormat != "json" {
log.Fatal("Invalid output format. Must be one of csv, json", "format", outputFormat)
}
sqlDB := db.Db()
ds := persistence.New(sqlDB)
ctx := auth.WithAdminUser(context.Background(), ds)
options := model.QueryOptions{Sort: "owner_name"}
if userID != "" {
user, err := ds.User(ctx).FindByUsername(userID)
if err != nil && !errors.Is(err, model.ErrNotFound) {
log.Fatal("Error retrieving user by name", "name", userID, err)
}
if errors.Is(err, model.ErrNotFound) {
user, err = ds.User(ctx).Get(userID)
if err != nil {
log.Fatal("Error retrieving user by id", "id", userID, err)
}
}
options.Filters = squirrel.Eq{"owner_id": user.ID}
}
playlists, err := ds.Playlist(ctx).GetAll(options)
if err != nil {
log.Fatal(ctx, "Failed to retrieve playlists", err)
}
if outputFormat == "csv" {
w := csv.NewWriter(os.Stdout)
_ = w.Write([]string{"playlist id", "playlist name", "owner id", "owner name", "public"})
for _, playlist := range playlists {
_ = w.Write([]string{playlist.ID, playlist.Name, playlist.OwnerID, playlist.OwnerName, strconv.FormatBool(playlist.Public)})
}
w.Flush()
} else {
display := make(displayPlaylists, len(playlists))
for idx, playlist := range playlists {
display[idx].Id = playlist.ID
display[idx].Name = playlist.Name
display[idx].OwnerId = playlist.OwnerID
display[idx].OwnerName = playlist.OwnerName
display[idx].Public = playlist.Public
}
j, _ := json.Marshal(display)
fmt.Printf("%s\n", j)
}
}

View File

@@ -1,716 +0,0 @@
package cmd
import (
"cmp"
"crypto/sha256"
"encoding/hex"
"fmt"
"io"
"os"
"path/filepath"
"strings"
"text/tabwriter"
"time"
"github.com/navidrome/navidrome/conf"
"github.com/navidrome/navidrome/log"
"github.com/navidrome/navidrome/plugins"
"github.com/navidrome/navidrome/plugins/schema"
"github.com/navidrome/navidrome/utils"
"github.com/navidrome/navidrome/utils/slice"
"github.com/spf13/cobra"
)
const (
pluginPackageExtension = ".ndp"
pluginDirPermissions = 0700
pluginFilePermissions = 0600
)
func init() {
pluginCmd := &cobra.Command{
Use: "plugin",
Short: "Manage Navidrome plugins",
Long: "Commands for managing Navidrome plugins",
}
listCmd := &cobra.Command{
Use: "list",
Short: "List installed plugins",
Long: "List all installed plugins with their metadata",
Run: pluginList,
}
infoCmd := &cobra.Command{
Use: "info [pluginPackage|pluginName]",
Short: "Show details of a plugin",
Long: "Show detailed information about a plugin package (.ndp file) or an installed plugin",
Args: cobra.ExactArgs(1),
Run: pluginInfo,
}
installCmd := &cobra.Command{
Use: "install [pluginPackage]",
Short: "Install a plugin from a .ndp file",
Long: "Install a Navidrome Plugin Package (.ndp) file",
Args: cobra.ExactArgs(1),
Run: pluginInstall,
}
removeCmd := &cobra.Command{
Use: "remove [pluginName]",
Short: "Remove an installed plugin",
Long: "Remove a plugin by name",
Args: cobra.ExactArgs(1),
Run: pluginRemove,
}
updateCmd := &cobra.Command{
Use: "update [pluginPackage]",
Short: "Update an existing plugin",
Long: "Update an installed plugin with a new version from a .ndp file",
Args: cobra.ExactArgs(1),
Run: pluginUpdate,
}
refreshCmd := &cobra.Command{
Use: "refresh [pluginName]",
Short: "Reload a plugin without restarting Navidrome",
Long: "Reload and recompile a plugin without needing to restart Navidrome",
Args: cobra.ExactArgs(1),
Run: pluginRefresh,
}
devCmd := &cobra.Command{
Use: "dev [folder_path]",
Short: "Create symlink to development folder",
Long: "Create a symlink from a plugin development folder to the plugins directory for easier development",
Args: cobra.ExactArgs(1),
Run: pluginDev,
}
pluginCmd.AddCommand(listCmd, infoCmd, installCmd, removeCmd, updateCmd, refreshCmd, devCmd)
rootCmd.AddCommand(pluginCmd)
}
// Validation helpers
func validatePluginPackageFile(path string) error {
if !utils.FileExists(path) {
return fmt.Errorf("plugin package not found: %s", path)
}
if filepath.Ext(path) != pluginPackageExtension {
return fmt.Errorf("not a valid plugin package: %s (expected %s extension)", path, pluginPackageExtension)
}
return nil
}
func validatePluginDirectory(pluginsDir, pluginName string) (string, error) {
pluginDir := filepath.Join(pluginsDir, pluginName)
if !utils.FileExists(pluginDir) {
return "", fmt.Errorf("plugin not found: %s (path: %s)", pluginName, pluginDir)
}
return pluginDir, nil
}
func resolvePluginPath(pluginDir string) (resolvedPath string, isSymlink bool, err error) {
// Check if it's a directory or a symlink
lstat, err := os.Lstat(pluginDir)
if err != nil {
return "", false, fmt.Errorf("failed to stat plugin: %w", err)
}
isSymlink = lstat.Mode()&os.ModeSymlink != 0
if isSymlink {
// Resolve the symlink target
targetDir, err := os.Readlink(pluginDir)
if err != nil {
return "", true, fmt.Errorf("failed to resolve symlink: %w", err)
}
// If target is a relative path, make it absolute
if !filepath.IsAbs(targetDir) {
targetDir = filepath.Join(filepath.Dir(pluginDir), targetDir)
}
// Verify the target exists and is a directory
targetInfo, err := os.Stat(targetDir)
if err != nil {
return "", true, fmt.Errorf("failed to access symlink target %s: %w", targetDir, err)
}
if !targetInfo.IsDir() {
return "", true, fmt.Errorf("symlink target is not a directory: %s", targetDir)
}
return targetDir, true, nil
} else if !lstat.IsDir() {
return "", false, fmt.Errorf("not a valid plugin directory: %s", pluginDir)
}
return pluginDir, false, nil
}
// Package handling helpers
func loadAndValidatePackage(ndpPath string) (*plugins.PluginPackage, error) {
if err := validatePluginPackageFile(ndpPath); err != nil {
return nil, err
}
pkg, err := plugins.LoadPackage(ndpPath)
if err != nil {
return nil, fmt.Errorf("failed to load plugin package: %w", err)
}
return pkg, nil
}
func extractAndSetupPlugin(ndpPath, targetDir string) error {
if err := plugins.ExtractPackage(ndpPath, targetDir); err != nil {
return fmt.Errorf("failed to extract plugin package: %w", err)
}
ensurePluginDirPermissions(targetDir)
return nil
}
// Display helpers
func displayPluginTableRow(w *tabwriter.Writer, discovery plugins.PluginDiscoveryEntry) {
if discovery.Error != nil {
// Handle global errors (like directory read failure)
if discovery.ID == "" {
log.Error("Failed to read plugins directory", "folder", conf.Server.Plugins.Folder, discovery.Error)
return
}
// Handle individual plugin errors - show them in the table
fmt.Fprintf(w, "%s\tERROR\tERROR\tERROR\tERROR\t%v\n", discovery.ID, discovery.Error)
return
}
// Mark symlinks with an indicator
nameDisplay := discovery.Manifest.Name
if discovery.IsSymlink {
nameDisplay = nameDisplay + " (dev)"
}
// Convert capabilities to strings
capabilities := slice.Map(discovery.Manifest.Capabilities, func(cap schema.PluginManifestCapabilitiesElem) string {
return string(cap)
})
fmt.Fprintf(w, "%s\t%s\t%s\t%s\t%s\t%s\n",
discovery.ID,
nameDisplay,
cmp.Or(discovery.Manifest.Author, "-"),
cmp.Or(discovery.Manifest.Version, "-"),
strings.Join(capabilities, ", "),
cmp.Or(discovery.Manifest.Description, "-"))
}
func displayTypedPermissions(permissions schema.PluginManifestPermissions, indent string) {
if permissions.Http != nil {
fmt.Printf("%shttp:\n", indent)
fmt.Printf("%s Reason: %s\n", indent, permissions.Http.Reason)
fmt.Printf("%s Allow Local Network: %t\n", indent, permissions.Http.AllowLocalNetwork)
fmt.Printf("%s Allowed URLs:\n", indent)
for urlPattern, methodEnums := range permissions.Http.AllowedUrls {
methods := make([]string, len(methodEnums))
for i, methodEnum := range methodEnums {
methods[i] = string(methodEnum)
}
fmt.Printf("%s %s: [%s]\n", indent, urlPattern, strings.Join(methods, ", "))
}
fmt.Println()
}
if permissions.Config != nil {
fmt.Printf("%sconfig:\n", indent)
fmt.Printf("%s Reason: %s\n", indent, permissions.Config.Reason)
fmt.Println()
}
if permissions.Scheduler != nil {
fmt.Printf("%sscheduler:\n", indent)
fmt.Printf("%s Reason: %s\n", indent, permissions.Scheduler.Reason)
fmt.Println()
}
if permissions.Websocket != nil {
fmt.Printf("%swebsocket:\n", indent)
fmt.Printf("%s Reason: %s\n", indent, permissions.Websocket.Reason)
fmt.Printf("%s Allow Local Network: %t\n", indent, permissions.Websocket.AllowLocalNetwork)
fmt.Printf("%s Allowed URLs: [%s]\n", indent, strings.Join(permissions.Websocket.AllowedUrls, ", "))
fmt.Println()
}
if permissions.Cache != nil {
fmt.Printf("%scache:\n", indent)
fmt.Printf("%s Reason: %s\n", indent, permissions.Cache.Reason)
fmt.Println()
}
if permissions.Artwork != nil {
fmt.Printf("%sartwork:\n", indent)
fmt.Printf("%s Reason: %s\n", indent, permissions.Artwork.Reason)
fmt.Println()
}
if permissions.Subsonicapi != nil {
allowedUsers := "All Users"
if len(permissions.Subsonicapi.AllowedUsernames) > 0 {
allowedUsers = strings.Join(permissions.Subsonicapi.AllowedUsernames, ", ")
}
fmt.Printf("%ssubsonicapi:\n", indent)
fmt.Printf("%s Reason: %s\n", indent, permissions.Subsonicapi.Reason)
fmt.Printf("%s Allow Admins: %t\n", indent, permissions.Subsonicapi.AllowAdmins)
fmt.Printf("%s Allowed Usernames: [%s]\n", indent, allowedUsers)
fmt.Println()
}
}
func displayPluginDetails(manifest *schema.PluginManifest, fileInfo *pluginFileInfo, permInfo *pluginPermissionInfo) {
fmt.Println("\nPlugin Information:")
fmt.Printf(" Name: %s\n", manifest.Name)
fmt.Printf(" Author: %s\n", manifest.Author)
fmt.Printf(" Version: %s\n", manifest.Version)
fmt.Printf(" Description: %s\n", manifest.Description)
fmt.Print(" Capabilities: ")
capabilities := make([]string, len(manifest.Capabilities))
for i, cap := range manifest.Capabilities {
capabilities[i] = string(cap)
}
fmt.Print(strings.Join(capabilities, ", "))
fmt.Println()
// Display manifest permissions using the typed permissions
fmt.Println(" Required Permissions:")
displayTypedPermissions(manifest.Permissions, " ")
// Print file information if available
if fileInfo != nil {
fmt.Println("Package Information:")
fmt.Printf(" File: %s\n", fileInfo.path)
fmt.Printf(" Size: %d bytes (%.2f KB)\n", fileInfo.size, float64(fileInfo.size)/1024)
fmt.Printf(" SHA-256: %s\n", fileInfo.hash)
fmt.Printf(" Modified: %s\n", fileInfo.modTime.Format(time.RFC3339))
}
// Print file permissions information if available
if permInfo != nil {
fmt.Println("File Permissions:")
fmt.Printf(" Plugin Directory: %s (%s)\n", permInfo.dirPath, permInfo.dirMode)
if permInfo.isSymlink {
fmt.Printf(" Symlink Target: %s (%s)\n", permInfo.targetPath, permInfo.targetMode)
}
fmt.Printf(" Manifest File: %s\n", permInfo.manifestMode)
if permInfo.wasmMode != "" {
fmt.Printf(" WASM File: %s\n", permInfo.wasmMode)
}
}
}
type pluginFileInfo struct {
path string
size int64
hash string
modTime time.Time
}
type pluginPermissionInfo struct {
dirPath string
dirMode string
isSymlink bool
targetPath string
targetMode string
manifestMode string
wasmMode string
}
func getFileInfo(path string) *pluginFileInfo {
fileInfo, err := os.Stat(path)
if err != nil {
log.Error("Failed to get file information", err)
return nil
}
return &pluginFileInfo{
path: path,
size: fileInfo.Size(),
hash: calculateSHA256(path),
modTime: fileInfo.ModTime(),
}
}
func getPermissionInfo(pluginDir string) *pluginPermissionInfo {
// Get plugin directory permissions
dirInfo, err := os.Lstat(pluginDir)
if err != nil {
log.Error("Failed to get plugin directory permissions", err)
return nil
}
permInfo := &pluginPermissionInfo{
dirPath: pluginDir,
dirMode: dirInfo.Mode().String(),
}
// Check if it's a symlink
if dirInfo.Mode()&os.ModeSymlink != 0 {
permInfo.isSymlink = true
// Get target path and permissions
targetPath, err := os.Readlink(pluginDir)
if err == nil {
if !filepath.IsAbs(targetPath) {
targetPath = filepath.Join(filepath.Dir(pluginDir), targetPath)
}
permInfo.targetPath = targetPath
if targetInfo, err := os.Stat(targetPath); err == nil {
permInfo.targetMode = targetInfo.Mode().String()
}
}
}
// Get manifest file permissions
manifestPath := filepath.Join(pluginDir, "manifest.json")
if manifestInfo, err := os.Stat(manifestPath); err == nil {
permInfo.manifestMode = manifestInfo.Mode().String()
}
// Get WASM file permissions (look for .wasm files)
entries, err := os.ReadDir(pluginDir)
if err == nil {
for _, entry := range entries {
if filepath.Ext(entry.Name()) == ".wasm" {
wasmPath := filepath.Join(pluginDir, entry.Name())
if wasmInfo, err := os.Stat(wasmPath); err == nil {
permInfo.wasmMode = wasmInfo.Mode().String()
break // Just show the first WASM file found
}
}
}
}
return permInfo
}
// Command implementations
func pluginList(cmd *cobra.Command, args []string) {
discoveries := plugins.DiscoverPlugins(conf.Server.Plugins.Folder)
w := tabwriter.NewWriter(os.Stdout, 0, 0, 2, ' ', 0)
fmt.Fprintln(w, "ID\tNAME\tAUTHOR\tVERSION\tCAPABILITIES\tDESCRIPTION")
for _, discovery := range discoveries {
displayPluginTableRow(w, discovery)
}
w.Flush()
}
func pluginInfo(cmd *cobra.Command, args []string) {
path := args[0]
pluginsDir := conf.Server.Plugins.Folder
var manifest *schema.PluginManifest
var fileInfo *pluginFileInfo
var permInfo *pluginPermissionInfo
if filepath.Ext(path) == pluginPackageExtension {
// It's a package file
pkg, err := loadAndValidatePackage(path)
if err != nil {
log.Fatal("Failed to load plugin package", err)
}
manifest = pkg.Manifest
fileInfo = getFileInfo(path)
// No permission info for package files
} else {
// It's a plugin name
pluginDir, err := validatePluginDirectory(pluginsDir, path)
if err != nil {
log.Fatal("Plugin validation failed", err)
}
manifest, err = plugins.LoadManifest(pluginDir)
if err != nil {
log.Fatal("Failed to load plugin manifest", err)
}
// Get permission info for installed plugins
permInfo = getPermissionInfo(pluginDir)
}
displayPluginDetails(manifest, fileInfo, permInfo)
}
func pluginInstall(cmd *cobra.Command, args []string) {
ndpPath := args[0]
pluginsDir := conf.Server.Plugins.Folder
pkg, err := loadAndValidatePackage(ndpPath)
if err != nil {
log.Fatal("Package validation failed", err)
}
// Create target directory based on plugin name
targetDir := filepath.Join(pluginsDir, pkg.Manifest.Name)
// Check if plugin already exists
if utils.FileExists(targetDir) {
log.Fatal("Plugin already installed", "name", pkg.Manifest.Name, "path", targetDir,
"use", "navidrome plugin update")
}
if err := extractAndSetupPlugin(ndpPath, targetDir); err != nil {
log.Fatal("Plugin installation failed", err)
}
fmt.Printf("Plugin '%s' v%s installed successfully\n", pkg.Manifest.Name, pkg.Manifest.Version)
}
func pluginRemove(cmd *cobra.Command, args []string) {
pluginName := args[0]
pluginsDir := conf.Server.Plugins.Folder
pluginDir, err := validatePluginDirectory(pluginsDir, pluginName)
if err != nil {
log.Fatal("Plugin validation failed", err)
}
_, isSymlink, err := resolvePluginPath(pluginDir)
if err != nil {
log.Fatal("Failed to resolve plugin path", err)
}
if isSymlink {
// For symlinked plugins (dev mode), just remove the symlink
if err := os.Remove(pluginDir); err != nil {
log.Fatal("Failed to remove plugin symlink", "name", pluginName, err)
}
fmt.Printf("Development plugin symlink '%s' removed successfully (target directory preserved)\n", pluginName)
} else {
// For regular plugins, remove the entire directory
if err := os.RemoveAll(pluginDir); err != nil {
log.Fatal("Failed to remove plugin directory", "name", pluginName, err)
}
fmt.Printf("Plugin '%s' removed successfully\n", pluginName)
}
}
func pluginUpdate(cmd *cobra.Command, args []string) {
ndpPath := args[0]
pluginsDir := conf.Server.Plugins.Folder
pkg, err := loadAndValidatePackage(ndpPath)
if err != nil {
log.Fatal("Package validation failed", err)
}
// Check if plugin exists
targetDir := filepath.Join(pluginsDir, pkg.Manifest.Name)
if !utils.FileExists(targetDir) {
log.Fatal("Plugin not found", "name", pkg.Manifest.Name, "path", targetDir,
"use", "navidrome plugin install")
}
// Create a backup of the existing plugin
backupDir := targetDir + ".bak." + time.Now().Format("20060102150405")
if err := os.Rename(targetDir, backupDir); err != nil {
log.Fatal("Failed to backup existing plugin", err)
}
// Extract the new package
if err := extractAndSetupPlugin(ndpPath, targetDir); err != nil {
// Restore backup if extraction failed
os.RemoveAll(targetDir)
_ = os.Rename(backupDir, targetDir) // Ignore error as we're already in a fatal path
log.Fatal("Plugin update failed", err)
}
// Remove the backup
os.RemoveAll(backupDir)
fmt.Printf("Plugin '%s' updated to v%s successfully\n", pkg.Manifest.Name, pkg.Manifest.Version)
}
func pluginRefresh(cmd *cobra.Command, args []string) {
pluginName := args[0]
pluginsDir := conf.Server.Plugins.Folder
pluginDir, err := validatePluginDirectory(pluginsDir, pluginName)
if err != nil {
log.Fatal("Plugin validation failed", err)
}
resolvedPath, isSymlink, err := resolvePluginPath(pluginDir)
if err != nil {
log.Fatal("Failed to resolve plugin path", err)
}
if isSymlink {
log.Debug("Processing symlinked plugin", "name", pluginName, "link", pluginDir, "target", resolvedPath)
}
fmt.Printf("Refreshing plugin '%s'...\n", pluginName)
// Get the plugin manager and refresh
mgr := GetPluginManager(cmd.Context())
log.Debug("Scanning plugins directory", "path", pluginsDir)
mgr.ScanPlugins()
log.Info("Waiting for plugin compilation to complete", "name", pluginName)
// Wait for compilation to complete
if err := mgr.EnsureCompiled(pluginName); err != nil {
log.Fatal("Failed to compile refreshed plugin", "name", pluginName, err)
}
log.Info("Plugin compilation completed successfully", "name", pluginName)
fmt.Printf("Plugin '%s' refreshed successfully\n", pluginName)
}
func pluginDev(cmd *cobra.Command, args []string) {
sourcePath, err := filepath.Abs(args[0])
if err != nil {
log.Fatal("Invalid path", "path", args[0], err)
}
pluginsDir := conf.Server.Plugins.Folder
// Validate source directory and manifest
if err := validateDevSource(sourcePath); err != nil {
log.Fatal("Source validation failed", err)
}
// Load manifest to get plugin name
manifest, err := plugins.LoadManifest(sourcePath)
if err != nil {
log.Fatal("Failed to load plugin manifest", "path", filepath.Join(sourcePath, "manifest.json"), err)
}
pluginName := cmp.Or(manifest.Name, filepath.Base(sourcePath))
targetPath := filepath.Join(pluginsDir, pluginName)
// Handle existing target
if err := handleExistingTarget(targetPath, sourcePath); err != nil {
log.Fatal("Failed to handle existing target", err)
}
// Create target directory if needed
if err := os.MkdirAll(filepath.Dir(targetPath), 0755); err != nil {
log.Fatal("Failed to create plugins directory", "path", filepath.Dir(targetPath), err)
}
// Create the symlink
if err := os.Symlink(sourcePath, targetPath); err != nil {
log.Fatal("Failed to create symlink", "source", sourcePath, "target", targetPath, err)
}
fmt.Printf("Development symlink created: '%s' -> '%s'\n", targetPath, sourcePath)
fmt.Println("Plugin can be refreshed with: navidrome plugin refresh", pluginName)
}
// Utility functions
func validateDevSource(sourcePath string) error {
sourceInfo, err := os.Stat(sourcePath)
if err != nil {
return fmt.Errorf("source folder not found: %s (%w)", sourcePath, err)
}
if !sourceInfo.IsDir() {
return fmt.Errorf("source path is not a directory: %s", sourcePath)
}
manifestPath := filepath.Join(sourcePath, "manifest.json")
if !utils.FileExists(manifestPath) {
return fmt.Errorf("source folder missing manifest.json: %s", sourcePath)
}
return nil
}
func handleExistingTarget(targetPath, sourcePath string) error {
if !utils.FileExists(targetPath) {
return nil // Nothing to handle
}
// Check if it's already a symlink to our source
existingLink, err := os.Readlink(targetPath)
if err == nil && existingLink == sourcePath {
fmt.Printf("Symlink already exists and points to the correct source\n")
return fmt.Errorf("symlink already exists") // This will cause early return in caller
}
// Handle case where target exists but is not a symlink to our source
fmt.Printf("Target path '%s' already exists.\n", targetPath)
fmt.Print("Do you want to replace it? (y/N): ")
var response string
_, err = fmt.Scanln(&response)
if err != nil || strings.ToLower(response) != "y" {
if err != nil {
log.Debug("Error reading input, assuming 'no'", err)
}
return fmt.Errorf("operation canceled")
}
// Remove existing target
if err := os.RemoveAll(targetPath); err != nil {
return fmt.Errorf("failed to remove existing target %s: %w", targetPath, err)
}
return nil
}
func ensurePluginDirPermissions(dir string) {
if err := os.Chmod(dir, pluginDirPermissions); err != nil {
log.Error("Failed to set plugin directory permissions", "dir", dir, err)
}
// Apply permissions to all files in the directory
entries, err := os.ReadDir(dir)
if err != nil {
log.Error("Failed to read plugin directory", "dir", dir, err)
return
}
for _, entry := range entries {
path := filepath.Join(dir, entry.Name())
info, err := os.Stat(path)
if err != nil {
log.Error("Failed to stat file", "path", path, err)
continue
}
mode := os.FileMode(pluginFilePermissions) // Files
if info.IsDir() {
mode = os.FileMode(pluginDirPermissions) // Directories
ensurePluginDirPermissions(path) // Recursive
}
if err := os.Chmod(path, mode); err != nil {
log.Error("Failed to set file permissions", "path", path, err)
}
}
}
func calculateSHA256(filePath string) string {
file, err := os.Open(filePath)
if err != nil {
log.Error("Failed to open file for hashing", err)
return "N/A"
}
defer file.Close()
hasher := sha256.New()
if _, err := io.Copy(hasher, file); err != nil {
log.Error("Failed to calculate hash", err)
return "N/A"
}
return hex.EncodeToString(hasher.Sum(nil))
}

View File

@@ -1,193 +0,0 @@
package cmd
import (
"io"
"os"
"path/filepath"
"strings"
"github.com/navidrome/navidrome/conf"
"github.com/navidrome/navidrome/conf/configtest"
. "github.com/onsi/ginkgo/v2"
. "github.com/onsi/gomega"
"github.com/spf13/cobra"
)
var _ = Describe("Plugin CLI Commands", func() {
var tempDir string
var cmd *cobra.Command
var stdOut *os.File
var origStdout *os.File
var outReader *os.File
// Helper to create a test plugin with the given name and details
createTestPlugin := func(name, author, version string, capabilities []string) string {
pluginDir := filepath.Join(tempDir, name)
Expect(os.MkdirAll(pluginDir, 0755)).To(Succeed())
// Create a properly formatted capabilities JSON array
capabilitiesJSON := `"` + strings.Join(capabilities, `", "`) + `"`
manifest := `{
"name": "` + name + `",
"author": "` + author + `",
"version": "` + version + `",
"description": "Plugin for testing",
"website": "https://test.navidrome.org/` + name + `",
"capabilities": [` + capabilitiesJSON + `],
"permissions": {}
}`
Expect(os.WriteFile(filepath.Join(pluginDir, "manifest.json"), []byte(manifest), 0600)).To(Succeed())
// Create a dummy WASM file
wasmContent := []byte("dummy wasm content for testing")
Expect(os.WriteFile(filepath.Join(pluginDir, "plugin.wasm"), wasmContent, 0600)).To(Succeed())
return pluginDir
}
// Helper to execute a command and return captured output
captureOutput := func(reader io.Reader) string {
stdOut.Close()
outputBytes, err := io.ReadAll(reader)
Expect(err).NotTo(HaveOccurred())
return string(outputBytes)
}
BeforeEach(func() {
DeferCleanup(configtest.SetupConfig())
tempDir = GinkgoT().TempDir()
// Setup config
conf.Server.Plugins.Enabled = true
conf.Server.Plugins.Folder = tempDir
// Create a command for testing
cmd = &cobra.Command{Use: "test"}
// Setup stdout capture
origStdout = os.Stdout
var err error
outReader, stdOut, err = os.Pipe()
Expect(err).NotTo(HaveOccurred())
os.Stdout = stdOut
DeferCleanup(func() {
os.Stdout = origStdout
})
})
AfterEach(func() {
os.Stdout = origStdout
if stdOut != nil {
stdOut.Close()
}
if outReader != nil {
outReader.Close()
}
})
Describe("Plugin list command", func() {
It("should list installed plugins", func() {
// Create test plugins
createTestPlugin("plugin1", "Test Author", "1.0.0", []string{"MetadataAgent"})
createTestPlugin("plugin2", "Another Author", "2.1.0", []string{"Scrobbler"})
// Execute command
pluginList(cmd, []string{})
// Verify output
output := captureOutput(outReader)
Expect(output).To(ContainSubstring("plugin1"))
Expect(output).To(ContainSubstring("Test Author"))
Expect(output).To(ContainSubstring("1.0.0"))
Expect(output).To(ContainSubstring("MetadataAgent"))
Expect(output).To(ContainSubstring("plugin2"))
Expect(output).To(ContainSubstring("Another Author"))
Expect(output).To(ContainSubstring("2.1.0"))
Expect(output).To(ContainSubstring("Scrobbler"))
})
})
Describe("Plugin info command", func() {
It("should display information about an installed plugin", func() {
// Create test plugin with multiple capabilities
createTestPlugin("test-plugin", "Test Author", "1.0.0",
[]string{"MetadataAgent", "Scrobbler"})
// Execute command
pluginInfo(cmd, []string{"test-plugin"})
// Verify output
output := captureOutput(outReader)
Expect(output).To(ContainSubstring("Name: test-plugin"))
Expect(output).To(ContainSubstring("Author: Test Author"))
Expect(output).To(ContainSubstring("Version: 1.0.0"))
Expect(output).To(ContainSubstring("Description: Plugin for testing"))
Expect(output).To(ContainSubstring("Capabilities: MetadataAgent, Scrobbler"))
})
})
Describe("Plugin remove command", func() {
It("should remove a regular plugin directory", func() {
// Create test plugin
pluginDir := createTestPlugin("regular-plugin", "Test Author", "1.0.0",
[]string{"MetadataAgent"})
// Execute command
pluginRemove(cmd, []string{"regular-plugin"})
// Verify output
output := captureOutput(outReader)
Expect(output).To(ContainSubstring("Plugin 'regular-plugin' removed successfully"))
// Verify directory is actually removed
_, err := os.Stat(pluginDir)
Expect(os.IsNotExist(err)).To(BeTrue())
})
It("should remove only the symlink for a development plugin", func() {
// Create a real source directory
sourceDir := filepath.Join(GinkgoT().TempDir(), "dev-plugin-source")
Expect(os.MkdirAll(sourceDir, 0755)).To(Succeed())
manifest := `{
"name": "dev-plugin",
"author": "Dev Author",
"version": "0.1.0",
"description": "Development plugin for testing",
"website": "https://test.navidrome.org/dev-plugin",
"capabilities": ["Scrobbler"],
"permissions": {}
}`
Expect(os.WriteFile(filepath.Join(sourceDir, "manifest.json"), []byte(manifest), 0600)).To(Succeed())
// Create a dummy WASM file
wasmContent := []byte("dummy wasm content for testing")
Expect(os.WriteFile(filepath.Join(sourceDir, "plugin.wasm"), wasmContent, 0600)).To(Succeed())
// Create a symlink in the plugins directory
symlinkPath := filepath.Join(tempDir, "dev-plugin")
Expect(os.Symlink(sourceDir, symlinkPath)).To(Succeed())
// Execute command
pluginRemove(cmd, []string{"dev-plugin"})
// Verify output
output := captureOutput(outReader)
Expect(output).To(ContainSubstring("Development plugin symlink 'dev-plugin' removed successfully"))
Expect(output).To(ContainSubstring("target directory preserved"))
// Verify the symlink is removed but source directory exists
_, err := os.Lstat(symlinkPath)
Expect(os.IsNotExist(err)).To(BeTrue())
_, err = os.Stat(sourceDir)
Expect(err).NotTo(HaveOccurred())
})
})
})

View File

@@ -2,28 +2,30 @@ package cmd
import (
"context"
"errors"
"fmt"
"os"
"os/signal"
"strings"
"syscall"
"time"
"github.com/go-chi/chi/v5/middleware"
_ "github.com/navidrome/navidrome/adapters/taglib"
"github.com/navidrome/navidrome/conf"
"github.com/navidrome/navidrome/consts"
"github.com/navidrome/navidrome/core"
"github.com/navidrome/navidrome/db"
"github.com/navidrome/navidrome/log"
"github.com/navidrome/navidrome/model"
"github.com/navidrome/navidrome/resources"
"github.com/navidrome/navidrome/scanner"
"github.com/navidrome/navidrome/scheduler"
"github.com/navidrome/navidrome/server/backgrounds"
"github.com/prometheus/client_golang/prometheus/promhttp"
"golang.org/x/sync/errgroup"
"github.com/spf13/cobra"
"github.com/spf13/viper"
"golang.org/x/sync/errgroup"
)
var interrupted = errors.New("service was interrupted")
var (
cfgFile string
noBanner bool
@@ -37,23 +39,17 @@ Complete documentation is available at https://www.navidrome.org/docs`,
preRun()
},
Run: func(cmd *cobra.Command, args []string) {
runNavidrome(cmd.Context())
},
PostRun: func(cmd *cobra.Command, args []string) {
postRun()
runNavidrome()
},
Version: consts.Version,
}
)
// Execute runs the root cobra command, which will start the Navidrome server by calling the runNavidrome function.
func Execute() {
ctx, cancel := mainContext(context.Background())
defer cancel()
rootCmd.SetVersionTemplate(`{{println .Version}}`)
if err := rootCmd.ExecuteContext(ctx); err != nil {
log.Fatal(err)
if err := rootCmd.Execute(); err != nil {
fmt.Println(err)
os.Exit(1)
}
}
@@ -61,57 +57,34 @@ func preRun() {
if !noBanner {
println(resources.Banner())
}
conf.Load(noBanner)
conf.Load()
}
func postRun() {
log.Info("Navidrome stopped, bye.")
}
func runNavidrome() {
db.Init()
defer func() {
if err := db.Close(); err != nil {
log.Error("Error closing DB", err)
}
log.Info("Navidrome stopped, bye.")
}()
// runNavidrome is the main entry point for the Navidrome server. It starts all the services and blocks.
// If any of the services returns an error, it will log it and exit. If the process receives a signal to exit,
// it will cancel the context and exit gracefully.
func runNavidrome(ctx context.Context) {
defer db.Init(ctx)()
g, ctx := errgroup.WithContext(ctx)
g, ctx := errgroup.WithContext(context.Background())
g.Go(startServer(ctx))
g.Go(startSignaller(ctx))
g.Go(startSignaler(ctx))
g.Go(startScheduler(ctx))
g.Go(startPlaybackServer(ctx))
g.Go(schedulePeriodicBackup(ctx))
g.Go(startInsightsCollector(ctx))
g.Go(scheduleDBOptimizer(ctx))
g.Go(startPluginManager(ctx))
g.Go(runInitialScan(ctx))
if conf.Server.Scanner.Enabled {
g.Go(startScanWatcher(ctx))
g.Go(schedulePeriodicScan(ctx))
} else {
log.Warn(ctx, "Automatic Scanning is DISABLED")
}
g.Go(schedulePeriodicScan(ctx))
if err := g.Wait(); err != nil {
if err := g.Wait(); err != nil && !errors.Is(err, interrupted) {
log.Error("Fatal error in Navidrome. Aborting", err)
}
}
// mainContext returns a context that is cancelled when the process receives a signal to exit.
func mainContext(ctx context.Context) (context.Context, context.CancelFunc) {
return signal.NotifyContext(ctx,
os.Interrupt,
syscall.SIGHUP,
syscall.SIGTERM,
syscall.SIGABRT,
)
}
// startServer starts the Navidrome web server, adding all the necessary routers.
func startServer(ctx context.Context) func() error {
return func() error {
a := CreateServer()
a.MountRouter("Native API", consts.URLPathNativeAPI, CreateNativeAPIRouter(ctx))
a.MountRouter("Subsonic API", consts.URLPathSubsonicAPI, CreateSubsonicAPIRouter(ctx))
a := CreateServer(conf.Server.MusicFolder)
a.MountRouter("Native API", consts.URLPathNativeAPI, CreateNativeAPIRouter())
a.MountRouter("Subsonic API", consts.URLPathSubsonicAPI, CreateSubsonicAPIRouter())
a.MountRouter("Public Endpoints", consts.URLPathPublic, CreatePublicRouter())
if conf.Server.LastFM.Enabled {
a.MountRouter("LastFM Auth", consts.URLPathNativeAPI+"/lastfm", CreateLastFMRouter())
@@ -120,229 +93,60 @@ func startServer(ctx context.Context) func() error {
a.MountRouter("ListenBrainz Auth", consts.URLPathNativeAPI+"/listenbrainz", CreateListenBrainzRouter())
}
if conf.Server.Prometheus.Enabled {
p := CreatePrometheus()
// blocking call because takes <100ms but useful if fails
p.WriteInitialMetrics(ctx)
a.MountRouter("Prometheus metrics", conf.Server.Prometheus.MetricsPath, p.GetHandler())
// blocking call because takes <1ms but useful if fails
core.WriteInitialMetrics()
a.MountRouter("Prometheus metrics", conf.Server.Prometheus.MetricsPath, promhttp.Handler())
}
if conf.Server.DevEnableProfiler {
a.MountRouter("Profiling", "/debug", middleware.Profiler())
}
if strings.HasPrefix(conf.Server.UILoginBackgroundURL, "/") {
a.MountRouter("Background images", conf.Server.UILoginBackgroundURL, backgrounds.NewHandler())
a.MountRouter("Background images", consts.DefaultUILoginBackgroundURL, backgrounds.NewHandler())
}
a.MountRouter("New Native API", consts.URLPathAPI, CreateNewNativeAPIRouter())
return a.Run(ctx, conf.Server.Address, conf.Server.Port, conf.Server.TLSCert, conf.Server.TLSKey)
}
}
// schedulePeriodicScan schedules a periodic scan of the music library, if configured.
func schedulePeriodicScan(ctx context.Context) func() error {
return func() error {
schedule := conf.Server.Scanner.Schedule
schedule := conf.Server.ScanSchedule
if schedule == "" {
log.Info(ctx, "Periodic scan is DISABLED")
log.Warn("Periodic scan is DISABLED")
return nil
}
s := CreateScanner(ctx)
scanner := GetScanner()
schedulerInstance := scheduler.GetInstance()
log.Info("Scheduling periodic scan", "schedule", schedule)
_, err := schedulerInstance.Add(schedule, func() {
_, err := s.ScanAll(ctx, false)
if err != nil {
log.Error(ctx, "Error executing periodic scan", err)
}
err := schedulerInstance.Add(schedule, func() {
_ = scanner.RescanAll(ctx, false)
})
if err != nil {
log.Error(ctx, "Error scheduling periodic scan", err)
log.Error("Error scheduling periodic scan", err)
}
return nil
}
}
func pidHashChanged(ds model.DataStore) (bool, error) {
pidAlbum, err := ds.Property(context.Background()).DefaultGet(consts.PIDAlbumKey, "")
if err != nil {
return false, err
}
pidTrack, err := ds.Property(context.Background()).DefaultGet(consts.PIDTrackKey, "")
if err != nil {
return false, err
}
return !strings.EqualFold(pidAlbum, conf.Server.PID.Album) || !strings.EqualFold(pidTrack, conf.Server.PID.Track), nil
}
// runInitialScan runs an initial scan of the music library if needed.
func runInitialScan(ctx context.Context) func() error {
return func() error {
ds := CreateDataStore()
fullScanRequired, err := ds.Property(ctx).DefaultGet(consts.FullScanAfterMigrationFlagKey, "0")
if err != nil {
return err
}
inProgress, err := ds.Library(ctx).ScanInProgress()
if err != nil {
return err
}
pidHasChanged, err := pidHashChanged(ds)
if err != nil {
return err
}
scanNeeded := conf.Server.Scanner.ScanOnStartup || inProgress || fullScanRequired == "1" || pidHasChanged
time.Sleep(2 * time.Second) // Wait 2 seconds before the initial scan
if scanNeeded {
s := CreateScanner(ctx)
switch {
case fullScanRequired == "1":
log.Warn(ctx, "Full scan required after migration")
_ = ds.Property(ctx).Delete(consts.FullScanAfterMigrationFlagKey)
case pidHasChanged:
log.Warn(ctx, "PID config changed, performing full scan")
fullScanRequired = "1"
case inProgress:
log.Warn(ctx, "Resuming interrupted scan")
default:
log.Info("Executing initial scan")
}
_, err = s.ScanAll(ctx, fullScanRequired == "1")
if err != nil {
log.Error(ctx, "Scan failed", err)
} else {
log.Info(ctx, "Scan completed")
}
} else {
log.Debug(ctx, "Initial scan not needed")
log.Debug("Executing initial scan")
if err := scanner.RescanAll(ctx, false); err != nil {
log.Error("Error executing initial scan", err)
}
log.Debug("Finished initial scan")
return nil
}
}
func startScanWatcher(ctx context.Context) func() error {
return func() error {
if conf.Server.Scanner.WatcherWait == 0 {
log.Debug("Folder watcher is DISABLED")
return nil
}
w := CreateScanWatcher(ctx)
err := w.Run(ctx)
if err != nil {
log.Error("Error starting watcher", err)
}
return nil
}
}
func schedulePeriodicBackup(ctx context.Context) func() error {
return func() error {
schedule := conf.Server.Backup.Schedule
if schedule == "" {
log.Info(ctx, "Periodic backup is DISABLED")
return nil
}
schedulerInstance := scheduler.GetInstance()
log.Info("Scheduling periodic backup", "schedule", schedule)
_, err := schedulerInstance.Add(schedule, func() {
start := time.Now()
path, err := db.Backup(ctx)
elapsed := time.Since(start)
if err != nil {
log.Error(ctx, "Error backing up database", "elapsed", elapsed, err)
return
}
log.Info(ctx, "Backup complete", "elapsed", elapsed, "path", path)
count, err := db.Prune(ctx)
if err != nil {
log.Error(ctx, "Error pruning database", "error", err)
} else if count > 0 {
log.Info(ctx, "Successfully pruned old files", "count", count)
} else {
log.Info(ctx, "No backups pruned")
}
})
return err
}
}
func scheduleDBOptimizer(ctx context.Context) func() error {
return func() error {
log.Info(ctx, "Scheduling DB optimizer", "schedule", consts.OptimizeDBSchedule)
schedulerInstance := scheduler.GetInstance()
_, err := schedulerInstance.Add(consts.OptimizeDBSchedule, func() {
if scanner.IsScanning() {
log.Debug(ctx, "Skipping DB optimization because a scan is in progress")
return
}
db.Optimize(ctx)
})
return err
}
}
// startScheduler starts the Navidrome scheduler, which is used to run periodic tasks.
func startScheduler(ctx context.Context) func() error {
log.Info(ctx, "Starting scheduler")
schedulerInstance := scheduler.GetInstance()
return func() error {
log.Info(ctx, "Starting scheduler")
schedulerInstance := scheduler.GetInstance()
schedulerInstance.Run(ctx)
return nil
}
}
// startInsightsCollector starts the Navidrome Insight Collector, if configured.
func startInsightsCollector(ctx context.Context) func() error {
return func() error {
if !conf.Server.EnableInsightsCollector {
log.Info(ctx, "Insight Collector is DISABLED")
return nil
}
log.Info(ctx, "Starting Insight Collector")
select {
case <-time.After(conf.Server.DevInsightsInitialDelay):
case <-ctx.Done():
return nil
}
ic := CreateInsights()
ic.Run(ctx)
return nil
}
}
// startPlaybackServer starts the Navidrome playback server, if configured.
// It is responsible for the Jukebox functionality
func startPlaybackServer(ctx context.Context) func() error {
return func() error {
if !conf.Server.Jukebox.Enabled {
log.Debug("Jukebox is DISABLED")
return nil
}
log.Info(ctx, "Starting Jukebox service")
playbackInstance := GetPlaybackServer()
return playbackInstance.Run(ctx)
}
}
// startPluginManager starts the plugin manager, if configured.
func startPluginManager(ctx context.Context) func() error {
return func() error {
if !conf.Server.Plugins.Enabled {
log.Debug("Plugins are DISABLED")
return nil
}
log.Info(ctx, "Starting plugin manager")
// Get the manager instance and scan for plugins
manager := GetPluginManager(ctx)
manager.ScanPlugins()
return nil
}
}
// TODO: Implement some struct tags to map flags to viper
func init() {
cobra.OnInitialize(func() {
@@ -355,19 +159,16 @@ func init() {
rootCmd.PersistentFlags().String("datafolder", viper.GetString("datafolder"), "folder to store application data (DB), needs write access")
rootCmd.PersistentFlags().String("cachefolder", viper.GetString("cachefolder"), "folder to store cache data (transcoding, images...), needs write access")
rootCmd.PersistentFlags().StringP("loglevel", "l", viper.GetString("loglevel"), "log level, possible values: error, info, debug, trace")
rootCmd.PersistentFlags().String("logfile", viper.GetString("logfile"), "log file path, if not set logs will be printed to stderr")
_ = viper.BindPFlag("musicfolder", rootCmd.PersistentFlags().Lookup("musicfolder"))
_ = viper.BindPFlag("datafolder", rootCmd.PersistentFlags().Lookup("datafolder"))
_ = viper.BindPFlag("cachefolder", rootCmd.PersistentFlags().Lookup("cachefolder"))
_ = viper.BindPFlag("loglevel", rootCmd.PersistentFlags().Lookup("loglevel"))
_ = viper.BindPFlag("logfile", rootCmd.PersistentFlags().Lookup("logfile"))
rootCmd.Flags().StringP("address", "a", viper.GetString("address"), "IP address to bind to")
rootCmd.Flags().IntP("port", "p", viper.GetInt("port"), "HTTP port Navidrome will listen to")
rootCmd.Flags().String("baseurl", viper.GetString("baseurl"), "base URL to configure Navidrome behind a proxy (ex: /music or http://my.server.com)")
rootCmd.Flags().String("tlscert", viper.GetString("tlscert"), "optional path to a TLS cert file (enables HTTPS listening)")
rootCmd.Flags().String("unixsocketperm", viper.GetString("unixsocketperm"), "optional file permission for the unix socket")
rootCmd.Flags().String("tlskey", viper.GetString("tlskey"), "optional path to a TLS key file (enables HTTPS listening)")
rootCmd.Flags().Duration("sessiontimeout", viper.GetDuration("sessiontimeout"), "how long Navidrome will wait before closing web ui idle sessions")
@@ -376,7 +177,6 @@ func init() {
rootCmd.Flags().Bool("enabletranscodingconfig", viper.GetBool("enabletranscodingconfig"), "enables transcoding configuration in the UI")
rootCmd.Flags().String("transcodingcachesize", viper.GetString("transcodingcachesize"), "size of transcoding cache")
rootCmd.Flags().String("imagecachesize", viper.GetString("imagecachesize"), "size of image (art work) cache. set to 0 to disable cache")
rootCmd.Flags().String("albumplaycountmode", viper.GetString("albumplaycountmode"), "how to compute playcount for albums. absolute (default) or normalized")
rootCmd.Flags().Bool("autoimportplaylists", viper.GetBool("autoimportplaylists"), "enable/disable .m3u playlist auto-import`")
rootCmd.Flags().Bool("prometheus.enabled", viper.GetBool("prometheus.enabled"), "enable/disable prometheus metrics endpoint`")
@@ -385,7 +185,6 @@ func init() {
_ = viper.BindPFlag("address", rootCmd.Flags().Lookup("address"))
_ = viper.BindPFlag("port", rootCmd.Flags().Lookup("port"))
_ = viper.BindPFlag("tlscert", rootCmd.Flags().Lookup("tlscert"))
_ = viper.BindPFlag("unixsocketperm", rootCmd.Flags().Lookup("unixsocketperm"))
_ = viper.BindPFlag("tlskey", rootCmd.Flags().Lookup("tlskey"))
_ = viper.BindPFlag("baseurl", rootCmd.Flags().Lookup("baseurl"))

View File

@@ -2,26 +2,15 @@ package cmd
import (
"context"
"encoding/gob"
"os"
"github.com/navidrome/navidrome/core"
"github.com/navidrome/navidrome/db"
"github.com/navidrome/navidrome/log"
"github.com/navidrome/navidrome/persistence"
"github.com/navidrome/navidrome/scanner"
"github.com/navidrome/navidrome/utils/pl"
"github.com/spf13/cobra"
)
var (
fullScan bool
subprocess bool
)
var fullRescan bool
func init() {
scanCmd.Flags().BoolVarP(&fullScan, "full", "f", false, "check all subfolders, ignoring timestamps")
scanCmd.Flags().BoolVarP(&subprocess, "subprocess", "", false, "run as subprocess (internal use)")
scanCmd.Flags().BoolVarP(&fullRescan, "full", "f", false, "check all subfolders, ignoring timestamps")
rootCmd.AddCommand(scanCmd)
}
@@ -30,53 +19,16 @@ var scanCmd = &cobra.Command{
Short: "Scan music folder",
Long: "Scan music folder for updates",
Run: func(cmd *cobra.Command, args []string) {
runScanner(cmd.Context())
runScanner()
},
}
func trackScanInteractively(ctx context.Context, progress <-chan *scanner.ProgressInfo) {
for status := range pl.ReadOrDone(ctx, progress) {
if status.Warning != "" {
log.Warn(ctx, "Scan warning", "error", status.Warning)
}
if status.Error != "" {
log.Error(ctx, "Scan error", "error", status.Error)
}
// Discard the progress status, we only care about errors
}
if fullScan {
func runScanner() {
scanner := GetScanner()
_ = scanner.RescanAll(context.Background(), fullRescan)
if fullRescan {
log.Info("Finished full rescan")
} else {
log.Info("Finished rescan")
}
}
func trackScanAsSubprocess(ctx context.Context, progress <-chan *scanner.ProgressInfo) {
encoder := gob.NewEncoder(os.Stdout)
for status := range pl.ReadOrDone(ctx, progress) {
err := encoder.Encode(status)
if err != nil {
log.Error(ctx, "Failed to encode status", err)
}
}
}
func runScanner(ctx context.Context) {
sqlDB := db.Db()
defer db.Db().Close()
ds := persistence.New(sqlDB)
pls := core.NewPlaylists(ds)
progress, err := scanner.CallScan(ctx, ds, pls, fullScan)
if err != nil {
log.Fatal(ctx, "Failed to scan", err)
}
// Wait for the scanner to finish
if subprocess {
trackScanAsSubprocess(ctx, progress)
} else {
trackScanInteractively(ctx, progress)
}
}
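The --subprocess flag above switches the scan command from interactive logging to gob-encoding every scanner.ProgressInfo onto stdout. A hedged sketch of the consuming side, assuming a parent process launches `navidrome scan --subprocess` and decodes the same stream; the parent-side wiring is not part of this diff, and only the ProgressInfo fields used above are assumed:

package main

import (
	"encoding/gob"
	"errors"
	"io"
	"log"
	"os/exec"

	"github.com/navidrome/navidrome/scanner"
)

func main() {
	cmd := exec.Command("navidrome", "scan", "--subprocess")
	stdout, err := cmd.StdoutPipe()
	if err != nil {
		log.Fatal(err)
	}
	if err := cmd.Start(); err != nil {
		log.Fatal(err)
	}
	dec := gob.NewDecoder(stdout)
	for {
		var status scanner.ProgressInfo
		if err := dec.Decode(&status); err != nil {
			if !errors.Is(err, io.EOF) {
				log.Println("decode error:", err)
			}
			break // the stream ends when the subprocess exits
		}
		if status.Error != "" {
			log.Println("scan error:", status.Error)
		}
	}
	_ = cmd.Wait()
}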

cmd/signaler_nonunix.go (new file, 27 lines)
View File

@@ -0,0 +1,27 @@
//go:build windows || plan9
package cmd
import (
"context"
"os"
"os/signal"
"github.com/navidrome/navidrome/log"
)
func startSignaler(ctx context.Context) func() error {
log.Info(ctx, "Starting signaler")
return func() error {
var sigChan = make(chan os.Signal, 1)
signal.Notify(sigChan, os.Interrupt)
select {
case sig := <-sigChan:
log.Info(ctx, "Received termination signal", "signal", sig)
return interrupted
case <-ctx.Done():
return nil
}
}
}

View File

@@ -14,24 +14,35 @@ import (
const triggerScanSignal = syscall.SIGUSR1
func startSignaller(ctx context.Context) func() error {
func startSignaler(ctx context.Context) func() error {
log.Info(ctx, "Starting signaler")
scanner := CreateScanner(ctx)
scanner := GetScanner()
return func() error {
var sigChan = make(chan os.Signal, 1)
signal.Notify(sigChan, triggerScanSignal)
signal.Notify(
sigChan,
os.Interrupt,
triggerScanSignal,
syscall.SIGHUP,
syscall.SIGTERM,
syscall.SIGABRT,
)
for {
select {
case sig := <-sigChan:
if sig != triggerScanSignal {
log.Info(ctx, "Received termination signal", "signal", sig)
return interrupted
}
log.Info(ctx, "Received signal, triggering a new scan", "signal", sig)
start := time.Now()
_, err := scanner.ScanAll(ctx, false)
err := scanner.RescanAll(ctx, false)
if err != nil {
log.Error(ctx, "Error scanning", err)
}
log.Info(ctx, "Triggered scan complete", "elapsed", time.Since(start))
log.Info(ctx, "Triggered scan complete", "elapsed", time.Since(start).Round(100*time.Millisecond))
case <-ctx.Done():
return nil
}
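As the Unix signal handler above shows, SIGUSR1 triggers an incremental rescan while the other registered signals shut the server down. A small sketch of sending that signal from another Go program on a Unix-like system; the PID is a placeholder, in practice it would come from a pid file or a process lookup:

package main

import (
	"log"
	"os"
	"syscall"
)

func main() {
	pid := 12345 // placeholder for the PID of the running Navidrome server
	p, err := os.FindProcess(pid)
	if err != nil {
		log.Fatal(err)
	}
	// SIGUSR1 matches the triggerScanSignal constant handled above.
	if err := p.Signal(syscall.SIGUSR1); err != nil {
		log.Fatal(err)
	}
}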

View File

@@ -1,14 +0,0 @@
//go:build windows || plan9
package cmd
import (
"context"
)
// Windows and Plan9 don't support SIGUSR1, so we don't need to start a signaler
func startSignaller(ctx context.Context) func() error {
return func() error {
return nil
}
}

View File

@@ -1,267 +0,0 @@
package cmd
import (
"context"
"fmt"
"os"
"path/filepath"
"sync"
"time"
"github.com/kardianos/service"
"github.com/navidrome/navidrome/conf"
"github.com/navidrome/navidrome/log"
"github.com/spf13/cobra"
)
var (
svcStatusLabels = map[service.Status]string{
service.StatusUnknown: "Unknown",
service.StatusStopped: "Stopped",
service.StatusRunning: "Running",
}
installUser string
workingDirectory string
)
func init() {
svcCmd.AddCommand(buildInstallCmd())
svcCmd.AddCommand(buildUninstallCmd())
svcCmd.AddCommand(buildStartCmd())
svcCmd.AddCommand(buildStopCmd())
svcCmd.AddCommand(buildStatusCmd())
svcCmd.AddCommand(buildExecuteCmd())
rootCmd.AddCommand(svcCmd)
}
var svcCmd = &cobra.Command{
Use: "service",
Aliases: []string{"svc"},
Short: "Manage Navidrome as a service",
Long: fmt.Sprintf("Manage Navidrome as a service, using the OS service manager (%s)", service.Platform()),
Run: runServiceCmd,
}
type svcControl struct {
ctx context.Context
cancel context.CancelFunc
done chan struct{}
}
func (p *svcControl) Start(service.Service) error {
p.done = make(chan struct{})
p.ctx, p.cancel = context.WithCancel(context.Background())
go func() {
runNavidrome(p.ctx)
close(p.done)
}()
return nil
}
func (p *svcControl) Stop(service.Service) error {
log.Info("Stopping service")
p.cancel()
select {
case <-p.done:
log.Info("Service stopped gracefully")
case <-time.After(10 * time.Second):
log.Error("Service did not stop in time. Killing it.")
}
return nil
}
var svcInstance = sync.OnceValue(func() service.Service {
options := make(service.KeyValue)
options["Restart"] = "on-failure"
options["SuccessExitStatus"] = "1 2 8 SIGKILL"
options["UserService"] = false
options["LogDirectory"] = conf.Server.DataFolder
options["SystemdScript"] = systemdScript
if conf.Server.LogFile != "" {
options["LogOutput"] = false
} else {
options["LogOutput"] = true
options["LogDirectory"] = conf.Server.DataFolder
}
svcConfig := &service.Config{
UserName: installUser,
Name: "navidrome",
DisplayName: "Navidrome",
Description: "Your Personal Streaming Service",
Dependencies: []string{
"After=remote-fs.target network.target",
},
WorkingDirectory: executablePath(),
Option: options,
}
arguments := []string{"service", "execute"}
if conf.Server.ConfigFile != "" {
arguments = append(arguments, "-c", conf.Server.ConfigFile)
}
svcConfig.Arguments = arguments
prg := &svcControl{}
svc, err := service.New(prg, svcConfig)
if err != nil {
log.Fatal(err)
}
return svc
})
func runServiceCmd(cmd *cobra.Command, _ []string) {
_ = cmd.Help()
}
func executablePath() string {
if workingDirectory != "" {
return workingDirectory
}
ex, err := os.Executable()
if err != nil {
log.Fatal(err)
}
return filepath.Dir(ex)
}
func buildInstallCmd() *cobra.Command {
runInstallCmd := func(_ *cobra.Command, _ []string) {
var err error
println("Installing service with:")
println(" working directory: " + executablePath())
println(" music folder: " + conf.Server.MusicFolder)
println(" data folder: " + conf.Server.DataFolder)
if conf.Server.LogFile != "" {
println(" log file: " + conf.Server.LogFile)
} else {
println(" logs folder: " + conf.Server.DataFolder)
}
if cfgFile != "" {
conf.Server.ConfigFile, err = filepath.Abs(cfgFile)
if err != nil {
log.Fatal(err)
}
println(" config file: " + conf.Server.ConfigFile)
}
err = svcInstance().Install()
if err != nil {
log.Fatal(err)
}
println("Service installed. Use 'navidrome svc start' to start it.")
}
cmd := &cobra.Command{
Use: "install",
Short: "Install Navidrome service.",
Run: runInstallCmd,
}
cmd.Flags().StringVarP(&installUser, "user", "u", "", "user to run service")
cmd.Flags().StringVarP(&workingDirectory, "working-directory", "w", "", "working directory of service")
return cmd
}
func buildUninstallCmd() *cobra.Command {
return &cobra.Command{
Use: "uninstall",
Short: "Uninstall Navidrome service. Does not delete the music or data folders",
Run: func(cmd *cobra.Command, args []string) {
err := svcInstance().Uninstall()
if err != nil {
log.Fatal(err)
}
println("Service uninstalled. Music and data folders are still intact.")
},
}
}
func buildStartCmd() *cobra.Command {
return &cobra.Command{
Use: "start",
Short: "Start Navidrome service",
Run: func(cmd *cobra.Command, args []string) {
err := svcInstance().Start()
if err != nil {
log.Fatal(err)
}
println("Service started. Use 'navidrome svc status' to check its status.")
},
}
}
func buildStopCmd() *cobra.Command {
return &cobra.Command{
Use: "stop",
Short: "Stop Navidrome service",
Run: func(cmd *cobra.Command, args []string) {
err := svcInstance().Stop()
if err != nil {
log.Fatal(err)
}
println("Service stopped. Use 'navidrome svc status' to check its status.")
},
}
}
func buildStatusCmd() *cobra.Command {
return &cobra.Command{
Use: "status",
Short: "Show Navidrome service status",
Run: func(cmd *cobra.Command, args []string) {
status, err := svcInstance().Status()
if err != nil {
log.Fatal(err)
}
fmt.Printf("Navidrome is %s.\n", svcStatusLabels[status])
},
}
}
func buildExecuteCmd() *cobra.Command {
return &cobra.Command{
Use: "execute",
Short: "Run navidrome as a service in the foreground (it is very unlikely you want to run this, you are better off running just navidrome)",
Run: func(cmd *cobra.Command, args []string) {
err := svcInstance().Run()
if err != nil {
log.Fatal(err)
}
},
}
}
const systemdScript = `[Unit]
Description={{.Description}}
ConditionFileIsExecutable={{.Path|cmdEscape}}
{{range $i, $dep := .Dependencies}}
{{$dep}} {{end}}
[Service]
StartLimitInterval=5
StartLimitBurst=10
ExecStart={{.Path|cmdEscape}}{{range .Arguments}} {{.|cmd}}{{end}}
{{if .WorkingDirectory}}WorkingDirectory={{.WorkingDirectory|cmdEscape}}{{end}}
{{if .UserName}}User={{.UserName}}{{end}}
{{if .Restart}}Restart={{.Restart}}{{end}}
{{if .SuccessExitStatus}}SuccessExitStatus={{.SuccessExitStatus}}{{end}}
TimeoutStopSec=20
RestartSec=120
EnvironmentFile=-/etc/sysconfig/{{.Name}}
DevicePolicy=closed
NoNewPrivileges=yes
PrivateTmp=yes
ProtectControlGroups=yes
ProtectKernelModules=yes
ProtectKernelTunables=yes
RestrictAddressFamilies=AF_UNIX AF_INET AF_INET6
RestrictNamespaces=yes
RestrictRealtime=yes
SystemCallFilter=~@clock @debug @module @mount @obsolete @reboot @setuid @swap
{{if .WorkingDirectory}}ReadWritePaths={{.WorkingDirectory|cmdEscape}}{{end}}
ProtectSystem=full
[Install]
WantedBy=multi-user.target
`
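Taken together, the subcommands above cover the usual lifecycle: install the service (for example navidrome service install -u <user> -w <working-directory>, with the config file recorded via -c as seen in svcInstance), then navidrome service start and navidrome service status. The placeholder values are illustrative; the flag names are the ones defined in buildInstallCmd above.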

View File

@@ -1,104 +1,75 @@
// Code generated by Wire. DO NOT EDIT.
//go:generate go run -mod=mod github.com/google/wire/cmd/wire gen -tags "netgo"
//go:generate go run github.com/google/wire/cmd/wire
//go:build !wireinject
// +build !wireinject
package cmd
import (
"context"
"github.com/google/wire"
"github.com/navidrome/navidrome/core"
"github.com/navidrome/navidrome/core/agents"
"github.com/navidrome/navidrome/core/agents/lastfm"
"github.com/navidrome/navidrome/core/agents/listenbrainz"
"github.com/navidrome/navidrome/core/artwork"
"github.com/navidrome/navidrome/core/external"
"github.com/navidrome/navidrome/core/ffmpeg"
"github.com/navidrome/navidrome/core/metrics"
"github.com/navidrome/navidrome/core/playback"
"github.com/navidrome/navidrome/core/scrobbler"
"github.com/navidrome/navidrome/db"
"github.com/navidrome/navidrome/model"
"github.com/navidrome/navidrome/persistence"
"github.com/navidrome/navidrome/plugins"
"github.com/navidrome/navidrome/scanner"
"github.com/navidrome/navidrome/server"
"github.com/navidrome/navidrome/server/api"
"github.com/navidrome/navidrome/server/events"
"github.com/navidrome/navidrome/server/nativeapi"
"github.com/navidrome/navidrome/server/public"
"github.com/navidrome/navidrome/server/subsonic"
)
import (
_ "github.com/navidrome/navidrome/adapters/taglib"
"sync"
)
// Injectors from wire_injectors.go:
func CreateDataStore() model.DataStore {
sqlDB := db.Db()
dataStore := persistence.New(sqlDB)
return dataStore
}
func CreateServer() *server.Server {
func CreateServer(musicFolder string) *server.Server {
sqlDB := db.Db()
dataStore := persistence.New(sqlDB)
broker := events.GetBroker()
metricsMetrics := metrics.GetPrometheusInstance(dataStore)
manager := plugins.GetManager(dataStore, metricsMetrics)
insights := metrics.GetInstance(dataStore, manager)
serverServer := server.New(dataStore, broker, insights)
serverServer := server.New(dataStore, broker)
return serverServer
}
func CreateNativeAPIRouter(ctx context.Context) *nativeapi.Router {
func CreateNativeAPIRouter() *nativeapi.Router {
sqlDB := db.Db()
dataStore := persistence.New(sqlDB)
share := core.NewShare(dataStore)
playlists := core.NewPlaylists(dataStore)
metricsMetrics := metrics.GetPrometheusInstance(dataStore)
manager := plugins.GetManager(dataStore, metricsMetrics)
insights := metrics.GetInstance(dataStore, manager)
fileCache := artwork.GetImageCache()
fFmpeg := ffmpeg.New()
agentsAgents := agents.GetAgents(dataStore, manager)
provider := external.NewProvider(dataStore, agentsAgents)
artworkArtwork := artwork.NewArtwork(dataStore, fileCache, fFmpeg, provider)
cacheWarmer := artwork.NewCacheWarmer(artworkArtwork, fileCache)
broker := events.GetBroker()
scannerScanner := scanner.New(ctx, dataStore, cacheWarmer, broker, playlists, metricsMetrics)
watcher := scanner.GetWatcher(dataStore, scannerScanner)
library := core.NewLibrary(dataStore, scannerScanner, watcher, broker)
maintenance := core.NewMaintenance(dataStore)
router := nativeapi.New(dataStore, share, playlists, insights, library, maintenance)
router := nativeapi.New(dataStore, share)
return router
}
func CreateSubsonicAPIRouter(ctx context.Context) *subsonic.Router {
func CreateNewNativeAPIRouter() *api.Router {
sqlDB := db.Db()
dataStore := persistence.New(sqlDB)
router := api.New(dataStore)
return router
}
func CreateSubsonicAPIRouter() *subsonic.Router {
sqlDB := db.Db()
dataStore := persistence.New(sqlDB)
fileCache := artwork.GetImageCache()
fFmpeg := ffmpeg.New()
metricsMetrics := metrics.GetPrometheusInstance(dataStore)
manager := plugins.GetManager(dataStore, metricsMetrics)
agentsAgents := agents.GetAgents(dataStore, manager)
provider := external.NewProvider(dataStore, agentsAgents)
artworkArtwork := artwork.NewArtwork(dataStore, fileCache, fFmpeg, provider)
agentsAgents := agents.New(dataStore)
externalMetadata := core.NewExternalMetadata(dataStore, agentsAgents)
artworkArtwork := artwork.NewArtwork(dataStore, fileCache, fFmpeg, externalMetadata)
transcodingCache := core.GetTranscodingCache()
mediaStreamer := core.NewMediaStreamer(dataStore, fFmpeg, transcodingCache)
share := core.NewShare(dataStore)
archiver := core.NewArchiver(mediaStreamer, dataStore, share)
players := core.NewPlayers(dataStore)
cacheWarmer := artwork.NewCacheWarmer(artworkArtwork, fileCache)
scanner := GetScanner()
broker := events.GetBroker()
playlists := core.NewPlaylists(dataStore)
scannerScanner := scanner.New(ctx, dataStore, cacheWarmer, broker, playlists, metricsMetrics)
playTracker := scrobbler.GetPlayTracker(dataStore, broker, manager)
playbackServer := playback.GetInstance(dataStore)
router := subsonic.New(dataStore, artworkArtwork, mediaStreamer, archiver, players, provider, scannerScanner, broker, playlists, playTracker, share, playbackServer, metricsMetrics)
playTracker := scrobbler.GetPlayTracker(dataStore, broker)
router := subsonic.New(dataStore, artworkArtwork, mediaStreamer, archiver, players, externalMetadata, scanner, broker, playlists, playTracker, share)
return router
}
@@ -107,11 +78,9 @@ func CreatePublicRouter() *public.Router {
dataStore := persistence.New(sqlDB)
fileCache := artwork.GetImageCache()
fFmpeg := ffmpeg.New()
metricsMetrics := metrics.GetPrometheusInstance(dataStore)
manager := plugins.GetManager(dataStore, metricsMetrics)
agentsAgents := agents.GetAgents(dataStore, manager)
provider := external.NewProvider(dataStore, agentsAgents)
artworkArtwork := artwork.NewArtwork(dataStore, fileCache, fFmpeg, provider)
agentsAgents := agents.New(dataStore)
externalMetadata := core.NewExternalMetadata(dataStore, agentsAgents)
artworkArtwork := artwork.NewArtwork(dataStore, fileCache, fFmpeg, externalMetadata)
transcodingCache := core.GetTranscodingCache()
mediaStreamer := core.NewMediaStreamer(dataStore, fFmpeg, transcodingCache)
share := core.NewShare(dataStore)
@@ -134,78 +103,34 @@ func CreateListenBrainzRouter() *listenbrainz.Router {
return router
}
func CreateInsights() metrics.Insights {
sqlDB := db.Db()
dataStore := persistence.New(sqlDB)
metricsMetrics := metrics.GetPrometheusInstance(dataStore)
manager := plugins.GetManager(dataStore, metricsMetrics)
insights := metrics.GetInstance(dataStore, manager)
return insights
}
func CreatePrometheus() metrics.Metrics {
sqlDB := db.Db()
dataStore := persistence.New(sqlDB)
metricsMetrics := metrics.GetPrometheusInstance(dataStore)
return metricsMetrics
}
func CreateScanner(ctx context.Context) scanner.Scanner {
func createScanner() scanner.Scanner {
sqlDB := db.Db()
dataStore := persistence.New(sqlDB)
playlists := core.NewPlaylists(dataStore)
fileCache := artwork.GetImageCache()
fFmpeg := ffmpeg.New()
metricsMetrics := metrics.GetPrometheusInstance(dataStore)
manager := plugins.GetManager(dataStore, metricsMetrics)
agentsAgents := agents.GetAgents(dataStore, manager)
provider := external.NewProvider(dataStore, agentsAgents)
artworkArtwork := artwork.NewArtwork(dataStore, fileCache, fFmpeg, provider)
agentsAgents := agents.New(dataStore)
externalMetadata := core.NewExternalMetadata(dataStore, agentsAgents)
artworkArtwork := artwork.NewArtwork(dataStore, fileCache, fFmpeg, externalMetadata)
cacheWarmer := artwork.NewCacheWarmer(artworkArtwork, fileCache)
broker := events.GetBroker()
playlists := core.NewPlaylists(dataStore)
scannerScanner := scanner.New(ctx, dataStore, cacheWarmer, broker, playlists, metricsMetrics)
scannerScanner := scanner.New(dataStore, playlists, cacheWarmer, broker)
return scannerScanner
}
func CreateScanWatcher(ctx context.Context) scanner.Watcher {
sqlDB := db.Db()
dataStore := persistence.New(sqlDB)
fileCache := artwork.GetImageCache()
fFmpeg := ffmpeg.New()
metricsMetrics := metrics.GetPrometheusInstance(dataStore)
manager := plugins.GetManager(dataStore, metricsMetrics)
agentsAgents := agents.GetAgents(dataStore, manager)
provider := external.NewProvider(dataStore, agentsAgents)
artworkArtwork := artwork.NewArtwork(dataStore, fileCache, fFmpeg, provider)
cacheWarmer := artwork.NewCacheWarmer(artworkArtwork, fileCache)
broker := events.GetBroker()
playlists := core.NewPlaylists(dataStore)
scannerScanner := scanner.New(ctx, dataStore, cacheWarmer, broker, playlists, metricsMetrics)
watcher := scanner.GetWatcher(dataStore, scannerScanner)
return watcher
}
func GetPlaybackServer() playback.PlaybackServer {
sqlDB := db.Db()
dataStore := persistence.New(sqlDB)
playbackServer := playback.GetInstance(dataStore)
return playbackServer
}
func getPluginManager() plugins.Manager {
sqlDB := db.Db()
dataStore := persistence.New(sqlDB)
metricsMetrics := metrics.GetPrometheusInstance(dataStore)
manager := plugins.GetManager(dataStore, metricsMetrics)
return manager
}
// wire_injectors.go:
var allProviders = wire.NewSet(core.Set, artwork.Set, server.New, subsonic.New, nativeapi.New, public.New, persistence.New, lastfm.NewRouter, listenbrainz.NewRouter, events.GetBroker, scanner.New, scanner.GetWatcher, plugins.GetManager, metrics.GetPrometheusInstance, db.Db, wire.Bind(new(agents.PluginLoader), new(plugins.Manager)), wire.Bind(new(scrobbler.PluginLoader), new(plugins.Manager)), wire.Bind(new(metrics.PluginLoader), new(plugins.Manager)), wire.Bind(new(core.Scanner), new(scanner.Scanner)), wire.Bind(new(core.Watcher), new(scanner.Watcher)))
var allProviders = wire.NewSet(core.Set, artwork.Set, subsonic.New, nativeapi.New, api.New, public.New, persistence.New, lastfm.NewRouter, listenbrainz.NewRouter, events.GetBroker, db.Db)
func GetPluginManager(ctx context.Context) plugins.Manager {
manager := getPluginManager()
manager.SetSubsonicRouter(CreateSubsonicAPIRouter(ctx))
return manager
// Scanner must be a Singleton
var (
onceScanner sync.Once
scannerInstance scanner.Scanner
)
func GetScanner() scanner.Scanner {
onceScanner.Do(func() {
scannerInstance = createScanner()
})
return scannerInstance
}

View File

@@ -3,23 +3,18 @@
package cmd
import (
"context"
"sync"
"github.com/google/wire"
"github.com/navidrome/navidrome/core"
"github.com/navidrome/navidrome/core/agents"
"github.com/navidrome/navidrome/core/agents/lastfm"
"github.com/navidrome/navidrome/core/agents/listenbrainz"
"github.com/navidrome/navidrome/core/artwork"
"github.com/navidrome/navidrome/core/metrics"
"github.com/navidrome/navidrome/core/playback"
"github.com/navidrome/navidrome/core/scrobbler"
"github.com/navidrome/navidrome/db"
"github.com/navidrome/navidrome/model"
"github.com/navidrome/navidrome/persistence"
"github.com/navidrome/navidrome/plugins"
"github.com/navidrome/navidrome/scanner"
"github.com/navidrome/navidrome/server"
"github.com/navidrome/navidrome/server/api"
"github.com/navidrome/navidrome/server/events"
"github.com/navidrome/navidrome/server/nativeapi"
"github.com/navidrome/navidrome/server/public"
@@ -29,47 +24,40 @@ import (
var allProviders = wire.NewSet(
core.Set,
artwork.Set,
server.New,
subsonic.New,
nativeapi.New,
api.New,
public.New,
persistence.New,
lastfm.NewRouter,
listenbrainz.NewRouter,
events.GetBroker,
scanner.New,
scanner.GetWatcher,
plugins.GetManager,
metrics.GetPrometheusInstance,
db.Db,
wire.Bind(new(agents.PluginLoader), new(plugins.Manager)),
wire.Bind(new(scrobbler.PluginLoader), new(plugins.Manager)),
wire.Bind(new(metrics.PluginLoader), new(plugins.Manager)),
wire.Bind(new(core.Scanner), new(scanner.Scanner)),
wire.Bind(new(core.Watcher), new(scanner.Watcher)),
)
func CreateDataStore() model.DataStore {
func CreateServer(musicFolder string) *server.Server {
panic(wire.Build(
server.New,
allProviders,
))
}
func CreateNativeAPIRouter() *nativeapi.Router {
panic(wire.Build(
allProviders,
))
}
func CreateServer() *server.Server {
func CreateNewNativeAPIRouter() *api.Router {
panic(wire.Build(
allProviders,
))
}
func CreateNativeAPIRouter(ctx context.Context) *nativeapi.Router {
panic(wire.Build(
allProviders,
))
}
func CreateSubsonicAPIRouter(ctx context.Context) *subsonic.Router {
func CreateSubsonicAPIRouter() *subsonic.Router {
panic(wire.Build(
allProviders,
GetScanner,
))
}
@@ -91,44 +79,22 @@ func CreateListenBrainzRouter() *listenbrainz.Router {
))
}
func CreateInsights() metrics.Insights {
panic(wire.Build(
allProviders,
))
// Scanner must be a Singleton
var (
onceScanner sync.Once
scannerInstance scanner.Scanner
)
func GetScanner() scanner.Scanner {
onceScanner.Do(func() {
scannerInstance = createScanner()
})
return scannerInstance
}
func CreatePrometheus() metrics.Metrics {
func createScanner() scanner.Scanner {
panic(wire.Build(
allProviders,
scanner.New,
))
}
func CreateScanner(ctx context.Context) scanner.Scanner {
panic(wire.Build(
allProviders,
))
}
func CreateScanWatcher(ctx context.Context) scanner.Watcher {
panic(wire.Build(
allProviders,
))
}
func GetPlaybackServer() playback.PlaybackServer {
panic(wire.Build(
allProviders,
))
}
func getPluginManager() plugins.Manager {
panic(wire.Build(
allProviders,
))
}
func GetPluginManager(ctx context.Context) plugins.Manager {
manager := getPluginManager()
manager.SetSubsonicRouter(CreateSubsonicAPIRouter(ctx))
return manager
}
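The injectors file above only declares stubs whose bodies are panic(wire.Build(...)); running google/wire then generates the concrete constructors shown in the generated file earlier in this diff. A minimal, stand-alone sketch of that pattern with hypothetical types (none of these names come from Navidrome):

//go:build wireinject

package example

import "github.com/google/wire"

type DB struct{}
type Store struct{ db *DB }
type Server struct{ store *Store }

func NewDB() *DB                  { return &DB{} }
func NewStore(db *DB) *Store      { return &Store{db: db} }
func NewServer(st *Store) *Server { return &Server{store: st} }

// CreateServer is an injector stub: wire replaces this body with generated
// code that calls NewDB, NewStore and NewServer in dependency order.
func CreateServer() *Server {
	panic(wire.Build(NewDB, NewStore, NewServer))
}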

View File

@@ -1,4 +0,0 @@
package buildtags
// This file is left intentionally empty. It is used to make sure the package is not empty, in case all
// required build tags are disabled.

View File

@@ -1,11 +0,0 @@
//go:build netgo
package buildtags
// NOTICE: This file was created to force the inclusion of the `netgo` tag when compiling the project.
// If the tag is not included, the compilation will fail because this variable won't be defined, and the `main.go`
// file requires it.
// Why is this tag required? See https://github.com/navidrome/navidrome/issues/700
var NETGO = true

View File

@@ -9,164 +9,109 @@ import (
"strings"
"time"
"github.com/bmatcuk/doublestar/v4"
"github.com/go-viper/encoding/ini"
"github.com/kr/pretty"
"github.com/navidrome/navidrome/consts"
"github.com/navidrome/navidrome/log"
"github.com/navidrome/navidrome/utils/run"
"github.com/navidrome/navidrome/utils/number"
"github.com/robfig/cron/v3"
"github.com/spf13/viper"
)
type configOptions struct {
ConfigFile string
Address string
Port int
UnixSocketPerm string
MusicFolder string
DataFolder string
CacheFolder string
DbPath string
LogLevel string
LogFile string
SessionTimeout time.Duration
BaseURL string
BasePath string
BaseHost string
BaseScheme string
TLSCert string
TLSKey string
UILoginBackgroundURL string
UIWelcomeMessage string
MaxSidebarPlaylists int
EnableTranscodingConfig bool
EnableDownloads bool
EnableExternalServices bool
EnableInsightsCollector bool
EnableMediaFileCoverArt bool
TranscodingCacheSize string
ImageCacheSize string
AlbumPlayCountMode string
EnableArtworkPrecache bool
AutoImportPlaylists bool
DefaultPlaylistPublicVisibility bool
PlaylistsPath string
SmartPlaylistRefreshDelay time.Duration
AutoTranscodeDownload bool
DefaultDownsamplingFormat string
SearchFullString bool
RecentlyAddedByModTime bool
PreferSortTags bool
IgnoredArticles string
IndexGroups string
FFmpegPath string
MPVPath string
MPVCmdTemplate string
CoverArtPriority string
CoverJpegQuality int
ArtistArtPriority string
LyricsPriority string
EnableGravatar bool
EnableFavourites bool
EnableStarRating bool
EnableUserEditing bool
EnableSharing bool
ShareURL string
DefaultShareExpiration time.Duration
DefaultDownloadableShare bool
DefaultTheme string
DefaultLanguage string
DefaultUIVolume int
EnableReplayGain bool
EnableCoverAnimation bool
EnableNowPlaying bool
GATrackingID string
EnableLogRedacting bool
AuthRequestLimit int
AuthWindowLength time.Duration
PasswordEncryptionKey string
ReverseProxyUserHeader string
ReverseProxyWhitelist string
Plugins pluginsOptions
PluginConfig map[string]map[string]string
HTTPSecurityHeaders secureOptions `json:",omitzero"`
Prometheus prometheusOptions `json:",omitzero"`
Scanner scannerOptions `json:",omitzero"`
Jukebox jukeboxOptions `json:",omitzero"`
Backup backupOptions `json:",omitzero"`
PID pidOptions `json:",omitzero"`
Inspect inspectOptions `json:",omitzero"`
Subsonic subsonicOptions `json:",omitzero"`
LastFM lastfmOptions `json:",omitzero"`
Spotify spotifyOptions `json:",omitzero"`
Deezer deezerOptions `json:",omitzero"`
ListenBrainz listenBrainzOptions `json:",omitzero"`
Tags map[string]TagConf `json:",omitempty"`
Agents string
ConfigFile string
Address string
Port int
MusicFolder string
DataFolder string
CacheFolder string
DbPath string
LogLevel string
ScanInterval time.Duration
ScanSchedule string
SessionTimeout time.Duration
BaseURL string
BasePath string
BaseHost string
BaseScheme string
TLSCert string
TLSKey string
UILoginBackgroundURL string
UIWelcomeMessage string
MaxSidebarPlaylists int
EnableTranscodingConfig bool
EnableDownloads bool
EnableExternalServices bool
EnableMediaFileCoverArt bool
TranscodingCacheSize string
ImageCacheSize string
EnableArtworkPrecache bool
AutoImportPlaylists bool
PlaylistsPath string
AutoTranscodeDownload bool
DefaultDownsamplingFormat string
SearchFullString bool
RecentlyAddedByModTime bool
IgnoredArticles string
IndexGroups string
SubsonicArtistParticipations bool
FFmpegPath string
CoverArtPriority string
CoverJpegQuality int
ArtistArtPriority string
EnableGravatar bool
EnableFavourites bool
EnableStarRating bool
EnableUserEditing bool
EnableSharing bool
DefaultDownloadableShare bool
DefaultTheme string
DefaultLanguage string
DefaultUIVolume int
EnableReplayGain bool
EnableCoverAnimation bool
GATrackingID string
EnableLogRedacting bool
AuthRequestLimit int
AuthWindowLength time.Duration
PasswordEncryptionKey string
ReverseProxyUserHeader string
ReverseProxyWhitelist string
Prometheus prometheusOptions
Scanner scannerOptions
Agents string
LastFM lastfmOptions
Spotify spotifyOptions
ListenBrainz listenBrainzOptions
// DevFlags. These are used to enable/disable debugging and incomplete features
DevLogLevels map[string]string `json:",omitempty"`
DevLogSourceLine bool
DevLogLevels map[string]string
DevEnableProfiler bool
DevAutoCreateAdminPassword string
DevAutoLoginUsername string
DevActivityPanel bool
DevActivityPanelUpdateRate time.Duration
DevSidebarPlaylists bool
DevEnableBufferedScrobble bool
DevShowArtistPage bool
DevUIShowConfig bool
DevNewEventStream bool
DevOffsetOptimize int
DevArtworkMaxRequests int
DevArtworkThrottleBacklogLimit int
DevArtworkThrottleBacklogTimeout time.Duration
DevArtistInfoTimeToLive time.Duration
DevAlbumInfoTimeToLive time.Duration
DevExternalScanner bool
DevScannerThreads uint
DevInsightsInitialDelay time.Duration
DevEnablePlayerInsights bool
DevEnablePluginsInsights bool
DevPluginCompilationTimeout time.Duration
DevExternalArtistFetchMultiplier float64
}
type scannerOptions struct {
Enabled bool
Schedule string
WatcherWait time.Duration
ScanOnStartup bool
Extractor string
ArtistJoiner string
GenreSeparators string // Deprecated: Use Tags.genre.Split instead
GroupAlbumReleases bool // Deprecated: Use PID.Album instead
FollowSymlinks bool // Whether to follow symlinks when scanning directories
PurgeMissing string // Values: "never", "always", "full"
}
type subsonicOptions struct {
AppendSubtitle bool
ArtistParticipations bool
DefaultReportRealPath bool
LegacyClients string
}
type TagConf struct {
Ignore bool `yaml:"ignore" json:",omitempty"`
Aliases []string `yaml:"aliases" json:",omitempty"`
Type string `yaml:"type" json:",omitempty"`
MaxLength int `yaml:"maxLength" json:",omitempty"`
Split []string `yaml:"split" json:",omitempty"`
Album bool `yaml:"album" json:",omitempty"`
GenreSeparators string
GroupAlbumReleases bool
}
type lastfmOptions struct {
Enabled bool
ApiKey string
Secret string
Language string
ScrobbleFirstArtistOnly bool
Enabled bool
ApiKey string
Secret string
Language string
}
type spotifyOptions struct {
@@ -174,56 +119,14 @@ type spotifyOptions struct {
Secret string
}
type deezerOptions struct {
Enabled bool
}
type listenBrainzOptions struct {
Enabled bool
BaseURL string
}
type secureOptions struct {
CustomFrameOptionsValue string
}
type prometheusOptions struct {
Enabled bool
MetricsPath string
Password string
}
type AudioDeviceDefinition []string
type jukeboxOptions struct {
Enabled bool
Devices []AudioDeviceDefinition
Default string
AdminOnly bool
}
type backupOptions struct {
Count int
Path string
Schedule string
}
type pidOptions struct {
Track string
Album string
}
type inspectOptions struct {
Enabled bool
MaxRequests int
BacklogLimit int
BacklogTimeout int
}
type pluginsOptions struct {
Enabled bool
Folder string
CacheSize string
}
var (
@@ -238,21 +141,18 @@ func LoadFromFile(confFile string) {
_, _ = fmt.Fprintln(os.Stderr, "FATAL: Error reading config file:", err)
os.Exit(1)
}
Load(true)
Load()
}
func Load(noConfigDump bool) {
parseIniFileConfiguration()
func Load() {
err := viper.Unmarshal(&Server)
if err != nil {
_, _ = fmt.Fprintln(os.Stderr, "FATAL: Error parsing config:", err)
os.Exit(1)
}
err = os.MkdirAll(Server.DataFolder, os.ModePerm)
if err != nil {
_, _ = fmt.Fprintln(os.Stderr, "FATAL: Error creating data path:", err)
_, _ = fmt.Fprintln(os.Stderr, "FATAL: Error creating data path:", "path", Server.DataFolder, err)
os.Exit(1)
}
@@ -261,63 +161,28 @@ func Load(noConfigDump bool) {
}
err = os.MkdirAll(Server.CacheFolder, os.ModePerm)
if err != nil {
_, _ = fmt.Fprintln(os.Stderr, "FATAL: Error creating cache path:", err)
_, _ = fmt.Fprintln(os.Stderr, "FATAL: Error creating cache path:", "path", Server.CacheFolder, err)
os.Exit(1)
}
if Server.Plugins.Enabled {
if Server.Plugins.Folder == "" {
Server.Plugins.Folder = filepath.Join(Server.DataFolder, "plugins")
}
err = os.MkdirAll(Server.Plugins.Folder, 0700)
if err != nil {
_, _ = fmt.Fprintln(os.Stderr, "FATAL: Error creating plugins path:", err)
os.Exit(1)
}
}
Server.ConfigFile = viper.GetViper().ConfigFileUsed()
if Server.DbPath == "" {
Server.DbPath = filepath.Join(Server.DataFolder, consts.DefaultDbPath)
}
if Server.Backup.Path != "" {
err = os.MkdirAll(Server.Backup.Path, os.ModePerm)
if err != nil {
_, _ = fmt.Fprintln(os.Stderr, "FATAL: Error creating backup path:", err)
os.Exit(1)
}
}
out := os.Stderr
if Server.LogFile != "" {
out, err = os.OpenFile(Server.LogFile, os.O_APPEND|os.O_CREATE|os.O_WRONLY, 0644)
if err != nil {
_, _ = fmt.Fprintf(os.Stderr, "FATAL: Error opening log file %s: %s\n", Server.LogFile, err.Error())
os.Exit(1)
}
log.SetOutput(out)
}
log.SetLevelString(Server.LogLevel)
log.SetLogLevels(Server.DevLogLevels)
log.SetLogSourceLine(Server.DevLogSourceLine)
log.SetRedacting(Server.EnableLogRedacting)
err = run.Sequentially(
validateScanSchedule,
validateBackupSchedule,
validatePlaylistsPath,
validatePurgeMissingOption,
)
if err != nil {
if err := validateScanSchedule(); err != nil {
os.Exit(1)
}
if Server.BaseURL != "" {
u, err := url.Parse(Server.BaseURL)
if err != nil {
_, _ = fmt.Fprintln(os.Stderr, "FATAL: Invalid BaseURL:", err)
_, _ = fmt.Fprintf(os.Stderr, "FATAL: Invalid BaseURL %s: %s\n", Server.BaseURL, err.Error())
os.Exit(1)
}
Server.BasePath = u.Path
@@ -328,75 +193,28 @@ func Load(noConfigDump bool) {
}
// Print current configuration if log level is Debug
if log.IsGreaterOrEqualTo(log.LevelDebug) && !noConfigDump {
if log.CurrentLevel() >= log.LevelDebug {
prettyConf := pretty.Sprintf("Loaded configuration from '%s': %# v", Server.ConfigFile, Server)
if Server.EnableLogRedacting {
prettyConf = log.Redact(prettyConf)
}
_, _ = fmt.Fprintln(out, prettyConf)
_, _ = fmt.Fprintln(os.Stderr, prettyConf)
}
if !Server.EnableExternalServices {
disableExternalServices()
}
if Server.Scanner.Extractor != consts.DefaultScannerExtractor {
log.Warn(fmt.Sprintf("Extractor '%s' is not implemented, using 'taglib'", Server.Scanner.Extractor))
Server.Scanner.Extractor = consts.DefaultScannerExtractor
}
logDeprecatedOptions("Scanner.GenreSeparators")
logDeprecatedOptions("Scanner.GroupAlbumReleases")
logDeprecatedOptions("DevEnableBufferedScrobble") // Deprecated: Buffered scrobbling is now always enabled and this option is ignored
// Call init hooks
for _, hook := range hooks {
hook()
}
}
func logDeprecatedOptions(options ...string) {
for _, option := range options {
envVar := "ND_" + strings.ToUpper(strings.ReplaceAll(option, ".", "_"))
if os.Getenv(envVar) != "" {
log.Warn(fmt.Sprintf("Option '%s' is deprecated and will be ignored in a future release", envVar))
}
if viper.InConfig(option) {
log.Warn(fmt.Sprintf("Option '%s' is deprecated and will be ignored in a future release", option))
}
}
}
// parseIniFileConfiguration is used to parse the config file when it is in INI format. For INI files, it
// would require a nested structure, so instead we unmarshal it to a map and then merge the nested [default]
// section into the root level.
func parseIniFileConfiguration() {
cfgFile := viper.ConfigFileUsed()
if strings.ToLower(filepath.Ext(cfgFile)) == ".ini" {
var iniConfig map[string]interface{}
err := viper.Unmarshal(&iniConfig)
if err != nil {
_, _ = fmt.Fprintln(os.Stderr, "FATAL: Error parsing config:", err)
os.Exit(1)
}
cfg, ok := iniConfig["default"].(map[string]any)
if !ok {
_, _ = fmt.Fprintln(os.Stderr, "FATAL: Error parsing config: missing [default] section:", iniConfig)
os.Exit(1)
}
err = viper.MergeConfigMap(cfg)
if err != nil {
_, _ = fmt.Fprintln(os.Stderr, "FATAL: Error parsing config:", err)
os.Exit(1)
}
}
}
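For reference, the [default] section that parseIniFileConfiguration merges into the root level would come from a file shaped like the hypothetical example below; the file name and values are illustrative, while MusicFolder and LogLevel are real keys from configOptions above:

; navidrome.ini (hypothetical example)
[default]
MusicFolder = /srv/music
LogLevel = debug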
func disableExternalServices() {
log.Info("All external integrations are DISABLED!")
Server.EnableInsightsCollector = false
Server.LastFM.Enabled = false
Server.Spotify.ID = ""
Server.Deezer.Enabled = false
Server.ListenBrainz.Enabled = false
Server.Agents = ""
if Server.UILoginBackgroundURL == consts.DefaultUILoginBackgroundURL {
@@ -404,67 +222,33 @@ func disableExternalServices() {
}
}
func validatePlaylistsPath() error {
for _, path := range strings.Split(Server.PlaylistsPath, string(filepath.ListSeparator)) {
_, err := doublestar.Match(path, "")
if err != nil {
log.Error("Invalid PlaylistsPath", "path", path, err)
return err
}
}
return nil
}
func validatePurgeMissingOption() error {
allowedValues := []string{consts.PurgeMissingNever, consts.PurgeMissingAlways, consts.PurgeMissingFull}
valid := false
for _, v := range allowedValues {
if v == Server.Scanner.PurgeMissing {
valid = true
break
}
}
if !valid {
err := fmt.Errorf("Invalid Scanner.PurgeMissing value: '%s'. Must be one of: %v", Server.Scanner.PurgeMissing, allowedValues)
log.Error(err.Error())
Server.Scanner.PurgeMissing = consts.PurgeMissingNever
return err
}
return nil
}
func validateScanSchedule() error {
if Server.Scanner.Schedule == "0" || Server.Scanner.Schedule == "" {
Server.Scanner.Schedule = ""
if Server.ScanInterval != -1 {
log.Warn("ScanInterval is DEPRECATED. Please use ScanSchedule. See docs at https://navidrome.org/docs/usage/configuration-options/")
if Server.ScanSchedule != "@every 1m" {
log.Error("You cannot specify both ScanInterval and ScanSchedule, ignoring ScanInterval")
} else {
if Server.ScanInterval == 0 {
Server.ScanSchedule = ""
} else {
Server.ScanSchedule = fmt.Sprintf("@every %s", Server.ScanInterval)
}
log.Warn("Setting ScanSchedule", "schedule", Server.ScanSchedule)
}
}
if Server.ScanSchedule == "0" || Server.ScanSchedule == "" {
Server.ScanSchedule = ""
return nil
}
var err error
Server.Scanner.Schedule, err = validateSchedule(Server.Scanner.Schedule, "Scanner.Schedule")
return err
}
func validateBackupSchedule() error {
if Server.Backup.Path == "" || Server.Backup.Schedule == "" || Server.Backup.Count == 0 {
Server.Backup.Schedule = ""
return nil
}
var err error
Server.Backup.Schedule, err = validateSchedule(Server.Backup.Schedule, "Backup.Schedule")
return err
}
func validateSchedule(schedule, field string) (string, error) {
if _, err := time.ParseDuration(schedule); err == nil {
schedule = "@every " + schedule
if _, err := time.ParseDuration(Server.ScanSchedule); err == nil {
Server.ScanSchedule = "@every " + Server.ScanSchedule
}
c := cron.New()
id, err := c.AddFunc(schedule, func() {})
_, err := c.AddFunc(Server.ScanSchedule, func() {})
if err != nil {
log.Error(fmt.Sprintf("Invalid %s. Please read format spec at https://pkg.go.dev/github.com/robfig/cron#hdr-CRON_Expression_Format", field), "schedule", schedule, err)
} else {
c.Remove(id)
log.Error("Invalid ScanSchedule. Please read format spec at https://pkg.go.dev/github.com/robfig/cron#hdr-CRON_Expression_Format", "schedule", Server.ScanSchedule, err)
}
return schedule, err
return err
}
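In practice the schedule validation above accepts both notations: a plain Go duration such as "1h" is normalized to "@every 1h", while anything else must parse as a robfig/cron expression, for example a standard five-field spec like "0 3 * * *" for 03:00 every day. These example values are illustrative, not defaults taken from this diff.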
// AddHook is used to register initialization code that should run as soon as the config is loaded
@@ -472,16 +256,16 @@ func AddHook(hook func()) {
hooks = append(hooks, hook)
}
func setViperDefaults() {
func init() {
viper.SetDefault("musicfolder", filepath.Join(".", "music"))
viper.SetDefault("cachefolder", "")
viper.SetDefault("datafolder", ".")
viper.SetDefault("loglevel", "info")
viper.SetDefault("logfile", "")
viper.SetDefault("address", "0.0.0.0")
viper.SetDefault("port", 4533)
viper.SetDefault("unixsocketperm", "0660")
viper.SetDefault("sessiontimeout", consts.DefaultSessionTimeout)
viper.SetDefault("scaninterval", -1)
viper.SetDefault("scanschedule", "@every 1m")
viper.SetDefault("baseurl", "")
viper.SetDefault("tlscert", "")
viper.SetDefault("tlskey", "")
@@ -491,28 +275,23 @@ func setViperDefaults() {
viper.SetDefault("enabletranscodingconfig", false)
viper.SetDefault("transcodingcachesize", "100MB")
viper.SetDefault("imagecachesize", "100MB")
viper.SetDefault("albumplaycountmode", consts.AlbumPlayCountModeAbsolute)
viper.SetDefault("enableartworkprecache", true)
viper.SetDefault("autoimportplaylists", true)
viper.SetDefault("defaultplaylistpublicvisibility", false)
viper.SetDefault("playlistspath", "")
viper.SetDefault("smartPlaylistRefreshDelay", 5*time.Second)
viper.SetDefault("playlistspath", consts.DefaultPlaylistsPath)
viper.SetDefault("enabledownloads", true)
viper.SetDefault("enableexternalservices", true)
viper.SetDefault("enablemediafilecoverart", true)
viper.SetDefault("enableMediaFileCoverArt", true)
viper.SetDefault("autotranscodedownload", false)
viper.SetDefault("defaultdownsamplingformat", consts.DefaultDownsamplingFormat)
viper.SetDefault("searchfullstring", false)
viper.SetDefault("recentlyaddedbymodtime", false)
viper.SetDefault("prefersorttags", false)
viper.SetDefault("ignoredarticles", "The El La Los Las Le Les Os As O A")
viper.SetDefault("indexgroups", "A B C D E F G H I J K L M N O P Q R S T U V W X-Z(XYZ) [Unknown]([)")
viper.SetDefault("subsonicartistparticipations", false)
viper.SetDefault("ffmpegpath", "")
viper.SetDefault("mpvcmdtemplate", "mpv --audio-device=%d --no-audio-display %f --input-ipc-server=%s")
viper.SetDefault("coverartpriority", "cover.*, folder.*, front.*, embedded, external")
viper.SetDefault("coverjpegquality", 75)
viper.SetDefault("artistartpriority", "artist.*, album/artist.*, external")
viper.SetDefault("lyricspriority", ".lrc,.txt,embedded")
viper.SetDefault("enablegravatar", false)
viper.SetDefault("enablefavourites", true)
viper.SetDefault("enablestarrating", true)
@@ -522,64 +301,31 @@ func setViperDefaults() {
viper.SetDefault("defaultuivolume", consts.DefaultUIVolume)
viper.SetDefault("enablereplaygain", true)
viper.SetDefault("enablecoveranimation", true)
viper.SetDefault("enablenowplaying", true)
viper.SetDefault("enablesharing", false)
viper.SetDefault("shareurl", "")
viper.SetDefault("defaultshareexpiration", 8760*time.Hour)
viper.SetDefault("defaultdownloadableshare", false)
viper.SetDefault("gatrackingid", "")
viper.SetDefault("enableinsightscollector", true)
viper.SetDefault("enablelogredacting", true)
viper.SetDefault("authrequestlimit", 5)
viper.SetDefault("authwindowlength", 20*time.Second)
viper.SetDefault("passwordencryptionkey", "")
viper.SetDefault("reverseproxyuserheader", "Remote-User")
viper.SetDefault("reverseproxywhitelist", "")
viper.SetDefault("prometheus.enabled", false)
viper.SetDefault("prometheus.metricspath", consts.PrometheusDefaultPath)
viper.SetDefault("prometheus.password", "")
viper.SetDefault("jukebox.enabled", false)
viper.SetDefault("jukebox.devices", []AudioDeviceDefinition{})
viper.SetDefault("jukebox.default", "")
viper.SetDefault("jukebox.adminonly", true)
viper.SetDefault("scanner.enabled", true)
viper.SetDefault("scanner.schedule", "0")
viper.SetDefault("prometheus.metricspath", "/metrics")
viper.SetDefault("scanner.extractor", consts.DefaultScannerExtractor)
viper.SetDefault("scanner.watcherwait", consts.DefaultWatcherWait)
viper.SetDefault("scanner.scanonstartup", true)
viper.SetDefault("scanner.artistjoiner", consts.ArtistJoiner)
viper.SetDefault("scanner.genreseparators", "")
viper.SetDefault("scanner.genreseparators", ";/,")
viper.SetDefault("scanner.groupalbumreleases", false)
viper.SetDefault("scanner.followsymlinks", true)
viper.SetDefault("scanner.purgemissing", consts.PurgeMissingNever)
viper.SetDefault("subsonic.appendsubtitle", true)
viper.SetDefault("subsonic.artistparticipations", false)
viper.SetDefault("subsonic.defaultreportrealpath", false)
viper.SetDefault("subsonic.legacyclients", "DSub")
viper.SetDefault("agents", "lastfm,spotify,deezer")
viper.SetDefault("agents", "lastfm,spotify")
viper.SetDefault("lastfm.enabled", true)
viper.SetDefault("lastfm.language", "en")
viper.SetDefault("lastfm.apikey", "")
viper.SetDefault("lastfm.secret", "")
viper.SetDefault("lastfm.scrobblefirstartistonly", false)
viper.SetDefault("lastfm.apikey", consts.LastFMAPIKey)
viper.SetDefault("lastfm.secret", consts.LastFMAPISecret)
viper.SetDefault("spotify.id", "")
viper.SetDefault("spotify.secret", "")
viper.SetDefault("deezer.enabled", true)
viper.SetDefault("listenbrainz.enabled", true)
viper.SetDefault("listenbrainz.baseurl", "https://api.listenbrainz.org/1/")
viper.SetDefault("httpsecurityheaders.customframeoptionsvalue", "DENY")
viper.SetDefault("backup.path", "")
viper.SetDefault("backup.schedule", "")
viper.SetDefault("backup.count", 0)
viper.SetDefault("pid.track", consts.DefaultTrackPID)
viper.SetDefault("pid.album", consts.DefaultAlbumPID)
viper.SetDefault("inspect.enabled", true)
viper.SetDefault("inspect.maxrequests", 1)
viper.SetDefault("inspect.backloglimit", consts.RequestThrottleBacklogLimit)
viper.SetDefault("inspect.backlogtimeout", consts.RequestThrottleBacklogTimeout)
viper.SetDefault("plugins.folder", "")
viper.SetDefault("plugins.enabled", false)
viper.SetDefault("plugins.cachesize", "100MB")
// DevFlags. These are used to enable/disable debugging and incomplete features
viper.SetDefault("devlogsourceline", false)
@@ -587,35 +333,19 @@ func setViperDefaults() {
viper.SetDefault("devautocreateadminpassword", "")
viper.SetDefault("devautologinusername", "")
viper.SetDefault("devactivitypanel", true)
viper.SetDefault("devactivitypanelupdaterate", 300*time.Millisecond)
viper.SetDefault("enablesharing", false)
viper.SetDefault("defaultdownloadableshare", false)
viper.SetDefault("devenablebufferedscrobble", true)
viper.SetDefault("devsidebarplaylists", true)
viper.SetDefault("devshowartistpage", true)
viper.SetDefault("devuishowconfig", true)
viper.SetDefault("devneweventstream", true)
viper.SetDefault("devoffsetoptimize", 50000)
viper.SetDefault("devartworkmaxrequests", max(2, runtime.NumCPU()/3))
viper.SetDefault("devartworkmaxrequests", number.Max(2, runtime.NumCPU()/3))
viper.SetDefault("devartworkthrottlebackloglimit", consts.RequestThrottleBacklogLimit)
viper.SetDefault("devartworkthrottlebacklogtimeout", consts.RequestThrottleBacklogTimeout)
viper.SetDefault("devartistinfotimetolive", consts.ArtistInfoTimeToLive)
viper.SetDefault("devalbuminfotimetolive", consts.AlbumInfoTimeToLive)
viper.SetDefault("devexternalscanner", true)
viper.SetDefault("devscannerthreads", 5)
viper.SetDefault("devinsightsinitialdelay", consts.InsightsInitialDelay)
viper.SetDefault("devenableplayerinsights", true)
viper.SetDefault("devenablepluginsinsights", true)
viper.SetDefault("devplugincompilationtimeout", time.Minute)
viper.SetDefault("devexternalartistfetchmultiplier", 1.5)
}
func init() {
setViperDefaults()
}
func InitConfig(cfgFile string) {
codecRegistry := viper.NewCodecRegistry()
_ = codecRegistry.RegisterCodec("ini", ini.Codec{})
viper.SetOptions(viper.WithCodecRegistry(codecRegistry))
cfgFile = getConfigFile(cfgFile)
if cfgFile != "" {
// Use config file from the flag.
@@ -639,17 +369,9 @@ func InitConfig(cfgFile string) {
}
}
// getConfigFile returns the path to the config file, either from the flag or from the environment variable.
// If it is defined in the environment variable, it will check if the file exists.
func getConfigFile(cfgFile string) string {
if cfgFile != "" {
return cfgFile
}
cfgFile = os.Getenv("ND_CONFIGFILE")
if cfgFile != "" {
if _, err := os.Stat(cfgFile); err == nil {
return cfgFile
}
}
return ""
return os.Getenv("ND_CONFIGFILE")
}

View File

@@ -1,51 +0,0 @@
package conf_test
import (
"fmt"
"path/filepath"
"testing"
"github.com/navidrome/navidrome/conf"
. "github.com/onsi/ginkgo/v2"
. "github.com/onsi/gomega"
"github.com/spf13/viper"
)
func TestConfiguration(t *testing.T) {
RegisterFailHandler(Fail)
RunSpecs(t, "Configuration Suite")
}
var _ = Describe("Configuration", func() {
BeforeEach(func() {
// Reset viper configuration
viper.Reset()
conf.SetViperDefaults()
viper.SetDefault("datafolder", GinkgoT().TempDir())
viper.SetDefault("loglevel", "error")
conf.ResetConf()
})
DescribeTable("should load configuration from",
func(format string) {
filename := filepath.Join("testdata", "cfg."+format)
// Initialize config with the test file
conf.InitConfig(filename)
// Load the configuration (with noConfigDump=true)
conf.Load(true)
// Execute the format-specific assertions
Expect(conf.Server.MusicFolder).To(Equal(fmt.Sprintf("/%s/music", format)))
Expect(conf.Server.UIWelcomeMessage).To(Equal("Welcome " + format))
Expect(conf.Server.Tags["custom"].Aliases).To(Equal([]string{format, "test"}))
// The config file used should be the one we created
Expect(conf.Server.ConfigFile).To(Equal(filename))
},
Entry("TOML format", "toml"),
Entry("YAML format", "yaml"),
Entry("INI format", "ini"),
Entry("JSON format", "json"),
)
})

View File

@@ -1,7 +0,0 @@
package conf
func ResetConf() {
Server = &configOptions{}
}
var SetViperDefaults = setViperDefaults
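This last snippet follows the common Go convention of an export-for-tests file: ResetConf and the SetViperDefaults alias live in package conf but are intended only for tests (typically kept in a file such as export_test.go, though the filename is not visible in this view), which is what allows the Configuration suite above to call conf.SetViperDefaults() and conf.ResetConf() without exporting those helpers for production use.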

Some files were not shown because too many files have changed in this diff.