Mirror of https://github.com/navidrome/navidrome.git (synced 2026-01-01 11:28:04 -05:00)
Compare commits
22 Commits
| Author | SHA1 | Date |
|---|---|---|
|  | 7cf98af9da |  |
|  | 40f7170930 |  |
|  | 4a8f176f0d |  |
|  | 4507895d58 |  |
|  | 90c2d7d1cf |  |
|  | f62231f728 |  |
|  | fd1e06049f |  |
|  | 7dfe29af5e |  |
|  | 8e03d7d013 |  |
|  | 960415ed95 |  |
|  | ea231fe265 |  |
|  | 09e52eba87 |  |
|  | 2650e4c27c |  |
|  | 3c0f23e3f2 |  |
|  | 250b6dbb33 |  |
|  | be945e010a |  |
|  | 9308127342 |  |
|  | 141edb881e |  |
|  | 20a1e3160b |  |
|  | d4c458d193 |  |
|  | ed87e703ff |  |
|  | dcb5725642 |  |
@@ -2,7 +2,7 @@
# [Choice] Go version: 1, 1.15, 1.14
ARG VARIANT="1"
FROM mcr.microsoft.com/vscode/devcontainers/go:${VARIANT}
FROM mcr.microsoft.com/vscode/devcontainers/go:0-${VARIANT}

# [Option] Install Node.js
ARG INSTALL_NODE="true"
@@ -17,4 +17,4 @@ RUN apt-get update && export DEBIAN_FRONTEND=noninteractive \
# RUN go get -x <your-dependency-or-tool>

# [Optional] Uncomment this line to install global node packages.
# RUN su vscode -c "source /usr/local/share/nvm/nvm.sh && npm install -g <your-package-here>" 2>&1
# RUN su vscode -c "source /usr/local/share/nvm/nvm.sh && npm install -g <your-package-here>" 2>&1
@@ -4,10 +4,10 @@
"dockerfile": "Dockerfile",
"args": {
// Update the VARIANT arg to pick a version of Go: 1, 1.15, 1.14
"VARIANT": "1.24",
"VARIANT": "1.20",
// Options
"INSTALL_NODE": "true",
"NODE_VERSION": "v20"
"NODE_VERSION": "v18"
}
},
"workspaceMount": "",
@@ -18,37 +18,33 @@
"--volume=${localWorkspaceFolder}:/workspaces/${localWorkspaceFolderBasename}:Z"
],
// Set *default* container specific settings.json values on container create.
"customizations": {
"vscode": {
"settings": {
"terminal.integrated.shell.linux": "/bin/bash",
"go.useGoProxyToCheckForToolUpdates": false,
"go.useLanguageServer": true,
"go.gopath": "/go",
"go.goroot": "/usr/local/go",
"go.toolsGopath": "/go/bin",
"go.formatTool": "goimports",
"go.lintOnSave": "package",
"go.lintTool": "golangci-lint",
"editor.formatOnSave": true,
"[javascript]": {
"editor.defaultFormatter": "esbenp.prettier-vscode"
},
"[json]": {
"editor.defaultFormatter": "esbenp.prettier-vscode"
},
"[jsonc]": {
"editor.defaultFormatter": "vscode.json-language-features"
}
},
// Add the IDs of extensions you want installed when the container is created.
"extensions": [
"golang.Go",
"esbenp.prettier-vscode",
"tamasfe.even-better-toml"
]
"settings": {
"terminal.integrated.shell.linux": "/bin/bash",
"go.useGoProxyToCheckForToolUpdates": false,
"go.useLanguageServer": true,
"go.gopath": "/go",
"go.goroot": "/usr/local/go",
"go.toolsGopath": "/go/bin",
"go.formatTool": "goimports",
"go.lintOnSave": "package",
"go.lintTool": "golangci-lint",
"editor.formatOnSave": true,
"[javascript]": {
"editor.defaultFormatter": "esbenp.prettier-vscode"
},
"[json]": {
"editor.defaultFormatter": "esbenp.prettier-vscode"
},
"[jsonc]": {
"editor.defaultFormatter": "vscode.json-language-features"
}
},
// Add the IDs of extensions you want installed when the container is created.
"extensions": [
"golang.Go",
"esbenp.prettier-vscode",
"tamasfe.even-better-toml"
],
// Use 'forwardPorts' to make a list of ports inside the container available locally.
"forwardPorts": [
4533,
@@ -1,18 +1,10 @@
.DS_Store
ui/node_modules
ui/build
!ui/build/.gitkeep
Dockerfile
docker-compose*.yml
data
*.db
testDB
navidrome
navidrome.db
navidrome.toml
tmp
!tmp/taglib
dist
binaries
cache
music
!Dockerfile

23  .github/actions/download-taglib/action.yml  vendored
@@ -1,23 +0,0 @@
name: 'Download TagLib'
description: 'Downloads and extracts the TagLib library, adding it to PKG_CONFIG_PATH'
inputs:
version:
description: 'Version of TagLib to download'
required: true
platform:
description: 'Platform to download TagLib for'
default: 'linux-amd64'
runs:
using: 'composite'
steps:
- name: Download TagLib
shell: bash
run: |
mkdir -p /tmp/taglib
cd /tmp
FILE=taglib-${{ inputs.platform }}.tar.gz
wget https://github.com/navidrome/cross-taglib/releases/download/v${{ inputs.version }}/${FILE}
tar -xzf ${FILE} -C taglib
PKG_CONFIG_PREFIX=/tmp/taglib
echo "PKG_CONFIG_PREFIX=${PKG_CONFIG_PREFIX}" >> $GITHUB_ENV
echo "PKG_CONFIG_PATH=${PKG_CONFIG_PATH}:${PKG_CONFIG_PREFIX}/lib/pkgconfig" >> $GITHUB_ENV

84  .github/actions/prepare-docker/action.yml  vendored
@@ -1,84 +0,0 @@
|
||||
name: 'Prepare Docker Buildx environment'
|
||||
description: 'Downloads and extracts the TagLib library, adding it to PKG_CONFIG_PATH'
|
||||
inputs:
|
||||
github_token:
|
||||
description: 'GitHub token'
|
||||
required: true
|
||||
default: ''
|
||||
hub_repository:
|
||||
description: 'Docker Hub repository to push images to'
|
||||
required: false
|
||||
default: ''
|
||||
hub_username:
|
||||
description: 'Docker Hub username'
|
||||
required: false
|
||||
default: ''
|
||||
hub_password:
|
||||
description: 'Docker Hub password'
|
||||
required: false
|
||||
default: ''
|
||||
outputs:
|
||||
tags:
|
||||
description: 'Docker image tags'
|
||||
value: ${{ steps.meta.outputs.tags }}
|
||||
labels:
|
||||
description: 'Docker image labels'
|
||||
value: ${{ steps.meta.outputs.labels }}
|
||||
annotations:
|
||||
description: 'Docker image annotations'
|
||||
value: ${{ steps.meta.outputs.annotations }}
|
||||
version:
|
||||
description: 'Docker image version'
|
||||
value: ${{ steps.meta.outputs.version }}
|
||||
hub_repository:
|
||||
description: 'Docker Hub repository'
|
||||
value: ${{ env.DOCKER_HUB_REPO }}
|
||||
hub_enabled:
|
||||
description: 'Is Docker Hub enabled'
|
||||
value: ${{ env.DOCKER_HUB_ENABLED }}
|
||||
|
||||
runs:
|
||||
using: 'composite'
|
||||
steps:
|
||||
- name: Check Docker Hub configuration
|
||||
shell: bash
|
||||
run: |
|
||||
if [ -z "${{inputs.hub_repository}}" ]; then
|
||||
echo "DOCKER_HUB_REPO=none" >> $GITHUB_ENV
|
||||
echo "DOCKER_HUB_ENABLED=false" >> $GITHUB_ENV
|
||||
else
|
||||
echo "DOCKER_HUB_REPO=${{inputs.hub_repository}}" >> $GITHUB_ENV
|
||||
echo "DOCKER_HUB_ENABLED=true" >> $GITHUB_ENV
|
||||
fi
|
||||
|
||||
- name: Login to Docker Hub
|
||||
if: inputs.hub_username != '' && inputs.hub_password != ''
|
||||
uses: docker/login-action@v3
|
||||
with:
|
||||
username: ${{ inputs.hub_username }}
|
||||
password: ${{ inputs.hub_password }}
|
||||
|
||||
- name: Login to GitHub Container Registry
|
||||
uses: docker/login-action@v3
|
||||
with:
|
||||
registry: ghcr.io
|
||||
username: ${{ github.actor }}
|
||||
password: ${{ inputs.github_token }}
|
||||
|
||||
- name: Set up Docker Buildx
|
||||
id: buildx
|
||||
uses: docker/setup-buildx-action@v3
|
||||
|
||||
- name: Extract metadata for Docker image
|
||||
id: meta
|
||||
uses: docker/metadata-action@v5
|
||||
with:
|
||||
labels: |
|
||||
maintainer=deluan@navidrome.org
|
||||
images: |
|
||||
name=${{env.DOCKER_HUB_REPO}},enable=${{env.DOCKER_HUB_ENABLED}}
|
||||
name=ghcr.io/${{ github.repository }}
|
||||
tags: |
|
||||
type=ref,event=pr
|
||||
type=semver,pattern={{version}}
|
||||
type=raw,value=develop,enable={{is_default_branch}}
|
||||
10  .github/dependabot.yml  vendored
@@ -10,13 +10,3 @@ updates:
schedule:
interval: weekly
open-pull-requests-limit: 10
- package-ecosystem: docker
directory: "/"
schedule:
interval: weekly
open-pull-requests-limit: 10
- package-ecosystem: github-actions
directory: "/.github/workflows"
schedule:
interval: weekly
open-pull-requests-limit: 10

38  .github/workflows/pipeline.dockerfile  vendored  Normal file
@@ -0,0 +1,38 @@
#####################################################
### Copy platform specific binary
FROM bash as copy-binary
ARG TARGETPLATFORM

RUN echo "Target Platform = ${TARGETPLATFORM}"

COPY dist .
RUN if [ "$TARGETPLATFORM" = "linux/amd64" ]; then cp navidrome_linux_amd64_linux_amd64_v1/navidrome /navidrome; fi
RUN if [ "$TARGETPLATFORM" = "linux/386" ]; then cp navidrome_linux_386_linux_386/navidrome /navidrome; fi
RUN if [ "$TARGETPLATFORM" = "linux/arm64" ]; then cp navidrome_linux_arm64_linux_arm64/navidrome /navidrome; fi
RUN if [ "$TARGETPLATFORM" = "linux/arm/v6" ]; then cp navidrome_linux_arm_linux_arm_6/navidrome /navidrome; fi
RUN if [ "$TARGETPLATFORM" = "linux/arm/v7" ]; then cp navidrome_linux_arm_linux_arm_7/navidrome /navidrome; fi
RUN chmod +x /navidrome


#####################################################
### Build Final Image
FROM alpine as release
LABEL maintainer="deluan@navidrome.org"

# Install ffmpeg and output build config
RUN apk add --no-cache ffmpeg
RUN ffmpeg -buildconf

COPY --from=copy-binary /navidrome /app/

VOLUME ["/data", "/music"]
ENV ND_MUSICFOLDER /music
ENV ND_DATAFOLDER /data
ENV ND_PORT 4533
ENV GODEBUG "asyncpreemptoff=1"

EXPOSE ${ND_PORT}
HEALTHCHECK CMD wget -O- http://localhost:${ND_PORT}/ping || exit 1
WORKDIR /app

ENTRYPOINT ["/app/navidrome"]

514  .github/workflows/pipeline.yml  vendored
@@ -1,4 +1,4 @@
|
||||
name: "Pipeline: Test, Lint, Build"
|
||||
name: 'Pipeline: Test, Lint, Build'
|
||||
on:
|
||||
push:
|
||||
branches:
|
||||
@@ -9,117 +9,103 @@ on:
|
||||
branches:
|
||||
- master
|
||||
|
||||
concurrency:
|
||||
group: ${{ startsWith(github.ref, 'refs/tags/v') && 'tag' || 'branch' }}-${{ github.ref }}
|
||||
cancel-in-progress: true
|
||||
|
||||
env:
|
||||
CROSS_TAGLIB_VERSION: "2.0.2-1"
|
||||
IS_RELEASE: ${{ startsWith(github.ref, 'refs/tags/') && 'true' || 'false' }}
|
||||
|
||||
jobs:
|
||||
git-version:
|
||||
name: Get version info
|
||||
runs-on: ubuntu-latest
|
||||
outputs:
|
||||
git_tag: ${{ steps.git-version.outputs.GIT_TAG }}
|
||||
git_sha: ${{ steps.git-version.outputs.GIT_SHA }}
|
||||
steps:
|
||||
- uses: actions/checkout@v4
|
||||
with:
|
||||
fetch-depth: 0
|
||||
fetch-tags: true
|
||||
|
||||
- name: Show git version info
|
||||
run: |
|
||||
echo "git describe (dirty): $(git describe --dirty --always --tags)"
|
||||
echo "git describe --tags: $(git describe --tags `git rev-list --tags --max-count=1`)"
|
||||
echo "git tag: $(git tag --sort=-committerdate | head -n 1)"
|
||||
echo "github_ref: $GITHUB_REF"
|
||||
echo "github_head_sha: ${{ github.event.pull_request.head.sha }}"
|
||||
git tag -l
|
||||
- name: Determine git current SHA and latest tag
|
||||
id: git-version
|
||||
run: |
|
||||
GIT_TAG=$(git tag --sort=-committerdate | head -n 1)
|
||||
if [ -n "$GIT_TAG" ]; then
|
||||
if [[ "$GITHUB_REF" != refs/tags/* ]]; then
|
||||
GIT_TAG=${GIT_TAG}-SNAPSHOT
|
||||
fi
|
||||
echo "GIT_TAG=$GIT_TAG" >> $GITHUB_OUTPUT
|
||||
fi
|
||||
GIT_SHA=$(git rev-parse --short HEAD)
|
||||
PR_NUM=$(jq --raw-output .pull_request.number "$GITHUB_EVENT_PATH")
|
||||
if [[ $PR_NUM != "null" ]]; then
|
||||
GIT_SHA=$(echo "${{ github.event.pull_request.head.sha }}" | cut -c1-8)
|
||||
GIT_SHA="pr-${PR_NUM}/${GIT_SHA}"
|
||||
fi
|
||||
echo "GIT_SHA=$GIT_SHA" >> $GITHUB_OUTPUT
|
||||
|
||||
echo "GIT_TAG=$GIT_TAG"
|
||||
echo "GIT_SHA=$GIT_SHA"
|
||||
|
||||
go-lint:
|
||||
name: Lint Go code
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- uses: actions/checkout@v4
|
||||
- name: Install taglib
|
||||
run: sudo apt-get install libtag1-dev
|
||||
|
||||
- name: Download TagLib
|
||||
uses: ./.github/actions/download-taglib
|
||||
- name: Set up Go 1.20
|
||||
uses: actions/setup-go@v3
|
||||
with:
|
||||
version: ${{ env.CROSS_TAGLIB_VERSION }}
|
||||
go-version: 1.20.x
|
||||
|
||||
- uses: actions/checkout@v3
|
||||
|
||||
- name: golangci-lint
|
||||
uses: golangci/golangci-lint-action@v7
|
||||
uses: golangci/golangci-lint-action@v3
|
||||
with:
|
||||
version: latest
|
||||
problem-matchers: true
|
||||
github-token: ${{ secrets.GITHUB_TOKEN }}
|
||||
args: --timeout 2m
|
||||
|
||||
- name: Run go goimports
|
||||
run: go run golang.org/x/tools/cmd/goimports@latest -w `find . -name '*.go' | grep -v '_gen.go$'`
|
||||
- name: Install goimports
|
||||
run: go install golang.org/x/tools/cmd/goimports
|
||||
|
||||
- run: goimports -w `find . -name '*.go' | grep -v '_gen.go$'`
|
||||
- run: go mod tidy
|
||||
- name: Verify no changes from goimports and go mod tidy
|
||||
run: |
|
||||
git status --porcelain
|
||||
if [ -n "$(git status --porcelain)" ]; then
|
||||
echo 'To fix this check, run "make format" and commit the changes'
|
||||
echo 'To fix this check, run "goimports -w $(find . -name '*.go' | grep -v '_gen.go$') && go mod tidy"'
|
||||
exit 1
|
||||
fi
|
||||
|
||||
go:
|
||||
name: Test Go code
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- name: Check out code into the Go module directory
|
||||
uses: actions/checkout@v4
|
||||
|
||||
- name: Download TagLib
|
||||
uses: ./.github/actions/download-taglib
|
||||
- uses: actions/setup-node@v3
|
||||
with:
|
||||
version: ${{ env.CROSS_TAGLIB_VERSION }}
|
||||
node-version: 18
|
||||
cache: 'npm'
|
||||
cache-dependency-path: '**/package-lock.json'
|
||||
|
||||
- name: Build and Lint OpenAPI spec
|
||||
run: make lintapi gen
|
||||
|
||||
- name: Verify no changes
|
||||
run: |
|
||||
git status --porcelain
|
||||
if [ -n "$(git status --porcelain)" ]; then
|
||||
echo 'Changes to OpenAPI spec caused changes to the code. Please review and commit the changes.'
|
||||
exit 1
|
||||
fi
|
||||
|
||||
- uses: actions/upload-artifact@v3
|
||||
with:
|
||||
name: openapi.yaml
|
||||
path: api/openapi.yaml
|
||||
|
||||
go:
|
||||
name: Test with Go ${{ matrix.go_version }}
|
||||
runs-on: ubuntu-latest
|
||||
strategy:
|
||||
matrix:
|
||||
go_version: [1.20.x,1.19.x]
|
||||
steps:
|
||||
- name: Install taglib
|
||||
run: sudo apt-get install libtag1-dev
|
||||
|
||||
- name: Check out code into the Go module directory
|
||||
uses: actions/checkout@v3
|
||||
|
||||
- name: Set up Go ${{ matrix.go_version }}
|
||||
uses: actions/setup-go@v3
|
||||
with:
|
||||
go-version: ${{ matrix.go_version }}
|
||||
cache: true
|
||||
|
||||
- name: Download dependencies
|
||||
if: steps.cache-go.outputs.cache-hit != 'true'
|
||||
continue-on-error: ${{contains(matrix.go_version, 'beta') || contains(matrix.go_version, 'rc')}}
|
||||
run: go mod download
|
||||
|
||||
- name: Test
|
||||
run: |
|
||||
pkg-config --define-prefix --cflags --libs taglib # for debugging
|
||||
go test -shuffle=on -tags netgo -race -cover ./... -v
|
||||
continue-on-error: ${{contains(matrix.go_version, 'beta') || contains(matrix.go_version, 'rc')}}
|
||||
run: go test -shuffle=on -race -cover ./... -v
|
||||
|
||||
js:
|
||||
name: Test JS code
|
||||
name: Build JS bundle
|
||||
runs-on: ubuntu-latest
|
||||
env:
|
||||
NODE_OPTIONS: "--max_old_space_size=4096"
|
||||
NODE_OPTIONS: '--max_old_space_size=4096'
|
||||
steps:
|
||||
- uses: actions/checkout@v4
|
||||
- uses: actions/setup-node@v4
|
||||
- uses: actions/checkout@v3
|
||||
- uses: actions/setup-node@v3
|
||||
with:
|
||||
node-version: 20
|
||||
cache: "npm"
|
||||
cache-dependency-path: "**/package-lock.json"
|
||||
node-version: 18
|
||||
cache: 'npm'
|
||||
cache-dependency-path: '**/package-lock.json'
|
||||
|
||||
- name: npm install dependencies
|
||||
run: |
|
||||
@@ -141,289 +127,119 @@ jobs:
|
||||
cd ui
|
||||
npm run build
|
||||
|
||||
i18n-lint:
|
||||
name: Lint i18n files
|
||||
- uses: actions/upload-artifact@v3
|
||||
with:
|
||||
name: js-bundle
|
||||
path: ui/build
|
||||
|
||||
binaries:
|
||||
name: Build binaries
|
||||
needs: [js, go, go-lint]
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- uses: actions/checkout@v4
|
||||
- run: |
|
||||
set -e
|
||||
for file in resources/i18n/*.json; do
|
||||
echo "Validating $file"
|
||||
if ! jq empty "$file" 2>error.log; then
|
||||
error_message=$(cat error.log)
|
||||
line_number=$(echo "$error_message" | grep -oP 'line \K[0-9]+')
|
||||
echo "::error file=$file,line=$line_number::$error_message"
|
||||
exit 1
|
||||
fi
|
||||
done
|
||||
|
||||
check-push-enabled:
|
||||
name: Check Docker configuration
|
||||
runs-on: ubuntu-latest
|
||||
outputs:
|
||||
is_enabled: ${{ steps.check.outputs.is_enabled }}
|
||||
steps:
|
||||
- name: Check if Docker push is configured
|
||||
id: check
|
||||
run: echo "is_enabled=${{ secrets.DOCKER_HUB_USERNAME != '' }}" >> $GITHUB_OUTPUT
|
||||
|
||||
build:
|
||||
name: Build
|
||||
needs: [js, go, go-lint, i18n-lint, git-version, check-push-enabled]
|
||||
strategy:
|
||||
matrix:
|
||||
platform: [ linux/amd64, linux/arm64, linux/arm/v5, linux/arm/v6, linux/arm/v7, linux/386, darwin/amd64, darwin/arm64, windows/amd64, windows/386 ]
|
||||
runs-on: ubuntu-latest
|
||||
env:
|
||||
IS_LINUX: ${{ startsWith(matrix.platform, 'linux/') && 'true' || 'false' }}
|
||||
IS_ARMV5: ${{ matrix.platform == 'linux/arm/v5' && 'true' || 'false' }}
|
||||
IS_DOCKER_PUSH_CONFIGURED: ${{ needs.check-push-enabled.outputs.is_enabled == 'true' }}
|
||||
DOCKER_BUILD_SUMMARY: false
|
||||
GIT_SHA: ${{ needs.git-version.outputs.git_sha }}
|
||||
GIT_TAG: ${{ needs.git-version.outputs.git_tag }}
|
||||
steps:
|
||||
- name: Sanitize platform name
|
||||
id: set-platform
|
||||
run: |
|
||||
PLATFORM=$(echo ${{ matrix.platform }} | tr '/' '_')
|
||||
echo "PLATFORM=$PLATFORM" >> $GITHUB_ENV
|
||||
|
||||
- uses: actions/checkout@v4
|
||||
|
||||
- name: Prepare Docker Buildx
|
||||
uses: ./.github/actions/prepare-docker
|
||||
id: docker
|
||||
with:
|
||||
github_token: ${{ secrets.GITHUB_TOKEN }}
|
||||
hub_repository: ${{ vars.DOCKER_HUB_REPO }}
|
||||
hub_username: ${{ secrets.DOCKER_HUB_USERNAME }}
|
||||
hub_password: ${{ secrets.DOCKER_HUB_PASSWORD }}
|
||||
|
||||
- name: Build Binaries
|
||||
uses: docker/build-push-action@v6
|
||||
with:
|
||||
context: .
|
||||
file: Dockerfile
|
||||
platforms: ${{ matrix.platform }}
|
||||
outputs: |
|
||||
type=local,dest=./output/${{ env.PLATFORM }}
|
||||
target: binary
|
||||
build-args: |
|
||||
GIT_SHA=${{ env.GIT_SHA }}
|
||||
GIT_TAG=${{ env.GIT_TAG }}
|
||||
CROSS_TAGLIB_VERSION=${{ env.CROSS_TAGLIB_VERSION }}
|
||||
|
||||
- name: Upload Binaries
|
||||
uses: actions/upload-artifact@v4
|
||||
with:
|
||||
name: navidrome-${{ env.PLATFORM }}
|
||||
path: ./output
|
||||
retention-days: 7
|
||||
|
||||
- name: Build and push image by digest
|
||||
id: push-image
|
||||
if: env.IS_LINUX == 'true' && env.IS_DOCKER_PUSH_CONFIGURED == 'true' && env.IS_ARMV5 == 'false'
|
||||
uses: docker/build-push-action@v6
|
||||
with:
|
||||
context: .
|
||||
file: Dockerfile
|
||||
platforms: ${{ matrix.platform }}
|
||||
labels: ${{ steps.docker.outputs.labels }}
|
||||
build-args: |
|
||||
GIT_SHA=${{ env.GIT_SHA }}
|
||||
GIT_TAG=${{ env.GIT_TAG }}
|
||||
CROSS_TAGLIB_VERSION=${{ env.CROSS_TAGLIB_VERSION }}
|
||||
outputs: |
|
||||
type=image,name=${{ steps.docker.outputs.hub_repository }},push-by-digest=true,name-canonical=true,push=${{ steps.docker.outputs.hub_enabled }}
|
||||
type=image,name=ghcr.io/${{ github.repository }},push-by-digest=true,name-canonical=true,push=true
|
||||
|
||||
- name: Export digest
|
||||
if: env.IS_LINUX == 'true' && env.IS_DOCKER_PUSH_CONFIGURED == 'true' && env.IS_ARMV5 == 'false'
|
||||
run: |
|
||||
mkdir -p /tmp/digests
|
||||
digest="${{ steps.push-image.outputs.digest }}"
|
||||
touch "/tmp/digests/${digest#sha256:}"
|
||||
|
||||
- name: Upload digest
|
||||
uses: actions/upload-artifact@v4
|
||||
if: env.IS_LINUX == 'true' && env.IS_DOCKER_PUSH_CONFIGURED == 'true' && env.IS_ARMV5 == 'false'
|
||||
with:
|
||||
name: digests-${{ env.PLATFORM }}
|
||||
path: /tmp/digests/*
|
||||
if-no-files-found: error
|
||||
retention-days: 1
|
||||
|
||||
push-manifest:
|
||||
name: Push Docker manifest
|
||||
runs-on: ubuntu-latest
|
||||
needs: [build, check-push-enabled]
|
||||
if: needs.check-push-enabled.outputs.is_enabled == 'true'
|
||||
env:
|
||||
REGISTRY_IMAGE: ghcr.io/${{ github.repository }}
|
||||
steps:
|
||||
- uses: actions/checkout@v4
|
||||
|
||||
- name: Download digests
|
||||
uses: actions/download-artifact@v4
|
||||
with:
|
||||
path: /tmp/digests
|
||||
pattern: digests-*
|
||||
merge-multiple: true
|
||||
|
||||
- name: Prepare Docker Buildx
|
||||
uses: ./.github/actions/prepare-docker
|
||||
id: docker
|
||||
with:
|
||||
github_token: ${{ secrets.GITHUB_TOKEN }}
|
||||
hub_repository: ${{ vars.DOCKER_HUB_REPO }}
|
||||
hub_username: ${{ secrets.DOCKER_HUB_USERNAME }}
|
||||
hub_password: ${{ secrets.DOCKER_HUB_PASSWORD }}
|
||||
|
||||
- name: Create manifest list and push to ghcr.io
|
||||
working-directory: /tmp/digests
|
||||
run: |
|
||||
docker buildx imagetools create $(jq -cr '.tags | map("-t " + .) | join(" ")' <<< "$DOCKER_METADATA_OUTPUT_JSON") \
|
||||
$(printf '${{ env.REGISTRY_IMAGE }}@sha256:%s ' *)
|
||||
|
||||
- name: Create manifest list and push to Docker Hub
|
||||
working-directory: /tmp/digests
|
||||
if: vars.DOCKER_HUB_REPO != ''
|
||||
run: |
|
||||
docker buildx imagetools create $(jq -cr '.tags | map("-t " + .) | join(" ")' <<< "$DOCKER_METADATA_OUTPUT_JSON") \
|
||||
$(printf '${{ vars.DOCKER_HUB_REPO }}@sha256:%s ' *)
|
||||
|
||||
- name: Inspect image in ghcr.io
|
||||
run: |
|
||||
docker buildx imagetools inspect ${{ env.REGISTRY_IMAGE }}:${{ steps.docker.outputs.version }}
|
||||
|
||||
- name: Inspect image in Docker Hub
|
||||
if: vars.DOCKER_HUB_REPO != ''
|
||||
run: |
|
||||
docker buildx imagetools inspect ${{ vars.DOCKER_HUB_REPO }}:${{ steps.docker.outputs.version }}
|
||||
|
||||
- name: Delete unnecessary digest artifacts
|
||||
env:
|
||||
GH_TOKEN: ${{ github.token }}
|
||||
run: |
|
||||
for artifact in $(gh api repos/${{ github.repository }}/actions/artifacts | jq -r '.artifacts[] | select(.name | startswith("digests-")) | .id'); do
|
||||
gh api --method DELETE repos/${{ github.repository }}/actions/artifacts/$artifact
|
||||
done
|
||||
|
||||
msi:
|
||||
name: Build Windows installers
|
||||
needs: [build, git-version]
|
||||
runs-on: ubuntu-24.04
|
||||
|
||||
steps:
|
||||
- uses: actions/checkout@v4
|
||||
|
||||
- uses: actions/download-artifact@v4
|
||||
with:
|
||||
path: ./binaries
|
||||
pattern: navidrome-windows*
|
||||
merge-multiple: true
|
||||
|
||||
- name: Install Wix
|
||||
run: sudo apt-get install -y wixl jq
|
||||
|
||||
- name: Build MSI
|
||||
env:
|
||||
GIT_TAG: ${{ needs.git-version.outputs.git_tag }}
|
||||
run: |
|
||||
rm -rf binaries/msi
|
||||
sudo GIT_TAG=$GIT_TAG release/wix/build_msi.sh ${GITHUB_WORKSPACE} 386
|
||||
sudo GIT_TAG=$GIT_TAG release/wix/build_msi.sh ${GITHUB_WORKSPACE} amd64
|
||||
du -h binaries/msi/*.msi
|
||||
|
||||
- name: Upload MSI files
|
||||
uses: actions/upload-artifact@v4
|
||||
with:
|
||||
name: navidrome-windows-installers
|
||||
path: binaries/msi/*.msi
|
||||
retention-days: 7
|
||||
|
||||
release:
|
||||
name: Package/Release
|
||||
needs: [build, msi]
|
||||
runs-on: ubuntu-latest
|
||||
outputs:
|
||||
package_list: ${{ steps.set-package-list.outputs.package_list }}
|
||||
steps:
|
||||
- uses: actions/checkout@v4
|
||||
- name: Checkout Code
|
||||
uses: actions/checkout@v3
|
||||
with:
|
||||
fetch-depth: 0
|
||||
fetch-tags: true
|
||||
|
||||
- uses: actions/download-artifact@v4
|
||||
- uses: actions/download-artifact@v3
|
||||
with:
|
||||
path: ./binaries
|
||||
pattern: navidrome-*
|
||||
merge-multiple: true
|
||||
name: js-bundle
|
||||
path: ui/build
|
||||
|
||||
- run: ls -lR ./binaries
|
||||
|
||||
- name: Set RELEASE_FLAGS for snapshot releases
|
||||
if: env.IS_RELEASE == 'false'
|
||||
run: echo 'RELEASE_FLAGS=--skip=publish --snapshot' >> $GITHUB_ENV
|
||||
|
||||
- name: Run GoReleaser
|
||||
uses: goreleaser/goreleaser-action@v6
|
||||
with:
|
||||
version: '~> v2'
|
||||
args: "release --clean -f release/goreleaser.yml ${{ env.RELEASE_FLAGS }}"
|
||||
- name: Config /github/workspace folder as trusted
|
||||
uses: docker://deluan/ci-goreleaser:1.20.3-1
|
||||
env:
|
||||
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
|
||||
|
||||
- name: Remove build artifacts
|
||||
run: |
|
||||
ls -l ./dist
|
||||
rm ./dist/*.tar.gz ./dist/*.zip
|
||||
|
||||
- name: Upload all-packages artifact
|
||||
uses: actions/upload-artifact@v4
|
||||
with:
|
||||
name: packages
|
||||
path: dist/navidrome_0*
|
||||
args: /bin/bash -c "git config --global --add safe.directory /github/workspace; git describe --dirty --always --tags"
|
||||
|
||||
- id: set-package-list
|
||||
name: Export list of generated packages
|
||||
run: |
|
||||
cd dist
|
||||
set +x
|
||||
ITEMS=$(ls navidrome_0* | sed 's/^navidrome_0[^_]*_linux_//' | jq -R -s -c 'split("\n")[:-1]')
|
||||
echo $ITEMS
|
||||
echo "package_list=${ITEMS}" >> $GITHUB_OUTPUT
|
||||
- name: Run GoReleaser - SNAPSHOT
|
||||
if: startsWith(github.ref, 'refs/tags/') != true
|
||||
uses: docker://deluan/ci-goreleaser:1.20.3-1
|
||||
env:
|
||||
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
|
||||
with:
|
||||
args: goreleaser release --rm-dist --skip-publish --snapshot
|
||||
|
||||
upload-packages:
|
||||
name: Upload Linux PKG
|
||||
- name: Run GoReleaser - RELEASE
|
||||
if: startsWith(github.ref, 'refs/tags/')
|
||||
uses: docker://deluan/ci-goreleaser:1.20.3-1
|
||||
env:
|
||||
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
|
||||
with:
|
||||
args: goreleaser release --rm-dist
|
||||
|
||||
- uses: actions/upload-artifact@v3
|
||||
with:
|
||||
name: binaries
|
||||
path: |
|
||||
dist
|
||||
!dist/*.tar.gz
|
||||
!dist/*.zip
|
||||
|
||||
docker:
|
||||
name: Build and publish Docker images
|
||||
needs: [binaries]
|
||||
runs-on: ubuntu-latest
|
||||
needs: [release]
|
||||
strategy:
|
||||
matrix:
|
||||
item: ${{ fromJson(needs.release.outputs.package_list) }}
|
||||
env:
|
||||
DOCKER_IMAGE: ${{secrets.DOCKER_IMAGE}}
|
||||
steps:
|
||||
- name: Download all-packages artifact
|
||||
uses: actions/download-artifact@v4
|
||||
with:
|
||||
name: packages
|
||||
path: ./dist
|
||||
- name: Set up QEMU
|
||||
id: qemu
|
||||
uses: docker/setup-qemu-action@v2
|
||||
if: env.DOCKER_IMAGE != ''
|
||||
|
||||
- name: Upload all-packages artifact
|
||||
uses: actions/upload-artifact@v4
|
||||
with:
|
||||
name: navidrome_linux_${{ matrix.item }}
|
||||
path: dist/navidrome_0*_linux_${{ matrix.item }}
|
||||
- name: Set up Docker Buildx
|
||||
id: buildx
|
||||
uses: docker/setup-buildx-action@v2
|
||||
if: env.DOCKER_IMAGE != ''
|
||||
|
||||
# delete-artifacts:
|
||||
# name: Delete unused artifacts
|
||||
# runs-on: ubuntu-latest
|
||||
# needs: [upload-packages]
|
||||
# steps:
|
||||
# - name: Delete all-packages artifact
|
||||
# env:
|
||||
# GH_TOKEN: ${{ github.token }}
|
||||
# run: |
|
||||
# for artifact in $(gh api repos/${{ github.repository }}/actions/artifacts | jq -r '.artifacts[] | select(.name | startswith("packages")) | .id'); do
|
||||
# gh api --method DELETE repos/${{ github.repository }}/actions/artifacts/$artifact
|
||||
# done
|
||||
- uses: actions/checkout@v3
|
||||
if: env.DOCKER_IMAGE != ''
|
||||
|
||||
- uses: actions/download-artifact@v3
|
||||
if: env.DOCKER_IMAGE != ''
|
||||
with:
|
||||
name: binaries
|
||||
path: dist
|
||||
|
||||
- name: Login to Docker Hub
|
||||
if: env.DOCKER_IMAGE != ''
|
||||
uses: docker/login-action@v2
|
||||
with:
|
||||
username: ${{ secrets.DOCKER_USERNAME }}
|
||||
password: ${{ secrets.DOCKER_PASSWORD }}
|
||||
|
||||
- name: Login to GitHub Container Registry
|
||||
if: env.DOCKER_IMAGE != ''
|
||||
uses: docker/login-action@v2
|
||||
with:
|
||||
registry: ghcr.io
|
||||
username: ${{ github.actor }}
|
||||
password: ${{ secrets.GITHUB_TOKEN }}
|
||||
|
||||
- name: Extract metadata for Docker
|
||||
if: env.DOCKER_IMAGE != ''
|
||||
id: meta
|
||||
uses: docker/metadata-action@v4
|
||||
with:
|
||||
labels: |
|
||||
maintainer=deluan
|
||||
images: |
|
||||
name=${{secrets.DOCKER_IMAGE}},enable=${{env.GITHUB_REF_TYPE == 'tag' || github.ref == format('refs/heads/{0}', 'master')}}
|
||||
name=ghcr.io/${{ github.repository }}
|
||||
tags: |
|
||||
type=ref,event=pr
|
||||
type=semver,pattern={{version}}
|
||||
type=raw,value=develop,enable={{is_default_branch}}
|
||||
|
||||
- name: Build and Push
|
||||
if: env.DOCKER_IMAGE != ''
|
||||
uses: docker/build-push-action@v4
|
||||
with:
|
||||
context: .
|
||||
file: .github/workflows/pipeline.dockerfile
|
||||
platforms: linux/amd64,linux/386,linux/arm/v6,linux/arm/v7,linux/arm64
|
||||
push: true
|
||||
tags: ${{ steps.meta.outputs.tags }}
|
||||
|
||||
5  .github/workflows/stale.yml  vendored
@@ -12,9 +12,8 @@ jobs:
pull-requests: write
runs-on: ubuntu-latest
steps:
- uses: dessant/lock-threads@v5
- uses: dessant/lock-threads@v4.0.0
with:
process-only: 'issues, prs'
issue-inactive-days: 120
pr-inactive-days: 120
log-output: true
@@ -28,7 +27,7 @@ jobs:
This pull request has been automatically locked since there
has not been any recent activity after it was closed.
Please open a new issue for related bugs.
- uses: actions/stale@v9
- uses: actions/stale@v7
with:
operations-per-run: 999
days-before-issue-stale: 180

93  .github/workflows/update-translations.sh  vendored
@@ -1,93 +0,0 @@
|
||||
#!/bin/sh
|
||||
|
||||
set -e
|
||||
|
||||
I18N_DIR=resources/i18n
|
||||
|
||||
# Function to process JSON: remove empty attributes and sort
|
||||
process_json() {
|
||||
jq 'walk(if type == "object" then with_entries(select(.value != null and .value != "" and .value != [] and .value != {})) | to_entries | sort_by(.key) | from_entries else . end)' "$1"
|
||||
}
|
||||
|
||||
# Function to check differences between local and remote translations
|
||||
check_lang_diff() {
|
||||
filename=${I18N_DIR}/"$1".json
|
||||
url=$(curl -s -X POST https://poeditor.com/api/ \
|
||||
-d api_token="${POEDITOR_APIKEY}" \
|
||||
-d action="export" \
|
||||
-d id="${POEDITOR_PROJECTID}" \
|
||||
-d language="$1" \
|
||||
-d type="key_value_json" | jq -r .item)
|
||||
if [ -z "$url" ]; then
|
||||
echo "Failed to export $1"
|
||||
return 1
|
||||
fi
|
||||
curl -sSL "$url" > poeditor.json
|
||||
|
||||
process_json "$filename" > "$filename".tmp
|
||||
process_json poeditor.json > poeditor.tmp
|
||||
|
||||
diff=$(diff -u "$filename".tmp poeditor.tmp) || true
|
||||
if [ -n "$diff" ]; then
|
||||
echo "$diff"
|
||||
mv poeditor.json "$filename"
|
||||
fi
|
||||
|
||||
rm -f poeditor.json poeditor.tmp "$filename".tmp
|
||||
}
|
||||
|
||||
# Function to get the list of languages
|
||||
get_language_list() {
|
||||
response=$(curl -s -X POST https://api.poeditor.com/v2/languages/list \
|
||||
-d api_token="${POEDITOR_APIKEY}" \
|
||||
-d id="${POEDITOR_PROJECTID}")
|
||||
|
||||
echo $response
|
||||
}
|
||||
|
||||
# Function to get the language name from the language code
|
||||
get_language_name() {
|
||||
lang_code="$1"
|
||||
lang_list="$2"
|
||||
|
||||
lang_name=$(echo "$lang_list" | jq -r ".result.languages[] | select(.code == \"$lang_code\") | .name")
|
||||
|
||||
if [ -z "$lang_name" ]; then
|
||||
echo "Error: Language code '$lang_code' not found" >&2
|
||||
return 1
|
||||
fi
|
||||
|
||||
echo "$lang_name"
|
||||
}
|
||||
|
||||
# Function to get the language code from the file path
|
||||
get_lang_code() {
|
||||
filepath="$1"
|
||||
# Extract just the filename
|
||||
filename=$(basename "$filepath")
|
||||
|
||||
# Remove the extension
|
||||
lang_code="${filename%.*}"
|
||||
|
||||
echo "$lang_code"
|
||||
}
|
||||
|
||||
lang_list=$(get_language_list)
|
||||
|
||||
# Check differences for each language
|
||||
for file in ${I18N_DIR}/*.json; do
|
||||
code=$(get_lang_code "$file")
|
||||
lang=$(jq -r .languageName < "$file")
|
||||
lang_name=$(get_language_name "$code" "$lang_list")
|
||||
echo "Downloading $lang_name - $lang ($code)"
|
||||
check_lang_diff "$code"
|
||||
done
|
||||
|
||||
# List changed languages to stderr
|
||||
languages=""
|
||||
for file in $(git diff --name-only --exit-code | grep json); do
|
||||
lang_code=$(get_lang_code "$file")
|
||||
lang_name=$(get_language_name "$lang_code" "$lang_list")
|
||||
languages="${languages}$(echo "$lang_name" | tr -d '\n'), "
|
||||
done
|
||||
echo "${languages%??}" 1>&2
|
||||
15  .github/workflows/update-translations.yml  vendored
@@ -8,26 +8,21 @@ jobs:
|
||||
runs-on: ubuntu-latest
|
||||
if: ${{ github.repository_owner == 'navidrome' }}
|
||||
steps:
|
||||
- uses: actions/checkout@v4
|
||||
- uses: actions/checkout@v3
|
||||
- name: Get updated translations
|
||||
id: poeditor
|
||||
env:
|
||||
POEDITOR_PROJECTID: ${{ secrets.POEDITOR_PROJECTID }}
|
||||
POEDITOR_APIKEY: ${{ secrets.POEDITOR_APIKEY }}
|
||||
run: |
|
||||
.github/workflows/update-translations.sh 2> title.tmp
|
||||
title=$(cat title.tmp)
|
||||
echo "::set-output name=title::$title"
|
||||
rm title.tmp
|
||||
./update-translations.sh
|
||||
- name: Show changes, if any
|
||||
run: |
|
||||
git status --porcelain
|
||||
git diff
|
||||
- name: Create Pull Request
|
||||
uses: peter-evans/create-pull-request@v7
|
||||
uses: peter-evans/create-pull-request@v4
|
||||
with:
|
||||
token: ${{ secrets.PAT }}
|
||||
author: "navidrome-bot <navidrome-bot@navidrome.org>"
|
||||
commit-message: "fix(ui): update ${{ steps.poeditor.outputs.title }} translations from POEditor"
|
||||
title: "fix(ui): update ${{ steps.poeditor.outputs.title }} translations from POEditor"
|
||||
commit-message: Update translations
|
||||
title: Update translations from POEditor
|
||||
branch: update-translations
|
||||
|
||||
11  .gitignore  vendored
@@ -11,17 +11,18 @@ wiki
TODO.md
var
navidrome.toml
!release/linux/navidrome.toml
master.zip
testDB
navidrome.db
cache/*
*.swp
embedded_gen.go
dist
music
*.db*
navidrome.db-shm
navidrome.db-wal
tags
.gitinfo
docker-compose.yml
!contrib/docker-compose.yml
binaries
navidrome-master
*.exe
/api/openapi.yaml
@@ -1,58 +1,35 @@
|
||||
version: "2"
|
||||
run:
|
||||
build-tags:
|
||||
- netgo
|
||||
go: "1.19"
|
||||
|
||||
linters:
|
||||
enable:
|
||||
- asasalint
|
||||
- asciicheck
|
||||
- bidichk
|
||||
- bodyclose
|
||||
- copyloopvar
|
||||
- dogsled
|
||||
- durationcheck
|
||||
- errcheck
|
||||
- errorlint
|
||||
- gocritic
|
||||
- exportloopref
|
||||
- gocyclo
|
||||
- goprintffuncname
|
||||
- gosec
|
||||
- gosimple
|
||||
- govet
|
||||
- ineffassign
|
||||
- misspell
|
||||
- nakedret
|
||||
- nilerr
|
||||
- rowserrcheck
|
||||
- unconvert
|
||||
- whitespace
|
||||
disable:
|
||||
- staticcheck
|
||||
settings:
|
||||
gocritic:
|
||||
disable-all: true
|
||||
enabled-checks:
|
||||
- deprecatedComment
|
||||
gosec:
|
||||
excludes:
|
||||
- G501
|
||||
- G401
|
||||
- G505
|
||||
- G115
|
||||
govet:
|
||||
enable:
|
||||
- nilness
|
||||
exclusions:
|
||||
generated: lax
|
||||
presets:
|
||||
- comments
|
||||
- common-false-positives
|
||||
- legacy
|
||||
- std-error-handling
|
||||
paths:
|
||||
- third_party$
|
||||
- builtin$
|
||||
- examples$
|
||||
formatters:
|
||||
exclusions:
|
||||
generated: lax
|
||||
paths:
|
||||
- third_party$
|
||||
- builtin$
|
||||
- examples$
|
||||
- typecheck
|
||||
- unconvert
|
||||
- unused
|
||||
- whitespace
|
||||
|
||||
issues:
|
||||
exclude-rules:
|
||||
- linters:
|
||||
- gosec
|
||||
text: "(G501|G401|G505):"
|
||||
|
||||
139  .goreleaser.yml  Normal file
@@ -0,0 +1,139 @@
|
||||
# GoReleaser config
|
||||
project_name: navidrome
|
||||
|
||||
builds:
|
||||
- id: navidrome_linux_amd64
|
||||
env:
|
||||
- CGO_ENABLED=1
|
||||
goos:
|
||||
- linux
|
||||
goarch:
|
||||
- amd64
|
||||
flags:
|
||||
- -tags=netgo
|
||||
ldflags:
|
||||
- "-extldflags '-static -lz'"
|
||||
- -s -w -X github.com/navidrome/navidrome/consts.gitSha={{.ShortCommit}} -X github.com/navidrome/navidrome/consts.gitTag={{.Version}}
|
||||
|
||||
- id: navidrome_linux_386
|
||||
env:
|
||||
- CGO_ENABLED=1
|
||||
- PKG_CONFIG_PATH=/i386/lib/pkgconfig
|
||||
goos:
|
||||
- linux
|
||||
goarch:
|
||||
- "386"
|
||||
flags:
|
||||
- -tags=netgo
|
||||
ldflags:
|
||||
- "-extldflags '-static'"
|
||||
- -s -w -X github.com/navidrome/navidrome/consts.gitSha={{.ShortCommit}} -X github.com/navidrome/navidrome/consts.gitTag={{.Version}}
|
||||
|
||||
- id: navidrome_linux_arm
|
||||
env:
|
||||
- CGO_ENABLED=1
|
||||
- CC=arm-linux-gnueabi-gcc
|
||||
- CXX=arm-linux-gnueabi-g++
|
||||
- PKG_CONFIG_PATH=/arm/lib/pkgconfig
|
||||
goos:
|
||||
- linux
|
||||
goarch:
|
||||
- arm
|
||||
goarm:
|
||||
- "5"
|
||||
- "6"
|
||||
- "7"
|
||||
flags:
|
||||
- -tags=netgo
|
||||
ldflags:
|
||||
- "-extldflags '-static'"
|
||||
- -s -w -X github.com/navidrome/navidrome/consts.gitSha={{.ShortCommit}} -X github.com/navidrome/navidrome/consts.gitTag={{.Version}}
|
||||
|
||||
- id: navidrome_linux_arm64
|
||||
env:
|
||||
- CGO_ENABLED=1
|
||||
- CC=aarch64-linux-gnu-gcc
|
||||
- CXX=aarch64-linux-gnu-g++
|
||||
- PKG_CONFIG_PATH=/arm64/lib/pkgconfig
|
||||
goos:
|
||||
- linux
|
||||
goarch:
|
||||
- arm64
|
||||
flags:
|
||||
- -tags=netgo
|
||||
ldflags:
|
||||
- "-extldflags '-static'"
|
||||
- -s -w -X github.com/navidrome/navidrome/consts.gitSha={{.ShortCommit}} -X github.com/navidrome/navidrome/consts.gitTag={{.Version}}
|
||||
|
||||
- id: navidrome_windows_386
|
||||
env:
|
||||
- CGO_ENABLED=1
|
||||
- CC=i686-w64-mingw32-gcc
|
||||
- CXX=i686-w64-mingw32-g++
|
||||
- PKG_CONFIG_PATH=/mingw32/lib/pkgconfig
|
||||
goos:
|
||||
- windows
|
||||
goarch:
|
||||
- "386"
|
||||
flags:
|
||||
- -tags=netgo
|
||||
ldflags:
|
||||
- "-extldflags '-static'"
|
||||
- -s -w -X github.com/navidrome/navidrome/consts.gitSha={{.ShortCommit}} -X github.com/navidrome/navidrome/consts.gitTag={{.Version}}
|
||||
|
||||
- id: navidrome_windows_amd64
|
||||
env:
|
||||
- CGO_ENABLED=1
|
||||
- CC=x86_64-w64-mingw32-gcc
|
||||
- CXX=x86_64-w64-mingw32-g++
|
||||
- PKG_CONFIG_PATH=/mingw64/lib/pkgconfig
|
||||
goos:
|
||||
- windows
|
||||
goarch:
|
||||
- amd64
|
||||
flags:
|
||||
- -tags=netgo
|
||||
ldflags:
|
||||
- "-extldflags '-static'"
|
||||
- -s -w -X github.com/navidrome/navidrome/consts.gitSha={{.ShortCommit}} -X github.com/navidrome/navidrome/consts.gitTag={{.Version}}
|
||||
|
||||
- id: navidrome_darwin_amd64
|
||||
env:
|
||||
- CGO_ENABLED=1
|
||||
- CC=o64-clang
|
||||
- CXX=o64-clang++
|
||||
- PKG_CONFIG_PATH=/darwin/lib/pkgconfig
|
||||
goos:
|
||||
- darwin
|
||||
goarch:
|
||||
- amd64
|
||||
flags:
|
||||
- -tags=netgo
|
||||
ldflags:
|
||||
- -s -w -X github.com/navidrome/navidrome/consts.gitSha={{.ShortCommit}} -X github.com/navidrome/navidrome/consts.gitTag={{.Version}}
|
||||
|
||||
archives:
|
||||
- format_overrides:
|
||||
- goos: windows
|
||||
format: zip
|
||||
replacements:
|
||||
darwin: macOS
|
||||
linux: Linux
|
||||
windows: Windows
|
||||
386: i386
|
||||
amd64: x86_64
|
||||
|
||||
checksum:
|
||||
name_template: "{{ .ProjectName }}_checksums.txt"
|
||||
|
||||
snapshot:
|
||||
name_template: "{{ .Tag }}-SNAPSHOT"
|
||||
|
||||
release:
|
||||
draft: true
|
||||
|
||||
changelog:
|
||||
# sort: asc
|
||||
filters:
|
||||
exclude:
|
||||
- "^docs:"
|
||||
110  AGENTS.md
@@ -1,110 +0,0 @@
# Testing Instructions

- **No implementation task is considered complete until it includes thorough, passing tests that cover the new or changed functionality. All new code must be accompanied by Ginkgo/Gomega tests, and PRs/commits without tests should be considered incomplete.**
- All Go tests in this project **MUST** be written using the **Ginkgo v2** and **Gomega** frameworks.
- To run all tests, use `make test`.
- To run tests for a specific package, use `make test PKG=./pkgname/...`
- Do not run tests in parallel
- Don't use `--fail-on-pending`

## Mocking Convention

- Always try to use the mocks provided in the `tests` package before creating a new mock implementation.
- Only create a new mock if the required functionality is not covered by the existing mocks in `tests`.
- Never mock a real implementation when testing. Remember: there is no value in testing an interface, only the real implementation.
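To make the mocking convention above concrete, here is a minimal, hypothetical sketch of a spec that reuses a shared mock from the `tests` package instead of defining a new fake. The `tests.MockDataStore` and `model.DataStore` names are assumptions used only for illustration; check the `tests` package for the mocks that actually exist before writing a spec like this.

```go
package core_test

import (
	"github.com/navidrome/navidrome/model"
	"github.com/navidrome/navidrome/tests"
	. "github.com/onsi/ginkgo/v2"
	. "github.com/onsi/gomega"
)

var _ = Describe("ExampleService", func() {
	var ds model.DataStore

	BeforeEach(func() {
		// Reuse the shared mock datastore from the tests package (assumed name),
		// rather than creating a one-off fake for this suite.
		ds = &tests.MockDataStore{}
	})

	It("wires the service under test against the shared mock", func() {
		// Real specs would exercise the service here; this only shows where
		// the shared mock plugs in.
		Expect(ds).ToNot(BeNil())
	})
})
```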
## Example

Every package that you write tests for, should have a `*_suite_test.go` file, to hook up the Ginkgo test suite. Example:

```
package core

import (
	"testing"

	"github.com/navidrome/navidrome/log"
	"github.com/navidrome/navidrome/tests"
	. "github.com/onsi/ginkgo/v2"
	. "github.com/onsi/gomega"
)

func TestCore(t *testing.T) {
	tests.Init(t, false)
	log.SetLevel(log.LevelFatal)
	RegisterFailHandler(Fail)
	RunSpecs(t, "Core Suite")
}
```

Never put a `func Test*` in regular *_test.go files, only in `*_suite_test.go` files.

Refer to existing test suites for examples of proper setup and usage, such as the one defined in @core_suite_test.go

## Exceptions

There should be no exceptions to this rule. If you encounter tests written with the standard `testing` package or other frameworks, they should be refactored to use Ginkgo/Gomega. If you need a new mock, first confirm that it does not already exist in the `tests` package.

### Configuration

You can set config values in the BeforeEach/BeforeAll blocks. If you do so, remember to add `DeferCleanup(configtest.SetupConfig())` to reset the values. Example:

```go
BeforeEach(func() {
	DeferCleanup(configtest.SetupConfig())
	conf.Server.EnableDownloads = true
})
```

# Logging System Usage Guide

This project uses a custom logging system built on top of logrus, `log/log.go`. Follow these conventions for all logging:

## Logging API
- Use the provided functions for logging at different levels:
  - `Error(...)`, `Warn(...)`, `Info(...)`, `Debug(...)`, `Trace(...)`, `Fatal(...)`
- These functions accept flexible arguments:
  - The first argument can be a context (`context.Context`), an HTTP request, or `nil`.
  - The next argument is the log message (string or error).
  - Additional arguments are key-value pairs (e.g., `"key", value`).
  - If the last argument is an error, it is logged under the `error` key.

**Examples:**
```go
log.Error("A message")
log.Error(ctx, "A message with context")
log.Error("Failed to save", "id", 123, err)
log.Info(req, "Request received", "user", userID)
```

## Logging errors
- You don't need to add "err" key when logging an error, it is automatically added.
- Error must always be the last parameter in the log call.

Examples:
```go
log.Error("Failed to save", "id", 123, err) // GOOD
log.Error("Failed to save", "id", 123, "err", err) // BAD
log.Error("Failed to save", err, "id", 123) // BAD
```
## Context and Request Logging
- If a context or HTTP request is passed as the first argument, any logger fields in the context are included in the log entry.
- Use `log.NewContext(ctx, "key", value, ...)` to add fields to a context for logging.
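The two bullets above describe the whole context-logging API; here is a minimal sketch of how they combine in an HTTP handler. The package name, handler, field names and the `doWork` helper are illustrative, not part of the codebase; only `log.NewContext`, `log.Info` and `log.Error` come from the guide above.

```go
package server

import (
	"context"
	"net/http"

	"github.com/navidrome/navidrome/log"
)

// doWork is a placeholder for the real handler logic.
func doWork(ctx context.Context) error { return nil }

// handleExample is an illustrative handler; only the log calls follow the
// conventions documented above.
func handleExample(w http.ResponseWriter, r *http.Request) {
	// Add fields to the request context; they are included in any log entry
	// that receives this context (or the request itself) as its first argument.
	ctx := log.NewContext(r.Context(), "endpoint", "example", "method", r.Method)

	log.Info(ctx, "Request received", "path", r.URL.Path)

	if err := doWork(ctx); err != nil {
		// The error goes last and is logged under the "error" key automatically.
		log.Error(ctx, "Request failed", "path", r.URL.Path, err)
		http.Error(w, "internal error", http.StatusInternalServerError)
		return
	}
	w.WriteHeader(http.StatusOK)
}
```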
## Log Levels
- Set the global log level with `log.SetLevel(log.LevelInfo)` or `log.SetLevelString("info")`.
- Per-path log levels can be set with `log.SetLogLevels(map[string]string{"path": "level"})`.
- Use `log.IsGreaterOrEqualTo(level)` to check if a log level is enabled for the current code path.
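A minimal sketch combining the three level controls listed above. It assumes a `log.LevelDebug` constant exists alongside the `log.LevelInfo` and `log.LevelFatal` values shown elsewhere in this guide; the "scanner" path and the logged values are made up for illustration.

```go
package main

import "github.com/navidrome/navidrome/log"

func configureLogging() {
	// Global level; log.SetLevelString("info") is the string-based equivalent.
	log.SetLevel(log.LevelInfo)

	// Raise verbosity for one code path only (path and level values are illustrative).
	log.SetLogLevels(map[string]string{
		"scanner": "debug",
	})

	// Guard an expensive log payload behind a level check for the current path.
	if log.IsGreaterOrEqualTo(log.LevelDebug) {
		log.Debug("Detailed state dump", "items", 42)
	}
}
```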
## Source Line Logging
- Enable source file/line logging with `log.SetLogSourceLine(true)`.

## Best Practices
- Always use the logging API, never log directly with logrus or fmt.
- Prefer structured logging (key-value pairs) for important data.
- Use context/request logging for traceability in web handlers.
- For tests, use Ginkgo/Gomega and set up a test logger as in `log/log_test.go`.

## See Also
- `log/log.go` for implementation details
- `log/log_test.go` for usage examples and test patterns
@@ -48,15 +48,14 @@ This improves the readability of the messages
|
||||
It can be one of the following:
|
||||
1. **feat**: Addition of a new feature
|
||||
2. **fix**: Bug fix
|
||||
3. **sec**: Fixing security issues
|
||||
4. **docs**: Documentation Changes
|
||||
5. **style**: Changes to styling
|
||||
6. **refactor**: Refactoring of code
|
||||
7. **perf**: Code that affects performance
|
||||
8. **test**: Updating or improving the current tests
|
||||
9. **build**: Changes to Build process
|
||||
10. **revert**: Reverting to a previous commit
|
||||
11. **chore** : updating grunt tasks etc
|
||||
3. **docs**: Documentation Changes
|
||||
4. **style**: Changes to styling
|
||||
5. **refactor**: Refactoring of code
|
||||
6. **perf**: Code that affects performance
|
||||
7. **test**: Updating or improving the current tests
|
||||
8. **build**: Changes to Build process
|
||||
9. **revert**: Reverting to a previous commit
|
||||
10. **chore** : updating grunt tasks etc
|
||||
|
||||
If there is a breaking change in your Pull Request, please add `BREAKING CHANGE` in the optional body section
|
||||
|
||||
|
||||
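To illustrate the convention above, here is a hypothetical commit message that uses one of the listed types and carries the `BREAKING CHANGE` note in the optional body; the scope and wording are invented for the example.

```
feat(subsonic): add a new album list type

BREAKING CHANGE: clients that relied on the previous default ordering
must now pass the sort parameter explicitly.
```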
145  Dockerfile
@@ -1,145 +0,0 @@
|
||||
FROM --platform=$BUILDPLATFORM ghcr.io/crazy-max/osxcross:14.5-debian AS osxcross
|
||||
|
||||
########################################################################################################################
|
||||
### Build xx (orignal image: tonistiigi/xx)
|
||||
FROM --platform=$BUILDPLATFORM public.ecr.aws/docker/library/alpine:3.21 AS xx-build
|
||||
|
||||
# v1.5.0
|
||||
ENV XX_VERSION=b4e4c451c778822e6742bfc9d9a91d7c7d885c8a
|
||||
|
||||
RUN apk add -U --no-cache git
|
||||
RUN git clone https://github.com/tonistiigi/xx && \
|
||||
cd xx && \
|
||||
git checkout ${XX_VERSION} && \
|
||||
mkdir -p /out && \
|
||||
cp src/xx-* /out/
|
||||
|
||||
RUN cd /out && \
|
||||
ln -s xx-cc /out/xx-clang && \
|
||||
ln -s xx-cc /out/xx-clang++ && \
|
||||
ln -s xx-cc /out/xx-c++ && \
|
||||
ln -s xx-apt /out/xx-apt-get
|
||||
|
||||
# xx mimics the original tonistiigi/xx image
|
||||
FROM scratch AS xx
|
||||
COPY --from=xx-build /out/ /usr/bin/
|
||||
|
||||
########################################################################################################################
|
||||
### Get TagLib
|
||||
FROM --platform=$BUILDPLATFORM public.ecr.aws/docker/library/alpine:3.21 AS taglib-build
|
||||
ARG TARGETPLATFORM
|
||||
ARG CROSS_TAGLIB_VERSION=2.0.2-1
|
||||
ENV CROSS_TAGLIB_RELEASES_URL=https://github.com/navidrome/cross-taglib/releases/download/v${CROSS_TAGLIB_VERSION}/
|
||||
|
||||
RUN <<EOT
|
||||
PLATFORM=$(echo ${TARGETPLATFORM} | tr '/' '-')
|
||||
FILE=taglib-${PLATFORM}.tar.gz
|
||||
|
||||
DOWNLOAD_URL=${CROSS_TAGLIB_RELEASES_URL}${FILE}
|
||||
wget ${DOWNLOAD_URL}
|
||||
|
||||
mkdir /taglib
|
||||
tar -xzf ${FILE} -C /taglib
|
||||
EOT
|
||||
|
||||
########################################################################################################################
|
||||
### Build Navidrome UI
|
||||
FROM --platform=$BUILDPLATFORM public.ecr.aws/docker/library/node:lts-alpine AS ui
|
||||
WORKDIR /app
|
||||
|
||||
# Install node dependencies
|
||||
COPY ui/package.json ui/package-lock.json ./
|
||||
COPY ui/bin/ ./bin/
|
||||
RUN npm ci
|
||||
|
||||
# Build bundle
|
||||
COPY ui/ ./
|
||||
RUN npm run build -- --outDir=/build
|
||||
|
||||
FROM scratch AS ui-bundle
|
||||
COPY --from=ui /build /build
|
||||
|
||||
########################################################################################################################
|
||||
### Build Navidrome binary
|
||||
FROM --platform=$BUILDPLATFORM public.ecr.aws/docker/library/golang:1.24-bookworm AS base
|
||||
RUN apt-get update && apt-get install -y clang lld
|
||||
COPY --from=xx / /
|
||||
WORKDIR /workspace
|
||||
|
||||
FROM --platform=$BUILDPLATFORM base AS build
|
||||
|
||||
# Install build dependencies for the target platform
|
||||
ARG TARGETPLATFORM
|
||||
|
||||
RUN xx-apt install -y binutils gcc g++ libc6-dev zlib1g-dev
|
||||
RUN xx-verify --setup
|
||||
|
||||
RUN --mount=type=bind,source=. \
|
||||
--mount=type=cache,target=/root/.cache \
|
||||
--mount=type=cache,target=/go/pkg/mod \
|
||||
go mod download
|
||||
|
||||
ARG GIT_SHA
|
||||
ARG GIT_TAG
|
||||
|
||||
RUN --mount=type=bind,source=. \
|
||||
--mount=from=ui,source=/build,target=./ui/build,ro \
|
||||
--mount=from=osxcross,src=/osxcross/SDK,target=/xx-sdk,ro \
|
||||
--mount=type=cache,target=/root/.cache \
|
||||
--mount=type=cache,target=/go/pkg/mod \
|
||||
--mount=from=taglib-build,target=/taglib,src=/taglib,ro <<EOT
|
||||
|
||||
# Setup CGO cross-compilation environment
|
||||
xx-go --wrap
|
||||
export CGO_ENABLED=1
|
||||
export PKG_CONFIG_PATH=/taglib/lib/pkgconfig
|
||||
cat $(go env GOENV)
|
||||
|
||||
# Only Darwin (macOS) requires clang (default), Windows requires gcc, everything else can use any compiler.
|
||||
# So let's use gcc for everything except Darwin.
|
||||
if [ "$(xx-info os)" != "darwin" ]; then
|
||||
export CC=$(xx-info)-gcc
|
||||
export CXX=$(xx-info)-g++
|
||||
export LD_EXTRA="-extldflags '-static -latomic'"
|
||||
fi
|
||||
if [ "$(xx-info os)" = "windows" ]; then
|
||||
export EXT=".exe"
|
||||
fi
|
||||
|
||||
go build -tags=netgo -ldflags="${LD_EXTRA} -w -s \
|
||||
-X github.com/navidrome/navidrome/consts.gitSha=${GIT_SHA} \
|
||||
-X github.com/navidrome/navidrome/consts.gitTag=${GIT_TAG}" \
|
||||
-o /out/navidrome${EXT} .
|
||||
EOT
|
||||
|
||||
# Verify if the binary was built for the correct platform and it is statically linked
|
||||
RUN xx-verify --static /out/navidrome*
|
||||
|
||||
FROM scratch AS binary
|
||||
COPY --from=build /out /
|
||||
|
||||
########################################################################################################################
|
||||
### Build Final Image
|
||||
FROM public.ecr.aws/docker/library/alpine:3.21 AS final
|
||||
LABEL maintainer="deluan@navidrome.org"
|
||||
LABEL org.opencontainers.image.source="https://github.com/navidrome/navidrome"
|
||||
|
||||
# Install ffmpeg and mpv
|
||||
RUN apk add -U --no-cache ffmpeg mpv sqlite
|
||||
|
||||
# Copy navidrome binary
|
||||
COPY --from=build /out/navidrome /app/
|
||||
|
||||
VOLUME ["/data", "/music"]
|
||||
ENV ND_MUSICFOLDER=/music
|
||||
ENV ND_DATAFOLDER=/data
|
||||
ENV ND_CONFIGFILE=/data/navidrome.toml
|
||||
ENV ND_PORT=4533
|
||||
ENV GODEBUG="asyncpreemptoff=1"
|
||||
RUN touch /.nddockerenv
|
||||
|
||||
EXPOSE ${ND_PORT}
|
||||
WORKDIR /app
|
||||
|
||||
ENTRYPOINT ["/app/navidrome"]
|
||||
|
||||
162  Makefile
@@ -3,21 +3,13 @@ NODE_VERSION=$(shell cat .nvmrc)
|
||||
|
||||
ifneq ("$(wildcard .git/HEAD)","")
|
||||
GIT_SHA=$(shell git rev-parse --short HEAD)
|
||||
GIT_TAG=$(shell git describe --tags `git rev-list --tags --max-count=1`)-SNAPSHOT
|
||||
GIT_TAG=$(shell git describe --tags `git rev-list --tags --max-count=1`)
|
||||
else
|
||||
GIT_SHA=source_archive
|
||||
GIT_TAG=$(patsubst navidrome-%,v%,$(notdir $(PWD)))-SNAPSHOT
|
||||
GIT_TAG=$(patsubst navidrome-%,v%,$(notdir $(PWD)))
|
||||
endif
|
||||
|
||||
SUPPORTED_PLATFORMS ?= linux/amd64,linux/arm64,linux/arm/v5,linux/arm/v6,linux/arm/v7,linux/386,darwin/amd64,darwin/arm64,windows/amd64,windows/386
|
||||
IMAGE_PLATFORMS ?= $(shell echo $(SUPPORTED_PLATFORMS) | tr ',' '\n' | grep "linux" | grep -v "arm/v5" | tr '\n' ',' | sed 's/,$$//')
|
||||
PLATFORMS ?= $(SUPPORTED_PLATFORMS)
|
||||
DOCKER_TAG ?= deluan/navidrome:develop

# Taglib version to use in cross-compilation, from https://github.com/navidrome/cross-taglib
CROSS_TAGLIB_VERSION ?= 2.0.2-1

UI_SRC_FILES := $(shell find ui -type f -not -path "ui/build/*" -not -path "ui/node_modules/*")
CI_RELEASER_VERSION=1.20.3-1 ## https://github.com/navidrome/ci-goreleaser

setup: check_env download-deps setup-git ##@1_Run_First Install dependencies and prepare development environment
	@echo Downloading Node dependencies...
@@ -25,61 +17,63 @@ setup: check_env download-deps setup-git ##@1_Run_First Install dependencies and
.PHONY: setup

dev: check_env ##@Development Start Navidrome in development mode, with hot-reload for both frontend and backend
	ND_ENABLEINSIGHTSCOLLECTOR="false" npx foreman -j Procfile.dev -p 4533 start
	npx foreman -j Procfile.dev -p 4533 start
.PHONY: dev

server: check_go_env buildjs ##@Development Start the backend in development mode
	@ND_ENABLEINSIGHTSCOLLECTOR="false" go tool reflex -d none -c reflex.conf
server: check_go_env ##@Development Start the backend in development mode
	@go run github.com/cespare/reflex@latest -d none -c reflex.conf
.PHONY: server

watch: ##@Development Start Go tests in watch mode (re-run when code changes)
	go tool ginkgo watch -tags=netgo -notify ./...
	go run github.com/onsi/ginkgo/v2/ginkgo@latest watch -notify ./...
.PHONY: watch

PKG ?= ./...
test: ##@Development Run Go tests
	go test -tags netgo $(PKG)
	go test -race -shuffle=on ./...
.PHONY: test

testrace: ##@Development Run Go tests with race detector
	go test -tags netgo -race -shuffle=on ./...
.PHONY: test

testall: testrace ##@Development Run Go and JS tests
	@(cd ./ui && npm run test:ci)
testall: test ##@Development Run Go and JS tests, and validate OpenAPI spec
	@(cd ./ui && npm test -- --watchAll=false)
.PHONY: testall

lint: ##@Development Lint Go code
	go run github.com/golangci/golangci-lint/v2/cmd/golangci-lint@latest run -v --timeout 5m
	go run github.com/golangci/golangci-lint/cmd/golangci-lint@latest run -v --timeout 5m
.PHONY: lint

lintall: lint ##@Development Lint Go and JS code
lintapi: api/openapi.yaml ##@Development Lint OpenAPI spec
	npx @redocly/cli lint api/openapi.yaml
.PHONY: lintapi

lintall: lint lintapi ##@Development Lint Go and JS code
	@(cd ./ui && npm run check-formatting) || (echo "\n\nPlease run 'npm run prettier' to fix formatting issues." && exit 1)
	@(cd ./ui && npm run lint)
.PHONY: lintall

format: ##@Development Format code
	@(cd ./ui && npm run prettier)
	@go tool goimports -w `find . -name '*.go' | grep -v _gen.go$$`
	@go mod tidy
.PHONY: format

wire: check_go_env ##@Development Update Dependency Injection
	go tool wire gen -tags=netgo ./...
gen: check_go_env api ##@Development Update Generated Code (wire, mocks, openapi, etc)
	go run github.com/google/wire/cmd/wire@latest ./...
.PHONY: wire

api: check_go_env api/openapi.yaml
	go generate ./server/api/...
.PHONY: api

spec_parts=$(shell find api -name '*.yml')
api/openapi.yaml: $(spec_parts)
	@echo "Bundling OpenAPI spec..."
	npx @redocly/cli bundle api/spec.yml -o api/openapi.yaml

snapshots: ##@Development Update (GoLang) Snapshot tests
	UPDATE_SNAPSHOTS=true go tool ginkgo ./server/subsonic/responses/...
	UPDATE_SNAPSHOTS=true go run github.com/onsi/ginkgo/v2/ginkgo@latest ./server/subsonic/...
.PHONY: snapshots

migration-sql: ##@Development Create an empty SQL migration file
	@if [ -z "${name}" ]; then echo "Usage: make migration-sql name=name_of_migration_file"; exit 1; fi
	go run github.com/pressly/goose/v3/cmd/goose@latest -dir db/migrations create ${name} sql
	go run github.com/pressly/goose/v3/cmd/goose@latest -dir db/migration create ${name} sql
.PHONY: migration

migration-go: ##@Development Create an empty Go migration file
	@if [ -z "${name}" ]; then echo "Usage: make migration-go name=name_of_migration_file"; exit 1; fi
	go run github.com/pressly/goose/v3/cmd/goose@latest -dir db/migrations create ${name}
	go run github.com/pressly/goose/v3/cmd/goose@latest -dir db/migration create ${name}
.PHONY: migration

setup-dev: setup
@@ -91,76 +85,45 @@ setup-git: ##@Development Setup Git hooks (pre-commit and pre-push)
	@(cd .git/hooks && ln -sf ../../git/* .)
.PHONY: setup-git

build: check_go_env buildjs ##@Build Build the project
	go build -ldflags="-X github.com/navidrome/navidrome/consts.gitSha=$(GIT_SHA) -X github.com/navidrome/navidrome/consts.gitTag=$(GIT_TAG)" -tags=netgo
.PHONY: build

buildall: deprecated build
buildall: buildjs build ##@Build Build the project, both frontend and backend
.PHONY: buildall

debug-build: check_go_env buildjs ##@Build Build the project (with remote debug on)
	go build -gcflags="all=-N -l" -ldflags="-X github.com/navidrome/navidrome/consts.gitSha=$(GIT_SHA) -X github.com/navidrome/navidrome/consts.gitTag=$(GIT_TAG)" -tags=netgo
.PHONY: debug-build
build: warning-noui-build check_go_env ##@Build Build only backend
	go build -ldflags="-X github.com/navidrome/navidrome/consts.gitSha=$(GIT_SHA) -X github.com/navidrome/navidrome/consts.gitTag=$(GIT_TAG)-SNAPSHOT" -tags=netgo
.PHONY: build

buildjs: check_node_env ui/build/index.html ##@Build Build only frontend
buildjs: check_node_env ##@Build Build only frontend
	@(cd ./ui && npm run build)
.PHONY: buildjs

docker-buildjs: ##@Build Build only frontend using Docker
	docker build --output "./ui" --target ui-bundle .
.PHONY: docker-buildjs
all: warning-noui-build ##@Cross_Compilation Build binaries for all supported platforms. It does not build the frontend
	docker run -t -v $(PWD):/workspace -w /workspace deluan/ci-goreleaser:$(CI_RELEASER_VERSION) \
		goreleaser release --rm-dist --skip-publish --snapshot
.PHONY: all

ui/build/index.html: $(UI_SRC_FILES)
	@(cd ./ui && npm run build)
single: warning-noui-build ##@Cross_Compilation Build binaries for a single supported platforms. It does not build the frontend
	@if [ -z "${GOOS}" -o -z "${GOARCH}" ]; then \
		echo "Usage: GOOS=<os> GOARCH=<arch> make single"; \
		echo "Options:"; \
		grep -- "- id: navidrome_" .goreleaser.yml | sed 's/- id: navidrome_//g'; \
		exit 1; \
	fi
	@echo "Building binaries for ${GOOS}/${GOARCH}"
	docker run -t -v $(PWD):/workspace -e GOOS -e GOARCH -w /workspace deluan/ci-goreleaser:$(CI_RELEASER_VERSION) \
		goreleaser build --rm-dist --snapshot --single-target --id navidrome_${GOOS}_${GOARCH}
.PHONY: single

docker-platforms: ##@Cross_Compilation List supported platforms
	@echo "Supported platforms:"
	@echo "$(SUPPORTED_PLATFORMS)" | tr ',' '\n' | sort | sed 's/^/ /'
	@echo "\nUsage: make PLATFORMS=\"linux/amd64\" docker-build"
	@echo " make IMAGE_PLATFORMS=\"linux/amd64\" docker-image"
.PHONY: docker-platforms

docker-build: ##@Cross_Compilation Cross-compile for any supported platform (check `make docker-platforms`)
	docker buildx build \
		--platform $(PLATFORMS) \
		--build-arg GIT_TAG=${GIT_TAG} \
		--build-arg GIT_SHA=${GIT_SHA} \
		--build-arg CROSS_TAGLIB_VERSION=${CROSS_TAGLIB_VERSION} \
		--output "./binaries" --target binary .
.PHONY: docker-build

docker-image: ##@Cross_Compilation Build Docker image, tagged as `deluan/navidrome:develop`, override with DOCKER_TAG var. Use IMAGE_PLATFORMS to specify target platforms
	@echo $(IMAGE_PLATFORMS) | grep -q "windows" && echo "ERROR: Windows is not supported for Docker builds" && exit 1 || true
	@echo $(IMAGE_PLATFORMS) | grep -q "darwin" && echo "ERROR: macOS is not supported for Docker builds" && exit 1 || true
	@echo $(IMAGE_PLATFORMS) | grep -q "arm/v5" && echo "ERROR: Linux ARMv5 is not supported for Docker builds" && exit 1 || true
	docker buildx build \
		--platform $(IMAGE_PLATFORMS) \
		--build-arg GIT_TAG=${GIT_TAG} \
		--build-arg GIT_SHA=${GIT_SHA} \
		--build-arg CROSS_TAGLIB_VERSION=${CROSS_TAGLIB_VERSION} \
		--tag $(DOCKER_TAG) .
.PHONY: docker-image

docker-msi: ##@Cross_Compilation Build MSI installer for Windows
	make docker-build PLATFORMS=windows/386,windows/amd64
	DOCKER_CLI_HINTS=false docker build -q -t navidrome-msi-builder -f release/wix/msitools.dockerfile .
	@rm -rf binaries/msi
	docker run -it --rm -v $(PWD):/workspace -v $(PWD)/binaries:/workspace/binaries -e GIT_TAG=${GIT_TAG} \
		navidrome-msi-builder sh -c "release/wix/build_msi.sh /workspace 386 && release/wix/build_msi.sh /workspace amd64"
	@du -h binaries/msi/*.msi
.PHONY: docker-msi

package: docker-build ##@Cross_Compilation Create binaries and packages for ALL supported platforms
	@if [ -z `which goreleaser` ]; then echo "Please install goreleaser first: https://goreleaser.com/install/"; exit 1; fi
	goreleaser release -f release/goreleaser.yml --clean --skip=publish --snapshot
.PHONY: package
warning-noui-build:
	@echo "WARNING: This command does not build the frontend, it uses the latest built with 'make buildjs'"
.PHONY: warning-noui-build

get-music: ##@Development Download some free music from Navidrome's demo instance
	mkdir -p music
	( cd music; \
	curl "https://demo.navidrome.org/rest/download?u=demo&p=demo&f=json&v=1.8.0&c=dev_download&id=2Y3qQA6zJC3ObbBrF9ZBoV" > brock.zip; \
	curl "https://demo.navidrome.org/rest/download?u=demo&p=demo&f=json&v=1.8.0&c=dev_download&id=04HrSORpypcLGNUdQp37gn" > back_on_earth.zip; \
	curl "https://demo.navidrome.org/rest/download?u=demo&p=demo&f=json&v=1.8.0&c=dev_download&id=5xcMPJdeEgNrGtnzYbzAqb" > ugress.zip; \
	curl "https://demo.navidrome.org/rest/download?u=demo&p=demo&f=json&v=1.8.0&c=dev_download&id=1jjQMAZrG3lUsJ0YH6ZRS0" > voodoocuts.zip; \
	curl "https://demo.navidrome.org/rest/download?u=demo&p=demo&f=json&v=1.8.0&c=dev_download&id=ec2093ec4801402f1e17cc462195cdbb" > brock.zip; \
	curl "https://demo.navidrome.org/rest/download?u=demo&p=demo&f=json&v=1.8.0&c=dev_download&id=b376eeb4652d2498aa2b25ba0696725e" > back_on_earth.zip; \
	curl "https://demo.navidrome.org/rest/download?u=demo&p=demo&f=json&v=1.8.0&c=dev_download&id=e49c609b542fc51899ee8b53aa858cb4" > ugress.zip; \
	curl "https://demo.navidrome.org/rest/download?u=demo&p=demo&f=json&v=1.8.0&c=dev_download&id=350bcab3a4c1d93869e39ce496464f03" > voodoocuts.zip; \
	for file in *.zip; do unzip -n $${file}; done )
	@echo "Done. Remember to set your MusicFolder to ./music"
.PHONY: get-music
@@ -169,11 +132,6 @@ get-music: ##@Development Download some free music from Navidrome's demo instanc
##########################################
#### Miscellaneous

clean:
	@rm -rf ./binaries ./dist ./ui/build/*
	@touch ./ui/build/.gitkeep
.PHONY: clean

release:
	@if [[ ! "${V}" =~ ^[0-9]+\.[0-9]+\.[0-9]+.*$$ ]]; then echo "Usage: make release V=X.X.X"; exit 1; fi
	go mod tidy
@@ -213,10 +171,6 @@ check_node_env:
pre-push: lintall testall
.PHONY: pre-push

deprecated:
	@echo "WARNING: This target is deprecated and will be removed in future releases. Use 'make build' instead."
.PHONY: deprecated

.DEFAULT_GOAL := help

HELP_FUN = \

Procfile.dev
@@ -1,2 +1,2 @@
JS: sh -c "cd ./ui && npm start"
GO: go tool reflex -d none -c reflex.conf
GO: go run github.com/cespare/reflex@latest -d none -c reflex.conf

10 README.md
@@ -9,7 +9,6 @@
[](https://discord.gg/xh7j7yF)
[](https://www.reddit.com/r/navidrome/)
[](CODE_OF_CONDUCT.md)
[](https://gurubase.io/g/navidrome)

Navidrome is an open source web-based music collection server and streamer. It gives you freedom to listen to your
music collection from any browser or mobile device. It's like your personal Spotify!
@@ -57,15 +56,6 @@ A share of the revenue helps fund the development of Navidrome at no additional
- **Transcoding** on the fly. Can be set per user/player. **Opus encoding is supported**
- Translated to **various languages**

## Translations

Navidrome uses [POEditor](https://poeditor.com/) for translations, and we are always looking
for [more contributors](https://www.navidrome.org/docs/developers/translations/)

<a href="https://poeditor.com/">
  <img height="32" src="https://github.com/user-attachments/assets/c19b1d2b-01e1-4682-a007-12356c42147c">
</a>

## Documentation
All documentation can be found in the project's website: https://www.navidrome.org/docs.
Here are some useful direct links:

@@ -1,154 +0,0 @@
|
||||
package taglib
|
||||
|
||||
import (
|
||||
"io/fs"
|
||||
"os"
|
||||
"time"
|
||||
|
||||
"github.com/djherbis/times"
|
||||
"github.com/navidrome/navidrome/model"
|
||||
"github.com/navidrome/navidrome/model/metadata"
|
||||
. "github.com/onsi/ginkgo/v2"
|
||||
. "github.com/onsi/gomega"
|
||||
)
|
||||
|
||||
type testFileInfo struct {
|
||||
fs.FileInfo
|
||||
}
|
||||
|
||||
func (t testFileInfo) BirthTime() time.Time {
|
||||
if ts := times.Get(t.FileInfo); ts.HasBirthTime() {
|
||||
return ts.BirthTime()
|
||||
}
|
||||
return t.FileInfo.ModTime()
|
||||
}
|
||||
|
||||
var _ = Describe("Extractor", func() {
|
||||
toP := func(name, sortName, mbid string) model.Participant {
|
||||
return model.Participant{
|
||||
Artist: model.Artist{Name: name, SortArtistName: sortName, MbzArtistID: mbid},
|
||||
}
|
||||
}
|
||||
|
||||
roles := []struct {
|
||||
model.Role
|
||||
model.ParticipantList
|
||||
}{
|
||||
{model.RoleComposer, model.ParticipantList{
|
||||
toP("coma a", "a, coma", "bf13b584-f27c-43db-8f42-32898d33d4e2"),
|
||||
toP("comb", "comb", "924039a2-09c6-4d29-9b4f-50cc54447d36"),
|
||||
}},
|
||||
{model.RoleLyricist, model.ParticipantList{
|
||||
toP("la a", "a, la", "c84f648f-68a6-40a2-a0cb-d135b25da3c2"),
|
||||
toP("lb", "lb", "0a7c582d-143a-4540-b4e9-77200835af65"),
|
||||
}},
|
||||
{model.RoleArranger, model.ParticipantList{
|
||||
toP("aa", "", "4605a1d4-8d15-42a3-bd00-9c20e42f71e6"),
|
||||
toP("ab", "", "002f0ff8-77bf-42cc-8216-61a9c43dc145"),
|
||||
}},
|
||||
{model.RoleConductor, model.ParticipantList{
|
||||
toP("cona", "", "af86879b-2141-42af-bad2-389a4dc91489"),
|
||||
toP("conb", "", "3dfa3c70-d7d3-4b97-b953-c298dd305e12"),
|
||||
}},
|
||||
{model.RoleDirector, model.ParticipantList{
|
||||
toP("dia", "", "f943187f-73de-4794-be47-88c66f0fd0f4"),
|
||||
toP("dib", "", "bceb75da-1853-4b3d-b399-b27f0cafc389"),
|
||||
}},
|
||||
{model.RoleEngineer, model.ParticipantList{
|
||||
toP("ea", "", "f634bf6d-d66a-425d-888a-28ad39392759"),
|
||||
toP("eb", "", "243d64ae-d514-44e1-901a-b918d692baee"),
|
||||
}},
|
||||
{model.RoleProducer, model.ParticipantList{
|
||||
toP("pra", "", "d971c8d7-999c-4a5f-ac31-719721ab35d6"),
|
||||
toP("prb", "", "f0a09070-9324-434f-a599-6d25ded87b69"),
|
||||
}},
|
||||
{model.RoleRemixer, model.ParticipantList{
|
||||
toP("ra", "", "c7dc6095-9534-4c72-87cc-aea0103462cf"),
|
||||
toP("rb", "", "8ebeef51-c08c-4736-992f-c37870becedd"),
|
||||
}},
|
||||
{model.RoleDJMixer, model.ParticipantList{
|
||||
toP("dja", "", "d063f13b-7589-4efc-ab7f-c60e6db17247"),
|
||||
toP("djb", "", "3636670c-385f-4212-89c8-0ff51d6bc456"),
|
||||
}},
|
||||
{model.RoleMixer, model.ParticipantList{
|
||||
toP("ma", "", "53fb5a2d-7016-427e-a563-d91819a5f35a"),
|
||||
toP("mb", "", "64c13e65-f0da-4ab9-a300-71ee53b0376a"),
|
||||
}},
|
||||
}
|
||||
|
||||
var e *extractor
|
||||
|
||||
BeforeEach(func() {
|
||||
e = &extractor{}
|
||||
})
|
||||
|
||||
Describe("Participants", func() {
|
||||
DescribeTable("test tags consistent across formats", func(format string) {
|
||||
path := "tests/fixtures/test." + format
|
||||
mds, err := e.Parse(path)
|
||||
Expect(err).ToNot(HaveOccurred())
|
||||
|
||||
info := mds[path]
|
||||
fileInfo, _ := os.Stat(path)
|
||||
info.FileInfo = testFileInfo{FileInfo: fileInfo}
|
||||
|
||||
metadata := metadata.New(path, info)
|
||||
mf := metadata.ToMediaFile(1, "folderID")
|
||||
|
||||
for _, data := range roles {
|
||||
role := data.Role
|
||||
artists := data.ParticipantList
|
||||
|
||||
actual := mf.Participants[role]
|
||||
Expect(actual).To(HaveLen(len(artists)))
|
||||
|
||||
for i := range artists {
|
||||
actualArtist := actual[i]
|
||||
expectedArtist := artists[i]
|
||||
|
||||
Expect(actualArtist.Name).To(Equal(expectedArtist.Name))
|
||||
Expect(actualArtist.SortArtistName).To(Equal(expectedArtist.SortArtistName))
|
||||
Expect(actualArtist.MbzArtistID).To(Equal(expectedArtist.MbzArtistID))
|
||||
}
|
||||
}
|
||||
|
||||
if format != "m4a" {
|
||||
performers := mf.Participants[model.RolePerformer]
|
||||
Expect(performers).To(HaveLen(8))
|
||||
|
||||
rules := map[string][]string{
|
||||
"pgaa": {"2fd0b311-9fa8-4ff9-be5d-f6f3d16b835e", "Guitar"},
|
||||
"pgbb": {"223d030b-bf97-4c2a-ad26-b7f7bbe25c93", "Guitar", ""},
|
||||
"pvaa": {"cb195f72-448f-41c8-b962-3f3c13d09d38", "Vocals"},
|
||||
"pvbb": {"60a1f832-8ca2-49f6-8660-84d57f07b520", "Vocals", "Flute"},
|
||||
"pfaa": {"51fb40c-0305-4bf9-a11b-2ee615277725", "", "Flute"},
|
||||
}
|
||||
|
||||
for name, rule := range rules {
|
||||
mbid := rule[0]
|
||||
for i := 1; i < len(rule); i++ {
|
||||
found := false
|
||||
|
||||
for _, mapped := range performers {
|
||||
if mapped.Name == name && mapped.MbzArtistID == mbid && mapped.SubRole == rule[i] {
|
||||
found = true
|
||||
break
|
||||
}
|
||||
}
|
||||
|
||||
Expect(found).To(BeTrue(), "Could not find matching artist")
|
||||
}
|
||||
}
|
||||
}
|
||||
},
|
||||
Entry("FLAC format", "flac"),
|
||||
Entry("M4a format", "m4a"),
|
||||
Entry("OGG format", "ogg"),
|
||||
Entry("WMA format", "wv"),
|
||||
|
||||
Entry("MP3 format", "mp3"),
|
||||
Entry("WAV format", "wav"),
|
||||
Entry("AIFF format", "aiff"),
|
||||
)
|
||||
})
|
||||
})
|
||||
@@ -1,151 +0,0 @@
|
||||
package taglib
|
||||
|
||||
import (
|
||||
"io/fs"
|
||||
"path/filepath"
|
||||
"strconv"
|
||||
"strings"
|
||||
"time"
|
||||
|
||||
"github.com/navidrome/navidrome/core/storage/local"
|
||||
"github.com/navidrome/navidrome/log"
|
||||
"github.com/navidrome/navidrome/model/metadata"
|
||||
)
|
||||
|
||||
type extractor struct {
|
||||
baseDir string
|
||||
}
|
||||
|
||||
func (e extractor) Parse(files ...string) (map[string]metadata.Info, error) {
|
||||
results := make(map[string]metadata.Info)
|
||||
for _, path := range files {
|
||||
props, err := e.extractMetadata(path)
|
||||
if err != nil {
|
||||
continue
|
||||
}
|
||||
results[path] = *props
|
||||
}
|
||||
return results, nil
|
||||
}
|
||||
|
||||
func (e extractor) Version() string {
|
||||
return Version()
|
||||
}
|
||||
|
||||
func (e extractor) extractMetadata(filePath string) (*metadata.Info, error) {
|
||||
fullPath := filepath.Join(e.baseDir, filePath)
|
||||
tags, err := Read(fullPath)
|
||||
if err != nil {
|
||||
log.Warn("extractor: Error reading metadata from file. Skipping", "filePath", fullPath, err)
|
||||
return nil, err
|
||||
}
|
||||
|
||||
// Parse audio properties
|
||||
ap := metadata.AudioProperties{}
|
||||
if length, ok := tags["_lengthinmilliseconds"]; ok && len(length) > 0 {
|
||||
millis, _ := strconv.Atoi(length[0])
|
||||
if millis > 0 {
|
||||
ap.Duration = (time.Millisecond * time.Duration(millis)).Round(time.Millisecond * 10)
|
||||
}
|
||||
delete(tags, "_lengthinmilliseconds")
|
||||
}
|
||||
parseProp := func(prop string, target *int) {
|
||||
if value, ok := tags[prop]; ok && len(value) > 0 {
|
||||
*target, _ = strconv.Atoi(value[0])
|
||||
delete(tags, prop)
|
||||
}
|
||||
}
|
||||
parseProp("_bitrate", &ap.BitRate)
|
||||
parseProp("_channels", &ap.Channels)
|
||||
parseProp("_samplerate", &ap.SampleRate)
|
||||
parseProp("_bitspersample", &ap.BitDepth)
|
||||
|
||||
// Parse track/disc totals
|
||||
parseTuple := func(prop string) {
|
||||
tagName := prop + "number"
|
||||
tagTotal := prop + "total"
|
||||
if value, ok := tags[tagName]; ok && len(value) > 0 {
|
||||
parts := strings.Split(value[0], "/")
|
||||
tags[tagName] = []string{parts[0]}
|
||||
if len(parts) == 2 {
|
||||
tags[tagTotal] = []string{parts[1]}
|
||||
}
|
||||
}
|
||||
}
|
||||
parseTuple("track")
|
||||
parseTuple("disc")
|
||||
|
||||
// Adjust some ID3 tags
|
||||
parseLyrics(tags)
|
||||
parseTIPL(tags)
|
||||
delete(tags, "tmcl") // TMCL is already parsed by TagLib
|
||||
|
||||
return &metadata.Info{
|
||||
Tags: tags,
|
||||
AudioProperties: ap,
|
||||
HasPicture: tags["has_picture"] != nil && len(tags["has_picture"]) > 0 && tags["has_picture"][0] == "true",
|
||||
}, nil
|
||||
}
|
||||
|
||||
// parseLyrics make sure lyrics tags have language
|
||||
func parseLyrics(tags map[string][]string) {
|
||||
lyrics := tags["lyrics"]
|
||||
if len(lyrics) > 0 {
|
||||
tags["lyrics:xxx"] = lyrics
|
||||
delete(tags, "lyrics")
|
||||
}
|
||||
}
|
||||
|
||||
// These are the only roles we support, based on Picard's tag map:
|
||||
// https://picard-docs.musicbrainz.org/downloads/MusicBrainz_Picard_Tag_Map.html
|
||||
var tiplMapping = map[string]string{
|
||||
"arranger": "arranger",
|
||||
"engineer": "engineer",
|
||||
"producer": "producer",
|
||||
"mix": "mixer",
|
||||
"DJ-mix": "djmixer",
|
||||
}
|
||||
|
||||
// parseTIPL parses the ID3v2.4 TIPL frame string, which is received from TagLib in the format:
|
||||
//
|
||||
// "arranger Andrew Powell engineer Chris Blair engineer Pat Stapley producer Eric Woolfson".
|
||||
//
|
||||
// and breaks it down into a map of roles and names, e.g.:
|
||||
//
|
||||
// {"arranger": ["Andrew Powell"], "engineer": ["Chris Blair", "Pat Stapley"], "producer": ["Eric Woolfson"]}.
|
||||
func parseTIPL(tags map[string][]string) {
|
||||
tipl := tags["tipl"]
|
||||
if len(tipl) == 0 {
|
||||
return
|
||||
}
|
||||
|
||||
addRole := func(currentRole string, currentValue []string) {
|
||||
if currentRole != "" && len(currentValue) > 0 {
|
||||
role := tiplMapping[currentRole]
|
||||
tags[role] = append(tags[role], strings.Join(currentValue, " "))
|
||||
}
|
||||
}
|
||||
|
||||
var currentRole string
|
||||
var currentValue []string
|
||||
for _, part := range strings.Split(tipl[0], " ") {
|
||||
if _, ok := tiplMapping[part]; ok {
|
||||
addRole(currentRole, currentValue)
|
||||
currentRole = part
|
||||
currentValue = nil
|
||||
continue
|
||||
}
|
||||
currentValue = append(currentValue, part)
|
||||
}
|
||||
addRole(currentRole, currentValue)
|
||||
delete(tags, "tipl")
|
||||
}
|
||||
|
||||
var _ local.Extractor = (*extractor)(nil)
|
||||
|
||||
func init() {
|
||||
local.RegisterExtractor("taglib", func(_ fs.FS, baseDir string) local.Extractor {
|
||||
// ignores fs, as taglib extractor only works with local files
|
||||
return &extractor{baseDir}
|
||||
})
|
||||
}
|
||||
@@ -1,296 +0,0 @@
|
||||
package taglib
|
||||
|
||||
import (
|
||||
"io/fs"
|
||||
"os"
|
||||
"strings"
|
||||
|
||||
"github.com/navidrome/navidrome/utils"
|
||||
. "github.com/onsi/ginkgo/v2"
|
||||
. "github.com/onsi/gomega"
|
||||
)
|
||||
|
||||
var _ = Describe("Extractor", func() {
|
||||
var e *extractor
|
||||
|
||||
BeforeEach(func() {
|
||||
e = &extractor{}
|
||||
})
|
||||
|
||||
Describe("Parse", func() {
|
||||
It("correctly parses metadata from all files in folder", func() {
|
||||
mds, err := e.Parse(
|
||||
"tests/fixtures/test.mp3",
|
||||
"tests/fixtures/test.ogg",
|
||||
)
|
||||
Expect(err).NotTo(HaveOccurred())
|
||||
Expect(mds).To(HaveLen(2))
|
||||
|
||||
// Test MP3
|
||||
m := mds["tests/fixtures/test.mp3"]
|
||||
Expect(m.Tags).To(HaveKeyWithValue("title", []string{"Song"}))
|
||||
Expect(m.Tags).To(HaveKeyWithValue("album", []string{"Album"}))
|
||||
Expect(m.Tags).To(HaveKeyWithValue("artist", []string{"Artist"}))
|
||||
Expect(m.Tags).To(HaveKeyWithValue("albumartist", []string{"Album Artist"}))
|
||||
|
||||
Expect(m.HasPicture).To(BeTrue())
|
||||
Expect(m.AudioProperties.Duration.String()).To(Equal("1.02s"))
|
||||
Expect(m.AudioProperties.BitRate).To(Equal(192))
|
||||
Expect(m.AudioProperties.Channels).To(Equal(2))
|
||||
Expect(m.AudioProperties.SampleRate).To(Equal(44100))
|
||||
|
||||
Expect(m.Tags).To(Or(
|
||||
HaveKeyWithValue("compilation", []string{"1"}),
|
||||
HaveKeyWithValue("tcmp", []string{"1"})),
|
||||
)
|
||||
Expect(m.Tags).To(HaveKeyWithValue("genre", []string{"Rock"}))
|
||||
Expect(m.Tags).To(HaveKeyWithValue("date", []string{"2014-05-21"}))
|
||||
Expect(m.Tags).To(HaveKeyWithValue("originaldate", []string{"1996-11-21"}))
|
||||
Expect(m.Tags).To(HaveKeyWithValue("releasedate", []string{"2020-12-31"}))
|
||||
Expect(m.Tags).To(HaveKeyWithValue("discnumber", []string{"1"}))
|
||||
Expect(m.Tags).To(HaveKeyWithValue("disctotal", []string{"2"}))
|
||||
Expect(m.Tags).To(HaveKeyWithValue("comment", []string{"Comment1\nComment2"}))
|
||||
Expect(m.Tags).To(HaveKeyWithValue("bpm", []string{"123"}))
|
||||
Expect(m.Tags).To(HaveKeyWithValue("replaygain_album_gain", []string{"+3.21518 dB"}))
|
||||
Expect(m.Tags).To(HaveKeyWithValue("replaygain_album_peak", []string{"0.9125"}))
|
||||
Expect(m.Tags).To(HaveKeyWithValue("replaygain_track_gain", []string{"-1.48 dB"}))
|
||||
Expect(m.Tags).To(HaveKeyWithValue("replaygain_track_peak", []string{"0.4512"}))
|
||||
|
||||
Expect(m.Tags).To(HaveKeyWithValue("tracknumber", []string{"2"}))
|
||||
Expect(m.Tags).To(HaveKeyWithValue("tracktotal", []string{"10"}))
|
||||
|
||||
Expect(m.Tags).ToNot(HaveKey("lyrics"))
|
||||
Expect(m.Tags).To(Or(HaveKeyWithValue("lyrics:eng", []string{
|
||||
"[00:00.00]This is\n[00:02.50]English SYLT\n",
|
||||
"[00:00.00]This is\n[00:02.50]English",
|
||||
}), HaveKeyWithValue("lyrics:eng", []string{
|
||||
"[00:00.00]This is\n[00:02.50]English",
|
||||
"[00:00.00]This is\n[00:02.50]English SYLT\n",
|
||||
})))
|
||||
Expect(m.Tags).To(Or(HaveKeyWithValue("lyrics:xxx", []string{
|
||||
"[00:00.00]This is\n[00:02.50]unspecified SYLT\n",
|
||||
"[00:00.00]This is\n[00:02.50]unspecified",
|
||||
}), HaveKeyWithValue("lyrics:xxx", []string{
|
||||
"[00:00.00]This is\n[00:02.50]unspecified",
|
||||
"[00:00.00]This is\n[00:02.50]unspecified SYLT\n",
|
||||
})))
|
||||
|
||||
// Test OGG
|
||||
m = mds["tests/fixtures/test.ogg"]
|
||||
Expect(err).To(BeNil())
|
||||
Expect(m.Tags).To(HaveKeyWithValue("fbpm", []string{"141.7"}))
|
||||
|
||||
// TabLib 1.12 returns 18, previous versions return 39.
|
||||
// See https://github.com/taglib/taglib/commit/2f238921824741b2cfe6fbfbfc9701d9827ab06b
|
||||
Expect(m.AudioProperties.BitRate).To(BeElementOf(18, 19, 39, 40, 43, 49))
|
||||
Expect(m.AudioProperties.Channels).To(BeElementOf(2))
|
||||
Expect(m.AudioProperties.SampleRate).To(BeElementOf(8000))
|
||||
Expect(m.AudioProperties.SampleRate).To(BeElementOf(8000))
|
||||
Expect(m.HasPicture).To(BeTrue())
|
||||
})
|
||||
|
||||
DescribeTable("Format-Specific tests",
|
||||
func(file, duration string, channels, samplerate, bitdepth int, albumGain, albumPeak, trackGain, trackPeak string, id3Lyrics bool, image bool) {
|
||||
file = "tests/fixtures/" + file
|
||||
mds, err := e.Parse(file)
|
||||
Expect(err).NotTo(HaveOccurred())
|
||||
Expect(mds).To(HaveLen(1))
|
||||
|
||||
m := mds[file]
|
||||
|
||||
Expect(m.HasPicture).To(Equal(image))
|
||||
Expect(m.AudioProperties.Duration.String()).To(Equal(duration))
|
||||
Expect(m.AudioProperties.Channels).To(Equal(channels))
|
||||
Expect(m.AudioProperties.SampleRate).To(Equal(samplerate))
|
||||
Expect(m.AudioProperties.BitDepth).To(Equal(bitdepth))
|
||||
|
||||
Expect(m.Tags).To(Or(
|
||||
HaveKeyWithValue("replaygain_album_gain", []string{albumGain}),
|
||||
HaveKeyWithValue("----:com.apple.itunes:replaygain_track_gain", []string{albumGain}),
|
||||
))
|
||||
|
||||
Expect(m.Tags).To(Or(
|
||||
HaveKeyWithValue("replaygain_album_peak", []string{albumPeak}),
|
||||
HaveKeyWithValue("----:com.apple.itunes:replaygain_album_peak", []string{albumPeak}),
|
||||
))
|
||||
Expect(m.Tags).To(Or(
|
||||
HaveKeyWithValue("replaygain_track_gain", []string{trackGain}),
|
||||
HaveKeyWithValue("----:com.apple.itunes:replaygain_track_gain", []string{trackGain}),
|
||||
))
|
||||
Expect(m.Tags).To(Or(
|
||||
HaveKeyWithValue("replaygain_track_peak", []string{trackPeak}),
|
||||
HaveKeyWithValue("----:com.apple.itunes:replaygain_track_peak", []string{trackPeak}),
|
||||
))
|
||||
|
||||
Expect(m.Tags).To(HaveKeyWithValue("title", []string{"Title"}))
|
||||
Expect(m.Tags).To(HaveKeyWithValue("album", []string{"Album"}))
|
||||
Expect(m.Tags).To(HaveKeyWithValue("artist", []string{"Artist"}))
|
||||
Expect(m.Tags).To(HaveKeyWithValue("albumartist", []string{"Album Artist"}))
|
||||
Expect(m.Tags).To(HaveKeyWithValue("genre", []string{"Rock"}))
|
||||
Expect(m.Tags).To(HaveKeyWithValue("date", []string{"2014"}))
|
||||
|
||||
Expect(m.Tags).To(HaveKeyWithValue("bpm", []string{"123"}))
|
||||
Expect(m.Tags).To(Or(
|
||||
HaveKeyWithValue("tracknumber", []string{"3"}),
|
||||
HaveKeyWithValue("tracknumber", []string{"3/10"}),
|
||||
))
|
||||
if !strings.HasSuffix(file, "test.wma") {
|
||||
// TODO Not sure why this is not working for WMA
|
||||
Expect(m.Tags).To(HaveKeyWithValue("tracktotal", []string{"10"}))
|
||||
}
|
||||
Expect(m.Tags).To(Or(
|
||||
HaveKeyWithValue("discnumber", []string{"1"}),
|
||||
HaveKeyWithValue("discnumber", []string{"1/2"}),
|
||||
))
|
||||
Expect(m.Tags).To(HaveKeyWithValue("disctotal", []string{"2"}))
|
||||
|
||||
// WMA does not have a "compilation" tag, but "wm/iscompilation"
|
||||
Expect(m.Tags).To(Or(
|
||||
HaveKeyWithValue("compilation", []string{"1"}),
|
||||
HaveKeyWithValue("wm/iscompilation", []string{"1"})),
|
||||
)
|
||||
|
||||
if id3Lyrics {
|
||||
Expect(m.Tags).To(HaveKeyWithValue("lyrics:eng", []string{
|
||||
"[00:00.00]This is\n[00:02.50]English",
|
||||
}))
|
||||
Expect(m.Tags).To(HaveKeyWithValue("lyrics:xxx", []string{
|
||||
"[00:00.00]This is\n[00:02.50]unspecified",
|
||||
}))
|
||||
} else {
|
||||
Expect(m.Tags).To(HaveKeyWithValue("lyrics:xxx", []string{
|
||||
"[00:00.00]This is\n[00:02.50]unspecified",
|
||||
"[00:00.00]This is\n[00:02.50]English",
|
||||
}))
|
||||
}
|
||||
|
||||
Expect(m.Tags).To(HaveKeyWithValue("comment", []string{"Comment1\nComment2"}))
|
||||
},
|
||||
|
||||
// ffmpeg -f lavfi -i "sine=frequency=1200:duration=1" test.flac
|
||||
Entry("correctly parses flac tags", "test.flac", "1s", 1, 44100, 16, "+4.06 dB", "0.12496948", "+4.06 dB", "0.12496948", false, true),
|
||||
|
||||
Entry("correctly parses m4a (aac) gain tags", "01 Invisible (RED) Edit Version.m4a", "1.04s", 2, 44100, 16, "0.37", "0.48", "0.37", "0.48", false, true),
|
||||
Entry("correctly parses m4a (aac) gain tags (uppercase)", "test.m4a", "1.04s", 2, 44100, 16, "0.37", "0.48", "0.37", "0.48", false, true),
|
||||
Entry("correctly parses ogg (vorbis) tags", "test.ogg", "1.04s", 2, 8000, 0, "+7.64 dB", "0.11772506", "+7.64 dB", "0.11772506", false, true),
|
||||
|
||||
// ffmpeg -f lavfi -i "sine=frequency=900:duration=1" test.wma
|
||||
// Weird note: for the tag parsing to work, the lyrics are actually stored in the reverse order
|
||||
Entry("correctly parses wma/asf tags", "test.wma", "1.02s", 1, 44100, 16, "3.27 dB", "0.132914", "3.27 dB", "0.132914", false, true),
|
||||
|
||||
// ffmpeg -f lavfi -i "sine=frequency=800:duration=1" test.wv
|
||||
Entry("correctly parses wv (wavpak) tags", "test.wv", "1s", 1, 44100, 16, "3.43 dB", "0.125061", "3.43 dB", "0.125061", false, false),
|
||||
|
||||
// ffmpeg -f lavfi -i "sine=frequency=1000:duration=1" test.wav
|
||||
Entry("correctly parses wav tags", "test.wav", "1s", 1, 44100, 16, "3.06 dB", "0.125056", "3.06 dB", "0.125056", true, true),
|
||||
|
||||
// ffmpeg -f lavfi -i "sine=frequency=1400:duration=1" test.aiff
|
||||
Entry("correctly parses aiff tags", "test.aiff", "1s", 1, 44100, 16, "2.00 dB", "0.124972", "2.00 dB", "0.124972", true, true),
|
||||
)
|
||||
|
||||
// Skip these tests when running as root
|
||||
Context("Access Forbidden", func() {
|
||||
var accessForbiddenFile string
|
||||
var RegularUserContext = XContext
|
||||
var isRegularUser = os.Getuid() != 0
|
||||
if isRegularUser {
|
||||
RegularUserContext = Context
|
||||
}
|
||||
|
||||
// Only run permission tests if we are not root
|
||||
RegularUserContext("when run without root privileges", func() {
|
||||
BeforeEach(func() {
|
||||
accessForbiddenFile = utils.TempFileName("access_forbidden-", ".mp3")
|
||||
|
||||
f, err := os.OpenFile(accessForbiddenFile, os.O_WRONLY|os.O_CREATE, 0222)
|
||||
Expect(err).ToNot(HaveOccurred())
|
||||
|
||||
DeferCleanup(func() {
|
||||
Expect(f.Close()).To(Succeed())
|
||||
Expect(os.Remove(accessForbiddenFile)).To(Succeed())
|
||||
})
|
||||
})
|
||||
|
||||
It("correctly handle unreadable file due to insufficient read permission", func() {
|
||||
_, err := e.extractMetadata(accessForbiddenFile)
|
||||
Expect(err).To(MatchError(os.ErrPermission))
|
||||
})
|
||||
|
||||
It("skips the file if it cannot be read", func() {
|
||||
files := []string{
|
||||
"tests/fixtures/test.mp3",
|
||||
"tests/fixtures/test.ogg",
|
||||
accessForbiddenFile,
|
||||
}
|
||||
mds, err := e.Parse(files...)
|
||||
Expect(err).NotTo(HaveOccurred())
|
||||
Expect(mds).To(HaveLen(2))
|
||||
Expect(mds).ToNot(HaveKey(accessForbiddenFile))
|
||||
})
|
||||
})
|
||||
})
|
||||
|
||||
})
|
||||
|
||||
Describe("Error Checking", func() {
|
||||
It("returns a generic ErrPath if file does not exist", func() {
|
||||
testFilePath := "tests/fixtures/NON_EXISTENT.ogg"
|
||||
_, err := e.extractMetadata(testFilePath)
|
||||
Expect(err).To(MatchError(fs.ErrNotExist))
|
||||
})
|
||||
It("does not throw a SIGSEGV error when reading a file with an invalid frame", func() {
|
||||
// File has an empty TDAT frame
|
||||
md, err := e.extractMetadata("tests/fixtures/invalid-files/test-invalid-frame.mp3")
|
||||
Expect(err).ToNot(HaveOccurred())
|
||||
Expect(md.Tags).To(HaveKeyWithValue("albumartist", []string{"Elvis Presley"}))
|
||||
})
|
||||
})
|
||||
|
||||
Describe("parseTIPL", func() {
|
||||
var tags map[string][]string
|
||||
|
||||
BeforeEach(func() {
|
||||
tags = make(map[string][]string)
|
||||
})
|
||||
|
||||
Context("when the TIPL string is populated", func() {
|
||||
It("correctly parses roles and names", func() {
|
||||
tags["tipl"] = []string{"arranger Andrew Powell DJ-mix François Kevorkian DJ-mix Jane Doe engineer Chris Blair"}
|
||||
parseTIPL(tags)
|
||||
Expect(tags["arranger"]).To(ConsistOf("Andrew Powell"))
|
||||
Expect(tags["engineer"]).To(ConsistOf("Chris Blair"))
|
||||
Expect(tags["djmixer"]).To(ConsistOf("François Kevorkian", "Jane Doe"))
|
||||
})
|
||||
|
||||
It("handles multiple names for a single role", func() {
|
||||
tags["tipl"] = []string{"engineer Pat Stapley producer Eric Woolfson engineer Chris Blair"}
|
||||
parseTIPL(tags)
|
||||
Expect(tags["producer"]).To(ConsistOf("Eric Woolfson"))
|
||||
Expect(tags["engineer"]).To(ConsistOf("Pat Stapley", "Chris Blair"))
|
||||
})
|
||||
|
||||
It("discards roles without names", func() {
|
||||
tags["tipl"] = []string{"engineer Pat Stapley producer engineer Chris Blair"}
|
||||
parseTIPL(tags)
|
||||
Expect(tags).ToNot(HaveKey("producer"))
|
||||
Expect(tags["engineer"]).To(ConsistOf("Pat Stapley", "Chris Blair"))
|
||||
})
|
||||
})
|
||||
|
||||
Context("when the TIPL string is empty", func() {
|
||||
It("does nothing", func() {
|
||||
tags["tipl"] = []string{""}
|
||||
parseTIPL(tags)
|
||||
Expect(tags).To(BeEmpty())
|
||||
})
|
||||
})
|
||||
|
||||
Context("when the TIPL is not present", func() {
|
||||
It("does nothing", func() {
|
||||
parseTIPL(tags)
|
||||
Expect(tags).To(BeEmpty())
|
||||
})
|
||||
})
|
||||
})
|
||||
|
||||
})
|
||||
@@ -1,249 +0,0 @@
|
||||
#include <stdlib.h>
|
||||
#include <string.h>
|
||||
#include <typeinfo>
|
||||
|
||||
#define TAGLIB_STATIC
|
||||
#include <apeproperties.h>
|
||||
#include <apetag.h>
|
||||
#include <aifffile.h>
|
||||
#include <asffile.h>
|
||||
#include <dsffile.h>
|
||||
#include <fileref.h>
|
||||
#include <flacfile.h>
|
||||
#include <id3v2tag.h>
|
||||
#include <unsynchronizedlyricsframe.h>
|
||||
#include <synchronizedlyricsframe.h>
|
||||
#include <mp4file.h>
|
||||
#include <mpegfile.h>
|
||||
#include <opusfile.h>
|
||||
#include <tpropertymap.h>
|
||||
#include <vorbisfile.h>
|
||||
#include <wavfile.h>
|
||||
#include <wavfile.h>
|
||||
#include <wavpackfile.h>
|
||||
|
||||
#include "taglib_wrapper.h"
|
||||
|
||||
char has_cover(const TagLib::FileRef f);
|
||||
|
||||
static char TAGLIB_VERSION[16];
|
||||
|
||||
char* taglib_version() {
|
||||
snprintf((char *)TAGLIB_VERSION, 16, "%d.%d.%d", TAGLIB_MAJOR_VERSION, TAGLIB_MINOR_VERSION, TAGLIB_PATCH_VERSION);
|
||||
return (char *)TAGLIB_VERSION;
|
||||
}
|
||||
|
||||
int taglib_read(const FILENAME_CHAR_T *filename, unsigned long id) {
|
||||
TagLib::FileRef f(filename, true, TagLib::AudioProperties::Fast);
|
||||
|
||||
if (f.isNull()) {
|
||||
return TAGLIB_ERR_PARSE;
|
||||
}
|
||||
|
||||
if (!f.audioProperties()) {
|
||||
return TAGLIB_ERR_AUDIO_PROPS;
|
||||
}
|
||||
|
||||
// Add audio properties to the tags
|
||||
const TagLib::AudioProperties *props(f.audioProperties());
|
||||
goPutInt(id, (char *)"_lengthinmilliseconds", props->lengthInMilliseconds());
|
||||
goPutInt(id, (char *)"_bitrate", props->bitrate());
|
||||
goPutInt(id, (char *)"_channels", props->channels());
|
||||
goPutInt(id, (char *)"_samplerate", props->sampleRate());
|
||||
|
||||
if (const auto* apeProperties{ dynamic_cast<const TagLib::APE::Properties*>(props) })
|
||||
goPutInt(id, (char *)"_bitspersample", apeProperties->bitsPerSample());
|
||||
if (const auto* asfProperties{ dynamic_cast<const TagLib::ASF::Properties*>(props) })
|
||||
goPutInt(id, (char *)"_bitspersample", asfProperties->bitsPerSample());
|
||||
else if (const auto* flacProperties{ dynamic_cast<const TagLib::FLAC::Properties*>(props) })
|
||||
goPutInt(id, (char *)"_bitspersample", flacProperties->bitsPerSample());
|
||||
else if (const auto* mp4Properties{ dynamic_cast<const TagLib::MP4::Properties*>(props) })
|
||||
goPutInt(id, (char *)"_bitspersample", mp4Properties->bitsPerSample());
|
||||
else if (const auto* wavePackProperties{ dynamic_cast<const TagLib::WavPack::Properties*>(props) })
|
||||
goPutInt(id, (char *)"_bitspersample", wavePackProperties->bitsPerSample());
|
||||
else if (const auto* aiffProperties{ dynamic_cast<const TagLib::RIFF::AIFF::Properties*>(props) })
|
||||
goPutInt(id, (char *)"_bitspersample", aiffProperties->bitsPerSample());
|
||||
else if (const auto* wavProperties{ dynamic_cast<const TagLib::RIFF::WAV::Properties*>(props) })
|
||||
goPutInt(id, (char *)"_bitspersample", wavProperties->bitsPerSample());
|
||||
else if (const auto* dsfProperties{ dynamic_cast<const TagLib::DSF::Properties*>(props) })
|
||||
goPutInt(id, (char *)"_bitspersample", dsfProperties->bitsPerSample());
|
||||
|
||||
// Send all properties to the Go map
|
||||
TagLib::PropertyMap tags = f.file()->properties();
|
||||
|
||||
TagLib::ID3v2::Tag *id3Tags = NULL;
|
||||
|
||||
// Get some extended/non-standard ID3-only tags (ex: iTunes extended frames)
|
||||
TagLib::MPEG::File *mp3File(dynamic_cast<TagLib::MPEG::File *>(f.file()));
|
||||
if (mp3File != NULL) {
|
||||
id3Tags = mp3File->ID3v2Tag();
|
||||
}
|
||||
|
||||
if (id3Tags == NULL) {
|
||||
TagLib::RIFF::WAV::File *wavFile(dynamic_cast<TagLib::RIFF::WAV::File *>(f.file()));
|
||||
if (wavFile != NULL && wavFile->hasID3v2Tag()) {
|
||||
id3Tags = wavFile->ID3v2Tag();
|
||||
}
|
||||
}
|
||||
|
||||
if (id3Tags == NULL) {
|
||||
TagLib::RIFF::AIFF::File *aiffFile(dynamic_cast<TagLib::RIFF::AIFF::File *>(f.file()));
|
||||
if (aiffFile && aiffFile->hasID3v2Tag()) {
|
||||
id3Tags = aiffFile->tag();
|
||||
}
|
||||
}
|
||||
|
||||
// Yes, it is possible to have ID3v2 tags in FLAC. However, that can cause problems
|
||||
// with many players, so they will not be parsed
|
||||
|
||||
if (id3Tags != NULL) {
|
||||
const auto &frames = id3Tags->frameListMap();
|
||||
|
||||
for (const auto &kv: frames) {
|
||||
if (kv.first == "USLT") {
|
||||
for (const auto &tag: kv.second) {
|
||||
TagLib::ID3v2::UnsynchronizedLyricsFrame *frame = dynamic_cast<TagLib::ID3v2::UnsynchronizedLyricsFrame *>(tag);
|
||||
if (frame == NULL) continue;
|
||||
|
||||
tags.erase("LYRICS");
|
||||
|
||||
const auto bv = frame->language();
|
||||
char language[4] = {'x', 'x', 'x', '\0'};
|
||||
if (bv.size() == 3) {
|
||||
strncpy(language, bv.data(), 3);
|
||||
}
|
||||
|
||||
char *val = (char *)frame->text().toCString(true);
|
||||
|
||||
goPutLyrics(id, language, val);
|
||||
}
|
||||
} else if (kv.first == "SYLT") {
|
||||
for (const auto &tag: kv.second) {
|
||||
TagLib::ID3v2::SynchronizedLyricsFrame *frame = dynamic_cast<TagLib::ID3v2::SynchronizedLyricsFrame *>(tag);
|
||||
if (frame == NULL) continue;
|
||||
|
||||
const auto bv = frame->language();
|
||||
char language[4] = {'x', 'x', 'x', '\0'};
|
||||
if (bv.size() == 3) {
|
||||
strncpy(language, bv.data(), 3);
|
||||
}
|
||||
|
||||
const auto format = frame->timestampFormat();
|
||||
if (format == TagLib::ID3v2::SynchronizedLyricsFrame::AbsoluteMilliseconds) {
|
||||
|
||||
for (const auto &line: frame->synchedText()) {
|
||||
char *text = (char *)line.text.toCString(true);
|
||||
goPutLyricLine(id, language, text, line.time);
|
||||
}
|
||||
} else if (format == TagLib::ID3v2::SynchronizedLyricsFrame::AbsoluteMpegFrames) {
|
||||
const int sampleRate = props->sampleRate();
|
||||
|
||||
if (sampleRate != 0) {
|
||||
for (const auto &line: frame->synchedText()) {
|
||||
const int timeInMs = (line.time * 1000) / sampleRate;
|
||||
char *text = (char *)line.text.toCString(true);
|
||||
goPutLyricLine(id, language, text, timeInMs);
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
} else if (kv.first == "TIPL"){
|
||||
if (!kv.second.isEmpty()) {
|
||||
tags.insert(kv.first, kv.second.front()->toString());
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// M4A may have some iTunes specific tags not captured by the PropertyMap interface
|
||||
TagLib::MP4::File *m4afile(dynamic_cast<TagLib::MP4::File *>(f.file()));
|
||||
if (m4afile != NULL) {
|
||||
const auto itemListMap = m4afile->tag()->itemMap();
|
||||
for (const auto item: itemListMap) {
|
||||
char *key = (char *)item.first.toCString(true);
|
||||
for (const auto value: item.second.toStringList()) {
|
||||
char *val = (char *)value.toCString(true);
|
||||
goPutM4AStr(id, key, val);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// WMA/ASF files may have additional tags not captured by the PropertyMap interface
|
||||
TagLib::ASF::File *asfFile(dynamic_cast<TagLib::ASF::File *>(f.file()));
|
||||
if (asfFile != NULL) {
|
||||
const TagLib::ASF::Tag *asfTags{asfFile->tag()};
|
||||
const auto itemListMap = asfTags->attributeListMap();
|
||||
for (const auto item : itemListMap) {
|
||||
tags.insert(item.first, item.second.front().toString());
|
||||
}
|
||||
}
|
||||
|
||||
// Send all collected tags to the Go map
|
||||
for (TagLib::PropertyMap::ConstIterator i = tags.begin(); i != tags.end();
|
||||
++i) {
|
||||
char *key = (char *)i->first.toCString(true);
|
||||
for (TagLib::StringList::ConstIterator j = i->second.begin();
|
||||
j != i->second.end(); ++j) {
|
||||
char *val = (char *)(*j).toCString(true);
|
||||
goPutStr(id, key, val);
|
||||
}
|
||||
}
|
||||
|
||||
// Cover art has to be handled separately
|
||||
if (has_cover(f)) {
|
||||
goPutStr(id, (char *)"has_picture", (char *)"true");
|
||||
}
|
||||
|
||||
return 0;
|
||||
}
|
||||
|
||||
// Detect if the file has cover art. Returns 1 if the file has cover art, 0 otherwise.
|
||||
char has_cover(const TagLib::FileRef f) {
|
||||
char hasCover = 0;
|
||||
// ----- MP3
|
||||
if (TagLib::MPEG::File * mp3File{dynamic_cast<TagLib::MPEG::File *>(f.file())}) {
|
||||
if (mp3File->ID3v2Tag()) {
|
||||
const auto &frameListMap{mp3File->ID3v2Tag()->frameListMap()};
|
||||
hasCover = !frameListMap["APIC"].isEmpty();
|
||||
}
|
||||
}
|
||||
// ----- FLAC
|
||||
else if (TagLib::FLAC::File * flacFile{dynamic_cast<TagLib::FLAC::File *>(f.file())}) {
|
||||
hasCover = !flacFile->pictureList().isEmpty();
|
||||
}
|
||||
// ----- MP4
|
||||
else if (TagLib::MP4::File * mp4File{dynamic_cast<TagLib::MP4::File *>(f.file())}) {
|
||||
auto &coverItem{mp4File->tag()->itemMap()["covr"]};
|
||||
TagLib::MP4::CoverArtList coverArtList{coverItem.toCoverArtList()};
|
||||
hasCover = !coverArtList.isEmpty();
|
||||
}
|
||||
// ----- Ogg
|
||||
else if (TagLib::Ogg::Vorbis::File * vorbisFile{dynamic_cast<TagLib::Ogg::Vorbis::File *>(f.file())}) {
|
||||
hasCover = !vorbisFile->tag()->pictureList().isEmpty();
|
||||
}
|
||||
// ----- Opus
|
||||
else if (TagLib::Ogg::Opus::File * opusFile{dynamic_cast<TagLib::Ogg::Opus::File *>(f.file())}) {
|
||||
hasCover = !opusFile->tag()->pictureList().isEmpty();
|
||||
}
|
||||
// ----- WAV
|
||||
else if (TagLib::RIFF::WAV::File * wavFile{ dynamic_cast<TagLib::RIFF::WAV::File*>(f.file()) }) {
|
||||
if (wavFile->hasID3v2Tag()) {
|
||||
const auto& frameListMap{ wavFile->ID3v2Tag()->frameListMap() };
|
||||
hasCover = !frameListMap["APIC"].isEmpty();
|
||||
}
|
||||
}
|
||||
// ----- AIFF
|
||||
else if (TagLib::RIFF::AIFF::File * aiffFile{ dynamic_cast<TagLib::RIFF::AIFF::File *>(f.file())}) {
|
||||
if (aiffFile->hasID3v2Tag()) {
|
||||
const auto& frameListMap{ aiffFile->tag()->frameListMap() };
|
||||
hasCover = !frameListMap["APIC"].isEmpty();
|
||||
}
|
||||
}
|
||||
// ----- WMA
|
||||
else if (TagLib::ASF::File * asfFile{dynamic_cast<TagLib::ASF::File *>(f.file())}) {
|
||||
const TagLib::ASF::Tag *tag{ asfFile->tag() };
|
||||
hasCover = tag && asfFile->tag()->attributeListMap().contains("WM/Picture");
|
||||
}
|
||||
|
||||
return hasCover;
|
||||
}
|
||||
@@ -1,157 +0,0 @@
|
||||
package taglib
|
||||
|
||||
/*
|
||||
#cgo !windows pkg-config: --define-prefix taglib
|
||||
#cgo windows pkg-config: taglib
|
||||
#cgo illumos LDFLAGS: -lstdc++ -lsendfile
|
||||
#cgo linux darwin CXXFLAGS: -std=c++11
|
||||
#cgo darwin LDFLAGS: -L/opt/homebrew/opt/taglib/lib
|
||||
#include <stdio.h>
|
||||
#include <stdlib.h>
|
||||
#include <string.h>
|
||||
#include "taglib_wrapper.h"
|
||||
*/
|
||||
import "C"
|
||||
import (
|
||||
"encoding/json"
|
||||
"fmt"
|
||||
"os"
|
||||
"runtime/debug"
|
||||
"strconv"
|
||||
"strings"
|
||||
"sync"
|
||||
"sync/atomic"
|
||||
"unsafe"
|
||||
|
||||
"github.com/navidrome/navidrome/log"
|
||||
)
|
||||
|
||||
const iTunesKeyPrefix = "----:com.apple.itunes:"
|
||||
|
||||
func Version() string {
|
||||
return C.GoString(C.taglib_version())
|
||||
}
|
||||
|
||||
func Read(filename string) (tags map[string][]string, err error) {
|
||||
// Do not crash on failures in the C code/library
|
||||
debug.SetPanicOnFault(true)
|
||||
defer func() {
|
||||
if r := recover(); r != nil {
|
||||
log.Error("extractor: recovered from panic when reading tags", "file", filename, "error", r)
|
||||
err = fmt.Errorf("extractor: recovered from panic: %s", r)
|
||||
}
|
||||
}()
|
||||
|
||||
fp := getFilename(filename)
|
||||
defer C.free(unsafe.Pointer(fp))
|
||||
id, m, release := newMap()
|
||||
defer release()
|
||||
|
||||
log.Trace("extractor: reading tags", "filename", filename, "map_id", id)
|
||||
res := C.taglib_read(fp, C.ulong(id))
|
||||
switch res {
|
||||
case C.TAGLIB_ERR_PARSE:
|
||||
// Check additional case whether the file is unreadable due to permission
|
||||
file, fileErr := os.OpenFile(filename, os.O_RDONLY, 0600)
|
||||
defer file.Close()
|
||||
|
||||
if os.IsPermission(fileErr) {
|
||||
return nil, fmt.Errorf("navidrome does not have permission: %w", fileErr)
|
||||
} else if fileErr != nil {
|
||||
return nil, fmt.Errorf("cannot parse file media file: %w", fileErr)
|
||||
} else {
|
||||
return nil, fmt.Errorf("cannot parse file media file")
|
||||
}
|
||||
case C.TAGLIB_ERR_AUDIO_PROPS:
|
||||
return nil, fmt.Errorf("can't get audio properties from file")
|
||||
}
|
||||
if log.IsGreaterOrEqualTo(log.LevelDebug) {
|
||||
j, _ := json.Marshal(m)
|
||||
log.Trace("extractor: read tags", "tags", string(j), "filename", filename, "id", id)
|
||||
} else {
|
||||
log.Trace("extractor: read tags", "tags", m, "filename", filename, "id", id)
|
||||
}
|
||||
|
||||
return m, nil
|
||||
}
|
||||
|
||||
type tagMap map[string][]string
|
||||
|
||||
var allMaps sync.Map
|
||||
var mapsNextID atomic.Uint32
|
||||
|
||||
func newMap() (uint32, tagMap, func()) {
|
||||
id := mapsNextID.Add(1)
|
||||
|
||||
m := tagMap{}
|
||||
allMaps.Store(id, m)
|
||||
|
||||
return id, m, func() {
|
||||
allMaps.Delete(id)
|
||||
}
|
||||
}
|
||||
|
||||
func doPutTag(id C.ulong, key string, val *C.char) {
|
||||
if key == "" {
|
||||
return
|
||||
}
|
||||
|
||||
r, _ := allMaps.Load(uint32(id))
|
||||
m := r.(tagMap)
|
||||
k := strings.ToLower(key)
|
||||
v := strings.TrimSpace(C.GoString(val))
|
||||
m[k] = append(m[k], v)
|
||||
}
|
||||
|
||||
//export goPutM4AStr
|
||||
func goPutM4AStr(id C.ulong, key *C.char, val *C.char) {
|
||||
k := C.GoString(key)
|
||||
|
||||
// Special for M4A, do not catch keys that have no actual name
|
||||
k = strings.TrimPrefix(k, iTunesKeyPrefix)
|
||||
doPutTag(id, k, val)
|
||||
}
|
||||
|
||||
//export goPutStr
|
||||
func goPutStr(id C.ulong, key *C.char, val *C.char) {
|
||||
doPutTag(id, C.GoString(key), val)
|
||||
}
|
||||
|
||||
//export goPutInt
|
||||
func goPutInt(id C.ulong, key *C.char, val C.int) {
|
||||
valStr := strconv.Itoa(int(val))
|
||||
vp := C.CString(valStr)
|
||||
defer C.free(unsafe.Pointer(vp))
|
||||
goPutStr(id, key, vp)
|
||||
}
|
||||
|
||||
//export goPutLyrics
|
||||
func goPutLyrics(id C.ulong, lang *C.char, val *C.char) {
|
||||
doPutTag(id, "lyrics:"+C.GoString(lang), val)
|
||||
}
|
||||
|
||||
//export goPutLyricLine
|
||||
func goPutLyricLine(id C.ulong, lang *C.char, text *C.char, time C.int) {
|
||||
language := C.GoString(lang)
|
||||
line := C.GoString(text)
|
||||
timeGo := int64(time)
|
||||
|
||||
ms := timeGo % 1000
|
||||
timeGo /= 1000
|
||||
sec := timeGo % 60
|
||||
timeGo /= 60
|
||||
minimum := timeGo % 60
|
||||
formattedLine := fmt.Sprintf("[%02d:%02d.%02d]%s\n", minimum, sec, ms/10, line)
|
||||
|
||||
key := "lyrics:" + language
|
||||
|
||||
r, _ := allMaps.Load(uint32(id))
|
||||
m := r.(tagMap)
|
||||
k := strings.ToLower(key)
|
||||
existing, ok := m[k]
|
||||
if ok {
|
||||
existing[0] += formattedLine
|
||||
} else {
|
||||
m[k] = []string{formattedLine}
|
||||
}
|
||||
}
|
||||
@@ -1,24 +0,0 @@
#define TAGLIB_ERR_PARSE -1
#define TAGLIB_ERR_AUDIO_PROPS -2

#ifdef __cplusplus
extern "C" {
#endif

#ifdef WIN32
#define FILENAME_CHAR_T wchar_t
#else
#define FILENAME_CHAR_T char
#endif

extern void goPutM4AStr(unsigned long id, char *key, char *val);
extern void goPutStr(unsigned long id, char *key, char *val);
extern void goPutInt(unsigned long id, char *key, int val);
extern void goPutLyrics(unsigned long id, char *lang, char *val);
extern void goPutLyricLine(unsigned long id, char *lang, char *text, int time);
int taglib_read(const FILENAME_CHAR_T *filename, unsigned long id);
char* taglib_version();

#ifdef __cplusplus
}
#endif
28 api/parameters/_index.yml Normal file
@@ -0,0 +1,28 @@
pageOffset:
  $ref: './query/pageOffset.yml'
pageLimit:
  $ref: './query/pageLimit.yml'
filterEquals:
  $ref: './query/filterEquals.yml'
filterLessThan:
  $ref: './query/filterLessThan.yml'
filterLessOrEqual:
  $ref: './query/filterLessOrEqual.yml'
filterGreaterThan:
  $ref: './query/filterGreaterThan.yml'
filterGreaterOrEqual:
  $ref: './query/filterGreaterOrEqual.yml'
filterContains:
  $ref: './query/filterContains.yml'
filterStartsWith:
  $ref: './query/filterStartsWith.yml'
filterEndsWith:
  $ref: './query/filterEndsWith.yml'
sort:
  $ref: './query/sort.yml'
include:
  $ref: './query/include.yml'
includeForTracks:
  $ref: './query/includeForTracks.yml'
includeForAlbums:
  $ref: './query/includeForAlbums.yml'
9 api/parameters/query/filterContains.yml Normal file
@@ -0,0 +1,9 @@
name: filter[contains]
in: query
description: 'Filter by any property containing text. Usage: filter[contains]=property:value'
required: false
schema:
  type: array
  items:
    type: string
    pattern: '^\w+:\w+'
9 api/parameters/query/filterEndsWith.yml Normal file
@@ -0,0 +1,9 @@
name: filter[endsWith]
in: query
description: 'Filter by any property that ends with text. Usage: filter[endsWith]=property:value'
required: false
schema:
  type: array
  items:
    type: string
    pattern: '^\w+:\w+'
9 api/parameters/query/filterEquals.yml Normal file
@@ -0,0 +1,9 @@
name: filter[equals]
in: query
description: 'Filter by any property with an exact match. Usage: filter[equals]=property:value'
required: false
schema:
  type: array
  items:
    type: string
    pattern: '^\w+:\w+'
9 api/parameters/query/filterGreaterOrEqual.yml Normal file
@@ -0,0 +1,9 @@
name: filter[greaterOrEqual]
in: query
description: 'Filter by any numeric property greater than or equal to a value. Usage: filter[greaterOrEqual]=property:value'
required: false
schema:
  type: array
  items:
    type: string
    pattern: '^\w+:\d+'
9 api/parameters/query/filterGreaterThan.yml Normal file
@@ -0,0 +1,9 @@
name: filter[greaterThan]
in: query
description: 'Filter by any numeric property greater than a value. Usage: filter[greaterThan]=property:value'
required: false
schema:
  type: array
  items:
    type: string
    pattern: '^\w+:\d+'
9 api/parameters/query/filterLessOrEqual.yml Normal file
@@ -0,0 +1,9 @@
name: filter[lessOrEqual]
in: query
description: 'Filter by any numeric property less than or equal to a value. Usage: filter[lessOrEqual]=property:value'
required: false
schema:
  type: array
  items:
    type: string
    pattern: '^\w+:\d+'
9 api/parameters/query/filterLessThan.yml Normal file
@@ -0,0 +1,9 @@
name: filter[lessThan]
in: query
description: 'Filter by any numeric property less than a value. Usage: filter[lessThan]=property:value'
required: false
schema:
  type: array
  items:
    type: string
    pattern: '^\w+:\d+'
9 api/parameters/query/filterStartsWith.yml Normal file
@@ -0,0 +1,9 @@
name: filter[startsWith]
in: query
description: 'Filter by any property that starts with text. Usage: filter[startsWith]=property:value'
required: false
schema:
  type: array
  items:
    type: string
    pattern: '^\w+:\w+'
6 api/parameters/query/include.yml Normal file
@@ -0,0 +1,6 @@
name: include
in: query
description: Related resources to include in the response, separated by commas
required: false
schema:
  type: string
13 api/parameters/query/includeForAlbum.yml Normal file
@@ -0,0 +1,13 @@
name: include
in: query
description: Related resources to include in the response, separated by commas
required: false
explode: false
schema:
  type: array
  x-go-type: includeSlice
  items:
    type: string
    enum:
      - track
      - artist
12 api/parameters/query/includeForAlbums.yml Normal file
@@ -0,0 +1,12 @@
name: include
in: query
description: Related resources to include in the response, separated by commas
required: false
explode: false
schema:
  type: array
  x-go-type: includeSlice
  items:
    type: string
    enum:
      - artist
13 api/parameters/query/includeForTracks.yml Normal file
@@ -0,0 +1,13 @@
name: include
in: query
description: Related resources to include in the response, separated by commas
required: false
explode: false
schema:
  type: array
  x-go-type: includeSlice
  items:
    type: string
    enum:
      - album
      - artist
9 api/parameters/query/pageLimit.yml Normal file
@@ -0,0 +1,9 @@
name: page[limit]
in: query
description: The number of items per page
required: false
schema:
  type: integer
  format: int32
  minimum: 0
  default: 10
9 api/parameters/query/pageOffset.yml Normal file
@@ -0,0 +1,9 @@
name: page[offset]
in: query
description: The offset for pagination
required: false
schema:
  type: integer
  format: int32
  minimum: 0
  default: 0
6 api/parameters/query/sort.yml Normal file
@@ -0,0 +1,6 @@
name: sort
in: query
description: Sort the results by one or more properties, separated by commas. Prefix the property with '-' for descending order.
required: false
schema:
  type: string
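Taken together, the query parameter files above describe a JSON:API-style query interface: page[offset]/page[limit] for pagination, filter[...]=property:value for property filters, and a comma-separated sort list where a '-' prefix means descending. As a rough sketch only (the base URL, the /api path prefix, and the name/year properties are assumptions, not taken from these spec fragments; only the 4533 port appears elsewhere in this changeset), a combined request against the albums collection might look like:

    curl -g 'http://localhost:4533/api/albums?page[limit]=10&page[offset]=0&filter[contains]=name:love&sort=-year,name'

The -g flag stops curl from treating the square brackets in page[limit] and filter[contains] as glob patterns.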
33 api/resources/album.yml Normal file
@@ -0,0 +1,33 @@
get:
  summary: Retrieve an individual album
  operationId: getAlbum
  parameters:
    - $ref: '../parameters/query/includeForAlbum.yml'
    - name: albumId
      in: path
      description: The unique identifier of the album
      required: true
      schema:
        type: string
  responses:
    '200':
      description: An album object
      content:
        application/vnd.api+json:
          schema:
            type: object
            required: [data]
            properties:
              data:
                $ref: '../schemas/Album.yml'
              included:
                description: Included resources, as requested by the `include` query parameter
                type: array
                items:
                  $ref: '../schemas/IncludedResource.yml'
    '403':
      $ref: '../responses/NotAuthorized.yml'
    '404':
      $ref: '../responses/NotFound.yml'
    '500':
      $ref: '../responses/InternalServerError.yml'
44 api/resources/albums.yml Normal file
@@ -0,0 +1,44 @@
get:
  summary: Retrieve a list of albums
  operationId: getAlbums
  parameters:
    - $ref: '../parameters/query/pageLimit.yml'
    - $ref: '../parameters/query/pageOffset.yml'
    - $ref: '../parameters/query/filterEquals.yml'
    - $ref: '../parameters/query/filterContains.yml'
    - $ref: '../parameters/query/filterLessThan.yml'
    - $ref: '../parameters/query/filterLessOrEqual.yml'
    - $ref: '../parameters/query/filterGreaterThan.yml'
    - $ref: '../parameters/query/filterGreaterOrEqual.yml'
    - $ref: '../parameters/query/filterStartsWith.yml'
    - $ref: '../parameters/query/filterEndsWith.yml'
    - $ref: '../parameters/query/sort.yml'
    - $ref: '../parameters/query/includeForAlbums.yml'
  responses:
    '200':
      description: A list of albums
      content:
        application/vnd.api+json:
          schema:
            type: object
            required: [data, links]
            properties:
              data:
                type: array
                items:
                  $ref: '../schemas/Album.yml'
              links:
                $ref: '../schemas/PaginationLinks.yml'
              meta:
                $ref: '../schemas/PaginationMeta.yml'
              included:
                description: Included resources, as requested by the `include` query parameter
                type: array
                items:
                  $ref: '../schemas/IncludedResource.yml'
    '400':
      $ref: '../responses/BadRequest.yml'
    '403':
      $ref: '../responses/NotAuthorized.yml'
    '500':
      $ref: '../responses/InternalServerError.yml'
28
api/resources/artist.yml
Normal file
28
api/resources/artist.yml
Normal file
@@ -0,0 +1,28 @@
|
||||
get:
|
||||
summary: Retrieve an individual artist
|
||||
operationId: getArtist
|
||||
parameters:
|
||||
- $ref: '../parameters/query/include.yml'
|
||||
- name: artistId
|
||||
in: path
|
||||
description: The unique identifier of the artist
|
||||
required: true
|
||||
schema:
|
||||
type: string
|
||||
responses:
|
||||
'200':
|
||||
description: An artist object
|
||||
content:
|
||||
application/vnd.api+json:
|
||||
schema:
|
||||
type: object
|
||||
required: [data]
|
||||
properties:
|
||||
data:
|
||||
$ref: '../schemas/Artist.yml'
|
||||
'403':
|
||||
$ref: '../responses/NotAuthorized.yml'
|
||||
'404':
|
||||
$ref: '../responses/NotFound.yml'
|
||||
'500':
|
||||
$ref: '../responses/InternalServerError.yml'
|
||||
39
api/resources/artists.yml
Normal file
39
api/resources/artists.yml
Normal file
@@ -0,0 +1,39 @@
|
||||
get:
|
||||
summary: Retrieve a list of artists
|
||||
operationId: getArtists
|
||||
parameters:
|
||||
- $ref: '../parameters/query/pageLimit.yml'
|
||||
- $ref: '../parameters/query/pageOffset.yml'
|
||||
- $ref: '../parameters/query/filterEquals.yml'
|
||||
- $ref: '../parameters/query/filterContains.yml'
|
||||
- $ref: '../parameters/query/filterLessThan.yml'
|
||||
- $ref: '../parameters/query/filterLessOrEqual.yml'
|
||||
- $ref: '../parameters/query/filterGreaterThan.yml'
|
||||
- $ref: '../parameters/query/filterGreaterOrEqual.yml'
|
||||
- $ref: '../parameters/query/filterStartsWith.yml'
|
||||
- $ref: '../parameters/query/filterEndsWith.yml'
|
||||
- $ref: '../parameters/query/sort.yml'
|
||||
- $ref: '../parameters/query/include.yml'
|
||||
responses:
|
||||
'200':
|
||||
description: A list of artists
|
||||
content:
|
||||
application/vnd.api+json:
|
||||
schema:
|
||||
type: object
|
||||
required: [data, links]
|
||||
properties:
|
||||
data:
|
||||
type: array
|
||||
items:
|
||||
$ref: '../schemas/Artist.yml'
|
||||
links:
|
||||
$ref: '../schemas/PaginationLinks.yml'
|
||||
meta:
|
||||
$ref: '../schemas/PaginationMeta.yml'
|
||||
'400':
|
||||
$ref: '../responses/BadRequest.yml'
|
||||
'403':
|
||||
$ref: '../responses/NotAuthorized.yml'
|
||||
'500':
|
||||
$ref: '../responses/InternalServerError.yml'
|
||||
18
api/resources/server.yml
Normal file
18
api/resources/server.yml
Normal file
@@ -0,0 +1,18 @@
|
||||
get:
|
||||
summary: Get server's global info
|
||||
operationId: getServerInfo
|
||||
responses:
|
||||
'200':
|
||||
description: The response’s data key maps to a resource object dictionary representing the server.
|
||||
content:
|
||||
application/vnd.api+json:
|
||||
schema:
|
||||
type: object
|
||||
required: [data]
|
||||
properties:
|
||||
data:
|
||||
$ref: '../schemas/ServerInfo.yml'
|
||||
'403':
|
||||
$ref: '../responses/NotAuthorized.yml'
|
||||
'500':
|
||||
$ref: '../responses/InternalServerError.yml'
|
||||
33  api/resources/track.yml  Normal file
@@ -0,0 +1,33 @@
get:
  summary: Retrieve an individual track
  operationId: getTrack
  parameters:
    - $ref: '../parameters/query/includeForTracks.yml'
    - name: trackId
      in: path
      description: The unique identifier of the track
      required: true
      schema:
        type: string
  responses:
    '200':
      description: A track object
      content:
        application/vnd.api+json:
          schema:
            type: object
            required: [data]
            properties:
              data:
                $ref: '../schemas/Track.yml'
              included:
                description: Included resources, as requested by the `include` query parameter
                type: array
                items:
                  $ref: '../schemas/IncludedResource.yml'
    '403':
      $ref: '../responses/NotAuthorized.yml'
    '404':
      $ref: '../responses/NotFound.yml'
    '500':
      $ref: '../responses/InternalServerError.yml'

44  api/resources/tracks.yml  Normal file
@@ -0,0 +1,44 @@
get:
  summary: Retrieve a list of tracks
  operationId: getTracks
  parameters:
    - $ref: '../parameters/query/pageLimit.yml'
    - $ref: '../parameters/query/pageOffset.yml'
    - $ref: '../parameters/query/filterEquals.yml'
    - $ref: '../parameters/query/filterContains.yml'
    - $ref: '../parameters/query/filterLessThan.yml'
    - $ref: '../parameters/query/filterLessOrEqual.yml'
    - $ref: '../parameters/query/filterGreaterThan.yml'
    - $ref: '../parameters/query/filterGreaterOrEqual.yml'
    - $ref: '../parameters/query/filterStartsWith.yml'
    - $ref: '../parameters/query/filterEndsWith.yml'
    - $ref: '../parameters/query/sort.yml'
    - $ref: '../parameters/query/includeForTracks.yml'
  responses:
    '200':
      description: A list of tracks
      content:
        application/vnd.api+json:
          schema:
            type: object
            required: [data, links]
            properties:
              data:
                type: array
                items:
                  $ref: '../schemas/Track.yml'
              links:
                $ref: '../schemas/PaginationLinks.yml'
              meta:
                $ref: '../schemas/PaginationMeta.yml'
              included:
                description: Included resources, as requested by the `include` query parameter
                type: array
                items:
                  $ref: '../schemas/IncludedResource.yml'
    '400':
      $ref: '../responses/BadRequest.yml'
    '403':
      $ref: '../responses/NotAuthorized.yml'
    '500':
      $ref: '../responses/InternalServerError.yml'

5  api/responses/BadRequest.yml  Normal file
@@ -0,0 +1,5 @@
description: Bad Request
content:
  application/vnd.api+json:
    schema:
      $ref: '../schemas/ErrorList.yml'

5  api/responses/InternalServerError.yml  Normal file
@@ -0,0 +1,5 @@
description: Internal Server Error
content:
  application/vnd.api+json:
    schema:
      $ref: '../schemas/ErrorList.yml'

5  api/responses/NotAuthorized.yml  Normal file
@@ -0,0 +1,5 @@
description: Not Authorized
content:
  application/vnd.api+json:
    schema:
      $ref: '../schemas/ErrorList.yml'

5  api/responses/NotFound.yml  Normal file
@@ -0,0 +1,5 @@
description: Not Found
content:
  application/vnd.api+json:
    schema:
      $ref: '../schemas/ErrorList.yml'

8  api/responses/_index.yml  Normal file
@@ -0,0 +1,8 @@
NotFound:
  $ref: './NotFound.yml'
NotAuthorized:
  $ref: './NotAuthorized.yml'
BadRequest:
  $ref: './BadRequest.yml'
InternalServerError:
  $ref: './InternalServerError.yml'
20  api/schemas/Album.yml  Normal file
@@ -0,0 +1,20 @@
allOf:
  - $ref: './ResourceObject.yml'
  - type: object
    properties:
      attributes:
        $ref: './AlbumAttributes.yml'
      relationships:
        type: object
        properties:
          artists:
            type: array
            items:
              $ref: './AlbumArtistRelationship.yml'
          tracks:
            type: array
            items:
              $ref: './AlbumTrackRelationship.yml'
        required:
          - artists
          - tracks

9  api/schemas/AlbumArtistRelationship.yml  Normal file
@@ -0,0 +1,9 @@
type: object
properties:
  meta:
    $ref: './ArtistMetaObject.yml'
  data:
    $ref: './ResourceObject.yml'
required:
  - meta
  - data

23  api/schemas/AlbumAttributes.yml  Normal file
@@ -0,0 +1,23 @@
type: object
properties:
  title:
    type: string
    description: The title of the album
  artist:
    type: string
    description: The artist of the album
  releaseDate:
    type: string
    description: The release date of the album
  tracktotal:
    type: integer
    description: The number of tracks on the album
  disctotal:
    type: integer
    description: The number of discs in the album
  genre:
    type: string
    description: The genre of the album
required:
  - title
  - artist

6  api/schemas/AlbumTrackRelationship.yml  Normal file
@@ -0,0 +1,6 @@
type: object
properties:
  data:
    $ref: './ResourceObject.yml'
required:
  - data

27  api/schemas/Artist.yml  Normal file
@@ -0,0 +1,27 @@
allOf:
  - $ref: './ResourceObject.yml'
  - type: object
    properties:
      attributes:
        $ref: './ArtistAttributes.yml'
      relationships:
        type: object
        properties:
          tracks:
            type: object
            properties:
              data:
                type: array
                items:
                  $ref: './ArtistTrackRelationship.yml'
            required:
              - data
          albums:
            type: object
            properties:
              data:
                type: array
                items:
                  $ref: './ArtistAlbumRelationship.yml'
            required:
              - data

9  api/schemas/ArtistAlbumRelationship.yml  Normal file
@@ -0,0 +1,9 @@
type: object
properties:
  meta:
    $ref: './ArtistMetaObject.yml'
  data:
    $ref: './ResourceObject.yml'
required:
  - meta
  - data

10  api/schemas/ArtistAttributes.yml  Normal file
@@ -0,0 +1,10 @@
type: object
properties:
  name:
    type: string
    description: The name of the artist
  bio:
    type: string
    description: A short biography of the artist
required:
  - name

6  api/schemas/ArtistMetaObject.yml  Normal file
@@ -0,0 +1,6 @@
type: object
properties:
  role:
    $ref: './ArtistRole.yml'
required:
  - role

5  api/schemas/ArtistRole.yml  Normal file
@@ -0,0 +1,5 @@
type: string
enum:
  - artist
  - albumArtist
description: The role of an artist in a track or album

14  api/schemas/ArtistTrackRelationship.yml  Normal file
@@ -0,0 +1,14 @@
type: object
properties:
  meta:
    type: object
    properties:
      role:
        $ref: './ArtistRole.yml'
    required:
      - role
  data:
    $ref: './ResourceObject.yml'
required:
  - meta
  - data

7  api/schemas/ErrorList.yml  Normal file
@@ -0,0 +1,7 @@
type: object
required: [errors]
properties:
  errors:
    type: array
    items:
      $ref: './ErrorObject.yml'

10  api/schemas/ErrorObject.yml  Normal file
@@ -0,0 +1,10 @@
type: object
properties:
  id:
    type: string
  status:
    type: string
  title:
    type: string
  detail:
    type: string

10  api/schemas/IncludedResource.yml  Normal file
@@ -0,0 +1,10 @@
oneOf:
  - $ref: './Track.yml'
  - $ref: './Album.yml'
  - $ref: './Artist.yml'
discriminator:
  propertyName: type
  mapping:
    track: './Track.yml'
    album: './Album.yml'
    artist: './Artist.yml'
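IncludedResource.yml keys the polymorphic entries of an `included` array on the `type` property. A minimal Go sketch (an assumption for illustration, not code from the diff) of dispatching on that discriminator using only the `id` and `type` fields that ResourceObject.yml requires:

// Minimal sketch: peeking at the ResourceObject fields (id, type) to route each
// entry of an `included` array to the right schema. The type values mirror
// ResourceType.yml; the struct below is illustrative, not generated from the spec.
package main

import (
    "encoding/json"
    "fmt"
)

type resourceObject struct {
    ID   string `json:"id"`
    Type string `json:"type"` // enum in ResourceType.yml: album, artist, track
}

func dispatch(included []json.RawMessage) {
    for _, raw := range included {
        var r resourceObject
        if err := json.Unmarshal(raw, &r); err != nil {
            fmt.Println("skipping malformed resource:", err)
            continue
        }
        switch r.Type {
        case "track", "album", "artist":
            fmt.Printf("included %s %s\n", r.Type, r.ID)
        default:
            fmt.Printf("unknown resource type %q\n", r.Type)
        }
    }
}

func main() {
    dispatch([]json.RawMessage{
        json.RawMessage(`{"id":"1","type":"artist"}`),
        json.RawMessage(`{"id":"2","type":"track"}`),
    })
}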
14  api/schemas/PaginationLinks.yml  Normal file
@@ -0,0 +1,14 @@
type: object
properties:
  first:
    type: string
    format: uri
  prev:
    type: string
    format: uri
  next:
    type: string
    format: uri
  last:
    type: string
    format: uri

14  api/schemas/PaginationMeta.yml  Normal file
@@ -0,0 +1,14 @@
type: object
properties:
  currentPage:
    type: integer
    format: int32
    description: The current page in the collection
  totalPages:
    type: integer
    format: int32
    description: The total number of pages in the collection
  totalItems:
    type: integer
    format: int32
    description: The total number of items in the collection

18  api/schemas/ResourceList.yml  Normal file
@@ -0,0 +1,18 @@
type: object
properties:
  data:
    oneOf:
      - $ref: './Track.yml'
      - $ref: './Album.yml'
      - $ref: './Artist.yml'
      - type: array
        items:
          $ref: './ResourceObject.yml'
  included:
    type: array
    items:
      $ref: './IncludedResource.yml'
  links:
    $ref: './PaginationLinks.yml'
  meta:
    $ref: './PaginationMeta.yml'

8  api/schemas/ResourceObject.yml  Normal file
@@ -0,0 +1,8 @@
type: object
required: [id, type]
properties:
  id:
    type: string
    description: The unique identifier for the resource
  type:
    $ref: './ResourceType.yml'

6  api/schemas/ResourceType.yml  Normal file
@@ -0,0 +1,6 @@
type: string
description: The type of the resource
enum:
  - album
  - artist
  - track

24  api/schemas/ServerInfo.yml  Normal file
@@ -0,0 +1,24 @@
type: object
required: [server, serverVersion, authRequired, features]
properties:
  server:
    type: string
    description: The name of the server software.
    example: "navidrome"
  serverVersion:
    type: string
    description: The version number of the server.
    example: "0.60.0"
  authRequired:
    type: boolean
    description: Whether the user has access to the server.
    example: true
  features:
    type: array
    description: A list of optional features the server supports.
    items:
      type: string
      enum:
        - albums
        - artists
        - images

8  api/schemas/Track.yml  Normal file
@@ -0,0 +1,8 @@
allOf:
  - $ref: './ResourceObject.yml'
  - type: object
    properties:
      attributes:
        $ref: './TrackAttributes.yml'
      relationships:
        $ref: './TrackRelationships.yml'

9  api/schemas/TrackArtistRelationship.yml  Normal file
@@ -0,0 +1,9 @@
type: object
properties:
  meta:
    $ref: './ArtistMetaObject.yml'
  data:
    $ref: './ResourceObject.yml'
required:
  - meta
  - data

55  api/schemas/TrackAttributes.yml  Normal file
@@ -0,0 +1,55 @@
type: object
required: [title, artist, album, albumartist, track, mimetype, duration, channels, bitrate, size]
properties:
  title:
    type: string
    description: The title of the track
  artist: # TODO: Remove
    type: string
    description: The name of the artist who performed the track
  albumartist: # TODO: Remove
    type: string
    description: The primary artist of the album the track belongs to.
  album: # TODO Remove
    type: string
    description: The name of the album the track belongs to
  genre: # TODO Remove
    type: string
    description: The genre of the track.
  track:
    type: integer
    description: The track number within the album.
  disc:
    type: integer
    description: The disc number within a multi-disc album.
  year:
    type: integer
    description: The release year of the track or album.
  bpm:
    type: integer
    description: The beats per minute (BPM) of the track.
  recording-mbid:
    type: string
    description: The MusicBrainz identifier for the recording of the track.
  track-mbid:
    type: string
    description: The MusicBrainz identifier for the track.
  comments:
    type: string
    description: Any additional comments or notes about the track.
  mimetype:
    type: string
    description: The MIME type of the audio file.
  duration:
    type: number
    format: float
    description: The duration of the track in seconds
  channels:
    type: integer
    description: The number of audio channels in the track.
  bitrate:
    type: integer
    description: The bitrate of the audio file in kilobits per second (kbps).
  size:
    type: integer
    description: The size of the audio file in bytes.
12  api/schemas/TrackRelationships.yml  Normal file
@@ -0,0 +1,12 @@
type: object
properties:
  artists:
    type: array
    items:
      $ref: './TrackArtistRelationship.yml'
  albums:
    type: array
    items:
      $ref: './AlbumTrackRelationship.yml'
required:
  - artists

46  api/schemas/_index.yml  Normal file
@@ -0,0 +1,46 @@
ServerInfo:
  $ref: './ServerInfo.yml'
ResourceObject:
  $ref: './ResourceObject.yml'
ResourceType:
  $ref: './ResourceType.yml'
ResourceList:
  $ref: './ResourceList.yml'
IncludedResource:
  $ref: './IncludedResource.yml'
Track:
  $ref: './Track.yml'
TrackAttributes:
  $ref: './TrackAttributes.yml'
TrackRelationships:
  $ref: './TrackRelationships.yml'
TrackArtistRelationship:
  $ref: './TrackArtistRelationship.yml'
ArtistRole:
  $ref: './ArtistRole.yml'
Artist:
  $ref: './Artist.yml'
ArtistAttributes:
  $ref: './ArtistAttributes.yml'
ArtistAlbumRelationship:
  $ref: './ArtistAlbumRelationship.yml'
ArtistTrackRelationship:
  $ref: './ArtistTrackRelationship.yml'
ArtistMetaObject:
  $ref: './ArtistMetaObject.yml'
Album:
  $ref: './Album.yml'
AlbumAttributes:
  $ref: './AlbumAttributes.yml'
AlbumArtistRelationship:
  $ref: './AlbumArtistRelationship.yml'
AlbumTrackRelationship:
  $ref: './AlbumTrackRelationship.yml'
PaginationLinks:
  $ref: './PaginationLinks.yml'
PaginationMeta:
  $ref: './PaginationMeta.yml'
ErrorList:
  $ref: './ErrorList.yml'
ErrorObject:
  $ref: './ErrorObject.yml'

52  api/spec.yml  Normal file
@@ -0,0 +1,52 @@
openapi: 3.0.0
info:
  version: 0.2.0
  title: Navidrome API
  description: >
    This spec describes the Navidrome API, which allows users to browse and manage their music library via a JSON:API
    based interface. The API provides endpoints for albums, tracks, artists, playlists and images, along with their
    relationships. Clients can retrieve information about the items in the library, filter and sort results, and
    perform actions such as creating and deleting playlists. With this API, developers can build music apps and
    services that integrate with Navidrome music server, providing a seamless experience for users to access and
    manage their music collection.
  contact:
    name: Navidrome
    url: https://navidrome.org
  license:
    name: GNU General Public License v3.0
    url: https://github.com/navidrome/navidrome/blob/master/LICENSE

servers:
  - url: /api/v2

security:
  - bearerAuth: []

paths:
  /server:
    $ref: './resources/server.yml'
  /tracks:
    $ref: './resources/tracks.yml'
  /tracks/{trackId}:
    $ref: './resources/track.yml'
  /artists:
    $ref: './resources/artists.yml'
  /artists/{artistId}:
    $ref: './resources/artist.yml'
  /albums:
    $ref: './resources/albums.yml'
  /albums/{albumId}:
    $ref: './resources/album.yml'

components:
  parameters:
    $ref: './parameters/_index.yml'
  schemas:
    $ref: './schemas/_index.yml'
  responses:
    $ref: './responses/_index.yml'
  securitySchemes:
    bearerAuth:
      type: http
      scheme: bearer
      bearerFormat: JWT
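With spec.yml tying the pieces together, a call against this API looks like any other JSON:API request. A minimal Go sketch (the base URL, port and token are placeholders, not values from the diff) of hitting the /server endpoint and reading the ServerInfo attributes listed above:

// Minimal sketch: GET /api/v2/server using the bearerAuth scheme from spec.yml.
// Per resources/server.yml, the response's `data` key holds a ServerInfo object.
package main

import (
    "encoding/json"
    "fmt"
    "net/http"
)

type serverInfo struct {
    Server        string   `json:"server"`
    ServerVersion string   `json:"serverVersion"`
    AuthRequired  bool     `json:"authRequired"`
    Features      []string `json:"features"`
}

func main() {
    // Placeholder host/port and token; adjust to your own Navidrome instance.
    req, _ := http.NewRequest("GET", "http://localhost:4533/api/v2/server", nil)
    req.Header.Set("Authorization", "Bearer <token>") // bearerFormat: JWT
    req.Header.Set("Accept", "application/vnd.api+json")

    resp, err := http.DefaultClient.Do(req)
    if err != nil {
        panic(err)
    }
    defer resp.Body.Close()

    var body struct {
        Data serverInfo `json:"data"`
    }
    if err := json.NewDecoder(resp.Body).Decode(&body); err != nil {
        panic(err)
    }
    fmt.Printf("%s %s (features: %v)\n", body.Data.Server, body.Data.ServerVersion, body.Data.Features)
}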
186  cmd/backup.go
@@ -1,186 +0,0 @@
package cmd

import (
    "context"
    "fmt"
    "os"
    "strings"
    "time"

    "github.com/navidrome/navidrome/conf"
    "github.com/navidrome/navidrome/db"
    "github.com/navidrome/navidrome/log"
    "github.com/spf13/cobra"
)

var (
    backupCount int
    backupDir   string
    force       bool
    restorePath string
)

func init() {
    rootCmd.AddCommand(backupRoot)

    backupCmd.Flags().StringVarP(&backupDir, "backup-dir", "d", "", "directory to manually make backup")
    backupRoot.AddCommand(backupCmd)

    pruneCmd.Flags().StringVarP(&backupDir, "backup-dir", "d", "", "directory holding Navidrome backups")
    pruneCmd.Flags().IntVarP(&backupCount, "keep-count", "k", -1, "specify the number of backups to keep. 0 remove ALL backups, and negative values mean to use the default from configuration")
    pruneCmd.Flags().BoolVarP(&force, "force", "f", false, "bypass warning when backup count is zero")
    backupRoot.AddCommand(pruneCmd)

    restoreCommand.Flags().StringVarP(&restorePath, "backup-file", "b", "", "path of backup database to restore")
    restoreCommand.Flags().BoolVarP(&force, "force", "f", false, "bypass restore warning")
    _ = restoreCommand.MarkFlagRequired("backup-file")
    backupRoot.AddCommand(restoreCommand)
}

var (
    backupRoot = &cobra.Command{
        Use:     "backup",
        Aliases: []string{"bkp"},
        Short:   "Create, restore and prune database backups",
        Long:    "Create, restore and prune database backups",
    }

    backupCmd = &cobra.Command{
        Use:   "create",
        Short: "Create a backup database",
        Long:  "Manually backup Navidrome database. This will ignore BackupCount",
        Run: func(cmd *cobra.Command, _ []string) {
            runBackup(cmd.Context())
        },
    }

    pruneCmd = &cobra.Command{
        Use:   "prune",
        Short: "Prune database backups",
        Long:  "Manually prune database backups according to backup rules",
        Run: func(cmd *cobra.Command, _ []string) {
            runPrune(cmd.Context())
        },
    }

    restoreCommand = &cobra.Command{
        Use:   "restore",
        Short: "Restore Navidrome database",
        Long:  "Restore Navidrome database from a backup. This must be done offline",
        Run: func(cmd *cobra.Command, _ []string) {
            runRestore(cmd.Context())
        },
    }
)

func runBackup(ctx context.Context) {
    if backupDir != "" {
        conf.Server.Backup.Path = backupDir
    }

    idx := strings.LastIndex(conf.Server.DbPath, "?")
    var path string

    if idx == -1 {
        path = conf.Server.DbPath
    } else {
        path = conf.Server.DbPath[:idx]
    }

    if _, err := os.Stat(path); os.IsNotExist(err) {
        log.Fatal("No existing database", "path", path)
        return
    }

    start := time.Now()
    path, err := db.Backup(ctx)
    if err != nil {
        log.Fatal("Error backing up database", "backup path", conf.Server.BasePath, err)
    }

    elapsed := time.Since(start)
    log.Info("Backup complete", "elapsed", elapsed, "path", path)
}

func runPrune(ctx context.Context) {
    if backupDir != "" {
        conf.Server.Backup.Path = backupDir
    }

    if backupCount != -1 {
        conf.Server.Backup.Count = backupCount
    }

    if conf.Server.Backup.Count == 0 && !force {
        fmt.Println("Warning: pruning ALL backups")
        fmt.Printf("Please enter YES (all caps) to continue: ")
        var input string
        _, err := fmt.Scanln(&input)

        if input != "YES" || err != nil {
            log.Warn("Prune cancelled")
            return
        }
    }

    idx := strings.LastIndex(conf.Server.DbPath, "?")
    var path string

    if idx == -1 {
        path = conf.Server.DbPath
    } else {
        path = conf.Server.DbPath[:idx]
    }

    if _, err := os.Stat(path); os.IsNotExist(err) {
        log.Fatal("No existing database", "path", path)
        return
    }

    start := time.Now()
    count, err := db.Prune(ctx)
    if err != nil {
        log.Fatal("Error pruning up database", "backup path", conf.Server.BasePath, err)
    }

    elapsed := time.Since(start)

    log.Info("Prune complete", "elapsed", elapsed, "successfully pruned", count)
}

func runRestore(ctx context.Context) {
    idx := strings.LastIndex(conf.Server.DbPath, "?")
    var path string

    if idx == -1 {
        path = conf.Server.DbPath
    } else {
        path = conf.Server.DbPath[:idx]
    }

    if _, err := os.Stat(path); os.IsNotExist(err) {
        log.Fatal("No existing database", "path", path)
        return
    }

    if !force {
        fmt.Println("Warning: restoring the Navidrome database should only be done offline, especially if your backup is very old.")
        fmt.Printf("Please enter YES (all caps) to continue: ")
        var input string
        _, err := fmt.Scanln(&input)

        if input != "YES" || err != nil {
            log.Warn("Restore cancelled")
            return
        }
    }

    start := time.Now()
    err := db.Restore(ctx, restorePath)
    if err != nil {
        log.Fatal("Error restoring database", "backup path", conf.Server.BasePath, err)
    }

    elapsed := time.Since(start)
    log.Info("Restore complete", "elapsed", elapsed)
}
@@ -1,79 +0,0 @@
package cmd

import (
    "encoding/json"
    "fmt"
    "strings"

    "github.com/navidrome/navidrome/core"
    "github.com/navidrome/navidrome/log"
    "github.com/navidrome/navidrome/model"
    "github.com/pelletier/go-toml/v2"
    "github.com/spf13/cobra"
    "gopkg.in/yaml.v3"
)

var (
    format string
)

func init() {
    inspectCmd.Flags().StringVarP(&format, "format", "f", "jsonindent", "output format (pretty, toml, yaml, json, jsonindent)")
    rootCmd.AddCommand(inspectCmd)
}

var inspectCmd = &cobra.Command{
    Use:   "inspect [files to inspect]",
    Short: "Inspect tags",
    Long:  "Show file tags as seen by Navidrome",
    Args:  cobra.MinimumNArgs(1),
    Run: func(cmd *cobra.Command, args []string) {
        runInspector(args)
    },
}

var marshalers = map[string]func(interface{}) ([]byte, error){
    "pretty": prettyMarshal,
    "toml":   toml.Marshal,
    "yaml":   yaml.Marshal,
    "json":   json.Marshal,
    "jsonindent": func(v interface{}) ([]byte, error) {
        return json.MarshalIndent(v, "", " ")
    },
}

func prettyMarshal(v interface{}) ([]byte, error) {
    out := v.([]core.InspectOutput)
    var res strings.Builder
    for i := range out {
        res.WriteString(fmt.Sprintf("====================\nFile: %s\n\n", out[i].File))
        t, _ := toml.Marshal(out[i].RawTags)
        res.WriteString(fmt.Sprintf("Raw tags:\n%s\n\n", t))
        t, _ = toml.Marshal(out[i].MappedTags)
        res.WriteString(fmt.Sprintf("Mapped tags:\n%s\n\n", t))
    }
    return []byte(res.String()), nil
}

func runInspector(args []string) {
    marshal := marshalers[format]
    if marshal == nil {
        log.Fatal("Invalid format", "format", format)
    }
    var out []core.InspectOutput
    for _, filePath := range args {
        if !model.IsAudioFile(filePath) {
            log.Warn("Not an audio file", "file", filePath)
            continue
        }
        output, err := core.Inspect(filePath, 1, "")
        if err != nil {
            log.Warn("Unable to process file", "file", filePath, "error", err)
            continue
        }

        out = append(out, *output)
    }
    data, _ := marshal(out)
    fmt.Println(string(data))
}
109  cmd/pls.go
@@ -2,12 +2,8 @@ package cmd

import (
    "context"
    "encoding/csv"
    "encoding/json"
    "errors"
    "fmt"
    "os"
    "strconv"

    "github.com/Masterminds/squirrel"
    "github.com/navidrome/navidrome/core/auth"
@@ -19,57 +15,31 @@ import (
)

var (
    playlistID string
    outputFile string
    userID string
    outputFormat string
    playlistID string
    outputFile string
)

type displayPlaylist struct {
    Id        string `json:"id"`
    Name      string `json:"name"`
    OwnerName string `json:"ownerName"`
    OwnerId   string `json:"ownerId"`
    Public    bool   `json:"public"`
}

type displayPlaylists []displayPlaylist

func init() {
    plsCmd.Flags().StringVarP(&playlistID, "playlist", "p", "", "playlist name or ID")
    plsCmd.Flags().StringVarP(&outputFile, "output", "o", "", "output file (default stdout)")
    _ = plsCmd.MarkFlagRequired("playlist")
    rootCmd.AddCommand(plsCmd)

    listCommand.Flags().StringVarP(&userID, "user", "u", "", "username or ID")
    listCommand.Flags().StringVarP(&outputFormat, "format", "f", "csv", "output format [supported values: csv, json]")
    plsCmd.AddCommand(listCommand)
}

var (
    plsCmd = &cobra.Command{
        Use:   "pls",
        Short: "Export playlists",
        Long:  "Export Navidrome playlists to M3U files",
        Run: func(cmd *cobra.Command, args []string) {
            runExporter()
        },
    }

    listCommand = &cobra.Command{
        Use:   "list",
        Short: "List playlists",
        Run: func(cmd *cobra.Command, args []string) {
            runList()
        },
    }
)
var plsCmd = &cobra.Command{
    Use:   "pls",
    Short: "Export playlists",
    Long:  "Export Navidrome playlists to M3U files",
    Run: func(cmd *cobra.Command, args []string) {
        runExporter()
    },
}

func runExporter() {
    sqlDB := db.Db()
    ds := persistence.New(sqlDB)
    ctx := auth.WithAdminUser(context.Background(), ds)
    playlist, err := ds.Playlist(ctx).GetWithTracks(playlistID, true, false)
    playlist, err := ds.Playlist(ctx).GetWithTracks(playlistID, true)
    if err != nil && !errors.Is(err, model.ErrNotFound) {
        log.Fatal("Error retrieving playlist", "name", playlistID, err)
    }
@@ -79,7 +49,7 @@ func runExporter() {
        log.Fatal("Error retrieving playlist", "name", playlistID, err)
    }
    if len(playlists) > 0 {
        playlist, err = ds.Playlist(ctx).GetWithTracks(playlists[0].ID, true, false)
        playlist, err = ds.Playlist(ctx).GetWithTracks(playlists[0].ID, true)
        if err != nil {
            log.Fatal("Error retrieving playlist", "name", playlistID, err)
        }
@@ -99,58 +69,3 @@ func runExporter() {
        log.Fatal("Error writing to the output file", "file", outputFile, err)
    }
}

func runList() {
    if outputFormat != "csv" && outputFormat != "json" {
        log.Fatal("Invalid output format. Must be one of csv, json", "format", outputFormat)
    }

    sqlDB := db.Db()
    ds := persistence.New(sqlDB)
    ctx := auth.WithAdminUser(context.Background(), ds)

    options := model.QueryOptions{Sort: "owner_name"}

    if userID != "" {
        user, err := ds.User(ctx).FindByUsername(userID)

        if err != nil && !errors.Is(err, model.ErrNotFound) {
            log.Fatal("Error retrieving user by name", "name", userID, err)
        }

        if errors.Is(err, model.ErrNotFound) {
            user, err = ds.User(ctx).Get(userID)
            if err != nil {
                log.Fatal("Error retrieving user by id", "id", userID, err)
            }
        }

        options.Filters = squirrel.Eq{"owner_id": user.ID}
    }

    playlists, err := ds.Playlist(ctx).GetAll(options)
    if err != nil {
        log.Fatal(ctx, "Failed to retrieve playlists", err)
    }

    if outputFormat == "csv" {
        w := csv.NewWriter(os.Stdout)
        _ = w.Write([]string{"playlist id", "playlist name", "owner id", "owner name", "public"})
        for _, playlist := range playlists {
            _ = w.Write([]string{playlist.ID, playlist.Name, playlist.OwnerID, playlist.OwnerName, strconv.FormatBool(playlist.Public)})
        }
        w.Flush()
    } else {
        display := make(displayPlaylists, len(playlists))
        for idx, playlist := range playlists {
            display[idx].Id = playlist.ID
            display[idx].Name = playlist.Name
            display[idx].OwnerId = playlist.OwnerID
            display[idx].OwnerName = playlist.OwnerName
            display[idx].Public = playlist.Public
        }

        j, _ := json.Marshal(display)
        fmt.Printf("%s\n", j)
    }
}

271  cmd/root.go
@@ -2,28 +2,30 @@ package cmd

import (
    "context"
    "errors"
    "fmt"
    "os"
    "os/signal"
    "strings"
    "syscall"
    "time"

    "github.com/go-chi/chi/v5/middleware"
    _ "github.com/navidrome/navidrome/adapters/taglib"
    "github.com/navidrome/navidrome/conf"
    "github.com/navidrome/navidrome/consts"
    "github.com/navidrome/navidrome/core"
    "github.com/navidrome/navidrome/db"
    "github.com/navidrome/navidrome/log"
    "github.com/navidrome/navidrome/model"
    "github.com/navidrome/navidrome/resources"
    "github.com/navidrome/navidrome/scanner"
    "github.com/navidrome/navidrome/scheduler"
    "github.com/navidrome/navidrome/server/backgrounds"
    "github.com/prometheus/client_golang/prometheus/promhttp"
    "golang.org/x/sync/errgroup"

    "github.com/spf13/cobra"
    "github.com/spf13/viper"
    "golang.org/x/sync/errgroup"
)

var interrupted = errors.New("service was interrupted")

var (
    cfgFile  string
    noBanner bool
@@ -37,23 +39,17 @@ Complete documentation is available at https://www.navidrome.org/docs`,
            preRun()
        },
        Run: func(cmd *cobra.Command, args []string) {
            runNavidrome(cmd.Context())
        },
        PostRun: func(cmd *cobra.Command, args []string) {
            postRun()
            runNavidrome()
        },
        Version: consts.Version,
    }
)

// Execute runs the root cobra command, which will start the Navidrome server by calling the runNavidrome function.
func Execute() {
    ctx, cancel := mainContext(context.Background())
    defer cancel()

    rootCmd.SetVersionTemplate(`{{println .Version}}`)
    if err := rootCmd.ExecuteContext(ctx); err != nil {
        log.Fatal(err)
    if err := rootCmd.Execute(); err != nil {
        fmt.Println(err)
        os.Exit(1)
    }
}

@@ -61,56 +57,34 @@ func preRun() {
    if !noBanner {
        println(resources.Banner())
    }
    conf.Load(noBanner)
    conf.Load()
}

func postRun() {
    log.Info("Navidrome stopped, bye.")
}
func runNavidrome() {
    db.Init()
    defer func() {
        if err := db.Close(); err != nil {
            log.Error("Error closing DB", err)
        }
        log.Info("Navidrome stopped, bye.")
    }()

// runNavidrome is the main entry point for the Navidrome server. It starts all the services and blocks.
// If any of the services returns an error, it will log it and exit. If the process receives a signal to exit,
// it will cancel the context and exit gracefully.
func runNavidrome(ctx context.Context) {
    defer db.Init(ctx)()

    g, ctx := errgroup.WithContext(ctx)
    g, ctx := errgroup.WithContext(context.Background())
    g.Go(startServer(ctx))
    g.Go(startSignaller(ctx))
    g.Go(startSignaler(ctx))
    g.Go(startScheduler(ctx))
    g.Go(startPlaybackServer(ctx))
    g.Go(schedulePeriodicBackup(ctx))
    g.Go(startInsightsCollector(ctx))
    g.Go(scheduleDBOptimizer(ctx))
    if conf.Server.Scanner.Enabled {
        g.Go(runInitialScan(ctx))
        g.Go(startScanWatcher(ctx))
        g.Go(schedulePeriodicScan(ctx))
    } else {
        log.Warn(ctx, "Automatic Scanning is DISABLED")
    }
    g.Go(schedulePeriodicScan(ctx))

    if err := g.Wait(); err != nil {
    if err := g.Wait(); err != nil && !errors.Is(err, interrupted) {
        log.Error("Fatal error in Navidrome. Aborting", err)
    }
}

// mainContext returns a context that is cancelled when the process receives a signal to exit.
func mainContext(ctx context.Context) (context.Context, context.CancelFunc) {
    return signal.NotifyContext(ctx,
        os.Interrupt,
        syscall.SIGHUP,
        syscall.SIGTERM,
        syscall.SIGABRT,
    )
}

// startServer starts the Navidrome web server, adding all the necessary routers.
func startServer(ctx context.Context) func() error {
    return func() error {
        a := CreateServer()
        a := CreateServer(conf.Server.MusicFolder)
        a.MountRouter("Native API", consts.URLPathNativeAPI, CreateNativeAPIRouter())
        a.MountRouter("Subsonic API", consts.URLPathSubsonicAPI, CreateSubsonicAPIRouter(ctx))
        a.MountRouter("Subsonic API", consts.URLPathSubsonicAPI, CreateSubsonicAPIRouter())
        a.MountRouter("Public Endpoints", consts.URLPathPublic, CreatePublicRouter())
        if conf.Server.LastFM.Enabled {
            a.MountRouter("LastFM Auth", consts.URLPathNativeAPI+"/lastfm", CreateLastFMRouter())
@@ -119,212 +93,60 @@ func startServer(ctx context.Context) func() error {
            a.MountRouter("ListenBrainz Auth", consts.URLPathNativeAPI+"/listenbrainz", CreateListenBrainzRouter())
        }
        if conf.Server.Prometheus.Enabled {
            p := CreatePrometheus()
            // blocking call because takes <100ms but useful if fails
            p.WriteInitialMetrics(ctx)
            a.MountRouter("Prometheus metrics", conf.Server.Prometheus.MetricsPath, p.GetHandler())
            // blocking call because takes <1ms but useful if fails
            core.WriteInitialMetrics()
            a.MountRouter("Prometheus metrics", conf.Server.Prometheus.MetricsPath, promhttp.Handler())
        }
        if conf.Server.DevEnableProfiler {
            a.MountRouter("Profiling", "/debug", middleware.Profiler())
        }
        if strings.HasPrefix(conf.Server.UILoginBackgroundURL, "/") {
            a.MountRouter("Background images", conf.Server.UILoginBackgroundURL, backgrounds.NewHandler())
            a.MountRouter("Background images", consts.DefaultUILoginBackgroundURL, backgrounds.NewHandler())
        }
        a.MountRouter("New Native API", consts.URLPathAPI, CreateNewNativeAPIRouter())
        return a.Run(ctx, conf.Server.Address, conf.Server.Port, conf.Server.TLSCert, conf.Server.TLSKey)
    }
}

// schedulePeriodicScan schedules a periodic scan of the music library, if configured.
func schedulePeriodicScan(ctx context.Context) func() error {
    return func() error {
        schedule := conf.Server.Scanner.Schedule
        schedule := conf.Server.ScanSchedule
        if schedule == "" {
            log.Info(ctx, "Periodic scan is DISABLED")
            log.Warn("Periodic scan is DISABLED")
            return nil
        }

        s := CreateScanner(ctx)
        scanner := GetScanner()
        schedulerInstance := scheduler.GetInstance()

        log.Info("Scheduling periodic scan", "schedule", schedule)
        err := schedulerInstance.Add(schedule, func() {
            _, err := s.ScanAll(ctx, false)
            if err != nil {
                log.Error(ctx, "Error executing periodic scan", err)
            }
            _ = scanner.RescanAll(ctx, false)
        })
        if err != nil {
            log.Error(ctx, "Error scheduling periodic scan", err)
            log.Error("Error scheduling periodic scan", err)
        }
        return nil
    }
}

func pidHashChanged(ds model.DataStore) (bool, error) {
    pidAlbum, err := ds.Property(context.Background()).DefaultGet(consts.PIDAlbumKey, "")
    if err != nil {
        return false, err
    }
    pidTrack, err := ds.Property(context.Background()).DefaultGet(consts.PIDTrackKey, "")
    if err != nil {
        return false, err
    }
    return !strings.EqualFold(pidAlbum, conf.Server.PID.Album) || !strings.EqualFold(pidTrack, conf.Server.PID.Track), nil
}

func runInitialScan(ctx context.Context) func() error {
    return func() error {
        ds := CreateDataStore()
        fullScanRequired, err := ds.Property(ctx).DefaultGet(consts.FullScanAfterMigrationFlagKey, "0")
        if err != nil {
            return err
        }
        inProgress, err := ds.Library(ctx).ScanInProgress()
        if err != nil {
            return err
        }
        pidHasChanged, err := pidHashChanged(ds)
        if err != nil {
            return err
        }
        scanNeeded := conf.Server.Scanner.ScanOnStartup || inProgress || fullScanRequired == "1" || pidHasChanged
        time.Sleep(2 * time.Second) // Wait 2 seconds before the initial scan
        if scanNeeded {
            scanner := CreateScanner(ctx)
            switch {
            case fullScanRequired == "1":
                log.Warn(ctx, "Full scan required after migration")
                _ = ds.Property(ctx).Delete(consts.FullScanAfterMigrationFlagKey)
            case pidHasChanged:
                log.Warn(ctx, "PID config changed, performing full scan")
                fullScanRequired = "1"
            case inProgress:
                log.Warn(ctx, "Resuming interrupted scan")
            default:
                log.Info("Executing initial scan")
            }

            _, err = scanner.ScanAll(ctx, fullScanRequired == "1")
            if err != nil {
                log.Error(ctx, "Scan failed", err)
            } else {
                log.Info(ctx, "Scan completed")
            }
        } else {
            log.Debug(ctx, "Initial scan not needed")
        log.Debug("Executing initial scan")
        if err := scanner.RescanAll(ctx, false); err != nil {
            log.Error("Error executing initial scan", err)
        }
        log.Debug("Finished initial scan")
        return nil
    }
}

func startScanWatcher(ctx context.Context) func() error {
    return func() error {
        if conf.Server.Scanner.WatcherWait == 0 {
            log.Debug("Folder watcher is DISABLED")
            return nil
        }
        w := CreateScanWatcher(ctx)
        err := w.Run(ctx)
        if err != nil {
            log.Error("Error starting watcher", err)
        }
        return nil
    }
}

func schedulePeriodicBackup(ctx context.Context) func() error {
    return func() error {
        schedule := conf.Server.Backup.Schedule
        if schedule == "" {
            log.Info(ctx, "Periodic backup is DISABLED")
            return nil
        }

        schedulerInstance := scheduler.GetInstance()

        log.Info("Scheduling periodic backup", "schedule", schedule)
        err := schedulerInstance.Add(schedule, func() {
            start := time.Now()
            path, err := db.Backup(ctx)
            elapsed := time.Since(start)
            if err != nil {
                log.Error(ctx, "Error backing up database", "elapsed", elapsed, err)
                return
            }
            log.Info(ctx, "Backup complete", "elapsed", elapsed, "path", path)

            count, err := db.Prune(ctx)
            if err != nil {
                log.Error(ctx, "Error pruning database", "error", err)
            } else if count > 0 {
                log.Info(ctx, "Successfully pruned old files", "count", count)
            } else {
                log.Info(ctx, "No backups pruned")
            }
        })

        return err
    }
}

func scheduleDBOptimizer(ctx context.Context) func() error {
    return func() error {
        log.Info(ctx, "Scheduling DB optimizer", "schedule", consts.OptimizeDBSchedule)
        schedulerInstance := scheduler.GetInstance()
        err := schedulerInstance.Add(consts.OptimizeDBSchedule, func() {
            if scanner.IsScanning() {
                log.Debug(ctx, "Skipping DB optimization because a scan is in progress")
                return
            }
            db.Optimize(ctx)
        })
        return err
    }
}

// startScheduler starts the Navidrome scheduler, which is used to run periodic tasks.
func startScheduler(ctx context.Context) func() error {
    log.Info(ctx, "Starting scheduler")
    schedulerInstance := scheduler.GetInstance()

    return func() error {
        log.Info(ctx, "Starting scheduler")
        schedulerInstance := scheduler.GetInstance()
        schedulerInstance.Run(ctx)
        return nil
    }
}

// startInsightsCollector starts the Navidrome Insight Collector, if configured.
func startInsightsCollector(ctx context.Context) func() error {
    return func() error {
        if !conf.Server.EnableInsightsCollector {
            log.Info(ctx, "Insight Collector is DISABLED")
            return nil
        }
        log.Info(ctx, "Starting Insight Collector")
        select {
        case <-time.After(conf.Server.DevInsightsInitialDelay):
        case <-ctx.Done():
            return nil
        }
        ic := CreateInsights()
        ic.Run(ctx)
        return nil
    }
}

// startPlaybackServer starts the Navidrome playback server, if configured.
// It is responsible for the Jukebox functionality
func startPlaybackServer(ctx context.Context) func() error {
    return func() error {
        if !conf.Server.Jukebox.Enabled {
            log.Debug("Jukebox is DISABLED")
            return nil
        }
        log.Info(ctx, "Starting Jukebox service")
        playbackInstance := GetPlaybackServer()
        return playbackInstance.Run(ctx)
    }
}

// TODO: Implement some struct tags to map flags to viper
func init() {
    cobra.OnInitialize(func() {
@@ -337,19 +159,16 @@ func init() {
    rootCmd.PersistentFlags().String("datafolder", viper.GetString("datafolder"), "folder to store application data (DB), needs write access")
    rootCmd.PersistentFlags().String("cachefolder", viper.GetString("cachefolder"), "folder to store cache data (transcoding, images...), needs write access")
    rootCmd.PersistentFlags().StringP("loglevel", "l", viper.GetString("loglevel"), "log level, possible values: error, info, debug, trace")
    rootCmd.PersistentFlags().String("logfile", viper.GetString("logfile"), "log file path, if not set logs will be printed to stderr")

    _ = viper.BindPFlag("musicfolder", rootCmd.PersistentFlags().Lookup("musicfolder"))
    _ = viper.BindPFlag("datafolder", rootCmd.PersistentFlags().Lookup("datafolder"))
    _ = viper.BindPFlag("cachefolder", rootCmd.PersistentFlags().Lookup("cachefolder"))
    _ = viper.BindPFlag("loglevel", rootCmd.PersistentFlags().Lookup("loglevel"))
    _ = viper.BindPFlag("logfile", rootCmd.PersistentFlags().Lookup("logfile"))

    rootCmd.Flags().StringP("address", "a", viper.GetString("address"), "IP address to bind to")
    rootCmd.Flags().IntP("port", "p", viper.GetInt("port"), "HTTP port Navidrome will listen to")
    rootCmd.Flags().String("baseurl", viper.GetString("baseurl"), "base URL to configure Navidrome behind a proxy (ex: /music or http://my.server.com)")
    rootCmd.Flags().String("tlscert", viper.GetString("tlscert"), "optional path to a TLS cert file (enables HTTPS listening)")
    rootCmd.Flags().String("unixsocketperm", viper.GetString("unixsocketperm"), "optional file permission for the unix socket")
    rootCmd.Flags().String("tlskey", viper.GetString("tlskey"), "optional path to a TLS key file (enables HTTPS listening)")

    rootCmd.Flags().Duration("sessiontimeout", viper.GetDuration("sessiontimeout"), "how long Navidrome will wait before closing web ui idle sessions")
@@ -358,7 +177,6 @@ func init() {
    rootCmd.Flags().Bool("enabletranscodingconfig", viper.GetBool("enabletranscodingconfig"), "enables transcoding configuration in the UI")
    rootCmd.Flags().String("transcodingcachesize", viper.GetString("transcodingcachesize"), "size of transcoding cache")
    rootCmd.Flags().String("imagecachesize", viper.GetString("imagecachesize"), "size of image (art work) cache. set to 0 to disable cache")
    rootCmd.Flags().String("albumplaycountmode", viper.GetString("albumplaycountmode"), "how to compute playcount for albums. absolute (default) or normalized")
    rootCmd.Flags().Bool("autoimportplaylists", viper.GetBool("autoimportplaylists"), "enable/disable .m3u playlist auto-import`")

    rootCmd.Flags().Bool("prometheus.enabled", viper.GetBool("prometheus.enabled"), "enable/disable prometheus metrics endpoint`")
@@ -367,7 +185,6 @@ func init() {
    _ = viper.BindPFlag("address", rootCmd.Flags().Lookup("address"))
    _ = viper.BindPFlag("port", rootCmd.Flags().Lookup("port"))
    _ = viper.BindPFlag("tlscert", rootCmd.Flags().Lookup("tlscert"))
    _ = viper.BindPFlag("unixsocketperm", rootCmd.Flags().Lookup("unixsocketperm"))
    _ = viper.BindPFlag("tlskey", rootCmd.Flags().Lookup("tlskey"))
    _ = viper.BindPFlag("baseurl", rootCmd.Flags().Lookup("baseurl"))


64  cmd/scan.go
@@ -2,28 +2,15 @@ package cmd

import (
    "context"
    "encoding/gob"
    "os"

    "github.com/navidrome/navidrome/core"
    "github.com/navidrome/navidrome/core/artwork"
    "github.com/navidrome/navidrome/core/metrics"
    "github.com/navidrome/navidrome/db"
    "github.com/navidrome/navidrome/log"
    "github.com/navidrome/navidrome/persistence"
    "github.com/navidrome/navidrome/scanner"
    "github.com/navidrome/navidrome/utils/pl"
    "github.com/spf13/cobra"
)

var (
    fullScan   bool
    subprocess bool
)
var fullRescan bool

func init() {
    scanCmd.Flags().BoolVarP(&fullScan, "full", "f", false, "check all subfolders, ignoring timestamps")
    scanCmd.Flags().BoolVarP(&subprocess, "subprocess", "", false, "run as subprocess (internal use)")
    scanCmd.Flags().BoolVarP(&fullRescan, "full", "f", false, "check all subfolders, ignoring timestamps")
    rootCmd.AddCommand(scanCmd)
}

@@ -32,53 +19,16 @@ var scanCmd = &cobra.Command{
    Short: "Scan music folder",
    Long:  "Scan music folder for updates",
    Run: func(cmd *cobra.Command, args []string) {
        runScanner(cmd.Context())
        runScanner()
    },
}

func trackScanInteractively(ctx context.Context, progress <-chan *scanner.ProgressInfo) {
    for status := range pl.ReadOrDone(ctx, progress) {
        if status.Warning != "" {
            log.Warn(ctx, "Scan warning", "error", status.Warning)
        }
        if status.Error != "" {
            log.Error(ctx, "Scan error", "error", status.Error)
        }
        // Discard the progress status, we only care about errors
    }

    if fullScan {
func runScanner() {
    scanner := GetScanner()
    _ = scanner.RescanAll(context.Background(), fullRescan)
    if fullRescan {
        log.Info("Finished full rescan")
    } else {
        log.Info("Finished rescan")
    }
}

func trackScanAsSubprocess(ctx context.Context, progress <-chan *scanner.ProgressInfo) {
    encoder := gob.NewEncoder(os.Stdout)
    for status := range pl.ReadOrDone(ctx, progress) {
        err := encoder.Encode(status)
        if err != nil {
            log.Error(ctx, "Failed to encode status", err)
        }
    }
}

func runScanner(ctx context.Context) {
    sqlDB := db.Db()
    defer db.Db().Close()
    ds := persistence.New(sqlDB)
    pls := core.NewPlaylists(ds)

    progress, err := scanner.CallScan(ctx, ds, artwork.NoopCacheWarmer(), pls, metrics.NewNoopInstance(), fullScan)
    if err != nil {
        log.Fatal(ctx, "Failed to scan", err)
    }

    // Wait for the scanner to finish
    if subprocess {
        trackScanAsSubprocess(ctx, progress)
    } else {
        trackScanInteractively(ctx, progress)
    }
}

27  cmd/signaler_nonunix.go  Normal file
@@ -0,0 +1,27 @@
//go:build windows || plan9

package cmd

import (
    "context"
    "os"
    "os/signal"

    "github.com/navidrome/navidrome/log"
)

func startSignaler(ctx context.Context) func() error {
    log.Info(ctx, "Starting signaler")

    return func() error {
        var sigChan = make(chan os.Signal, 1)
        signal.Notify(sigChan, os.Interrupt)
        select {
        case sig := <-sigChan:
            log.Info(ctx, "Received termination signal", "signal", sig)
            return interrupted
        case <-ctx.Done():
            return nil
        }
    }
}
@@ -14,24 +14,35 @@ import (

const triggerScanSignal = syscall.SIGUSR1

func startSignaller(ctx context.Context) func() error {
func startSignaler(ctx context.Context) func() error {
    log.Info(ctx, "Starting signaler")
    scanner := CreateScanner(ctx)
    scanner := GetScanner()

    return func() error {
        var sigChan = make(chan os.Signal, 1)
        signal.Notify(sigChan, triggerScanSignal)
        signal.Notify(
            sigChan,
            os.Interrupt,
            triggerScanSignal,
            syscall.SIGHUP,
            syscall.SIGTERM,
            syscall.SIGABRT,
        )

        for {
            select {
            case sig := <-sigChan:
                if sig != triggerScanSignal {
                    log.Info(ctx, "Received termination signal", "signal", sig)
                    return interrupted
                }
                log.Info(ctx, "Received signal, triggering a new scan", "signal", sig)
                start := time.Now()
                _, err := scanner.ScanAll(ctx, false)
                err := scanner.RescanAll(ctx, false)
                if err != nil {
                    log.Error(ctx, "Error scanning", err)
                }
                log.Info(ctx, "Triggered scan complete", "elapsed", time.Since(start))
                log.Info(ctx, "Triggered scan complete", "elapsed", time.Since(start).Round(100*time.Millisecond))
            case <-ctx.Done():
                return nil
            }

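Because triggerScanSignal is syscall.SIGUSR1 and every other trapped signal is treated as a termination request, a running server can be told to rescan without touching the API. A minimal sketch of sending that trigger from another Go process; the PID-as-argument convention is illustrative only.

package main

import (
    "log"
    "os"
    "strconv"
    "syscall"
)

func main() {
    // Assume the target PID is passed as the first argument, e.g. from pidof/pgrep.
    if len(os.Args) < 2 {
        log.Fatal("usage: trigger-scan <pid>")
    }
    pid, err := strconv.Atoi(os.Args[1])
    if err != nil {
        log.Fatal(err)
    }
    proc, err := os.FindProcess(pid)
    if err != nil {
        log.Fatal(err)
    }
    // SIGUSR1 triggers a rescan; SIGHUP/SIGTERM/SIGABRT/Interrupt shut the server down.
    if err := proc.Signal(syscall.SIGUSR1); err != nil {
        log.Fatal(err)
    }
}

The equivalent shell one-liner would be kill -USR1 <pid>.
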
@@ -1,14 +0,0 @@
//go:build windows || plan9

package cmd

import (
    "context"
)

// Windows and Plan9 don't support SIGUSR1, so we don't need to start a signaler
func startSignaller(ctx context.Context) func() error {
    return func() error {
        return nil
    }
}
267 cmd/svc.go
@@ -1,267 +0,0 @@
|
||||
package cmd
|
||||
|
||||
import (
|
||||
"context"
|
||||
"fmt"
|
||||
"os"
|
||||
"path/filepath"
|
||||
"sync"
|
||||
"time"
|
||||
|
||||
"github.com/kardianos/service"
|
||||
"github.com/navidrome/navidrome/conf"
|
||||
"github.com/navidrome/navidrome/log"
|
||||
"github.com/spf13/cobra"
|
||||
)
|
||||
|
||||
var (
|
||||
svcStatusLabels = map[service.Status]string{
|
||||
service.StatusUnknown: "Unknown",
|
||||
service.StatusStopped: "Stopped",
|
||||
service.StatusRunning: "Running",
|
||||
}
|
||||
|
||||
installUser string
|
||||
workingDirectory string
|
||||
)
|
||||
|
||||
func init() {
|
||||
svcCmd.AddCommand(buildInstallCmd())
|
||||
svcCmd.AddCommand(buildUninstallCmd())
|
||||
svcCmd.AddCommand(buildStartCmd())
|
||||
svcCmd.AddCommand(buildStopCmd())
|
||||
svcCmd.AddCommand(buildStatusCmd())
|
||||
svcCmd.AddCommand(buildExecuteCmd())
|
||||
rootCmd.AddCommand(svcCmd)
|
||||
}
|
||||
|
||||
var svcCmd = &cobra.Command{
|
||||
Use: "service",
|
||||
Aliases: []string{"svc"},
|
||||
Short: "Manage Navidrome as a service",
|
||||
Long: fmt.Sprintf("Manage Navidrome as a service, using the OS service manager (%s)", service.Platform()),
|
||||
Run: runServiceCmd,
|
||||
}
|
||||
|
||||
type svcControl struct {
|
||||
ctx context.Context
|
||||
cancel context.CancelFunc
|
||||
done chan struct{}
|
||||
}
|
||||
|
||||
func (p *svcControl) Start(service.Service) error {
|
||||
p.done = make(chan struct{})
|
||||
p.ctx, p.cancel = context.WithCancel(context.Background())
|
||||
go func() {
|
||||
runNavidrome(p.ctx)
|
||||
close(p.done)
|
||||
}()
|
||||
return nil
|
||||
}
|
||||
|
||||
func (p *svcControl) Stop(service.Service) error {
|
||||
log.Info("Stopping service")
|
||||
p.cancel()
|
||||
select {
|
||||
case <-p.done:
|
||||
log.Info("Service stopped gracefully")
|
||||
case <-time.After(10 * time.Second):
|
||||
log.Error("Service did not stop in time. Killing it.")
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
var svcInstance = sync.OnceValue(func() service.Service {
|
||||
options := make(service.KeyValue)
|
||||
options["Restart"] = "on-failure"
|
||||
options["SuccessExitStatus"] = "1 2 8 SIGKILL"
|
||||
options["UserService"] = false
|
||||
options["LogDirectory"] = conf.Server.DataFolder
|
||||
options["SystemdScript"] = systemdScript
|
||||
if conf.Server.LogFile != "" {
|
||||
options["LogOutput"] = false
|
||||
} else {
|
||||
options["LogOutput"] = true
|
||||
options["LogDirectory"] = conf.Server.DataFolder
|
||||
}
|
||||
svcConfig := &service.Config{
|
||||
UserName: installUser,
|
||||
Name: "navidrome",
|
||||
DisplayName: "Navidrome",
|
||||
Description: "Your Personal Streaming Service",
|
||||
Dependencies: []string{
|
||||
"After=remote-fs.target network.target",
|
||||
},
|
||||
WorkingDirectory: executablePath(),
|
||||
Option: options,
|
||||
}
|
||||
arguments := []string{"service", "execute"}
|
||||
if conf.Server.ConfigFile != "" {
|
||||
arguments = append(arguments, "-c", conf.Server.ConfigFile)
|
||||
}
|
||||
svcConfig.Arguments = arguments
|
||||
|
||||
prg := &svcControl{}
|
||||
svc, err := service.New(prg, svcConfig)
|
||||
if err != nil {
|
||||
log.Fatal(err)
|
||||
}
|
||||
return svc
|
||||
})
|
||||
|
||||
func runServiceCmd(cmd *cobra.Command, _ []string) {
|
||||
_ = cmd.Help()
|
||||
}
|
||||
|
||||
func executablePath() string {
|
||||
if workingDirectory != "" {
|
||||
return workingDirectory
|
||||
}
|
||||
|
||||
ex, err := os.Executable()
|
||||
if err != nil {
|
||||
log.Fatal(err)
|
||||
}
|
||||
return filepath.Dir(ex)
|
||||
}
|
||||
|
||||
func buildInstallCmd() *cobra.Command {
|
||||
runInstallCmd := func(_ *cobra.Command, _ []string) {
|
||||
var err error
|
||||
println("Installing service with:")
|
||||
println(" working directory: " + executablePath())
|
||||
println(" music folder: " + conf.Server.MusicFolder)
|
||||
println(" data folder: " + conf.Server.DataFolder)
|
||||
if conf.Server.LogFile != "" {
|
||||
println(" log file: " + conf.Server.LogFile)
|
||||
} else {
|
||||
println(" logs folder: " + conf.Server.DataFolder)
|
||||
}
|
||||
if cfgFile != "" {
|
||||
conf.Server.ConfigFile, err = filepath.Abs(cfgFile)
|
||||
if err != nil {
|
||||
log.Fatal(err)
|
||||
}
|
||||
println(" config file: " + conf.Server.ConfigFile)
|
||||
}
|
||||
err = svcInstance().Install()
|
||||
if err != nil {
|
||||
log.Fatal(err)
|
||||
}
|
||||
println("Service installed. Use 'navidrome svc start' to start it.")
|
||||
}
|
||||
|
||||
cmd := &cobra.Command{
|
||||
Use: "install",
|
||||
Short: "Install Navidrome service.",
|
||||
Run: runInstallCmd,
|
||||
}
|
||||
cmd.Flags().StringVarP(&installUser, "user", "u", "", "user to run service")
|
||||
cmd.Flags().StringVarP(&workingDirectory, "working-directory", "w", "", "working directory of service")
|
||||
|
||||
return cmd
|
||||
}
|
||||
|
||||
func buildUninstallCmd() *cobra.Command {
|
||||
return &cobra.Command{
|
||||
Use: "uninstall",
|
||||
Short: "Uninstall Navidrome service. Does not delete the music or data folders",
|
||||
Run: func(cmd *cobra.Command, args []string) {
|
||||
err := svcInstance().Uninstall()
|
||||
if err != nil {
|
||||
log.Fatal(err)
|
||||
}
|
||||
println("Service uninstalled. Music and data folders are still intact.")
|
||||
},
|
||||
}
|
||||
}
|
||||
|
||||
func buildStartCmd() *cobra.Command {
|
||||
return &cobra.Command{
|
||||
Use: "start",
|
||||
Short: "Start Navidrome service",
|
||||
Run: func(cmd *cobra.Command, args []string) {
|
||||
err := svcInstance().Start()
|
||||
if err != nil {
|
||||
log.Fatal(err)
|
||||
}
|
||||
println("Service started. Use 'navidrome svc status' to check its status.")
|
||||
},
|
||||
}
|
||||
}
|
||||
|
||||
func buildStopCmd() *cobra.Command {
|
||||
return &cobra.Command{
|
||||
Use: "stop",
|
||||
Short: "Stop Navidrome service",
|
||||
Run: func(cmd *cobra.Command, args []string) {
|
||||
err := svcInstance().Stop()
|
||||
if err != nil {
|
||||
log.Fatal(err)
|
||||
}
|
||||
println("Service stopped. Use 'navidrome svc status' to check its status.")
|
||||
},
|
||||
}
|
||||
}
|
||||
|
||||
func buildStatusCmd() *cobra.Command {
|
||||
return &cobra.Command{
|
||||
Use: "status",
|
||||
Short: "Show Navidrome service status",
|
||||
Run: func(cmd *cobra.Command, args []string) {
|
||||
status, err := svcInstance().Status()
|
||||
if err != nil {
|
||||
log.Fatal(err)
|
||||
}
|
||||
fmt.Printf("Navidrome is %s.\n", svcStatusLabels[status])
|
||||
},
|
||||
}
|
||||
}
|
||||
|
||||
func buildExecuteCmd() *cobra.Command {
|
||||
return &cobra.Command{
|
||||
Use: "execute",
|
||||
Short: "Run navidrome as a service in the foreground (it is very unlikely you want to run this, you are better off running just navidrome)",
|
||||
Run: func(cmd *cobra.Command, args []string) {
|
||||
err := svcInstance().Run()
|
||||
if err != nil {
|
||||
log.Fatal(err)
|
||||
}
|
||||
},
|
||||
}
|
||||
}
|
||||
|
||||
const systemdScript = `[Unit]
|
||||
Description={{.Description}}
|
||||
ConditionFileIsExecutable={{.Path|cmdEscape}}
|
||||
{{range $i, $dep := .Dependencies}}
|
||||
{{$dep}} {{end}}
|
||||
|
||||
[Service]
|
||||
StartLimitInterval=5
|
||||
StartLimitBurst=10
|
||||
ExecStart={{.Path|cmdEscape}}{{range .Arguments}} {{.|cmd}}{{end}}
|
||||
{{if .WorkingDirectory}}WorkingDirectory={{.WorkingDirectory|cmdEscape}}{{end}}
|
||||
{{if .UserName}}User={{.UserName}}{{end}}
|
||||
{{if .Restart}}Restart={{.Restart}}{{end}}
|
||||
{{if .SuccessExitStatus}}SuccessExitStatus={{.SuccessExitStatus}}{{end}}
|
||||
TimeoutStopSec=20
|
||||
RestartSec=120
|
||||
EnvironmentFile=-/etc/sysconfig/{{.Name}}
|
||||
|
||||
DevicePolicy=closed
|
||||
NoNewPrivileges=yes
|
||||
PrivateTmp=yes
|
||||
ProtectControlGroups=yes
|
||||
ProtectKernelModules=yes
|
||||
ProtectKernelTunables=yes
|
||||
RestrictAddressFamilies=AF_UNIX AF_INET AF_INET6
|
||||
RestrictNamespaces=yes
|
||||
RestrictRealtime=yes
|
||||
SystemCallFilter=~@clock @debug @module @mount @obsolete @reboot @setuid @swap
|
||||
{{if .WorkingDirectory}}ReadWritePaths={{.WorkingDirectory|cmdEscape}}{{end}}
|
||||
ProtectSystem=full
|
||||
|
||||
[Install]
|
||||
WantedBy=multi-user.target
|
||||
`
|
||||
122 cmd/wire_gen.go
@@ -1,53 +1,39 @@
|
||||
// Code generated by Wire. DO NOT EDIT.
|
||||
|
||||
//go:generate go run -mod=mod github.com/google/wire/cmd/wire gen -tags "netgo"
|
||||
//go:generate go run github.com/google/wire/cmd/wire
|
||||
//go:build !wireinject
|
||||
// +build !wireinject
|
||||
|
||||
package cmd
|
||||
|
||||
import (
|
||||
"context"
|
||||
"github.com/google/wire"
|
||||
"github.com/navidrome/navidrome/core"
|
||||
"github.com/navidrome/navidrome/core/agents"
|
||||
"github.com/navidrome/navidrome/core/agents/lastfm"
|
||||
"github.com/navidrome/navidrome/core/agents/listenbrainz"
|
||||
"github.com/navidrome/navidrome/core/artwork"
|
||||
"github.com/navidrome/navidrome/core/external"
|
||||
"github.com/navidrome/navidrome/core/ffmpeg"
|
||||
"github.com/navidrome/navidrome/core/metrics"
|
||||
"github.com/navidrome/navidrome/core/playback"
|
||||
"github.com/navidrome/navidrome/core/scrobbler"
|
||||
"github.com/navidrome/navidrome/db"
|
||||
"github.com/navidrome/navidrome/model"
|
||||
"github.com/navidrome/navidrome/persistence"
|
||||
"github.com/navidrome/navidrome/scanner"
|
||||
"github.com/navidrome/navidrome/server"
|
||||
"github.com/navidrome/navidrome/server/api"
|
||||
"github.com/navidrome/navidrome/server/events"
|
||||
"github.com/navidrome/navidrome/server/nativeapi"
|
||||
"github.com/navidrome/navidrome/server/public"
|
||||
"github.com/navidrome/navidrome/server/subsonic"
|
||||
)
|
||||
|
||||
import (
|
||||
_ "github.com/navidrome/navidrome/adapters/taglib"
|
||||
"sync"
|
||||
)
|
||||
|
||||
// Injectors from wire_injectors.go:
|
||||
|
||||
func CreateDataStore() model.DataStore {
|
||||
sqlDB := db.Db()
|
||||
dataStore := persistence.New(sqlDB)
|
||||
return dataStore
|
||||
}
|
||||
|
||||
func CreateServer() *server.Server {
|
||||
func CreateServer(musicFolder string) *server.Server {
|
||||
sqlDB := db.Db()
|
||||
dataStore := persistence.New(sqlDB)
|
||||
broker := events.GetBroker()
|
||||
insights := metrics.GetInstance(dataStore)
|
||||
serverServer := server.New(dataStore, broker, insights)
|
||||
serverServer := server.New(dataStore, broker)
|
||||
return serverServer
|
||||
}
|
||||
|
||||
@@ -55,33 +41,35 @@ func CreateNativeAPIRouter() *nativeapi.Router {
|
||||
sqlDB := db.Db()
|
||||
dataStore := persistence.New(sqlDB)
|
||||
share := core.NewShare(dataStore)
|
||||
playlists := core.NewPlaylists(dataStore)
|
||||
insights := metrics.GetInstance(dataStore)
|
||||
router := nativeapi.New(dataStore, share, playlists, insights)
|
||||
router := nativeapi.New(dataStore, share)
|
||||
return router
|
||||
}
|
||||
|
||||
func CreateSubsonicAPIRouter(ctx context.Context) *subsonic.Router {
|
||||
func CreateNewNativeAPIRouter() *api.Router {
|
||||
sqlDB := db.Db()
|
||||
dataStore := persistence.New(sqlDB)
|
||||
router := api.New(dataStore)
|
||||
return router
|
||||
}
|
||||
|
||||
func CreateSubsonicAPIRouter() *subsonic.Router {
|
||||
sqlDB := db.Db()
|
||||
dataStore := persistence.New(sqlDB)
|
||||
fileCache := artwork.GetImageCache()
|
||||
fFmpeg := ffmpeg.New()
|
||||
agentsAgents := agents.GetAgents(dataStore)
|
||||
provider := external.NewProvider(dataStore, agentsAgents)
|
||||
artworkArtwork := artwork.NewArtwork(dataStore, fileCache, fFmpeg, provider)
|
||||
agentsAgents := agents.New(dataStore)
|
||||
externalMetadata := core.NewExternalMetadata(dataStore, agentsAgents)
|
||||
artworkArtwork := artwork.NewArtwork(dataStore, fileCache, fFmpeg, externalMetadata)
|
||||
transcodingCache := core.GetTranscodingCache()
|
||||
mediaStreamer := core.NewMediaStreamer(dataStore, fFmpeg, transcodingCache)
|
||||
share := core.NewShare(dataStore)
|
||||
archiver := core.NewArchiver(mediaStreamer, dataStore, share)
|
||||
players := core.NewPlayers(dataStore)
|
||||
cacheWarmer := artwork.NewCacheWarmer(artworkArtwork, fileCache)
|
||||
scanner := GetScanner()
|
||||
broker := events.GetBroker()
|
||||
playlists := core.NewPlaylists(dataStore)
|
||||
metricsMetrics := metrics.NewPrometheusInstance(dataStore)
|
||||
scannerScanner := scanner.New(ctx, dataStore, cacheWarmer, broker, playlists, metricsMetrics)
|
||||
playTracker := scrobbler.GetPlayTracker(dataStore, broker)
|
||||
playbackServer := playback.GetInstance(dataStore)
|
||||
router := subsonic.New(dataStore, artworkArtwork, mediaStreamer, archiver, players, provider, scannerScanner, broker, playlists, playTracker, share, playbackServer)
|
||||
router := subsonic.New(dataStore, artworkArtwork, mediaStreamer, archiver, players, externalMetadata, scanner, broker, playlists, playTracker, share)
|
||||
return router
|
||||
}
|
||||
|
||||
@@ -90,9 +78,9 @@ func CreatePublicRouter() *public.Router {
|
||||
dataStore := persistence.New(sqlDB)
|
||||
fileCache := artwork.GetImageCache()
|
||||
fFmpeg := ffmpeg.New()
|
||||
agentsAgents := agents.GetAgents(dataStore)
|
||||
provider := external.NewProvider(dataStore, agentsAgents)
|
||||
artworkArtwork := artwork.NewArtwork(dataStore, fileCache, fFmpeg, provider)
|
||||
agentsAgents := agents.New(dataStore)
|
||||
externalMetadata := core.NewExternalMetadata(dataStore, agentsAgents)
|
||||
artworkArtwork := artwork.NewArtwork(dataStore, fileCache, fFmpeg, externalMetadata)
|
||||
transcodingCache := core.GetTranscodingCache()
|
||||
mediaStreamer := core.NewMediaStreamer(dataStore, fFmpeg, transcodingCache)
|
||||
share := core.NewShare(dataStore)
|
||||
@@ -115,60 +103,34 @@ func CreateListenBrainzRouter() *listenbrainz.Router {
|
||||
return router
|
||||
}
|
||||
|
||||
func CreateInsights() metrics.Insights {
|
||||
sqlDB := db.Db()
|
||||
dataStore := persistence.New(sqlDB)
|
||||
insights := metrics.GetInstance(dataStore)
|
||||
return insights
|
||||
}
|
||||
|
||||
func CreatePrometheus() metrics.Metrics {
|
||||
sqlDB := db.Db()
|
||||
dataStore := persistence.New(sqlDB)
|
||||
metricsMetrics := metrics.NewPrometheusInstance(dataStore)
|
||||
return metricsMetrics
|
||||
}
|
||||
|
||||
func CreateScanner(ctx context.Context) scanner.Scanner {
|
||||
func createScanner() scanner.Scanner {
|
||||
sqlDB := db.Db()
|
||||
dataStore := persistence.New(sqlDB)
|
||||
playlists := core.NewPlaylists(dataStore)
|
||||
fileCache := artwork.GetImageCache()
|
||||
fFmpeg := ffmpeg.New()
|
||||
agentsAgents := agents.GetAgents(dataStore)
|
||||
provider := external.NewProvider(dataStore, agentsAgents)
|
||||
artworkArtwork := artwork.NewArtwork(dataStore, fileCache, fFmpeg, provider)
|
||||
agentsAgents := agents.New(dataStore)
|
||||
externalMetadata := core.NewExternalMetadata(dataStore, agentsAgents)
|
||||
artworkArtwork := artwork.NewArtwork(dataStore, fileCache, fFmpeg, externalMetadata)
|
||||
cacheWarmer := artwork.NewCacheWarmer(artworkArtwork, fileCache)
|
||||
broker := events.GetBroker()
|
||||
playlists := core.NewPlaylists(dataStore)
|
||||
metricsMetrics := metrics.NewPrometheusInstance(dataStore)
|
||||
scannerScanner := scanner.New(ctx, dataStore, cacheWarmer, broker, playlists, metricsMetrics)
|
||||
scannerScanner := scanner.New(dataStore, playlists, cacheWarmer, broker)
|
||||
return scannerScanner
|
||||
}
|
||||
|
||||
func CreateScanWatcher(ctx context.Context) scanner.Watcher {
|
||||
sqlDB := db.Db()
|
||||
dataStore := persistence.New(sqlDB)
|
||||
fileCache := artwork.GetImageCache()
|
||||
fFmpeg := ffmpeg.New()
|
||||
agentsAgents := agents.GetAgents(dataStore)
|
||||
provider := external.NewProvider(dataStore, agentsAgents)
|
||||
artworkArtwork := artwork.NewArtwork(dataStore, fileCache, fFmpeg, provider)
|
||||
cacheWarmer := artwork.NewCacheWarmer(artworkArtwork, fileCache)
|
||||
broker := events.GetBroker()
|
||||
playlists := core.NewPlaylists(dataStore)
|
||||
metricsMetrics := metrics.NewPrometheusInstance(dataStore)
|
||||
scannerScanner := scanner.New(ctx, dataStore, cacheWarmer, broker, playlists, metricsMetrics)
|
||||
watcher := scanner.NewWatcher(dataStore, scannerScanner)
|
||||
return watcher
|
||||
}
|
||||
|
||||
func GetPlaybackServer() playback.PlaybackServer {
|
||||
sqlDB := db.Db()
|
||||
dataStore := persistence.New(sqlDB)
|
||||
playbackServer := playback.GetInstance(dataStore)
|
||||
return playbackServer
|
||||
}
|
||||
|
||||
// wire_injectors.go:
|
||||
|
||||
var allProviders = wire.NewSet(core.Set, artwork.Set, server.New, subsonic.New, nativeapi.New, public.New, persistence.New, lastfm.NewRouter, listenbrainz.NewRouter, events.GetBroker, scanner.New, scanner.NewWatcher, metrics.NewPrometheusInstance, db.Db)
|
||||
var allProviders = wire.NewSet(core.Set, artwork.Set, subsonic.New, nativeapi.New, api.New, public.New, persistence.New, lastfm.NewRouter, listenbrainz.NewRouter, events.GetBroker, db.Db)
|
||||
|
||||
// Scanner must be a Singleton
|
||||
var (
|
||||
onceScanner sync.Once
|
||||
scannerInstance scanner.Scanner
|
||||
)
|
||||
|
||||
func GetScanner() scanner.Scanner {
|
||||
onceScanner.Do(func() {
|
||||
scannerInstance = createScanner()
|
||||
})
|
||||
return scannerInstance
|
||||
}
|
||||
|
||||
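Both sides of this change keep the scanner as a single shared instance: the generated wire_gen.go guards it with a package-level sync.Once in GetScanner, while cmd/svc.go builds its service handle with sync.OnceValue. The two forms are equivalent; a minimal standalone sketch of the shorter OnceValue variant, where expensiveInit stands in for a constructor such as createScanner:

package main

import (
    "fmt"
    "sync"
)

type thing struct{ id int }

// expensiveInit stands in for a constructor like createScanner(); it runs exactly once.
func expensiveInit() *thing {
    fmt.Println("initialized once")
    return &thing{id: 42}
}

// getThing returns the same instance on every call.
var getThing = sync.OnceValue(expensiveInit)

func main() {
    a := getThing()
    b := getThing()
    fmt.Println(a == b) // true
}
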
@@ -3,20 +3,18 @@
|
||||
package cmd
|
||||
|
||||
import (
|
||||
"context"
|
||||
"sync"
|
||||
|
||||
"github.com/google/wire"
|
||||
"github.com/navidrome/navidrome/core"
|
||||
"github.com/navidrome/navidrome/core/agents/lastfm"
|
||||
"github.com/navidrome/navidrome/core/agents/listenbrainz"
|
||||
"github.com/navidrome/navidrome/core/artwork"
|
||||
"github.com/navidrome/navidrome/core/metrics"
|
||||
"github.com/navidrome/navidrome/core/playback"
|
||||
"github.com/navidrome/navidrome/db"
|
||||
"github.com/navidrome/navidrome/model"
|
||||
"github.com/navidrome/navidrome/persistence"
|
||||
"github.com/navidrome/navidrome/scanner"
|
||||
"github.com/navidrome/navidrome/server"
|
||||
"github.com/navidrome/navidrome/server/api"
|
||||
"github.com/navidrome/navidrome/server/events"
|
||||
"github.com/navidrome/navidrome/server/nativeapi"
|
||||
"github.com/navidrome/navidrome/server/public"
|
||||
@@ -26,28 +24,20 @@ import (
|
||||
var allProviders = wire.NewSet(
|
||||
core.Set,
|
||||
artwork.Set,
|
||||
server.New,
|
||||
subsonic.New,
|
||||
nativeapi.New,
|
||||
api.New,
|
||||
public.New,
|
||||
persistence.New,
|
||||
lastfm.NewRouter,
|
||||
listenbrainz.NewRouter,
|
||||
events.GetBroker,
|
||||
scanner.New,
|
||||
scanner.NewWatcher,
|
||||
metrics.NewPrometheusInstance,
|
||||
db.Db,
|
||||
)
|
||||
|
||||
func CreateDataStore() model.DataStore {
|
||||
panic(wire.Build(
|
||||
allProviders,
|
||||
))
|
||||
}
|
||||
|
||||
func CreateServer() *server.Server {
|
||||
func CreateServer(musicFolder string) *server.Server {
|
||||
panic(wire.Build(
|
||||
server.New,
|
||||
allProviders,
|
||||
))
|
||||
}
|
||||
@@ -58,12 +48,19 @@ func CreateNativeAPIRouter() *nativeapi.Router {
|
||||
))
|
||||
}
|
||||
|
||||
func CreateSubsonicAPIRouter(ctx context.Context) *subsonic.Router {
|
||||
func CreateNewNativeAPIRouter() *api.Router {
|
||||
panic(wire.Build(
|
||||
allProviders,
|
||||
))
|
||||
}
|
||||
|
||||
func CreateSubsonicAPIRouter() *subsonic.Router {
|
||||
panic(wire.Build(
|
||||
allProviders,
|
||||
GetScanner,
|
||||
))
|
||||
}
|
||||
|
||||
func CreatePublicRouter() *public.Router {
|
||||
panic(wire.Build(
|
||||
allProviders,
|
||||
@@ -82,32 +79,22 @@ func CreateListenBrainzRouter() *listenbrainz.Router {
|
||||
))
|
||||
}
|
||||
|
||||
func CreateInsights() metrics.Insights {
|
||||
panic(wire.Build(
|
||||
allProviders,
|
||||
))
|
||||
// Scanner must be a Singleton
|
||||
var (
|
||||
onceScanner sync.Once
|
||||
scannerInstance scanner.Scanner
|
||||
)
|
||||
|
||||
func GetScanner() scanner.Scanner {
|
||||
onceScanner.Do(func() {
|
||||
scannerInstance = createScanner()
|
||||
})
|
||||
return scannerInstance
|
||||
}
|
||||
|
||||
func CreatePrometheus() metrics.Metrics {
|
||||
panic(wire.Build(
|
||||
allProviders,
|
||||
))
|
||||
}
|
||||
|
||||
func CreateScanner(ctx context.Context) scanner.Scanner {
|
||||
panic(wire.Build(
|
||||
allProviders,
|
||||
))
|
||||
}
|
||||
|
||||
func CreateScanWatcher(ctx context.Context) scanner.Watcher {
|
||||
panic(wire.Build(
|
||||
allProviders,
|
||||
))
|
||||
}
|
||||
|
||||
func GetPlaybackServer() playback.PlaybackServer {
|
||||
func createScanner() scanner.Scanner {
|
||||
panic(wire.Build(
|
||||
allProviders,
|
||||
scanner.New,
|
||||
))
|
||||
}
|
||||
|
||||
@@ -1,4 +0,0 @@
package buildtags

// This file is left intentionally empty. It is used to make sure the package is not empty, in the case all
// required build tags are disabled.

@@ -1,11 +0,0 @@
//go:build netgo

package buildtags

// NOTICE: This file was created to force the inclusion of the `netgo` tag when compiling the project.
// If the tag is not included, the compilation will fail because this variable won't be defined, and the `main.go`
// file requires it.

// Why this tag is required? See https://github.com/navidrome/navidrome/issues/700

var NETGO = true
@@ -9,97 +9,80 @@ import (
|
||||
"strings"
|
||||
"time"
|
||||
|
||||
"github.com/bmatcuk/doublestar/v4"
|
||||
"github.com/go-viper/encoding/ini"
|
||||
"github.com/kr/pretty"
|
||||
"github.com/navidrome/navidrome/consts"
|
||||
"github.com/navidrome/navidrome/log"
|
||||
"github.com/navidrome/navidrome/utils/chain"
|
||||
"github.com/navidrome/navidrome/utils/number"
|
||||
"github.com/robfig/cron/v3"
|
||||
"github.com/spf13/viper"
|
||||
)
|
||||
|
||||
type configOptions struct {
|
||||
ConfigFile string
|
||||
Address string
|
||||
Port int
|
||||
UnixSocketPerm string
|
||||
MusicFolder string
|
||||
DataFolder string
|
||||
CacheFolder string
|
||||
DbPath string
|
||||
LogLevel string
|
||||
LogFile string
|
||||
SessionTimeout time.Duration
|
||||
BaseURL string
|
||||
BasePath string
|
||||
BaseHost string
|
||||
BaseScheme string
|
||||
TLSCert string
|
||||
TLSKey string
|
||||
UILoginBackgroundURL string
|
||||
UIWelcomeMessage string
|
||||
MaxSidebarPlaylists int
|
||||
EnableTranscodingConfig bool
|
||||
EnableDownloads bool
|
||||
EnableExternalServices bool
|
||||
EnableInsightsCollector bool
|
||||
EnableMediaFileCoverArt bool
|
||||
TranscodingCacheSize string
|
||||
ImageCacheSize string
|
||||
AlbumPlayCountMode string
|
||||
EnableArtworkPrecache bool
|
||||
AutoImportPlaylists bool
|
||||
DefaultPlaylistPublicVisibility bool
|
||||
PlaylistsPath string
|
||||
SmartPlaylistRefreshDelay time.Duration
|
||||
AutoTranscodeDownload bool
|
||||
DefaultDownsamplingFormat string
|
||||
SearchFullString bool
|
||||
RecentlyAddedByModTime bool
|
||||
PreferSortTags bool
|
||||
IgnoredArticles string
|
||||
IndexGroups string
|
||||
FFmpegPath string
|
||||
MPVPath string
|
||||
MPVCmdTemplate string
|
||||
CoverArtPriority string
|
||||
CoverJpegQuality int
|
||||
ArtistArtPriority string
|
||||
EnableGravatar bool
|
||||
EnableFavourites bool
|
||||
EnableStarRating bool
|
||||
EnableUserEditing bool
|
||||
EnableSharing bool
|
||||
ShareURL string
|
||||
DefaultDownloadableShare bool
|
||||
DefaultTheme string
|
||||
DefaultLanguage string
|
||||
DefaultUIVolume int
|
||||
EnableReplayGain bool
|
||||
EnableCoverAnimation bool
|
||||
GATrackingID string
|
||||
EnableLogRedacting bool
|
||||
AuthRequestLimit int
|
||||
AuthWindowLength time.Duration
|
||||
PasswordEncryptionKey string
|
||||
ReverseProxyUserHeader string
|
||||
ReverseProxyWhitelist string
|
||||
HTTPSecurityHeaders secureOptions
|
||||
Prometheus prometheusOptions
|
||||
Scanner scannerOptions
|
||||
Jukebox jukeboxOptions
|
||||
Backup backupOptions
|
||||
PID pidOptions
|
||||
Inspect inspectOptions
|
||||
Subsonic subsonicOptions
|
||||
LyricsPriority string
|
||||
ConfigFile string
|
||||
Address string
|
||||
Port int
|
||||
MusicFolder string
|
||||
DataFolder string
|
||||
CacheFolder string
|
||||
DbPath string
|
||||
LogLevel string
|
||||
ScanInterval time.Duration
|
||||
ScanSchedule string
|
||||
SessionTimeout time.Duration
|
||||
BaseURL string
|
||||
BasePath string
|
||||
BaseHost string
|
||||
BaseScheme string
|
||||
TLSCert string
|
||||
TLSKey string
|
||||
UILoginBackgroundURL string
|
||||
UIWelcomeMessage string
|
||||
MaxSidebarPlaylists int
|
||||
EnableTranscodingConfig bool
|
||||
EnableDownloads bool
|
||||
EnableExternalServices bool
|
||||
EnableMediaFileCoverArt bool
|
||||
TranscodingCacheSize string
|
||||
ImageCacheSize string
|
||||
EnableArtworkPrecache bool
|
||||
AutoImportPlaylists bool
|
||||
PlaylistsPath string
|
||||
AutoTranscodeDownload bool
|
||||
DefaultDownsamplingFormat string
|
||||
SearchFullString bool
|
||||
RecentlyAddedByModTime bool
|
||||
IgnoredArticles string
|
||||
IndexGroups string
|
||||
SubsonicArtistParticipations bool
|
||||
FFmpegPath string
|
||||
CoverArtPriority string
|
||||
CoverJpegQuality int
|
||||
ArtistArtPriority string
|
||||
EnableGravatar bool
|
||||
EnableFavourites bool
|
||||
EnableStarRating bool
|
||||
EnableUserEditing bool
|
||||
EnableSharing bool
|
||||
DefaultDownloadableShare bool
|
||||
DefaultTheme string
|
||||
DefaultLanguage string
|
||||
DefaultUIVolume int
|
||||
EnableReplayGain bool
|
||||
EnableCoverAnimation bool
|
||||
GATrackingID string
|
||||
EnableLogRedacting bool
|
||||
AuthRequestLimit int
|
||||
AuthWindowLength time.Duration
|
||||
PasswordEncryptionKey string
|
||||
ReverseProxyUserHeader string
|
||||
ReverseProxyWhitelist string
|
||||
Prometheus prometheusOptions
|
||||
Scanner scannerOptions
|
||||
|
||||
Agents string
|
||||
LastFM lastfmOptions
|
||||
Spotify spotifyOptions
|
||||
ListenBrainz listenBrainzOptions
|
||||
Tags map[string]TagConf
|
||||
|
||||
// DevFlags. These are used to enable/disable debugging and incomplete features
|
||||
DevLogSourceLine bool
|
||||
@@ -108,47 +91,20 @@ type configOptions struct {
|
||||
DevAutoCreateAdminPassword string
|
||||
DevAutoLoginUsername string
|
||||
DevActivityPanel bool
|
||||
DevActivityPanelUpdateRate time.Duration
|
||||
DevSidebarPlaylists bool
|
||||
DevEnableBufferedScrobble bool
|
||||
DevShowArtistPage bool
|
||||
DevOffsetOptimize int
|
||||
DevArtworkMaxRequests int
|
||||
DevArtworkThrottleBacklogLimit int
|
||||
DevArtworkThrottleBacklogTimeout time.Duration
|
||||
DevArtistInfoTimeToLive time.Duration
|
||||
DevAlbumInfoTimeToLive time.Duration
|
||||
DevExternalScanner bool
|
||||
DevScannerThreads uint
|
||||
DevInsightsInitialDelay time.Duration
|
||||
DevEnablePlayerInsights bool
|
||||
}
|
||||
|
||||
type scannerOptions struct {
|
||||
Enabled bool
|
||||
Schedule string
|
||||
WatcherWait time.Duration
|
||||
ScanOnStartup bool
|
||||
Extractor string
|
||||
ArtistJoiner string
|
||||
GenreSeparators string // Deprecated: Use Tags.genre.Split instead
|
||||
GroupAlbumReleases bool // Deprecated: Use PID.Album instead
|
||||
FollowSymlinks bool // Whether to follow symlinks when scanning directories
|
||||
}
|
||||
|
||||
type subsonicOptions struct {
|
||||
AppendSubtitle bool
|
||||
ArtistParticipations bool
|
||||
DefaultReportRealPath bool
|
||||
LegacyClients string
|
||||
}
|
||||
|
||||
type TagConf struct {
|
||||
Ignore bool `yaml:"ignore"`
|
||||
Aliases []string `yaml:"aliases"`
|
||||
Type string `yaml:"type"`
|
||||
MaxLength int `yaml:"maxLength"`
|
||||
Split []string `yaml:"split"`
|
||||
Album bool `yaml:"album"`
|
||||
GenreSeparators string
|
||||
GroupAlbumReleases bool
|
||||
}
|
||||
|
||||
type lastfmOptions struct {
|
||||
@@ -168,41 +124,9 @@ type listenBrainzOptions struct {
|
||||
BaseURL string
|
||||
}
|
||||
|
||||
type secureOptions struct {
|
||||
CustomFrameOptionsValue string
|
||||
}
|
||||
|
||||
type prometheusOptions struct {
|
||||
Enabled bool
|
||||
MetricsPath string
|
||||
Password string
|
||||
}
|
||||
|
||||
type AudioDeviceDefinition []string
|
||||
|
||||
type jukeboxOptions struct {
|
||||
Enabled bool
|
||||
Devices []AudioDeviceDefinition
|
||||
Default string
|
||||
AdminOnly bool
|
||||
}
|
||||
|
||||
type backupOptions struct {
|
||||
Count int
|
||||
Path string
|
||||
Schedule string
|
||||
}
|
||||
|
||||
type pidOptions struct {
|
||||
Track string
|
||||
Album string
|
||||
}
|
||||
|
||||
type inspectOptions struct {
|
||||
Enabled bool
|
||||
MaxRequests int
|
||||
BacklogLimit int
|
||||
BacklogTimeout int
|
||||
}
|
||||
|
||||
var (
|
||||
@@ -217,21 +141,18 @@ func LoadFromFile(confFile string) {
|
||||
_, _ = fmt.Fprintln(os.Stderr, "FATAL: Error reading config file:", err)
|
||||
os.Exit(1)
|
||||
}
|
||||
Load(true)
|
||||
Load()
|
||||
}
|
||||
|
||||
func Load(noConfigDump bool) {
|
||||
parseIniFileConfiguration()
|
||||
|
||||
func Load() {
|
||||
err := viper.Unmarshal(&Server)
|
||||
if err != nil {
|
||||
_, _ = fmt.Fprintln(os.Stderr, "FATAL: Error parsing config:", err)
|
||||
os.Exit(1)
|
||||
}
|
||||
|
||||
err = os.MkdirAll(Server.DataFolder, os.ModePerm)
|
||||
if err != nil {
|
||||
_, _ = fmt.Fprintln(os.Stderr, "FATAL: Error creating data path:", err)
|
||||
_, _ = fmt.Fprintln(os.Stderr, "FATAL: Error creating data path:", "path", Server.DataFolder, err)
|
||||
os.Exit(1)
|
||||
}
|
||||
|
||||
@@ -240,7 +161,7 @@ func Load(noConfigDump bool) {
|
||||
}
|
||||
err = os.MkdirAll(Server.CacheFolder, os.ModePerm)
|
||||
if err != nil {
|
||||
_, _ = fmt.Fprintln(os.Stderr, "FATAL: Error creating cache path:", err)
|
||||
_, _ = fmt.Fprintln(os.Stderr, "FATAL: Error creating cache path:", "path", Server.CacheFolder, err)
|
||||
os.Exit(1)
|
||||
}
|
||||
|
||||
@@ -249,42 +170,19 @@ func Load(noConfigDump bool) {
|
||||
Server.DbPath = filepath.Join(Server.DataFolder, consts.DefaultDbPath)
|
||||
}
|
||||
|
||||
if Server.Backup.Path != "" {
|
||||
err = os.MkdirAll(Server.Backup.Path, os.ModePerm)
|
||||
if err != nil {
|
||||
_, _ = fmt.Fprintln(os.Stderr, "FATAL: Error creating backup path:", err)
|
||||
os.Exit(1)
|
||||
}
|
||||
}
|
||||
|
||||
out := os.Stderr
|
||||
if Server.LogFile != "" {
|
||||
out, err = os.OpenFile(Server.LogFile, os.O_APPEND|os.O_CREATE|os.O_WRONLY, 0644)
|
||||
if err != nil {
|
||||
_, _ = fmt.Fprintf(os.Stderr, "FATAL: Error opening log file %s: %s\n", Server.LogFile, err.Error())
|
||||
os.Exit(1)
|
||||
}
|
||||
log.SetOutput(out)
|
||||
}
|
||||
|
||||
log.SetLevelString(Server.LogLevel)
|
||||
log.SetLogLevels(Server.DevLogLevels)
|
||||
log.SetLogSourceLine(Server.DevLogSourceLine)
|
||||
log.SetRedacting(Server.EnableLogRedacting)
|
||||
|
||||
err = chain.RunSequentially(
|
||||
validateScanSchedule,
|
||||
validateBackupSchedule,
|
||||
validatePlaylistsPath,
|
||||
)
|
||||
if err != nil {
|
||||
if err := validateScanSchedule(); err != nil {
|
||||
os.Exit(1)
|
||||
}
|
||||
|
||||
if Server.BaseURL != "" {
|
||||
u, err := url.Parse(Server.BaseURL)
|
||||
if err != nil {
|
||||
_, _ = fmt.Fprintln(os.Stderr, "FATAL: Invalid BaseURL:", err)
|
||||
_, _ = fmt.Fprintf(os.Stderr, "FATAL: Invalid BaseURL %s: %s\n", Server.BaseURL, err.Error())
|
||||
os.Exit(1)
|
||||
}
|
||||
Server.BasePath = u.Path
|
||||
@@ -295,72 +193,26 @@ func Load(noConfigDump bool) {
|
||||
}
|
||||
|
||||
// Print current configuration if log level is Debug
|
||||
if log.IsGreaterOrEqualTo(log.LevelDebug) && !noConfigDump {
|
||||
if log.CurrentLevel() >= log.LevelDebug {
|
||||
prettyConf := pretty.Sprintf("Loaded configuration from '%s': %# v", Server.ConfigFile, Server)
|
||||
if Server.EnableLogRedacting {
|
||||
prettyConf = log.Redact(prettyConf)
|
||||
}
|
||||
_, _ = fmt.Fprintln(out, prettyConf)
|
||||
_, _ = fmt.Fprintln(os.Stderr, prettyConf)
|
||||
}
|
||||
|
||||
if !Server.EnableExternalServices {
|
||||
disableExternalServices()
|
||||
}
|
||||
|
||||
if Server.Scanner.Extractor != consts.DefaultScannerExtractor {
|
||||
log.Warn(fmt.Sprintf("Extractor '%s' is not implemented, using 'taglib'", Server.Scanner.Extractor))
|
||||
Server.Scanner.Extractor = consts.DefaultScannerExtractor
|
||||
}
|
||||
logDeprecatedOptions("Scanner.GenreSeparators")
|
||||
logDeprecatedOptions("Scanner.GroupAlbumReleases")
|
||||
logDeprecatedOptions("DevEnableBufferedScrobble") // Deprecated: Buffered scrobbling is now always enabled and this option is ignored
|
||||
|
||||
// Call init hooks
|
||||
for _, hook := range hooks {
|
||||
hook()
|
||||
}
|
||||
}
|
||||
|
||||
func logDeprecatedOptions(options ...string) {
|
||||
for _, option := range options {
|
||||
envVar := "ND_" + strings.ToUpper(strings.ReplaceAll(option, ".", "_"))
|
||||
if os.Getenv(envVar) != "" {
|
||||
log.Warn(fmt.Sprintf("Option '%s' is deprecated and will be ignored in a future release", envVar))
|
||||
}
|
||||
if viper.InConfig(option) {
|
||||
log.Warn(fmt.Sprintf("Option '%s' is deprecated and will be ignored in a future release", option))
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// parseIniFileConfiguration is used to parse the config file when it is in INI format. For INI files, it
|
||||
// would require a nested structure, so instead we unmarshal it to a map and then merge the nested [default]
|
||||
// section into the root level.
|
||||
func parseIniFileConfiguration() {
|
||||
cfgFile := viper.ConfigFileUsed()
|
||||
if strings.ToLower(filepath.Ext(cfgFile)) == ".ini" {
|
||||
var iniConfig map[string]interface{}
|
||||
err := viper.Unmarshal(&iniConfig)
|
||||
if err != nil {
|
||||
_, _ = fmt.Fprintln(os.Stderr, "FATAL: Error parsing config:", err)
|
||||
os.Exit(1)
|
||||
}
|
||||
cfg, ok := iniConfig["default"].(map[string]any)
|
||||
if !ok {
|
||||
_, _ = fmt.Fprintln(os.Stderr, "FATAL: Error parsing config: missing [default] section:", iniConfig)
|
||||
os.Exit(1)
|
||||
}
|
||||
err = viper.MergeConfigMap(cfg)
|
||||
if err != nil {
|
||||
_, _ = fmt.Fprintln(os.Stderr, "FATAL: Error parsing config:", err)
|
||||
os.Exit(1)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
func disableExternalServices() {
|
||||
log.Info("All external integrations are DISABLED!")
|
||||
Server.EnableInsightsCollector = false
|
||||
Server.LastFM.Enabled = false
|
||||
Server.Spotify.ID = ""
|
||||
Server.ListenBrainz.Enabled = false
|
||||
@@ -370,49 +222,33 @@ func disableExternalServices() {
|
||||
}
|
||||
}
|
||||
|
||||
func validatePlaylistsPath() error {
|
||||
for _, path := range strings.Split(Server.PlaylistsPath, string(filepath.ListSeparator)) {
|
||||
_, err := doublestar.Match(path, "")
|
||||
if err != nil {
|
||||
log.Error("Invalid PlaylistsPath", "path", path, err)
|
||||
return err
|
||||
func validateScanSchedule() error {
|
||||
if Server.ScanInterval != -1 {
|
||||
log.Warn("ScanInterval is DEPRECATED. Please use ScanSchedule. See docs at https://navidrome.org/docs/usage/configuration-options/")
|
||||
if Server.ScanSchedule != "@every 1m" {
|
||||
log.Error("You cannot specify both ScanInterval and ScanSchedule, ignoring ScanInterval")
|
||||
} else {
|
||||
if Server.ScanInterval == 0 {
|
||||
Server.ScanSchedule = ""
|
||||
} else {
|
||||
Server.ScanSchedule = fmt.Sprintf("@every %s", Server.ScanInterval)
|
||||
}
|
||||
log.Warn("Setting ScanSchedule", "schedule", Server.ScanSchedule)
|
||||
}
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
func validateScanSchedule() error {
|
||||
if Server.Scanner.Schedule == "0" || Server.Scanner.Schedule == "" {
|
||||
Server.Scanner.Schedule = ""
|
||||
if Server.ScanSchedule == "0" || Server.ScanSchedule == "" {
|
||||
Server.ScanSchedule = ""
|
||||
return nil
|
||||
}
|
||||
var err error
|
||||
Server.Scanner.Schedule, err = validateSchedule(Server.Scanner.Schedule, "Scanner.Schedule")
|
||||
return err
|
||||
}
|
||||
|
||||
func validateBackupSchedule() error {
|
||||
if Server.Backup.Path == "" || Server.Backup.Schedule == "" || Server.Backup.Count == 0 {
|
||||
Server.Backup.Schedule = ""
|
||||
return nil
|
||||
}
|
||||
var err error
|
||||
Server.Backup.Schedule, err = validateSchedule(Server.Backup.Schedule, "Backup.Schedule")
|
||||
return err
|
||||
}
|
||||
|
||||
func validateSchedule(schedule, field string) (string, error) {
|
||||
if _, err := time.ParseDuration(schedule); err == nil {
|
||||
schedule = "@every " + schedule
|
||||
if _, err := time.ParseDuration(Server.ScanSchedule); err == nil {
|
||||
Server.ScanSchedule = "@every " + Server.ScanSchedule
|
||||
}
|
||||
c := cron.New()
|
||||
id, err := c.AddFunc(schedule, func() {})
|
||||
_, err := c.AddFunc(Server.ScanSchedule, func() {})
|
||||
if err != nil {
|
||||
log.Error(fmt.Sprintf("Invalid %s. Please read format spec at https://pkg.go.dev/github.com/robfig/cron#hdr-CRON_Expression_Format", field), "schedule", schedule, err)
|
||||
} else {
|
||||
c.Remove(id)
|
||||
log.Error("Invalid ScanSchedule. Please read format spec at https://pkg.go.dev/github.com/robfig/cron#hdr-CRON_Expression_Format", "schedule", Server.ScanSchedule, err)
|
||||
}
|
||||
return schedule, err
|
||||
return err
|
||||
}
|
||||
|
||||
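The schedule validation above accepts either a plain Go duration or a cron expression: a value that parses as a time.Duration is rewritten as "@every <duration>" before being checked with robfig/cron. A minimal standalone sketch of that rule; the helper name normalizeSchedule is illustrative, not the project's own API.

package main

import (
    "fmt"
    "time"

    "github.com/robfig/cron/v3"
)

// normalizeSchedule mimics the duration-to-cron rewrite used for the scan and backup schedules.
func normalizeSchedule(schedule string) (string, error) {
    if _, err := time.ParseDuration(schedule); err == nil {
        schedule = "@every " + schedule
    }
    c := cron.New()
    id, err := c.AddFunc(schedule, func() {})
    if err != nil {
        return "", fmt.Errorf("invalid schedule %q: %w", schedule, err)
    }
    c.Remove(id)
    return schedule, nil
}

func main() {
    fmt.Println(normalizeSchedule("1h"))        // "@every 1h", nil
    fmt.Println(normalizeSchedule("0 2 * * *")) // unchanged cron spec, nil
    fmt.Println(normalizeSchedule("bogus"))     // error
}
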
// AddHook is used to register initialization code that should run as soon as the config is loaded
|
||||
@@ -425,11 +261,11 @@ func init() {
|
||||
viper.SetDefault("cachefolder", "")
|
||||
viper.SetDefault("datafolder", ".")
|
||||
viper.SetDefault("loglevel", "info")
|
||||
viper.SetDefault("logfile", "")
|
||||
viper.SetDefault("address", "0.0.0.0")
|
||||
viper.SetDefault("port", 4533)
|
||||
viper.SetDefault("unixsocketperm", "0660")
|
||||
viper.SetDefault("sessiontimeout", consts.DefaultSessionTimeout)
|
||||
viper.SetDefault("scaninterval", -1)
|
||||
viper.SetDefault("scanschedule", "@every 1m")
|
||||
viper.SetDefault("baseurl", "")
|
||||
viper.SetDefault("tlscert", "")
|
||||
viper.SetDefault("tlskey", "")
|
||||
@@ -439,25 +275,20 @@ func init() {
|
||||
viper.SetDefault("enabletranscodingconfig", false)
|
||||
viper.SetDefault("transcodingcachesize", "100MB")
|
||||
viper.SetDefault("imagecachesize", "100MB")
|
||||
viper.SetDefault("albumplaycountmode", consts.AlbumPlayCountModeAbsolute)
|
||||
viper.SetDefault("enableartworkprecache", true)
|
||||
viper.SetDefault("autoimportplaylists", true)
|
||||
viper.SetDefault("defaultplaylistpublicvisibility", false)
|
||||
viper.SetDefault("playlistspath", "")
|
||||
viper.SetDefault("smartPlaylistRefreshDelay", 5*time.Second)
|
||||
viper.SetDefault("playlistspath", consts.DefaultPlaylistsPath)
|
||||
viper.SetDefault("enabledownloads", true)
|
||||
viper.SetDefault("enableexternalservices", true)
|
||||
viper.SetDefault("enablemediafilecoverart", true)
|
||||
viper.SetDefault("enableMediaFileCoverArt", true)
|
||||
viper.SetDefault("autotranscodedownload", false)
|
||||
viper.SetDefault("defaultdownsamplingformat", consts.DefaultDownsamplingFormat)
|
||||
viper.SetDefault("searchfullstring", false)
|
||||
viper.SetDefault("recentlyaddedbymodtime", false)
|
||||
viper.SetDefault("prefersorttags", false)
|
||||
viper.SetDefault("ignoredarticles", "The El La Los Las Le Les Os As O A")
|
||||
viper.SetDefault("indexgroups", "A B C D E F G H I J K L M N O P Q R S T U V W X-Z(XYZ) [Unknown]([)")
|
||||
viper.SetDefault("subsonicartistparticipations", false)
|
||||
viper.SetDefault("ffmpegpath", "")
|
||||
viper.SetDefault("mpvcmdtemplate", "mpv --audio-device=%d --no-audio-display --pause %f --input-ipc-server=%s")
|
||||
|
||||
viper.SetDefault("coverartpriority", "cover.*, folder.*, front.*, embedded, external")
|
||||
viper.SetDefault("coverjpegquality", 75)
|
||||
viper.SetDefault("artistartpriority", "artist.*, album/artist.*, external")
|
||||
@@ -470,11 +301,7 @@ func init() {
|
||||
viper.SetDefault("defaultuivolume", consts.DefaultUIVolume)
|
||||
viper.SetDefault("enablereplaygain", true)
|
||||
viper.SetDefault("enablecoveranimation", true)
|
||||
viper.SetDefault("enablesharing", false)
|
||||
viper.SetDefault("shareurl", "")
|
||||
viper.SetDefault("defaultdownloadableshare", false)
|
||||
viper.SetDefault("gatrackingid", "")
|
||||
viper.SetDefault("enableinsightscollector", true)
|
||||
viper.SetDefault("enablelogredacting", true)
|
||||
viper.SetDefault("authrequestlimit", 5)
|
||||
viper.SetDefault("authwindowlength", 20*time.Second)
|
||||
@@ -484,81 +311,41 @@ func init() {
|
||||
viper.SetDefault("reverseproxywhitelist", "")
|
||||
|
||||
viper.SetDefault("prometheus.enabled", false)
|
||||
viper.SetDefault("prometheus.metricspath", consts.PrometheusDefaultPath)
|
||||
viper.SetDefault("prometheus.password", "")
|
||||
viper.SetDefault("prometheus.metricspath", "/metrics")
|
||||
|
||||
viper.SetDefault("jukebox.enabled", false)
|
||||
viper.SetDefault("jukebox.devices", []AudioDeviceDefinition{})
|
||||
viper.SetDefault("jukebox.default", "")
|
||||
viper.SetDefault("jukebox.adminonly", true)
|
||||
|
||||
viper.SetDefault("scanner.enabled", true)
|
||||
viper.SetDefault("scanner.schedule", "0")
|
||||
viper.SetDefault("scanner.extractor", consts.DefaultScannerExtractor)
|
||||
viper.SetDefault("scanner.watcherwait", consts.DefaultWatcherWait)
|
||||
viper.SetDefault("scanner.scanonstartup", true)
|
||||
viper.SetDefault("scanner.artistjoiner", consts.ArtistJoiner)
|
||||
viper.SetDefault("scanner.genreseparators", "")
|
||||
viper.SetDefault("scanner.genreseparators", ";/,")
|
||||
viper.SetDefault("scanner.groupalbumreleases", false)
|
||||
viper.SetDefault("scanner.followsymlinks", true)
|
||||
|
||||
viper.SetDefault("subsonic.appendsubtitle", true)
|
||||
viper.SetDefault("subsonic.artistparticipations", false)
|
||||
viper.SetDefault("subsonic.defaultreportrealpath", false)
|
||||
viper.SetDefault("subsonic.legacyclients", "DSub")
|
||||
|
||||
viper.SetDefault("agents", "lastfm,spotify")
|
||||
viper.SetDefault("lastfm.enabled", true)
|
||||
viper.SetDefault("lastfm.language", "en")
|
||||
viper.SetDefault("lastfm.apikey", "")
|
||||
viper.SetDefault("lastfm.secret", "")
|
||||
viper.SetDefault("lastfm.apikey", consts.LastFMAPIKey)
|
||||
viper.SetDefault("lastfm.secret", consts.LastFMAPISecret)
|
||||
viper.SetDefault("spotify.id", "")
|
||||
viper.SetDefault("spotify.secret", "")
|
||||
viper.SetDefault("listenbrainz.enabled", true)
|
||||
viper.SetDefault("listenbrainz.baseurl", "https://api.listenbrainz.org/1/")
|
||||
|
||||
viper.SetDefault("httpsecurityheaders.customframeoptionsvalue", "DENY")
|
||||
|
||||
viper.SetDefault("backup.path", "")
|
||||
viper.SetDefault("backup.schedule", "")
|
||||
viper.SetDefault("backup.count", 0)
|
||||
|
||||
viper.SetDefault("pid.track", consts.DefaultTrackPID)
|
||||
viper.SetDefault("pid.album", consts.DefaultAlbumPID)
|
||||
|
||||
viper.SetDefault("inspect.enabled", true)
|
||||
viper.SetDefault("inspect.maxrequests", 1)
|
||||
viper.SetDefault("inspect.backloglimit", consts.RequestThrottleBacklogLimit)
|
||||
viper.SetDefault("inspect.backlogtimeout", consts.RequestThrottleBacklogTimeout)
|
||||
|
||||
viper.SetDefault("lyricspriority", ".lrc,.txt,embedded")
|
||||
|
||||
// DevFlags. These are used to enable/disable debugging and incomplete features
|
||||
viper.SetDefault("devlogsourceline", false)
|
||||
viper.SetDefault("devenableprofiler", false)
|
||||
viper.SetDefault("devautocreateadminpassword", "")
|
||||
viper.SetDefault("devautologinusername", "")
|
||||
viper.SetDefault("devactivitypanel", true)
|
||||
viper.SetDefault("devactivitypanelupdaterate", 300*time.Millisecond)
|
||||
viper.SetDefault("enablesharing", false)
|
||||
viper.SetDefault("defaultdownloadableshare", false)
|
||||
viper.SetDefault("devenablebufferedscrobble", true)
|
||||
viper.SetDefault("devsidebarplaylists", true)
|
||||
viper.SetDefault("devshowartistpage", true)
|
||||
viper.SetDefault("devoffsetoptimize", 50000)
|
||||
viper.SetDefault("devartworkmaxrequests", max(2, runtime.NumCPU()/3))
|
||||
viper.SetDefault("devartworkmaxrequests", number.Max(2, runtime.NumCPU()/3))
|
||||
viper.SetDefault("devartworkthrottlebackloglimit", consts.RequestThrottleBacklogLimit)
|
||||
viper.SetDefault("devartworkthrottlebacklogtimeout", consts.RequestThrottleBacklogTimeout)
|
||||
viper.SetDefault("devartistinfotimetolive", consts.ArtistInfoTimeToLive)
|
||||
viper.SetDefault("devalbuminfotimetolive", consts.AlbumInfoTimeToLive)
|
||||
viper.SetDefault("devexternalscanner", true)
|
||||
viper.SetDefault("devscannerthreads", 5)
|
||||
viper.SetDefault("devinsightsinitialdelay", consts.InsightsInitialDelay)
|
||||
viper.SetDefault("devenableplayerinsights", true)
|
||||
}
|
||||
|
||||
func InitConfig(cfgFile string) {
|
||||
codecRegistry := viper.NewCodecRegistry()
|
||||
_ = codecRegistry.RegisterCodec("ini", ini.Codec{})
|
||||
viper.SetOptions(viper.WithCodecRegistry(codecRegistry))
|
||||
|
||||
cfgFile = getConfigFile(cfgFile)
|
||||
if cfgFile != "" {
|
||||
// Use config file from the flag.
|
||||
@@ -582,17 +369,9 @@ func InitConfig(cfgFile string) {
|
||||
}
|
||||
}
|
||||
|
||||
// getConfigFile returns the path to the config file, either from the flag or from the environment variable.
|
||||
// If it is defined in the environment variable, it will check if the file exists.
|
||||
func getConfigFile(cfgFile string) string {
|
||||
if cfgFile != "" {
|
||||
return cfgFile
|
||||
}
|
||||
cfgFile = os.Getenv("ND_CONFIGFILE")
|
||||
if cfgFile != "" {
|
||||
if _, err := os.Stat(cfgFile); err == nil {
|
||||
return cfgFile
|
||||
}
|
||||
}
|
||||
return ""
|
||||
return os.Getenv("ND_CONFIGFILE")
|
||||
}
|
||||
|
||||
@@ -1,50 +0,0 @@
package conf_test

import (
    "fmt"
    "path/filepath"
    "testing"

    . "github.com/navidrome/navidrome/conf"
    . "github.com/onsi/ginkgo/v2"
    . "github.com/onsi/gomega"
    "github.com/spf13/viper"
)

func TestConfiguration(t *testing.T) {
    RegisterFailHandler(Fail)
    RunSpecs(t, "Configuration Suite")
}

var _ = Describe("Configuration", func() {
    BeforeEach(func() {
        // Reset viper configuration
        viper.Reset()
        viper.SetDefault("datafolder", GinkgoT().TempDir())
        viper.SetDefault("loglevel", "error")
        ResetConf()
    })

    DescribeTable("should load configuration from",
        func(format string) {
            filename := filepath.Join("testdata", "cfg."+format)

            // Initialize config with the test file
            InitConfig(filename)
            // Load the configuration (with noConfigDump=true)
            Load(true)

            // Execute the format-specific assertions
            Expect(Server.MusicFolder).To(Equal(fmt.Sprintf("/%s/music", format)))
            Expect(Server.UIWelcomeMessage).To(Equal("Welcome " + format))
            Expect(Server.Tags["custom"].Aliases).To(Equal([]string{format, "test"}))

            // The config file used should be the one we created
            Expect(Server.ConfigFile).To(Equal(filename))
        },
        Entry("TOML format", "toml"),
        Entry("YAML format", "yaml"),
        Entry("INI format", "ini"),
        Entry("JSON format", "json"),
    )
})
@@ -1,5 +0,0 @@
package conf

func ResetConf() {
    Server = &configOptions{}
}
@@ -1,48 +0,0 @@
package mime

import (
    "mime"
    "strings"

    "github.com/navidrome/navidrome/conf"
    "github.com/navidrome/navidrome/log"
    "github.com/navidrome/navidrome/resources"
    "gopkg.in/yaml.v3"
)

type mimeConf struct {
    Types    map[string]string `yaml:"types"`
    Lossless []string          `yaml:"lossless"`
}

var LosslessFormats []string

func initMimeTypes() {
    // In some circumstances, Windows sets JS mime-type to `text/plain`!
    _ = mime.AddExtensionType(".js", "text/javascript")
    _ = mime.AddExtensionType(".css", "text/css")
    _ = mime.AddExtensionType(".webmanifest", "application/manifest+json")

    f, err := resources.FS().Open("mime_types.yaml")
    if err != nil {
        log.Fatal("Fatal error opening mime_types.yaml", err)
    }
    defer f.Close()

    var mimeConf mimeConf
    err = yaml.NewDecoder(f).Decode(&mimeConf)
    if err != nil {
        log.Fatal("Fatal error parsing mime_types.yaml", err)
    }
    for ext, typ := range mimeConf.Types {
        _ = mime.AddExtensionType(ext, typ)
    }

    for _, ext := range mimeConf.Lossless {
        LosslessFormats = append(LosslessFormats, strings.TrimPrefix(ext, "."))
    }
}

func init() {
    conf.AddHook(initMimeTypes)
}
6 conf/testdata/cfg.ini vendored
@@ -1,6 +0,0 @@
[default]
MusicFolder = /ini/music
UIWelcomeMessage = Welcome ini

[Tags]
Custom.Aliases = ini,test

12 conf/testdata/cfg.json vendored
@@ -1,12 +0,0 @@
{
  "musicFolder": "/json/music",
  "uiWelcomeMessage": "Welcome json",
  "Tags": {
    "custom": {
      "aliases": [
        "json",
        "test"
      ]
    }
  }
}

5 conf/testdata/cfg.toml vendored
@@ -1,5 +0,0 @@
musicFolder = "/toml/music"
uiWelcomeMessage = "Welcome toml"

[Tags.custom]
aliases = ["toml", "test"]
Some files were not shown because too many files have changed in this diff.