Mirror of https://github.com/navidrome/navidrome.git (synced 2026-02-05 20:41:07 -05:00)

Compare commits: fix-parent...new-api (22 commits)

| SHA1 |
|---|
| 7cf98af9da |
| 40f7170930 |
| 4a8f176f0d |
| 4507895d58 |
| 90c2d7d1cf |
| f62231f728 |
| fd1e06049f |
| 7dfe29af5e |
| 8e03d7d013 |
| 960415ed95 |
| ea231fe265 |
| 09e52eba87 |
| 2650e4c27c |
| 3c0f23e3f2 |
| 250b6dbb33 |
| be945e010a |
| 9308127342 |
| 141edb881e |
| 20a1e3160b |
| d4c458d193 |
| ed87e703ff |
| dcb5725642 |
@@ -2,28 +2,19 @@
# [Choice] Go version: 1, 1.15, 1.14
ARG VARIANT="1"
FROM mcr.microsoft.com/vscode/devcontainers/go:${VARIANT}
FROM mcr.microsoft.com/vscode/devcontainers/go:0-${VARIANT}

# [Option] Install Node.js
ARG INSTALL_NODE="true"
ARG NODE_VERSION="lts/*"
RUN if [ "${INSTALL_NODE}" = "true" ]; then su vscode -c "source /usr/local/share/nvm/nvm.sh && nvm install ${NODE_VERSION} 2>&1"; fi

# Install additional OS packages
# [Optional] Uncomment this section to install additional OS packages.
RUN apt-get update && export DEBIAN_FRONTEND=noninteractive \
&& apt-get -y install --no-install-recommends ffmpeg
&& apt-get -y install --no-install-recommends libtag1-dev ffmpeg

# Install TagLib from cross-taglib releases
ARG CROSS_TAGLIB_VERSION="2.1.1-1"
ARG TARGETARCH
RUN DOWNLOAD_ARCH="linux-${TARGETARCH}" \
&& wget -q "https://github.com/navidrome/cross-taglib/releases/download/v${CROSS_TAGLIB_VERSION}/taglib-${DOWNLOAD_ARCH}.tar.gz" -O /tmp/cross-taglib.tar.gz \
&& tar -xzf /tmp/cross-taglib.tar.gz -C /usr --strip-components=1 \
&& mv /usr/include/taglib/* /usr/include/ \
&& rmdir /usr/include/taglib \
&& rm /tmp/cross-taglib.tar.gz /usr/provenance.json

ENV CGO_CFLAGS_ALLOW="--define-prefix"
# [Optional] Uncomment the next line to use go get to install anything else you need
# RUN go get -x <your-dependency-or-tool>

# [Optional] Uncomment this line to install global node packages.
# RUN su vscode -c "source /usr/local/share/nvm/nvm.sh && npm install -g <your-package-here>" 2>&1
# RUN su vscode -c "source /usr/local/share/nvm/nvm.sh && npm install -g <your-package-here>" 2>&1
@@ -4,11 +4,10 @@
"dockerfile": "Dockerfile",
"args": {
// Update the VARIANT arg to pick a version of Go: 1, 1.15, 1.14
"VARIANT": "1.25",
"VARIANT": "1.20",
// Options
"INSTALL_NODE": "true",
"NODE_VERSION": "v24",
"CROSS_TAGLIB_VERSION": "2.1.1-1"
"NODE_VERSION": "v18"
}
},
"workspaceMount": "",
@@ -19,46 +18,44 @@
"--volume=${localWorkspaceFolder}:/workspaces/${localWorkspaceFolderBasename}:Z"
],
// Set *default* container specific settings.json values on container create.
"customizations": {
"vscode": {
"settings": {
"terminal.integrated.shell.linux": "/bin/bash",
"go.useGoProxyToCheckForToolUpdates": false,
"go.useLanguageServer": true,
"go.gopath": "/go",
"go.goroot": "/usr/local/go",
"go.toolsGopath": "/go/bin",
"go.formatTool": "goimports",
"go.lintOnSave": "package",
"go.lintTool": "golangci-lint",
"editor.formatOnSave": true,
"[javascript]": {
"editor.defaultFormatter": "esbenp.prettier-vscode"
},
"[json]": {
"editor.defaultFormatter": "esbenp.prettier-vscode"
},
"[jsonc]": {
"editor.defaultFormatter": "vscode.json-language-features"
}
},
// Add the IDs of extensions you want installed when the container is created.
"extensions": [
"golang.Go",
"esbenp.prettier-vscode",
"tamasfe.even-better-toml"
]
"settings": {
"terminal.integrated.shell.linux": "/bin/bash",
"go.useGoProxyToCheckForToolUpdates": false,
"go.useLanguageServer": true,
"go.gopath": "/go",
"go.goroot": "/usr/local/go",
"go.toolsGopath": "/go/bin",
"go.formatTool": "goimports",
"go.lintOnSave": "package",
"go.lintTool": "golangci-lint",
"editor.formatOnSave": true,
"[javascript]": {
"editor.defaultFormatter": "esbenp.prettier-vscode"
},
"[json]": {
"editor.defaultFormatter": "esbenp.prettier-vscode"
},
"[jsonc]": {
"editor.defaultFormatter": "vscode.json-language-features"
}
},
// Add the IDs of extensions you want installed when the container is created.
"extensions": [
"golang.Go",
"esbenp.prettier-vscode",
"tamasfe.even-better-toml"
],
// Use 'forwardPorts' to make a list of ports inside the container available locally.
"forwardPorts": [
4533,
4633
],
// Use 'postCreateCommand' to run commands after the container is created.
// "postCreateCommand": "make setup-dev",
// Comment out connect as root instead. More info: https://aka.ms/vscode-remote/containers/non-root.
"remoteUser": "vscode",
"remoteEnv": {
"ND_MUSICFOLDER": "./music",
"ND_DATAFOLDER": "./data"
}
}
}
@@ -1,18 +1,10 @@
.DS_Store
ui/node_modules
ui/build
!ui/build/.gitkeep
Dockerfile
docker-compose*.yml
data
*.db
testDB
navidrome
navidrome.db
navidrome.toml
tmp
!tmp/taglib
dist
binaries
cache
music
!Dockerfile
23 .github/actions/download-taglib/action.yml vendored
@@ -1,23 +0,0 @@
name: 'Download TagLib'
description: 'Downloads and extracts the TagLib library, adding it to PKG_CONFIG_PATH'
inputs:
version:
description: 'Version of TagLib to download'
required: true
platform:
description: 'Platform to download TagLib for'
default: 'linux-amd64'
runs:
using: 'composite'
steps:
- name: Download TagLib
shell: bash
run: |
mkdir -p /tmp/taglib
cd /tmp
FILE=taglib-${{ inputs.platform }}.tar.gz
wget https://github.com/navidrome/cross-taglib/releases/download/v${{ inputs.version }}/${FILE}
tar -xzf ${FILE} -C taglib
PKG_CONFIG_PREFIX=/tmp/taglib
echo "PKG_CONFIG_PREFIX=${PKG_CONFIG_PREFIX}" >> $GITHUB_ENV
echo "PKG_CONFIG_PATH=${PKG_CONFIG_PATH}:${PKG_CONFIG_PREFIX}/lib/pkgconfig" >> $GITHUB_ENV
84 .github/actions/prepare-docker/action.yml vendored
@@ -1,84 +0,0 @@
name: 'Prepare Docker Buildx environment'
description: 'Downloads and extracts the TagLib library, adding it to PKG_CONFIG_PATH'
inputs:
github_token:
description: 'GitHub token'
required: true
default: ''
hub_repository:
description: 'Docker Hub repository to push images to'
required: false
default: ''
hub_username:
description: 'Docker Hub username'
required: false
default: ''
hub_password:
description: 'Docker Hub password'
required: false
default: ''
outputs:
tags:
description: 'Docker image tags'
value: ${{ steps.meta.outputs.tags }}
labels:
description: 'Docker image labels'
value: ${{ steps.meta.outputs.labels }}
annotations:
description: 'Docker image annotations'
value: ${{ steps.meta.outputs.annotations }}
version:
description: 'Docker image version'
value: ${{ steps.meta.outputs.version }}
hub_repository:
description: 'Docker Hub repository'
value: ${{ env.DOCKER_HUB_REPO }}
hub_enabled:
description: 'Is Docker Hub enabled'
value: ${{ env.DOCKER_HUB_ENABLED }}

runs:
using: 'composite'
steps:
- name: Check Docker Hub configuration
shell: bash
run: |
if [ -z "${{inputs.hub_repository}}" ]; then
echo "DOCKER_HUB_REPO=none" >> $GITHUB_ENV
echo "DOCKER_HUB_ENABLED=false" >> $GITHUB_ENV
else
echo "DOCKER_HUB_REPO=${{inputs.hub_repository}}" >> $GITHUB_ENV
echo "DOCKER_HUB_ENABLED=true" >> $GITHUB_ENV
fi

- name: Login to Docker Hub
if: inputs.hub_username != '' && inputs.hub_password != ''
uses: docker/login-action@v3
with:
username: ${{ inputs.hub_username }}
password: ${{ inputs.hub_password }}

- name: Login to GitHub Container Registry
uses: docker/login-action@v3
with:
registry: ghcr.io
username: ${{ github.actor }}
password: ${{ inputs.github_token }}

- name: Set up Docker Buildx
id: buildx
uses: docker/setup-buildx-action@v3

- name: Extract metadata for Docker image
id: meta
uses: docker/metadata-action@v5
with:
labels: |
maintainer=deluan@navidrome.org
images: |
name=${{env.DOCKER_HUB_REPO}},enable=${{env.DOCKER_HUB_ENABLED}}
name=ghcr.io/${{ github.repository }}
tags: |
type=ref,event=pr
type=semver,pattern={{version}}
type=raw,value=develop,enable={{is_default_branch}}
10 .github/dependabot.yml vendored
@@ -10,13 +10,3 @@ updates:
schedule:
interval: weekly
open-pull-requests-limit: 10
- package-ecosystem: docker
directory: "/"
schedule:
interval: weekly
open-pull-requests-limit: 10
- package-ecosystem: github-actions
directory: "/.github/workflows"
schedule:
interval: weekly
open-pull-requests-limit: 10
38 .github/pull_request_template.md vendored
@@ -1,38 +0,0 @@
### Description
<!-- Please provide a clear and concise description of what this PR does and why it is needed. -->

### Related Issues
<!-- List any related issues, e.g., "Fixes #123" or "Related to #456". -->

### Type of Change
- [ ] Bug fix
- [ ] New feature
- [ ] Documentation update
- [ ] Refactor
- [ ] Other (please describe):

### Checklist
Please review and check all that apply:

- [ ] My code follows the project’s coding style
- [ ] I have tested the changes locally
- [ ] I have added or updated documentation as needed
- [ ] I have added tests that prove my fix/feature works (or explain why not)
- [ ] All existing and new tests pass

### How to Test
<!-- Describe the steps to test your changes. Include setup, commands, and expected results. -->

### Screenshots / Demos (if applicable)
<!-- Add screenshots, GIFs, or links to demos if your change includes UI updates or visual changes. -->

### Additional Notes
<!-- Anything else the maintainer should know? Potential side effects, breaking changes, or areas of concern? -->

<!--
**Tips for Contributors:**
- Be concise but thorough.
- If your PR is large, consider breaking it into smaller PRs.
- Tag the maintainer if you need a prompt review.
- Avoid force pushing to the branch after opening the PR, as it can complicate the review process.
-->
38 .github/workflows/pipeline.dockerfile vendored Normal file
@@ -0,0 +1,38 @@
#####################################################
### Copy platform specific binary
FROM bash as copy-binary
ARG TARGETPLATFORM

RUN echo "Target Platform = ${TARGETPLATFORM}"

COPY dist .
RUN if [ "$TARGETPLATFORM" = "linux/amd64" ]; then cp navidrome_linux_amd64_linux_amd64_v1/navidrome /navidrome; fi
RUN if [ "$TARGETPLATFORM" = "linux/386" ]; then cp navidrome_linux_386_linux_386/navidrome /navidrome; fi
RUN if [ "$TARGETPLATFORM" = "linux/arm64" ]; then cp navidrome_linux_arm64_linux_arm64/navidrome /navidrome; fi
RUN if [ "$TARGETPLATFORM" = "linux/arm/v6" ]; then cp navidrome_linux_arm_linux_arm_6/navidrome /navidrome; fi
RUN if [ "$TARGETPLATFORM" = "linux/arm/v7" ]; then cp navidrome_linux_arm_linux_arm_7/navidrome /navidrome; fi
RUN chmod +x /navidrome

#####################################################
### Build Final Image
FROM alpine as release
LABEL maintainer="deluan@navidrome.org"

# Install ffmpeg and output build config
RUN apk add --no-cache ffmpeg
RUN ffmpeg -buildconf

COPY --from=copy-binary /navidrome /app/

VOLUME ["/data", "/music"]
ENV ND_MUSICFOLDER /music
ENV ND_DATAFOLDER /data
ENV ND_PORT 4533
ENV GODEBUG "asyncpreemptoff=1"

EXPOSE ${ND_PORT}
HEALTHCHECK CMD wget -O- http://localhost:${ND_PORT}/ping || exit 1
WORKDIR /app

ENTRYPOINT ["/app/navidrome"]
558 .github/workflows/pipeline.yml vendored
@@ -1,4 +1,4 @@
name: "Pipeline: Test, Lint, Build"
name: 'Pipeline: Test, Lint, Build'
on:
push:
branches:
@@ -9,135 +9,103 @@ on:
branches:
- master

concurrency:
group: ${{ startsWith(github.ref, 'refs/tags/v') && 'tag' || 'branch' }}-${{ github.ref }}
cancel-in-progress: true

env:
CROSS_TAGLIB_VERSION: "2.1.1-2"
CGO_CFLAGS_ALLOW: "--define-prefix"
IS_RELEASE: ${{ startsWith(github.ref, 'refs/tags/') && 'true' || 'false' }}

jobs:
git-version:
name: Get version info
runs-on: ubuntu-latest
outputs:
git_tag: ${{ steps.git-version.outputs.GIT_TAG }}
git_sha: ${{ steps.git-version.outputs.GIT_SHA }}
steps:
- uses: actions/checkout@v6
with:
fetch-depth: 0
fetch-tags: true

- name: Show git version info
run: |
echo "git describe (dirty): $(git describe --dirty --always --tags)"
echo "git describe --tags: $(git describe --tags `git rev-list --tags --max-count=1`)"
echo "git tag: $(git tag --sort=-committerdate | head -n 1)"
echo "github_ref: $GITHUB_REF"
echo "github_head_sha: ${{ github.event.pull_request.head.sha }}"
git tag -l
- name: Determine git current SHA and latest tag
id: git-version
run: |
GIT_TAG=$(git tag --sort=-committerdate | head -n 1)
if [ -n "$GIT_TAG" ]; then
if [[ "$GITHUB_REF" != refs/tags/* ]]; then
GIT_TAG=${GIT_TAG}-SNAPSHOT
fi
echo "GIT_TAG=$GIT_TAG" >> $GITHUB_OUTPUT
fi
GIT_SHA=$(git rev-parse --short HEAD)
PR_NUM=$(jq --raw-output .pull_request.number "$GITHUB_EVENT_PATH")
if [[ $PR_NUM != "null" ]]; then
GIT_SHA=$(echo "${{ github.event.pull_request.head.sha }}" | cut -c1-8)
GIT_SHA="pr-${PR_NUM}/${GIT_SHA}"
fi
echo "GIT_SHA=$GIT_SHA" >> $GITHUB_OUTPUT

echo "GIT_TAG=$GIT_TAG"
echo "GIT_SHA=$GIT_SHA"

go-lint:
name: Lint Go code
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v6
- name: Install taglib
run: sudo apt-get install libtag1-dev

- name: Download TagLib
uses: ./.github/actions/download-taglib
- name: Set up Go 1.20
uses: actions/setup-go@v3
with:
version: ${{ env.CROSS_TAGLIB_VERSION }}
go-version: 1.20.x

- uses: actions/checkout@v3

- name: golangci-lint
uses: golangci/golangci-lint-action@v9
uses: golangci/golangci-lint-action@v3
with:
version: latest
problem-matchers: true
github-token: ${{ secrets.GITHUB_TOKEN }}
args: --timeout 2m

- name: Run go goimports
run: go run golang.org/x/tools/cmd/goimports@latest -w `find . -name '*.go' | grep -v '_gen.go$' | grep -v '.pb.go$'`
- name: Install goimports
run: go install golang.org/x/tools/cmd/goimports

- run: goimports -w `find . -name '*.go' | grep -v '_gen.go$'`
- run: go mod tidy
- name: Verify no changes from goimports and go mod tidy
run: |
git status --porcelain
if [ -n "$(git status --porcelain)" ]; then
echo 'To fix this check, run "make format" and commit the changes'
echo 'To fix this check, run "goimports -w $(find . -name '*.go' | grep -v '_gen.go$') && go mod tidy"'
exit 1
fi

- name: Run go generate
run: go generate ./...
- name: Verify no changes from go generate
- uses: actions/setup-node@v3
with:
node-version: 18
cache: 'npm'
cache-dependency-path: '**/package-lock.json'

- name: Build and Lint OpenAPI spec
run: make lintapi gen

- name: Verify no changes
run: |
git status --porcelain
if [ -n "$(git status --porcelain)" ]; then
echo 'Generated code is out of date. Run "make gen" and commit the changes'
echo 'Changes to OpenAPI spec caused changes to the code. Please review and commit the changes.'
exit 1
fi

go:
name: Test Go code
runs-on: ubuntu-latest
steps:
- name: Check out code into the Go module directory
uses: actions/checkout@v6

- name: Download TagLib
uses: ./.github/actions/download-taglib
- uses: actions/upload-artifact@v3
with:
version: ${{ env.CROSS_TAGLIB_VERSION }}
name: openapi.yaml
path: api/openapi.yaml

go:
name: Test with Go ${{ matrix.go_version }}
runs-on: ubuntu-latest
strategy:
matrix:
go_version: [1.20.x,1.19.x]
steps:
- name: Install taglib
run: sudo apt-get install libtag1-dev

- name: Check out code into the Go module directory
uses: actions/checkout@v3

- name: Set up Go ${{ matrix.go_version }}
uses: actions/setup-go@v3
with:
go-version: ${{ matrix.go_version }}
cache: true

- name: Download dependencies
if: steps.cache-go.outputs.cache-hit != 'true'
continue-on-error: ${{contains(matrix.go_version, 'beta') || contains(matrix.go_version, 'rc')}}
run: go mod download

- name: Test
run: |
pkg-config --define-prefix --cflags --libs taglib # for debugging
go test -shuffle=on -tags netgo -race ./... -v

- name: Test ndpgen
run: |
cd plugins/cmd/ndpgen
go test -shuffle=on -v
go build -o ndpgen .
./ndpgen --help
continue-on-error: ${{contains(matrix.go_version, 'beta') || contains(matrix.go_version, 'rc')}}
run: go test -shuffle=on -race -cover ./... -v

js:
name: Test JS code
name: Build JS bundle
runs-on: ubuntu-latest
env:
NODE_OPTIONS: "--max_old_space_size=4096"
NODE_OPTIONS: '--max_old_space_size=4096'
steps:
- uses: actions/checkout@v6
- uses: actions/setup-node@v6
- uses: actions/checkout@v3
- uses: actions/setup-node@v3
with:
node-version: 24
cache: "npm"
cache-dependency-path: "**/package-lock.json"
node-version: 18
cache: 'npm'
cache-dependency-path: '**/package-lock.json'

- name: npm install dependencies
run: |
@@ -159,327 +127,119 @@ jobs:
cd ui
npm run build

i18n-lint:
name: Lint i18n files
- uses: actions/upload-artifact@v3
with:
name: js-bundle
path: ui/build

binaries:
name: Build binaries
needs: [js, go, go-lint]
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v6
- run: |
set -e
for file in resources/i18n/*.json; do
echo "Validating $file"
if ! jq empty "$file" 2>error.log; then
error_message=$(cat error.log)
line_number=$(echo "$error_message" | grep -oP 'line \K[0-9]+')
echo "::error file=$file,line=$line_number::$error_message"
exit 1
fi
done
- run: ./.github/workflows/validate-translations.sh -v

check-push-enabled:
name: Check Docker configuration
runs-on: ubuntu-latest
outputs:
is_enabled: ${{ steps.check.outputs.is_enabled }}
steps:
- name: Check if Docker push is configured
id: check
run: echo "is_enabled=${{ secrets.DOCKER_HUB_USERNAME != '' }}" >> $GITHUB_OUTPUT

build:
name: Build
needs: [js, go, go-lint, i18n-lint, git-version, check-push-enabled]
strategy:
matrix:
platform: [ linux/amd64, linux/arm64, linux/arm/v5, linux/arm/v6, linux/arm/v7, linux/386, linux/riscv64, darwin/amd64, darwin/arm64, windows/amd64, windows/386 ]
runs-on: ubuntu-latest
env:
IS_LINUX: ${{ startsWith(matrix.platform, 'linux/') && 'true' || 'false' }}
IS_ARMV5: ${{ matrix.platform == 'linux/arm/v5' && 'true' || 'false' }}
IS_DOCKER_PUSH_CONFIGURED: ${{ needs.check-push-enabled.outputs.is_enabled == 'true' }}
DOCKER_BUILD_SUMMARY: false
GIT_SHA: ${{ needs.git-version.outputs.git_sha }}
GIT_TAG: ${{ needs.git-version.outputs.git_tag }}
steps:
- name: Sanitize platform name
id: set-platform
run: |
PLATFORM=$(echo ${{ matrix.platform }} | tr '/' '_')
echo "PLATFORM=$PLATFORM" >> $GITHUB_ENV

- uses: actions/checkout@v6

- name: Prepare Docker Buildx
uses: ./.github/actions/prepare-docker
id: docker
with:
github_token: ${{ secrets.GITHUB_TOKEN }}
hub_repository: ${{ vars.DOCKER_HUB_REPO }}
hub_username: ${{ secrets.DOCKER_HUB_USERNAME }}
hub_password: ${{ secrets.DOCKER_HUB_PASSWORD }}

- name: Build Binaries
uses: docker/build-push-action@v6
with:
context: .
file: Dockerfile
platforms: ${{ matrix.platform }}
outputs: |
type=local,dest=./output/${{ env.PLATFORM }}
target: binary
build-args: |
GIT_SHA=${{ env.GIT_SHA }}
GIT_TAG=${{ env.GIT_TAG }}
CROSS_TAGLIB_VERSION=${{ env.CROSS_TAGLIB_VERSION }}

- name: Upload Binaries
uses: actions/upload-artifact@v6
with:
name: navidrome-${{ env.PLATFORM }}
path: ./output
retention-days: 7

- name: Build and push image by digest
id: push-image
if: env.IS_LINUX == 'true' && env.IS_DOCKER_PUSH_CONFIGURED == 'true' && env.IS_ARMV5 == 'false'
uses: docker/build-push-action@v6
with:
context: .
file: Dockerfile
platforms: ${{ matrix.platform }}
labels: ${{ steps.docker.outputs.labels }}
build-args: |
GIT_SHA=${{ env.GIT_SHA }}
GIT_TAG=${{ env.GIT_TAG }}
CROSS_TAGLIB_VERSION=${{ env.CROSS_TAGLIB_VERSION }}
outputs: |
type=image,name=${{ steps.docker.outputs.hub_repository }},push-by-digest=true,name-canonical=true,push=${{ steps.docker.outputs.hub_enabled }}
type=image,name=ghcr.io/${{ github.repository }},push-by-digest=true,name-canonical=true,push=true

- name: Export digest
if: env.IS_LINUX == 'true' && env.IS_DOCKER_PUSH_CONFIGURED == 'true' && env.IS_ARMV5 == 'false'
run: |
mkdir -p /tmp/digests
digest="${{ steps.push-image.outputs.digest }}"
touch "/tmp/digests/${digest#sha256:}"

- name: Upload digest
uses: actions/upload-artifact@v6
if: env.IS_LINUX == 'true' && env.IS_DOCKER_PUSH_CONFIGURED == 'true' && env.IS_ARMV5 == 'false'
with:
name: digests-${{ env.PLATFORM }}
path: /tmp/digests/*
if-no-files-found: error
retention-days: 1

push-manifest-ghcr:
name: Push to GHCR
permissions:
contents: read
packages: write
runs-on: ubuntu-latest
needs: [build, check-push-enabled]
if: needs.check-push-enabled.outputs.is_enabled == 'true'
env:
REGISTRY_IMAGE: ghcr.io/${{ github.repository }}
steps:
- uses: actions/checkout@v6

- name: Download digests
uses: actions/download-artifact@v7
with:
path: /tmp/digests
pattern: digests-*
merge-multiple: true

- name: Prepare Docker Buildx
uses: ./.github/actions/prepare-docker
id: docker
with:
github_token: ${{ secrets.GITHUB_TOKEN }}

- name: Create manifest list and push to ghcr.io
working-directory: /tmp/digests
run: |
docker buildx imagetools create $(jq -cr '.tags | map(select(startswith("ghcr.io"))) | map("-t " + .) | join(" ")' <<< "$DOCKER_METADATA_OUTPUT_JSON") \
$(printf '${{ env.REGISTRY_IMAGE }}@sha256:%s ' *)

- name: Inspect image in ghcr.io
run: |
docker buildx imagetools inspect ${{ env.REGISTRY_IMAGE }}:${{ steps.docker.outputs.version }}

push-manifest-dockerhub:
name: Push to Docker Hub
runs-on: ubuntu-latest
permissions:
contents: read
needs: [build, check-push-enabled]
if: needs.check-push-enabled.outputs.is_enabled == 'true' && vars.DOCKER_HUB_REPO != ''
continue-on-error: true
steps:
- uses: actions/checkout@v6

- name: Download digests
uses: actions/download-artifact@v7
with:
path: /tmp/digests
pattern: digests-*
merge-multiple: true

- name: Prepare Docker Buildx
uses: ./.github/actions/prepare-docker
id: docker
with:
github_token: ${{ secrets.GITHUB_TOKEN }}
hub_repository: ${{ vars.DOCKER_HUB_REPO }}
hub_username: ${{ secrets.DOCKER_HUB_USERNAME }}
hub_password: ${{ secrets.DOCKER_HUB_PASSWORD }}

- name: Create manifest list and push to Docker Hub
uses: nick-fields/retry@v3
with:
timeout_minutes: 5
max_attempts: 3
retry_wait_seconds: 30
command: |
cd /tmp/digests
docker buildx imagetools create $(jq -cr '.tags | map(select(startswith("ghcr.io") | not)) | map("-t " + .) | join(" ")' <<< "$DOCKER_METADATA_OUTPUT_JSON") \
$(printf 'ghcr.io/${{ github.repository }}@sha256:%s ' *)

- name: Inspect image in Docker Hub
run: |
docker buildx imagetools inspect ${{ vars.DOCKER_HUB_REPO }}:${{ steps.docker.outputs.version }}

cleanup-digests:
name: Cleanup digest artifacts
runs-on: ubuntu-latest
needs: [push-manifest-ghcr, push-manifest-dockerhub]
if: always() && needs.push-manifest-ghcr.result == 'success'
steps:
- name: Delete unnecessary digest artifacts
env:
GH_TOKEN: ${{ github.token }}
run: |
for artifact in $(gh api repos/${{ github.repository }}/actions/artifacts | jq -r '.artifacts[] | select(.name | startswith("digests-")) | .id'); do
gh api --method DELETE repos/${{ github.repository }}/actions/artifacts/$artifact
done

msi:
name: Build Windows installers
needs: [build, git-version]
runs-on: ubuntu-24.04

steps:
- uses: actions/checkout@v6

- uses: actions/download-artifact@v7
with:
path: ./binaries
pattern: navidrome-windows*
merge-multiple: true

- name: Install Wix
run: sudo apt-get install -y wixl jq

- name: Build MSI
env:
GIT_TAG: ${{ needs.git-version.outputs.git_tag }}
run: |
rm -rf binaries/msi
sudo GIT_TAG=$GIT_TAG release/wix/build_msi.sh ${GITHUB_WORKSPACE} 386
sudo GIT_TAG=$GIT_TAG release/wix/build_msi.sh ${GITHUB_WORKSPACE} amd64
du -h binaries/msi/*.msi

- name: Upload MSI files
uses: actions/upload-artifact@v6
with:
name: navidrome-windows-installers
path: binaries/msi/*.msi
retention-days: 7

release:
name: Package/Release
needs: [build, msi]
runs-on: ubuntu-latest
outputs:
package_list: ${{ steps.set-package-list.outputs.package_list }}
steps:
- uses: actions/checkout@v6
- name: Checkout Code
uses: actions/checkout@v3
with:
fetch-depth: 0
fetch-tags: true

- uses: actions/download-artifact@v7
- uses: actions/download-artifact@v3
with:
path: ./binaries
pattern: navidrome-*
merge-multiple: true
name: js-bundle
path: ui/build

- run: ls -lR ./binaries

- name: Set RELEASE_FLAGS for snapshot releases
if: env.IS_RELEASE == 'false'
run: echo 'RELEASE_FLAGS=--skip=publish --snapshot' >> $GITHUB_ENV

- name: Run GoReleaser
uses: goreleaser/goreleaser-action@v6
with:
version: '~> v2'
args: "release --clean -f release/goreleaser.yml ${{ env.RELEASE_FLAGS }}"
- name: Config /github/workspace folder as trusted
uses: docker://deluan/ci-goreleaser:1.20.3-1
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}

- name: Remove build artifacts
run: |
ls -l ./dist
rm ./dist/*.tar.gz ./dist/*.zip

- name: Upload all-packages artifact
uses: actions/upload-artifact@v6
with:
name: packages
path: dist/navidrome_0*
args: /bin/bash -c "git config --global --add safe.directory /github/workspace; git describe --dirty --always --tags"

- id: set-package-list
name: Export list of generated packages
run: |
cd dist
set +x
ITEMS=$(ls navidrome_0* | sed 's/^navidrome_0[^_]*_linux_//' | jq -R -s -c 'split("\n")[:-1]')
echo $ITEMS
echo "package_list=${ITEMS}" >> $GITHUB_OUTPUT
- name: Run GoReleaser - SNAPSHOT
if: startsWith(github.ref, 'refs/tags/') != true
uses: docker://deluan/ci-goreleaser:1.20.3-1
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
with:
args: goreleaser release --rm-dist --skip-publish --snapshot

upload-packages:
name: Upload Linux PKG
- name: Run GoReleaser - RELEASE
if: startsWith(github.ref, 'refs/tags/')
uses: docker://deluan/ci-goreleaser:1.20.3-1
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
with:
args: goreleaser release --rm-dist

- uses: actions/upload-artifact@v3
with:
name: binaries
path: |
dist
!dist/*.tar.gz
!dist/*.zip

docker:
name: Build and publish Docker images
needs: [binaries]
runs-on: ubuntu-latest
needs: [release]
strategy:
matrix:
item: ${{ fromJson(needs.release.outputs.package_list) }}
env:
DOCKER_IMAGE: ${{secrets.DOCKER_IMAGE}}
steps:
- name: Download all-packages artifact
uses: actions/download-artifact@v7
with:
name: packages
path: ./dist
- name: Set up QEMU
id: qemu
uses: docker/setup-qemu-action@v2
if: env.DOCKER_IMAGE != ''

- name: Upload all-packages artifact
uses: actions/upload-artifact@v6
with:
name: navidrome_linux_${{ matrix.item }}
path: dist/navidrome_0*_linux_${{ matrix.item }}
- name: Set up Docker Buildx
id: buildx
uses: docker/setup-buildx-action@v2
if: env.DOCKER_IMAGE != ''

# delete-artifacts:
#   name: Delete unused artifacts
#   runs-on: ubuntu-latest
#   needs: [upload-packages]
#   steps:
#     - name: Delete all-packages artifact
#       env:
#         GH_TOKEN: ${{ github.token }}
#       run: |
#         for artifact in $(gh api repos/${{ github.repository }}/actions/artifacts | jq -r '.artifacts[] | select(.name | startswith("packages")) | .id'); do
#           gh api --method DELETE repos/${{ github.repository }}/actions/artifacts/$artifact
#         done
- uses: actions/checkout@v3
if: env.DOCKER_IMAGE != ''

- uses: actions/download-artifact@v3
if: env.DOCKER_IMAGE != ''
with:
name: binaries
path: dist

- name: Login to Docker Hub
if: env.DOCKER_IMAGE != ''
uses: docker/login-action@v2
with:
username: ${{ secrets.DOCKER_USERNAME }}
password: ${{ secrets.DOCKER_PASSWORD }}

- name: Login to GitHub Container Registry
if: env.DOCKER_IMAGE != ''
uses: docker/login-action@v2
with:
registry: ghcr.io
username: ${{ github.actor }}
password: ${{ secrets.GITHUB_TOKEN }}

- name: Extract metadata for Docker
if: env.DOCKER_IMAGE != ''
id: meta
uses: docker/metadata-action@v4
with:
labels: |
maintainer=deluan
images: |
name=${{secrets.DOCKER_IMAGE}},enable=${{env.GITHUB_REF_TYPE == 'tag' || github.ref == format('refs/heads/{0}', 'master')}}
name=ghcr.io/${{ github.repository }}
tags: |
type=ref,event=pr
type=semver,pattern={{version}}
type=raw,value=develop,enable={{is_default_branch}}

- name: Build and Push
if: env.DOCKER_IMAGE != ''
uses: docker/build-push-action@v4
with:
context: .
file: .github/workflows/pipeline.dockerfile
platforms: linux/amd64,linux/386,linux/arm/v6,linux/arm/v7,linux/arm64
push: true
tags: ${{ steps.meta.outputs.tags }}
5 .github/workflows/stale.yml vendored
@@ -12,9 +12,8 @@ jobs:
pull-requests: write
runs-on: ubuntu-latest
steps:
- uses: dessant/lock-threads@v6
- uses: dessant/lock-threads@v4.0.0
with:
process-only: 'issues, prs'
issue-inactive-days: 120
pr-inactive-days: 120
log-output: true
@@ -28,7 +27,7 @@ jobs:
This pull request has been automatically locked since there
has not been any recent activity after it was closed.
Please open a new issue for related bugs.
- uses: actions/stale@v9
- uses: actions/stale@v7
with:
operations-per-run: 999
days-before-issue-stale: 180
93 .github/workflows/update-translations.sh vendored
@@ -1,93 +0,0 @@
#!/bin/sh

set -e

I18N_DIR=resources/i18n

# Function to process JSON: remove empty attributes and sort
process_json() {
jq 'walk(if type == "object" then with_entries(select(.value != null and .value != "" and .value != [] and .value != {})) | to_entries | sort_by(.key) | from_entries else . end)' "$1"
}

# Function to check differences between local and remote translations
check_lang_diff() {
filename=${I18N_DIR}/"$1".json
url=$(curl -s -X POST https://poeditor.com/api/ \
-d api_token="${POEDITOR_APIKEY}" \
-d action="export" \
-d id="${POEDITOR_PROJECTID}" \
-d language="$1" \
-d type="key_value_json" | jq -r .item)
if [ -z "$url" ]; then
echo "Failed to export $1"
return 1
fi
curl -sSL "$url" > poeditor.json

process_json "$filename" > "$filename".tmp
process_json poeditor.json > poeditor.tmp

diff=$(diff -u "$filename".tmp poeditor.tmp) || true
if [ -n "$diff" ]; then
echo "$diff"
mv poeditor.json "$filename"
fi

rm -f poeditor.json poeditor.tmp "$filename".tmp
}

# Function to get the list of languages
get_language_list() {
response=$(curl -s -X POST https://api.poeditor.com/v2/languages/list \
-d api_token="${POEDITOR_APIKEY}" \
-d id="${POEDITOR_PROJECTID}")

echo $response
}

# Function to get the language name from the language code
get_language_name() {
lang_code="$1"
lang_list="$2"

lang_name=$(echo "$lang_list" | jq -r ".result.languages[] | select(.code == \"$lang_code\") | .name")

if [ -z "$lang_name" ]; then
echo "Error: Language code '$lang_code' not found" >&2
return 1
fi

echo "$lang_name"
}

# Function to get the language code from the file path
get_lang_code() {
filepath="$1"
# Extract just the filename
filename=$(basename "$filepath")

# Remove the extension
lang_code="${filename%.*}"

echo "$lang_code"
}

lang_list=$(get_language_list)

# Check differences for each language
for file in ${I18N_DIR}/*.json; do
code=$(get_lang_code "$file")
lang=$(jq -r .languageName < "$file")
lang_name=$(get_language_name "$code" "$lang_list")
echo "Downloading $lang_name - $lang ($code)"
check_lang_diff "$code"
done

# List changed languages to stderr
languages=""
for file in $(git diff --name-only --exit-code | grep json); do
lang_code=$(get_lang_code "$file")
lang_name=$(get_language_name "$lang_code" "$lang_list")
languages="${languages}$(echo "$lang_name" | tr -d '\n'), "
done
echo "${languages%??}" 1>&2
15 .github/workflows/update-translations.yml vendored
@@ -8,26 +8,21 @@ jobs:
runs-on: ubuntu-latest
if: ${{ github.repository_owner == 'navidrome' }}
steps:
- uses: actions/checkout@v6
- uses: actions/checkout@v3
- name: Get updated translations
id: poeditor
env:
POEDITOR_PROJECTID: ${{ secrets.POEDITOR_PROJECTID }}
POEDITOR_APIKEY: ${{ secrets.POEDITOR_APIKEY }}
run: |
.github/workflows/update-translations.sh 2> title.tmp
title=$(cat title.tmp)
echo "::set-output name=title::$title"
rm title.tmp
./update-translations.sh
- name: Show changes, if any
run: |
git status --porcelain
git diff
- name: Create Pull Request
uses: peter-evans/create-pull-request@v8
uses: peter-evans/create-pull-request@v4
with:
token: ${{ secrets.PAT }}
author: "navidrome-bot <navidrome-bot@navidrome.org>"
commit-message: "fix(ui): update ${{ steps.poeditor.outputs.title }} translations from POEditor"
title: "fix(ui): update ${{ steps.poeditor.outputs.title }} translations from POEditor"
commit-message: Update translations
title: Update translations from POEditor
branch: update-translations
236 .github/workflows/validate-translations.sh vendored
@@ -1,236 +0,0 @@
#!/bin/bash

# validate-translations.sh
#
# This script validates the structure of JSON translation files by comparing them
# against the reference English translation file (ui/src/i18n/en.json).
#
# The script performs the following validations:
# 1. JSON syntax validation using jq
# 2. Structural validation - ensures all keys from English file are present
# 3. Reports missing keys (translation incomplete)
# 4. Reports extra keys (keys not in English reference, possibly deprecated)
# 5. Emits GitHub Actions annotations for CI/CD integration
#
# Usage:
# ./validate-translations.sh
#
# Environment Variables:
# EN_FILE - Path to reference English file (default: ui/src/i18n/en.json)
# TRANSLATION_DIR - Directory containing translation files (default: resources/i18n)
#
# Exit codes:
# 0 - All translations are valid
# 1 - One or more translations have structural issues
#
# GitHub Actions Integration:
# The script outputs GitHub Actions annotations using ::error and ::warning
# format that will be displayed in PR checks and workflow summaries.

# Script to validate JSON translation files structure against en.json
set -e

# Path to the reference English translation file
EN_FILE="${EN_FILE:-ui/src/i18n/en.json}"
TRANSLATION_DIR="${TRANSLATION_DIR:-resources/i18n}"
VERBOSE=false

# Parse command line arguments
while [[ $# -gt 0 ]]; do
case "$1" in
-v|--verbose)
VERBOSE=true
shift
;;
-h|--help)
echo "Usage: $0 [options]"
echo ""
echo "Validates JSON translation files structure against English reference file."
echo ""
echo "Options:"
echo " -h, --help Show this help message"
echo " -v, --verbose Show detailed output (default: only show errors)"
echo ""
echo "Environment Variables:"
echo " EN_FILE Path to reference English file (default: ui/src/i18n/en.json)"
echo " TRANSLATION_DIR Directory with translation files (default: resources/i18n)"
echo ""
echo "Examples:"
echo " $0 # Validate all translation files (quiet mode)"
echo " $0 -v # Validate with detailed output"
echo " EN_FILE=custom/en.json $0 # Use custom reference file"
echo " TRANSLATION_DIR=custom/i18n $0 # Use custom translations directory"
exit 0
;;
*)
echo "Unknown option: $1" >&2
echo "Use --help for usage information" >&2
exit 1
;;
esac
done

# Color codes for output
RED='\033[0;31m'
GREEN='\033[0;32m'
YELLOW='\033[1;33m'
NC='\033[0m' # No Color

if [[ "$VERBOSE" == "true" ]]; then
echo "Validating translation files structure against ${EN_FILE}..."
fi

# Check if English reference file exists
if [[ ! -f "$EN_FILE" ]]; then
echo "::error::Reference file $EN_FILE not found"
exit 1
fi

# Function to extract all JSON keys from a file, creating a flat list of dot-separated paths
extract_keys() {
local file="$1"
jq -r 'paths(scalars) as $p | $p | join(".")' "$file" 2>/dev/null | sort
}

# Function to extract all non-empty string keys (to identify structural issues)
extract_structure_keys() {
local file="$1"
# Get only keys where values are not empty strings
jq -r 'paths(scalars) as $p | select(getpath($p) != "") | $p | join(".")' "$file" 2>/dev/null | sort
}

# Function to validate a single translation file
validate_translation() {
local translation_file="$1"
local filename=$(basename "$translation_file")
local has_errors=false
local verbose=${2:-false}

if [[ "$verbose" == "true" ]]; then
echo "Validating $filename..."
fi

# First validate JSON syntax
if ! jq empty "$translation_file" 2>/dev/null; then
echo "::error file=$translation_file::Invalid JSON syntax"
echo -e "${RED}✗ $filename has invalid JSON syntax${NC}"
return 1
fi

# Extract all keys from both files (for statistics)
local en_keys_file=$(mktemp)
local translation_keys_file=$(mktemp)

extract_keys "$EN_FILE" > "$en_keys_file"
extract_keys "$translation_file" > "$translation_keys_file"

# Extract only non-empty structure keys (to validate structural issues)
local en_structure_file=$(mktemp)
local translation_structure_file=$(mktemp)

extract_structure_keys "$EN_FILE" > "$en_structure_file"
extract_structure_keys "$translation_file" > "$translation_structure_file"

# Find structural issues: keys in translation not in English (misplaced)
local extra_keys=$(comm -13 "$en_keys_file" "$translation_keys_file")

# Find missing keys (for statistics only)
local missing_keys=$(comm -23 "$en_keys_file" "$translation_keys_file")

# Count keys for statistics
local total_en_keys=$(wc -l < "$en_keys_file")
local total_translation_keys=$(wc -l < "$translation_keys_file")
local missing_count=0
local extra_count=0

if [[ -n "$missing_keys" ]]; then
missing_count=$(echo "$missing_keys" | grep -c '^' || echo 0)
fi

if [[ -n "$extra_keys" ]]; then
extra_count=$(echo "$extra_keys" | grep -c '^' || echo 0)
has_errors=true
fi

# Report extra/misplaced keys (these are structural issues)
if [[ -n "$extra_keys" ]]; then
if [[ "$verbose" == "true" ]]; then
echo -e "${YELLOW}Misplaced keys in $filename ($extra_count):${NC}"
fi

while IFS= read -r key; do
# Try to find the line number
line=$(grep -n "\"$(echo "$key" | sed 's/.*\.//')" "$translation_file" | head -1 | cut -d: -f1)
line=${line:-1} # Default to line 1 if not found

echo "::error file=$translation_file,line=$line::Misplaced key: $key"

if [[ "$verbose" == "true" ]]; then
echo " + $key (line ~$line)"
fi
done <<< "$extra_keys"
fi

# Clean up temp files
rm -f "$en_keys_file" "$translation_keys_file" "$en_structure_file" "$translation_structure_file"

# Print statistics
if [[ "$verbose" == "true" ]]; then
echo " Keys: $total_translation_keys/$total_en_keys (Missing: $missing_count, Extra/Misplaced: $extra_count)"

if [[ "$has_errors" == "true" ]]; then
echo -e "${RED}✗ $filename has structural issues${NC}"
else
echo -e "${GREEN}✓ $filename structure is valid${NC}"
fi
elif [[ "$has_errors" == "true" ]]; then
echo -e "${RED}✗ $filename has structural issues (Extra/Misplaced: $extra_count)${NC}"
fi

return $([[ "$has_errors" == "true" ]] && echo 1 || echo 0)
}

# Main validation loop
validation_failed=false
total_files=0
failed_files=0
valid_files=0

for translation_file in "$TRANSLATION_DIR"/*.json; do
if [[ -f "$translation_file" ]]; then
total_files=$((total_files + 1))
if ! validate_translation "$translation_file" "$VERBOSE"; then
validation_failed=true
failed_files=$((failed_files + 1))
else
valid_files=$((valid_files + 1))
fi

if [[ "$VERBOSE" == "true" ]]; then
echo "" # Add spacing between files
fi
fi
done

# Summary
if [[ "$VERBOSE" == "true" ]]; then
echo "========================================="
echo "Translation Validation Summary:"
echo " Total files: $total_files"
echo " Valid files: $valid_files"
echo " Files with structural issues: $failed_files"
echo "========================================="
fi

if [[ "$validation_failed" == "true" ]]; then
if [[ "$VERBOSE" == "true" ]]; then
echo -e "${RED}Translation validation failed - $failed_files file(s) have structural issues${NC}"
else
echo -e "${RED}Translation validation failed - $failed_files/$total_files file(s) have structural issues${NC}"
fi
exit 1
elif [[ "$VERBOSE" == "true" ]]; then
echo -e "${GREEN}All translation files are structurally valid${NC}"
fi

exit 0
23 .gitignore vendored
@@ -5,35 +5,24 @@
/navidrome
/iTunes*.xml
/tmp
/bin
data/*
vendor/*/
wiki
TODO.md
var
navidrome.toml
!release/linux/navidrome.toml
master.zip
testDB
navidrome.db
cache/*
*.swp
coverage.out
embedded_gen.go
dist
music
*.db*
navidrome.db-shm
navidrome.db-wal
tags
.gitinfo
docker-compose.yml
!contrib/docker-compose.yml
binaries
navidrome-*
/ndpgen
AGENTS.md
.github/prompts
.github/instructions
.github/git-commit-instructions.md
*.exe
*.test
*.wasm
*.ndp
openspec/
go.work*
/api/openapi.yaml
@@ -1,58 +1,35 @@
version: "2"
run:
build-tags:
- netgo
go: "1.19"

linters:
enable:
- asasalint
- asciicheck
- bidichk
- bodyclose
- copyloopvar
- dogsled
- durationcheck
- errcheck
- errorlint
- gocritic
- exportloopref
- gocyclo
- goprintffuncname
- gosec
- gosimple
- govet
- ineffassign
- misspell
- nakedret
- nilerr
- rowserrcheck
- unconvert
- whitespace
disable:
- staticcheck
settings:
gocritic:
disable-all: true
enabled-checks:
- deprecatedComment
gosec:
excludes:
- G501
- G401
- G505
- G115
govet:
enable:
- nilness
exclusions:
generated: lax
presets:
- comments
- common-false-positives
- legacy
- std-error-handling
paths:
- third_party$
- builtin$
- examples$
formatters:
exclusions:
generated: lax
paths:
- third_party$
- builtin$
- examples$
- typecheck
- unconvert
- unused
- whitespace

issues:
exclude-rules:
- linters:
- gosec
text: "(G501|G401|G505):"
139 .goreleaser.yml Normal file
@@ -0,0 +1,139 @@
# GoReleaser config
project_name: navidrome

builds:
- id: navidrome_linux_amd64
env:
- CGO_ENABLED=1
goos:
- linux
goarch:
- amd64
flags:
- -tags=netgo
ldflags:
- "-extldflags '-static -lz'"
- -s -w -X github.com/navidrome/navidrome/consts.gitSha={{.ShortCommit}} -X github.com/navidrome/navidrome/consts.gitTag={{.Version}}

- id: navidrome_linux_386
env:
- CGO_ENABLED=1
- PKG_CONFIG_PATH=/i386/lib/pkgconfig
goos:
- linux
goarch:
- "386"
flags:
- -tags=netgo
ldflags:
- "-extldflags '-static'"
- -s -w -X github.com/navidrome/navidrome/consts.gitSha={{.ShortCommit}} -X github.com/navidrome/navidrome/consts.gitTag={{.Version}}

- id: navidrome_linux_arm
env:
- CGO_ENABLED=1
- CC=arm-linux-gnueabi-gcc
- CXX=arm-linux-gnueabi-g++
- PKG_CONFIG_PATH=/arm/lib/pkgconfig
goos:
- linux
goarch:
- arm
goarm:
- "5"
- "6"
- "7"
flags:
- -tags=netgo
ldflags:
- "-extldflags '-static'"
- -s -w -X github.com/navidrome/navidrome/consts.gitSha={{.ShortCommit}} -X github.com/navidrome/navidrome/consts.gitTag={{.Version}}

- id: navidrome_linux_arm64
env:
- CGO_ENABLED=1
- CC=aarch64-linux-gnu-gcc
- CXX=aarch64-linux-gnu-g++
- PKG_CONFIG_PATH=/arm64/lib/pkgconfig
goos:
- linux
goarch:
- arm64
flags:
- -tags=netgo
ldflags:
- "-extldflags '-static'"
- -s -w -X github.com/navidrome/navidrome/consts.gitSha={{.ShortCommit}} -X github.com/navidrome/navidrome/consts.gitTag={{.Version}}

- id: navidrome_windows_386
env:
- CGO_ENABLED=1
- CC=i686-w64-mingw32-gcc
- CXX=i686-w64-mingw32-g++
- PKG_CONFIG_PATH=/mingw32/lib/pkgconfig
goos:
- windows
goarch:
- "386"
flags:
- -tags=netgo
ldflags:
- "-extldflags '-static'"
- -s -w -X github.com/navidrome/navidrome/consts.gitSha={{.ShortCommit}} -X github.com/navidrome/navidrome/consts.gitTag={{.Version}}

- id: navidrome_windows_amd64
env:
- CGO_ENABLED=1
- CC=x86_64-w64-mingw32-gcc
- CXX=x86_64-w64-mingw32-g++
- PKG_CONFIG_PATH=/mingw64/lib/pkgconfig
goos:
- windows
goarch:
- amd64
flags:
- -tags=netgo
ldflags:
- "-extldflags '-static'"
- -s -w -X github.com/navidrome/navidrome/consts.gitSha={{.ShortCommit}} -X github.com/navidrome/navidrome/consts.gitTag={{.Version}}

- id: navidrome_darwin_amd64
env:
- CGO_ENABLED=1
- CC=o64-clang
- CXX=o64-clang++
- PKG_CONFIG_PATH=/darwin/lib/pkgconfig
goos:
- darwin
goarch:
- amd64
flags:
- -tags=netgo
ldflags:
- -s -w -X github.com/navidrome/navidrome/consts.gitSha={{.ShortCommit}} -X github.com/navidrome/navidrome/consts.gitTag={{.Version}}

archives:
- format_overrides:
- goos: windows
format: zip
replacements:
darwin: macOS
linux: Linux
windows: Windows
386: i386
amd64: x86_64

checksum:
name_template: "{{ .ProjectName }}_checksums.txt"

snapshot:
name_template: "{{ .Tag }}-SNAPSHOT"

release:
draft: true

changelog:
# sort: asc
filters:
exclude:
- "^docs:"
@@ -48,15 +48,14 @@ This improves the readability of the messages
It can be one of the following:
1. **feat**: Addition of a new feature
2. **fix**: Bug fix
3. **sec**: Fixing security issues
4. **docs**: Documentation Changes
5. **style**: Changes to styling
6. **refactor**: Refactoring of code
7. **perf**: Code that affects performance
8. **test**: Updating or improving the current tests
9. **build**: Changes to Build process
10. **revert**: Reverting to a previous commit
11. **chore** : updating grunt tasks etc
3. **docs**: Documentation Changes
4. **style**: Changes to styling
5. **refactor**: Refactoring of code
6. **perf**: Code that affects performance
7. **test**: Updating or improving the current tests
8. **build**: Changes to Build process
9. **revert**: Reverting to a previous commit
10. **chore** : updating grunt tasks etc

If there is a breaking change in your Pull Request, please add `BREAKING CHANGE` in the optional body section
147	Dockerfile
@@ -1,147 +0,0 @@
FROM --platform=$BUILDPLATFORM ghcr.io/crazy-max/osxcross:14.5-debian AS osxcross

########################################################################################################################
### Build xx (original image: tonistiigi/xx)
FROM --platform=$BUILDPLATFORM public.ecr.aws/docker/library/alpine:3.20 AS xx-build

# v1.9.0
ENV XX_VERSION=a5592eab7a57895e8d385394ff12241bc65ecd50

RUN apk add -U --no-cache git
RUN git clone https://github.com/tonistiigi/xx && \
    cd xx && \
    git checkout ${XX_VERSION} && \
    mkdir -p /out && \
    cp src/xx-* /out/

RUN cd /out && \
    ln -s xx-cc /out/xx-clang && \
    ln -s xx-cc /out/xx-clang++ && \
    ln -s xx-cc /out/xx-c++ && \
    ln -s xx-apt /out/xx-apt-get

# xx mimics the original tonistiigi/xx image
FROM scratch AS xx
COPY --from=xx-build /out/ /usr/bin/

########################################################################################################################
### Get TagLib
FROM --platform=$BUILDPLATFORM public.ecr.aws/docker/library/alpine:3.20 AS taglib-build
ARG TARGETPLATFORM
ARG CROSS_TAGLIB_VERSION=2.1.1-2
ENV CROSS_TAGLIB_RELEASES_URL=https://github.com/navidrome/cross-taglib/releases/download/v${CROSS_TAGLIB_VERSION}/

# wget in busybox can't follow redirects
RUN <<EOT
    apk add --no-cache wget
    PLATFORM=$(echo ${TARGETPLATFORM} | tr '/' '-')
    FILE=taglib-${PLATFORM}.tar.gz

    DOWNLOAD_URL=${CROSS_TAGLIB_RELEASES_URL}${FILE}
    wget ${DOWNLOAD_URL}

    mkdir /taglib
    tar -xzf ${FILE} -C /taglib
EOT

########################################################################################################################
### Build Navidrome UI
FROM --platform=$BUILDPLATFORM public.ecr.aws/docker/library/node:lts-alpine AS ui
WORKDIR /app

# Install node dependencies
COPY ui/package.json ui/package-lock.json ./
COPY ui/bin/ ./bin/
RUN npm ci

# Build bundle
COPY ui/ ./
RUN npm run build -- --outDir=/build

FROM scratch AS ui-bundle
COPY --from=ui /build /build

########################################################################################################################
### Build Navidrome binary
FROM --platform=$BUILDPLATFORM public.ecr.aws/docker/library/golang:1.25-trixie AS base
RUN apt-get update && apt-get install -y clang lld
COPY --from=xx / /
WORKDIR /workspace

FROM --platform=$BUILDPLATFORM base AS build

# Install build dependencies for the target platform
ARG TARGETPLATFORM

RUN xx-apt install -y binutils gcc g++ libc6-dev zlib1g-dev
RUN xx-verify --setup

RUN --mount=type=bind,source=. \
    --mount=type=cache,target=/root/.cache \
    --mount=type=cache,target=/go/pkg/mod \
    go mod download

ARG GIT_SHA
ARG GIT_TAG

RUN --mount=type=bind,source=. \
    --mount=from=ui,source=/build,target=./ui/build,ro \
    --mount=from=osxcross,src=/osxcross/SDK,target=/xx-sdk,ro \
    --mount=type=cache,target=/root/.cache \
    --mount=type=cache,target=/go/pkg/mod \
    --mount=from=taglib-build,target=/taglib,src=/taglib,ro <<EOT

    # Setup CGO cross-compilation environment
    xx-go --wrap
    export CGO_ENABLED=1
    export CGO_CFLAGS_ALLOW="--define-prefix"
    export PKG_CONFIG_PATH=/taglib/lib/pkgconfig
    cat $(go env GOENV)

    # Only Darwin (macOS) requires clang (default), Windows requires gcc, everything else can use any compiler.
    # So let's use gcc for everything except Darwin.
    if [ "$(xx-info os)" != "darwin" ]; then
        export CC=$(xx-info)-gcc
        export CXX=$(xx-info)-g++
        export LD_EXTRA="-extldflags '-static -latomic'"
    fi
    if [ "$(xx-info os)" = "windows" ]; then
        export EXT=".exe"
    fi

    go build -tags=netgo -ldflags="${LD_EXTRA} -w -s \
        -X github.com/navidrome/navidrome/consts.gitSha=${GIT_SHA} \
        -X github.com/navidrome/navidrome/consts.gitTag=${GIT_TAG}" \
        -o /out/navidrome${EXT} .
EOT

# Verify if the binary was built for the correct platform and it is statically linked
RUN xx-verify --static /out/navidrome*

FROM scratch AS binary
COPY --from=build /out /

########################################################################################################################
### Build Final Image
FROM public.ecr.aws/docker/library/alpine:3.20 AS final
LABEL maintainer="deluan@navidrome.org"
LABEL org.opencontainers.image.source="https://github.com/navidrome/navidrome"

# Install ffmpeg and mpv
RUN apk add -U --no-cache ffmpeg mpv sqlite

# Copy navidrome binary
COPY --from=build /out/navidrome /app/

VOLUME ["/data", "/music"]
ENV ND_MUSICFOLDER=/music
ENV ND_DATAFOLDER=/data
ENV ND_CONFIGFILE=/data/navidrome.toml
ENV ND_PORT=4533
RUN touch /.nddockerenv

EXPOSE ${ND_PORT}
WORKDIR /app

ENTRYPOINT ["/app/navidrome"]
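A minimal sketch of running the resulting image, mapping the port and volumes declared in the final stage (host paths are placeholders; the tag matches the Makefile's default DOCKER_TAG):

    docker run --rm -p 4533:4533 \
        -v /path/to/music:/music:ro \
        -v /path/to/data:/data \
        deluan/navidrome:develop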
230	Makefile
@@ -1,30 +1,17 @@
|
||||
GO_VERSION=$(shell grep "^go " go.mod | cut -f 2 -d ' ')
|
||||
NODE_VERSION=$(shell cat .nvmrc)
|
||||
|
||||
# Set global environment variables, required for most targets
|
||||
export CGO_CFLAGS_ALLOW=--define-prefix
|
||||
export ND_ENABLEINSIGHTSCOLLECTOR=false
|
||||
|
||||
ifneq ("$(wildcard .git/HEAD)","")
|
||||
GIT_SHA=$(shell git rev-parse --short HEAD)
|
||||
GIT_TAG=$(shell git describe --tags `git rev-list --tags --max-count=1`)-SNAPSHOT
|
||||
GIT_TAG=$(shell git describe --tags `git rev-list --tags --max-count=1`)
|
||||
else
|
||||
GIT_SHA=source_archive
|
||||
GIT_TAG=$(patsubst navidrome-%,v%,$(notdir $(PWD)))-SNAPSHOT
|
||||
GIT_TAG=$(patsubst navidrome-%,v%,$(notdir $(PWD)))
|
||||
endif
|
||||
|
||||
SUPPORTED_PLATFORMS ?= linux/amd64,linux/arm64,linux/arm/v5,linux/arm/v6,linux/arm/v7,linux/386,linux/riscv64,darwin/amd64,darwin/arm64,windows/amd64,windows/386
|
||||
IMAGE_PLATFORMS ?= $(shell echo $(SUPPORTED_PLATFORMS) | tr ',' '\n' | grep "linux" | grep -v "arm/v5" | tr '\n' ',' | sed 's/,$$//')
|
||||
PLATFORMS ?= $(SUPPORTED_PLATFORMS)
|
||||
DOCKER_TAG ?= deluan/navidrome:develop
|
||||
CI_RELEASER_VERSION=1.20.3-1 ## https://github.com/navidrome/ci-goreleaser
|
||||
|
||||
# Taglib version to use in cross-compilation, from https://github.com/navidrome/cross-taglib
|
||||
CROSS_TAGLIB_VERSION ?= 2.1.1-2
|
||||
GOLANGCI_LINT_VERSION ?= v2.8.0
|
||||
|
||||
UI_SRC_FILES := $(shell find ui -type f -not -path "ui/build/*" -not -path "ui/node_modules/*")
|
||||
|
||||
setup: check_env download-deps install-golangci-lint setup-git ##@1_Run_First Install dependencies and prepare development environment
|
||||
setup: check_env download-deps setup-git ##@1_Run_First Install dependencies and prepare development environment
|
||||
@echo Downloading Node dependencies...
|
||||
@(cd ./ui && npm ci)
|
||||
.PHONY: setup
|
||||
@@ -33,105 +20,60 @@ dev: check_env ##@Development Start Navidrome in development mode, with hot-re
|
||||
npx foreman -j Procfile.dev -p 4533 start
|
||||
.PHONY: dev
|
||||
|
||||
server: check_go_env buildjs ##@Development Start the backend in development mode
|
||||
go tool reflex -d none -c reflex.conf
|
||||
server: check_go_env ##@Development Start the backend in development mode
|
||||
@go run github.com/cespare/reflex@latest -d none -c reflex.conf
|
||||
.PHONY: server
|
||||
|
||||
stop: ##@Development Stop development servers (UI and backend)
|
||||
@echo "Stopping development servers..."
|
||||
@-pkill -f "vite"
|
||||
@-pkill -f "go tool reflex.*reflex.conf"
|
||||
@-pkill -f "go run.*netgo"
|
||||
@echo "Development servers stopped."
|
||||
.PHONY: stop
|
||||
|
||||
watch: ##@Development Start Go tests in watch mode (re-run when code changes)
|
||||
go tool ginkgo watch -tags=netgo -notify ./...
|
||||
go run github.com/onsi/ginkgo/v2/ginkgo@latest watch -notify ./...
|
||||
.PHONY: watch
|
||||
|
||||
PKG ?= ./...
|
||||
test: ##@Development Run Go tests. Use PKG variable to specify packages to test, e.g. make test PKG=./server
|
||||
go test -tags netgo $(PKG)
|
||||
test: ##@Development Run Go tests
|
||||
go test -race -shuffle=on ./...
|
||||
.PHONY: test
|
||||
|
||||
test-ndpgen: ##@Development Run tests for ndpgen plugin
|
||||
cd plugins/cmd/ndpgen && go test ./......
|
||||
.PHONY: test-ndpgen
|
||||
|
||||
testall: test test-ndpgen test-i18n test-js ##@Development Run Go and JS tests
|
||||
testall: test ##@Development Run Go and JS tests, and validate OpenAPI spec
|
||||
@(cd ./ui && npm test -- --watchAll=false)
|
||||
.PHONY: testall
|
||||
|
||||
test-race: ##@Development Run Go tests with race detector
|
||||
go test -tags netgo -race -shuffle=on $(PKG)
|
||||
.PHONY: test-race
|
||||
|
||||
test-js: ##@Development Run JS tests
|
||||
@(cd ./ui && npm run test)
|
||||
.PHONY: test-js
|
||||
|
||||
test-i18n: ##@Development Validate all translations files
|
||||
./.github/workflows/validate-translations.sh
|
||||
.PHONY: test-i18n
|
||||
|
||||
install-golangci-lint: ##@Development Install golangci-lint if not present
|
||||
@INSTALL=false; \
|
||||
if PATH=$$PATH:./bin which golangci-lint > /dev/null 2>&1; then \
|
||||
CURRENT_VERSION=$$(PATH=$$PATH:./bin golangci-lint version 2>/dev/null | grep -oE '[0-9]+\.[0-9]+\.[0-9]+' | head -n1); \
|
||||
REQUIRED_VERSION=$$(echo "$(GOLANGCI_LINT_VERSION)" | sed 's/^v//'); \
|
||||
if [ "$$CURRENT_VERSION" != "$$REQUIRED_VERSION" ]; then \
|
||||
echo "Found golangci-lint $$CURRENT_VERSION, but $$REQUIRED_VERSION is required. Reinstalling..."; \
|
||||
rm -f ./bin/golangci-lint; \
|
||||
INSTALL=true; \
|
||||
fi; \
|
||||
else \
|
||||
INSTALL=true; \
|
||||
fi; \
|
||||
if [ "$$INSTALL" = "true" ]; then \
|
||||
echo "Installing golangci-lint $(GOLANGCI_LINT_VERSION)..."; \
|
||||
curl -sSfL https://raw.githubusercontent.com/golangci/golangci-lint/HEAD/install.sh | sh -s $(GOLANGCI_LINT_VERSION); \
|
||||
fi
|
||||
.PHONY: install-golangci-lint
|
||||
|
||||
lint: install-golangci-lint ##@Development Lint Go code
|
||||
PATH=$$PATH:./bin golangci-lint run --timeout 5m
|
||||
lint: ##@Development Lint Go code
|
||||
go run github.com/golangci/golangci-lint/cmd/golangci-lint@latest run -v --timeout 5m
|
||||
.PHONY: lint
|
||||
|
||||
lintall: lint ##@Development Lint Go and JS code
|
||||
lintapi: api/openapi.yaml ##@Development Lint OpenAPI spec
|
||||
npx @redocly/cli lint api/openapi.yaml
|
||||
.PHONY: lintapi
|
||||
|
||||
lintall: lint lintapi ##@Development Lint Go and JS code
|
||||
@(cd ./ui && npm run check-formatting) || (echo "\n\nPlease run 'npm run prettier' to fix formatting issues." && exit 1)
|
||||
@(cd ./ui && npm run lint)
|
||||
.PHONY: lintall
|
||||
|
||||
format: ##@Development Format code
|
||||
@(cd ./ui && npm run prettier)
|
||||
@go tool goimports -w `find . -name '*.go' | grep -v _gen.go$$ | grep -v .pb.go$$`
|
||||
@go mod tidy
|
||||
.PHONY: format
|
||||
|
||||
wire: check_go_env ##@Development Update Dependency Injection
|
||||
go tool wire gen -tags=netgo ./...
|
||||
gen: check_go_env api ##@Development Update Generated Code (wire, mocks, openapi, etc)
|
||||
go run github.com/google/wire/cmd/wire@latest ./...
|
||||
.PHONY: wire
|
||||
|
||||
gen: check_go_env ##@Development Run go generate for code generation
|
||||
go generate ./...
|
||||
cd plugins/cmd/ndpgen && go run . -host-wrappers -input=../../host -package=host
|
||||
cd plugins/cmd/ndpgen && go run . -input=../../host -output=../../pdk -go -python -rust
|
||||
cd plugins/cmd/ndpgen && go run . -capability-only -input=../../capabilities -output=../../pdk -go -rust
|
||||
cd plugins/cmd/ndpgen && go run . -schemas -input=../../capabilities
|
||||
go mod tidy -C plugins/pdk/go
|
||||
.PHONY: gen
|
||||
api: check_go_env api/openapi.yaml
|
||||
go generate ./server/api/...
|
||||
.PHONY: api
|
||||
|
||||
spec_parts=$(shell find api -name '*.yml')
|
||||
api/openapi.yaml: $(spec_parts)
|
||||
@echo "Bundling OpenAPI spec..."
|
||||
npx @redocly/cli bundle api/spec.yml -o api/openapi.yaml
|
||||
|
||||
snapshots: ##@Development Update (GoLang) Snapshot tests
|
||||
UPDATE_SNAPSHOTS=true go tool ginkgo ./server/subsonic/responses/...
|
||||
UPDATE_SNAPSHOTS=true go run github.com/onsi/ginkgo/v2/ginkgo@latest ./server/subsonic/...
|
||||
.PHONY: snapshots
|
||||
|
||||
migration-sql: ##@Development Create an empty SQL migration file
|
||||
@if [ -z "${name}" ]; then echo "Usage: make migration-sql name=name_of_migration_file"; exit 1; fi
|
||||
go run github.com/pressly/goose/v3/cmd/goose@latest -dir db/migrations create ${name} sql
|
||||
go run github.com/pressly/goose/v3/cmd/goose@latest -dir db/migration create ${name} sql
|
||||
.PHONY: migration
|
||||
|
||||
migration-go: ##@Development Create an empty Go migration file
|
||||
@if [ -z "${name}" ]; then echo "Usage: make migration-go name=name_of_migration_file"; exit 1; fi
|
||||
go run github.com/pressly/goose/v3/cmd/goose@latest -dir db/migrations create ${name}
|
||||
go run github.com/pressly/goose/v3/cmd/goose@latest -dir db/migration create ${name}
|
||||
.PHONY: migration
|
||||
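As a usage sketch of the two migration targets above (the migration name is illustrative):

    make migration-sql name=add_artist_sort_index
    make migration-go name=backfill_album_annotations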
|
||||
setup-dev: setup
|
||||
@@ -143,90 +85,45 @@ setup-git: ##@Development Setup Git hooks (pre-commit and pre-push)
|
||||
@(cd .git/hooks && ln -sf ../../git/* .)
|
||||
.PHONY: setup-git
|
||||
|
||||
build: check_go_env buildjs ##@Build Build the project
|
||||
go build -ldflags="-X github.com/navidrome/navidrome/consts.gitSha=$(GIT_SHA) -X github.com/navidrome/navidrome/consts.gitTag=$(GIT_TAG)" -tags=netgo
|
||||
.PHONY: build
|
||||
|
||||
buildall: deprecated build
|
||||
buildall: buildjs build ##@Build Build the project, both frontend and backend
|
||||
.PHONY: buildall
|
||||
|
||||
debug-build: check_go_env buildjs ##@Build Build the project (with remote debug on)
|
||||
go build -gcflags="all=-N -l" -ldflags="-X github.com/navidrome/navidrome/consts.gitSha=$(GIT_SHA) -X github.com/navidrome/navidrome/consts.gitTag=$(GIT_TAG)" -tags=netgo
|
||||
.PHONY: debug-build
|
||||
build: warning-noui-build check_go_env ##@Build Build only backend
|
||||
go build -ldflags="-X github.com/navidrome/navidrome/consts.gitSha=$(GIT_SHA) -X github.com/navidrome/navidrome/consts.gitTag=$(GIT_TAG)-SNAPSHOT" -tags=netgo
|
||||
.PHONY: build
|
||||
|
||||
buildjs: check_node_env ui/build/index.html ##@Build Build only frontend
|
||||
buildjs: check_node_env ##@Build Build only frontend
|
||||
@(cd ./ui && npm run build)
|
||||
.PHONY: buildjs
|
||||
|
||||
docker-buildjs: ##@Build Build only frontend using Docker
|
||||
docker build --output "./ui" --target ui-bundle .
|
||||
.PHONY: docker-buildjs
|
||||
all: warning-noui-build ##@Cross_Compilation Build binaries for all supported platforms. It does not build the frontend
|
||||
docker run -t -v $(PWD):/workspace -w /workspace deluan/ci-goreleaser:$(CI_RELEASER_VERSION) \
|
||||
goreleaser release --rm-dist --skip-publish --snapshot
|
||||
.PHONY: all
|
||||
|
||||
ui/build/index.html: $(UI_SRC_FILES)
|
||||
@(cd ./ui && npm run build)
|
||||
single: warning-noui-build ##@Cross_Compilation Build binaries for a single supported platforms. It does not build the frontend
|
||||
@if [ -z "${GOOS}" -o -z "${GOARCH}" ]; then \
|
||||
echo "Usage: GOOS=<os> GOARCH=<arch> make single"; \
|
||||
echo "Options:"; \
|
||||
grep -- "- id: navidrome_" .goreleaser.yml | sed 's/- id: navidrome_//g'; \
|
||||
exit 1; \
|
||||
fi
|
||||
@echo "Building binaries for ${GOOS}/${GOARCH}"
|
||||
docker run -t -v $(PWD):/workspace -e GOOS -e GOARCH -w /workspace deluan/ci-goreleaser:$(CI_RELEASER_VERSION) \
|
||||
goreleaser build --rm-dist --snapshot --single-target --id navidrome_${GOOS}_${GOARCH}
|
||||
.PHONY: single
|
||||
|
||||
docker-platforms: ##@Cross_Compilation List supported platforms
|
||||
@echo "Supported platforms:"
|
||||
@echo "$(SUPPORTED_PLATFORMS)" | tr ',' '\n' | sort | sed 's/^/ /'
|
||||
@echo "\nUsage: make PLATFORMS=\"linux/amd64\" docker-build"
|
||||
@echo " make IMAGE_PLATFORMS=\"linux/amd64\" docker-image"
|
||||
.PHONY: docker-platforms
|
||||
|
||||
docker-build: ##@Cross_Compilation Cross-compile for any supported platform (check `make docker-platforms`)
|
||||
docker buildx build \
|
||||
--platform $(PLATFORMS) \
|
||||
--build-arg GIT_TAG=${GIT_TAG} \
|
||||
--build-arg GIT_SHA=${GIT_SHA} \
|
||||
--build-arg CROSS_TAGLIB_VERSION=${CROSS_TAGLIB_VERSION} \
|
||||
--output "./binaries" --target binary .
|
||||
.PHONY: docker-build
|
||||
|
||||
docker-image: ##@Cross_Compilation Build Docker image, tagged as `deluan/navidrome:develop`, override with DOCKER_TAG var. Use IMAGE_PLATFORMS to specify target platforms
|
||||
@echo $(IMAGE_PLATFORMS) | grep -q "windows" && echo "ERROR: Windows is not supported for Docker builds" && exit 1 || true
|
||||
@echo $(IMAGE_PLATFORMS) | grep -q "darwin" && echo "ERROR: macOS is not supported for Docker builds" && exit 1 || true
|
||||
@echo $(IMAGE_PLATFORMS) | grep -q "arm/v5" && echo "ERROR: Linux ARMv5 is not supported for Docker builds" && exit 1 || true
|
||||
docker buildx build \
|
||||
--platform $(IMAGE_PLATFORMS) \
|
||||
--build-arg GIT_TAG=${GIT_TAG} \
|
||||
--build-arg GIT_SHA=${GIT_SHA} \
|
||||
--build-arg CROSS_TAGLIB_VERSION=${CROSS_TAGLIB_VERSION} \
|
||||
--tag $(DOCKER_TAG) .
|
||||
.PHONY: docker-image
|
||||
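For example, a local single-platform image could be built from the target above with (tag and platform are illustrative):

    make IMAGE_PLATFORMS="linux/amd64" DOCKER_TAG=deluan/navidrome:local docker-image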
|
||||
docker-msi: ##@Cross_Compilation Build MSI installer for Windows
|
||||
make docker-build PLATFORMS=windows/386,windows/amd64
|
||||
DOCKER_CLI_HINTS=false docker build -q -t navidrome-msi-builder -f release/wix/msitools.dockerfile .
|
||||
@rm -rf binaries/msi
|
||||
docker run -it --rm -v $(PWD):/workspace -v $(PWD)/binaries:/workspace/binaries -e GIT_TAG=${GIT_TAG} \
|
||||
navidrome-msi-builder sh -c "release/wix/build_msi.sh /workspace 386 && release/wix/build_msi.sh /workspace amd64"
|
||||
@du -h binaries/msi/*.msi
|
||||
.PHONY: docker-msi
|
||||
|
||||
run-docker: ##@Development Run a Navidrome Docker image. Usage: make run-docker tag=<tag>
|
||||
@if [ -z "$(tag)" ]; then echo "Usage: make run-docker tag=<tag>"; exit 1; fi
|
||||
@TAG_DIR="tmp/$$(echo '$(tag)' | tr '/:' '_')"; mkdir -p "$$TAG_DIR"; \
|
||||
VOLUMES="-v $(PWD)/$$TAG_DIR:/data"; \
|
||||
if [ -f navidrome.toml ]; then \
|
||||
VOLUMES="$$VOLUMES -v $(PWD)/navidrome.toml:/data/navidrome.toml:ro"; \
|
||||
MUSIC_FOLDER=$$(grep '^MusicFolder' navidrome.toml | head -n1 | sed 's/.*= *"//' | sed 's/".*//'); \
|
||||
if [ -n "$$MUSIC_FOLDER" ] && [ -d "$$MUSIC_FOLDER" ]; then \
|
||||
VOLUMES="$$VOLUMES -v $$MUSIC_FOLDER:/music:ro"; \
|
||||
fi; \
|
||||
fi; \
|
||||
echo "Running: docker run --rm -p 4533:4533 $$VOLUMES $(tag)"; docker run --rm -p 4533:4533 $$VOLUMES $(tag)
|
||||
.PHONY: run-docker
|
||||
|
||||
package: docker-build ##@Cross_Compilation Create binaries and packages for ALL supported platforms
|
||||
@if [ -z `which goreleaser` ]; then echo "Please install goreleaser first: https://goreleaser.com/install/"; exit 1; fi
|
||||
goreleaser release -f release/goreleaser.yml --clean --skip=publish --snapshot
|
||||
.PHONY: package
|
||||
warning-noui-build:
|
||||
@echo "WARNING: This command does not build the frontend, it uses the latest built with 'make buildjs'"
|
||||
.PHONY: warning-noui-build
|
||||
|
||||
get-music: ##@Development Download some free music from Navidrome's demo instance
|
||||
mkdir -p music
|
||||
( cd music; \
|
||||
curl "https://demo.navidrome.org/rest/download?u=demo&p=demo&f=json&v=1.8.0&c=dev_download&id=2Y3qQA6zJC3ObbBrF9ZBoV" > brock.zip; \
|
||||
curl "https://demo.navidrome.org/rest/download?u=demo&p=demo&f=json&v=1.8.0&c=dev_download&id=04HrSORpypcLGNUdQp37gn" > back_on_earth.zip; \
|
||||
curl "https://demo.navidrome.org/rest/download?u=demo&p=demo&f=json&v=1.8.0&c=dev_download&id=5xcMPJdeEgNrGtnzYbzAqb" > ugress.zip; \
|
||||
curl "https://demo.navidrome.org/rest/download?u=demo&p=demo&f=json&v=1.8.0&c=dev_download&id=1jjQMAZrG3lUsJ0YH6ZRS0" > voodoocuts.zip; \
|
||||
curl "https://demo.navidrome.org/rest/download?u=demo&p=demo&f=json&v=1.8.0&c=dev_download&id=ec2093ec4801402f1e17cc462195cdbb" > brock.zip; \
|
||||
curl "https://demo.navidrome.org/rest/download?u=demo&p=demo&f=json&v=1.8.0&c=dev_download&id=b376eeb4652d2498aa2b25ba0696725e" > back_on_earth.zip; \
|
||||
curl "https://demo.navidrome.org/rest/download?u=demo&p=demo&f=json&v=1.8.0&c=dev_download&id=e49c609b542fc51899ee8b53aa858cb4" > ugress.zip; \
|
||||
curl "https://demo.navidrome.org/rest/download?u=demo&p=demo&f=json&v=1.8.0&c=dev_download&id=350bcab3a4c1d93869e39ce496464f03" > voodoocuts.zip; \
|
||||
for file in *.zip; do unzip -n $${file}; done )
|
||||
@echo "Done. Remember to set your MusicFolder to ./music"
|
||||
.PHONY: get-music
|
||||
@@ -235,11 +132,6 @@ get-music: ##@Development Download some free music from Navidrome's demo instanc
|
||||
##########################################
|
||||
#### Miscellaneous
|
||||
|
||||
clean:
|
||||
@rm -rf ./binaries ./dist ./ui/build/*
|
||||
@touch ./ui/build/.gitkeep
|
||||
.PHONY: clean
|
||||
|
||||
release:
|
||||
@if [[ ! "${V}" =~ ^[0-9]+\.[0-9]+\.[0-9]+.*$$ ]]; then echo "Usage: make release V=X.X.X"; exit 1; fi
|
||||
go mod tidy
|
||||
@@ -279,10 +171,6 @@ check_node_env:
|
||||
pre-push: lintall testall
|
||||
.PHONY: pre-push
|
||||
|
||||
deprecated:
|
||||
@echo "WARNING: This target is deprecated and will be removed in future releases. Use 'make build' instead."
|
||||
.PHONY: deprecated
|
||||
|
||||
.DEFAULT_GOAL := help
|
||||
|
||||
HELP_FUN = \
|
||||
|
||||
@@ -1,2 +1,2 @@
 JS: sh -c "cd ./ui && npm start"
-GO: go tool reflex -d none -c reflex.conf
+GO: go run github.com/cespare/reflex@latest -d none -c reflex.conf

10	README.md
@@ -9,7 +9,6 @@
[](https://discord.gg/xh7j7yF)
[](https://www.reddit.com/r/navidrome/)
[](CODE_OF_CONDUCT.md)
[](https://gurubase.io/g/navidrome)

Navidrome is an open source web-based music collection server and streamer. It gives you freedom to listen to your
music collection from any browser or mobile device. It's like your personal Spotify!
@@ -57,15 +56,6 @@ A share of the revenue helps fund the development of Navidrome at no additional
- **Transcoding** on the fly. Can be set per user/player. **Opus encoding is supported**
- Translated to **various languages**

## Translations

Navidrome uses [POEditor](https://poeditor.com/) for translations, and we are always looking
for [more contributors](https://www.navidrome.org/docs/developers/translations/)

<a href="https://poeditor.com/">
  <img height="32" src="https://github.com/user-attachments/assets/c19b1d2b-01e1-4682-a007-12356c42147c">
</a>

## Documentation
All documentation can be found in the project's website: https://www.navidrome.org/docs.
Here are some useful direct links:

@@ -1,217 +0,0 @@
|
||||
package deezer
|
||||
|
||||
import (
|
||||
bytes "bytes"
|
||||
"context"
|
||||
"encoding/json"
|
||||
"errors"
|
||||
"fmt"
|
||||
"io"
|
||||
"net/http"
|
||||
"net/url"
|
||||
"strconv"
|
||||
"strings"
|
||||
|
||||
"github.com/microcosm-cc/bluemonday"
|
||||
"github.com/navidrome/navidrome/log"
|
||||
)
|
||||
|
||||
const apiBaseURL = "https://api.deezer.com"
|
||||
const authBaseURL = "https://auth.deezer.com"
|
||||
|
||||
var (
|
||||
ErrNotFound = errors.New("deezer: not found")
|
||||
)
|
||||
|
||||
type httpDoer interface {
|
||||
Do(req *http.Request) (*http.Response, error)
|
||||
}
|
||||
|
||||
type client struct {
|
||||
httpDoer httpDoer
|
||||
jwt jwtToken
|
||||
}
|
||||
|
||||
func newClient(hc httpDoer) *client {
|
||||
return &client{
|
||||
httpDoer: hc,
|
||||
}
|
||||
}
|
||||
|
||||
func (c *client) searchArtists(ctx context.Context, name string, limit int) ([]Artist, error) {
|
||||
params := url.Values{}
|
||||
params.Add("q", name)
|
||||
params.Add("order", "RANKING")
|
||||
params.Add("limit", strconv.Itoa(limit))
|
||||
req, err := http.NewRequestWithContext(ctx, "GET", apiBaseURL+"/search/artist", nil)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
req.URL.RawQuery = params.Encode()
|
||||
|
||||
var results SearchArtistResults
|
||||
err = c.makeRequest(req, &results)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
if len(results.Data) == 0 {
|
||||
return nil, ErrNotFound
|
||||
}
|
||||
return results.Data, nil
|
||||
}
|
||||
|
||||
func (c *client) makeRequest(req *http.Request, response any) error {
|
||||
log.Trace(req.Context(), fmt.Sprintf("Sending Deezer %s request", req.Method), "url", req.URL)
|
||||
resp, err := c.httpDoer.Do(req)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
defer resp.Body.Close()
|
||||
data, err := io.ReadAll(resp.Body)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
if resp.StatusCode != 200 {
|
||||
return c.parseError(data)
|
||||
}
|
||||
|
||||
return json.Unmarshal(data, response)
|
||||
}
|
||||
|
||||
func (c *client) parseError(data []byte) error {
|
||||
var deezerError Error
|
||||
err := json.Unmarshal(data, &deezerError)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
return fmt.Errorf("deezer error(%d): %s", deezerError.Error.Code, deezerError.Error.Message)
|
||||
}
|
||||
|
||||
func (c *client) getRelatedArtists(ctx context.Context, artistID int) ([]Artist, error) {
|
||||
req, err := http.NewRequestWithContext(ctx, "GET", fmt.Sprintf("%s/artist/%d/related", apiBaseURL, artistID), nil)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
var results RelatedArtists
|
||||
err = c.makeRequest(req, &results)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
return results.Data, nil
|
||||
}
|
||||
|
||||
func (c *client) getTopTracks(ctx context.Context, artistID int, limit int) ([]Track, error) {
|
||||
params := url.Values{}
|
||||
params.Add("limit", strconv.Itoa(limit))
|
||||
req, err := http.NewRequestWithContext(ctx, "GET", fmt.Sprintf("%s/artist/%d/top", apiBaseURL, artistID), nil)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
req.URL.RawQuery = params.Encode()
|
||||
|
||||
var results TopTracks
|
||||
err = c.makeRequest(req, &results)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
return results.Data, nil
|
||||
}
|
||||
|
||||
const pipeAPIURL = "https://pipe.deezer.com/api"
|
||||
|
||||
var strictPolicy = bluemonday.StrictPolicy()
|
||||
|
||||
func (c *client) getArtistBio(ctx context.Context, artistID int, lang string) (string, error) {
|
||||
jwt, err := c.getJWT(ctx)
|
||||
if err != nil {
|
||||
return "", fmt.Errorf("deezer: failed to get JWT: %w", err)
|
||||
}
|
||||
|
||||
query := map[string]any{
|
||||
"operationName": "ArtistBio",
|
||||
"variables": map[string]any{
|
||||
"artistId": strconv.Itoa(artistID),
|
||||
},
|
||||
"query": `query ArtistBio($artistId: String!) {
|
||||
artist(artistId: $artistId) {
|
||||
bio {
|
||||
full
|
||||
}
|
||||
}
|
||||
}`,
|
||||
}
|
||||
|
||||
body, err := json.Marshal(query)
|
||||
if err != nil {
|
||||
return "", err
|
||||
}
|
||||
|
||||
req, err := http.NewRequestWithContext(ctx, "POST", pipeAPIURL, bytes.NewReader(body))
|
||||
if err != nil {
|
||||
return "", err
|
||||
}
|
||||
|
||||
req.Header.Set("Content-Type", "application/json")
|
||||
req.Header.Set("Accept-Language", lang)
|
||||
req.Header.Set("Authorization", "Bearer "+jwt)
|
||||
|
||||
log.Trace(ctx, "Fetching Deezer artist biography via GraphQL", "artistId", artistID, "language", lang)
|
||||
resp, err := c.httpDoer.Do(req)
|
||||
if err != nil {
|
||||
return "", err
|
||||
}
|
||||
defer resp.Body.Close()
|
||||
|
||||
if resp.StatusCode != 200 {
|
||||
return "", fmt.Errorf("deezer: failed to fetch biography: %s", resp.Status)
|
||||
}
|
||||
|
||||
data, err := io.ReadAll(resp.Body)
|
||||
if err != nil {
|
||||
return "", err
|
||||
}
|
||||
|
||||
type graphQLResponse struct {
|
||||
Data struct {
|
||||
Artist struct {
|
||||
Bio struct {
|
||||
Full string `json:"full"`
|
||||
} `json:"bio"`
|
||||
} `json:"artist"`
|
||||
} `json:"data"`
|
||||
Errors []struct {
|
||||
Message string `json:"message"`
|
||||
}
|
||||
}
|
||||
|
||||
var result graphQLResponse
|
||||
if err := json.Unmarshal(data, &result); err != nil {
|
||||
return "", fmt.Errorf("deezer: failed to parse GraphQL response: %w", err)
|
||||
}
|
||||
|
||||
if len(result.Errors) > 0 {
|
||||
var errs []error
|
||||
for m := range result.Errors {
|
||||
errs = append(errs, errors.New(result.Errors[m].Message))
|
||||
}
|
||||
err := errors.Join(errs...)
|
||||
return "", fmt.Errorf("deezer: GraphQL error: %w", err)
|
||||
}
|
||||
|
||||
if result.Data.Artist.Bio.Full == "" {
|
||||
return "", errors.New("deezer: biography not found")
|
||||
}
|
||||
|
||||
return cleanBio(result.Data.Artist.Bio.Full), nil
|
||||
}
|
||||
|
||||
func cleanBio(bio string) string {
|
||||
bio = strings.ReplaceAll(bio, "</p>", "\n")
|
||||
return strictPolicy.Sanitize(bio)
|
||||
}
|
||||
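For debugging outside Go, the same GraphQL request that getArtistBio builds above can be reproduced with curl; the artist id 27 and the $JWT variable are placeholders, with the token coming from the anonymous login endpoint at authBaseURL:

    curl -s https://pipe.deezer.com/api \
        -H "Content-Type: application/json" \
        -H "Accept-Language: en" \
        -H "Authorization: Bearer $JWT" \
        -d '{"operationName":"ArtistBio","variables":{"artistId":"27"},"query":"query ArtistBio($artistId: String!) { artist(artistId: $artistId) { bio { full } } }"}'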
@@ -1,101 +0,0 @@
|
||||
package deezer
|
||||
|
||||
import (
|
||||
"context"
|
||||
"encoding/json"
|
||||
"errors"
|
||||
"fmt"
|
||||
"io"
|
||||
"net/http"
|
||||
"sync"
|
||||
"time"
|
||||
|
||||
"github.com/lestrrat-go/jwx/v2/jwt"
|
||||
"github.com/navidrome/navidrome/log"
|
||||
)
|
||||
|
||||
type jwtToken struct {
|
||||
token string
|
||||
expiresAt time.Time
|
||||
mu sync.RWMutex
|
||||
}
|
||||
|
||||
func (j *jwtToken) get() (string, bool) {
|
||||
j.mu.RLock()
|
||||
defer j.mu.RUnlock()
|
||||
if time.Now().Before(j.expiresAt) {
|
||||
return j.token, true
|
||||
}
|
||||
return "", false
|
||||
}
|
||||
|
||||
func (j *jwtToken) set(token string, expiresIn time.Duration) {
|
||||
j.mu.Lock()
|
||||
defer j.mu.Unlock()
|
||||
j.token = token
|
||||
j.expiresAt = time.Now().Add(expiresIn)
|
||||
}
|
||||
|
||||
func (c *client) getJWT(ctx context.Context) (string, error) {
|
||||
// Check if we have a valid cached token
|
||||
if token, valid := c.jwt.get(); valid {
|
||||
return token, nil
|
||||
}
|
||||
|
||||
// Fetch a new anonymous token
|
||||
req, err := http.NewRequestWithContext(ctx, "GET", authBaseURL+"/login/anonymous?jo=p&rto=c", nil)
|
||||
if err != nil {
|
||||
return "", err
|
||||
}
|
||||
req.Header.Set("Accept", "application/json")
|
||||
|
||||
resp, err := c.httpDoer.Do(req)
|
||||
if err != nil {
|
||||
return "", err
|
||||
}
|
||||
defer resp.Body.Close()
|
||||
|
||||
if resp.StatusCode != 200 {
|
||||
return "", fmt.Errorf("deezer: failed to get JWT token: %s", resp.Status)
|
||||
}
|
||||
|
||||
data, err := io.ReadAll(resp.Body)
|
||||
if err != nil {
|
||||
return "", err
|
||||
}
|
||||
|
||||
type authResponse struct {
|
||||
JWT string `json:"jwt"`
|
||||
}
|
||||
|
||||
var result authResponse
|
||||
if err := json.Unmarshal(data, &result); err != nil {
|
||||
return "", fmt.Errorf("deezer: failed to parse auth response: %w", err)
|
||||
}
|
||||
|
||||
if result.JWT == "" {
|
||||
return "", errors.New("deezer: no JWT token in response")
|
||||
}
|
||||
|
||||
// Parse JWT to get actual expiration time
|
||||
token, err := jwt.ParseString(result.JWT, jwt.WithVerify(false), jwt.WithValidate(false))
|
||||
if err != nil {
|
||||
return "", fmt.Errorf("deezer: failed to parse JWT token: %w", err)
|
||||
}
|
||||
|
||||
// Calculate TTL with a 1-minute buffer for clock skew and network delays
|
||||
expiresAt := token.Expiration()
|
||||
if expiresAt.IsZero() {
|
||||
return "", errors.New("deezer: JWT token has no expiration time")
|
||||
}
|
||||
|
||||
ttl := time.Until(expiresAt) - 1*time.Minute
|
||||
if ttl <= 0 {
|
||||
return "", errors.New("deezer: JWT token already expired or expires too soon")
|
||||
}
|
||||
|
||||
c.jwt.set(result.JWT, ttl)
|
||||
log.Trace(ctx, "Fetched new Deezer JWT token", "expiresAt", expiresAt, "ttl", ttl)
|
||||
|
||||
return result.JWT, nil
|
||||
}
|
||||
@@ -1,293 +0,0 @@
|
||||
package deezer
|
||||
|
||||
import (
|
||||
"bytes"
|
||||
"context"
|
||||
"fmt"
|
||||
"io"
|
||||
"net/http"
|
||||
"sync"
|
||||
"time"
|
||||
|
||||
"github.com/lestrrat-go/jwx/v2/jwt"
|
||||
. "github.com/onsi/ginkgo/v2"
|
||||
. "github.com/onsi/gomega"
|
||||
)
|
||||
|
||||
var _ = Describe("JWT Authentication", func() {
|
||||
var httpClient *fakeHttpClient
|
||||
var client *client
|
||||
var ctx context.Context
|
||||
|
||||
BeforeEach(func() {
|
||||
httpClient = &fakeHttpClient{}
|
||||
client = newClient(httpClient)
|
||||
ctx = context.Background()
|
||||
})
|
||||
|
||||
Describe("getJWT", func() {
|
||||
Context("with a valid JWT response", func() {
|
||||
It("successfully fetches and caches a JWT token", func() {
|
||||
testJWT := createTestJWT(5 * time.Minute)
|
||||
httpClient.mock("https://auth.deezer.com/login/anonymous", http.Response{
|
||||
StatusCode: 200,
|
||||
Body: io.NopCloser(bytes.NewBufferString(fmt.Sprintf(`{"jwt":"%s"}`, testJWT))),
|
||||
})
|
||||
|
||||
token, err := client.getJWT(ctx)
|
||||
Expect(err).To(BeNil())
|
||||
Expect(token).To(Equal(testJWT))
|
||||
})
|
||||
|
||||
It("returns the cached token on subsequent calls", func() {
|
||||
testJWT := createTestJWT(5 * time.Minute)
|
||||
httpClient.mock("https://auth.deezer.com/login/anonymous", http.Response{
|
||||
StatusCode: 200,
|
||||
Body: io.NopCloser(bytes.NewBufferString(fmt.Sprintf(`{"jwt":"%s"}`, testJWT))),
|
||||
})
|
||||
|
||||
// First call should fetch from API
|
||||
token1, err := client.getJWT(ctx)
|
||||
Expect(err).To(BeNil())
|
||||
Expect(token1).To(Equal(testJWT))
|
||||
Expect(httpClient.lastRequest.URL.Path).To(Equal("/login/anonymous"))
|
||||
|
||||
// Second call should return cached token without hitting API
|
||||
httpClient.lastRequest = nil // Clear last request to verify no new request is made
|
||||
token2, err := client.getJWT(ctx)
|
||||
Expect(err).To(BeNil())
|
||||
Expect(token2).To(Equal(testJWT))
|
||||
Expect(httpClient.lastRequest).To(BeNil()) // No new request made
|
||||
})
|
||||
|
||||
It("parses the JWT expiration time correctly", func() {
|
||||
expectedExpiration := time.Now().Add(5 * time.Minute)
|
||||
testToken, err := jwt.NewBuilder().
|
||||
Expiration(expectedExpiration).
|
||||
Build()
|
||||
Expect(err).To(BeNil())
|
||||
testJWT, err := jwt.Sign(testToken, jwt.WithInsecureNoSignature())
|
||||
Expect(err).To(BeNil())
|
||||
|
||||
httpClient.mock("https://auth.deezer.com/login/anonymous", http.Response{
|
||||
StatusCode: 200,
|
||||
Body: io.NopCloser(bytes.NewBufferString(fmt.Sprintf(`{"jwt":"%s"}`, string(testJWT)))),
|
||||
})
|
||||
|
||||
token, err := client.getJWT(ctx)
|
||||
Expect(err).To(BeNil())
|
||||
Expect(token).ToNot(BeEmpty())
|
||||
|
||||
// Verify the token is cached until close to expiration
|
||||
// The cache should expire 1 minute before the JWT expires
|
||||
expectedCacheExpiry := expectedExpiration.Add(-1 * time.Minute)
|
||||
Expect(client.jwt.expiresAt).To(BeTemporally("~", expectedCacheExpiry, 2*time.Second))
|
||||
})
|
||||
})
|
||||
|
||||
Context("with JWT tokens that expire soon", func() {
|
||||
It("rejects tokens that expire in less than 1 minute", func() {
|
||||
// Create a token that expires in 30 seconds (less than 1-minute buffer)
|
||||
testJWT := createTestJWT(30 * time.Second)
|
||||
httpClient.mock("https://auth.deezer.com/login/anonymous", http.Response{
|
||||
StatusCode: 200,
|
||||
Body: io.NopCloser(bytes.NewBufferString(fmt.Sprintf(`{"jwt":"%s"}`, testJWT))),
|
||||
})
|
||||
|
||||
_, err := client.getJWT(ctx)
|
||||
Expect(err).To(HaveOccurred())
|
||||
Expect(err.Error()).To(ContainSubstring("JWT token already expired or expires too soon"))
|
||||
})
|
||||
|
||||
It("rejects already expired tokens", func() {
|
||||
// Create a token that expired 1 minute ago
|
||||
testJWT := createTestJWT(-1 * time.Minute)
|
||||
httpClient.mock("https://auth.deezer.com/login/anonymous", http.Response{
|
||||
StatusCode: 200,
|
||||
Body: io.NopCloser(bytes.NewBufferString(fmt.Sprintf(`{"jwt":"%s"}`, testJWT))),
|
||||
})
|
||||
|
||||
_, err := client.getJWT(ctx)
|
||||
Expect(err).To(HaveOccurred())
|
||||
Expect(err.Error()).To(ContainSubstring("JWT token already expired or expires too soon"))
|
||||
})
|
||||
|
||||
It("accepts tokens that expire in more than 1 minute", func() {
|
||||
// Create a token that expires in 2 minutes (just over the 1-minute buffer)
|
||||
testJWT := createTestJWT(2 * time.Minute)
|
||||
httpClient.mock("https://auth.deezer.com/login/anonymous", http.Response{
|
||||
StatusCode: 200,
|
||||
Body: io.NopCloser(bytes.NewBufferString(fmt.Sprintf(`{"jwt":"%s"}`, testJWT))),
|
||||
})
|
||||
|
||||
token, err := client.getJWT(ctx)
|
||||
Expect(err).To(BeNil())
|
||||
Expect(token).ToNot(BeEmpty())
|
||||
})
|
||||
})
|
||||
|
||||
Context("with invalid responses", func() {
|
||||
It("handles HTTP error responses", func() {
|
||||
httpClient.mock("https://auth.deezer.com/login/anonymous", http.Response{
|
||||
StatusCode: 500,
|
||||
Body: io.NopCloser(bytes.NewBufferString(`{"error":"Internal server error"}`)),
|
||||
})
|
||||
|
||||
_, err := client.getJWT(ctx)
|
||||
Expect(err).To(HaveOccurred())
|
||||
Expect(err.Error()).To(ContainSubstring("failed to get JWT token"))
|
||||
})
|
||||
|
||||
It("handles malformed JSON responses", func() {
|
||||
httpClient.mock("https://auth.deezer.com/login/anonymous", http.Response{
|
||||
StatusCode: 200,
|
||||
Body: io.NopCloser(bytes.NewBufferString(`{invalid json}`)),
|
||||
})
|
||||
|
||||
_, err := client.getJWT(ctx)
|
||||
Expect(err).To(HaveOccurred())
|
||||
Expect(err.Error()).To(ContainSubstring("failed to parse auth response"))
|
||||
})
|
||||
|
||||
It("handles responses with empty JWT field", func() {
|
||||
httpClient.mock("https://auth.deezer.com/login/anonymous", http.Response{
|
||||
StatusCode: 200,
|
||||
Body: io.NopCloser(bytes.NewBufferString(`{"jwt":""}`)),
|
||||
})
|
||||
|
||||
_, err := client.getJWT(ctx)
|
||||
Expect(err).To(HaveOccurred())
|
||||
Expect(err.Error()).To(Equal("deezer: no JWT token in response"))
|
||||
})
|
||||
|
||||
It("handles invalid JWT tokens", func() {
|
||||
httpClient.mock("https://auth.deezer.com/login/anonymous", http.Response{
|
||||
StatusCode: 200,
|
||||
Body: io.NopCloser(bytes.NewBufferString(`{"jwt":"not-a-valid-jwt"}`)),
|
||||
})
|
||||
|
||||
_, err := client.getJWT(ctx)
|
||||
Expect(err).To(HaveOccurred())
|
||||
Expect(err.Error()).To(ContainSubstring("failed to parse JWT token"))
|
||||
})
|
||||
|
||||
It("rejects JWT tokens without expiration", func() {
|
||||
// Create a JWT without expiration claim
|
||||
testToken, err := jwt.NewBuilder().
|
||||
Claim("custom", "value").
|
||||
Build()
|
||||
Expect(err).To(BeNil())
|
||||
|
||||
// Verify token has no expiration
|
||||
Expect(testToken.Expiration().IsZero()).To(BeTrue())
|
||||
|
||||
testJWT, err := jwt.Sign(testToken, jwt.WithInsecureNoSignature())
|
||||
Expect(err).To(BeNil())
|
||||
|
||||
httpClient.mock("https://auth.deezer.com/login/anonymous", http.Response{
|
||||
StatusCode: 200,
|
||||
Body: io.NopCloser(bytes.NewBufferString(fmt.Sprintf(`{"jwt":"%s"}`, string(testJWT)))),
|
||||
})
|
||||
|
||||
_, err = client.getJWT(ctx)
|
||||
Expect(err).To(HaveOccurred())
|
||||
Expect(err.Error()).To(Equal("deezer: JWT token has no expiration time"))
|
||||
})
|
||||
})
|
||||
|
||||
Context("token caching behavior", func() {
|
||||
It("fetches a new token when the cached token expires", func() {
|
||||
// First token expires in 5 minutes
|
||||
firstJWT := createTestJWT(5 * time.Minute)
|
||||
httpClient.mock("https://auth.deezer.com/login/anonymous", http.Response{
|
||||
StatusCode: 200,
|
||||
Body: io.NopCloser(bytes.NewBufferString(fmt.Sprintf(`{"jwt":"%s"}`, firstJWT))),
|
||||
})
|
||||
|
||||
token1, err := client.getJWT(ctx)
|
||||
Expect(err).To(BeNil())
|
||||
Expect(token1).To(Equal(firstJWT))
|
||||
|
||||
// Manually expire the cached token
|
||||
client.jwt.expiresAt = time.Now().Add(-1 * time.Second)
|
||||
|
||||
// Second token with different expiration (10 minutes)
|
||||
secondJWT := createTestJWT(10 * time.Minute)
|
||||
httpClient.mock("https://auth.deezer.com/login/anonymous", http.Response{
|
||||
StatusCode: 200,
|
||||
Body: io.NopCloser(bytes.NewBufferString(fmt.Sprintf(`{"jwt":"%s"}`, secondJWT))),
|
||||
})
|
||||
|
||||
token2, err := client.getJWT(ctx)
|
||||
Expect(err).To(BeNil())
|
||||
Expect(token2).To(Equal(secondJWT))
|
||||
Expect(token2).ToNot(Equal(token1))
|
||||
})
|
||||
})
|
||||
})
|
||||
|
||||
Describe("jwtToken cache", func() {
|
||||
var cache *jwtToken
|
||||
|
||||
BeforeEach(func() {
|
||||
cache = &jwtToken{}
|
||||
})
|
||||
|
||||
It("returns false for expired tokens", func() {
|
||||
cache.set("test-token", -1*time.Second) // Already expired
|
||||
token, valid := cache.get()
|
||||
Expect(valid).To(BeFalse())
|
||||
Expect(token).To(BeEmpty())
|
||||
})
|
||||
|
||||
It("returns true for valid tokens", func() {
|
||||
cache.set("test-token", 4*time.Minute)
|
||||
token, valid := cache.get()
|
||||
Expect(valid).To(BeTrue())
|
||||
Expect(token).To(Equal("test-token"))
|
||||
})
|
||||
|
||||
It("is thread-safe for concurrent access", func() {
|
||||
wg := sync.WaitGroup{}
|
||||
|
||||
// Writer goroutine
|
||||
wg.Go(func() {
|
||||
for i := 0; i < 100; i++ {
|
||||
cache.set(fmt.Sprintf("token-%d", i), 1*time.Hour)
|
||||
time.Sleep(1 * time.Millisecond)
|
||||
}
|
||||
})
|
||||
|
||||
// Reader goroutine
|
||||
wg.Go(func() {
|
||||
for i := 0; i < 100; i++ {
|
||||
cache.get()
|
||||
time.Sleep(1 * time.Millisecond)
|
||||
}
|
||||
})
|
||||
|
||||
// Wait for both goroutines to complete
|
||||
wg.Wait()
|
||||
|
||||
// Verify final state is valid
|
||||
token, valid := cache.get()
|
||||
Expect(valid).To(BeTrue())
|
||||
Expect(token).To(HavePrefix("token-"))
|
||||
})
|
||||
})
|
||||
})
|
||||
|
||||
// createTestJWT creates a valid JWT token for testing purposes
|
||||
func createTestJWT(expiresIn time.Duration) string {
|
||||
token, err := jwt.NewBuilder().
|
||||
Expiration(time.Now().Add(expiresIn)).
|
||||
Build()
|
||||
if err != nil {
|
||||
panic(fmt.Sprintf("failed to create test JWT: %v", err))
|
||||
}
|
||||
signed, err := jwt.Sign(token, jwt.WithInsecureNoSignature())
|
||||
if err != nil {
|
||||
panic(fmt.Sprintf("failed to sign test JWT: %v", err))
|
||||
}
|
||||
return string(signed)
|
||||
}
|
||||
@@ -1,210 +0,0 @@
|
||||
package deezer
|
||||
|
||||
import (
|
||||
"bytes"
|
||||
"fmt"
|
||||
"io"
|
||||
"net/http"
|
||||
"os"
|
||||
"time"
|
||||
|
||||
. "github.com/onsi/ginkgo/v2"
|
||||
. "github.com/onsi/gomega"
|
||||
)
|
||||
|
||||
var _ = Describe("client", func() {
|
||||
var httpClient *fakeHttpClient
|
||||
var client *client
|
||||
|
||||
BeforeEach(func() {
|
||||
httpClient = &fakeHttpClient{}
|
||||
client = newClient(httpClient)
|
||||
})
|
||||
|
||||
Describe("ArtistImages", func() {
|
||||
It("returns artist images from a successful request", func() {
|
||||
f, err := os.Open("tests/fixtures/deezer.search.artist.json")
|
||||
Expect(err).To(BeNil())
|
||||
httpClient.mock("https://api.deezer.com/search/artist", http.Response{Body: f, StatusCode: 200})
|
||||
|
||||
artists, err := client.searchArtists(GinkgoT().Context(), "Michael Jackson", 20)
|
||||
Expect(err).To(BeNil())
|
||||
Expect(artists).To(HaveLen(17))
|
||||
Expect(artists[0].Name).To(Equal("Michael Jackson"))
|
||||
Expect(artists[0].PictureXl).To(Equal("https://cdn-images.dzcdn.net/images/artist/97fae13b2b30e4aec2e8c9e0c7839d92/1000x1000-000000-80-0-0.jpg"))
|
||||
})
|
||||
|
||||
It("fails if artist was not found", func() {
|
||||
httpClient.mock("https://api.deezer.com/search/artist", http.Response{
|
||||
StatusCode: 200,
|
||||
Body: io.NopCloser(bytes.NewBufferString(`{"data":[],"total":0}`)),
|
||||
})
|
||||
|
||||
_, err := client.searchArtists(GinkgoT().Context(), "Michael Jackson", 20)
|
||||
Expect(err).To(MatchError(ErrNotFound))
|
||||
})
|
||||
})
|
||||
|
||||
Describe("TopTracks", func() {
|
||||
It("returns top tracks with artist and album info from a successful request", func() {
|
||||
f, err := os.Open("tests/fixtures/deezer.artist.top.json")
|
||||
Expect(err).To(BeNil())
|
||||
httpClient.mock("https://api.deezer.com/artist/27/top", http.Response{Body: f, StatusCode: 200})
|
||||
|
||||
tracks, err := client.getTopTracks(GinkgoT().Context(), 27, 5)
|
||||
Expect(err).To(BeNil())
|
||||
Expect(tracks).To(HaveLen(5))
|
||||
|
||||
// Verify first track has all expected fields
|
||||
Expect(tracks[0].Title).To(Equal("Instant Crush (feat. Julian Casablancas)"))
|
||||
Expect(tracks[0].Artist.Name).To(Equal("Daft Punk"))
|
||||
Expect(tracks[0].Album.Title).To(Equal("Random Access Memories"))
|
||||
|
||||
// Verify second track
|
||||
Expect(tracks[1].Title).To(Equal("One More Time"))
|
||||
Expect(tracks[1].Artist.Name).To(Equal("Daft Punk"))
|
||||
Expect(tracks[1].Album.Title).To(Equal("Discovery"))
|
||||
})
|
||||
})
|
||||
|
||||
Describe("ArtistBio", func() {
|
||||
BeforeEach(func() {
|
||||
// Mock the JWT token endpoint with a valid JWT that expires in 5 minutes
|
||||
testJWT := createTestJWT(5 * time.Minute)
|
||||
httpClient.mock("https://auth.deezer.com/login/anonymous", http.Response{
|
||||
StatusCode: 200,
|
||||
Body: io.NopCloser(bytes.NewBufferString(fmt.Sprintf(`{"jwt":"%s","refresh_token":""}`, testJWT))),
|
||||
})
|
||||
})
|
||||
|
||||
It("returns artist bio from a successful request", func() {
|
||||
f, err := os.Open("tests/fixtures/deezer.artist.bio.en.json")
|
||||
Expect(err).To(BeNil())
|
||||
httpClient.mock("https://pipe.deezer.com/api", http.Response{Body: f, StatusCode: 200})
|
||||
|
||||
bio, err := client.getArtistBio(GinkgoT().Context(), 27, "en")
|
||||
Expect(err).To(BeNil())
|
||||
Expect(bio).To(ContainSubstring("Schoolmates Thomas and Guy-Manuel"))
|
||||
Expect(bio).ToNot(ContainSubstring("<p>"))
|
||||
Expect(bio).ToNot(ContainSubstring("</p>"))
|
||||
})
|
||||
|
||||
It("uses the provided language", func() {
|
||||
f, err := os.Open("tests/fixtures/deezer.artist.bio.fr.json")
|
||||
Expect(err).To(BeNil())
|
||||
httpClient.mock("https://pipe.deezer.com/api", http.Response{Body: f, StatusCode: 200})
|
||||
|
||||
_, err = client.getArtistBio(GinkgoT().Context(), 27, "fr")
|
||||
Expect(err).To(BeNil())
|
||||
Expect(httpClient.lastRequest.Header.Get("Accept-Language")).To(Equal("fr"))
|
||||
})
|
||||
|
||||
It("includes the JWT token in the request", func() {
|
||||
f, err := os.Open("tests/fixtures/deezer.artist.bio.en.json")
|
||||
Expect(err).To(BeNil())
|
||||
httpClient.mock("https://pipe.deezer.com/api", http.Response{Body: f, StatusCode: 200})
|
||||
|
||||
_, err = client.getArtistBio(GinkgoT().Context(), 27, "en")
|
||||
Expect(err).To(BeNil())
|
||||
// Verify that the Authorization header has the Bearer token format
|
||||
authHeader := httpClient.lastRequest.Header.Get("Authorization")
|
||||
Expect(authHeader).To(HavePrefix("Bearer "))
|
||||
Expect(len(authHeader)).To(BeNumerically(">", 20)) // JWT tokens are longer than 20 chars
|
||||
})
|
||||
|
||||
It("handles GraphQL errors", func() {
|
||||
errorResponse := `{
|
||||
"data": {
|
||||
"artist": {
|
||||
"bio": {
|
||||
"full": ""
|
||||
}
|
||||
}
|
||||
},
|
||||
"errors": [
|
||||
{
|
||||
"message": "Artist not found"
|
||||
},
|
||||
{
|
||||
"message": "Invalid artist ID"
|
||||
}
|
||||
]
|
||||
}`
|
||||
httpClient.mock("https://pipe.deezer.com/api", http.Response{
|
||||
StatusCode: 200,
|
||||
Body: io.NopCloser(bytes.NewBufferString(errorResponse)),
|
||||
})
|
||||
|
||||
_, err := client.getArtistBio(GinkgoT().Context(), 999, "en")
|
||||
Expect(err).To(HaveOccurred())
|
||||
Expect(err.Error()).To(ContainSubstring("GraphQL error"))
|
||||
Expect(err.Error()).To(ContainSubstring("Artist not found"))
|
||||
Expect(err.Error()).To(ContainSubstring("Invalid artist ID"))
|
||||
})
|
||||
|
||||
It("handles empty biography", func() {
|
||||
emptyBioResponse := `{
|
||||
"data": {
|
||||
"artist": {
|
||||
"bio": {
|
||||
"full": ""
|
||||
}
|
||||
}
|
||||
}
|
||||
}`
|
||||
httpClient.mock("https://pipe.deezer.com/api", http.Response{
|
||||
StatusCode: 200,
|
||||
Body: io.NopCloser(bytes.NewBufferString(emptyBioResponse)),
|
||||
})
|
||||
|
||||
_, err := client.getArtistBio(GinkgoT().Context(), 27, "en")
|
||||
Expect(err).To(MatchError("deezer: biography not found"))
|
||||
})
|
||||
|
||||
It("handles JWT token fetch failure", func() {
|
||||
httpClient.mock("https://auth.deezer.com/login/anonymous", http.Response{
|
||||
StatusCode: 500,
|
||||
Body: io.NopCloser(bytes.NewBufferString(`{"error":"Internal server error"}`)),
|
||||
})
|
||||
|
||||
_, err := client.getArtistBio(GinkgoT().Context(), 27, "en")
|
||||
Expect(err).To(HaveOccurred())
|
||||
Expect(err.Error()).To(ContainSubstring("failed to get JWT"))
|
||||
})
|
||||
|
||||
It("handles JWT token that expires too soon", func() {
|
||||
// Create a JWT that expires in 30 seconds (less than the 1-minute buffer)
|
||||
expiredJWT := createTestJWT(30 * time.Second)
|
||||
httpClient.mock("https://auth.deezer.com/login/anonymous", http.Response{
|
||||
StatusCode: 200,
|
||||
Body: io.NopCloser(bytes.NewBufferString(fmt.Sprintf(`{"jwt":"%s","refresh_token":""}`, expiredJWT))),
|
||||
})
|
||||
|
||||
_, err := client.getArtistBio(GinkgoT().Context(), 27, "en")
|
||||
Expect(err).To(HaveOccurred())
|
||||
Expect(err.Error()).To(ContainSubstring("JWT token already expired or expires too soon"))
|
||||
})
|
||||
})
|
||||
})
|
||||
|
||||
type fakeHttpClient struct {
|
||||
responses map[string]*http.Response
|
||||
lastRequest *http.Request
|
||||
}
|
||||
|
||||
func (c *fakeHttpClient) mock(url string, response http.Response) {
|
||||
if c.responses == nil {
|
||||
c.responses = make(map[string]*http.Response)
|
||||
}
|
||||
c.responses[url] = &response
|
||||
}
|
||||
|
||||
func (c *fakeHttpClient) Do(req *http.Request) (*http.Response, error) {
|
||||
c.lastRequest = req
|
||||
u := req.URL
|
||||
u.RawQuery = ""
|
||||
if resp, ok := c.responses[u.String()]; ok {
|
||||
return resp, nil
|
||||
}
|
||||
panic("URL not mocked: " + u.String())
|
||||
}
|
||||
@@ -1,172 +0,0 @@
|
||||
package deezer
|
||||
|
||||
import (
|
||||
"context"
|
||||
"errors"
|
||||
"fmt"
|
||||
"net/http"
|
||||
"strings"
|
||||
|
||||
"github.com/navidrome/navidrome/conf"
|
||||
"github.com/navidrome/navidrome/consts"
|
||||
"github.com/navidrome/navidrome/core/agents"
|
||||
"github.com/navidrome/navidrome/log"
|
||||
"github.com/navidrome/navidrome/model"
|
||||
"github.com/navidrome/navidrome/utils/cache"
|
||||
"github.com/navidrome/navidrome/utils/slice"
|
||||
)
|
||||
|
||||
const deezerAgentName = "deezer"
|
||||
const deezerApiPictureXlSize = 1000
|
||||
const deezerApiPictureBigSize = 500
|
||||
const deezerApiPictureMediumSize = 250
|
||||
const deezerApiPictureSmallSize = 56
|
||||
const deezerArtistSearchLimit = 50
|
||||
|
||||
type deezerAgent struct {
|
||||
dataStore model.DataStore
|
||||
client *client
|
||||
languages []string
|
||||
}
|
||||
|
||||
func deezerConstructor(dataStore model.DataStore) agents.Interface {
|
||||
agent := &deezerAgent{
|
||||
dataStore: dataStore,
|
||||
languages: conf.Server.Deezer.Languages,
|
||||
}
|
||||
httpClient := &http.Client{
|
||||
Timeout: consts.DefaultHttpClientTimeOut,
|
||||
}
|
||||
cachedHttpClient := cache.NewHTTPClient(httpClient, consts.DefaultHttpClientTimeOut)
|
||||
agent.client = newClient(cachedHttpClient)
|
||||
return agent
|
||||
}
|
||||
|
||||
func (s *deezerAgent) AgentName() string {
|
||||
return deezerAgentName
|
||||
}
|
||||
|
||||
func (s *deezerAgent) GetArtistImages(ctx context.Context, _, name, _ string) ([]agents.ExternalImage, error) {
|
||||
artist, err := s.searchArtist(ctx, name)
|
||||
if err != nil {
|
||||
if errors.Is(err, agents.ErrNotFound) {
|
||||
log.Warn(ctx, "Artist not found in deezer", "artist", name)
|
||||
} else {
|
||||
log.Error(ctx, "Error calling deezer", "artist", name, err)
|
||||
}
|
||||
return nil, err
|
||||
}
|
||||
|
||||
var res []agents.ExternalImage
|
||||
possibleImages := []struct {
|
||||
URL string
|
||||
Size int
|
||||
}{
|
||||
{artist.PictureXl, deezerApiPictureXlSize},
|
||||
{artist.PictureBig, deezerApiPictureBigSize},
|
||||
{artist.PictureMedium, deezerApiPictureMediumSize},
|
||||
{artist.PictureSmall, deezerApiPictureSmallSize},
|
||||
}
|
||||
for _, imgData := range possibleImages {
|
||||
if imgData.URL != "" {
|
||||
res = append(res, agents.ExternalImage{
|
||||
URL: imgData.URL,
|
||||
Size: imgData.Size,
|
||||
})
|
||||
}
|
||||
}
|
||||
return res, nil
|
||||
}
|
||||
|
||||
func (s *deezerAgent) searchArtist(ctx context.Context, name string) (*Artist, error) {
|
||||
artists, err := s.client.searchArtists(ctx, name, deezerArtistSearchLimit)
|
||||
if errors.Is(err, ErrNotFound) || len(artists) == 0 {
|
||||
return nil, agents.ErrNotFound
|
||||
}
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
log.Trace(ctx, "Artists found", "count", len(artists), "searched_name", name)
|
||||
for i := range artists {
|
||||
log.Trace(ctx, fmt.Sprintf("Artists found #%d", i), "name", artists[i].Name, "id", artists[i].ID, "link", artists[i].Link)
|
||||
if i > 2 {
|
||||
break
|
||||
}
|
||||
}
|
||||
|
||||
// If the first one has the same name, that's the one
|
||||
if !strings.EqualFold(artists[0].Name, name) {
|
||||
log.Trace(ctx, "Top artist do not match", "searched_name", name, "found_name", artists[0].Name)
|
||||
return nil, agents.ErrNotFound
|
||||
}
|
||||
log.Trace(ctx, "Found artist", "name", artists[0].Name, "id", artists[0].ID, "link", artists[0].Link)
|
||||
return &artists[0], err
|
||||
}
|
||||
|
||||
func (s *deezerAgent) GetSimilarArtists(ctx context.Context, _, name, _ string, limit int) ([]agents.Artist, error) {
|
||||
artist, err := s.searchArtist(ctx, name)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
related, err := s.client.getRelatedArtists(ctx, artist.ID)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
res := slice.Map(related, func(r Artist) agents.Artist {
|
||||
return agents.Artist{
|
||||
Name: r.Name,
|
||||
}
|
||||
})
|
||||
if len(res) > limit {
|
||||
res = res[:limit]
|
||||
}
|
||||
return res, nil
|
||||
}
|
||||
|
||||
func (s *deezerAgent) GetArtistTopSongs(ctx context.Context, _, artistName, _ string, count int) ([]agents.Song, error) {
|
||||
artist, err := s.searchArtist(ctx, artistName)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
tracks, err := s.client.getTopTracks(ctx, artist.ID, count)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
res := slice.Map(tracks, func(r Track) agents.Song {
|
||||
return agents.Song{
|
||||
Name: r.Title,
|
||||
Album: r.Album.Title,
|
||||
Duration: uint32(r.Duration * 1000), // Convert seconds to milliseconds
|
||||
}
|
||||
})
|
||||
return res, nil
|
||||
}
|
||||
|
||||
func (s *deezerAgent) GetArtistBiography(ctx context.Context, _, name, _ string) (string, error) {
|
||||
artist, err := s.searchArtist(ctx, name)
|
||||
if err != nil {
|
||||
return "", err
|
||||
}
|
||||
|
||||
for _, lang := range s.languages {
|
||||
bio, err := s.client.getArtistBio(ctx, artist.ID, lang)
|
||||
if err == nil && bio != "" {
|
||||
return bio, nil
|
||||
}
|
||||
log.Debug(ctx, "Deezer/artist.bio returned empty/error, trying next language", "artist", name, "lang", lang, err)
|
||||
}
|
||||
return "", agents.ErrNotFound
|
||||
}
|
||||
|
||||
func init() {
|
||||
conf.AddHook(func() {
|
||||
if conf.Server.Deezer.Enabled {
|
||||
agents.Register(deezerAgentName, deezerConstructor)
|
||||
}
|
||||
})
|
||||
}
|
||||
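If the configuration keys mirror the conf.Server.Deezer fields referenced above (an assumption; the exact key names are not shown in this diff), enabling the agent with a language fallback order might look like this in navidrome.toml:

    # assumed keys, inferred from conf.Server.Deezer.Enabled / conf.Server.Deezer.Languages above
    Deezer.Enabled = true
    Deezer.Languages = ["pt", "en"]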
@@ -1,171 +0,0 @@
package deezer

import (
	"bytes"
	"context"
	"fmt"
	"io"
	"net/http"
	"os"
	"time"

	"github.com/navidrome/navidrome/conf"
	"github.com/navidrome/navidrome/conf/configtest"
	"github.com/navidrome/navidrome/core/agents"
	"github.com/navidrome/navidrome/tests"
	. "github.com/onsi/ginkgo/v2"
	. "github.com/onsi/gomega"
)

var _ = Describe("deezerAgent", func() {
	var ctx context.Context

	BeforeEach(func() {
		ctx = context.Background()
		DeferCleanup(configtest.SetupConfig())
		conf.Server.Deezer.Enabled = true
	})

	Describe("deezerConstructor", func() {
		It("uses configured languages", func() {
			conf.Server.Deezer.Languages = []string{"pt", "en"}
			agent := deezerConstructor(&tests.MockDataStore{}).(*deezerAgent)
			Expect(agent.languages).To(Equal([]string{"pt", "en"}))
		})
	})

	Describe("GetArtistBiography - Language Fallback", func() {
		var agent *deezerAgent
		var httpClient *langAwareHttpClient

		BeforeEach(func() {
			httpClient = newLangAwareHttpClient()

			// Mock search artist (returns Michael Jackson)
			fSearch, _ := os.Open("tests/fixtures/deezer.search.artist.json")
			httpClient.searchResponse = &http.Response{Body: fSearch, StatusCode: 200}

			// Mock JWT token
			testJWT := createTestJWT(5 * time.Minute)
			httpClient.jwtResponse = &http.Response{
				StatusCode: 200,
				Body:       io.NopCloser(bytes.NewBufferString(fmt.Sprintf(`{"jwt":"%s","refresh_token":""}`, testJWT))),
			}
		})

		setupAgent := func(languages []string) {
			conf.Server.Deezer.Languages = languages
			agent = &deezerAgent{
				dataStore: &tests.MockDataStore{},
				client:    newClient(httpClient),
				languages: languages,
			}
		}

		It("returns content in first language when available (1 bio API call)", func() {
			setupAgent([]string{"fr", "en"})

			// French biography available
			fFr, _ := os.Open("tests/fixtures/deezer.artist.bio.fr.json")
			httpClient.bioResponses["fr"] = &http.Response{Body: fFr, StatusCode: 200}

			bio, err := agent.GetArtistBiography(ctx, "", "Michael Jackson", "")

			Expect(err).ToNot(HaveOccurred())
			Expect(bio).To(ContainSubstring("Guy-Manuel de Homem Christo et Thomas Bangalter"))
			Expect(httpClient.bioRequestCount).To(Equal(1))
			Expect(httpClient.bioRequests[0].Header.Get("Accept-Language")).To(Equal("fr"))
		})

		It("falls back to second language when first returns empty (2 bio API calls)", func() {
			setupAgent([]string{"ja", "en"})

			// Japanese returns empty biography
			fJa, _ := os.Open("tests/fixtures/deezer.artist.bio.empty.json")
			httpClient.bioResponses["ja"] = &http.Response{Body: fJa, StatusCode: 200}
			// English returns full biography
			fEn, _ := os.Open("tests/fixtures/deezer.artist.bio.en.json")
			httpClient.bioResponses["en"] = &http.Response{Body: fEn, StatusCode: 200}

			bio, err := agent.GetArtistBiography(ctx, "", "Michael Jackson", "")

			Expect(err).ToNot(HaveOccurred())
			Expect(bio).To(ContainSubstring("Schoolmates Thomas and Guy-Manuel"))
			Expect(httpClient.bioRequestCount).To(Equal(2))
			Expect(httpClient.bioRequests[0].Header.Get("Accept-Language")).To(Equal("ja"))
			Expect(httpClient.bioRequests[1].Header.Get("Accept-Language")).To(Equal("en"))
		})

		It("returns ErrNotFound when all languages return empty", func() {
			setupAgent([]string{"ja", "xx"})

			// Both languages return empty biography
			fJa, _ := os.Open("tests/fixtures/deezer.artist.bio.empty.json")
			httpClient.bioResponses["ja"] = &http.Response{Body: fJa, StatusCode: 200}
			fXx, _ := os.Open("tests/fixtures/deezer.artist.bio.empty.json")
			httpClient.bioResponses["xx"] = &http.Response{Body: fXx, StatusCode: 200}

			_, err := agent.GetArtistBiography(ctx, "", "Michael Jackson", "")

			Expect(err).To(MatchError(agents.ErrNotFound))
			Expect(httpClient.bioRequestCount).To(Equal(2))
		})
	})
})

// langAwareHttpClient is a mock HTTP client that returns different responses based on the Accept-Language header
type langAwareHttpClient struct {
	searchResponse  *http.Response
	jwtResponse     *http.Response
	bioResponses    map[string]*http.Response
	bioRequests     []*http.Request
	bioRequestCount int
}

func newLangAwareHttpClient() *langAwareHttpClient {
	return &langAwareHttpClient{
		bioResponses: make(map[string]*http.Response),
		bioRequests:  make([]*http.Request, 0),
	}
}

func (c *langAwareHttpClient) Do(req *http.Request) (*http.Response, error) {
	// Handle search artist request
	if req.URL.Host == "api.deezer.com" && req.URL.Path == "/search/artist" {
		if c.searchResponse != nil {
			return c.searchResponse, nil
		}
		return &http.Response{
			StatusCode: 200,
			Body:       io.NopCloser(bytes.NewBufferString(`{"data":[],"total":0}`)),
		}, nil
	}

	// Handle JWT token request
	if req.URL.Host == "auth.deezer.com" && req.URL.Path == "/login/anonymous" {
		if c.jwtResponse != nil {
			return c.jwtResponse, nil
		}
		return &http.Response{
			StatusCode: 500,
			Body:       io.NopCloser(bytes.NewBufferString(`{"error":"no mock"}`)),
		}, nil
	}

	// Handle bio request (GraphQL API)
	if req.URL.Host == "pipe.deezer.com" && req.URL.Path == "/api" {
		c.bioRequestCount++
		c.bioRequests = append(c.bioRequests, req)
		lang := req.Header.Get("Accept-Language")
		if resp, ok := c.bioResponses[lang]; ok {
			return resp, nil
		}
		// Return empty bio by default
		return &http.Response{
			StatusCode: 200,
			Body:       io.NopCloser(bytes.NewBufferString(`{"data":{"artist":{"bio":{"full":""}}}}`)),
		}, nil
	}

	panic("URL not mocked: " + req.URL.String())
}
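
// Note for future specs (not part of the original change): the mock above
// panics on any URL it does not recognize, so a new Deezer endpoint added to
// the client needs a matching branch in Do before it can be exercised here.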
@@ -1,66 +0,0 @@
package deezer

type SearchArtistResults struct {
	Data  []Artist `json:"data"`
	Total int      `json:"total"`
	Next  string   `json:"next"`
}

type Artist struct {
	ID            int    `json:"id"`
	Name          string `json:"name"`
	Link          string `json:"link"`
	Picture       string `json:"picture"`
	PictureSmall  string `json:"picture_small"`
	PictureMedium string `json:"picture_medium"`
	PictureBig    string `json:"picture_big"`
	PictureXl     string `json:"picture_xl"`
	NbAlbum       int    `json:"nb_album"`
	NbFan         int    `json:"nb_fan"`
	Radio         bool   `json:"radio"`
	Tracklist     string `json:"tracklist"`
	Type          string `json:"type"`
}

type Error struct {
	Error struct {
		Type    string `json:"type"`
		Message string `json:"message"`
		Code    int    `json:"code"`
	} `json:"error"`
}

type RelatedArtists struct {
	Data  []Artist `json:"data"`
	Total int      `json:"total"`
}

type TopTracks struct {
	Data  []Track `json:"data"`
	Total int     `json:"total"`
	Next  string  `json:"next"`
}

type Track struct {
	ID           int      `json:"id"`
	Title        string   `json:"title"`
	Link         string   `json:"link"`
	Duration     int      `json:"duration"`
	Rank         int      `json:"rank"`
	Preview      string   `json:"preview"`
	Artist       Artist   `json:"artist"`
	Album        Album    `json:"album"`
	Contributors []Artist `json:"contributors"`
}
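
// Note (added for clarity, not in the original): Deezer reports Track.Duration
// in seconds; deezerAgent.GetArtistTopSongs multiplies it by 1000 when mapping
// the value into agents.Song (see the conversion comment there).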

type Album struct {
	ID          int    `json:"id"`
	Title       string `json:"title"`
	Cover       string `json:"cover"`
	CoverSmall  string `json:"cover_small"`
	CoverMedium string `json:"cover_medium"`
	CoverBig    string `json:"cover_big"`
	CoverXl     string `json:"cover_xl"`
	Tracklist   string `json:"tracklist"`
	Type        string `json:"type"`
}
@@ -1,69 +0,0 @@
package deezer

import (
	"encoding/json"
	"os"

	. "github.com/onsi/ginkgo/v2"
	. "github.com/onsi/gomega"
)

var _ = Describe("Responses", func() {
	Describe("Search type=artist", func() {
		It("parses the artist search result correctly", func() {
			var resp SearchArtistResults
			body, err := os.ReadFile("tests/fixtures/deezer.search.artist.json")
			Expect(err).To(BeNil())
			err = json.Unmarshal(body, &resp)
			Expect(err).To(BeNil())

			Expect(resp.Data).To(HaveLen(17))
			michael := resp.Data[0]
			Expect(michael.Name).To(Equal("Michael Jackson"))
			Expect(michael.PictureXl).To(Equal("https://cdn-images.dzcdn.net/images/artist/97fae13b2b30e4aec2e8c9e0c7839d92/1000x1000-000000-80-0-0.jpg"))
		})
	})

	Describe("Error", func() {
		It("parses the error response correctly", func() {
			var errorResp Error
			body := []byte(`{"error":{"type":"MissingParameterException","message":"Missing parameters: q","code":501}}`)
			err := json.Unmarshal(body, &errorResp)
			Expect(err).To(BeNil())

			Expect(errorResp.Error.Code).To(Equal(501))
			Expect(errorResp.Error.Message).To(Equal("Missing parameters: q"))
		})
	})

	Describe("Related Artists", func() {
		It("parses the related artists response correctly", func() {
			var resp RelatedArtists
			body, err := os.ReadFile("tests/fixtures/deezer.artist.related.json")
			Expect(err).To(BeNil())
			err = json.Unmarshal(body, &resp)
			Expect(err).To(BeNil())

			Expect(resp.Data).To(HaveLen(20))
			justice := resp.Data[0]
			Expect(justice.Name).To(Equal("Justice"))
			Expect(justice.ID).To(Equal(6404))
		})
	})

	Describe("Top Tracks", func() {
		It("parses the top tracks response correctly", func() {
			var resp TopTracks
			body, err := os.ReadFile("tests/fixtures/deezer.artist.top.json")
			Expect(err).To(BeNil())
			err = json.Unmarshal(body, &resp)
			Expect(err).To(BeNil())

			Expect(resp.Data).To(HaveLen(5))
			track := resp.Data[0]
			Expect(track.Title).To(Equal("Instant Crush (feat. Julian Casablancas)"))
			Expect(track.ID).To(Equal(67238732))
			Expect(track.Album.Title).To(Equal("Random Access Memories"))
		})
	})
})
@@ -1,274 +0,0 @@
|
||||
package gotaglib
|
||||
|
||||
import (
|
||||
"io/fs"
|
||||
"os"
|
||||
"time"
|
||||
|
||||
"github.com/djherbis/times"
|
||||
"github.com/navidrome/navidrome/model"
|
||||
"github.com/navidrome/navidrome/model/metadata"
|
||||
"github.com/navidrome/navidrome/utils/gg"
|
||||
. "github.com/onsi/ginkgo/v2"
|
||||
. "github.com/onsi/gomega"
|
||||
)
|
||||
|
||||
type testFileInfo struct {
|
||||
fs.FileInfo
|
||||
}
|
||||
|
||||
func (t testFileInfo) BirthTime() time.Time {
|
||||
if ts := times.Get(t.FileInfo); ts.HasBirthTime() {
|
||||
return ts.BirthTime()
|
||||
}
|
||||
return t.FileInfo.ModTime()
|
||||
}
|
||||
|
||||
var _ = Describe("Extractor", func() {
|
||||
toP := func(name, sortName, mbid string) model.Participant {
|
||||
return model.Participant{
|
||||
Artist: model.Artist{Name: name, SortArtistName: sortName, MbzArtistID: mbid},
|
||||
}
|
||||
}
|
||||
|
||||
roles := []struct {
|
||||
model.Role
|
||||
model.ParticipantList
|
||||
}{
|
||||
{model.RoleComposer, model.ParticipantList{
|
||||
toP("coma a", "a, coma", "bf13b584-f27c-43db-8f42-32898d33d4e2"),
|
||||
toP("comb", "comb", "924039a2-09c6-4d29-9b4f-50cc54447d36"),
|
||||
}},
|
||||
{model.RoleLyricist, model.ParticipantList{
|
||||
toP("la a", "a, la", "c84f648f-68a6-40a2-a0cb-d135b25da3c2"),
|
||||
toP("lb", "lb", "0a7c582d-143a-4540-b4e9-77200835af65"),
|
||||
}},
|
||||
{model.RoleArranger, model.ParticipantList{
|
||||
toP("aa", "", "4605a1d4-8d15-42a3-bd00-9c20e42f71e6"),
|
||||
toP("ab", "", "002f0ff8-77bf-42cc-8216-61a9c43dc145"),
|
||||
}},
|
||||
{model.RoleConductor, model.ParticipantList{
|
||||
toP("cona", "", "af86879b-2141-42af-bad2-389a4dc91489"),
|
||||
toP("conb", "", "3dfa3c70-d7d3-4b97-b953-c298dd305e12"),
|
||||
}},
|
||||
{model.RoleDirector, model.ParticipantList{
|
||||
toP("dia", "", "f943187f-73de-4794-be47-88c66f0fd0f4"),
|
||||
toP("dib", "", "bceb75da-1853-4b3d-b399-b27f0cafc389"),
|
||||
}},
|
||||
{model.RoleEngineer, model.ParticipantList{
|
||||
toP("ea", "", "f634bf6d-d66a-425d-888a-28ad39392759"),
|
||||
toP("eb", "", "243d64ae-d514-44e1-901a-b918d692baee"),
|
||||
}},
|
||||
{model.RoleProducer, model.ParticipantList{
|
||||
toP("pra", "", "d971c8d7-999c-4a5f-ac31-719721ab35d6"),
|
||||
toP("prb", "", "f0a09070-9324-434f-a599-6d25ded87b69"),
|
||||
}},
|
||||
{model.RoleRemixer, model.ParticipantList{
|
||||
toP("ra", "", "c7dc6095-9534-4c72-87cc-aea0103462cf"),
|
||||
toP("rb", "", "8ebeef51-c08c-4736-992f-c37870becedd"),
|
||||
}},
|
||||
{model.RoleDJMixer, model.ParticipantList{
|
||||
toP("dja", "", "d063f13b-7589-4efc-ab7f-c60e6db17247"),
|
||||
toP("djb", "", "3636670c-385f-4212-89c8-0ff51d6bc456"),
|
||||
}},
|
||||
{model.RoleMixer, model.ParticipantList{
|
||||
toP("ma", "", "53fb5a2d-7016-427e-a563-d91819a5f35a"),
|
||||
toP("mb", "", "64c13e65-f0da-4ab9-a300-71ee53b0376a"),
|
||||
}},
|
||||
}
|
||||
|
||||
var e *extractor
|
||||
|
||||
parseTestFile := func(path string) *model.MediaFile {
|
||||
mds, err := e.Parse(path)
|
||||
Expect(err).ToNot(HaveOccurred())
|
||||
|
||||
info, ok := mds[path]
|
||||
Expect(ok).To(BeTrue())
|
||||
|
||||
fileInfo, err := os.Stat(path)
|
||||
Expect(err).ToNot(HaveOccurred())
|
||||
info.FileInfo = testFileInfo{FileInfo: fileInfo}
|
||||
|
||||
metadata := metadata.New(path, info)
|
||||
mf := metadata.ToMediaFile(1, "folderID")
|
||||
return &mf
|
||||
}
|
||||
|
||||
BeforeEach(func() {
|
||||
e = &extractor{fs: os.DirFS(".")}
|
||||
})
|
||||
|
||||
Describe("ReplayGain", func() {
|
||||
DescribeTable("test replaygain end-to-end", func(file string, trackGain, trackPeak, albumGain, albumPeak *float64) {
|
||||
mf := parseTestFile("tests/fixtures/" + file)
|
||||
|
||||
Expect(mf.RGTrackGain).To(Equal(trackGain))
|
||||
Expect(mf.RGTrackPeak).To(Equal(trackPeak))
|
||||
Expect(mf.RGAlbumGain).To(Equal(albumGain))
|
||||
Expect(mf.RGAlbumPeak).To(Equal(albumPeak))
|
||||
},
|
||||
Entry("mp3 with no replaygain", "no_replaygain.mp3", nil, nil, nil, nil),
|
||||
Entry("mp3 with no zero replaygain", "zero_replaygain.mp3", gg.P(0.0), gg.P(1.0), gg.P(0.0), gg.P(1.0)),
|
||||
)
|
||||
})
|
||||
|
||||
Describe("lyrics", func() {
|
||||
makeLyrics := func(code, secondLine string) model.Lyrics {
|
||||
return model.Lyrics{
|
||||
DisplayArtist: "",
|
||||
DisplayTitle: "",
|
||||
Lang: code,
|
||||
Line: []model.Line{
|
||||
{Start: gg.P(int64(0)), Value: "This is"},
|
||||
{Start: gg.P(int64(2500)), Value: secondLine},
|
||||
},
|
||||
Offset: nil,
|
||||
Synced: true,
|
||||
}
|
||||
}
|
||||
|
||||
It("should fetch both synced and unsynced lyrics in mixed flac", func() {
|
||||
mf := parseTestFile("tests/fixtures/mixed-lyrics.flac")
|
||||
|
||||
lyrics, err := mf.StructuredLyrics()
|
||||
Expect(err).ToNot(HaveOccurred())
|
||||
Expect(lyrics).To(HaveLen(2))
|
||||
|
||||
Expect(lyrics[0].Synced).To(BeTrue())
|
||||
Expect(lyrics[1].Synced).To(BeFalse())
|
||||
})
|
||||
|
||||
It("should handle mp3 with uslt and sylt", func() {
|
||||
mf := parseTestFile("tests/fixtures/test.mp3")
|
||||
|
||||
lyrics, err := mf.StructuredLyrics()
|
||||
Expect(err).ToNot(HaveOccurred())
|
||||
Expect(lyrics).To(HaveLen(4))
|
||||
|
||||
engSylt := makeLyrics("eng", "English SYLT")
|
||||
engUslt := makeLyrics("eng", "English")
|
||||
unsSylt := makeLyrics("xxx", "unspecified SYLT")
|
||||
unsUslt := makeLyrics("xxx", "unspecified")
|
||||
|
||||
Expect(lyrics).To(ConsistOf(engSylt, engUslt, unsSylt, unsUslt))
|
||||
})
|
||||
|
||||
DescribeTable("format-specific lyrics", func(file string, isId3 bool) {
|
||||
mf := parseTestFile("tests/fixtures/" + file)
|
||||
|
||||
lyrics, err := mf.StructuredLyrics()
|
||||
Expect(err).To(Not(HaveOccurred()))
|
||||
Expect(lyrics).To(HaveLen(2))
|
||||
|
||||
unspec := makeLyrics("xxx", "unspecified")
|
||||
eng := makeLyrics("xxx", "English")
|
||||
|
||||
if isId3 {
|
||||
eng.Lang = "eng"
|
||||
}
|
||||
|
||||
Expect(lyrics).To(Or(
|
||||
Equal(model.LyricList{unspec, eng}),
|
||||
Equal(model.LyricList{eng, unspec})))
|
||||
},
|
||||
Entry("flac", "test.flac", false),
|
||||
Entry("m4a", "test.m4a", false),
|
||||
Entry("ogg", "test.ogg", false),
|
||||
Entry("wma", "test.wma", false),
|
||||
Entry("wv", "test.wv", false),
|
||||
Entry("wav", "test.wav", true),
|
||||
Entry("aiff", "test.aiff", true),
|
||||
)
|
||||
})
|
||||
|
||||
Describe("Participants", func() {
|
||||
DescribeTable("test tags consistent across formats", func(format string) {
|
||||
mf := parseTestFile("tests/fixtures/test." + format)
|
||||
|
||||
for _, data := range roles {
|
||||
role := data.Role
|
||||
artists := data.ParticipantList
|
||||
|
||||
actual := mf.Participants[role]
|
||||
Expect(actual).To(HaveLen(len(artists)))
|
||||
|
||||
for i := range artists {
|
||||
actualArtist := actual[i]
|
||||
expectedArtist := artists[i]
|
||||
|
||||
Expect(actualArtist.Name).To(Equal(expectedArtist.Name))
|
||||
Expect(actualArtist.SortArtistName).To(Equal(expectedArtist.SortArtistName))
|
||||
Expect(actualArtist.MbzArtistID).To(Equal(expectedArtist.MbzArtistID))
|
||||
}
|
||||
}
|
||||
|
||||
if format != "m4a" {
|
||||
performers := mf.Participants[model.RolePerformer]
|
||||
Expect(performers).To(HaveLen(8))
|
||||
|
||||
rules := map[string][]string{
|
||||
"pgaa": {"2fd0b311-9fa8-4ff9-be5d-f6f3d16b835e", "Guitar"},
|
||||
"pgbb": {"223d030b-bf97-4c2a-ad26-b7f7bbe25c93", "Guitar", ""},
|
||||
"pvaa": {"cb195f72-448f-41c8-b962-3f3c13d09d38", "Vocals"},
|
||||
"pvbb": {"60a1f832-8ca2-49f6-8660-84d57f07b520", "Vocals", "Flute"},
|
||||
"pfaa": {"51fb40c-0305-4bf9-a11b-2ee615277725", "", "Flute"},
|
||||
}
|
||||
|
||||
for name, rule := range rules {
|
||||
mbid := rule[0]
|
||||
for i := 1; i < len(rule); i++ {
|
||||
found := false
|
||||
|
||||
for _, mapped := range performers {
|
||||
if mapped.Name == name && mapped.MbzArtistID == mbid && mapped.SubRole == rule[i] {
|
||||
found = true
|
||||
break
|
||||
}
|
||||
}
|
||||
|
||||
Expect(found).To(BeTrue(), "Could not find matching artist")
|
||||
}
|
||||
}
|
||||
}
|
||||
},
|
||||
Entry("FLAC format", "flac"),
|
||||
Entry("M4a format", "m4a"),
|
||||
Entry("OGG format", "ogg"),
|
||||
Entry("WV format", "wv"),
|
||||
|
||||
Entry("MP3 format", "mp3"),
|
||||
Entry("WAV format", "wav"),
|
||||
Entry("AIFF format", "aiff"),
|
||||
)
|
||||
|
||||
It("should parse wma", func() {
|
||||
mf := parseTestFile("tests/fixtures/test.wma")
|
||||
|
||||
for _, data := range roles {
|
||||
role := data.Role
|
||||
artists := data.ParticipantList
|
||||
actual := mf.Participants[role]
|
||||
|
||||
// WMA has no Arranger role
|
||||
if role == model.RoleArranger {
|
||||
Expect(actual).To(HaveLen(0))
|
||||
continue
|
||||
}
|
||||
|
||||
Expect(actual).To(HaveLen(len(artists)), role.String())
|
||||
|
||||
// For some bizarre reason, the order is inverted. We also don't get
|
||||
// sort names or MBIDs
|
||||
for i := range artists {
|
||||
idx := len(artists) - 1 - i
|
||||
|
||||
actualArtist := actual[i]
|
||||
expectedArtist := artists[idx]
|
||||
|
||||
Expect(actualArtist.Name).To(Equal(expectedArtist.Name))
|
||||
}
|
||||
}
|
||||
})
|
||||
})
|
||||
})
|
||||
@@ -1,276 +0,0 @@
// Package gotaglib provides an alternative metadata extractor using go-taglib,
// a pure Go (WASM-based) implementation of TagLib.
//
// This extractor aims for parity with the CGO-based taglib extractor. It uses
// TagLib's PropertyMap interface for standard tags. The File handle API provides
// efficient access to format-specific tags (ID3v2 frames, MP4 atoms, ASF attributes)
// through a single file open operation.
//
// This extractor is registered under the name "taglib". It only works with a filesystem
// (fs.FS) and does not support direct local file paths. Files returned by the filesystem
// must implement io.ReadSeeker for go-taglib to read them.
package gotaglib

import (
	"errors"
	"fmt"
	"io"
	"io/fs"
	"runtime/debug"
	"strings"
	"time"

	"github.com/navidrome/navidrome/core/storage/local"
	"github.com/navidrome/navidrome/log"
	"github.com/navidrome/navidrome/model/metadata"
	"go.senan.xyz/taglib"
)

type extractor struct {
	fs fs.FS
}

func (e extractor) Parse(files ...string) (map[string]metadata.Info, error) {
	results := make(map[string]metadata.Info)
	for _, path := range files {
		props, err := e.extractMetadata(path)
		if err != nil {
			continue
		}
		results[path] = *props
	}
	return results, nil
}
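
// Illustrative sketch, not part of the original change: the extractor only
// sees paths relative to the fs.FS it was built with, so a caller would wire
// it up roughly like this (the "/music" root is a made-up example):
//
//	e := extractor{fs: os.DirFS("/music")}
//	infos, err := e.Parse("artist/album/track.flac")
//	if err == nil {
//		info := infos["artist/album/track.flac"]
//		_ = info.Tags["title"]
//	}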

func (e extractor) Version() string {
	return "go-taglib (TagLib 2.1.1 WASM)"
}

func (e extractor) extractMetadata(filePath string) (*metadata.Info, error) {
	f, close, err := e.openFile(filePath)
	if err != nil {
		return nil, err
	}
	defer close()

	// Get all tags and properties in one go
	allTags := f.AllTags()
	props := f.Properties()

	// Map properties to AudioProperties
	ap := metadata.AudioProperties{
		Duration:   props.Length.Round(time.Millisecond * 10),
		BitRate:    int(props.Bitrate),
		Channels:   int(props.Channels),
		SampleRate: int(props.SampleRate),
		BitDepth:   int(props.BitsPerSample),
	}

	// Convert normalized tags to lowercase keys (go-taglib returns UPPERCASE keys)
	normalizedTags := make(map[string][]string, len(allTags.Tags))
	for key, values := range allTags.Tags {
		lowerKey := strings.ToLower(key)
		normalizedTags[lowerKey] = values
	}

	// Process format-specific raw tags
	processRawTags(allTags, normalizedTags)

	// Parse track/disc totals from "N/Total" format
	parseTuple(normalizedTags, "track")
	parseTuple(normalizedTags, "disc")

	// Adjust some ID3 tags
	parseLyrics(normalizedTags)
	parseTIPL(normalizedTags)
	delete(normalizedTags, "tmcl") // TMCL is already parsed by TagLib

	// Determine if file has embedded picture
	hasPicture := len(props.Images) > 0

	return &metadata.Info{
		Tags:            normalizedTags,
		AudioProperties: ap,
		HasPicture:      hasPicture,
	}, nil
}

// openFile opens the file at filePath using the extractor's filesystem.
// It returns a TagLib File handle and a cleanup function to close resources.
func (e extractor) openFile(filePath string) (f *taglib.File, closeFunc func(), err error) {
	// Recover from panics in the WASM runtime (e.g., wazero failing to mmap executable memory
	// on hardened systems like NixOS with MemoryDenyWriteExecute=true)
	debug.SetPanicOnFault(true)
	defer func() {
		if r := recover(); r != nil {
			log.Error("WASM runtime panic: This may be caused by a hardened system that blocks executable memory mapping.", "file", filePath, "panic", r)
			err = fmt.Errorf("WASM runtime panic (hardened system?): %v", r)
		}
	}()

	// Open the file from the filesystem
	file, err := e.fs.Open(filePath)
	if err != nil {
		return nil, nil, err
	}
	rs, isSeekable := file.(io.ReadSeeker)
	if !isSeekable {
		file.Close()
		return nil, nil, errors.New("file is not seekable")
	}
	f, err = taglib.OpenStream(rs, taglib.WithReadStyle(taglib.ReadStyleFast))
	if err != nil {
		file.Close()
		return nil, nil, err
	}
	closeFunc = func() {
		f.Close()
		file.Close()
	}
	return f, closeFunc, nil
}

// parseTuple parses track/disc numbers in "N/Total" format and separates them.
// For example, tracknumber="2/10" becomes tracknumber="2" and tracktotal="10".
func parseTuple(tags map[string][]string, prop string) {
	tagName := prop + "number"
	tagTotal := prop + "total"
	if value, ok := tags[tagName]; ok && len(value) > 0 {
		parts := strings.Split(value[0], "/")
		tags[tagName] = []string{parts[0]}
		if len(parts) == 2 {
			tags[tagTotal] = []string{parts[1]}
		}
	}
}

// parseLyrics ensures lyrics tags have a language code.
// If lyrics exist without a language code, they are moved to "lyrics:xxx".
func parseLyrics(tags map[string][]string) {
	lyrics := tags["lyrics"]
	if len(lyrics) > 0 {
		tags["lyrics:xxx"] = lyrics
		delete(tags, "lyrics")
	}
}

// processRawTags processes format-specific raw tags based on the detected file format.
// This handles ID3v2 frames (MP3/WAV/AIFF), MP4 atoms, and ASF attributes.
func processRawTags(allTags taglib.AllTags, normalizedTags map[string][]string) {
	switch allTags.Format {
	case taglib.FormatMPEG, taglib.FormatWAV, taglib.FormatAIFF:
		parseID3v2Frames(allTags.Raw, normalizedTags)
	case taglib.FormatMP4:
		parseMP4Atoms(allTags.Raw, normalizedTags)
	case taglib.FormatASF:
		parseASFAttributes(allTags.Raw, normalizedTags)
	}
}

// parseID3v2Frames processes ID3v2 raw frames to extract USLT/SYLT with language codes.
// This extracts language-specific lyrics that the standard Tags() doesn't provide.
func parseID3v2Frames(rawFrames map[string][]string, tags map[string][]string) {
	// Process frames that have language-specific data
	for key, values := range rawFrames {
		lowerKey := strings.ToLower(key)

		// Handle USLT:xxx and SYLT:xxx (lyrics with language codes)
		if strings.HasPrefix(lowerKey, "uslt:") || strings.HasPrefix(lowerKey, "sylt:") {
			parts := strings.SplitN(lowerKey, ":", 2)
			if len(parts) == 2 && parts[1] != "" {
				lang := parts[1]
				lyricsKey := "lyrics:" + lang
				tags[lyricsKey] = append(tags[lyricsKey], values...)
			}
		}
	}

	// If we found any language-specific lyrics from ID3v2 frames, remove the generic lyrics
	for key := range tags {
		if strings.HasPrefix(key, "lyrics:") && key != "lyrics" {
			delete(tags, "lyrics")
			break
		}
	}
}
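
// Illustrative note, not part of the original change: a raw frame key such as
// "USLT:eng" ends up as tags["lyrics:eng"], and once any language-specific
// entry exists the generic "lyrics" value coming from the PropertyMap is
// dropped so the same text is not duplicated.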

const iTunesKeyPrefix = "----:com.apple.iTunes:"

// parseMP4Atoms processes MP4 raw atoms to get iTunes-specific tags.
func parseMP4Atoms(rawAtoms map[string][]string, tags map[string][]string) {
	// Process all atoms and add them to tags
	for key, values := range rawAtoms {
		// Strip iTunes prefix and convert to lowercase
		normalizedKey := strings.TrimPrefix(key, iTunesKeyPrefix)
		normalizedKey = strings.ToLower(normalizedKey)

		// Only add if the tag doesn't already exist (avoid duplication with PropertyMap)
		if _, exists := tags[normalizedKey]; !exists {
			tags[normalizedKey] = values
		}
	}
}

// parseASFAttributes processes ASF raw attributes to get WMA-specific tags.
func parseASFAttributes(rawAttrs map[string][]string, tags map[string][]string) {
	// Process all attributes and add them to tags
	for key, values := range rawAttrs {
		normalizedKey := strings.ToLower(key)

		// Only add if the tag doesn't already exist (avoid duplication with PropertyMap)
		if _, exists := tags[normalizedKey]; !exists {
			tags[normalizedKey] = values
		}
	}
}

// These are the only roles we support, based on Picard's tag map:
// https://picard-docs.musicbrainz.org/downloads/MusicBrainz_Picard_Tag_Map.html
var tiplMapping = map[string]string{
	"arranger": "arranger",
	"engineer": "engineer",
	"producer": "producer",
	"mix":      "mixer",
	"DJ-mix":   "djmixer",
}

// parseTIPL parses the ID3v2.4 TIPL frame string, which is received from TagLib in the format:
//
//	"arranger Andrew Powell engineer Chris Blair engineer Pat Stapley producer Eric Woolfson".
//
// and breaks it down into a map of roles and names, e.g.:
//
//	{"arranger": ["Andrew Powell"], "engineer": ["Chris Blair", "Pat Stapley"], "producer": ["Eric Woolfson"]}.
func parseTIPL(tags map[string][]string) {
	tipl := tags["tipl"]
	if len(tipl) == 0 {
		return
	}
	addRole := func(currentRole string, currentValue []string) {
		if currentRole != "" && len(currentValue) > 0 {
			role := tiplMapping[currentRole]
			tags[role] = append(tags[role], strings.Join(currentValue, " "))
		}
	}
	var currentRole string
	var currentValue []string
	for _, part := range strings.Split(tipl[0], " ") {
		if _, ok := tiplMapping[part]; ok {
			addRole(currentRole, currentValue)
			currentRole = part
			currentValue = nil
			continue
		}
		currentValue = append(currentValue, part)
	}
	addRole(currentRole, currentValue)
	delete(tags, "tipl")
}

var _ local.Extractor = (*extractor)(nil)

func init() {
	local.RegisterExtractor("taglib", func(fsys fs.FS, baseDir string) local.Extractor {
		return &extractor{fsys}
	})
}
@@ -1,17 +0,0 @@
package gotaglib

import (
	"testing"

	"github.com/navidrome/navidrome/log"
	"github.com/navidrome/navidrome/tests"
	. "github.com/onsi/ginkgo/v2"
	. "github.com/onsi/gomega"
)

func TestGoTagLib(t *testing.T) {
	tests.Init(t, true)
	log.SetLevel(log.LevelFatal)
	RegisterFailHandler(Fail)
	RunSpecs(t, "GoTagLib Suite")
}
@@ -1,302 +0,0 @@
|
||||
package gotaglib
|
||||
|
||||
import (
|
||||
"io/fs"
|
||||
"os"
|
||||
"strings"
|
||||
|
||||
"github.com/navidrome/navidrome/utils"
|
||||
. "github.com/onsi/ginkgo/v2"
|
||||
. "github.com/onsi/gomega"
|
||||
)
|
||||
|
||||
var _ = Describe("Extractor", func() {
|
||||
var e *extractor
|
||||
|
||||
BeforeEach(func() {
|
||||
e = &extractor{fs: os.DirFS(".")}
|
||||
})
|
||||
|
||||
Describe("Parse", func() {
|
||||
It("correctly parses metadata from all files in folder", func() {
|
||||
mds, err := e.Parse(
|
||||
"tests/fixtures/test.mp3",
|
||||
"tests/fixtures/test.ogg",
|
||||
)
|
||||
Expect(err).NotTo(HaveOccurred())
|
||||
Expect(mds).To(HaveLen(2))
|
||||
|
||||
// Test MP3
|
||||
m := mds["tests/fixtures/test.mp3"]
|
||||
Expect(m.Tags).To(HaveKeyWithValue("title", []string{"Song"}))
|
||||
Expect(m.Tags).To(HaveKeyWithValue("album", []string{"Album"}))
|
||||
Expect(m.Tags).To(HaveKeyWithValue("artist", []string{"Artist"}))
|
||||
Expect(m.Tags).To(HaveKeyWithValue("albumartist", []string{"Album Artist"}))
|
||||
|
||||
Expect(m.HasPicture).To(BeTrue())
|
||||
Expect(m.AudioProperties.Duration.String()).To(Equal("1.02s"))
|
||||
Expect(m.AudioProperties.BitRate).To(Equal(192))
|
||||
Expect(m.AudioProperties.Channels).To(Equal(2))
|
||||
Expect(m.AudioProperties.SampleRate).To(Equal(44100))
|
||||
|
||||
Expect(m.Tags).To(Or(
|
||||
HaveKeyWithValue("compilation", []string{"1"}),
|
||||
HaveKeyWithValue("tcmp", []string{"1"})),
|
||||
)
|
||||
Expect(m.Tags).To(HaveKeyWithValue("genre", []string{"Rock"}))
|
||||
Expect(m.Tags).To(HaveKeyWithValue("date", []string{"2014-05-21"}))
|
||||
Expect(m.Tags).To(HaveKeyWithValue("originaldate", []string{"1996-11-21"}))
|
||||
Expect(m.Tags).To(HaveKeyWithValue("releasedate", []string{"2020-12-31"}))
|
||||
Expect(m.Tags).To(HaveKeyWithValue("discnumber", []string{"1"}))
|
||||
Expect(m.Tags).To(HaveKeyWithValue("disctotal", []string{"2"}))
|
||||
Expect(m.Tags).To(HaveKeyWithValue("comment", []string{"Comment1\nComment2"}))
|
||||
Expect(m.Tags).To(HaveKeyWithValue("bpm", []string{"123"}))
|
||||
Expect(m.Tags).To(HaveKeyWithValue("replaygain_album_gain", []string{"+3.21518 dB"}))
|
||||
Expect(m.Tags).To(HaveKeyWithValue("replaygain_album_peak", []string{"0.9125"}))
|
||||
Expect(m.Tags).To(HaveKeyWithValue("replaygain_track_gain", []string{"-1.48 dB"}))
|
||||
Expect(m.Tags).To(HaveKeyWithValue("replaygain_track_peak", []string{"0.4512"}))
|
||||
|
||||
Expect(m.Tags).To(HaveKeyWithValue("tracknumber", []string{"2"}))
|
||||
Expect(m.Tags).To(HaveKeyWithValue("tracktotal", []string{"10"}))
|
||||
|
||||
Expect(m.Tags).ToNot(HaveKey("lyrics"))
|
||||
Expect(m.Tags).To(Or(HaveKeyWithValue("lyrics:eng", []string{
|
||||
"[00:00.00]This is\n[00:02.50]English SYLT\n",
|
||||
"[00:00.00]This is\n[00:02.50]English",
|
||||
}), HaveKeyWithValue("lyrics:eng", []string{
|
||||
"[00:00.00]This is\n[00:02.50]English",
|
||||
"[00:00.00]This is\n[00:02.50]English SYLT\n",
|
||||
})))
|
||||
Expect(m.Tags).To(Or(HaveKeyWithValue("lyrics:xxx", []string{
|
||||
"[00:00.00]This is\n[00:02.50]unspecified SYLT\n",
|
||||
"[00:00.00]This is\n[00:02.50]unspecified",
|
||||
}), HaveKeyWithValue("lyrics:xxx", []string{
|
||||
"[00:00.00]This is\n[00:02.50]unspecified",
|
||||
"[00:00.00]This is\n[00:02.50]unspecified SYLT\n",
|
||||
})))
|
||||
|
||||
// Test OGG
|
||||
m = mds["tests/fixtures/test.ogg"]
|
||||
Expect(err).To(BeNil())
|
||||
Expect(m.Tags).To(HaveKeyWithValue("fbpm", []string{"141.7"}))
|
||||
|
||||
// TagLib 1.12 returns 18, previous versions return 39.
|
||||
// See https://github.com/taglib/taglib/commit/2f238921824741b2cfe6fbfbfc9701d9827ab06b
|
||||
Expect(m.AudioProperties.BitRate).To(BeElementOf(18, 19, 39, 40, 43, 49))
|
||||
Expect(m.AudioProperties.Channels).To(BeElementOf(2))
|
||||
Expect(m.AudioProperties.SampleRate).To(BeElementOf(8000))
|
||||
Expect(m.HasPicture).To(BeTrue())
|
||||
})
|
||||
|
||||
DescribeTable("Format-Specific tests",
|
||||
func(file, duration string, channels, samplerate, bitdepth int, albumGain, albumPeak, trackGain, trackPeak string, id3Lyrics bool, image bool) {
|
||||
file = "tests/fixtures/" + file
|
||||
mds, err := e.Parse(file)
|
||||
Expect(err).NotTo(HaveOccurred())
|
||||
Expect(mds).To(HaveLen(1))
|
||||
|
||||
m := mds[file]
|
||||
|
||||
Expect(m.HasPicture).To(Equal(image))
|
||||
Expect(m.AudioProperties.Duration.String()).To(Equal(duration))
|
||||
Expect(m.AudioProperties.Channels).To(Equal(channels))
|
||||
Expect(m.AudioProperties.SampleRate).To(Equal(samplerate))
|
||||
Expect(m.AudioProperties.BitDepth).To(Equal(bitdepth))
|
||||
|
||||
Expect(m.Tags).To(Or(
|
||||
HaveKeyWithValue("replaygain_album_gain", []string{albumGain}),
|
||||
HaveKeyWithValue("----:com.apple.itunes:replaygain_album_gain", []string{albumGain}),
|
||||
))
|
||||
|
||||
Expect(m.Tags).To(Or(
|
||||
HaveKeyWithValue("replaygain_album_peak", []string{albumPeak}),
|
||||
HaveKeyWithValue("----:com.apple.itunes:replaygain_album_peak", []string{albumPeak}),
|
||||
))
|
||||
Expect(m.Tags).To(Or(
|
||||
HaveKeyWithValue("replaygain_track_gain", []string{trackGain}),
|
||||
HaveKeyWithValue("----:com.apple.itunes:replaygain_track_gain", []string{trackGain}),
|
||||
))
|
||||
Expect(m.Tags).To(Or(
|
||||
HaveKeyWithValue("replaygain_track_peak", []string{trackPeak}),
|
||||
HaveKeyWithValue("----:com.apple.itunes:replaygain_track_peak", []string{trackPeak}),
|
||||
))
|
||||
|
||||
Expect(m.Tags).To(HaveKeyWithValue("title", []string{"Title"}))
|
||||
Expect(m.Tags).To(HaveKeyWithValue("album", []string{"Album"}))
|
||||
Expect(m.Tags).To(HaveKeyWithValue("artist", []string{"Artist"}))
|
||||
Expect(m.Tags).To(HaveKeyWithValue("albumartist", []string{"Album Artist"}))
|
||||
Expect(m.Tags).To(HaveKeyWithValue("genre", []string{"Rock"}))
|
||||
Expect(m.Tags).To(HaveKeyWithValue("date", []string{"2014"}))
|
||||
|
||||
Expect(m.Tags).To(HaveKeyWithValue("bpm", []string{"123"}))
|
||||
Expect(m.Tags).To(Or(
|
||||
HaveKeyWithValue("tracknumber", []string{"3"}),
|
||||
HaveKeyWithValue("tracknumber", []string{"3/10"}),
|
||||
))
|
||||
if !strings.HasSuffix(file, "test.wma") {
|
||||
// TODO Not sure why this is not working for WMA
|
||||
Expect(m.Tags).To(HaveKeyWithValue("tracktotal", []string{"10"}))
|
||||
}
|
||||
Expect(m.Tags).To(Or(
|
||||
HaveKeyWithValue("discnumber", []string{"1"}),
|
||||
HaveKeyWithValue("discnumber", []string{"1/2"}),
|
||||
))
|
||||
Expect(m.Tags).To(HaveKeyWithValue("disctotal", []string{"2"}))
|
||||
|
||||
// WMA does not have a "compilation" tag, but "wm/iscompilation"
|
||||
Expect(m.Tags).To(Or(
|
||||
HaveKeyWithValue("compilation", []string{"1"}),
|
||||
HaveKeyWithValue("wm/iscompilation", []string{"1"})),
|
||||
)
|
||||
|
||||
if id3Lyrics {
|
||||
Expect(m.Tags).To(HaveKeyWithValue("lyrics:eng", []string{
|
||||
"[00:00.00]This is\n[00:02.50]English",
|
||||
}))
|
||||
Expect(m.Tags).To(HaveKeyWithValue("lyrics:xxx", []string{
|
||||
"[00:00.00]This is\n[00:02.50]unspecified",
|
||||
}))
|
||||
} else {
|
||||
Expect(m.Tags).To(HaveKeyWithValue("lyrics:xxx", []string{
|
||||
"[00:00.00]This is\n[00:02.50]unspecified",
|
||||
"[00:00.00]This is\n[00:02.50]English",
|
||||
}))
|
||||
}
|
||||
|
||||
Expect(m.Tags).To(HaveKeyWithValue("comment", []string{"Comment1\nComment2"}))
|
||||
},
|
||||
|
||||
// ffmpeg -f lavfi -i "sine=frequency=1200:duration=1" test.flac
|
||||
Entry("correctly parses flac tags", "test.flac", "1s", 1, 44100, 16, "+4.06 dB", "0.12496948", "+4.06 dB", "0.12496948", false, true),
|
||||
|
||||
Entry("correctly parses m4a (aac) gain tags", "01 Invisible (RED) Edit Version.m4a", "1.04s", 2, 44100, 16, "0.37", "0.48", "0.37", "0.48", false, true),
|
||||
Entry("correctly parses m4a (aac) gain tags (uppercase)", "test.m4a", "1.04s", 2, 44100, 16, "0.37", "0.48", "0.37", "0.48", false, true),
|
||||
Entry("correctly parses ogg (vorbis) tags", "test.ogg", "1.04s", 2, 8000, 0, "+7.64 dB", "0.11772506", "+7.64 dB", "0.11772506", false, true),
|
||||
|
||||
// ffmpeg -f lavfi -i "sine=frequency=900:duration=1" test.wma
|
||||
// Weird note: for the tag parsing to work, the lyrics are actually stored in the reverse order
|
||||
Entry("correctly parses wma/asf tags", "test.wma", "1.02s", 1, 44100, 16, "3.27 dB", "0.132914", "3.27 dB", "0.132914", false, true),
|
||||
|
||||
// ffmpeg -f lavfi -i "sine=frequency=800:duration=1" test.wv
|
||||
Entry("correctly parses wv (wavpak) tags", "test.wv", "1s", 1, 44100, 16, "3.43 dB", "0.125061", "3.43 dB", "0.125061", false, true),
|
||||
|
||||
// ffmpeg -f lavfi -i "sine=frequency=1000:duration=1" test.wav
|
||||
Entry("correctly parses wav tags", "test.wav", "1s", 1, 44100, 16, "3.06 dB", "0.125056", "3.06 dB", "0.125056", true, true),
|
||||
|
||||
// ffmpeg -f lavfi -i "sine=frequency=1400:duration=1" test.aiff
|
||||
Entry("correctly parses aiff tags", "test.aiff", "1s", 1, 44100, 16, "2.00 dB", "0.124972", "2.00 dB", "0.124972", true, true),
|
||||
)
|
||||
|
||||
// Skip these tests when running as root
|
||||
Context("Access Forbidden", func() {
|
||||
var accessForbiddenFile string
|
||||
var RegularUserContext = XContext
|
||||
var isRegularUser = os.Getuid() != 0
|
||||
if isRegularUser {
|
||||
RegularUserContext = Context
|
||||
}
|
||||
|
||||
// Only run permission tests if we are not root
|
||||
RegularUserContext("when run without root privileges", func() {
|
||||
BeforeEach(func() {
|
||||
// Use root fs for absolute paths in temp directory
|
||||
e = &extractor{fs: os.DirFS("/")}
|
||||
accessForbiddenFile = utils.TempFileName("access_forbidden-", ".mp3")
|
||||
|
||||
f, err := os.OpenFile(accessForbiddenFile, os.O_WRONLY|os.O_CREATE, 0222)
|
||||
Expect(err).ToNot(HaveOccurred())
|
||||
|
||||
DeferCleanup(func() {
|
||||
Expect(f.Close()).To(Succeed())
|
||||
Expect(os.Remove(accessForbiddenFile)).To(Succeed())
|
||||
})
|
||||
})
|
||||
|
||||
It("correctly handle unreadable file due to insufficient read permission", func() {
|
||||
// Strip leading slash for DirFS rooted at "/"
|
||||
_, err := e.extractMetadata(accessForbiddenFile[1:])
|
||||
Expect(err).To(MatchError(os.ErrPermission))
|
||||
})
|
||||
|
||||
It("skips the file if it cannot be read", func() {
|
||||
// Get current working directory to construct paths relative to root
|
||||
cwd, err := os.Getwd()
|
||||
Expect(err).ToNot(HaveOccurred())
|
||||
// Strip leading slash for DirFS rooted at "/"
|
||||
files := []string{
|
||||
cwd[1:] + "/tests/fixtures/test.mp3",
|
||||
cwd[1:] + "/tests/fixtures/test.ogg",
|
||||
accessForbiddenFile[1:],
|
||||
}
|
||||
mds, err := e.Parse(files...)
|
||||
Expect(err).NotTo(HaveOccurred())
|
||||
Expect(mds).To(HaveLen(2))
|
||||
Expect(mds).ToNot(HaveKey(accessForbiddenFile[1:]))
|
||||
})
|
||||
})
|
||||
})
|
||||
|
||||
})
|
||||
|
||||
Describe("Error Checking", func() {
|
||||
It("returns a generic ErrPath if file does not exist", func() {
|
||||
testFilePath := "tests/fixtures/NON_EXISTENT.ogg"
|
||||
_, err := e.extractMetadata(testFilePath)
|
||||
Expect(err).To(MatchError(fs.ErrNotExist))
|
||||
})
|
||||
It("does not throw a SIGSEGV error when reading a file with an invalid frame", func() {
|
||||
// File has an empty TDAT frame
|
||||
md, err := e.extractMetadata("tests/fixtures/invalid-files/test-invalid-frame.mp3")
|
||||
Expect(err).ToNot(HaveOccurred())
|
||||
Expect(md.Tags).To(HaveKeyWithValue("albumartist", []string{"Elvis Presley"}))
|
||||
})
|
||||
})
|
||||
|
||||
Describe("parseTIPL", func() {
|
||||
var tags map[string][]string
|
||||
|
||||
BeforeEach(func() {
|
||||
tags = make(map[string][]string)
|
||||
})
|
||||
|
||||
Context("when the TIPL string is populated", func() {
|
||||
It("correctly parses roles and names", func() {
|
||||
tags["tipl"] = []string{"arranger Andrew Powell DJ-mix François Kevorkian DJ-mix Jane Doe engineer Chris Blair"}
|
||||
parseTIPL(tags)
|
||||
Expect(tags["arranger"]).To(ConsistOf("Andrew Powell"))
|
||||
Expect(tags["engineer"]).To(ConsistOf("Chris Blair"))
|
||||
Expect(tags["djmixer"]).To(ConsistOf("François Kevorkian", "Jane Doe"))
|
||||
})
|
||||
|
||||
It("handles multiple names for a single role", func() {
|
||||
tags["tipl"] = []string{"engineer Pat Stapley producer Eric Woolfson engineer Chris Blair"}
|
||||
parseTIPL(tags)
|
||||
Expect(tags["producer"]).To(ConsistOf("Eric Woolfson"))
|
||||
Expect(tags["engineer"]).To(ConsistOf("Pat Stapley", "Chris Blair"))
|
||||
})
|
||||
|
||||
It("discards roles without names", func() {
|
||||
tags["tipl"] = []string{"engineer Pat Stapley producer engineer Chris Blair"}
|
||||
parseTIPL(tags)
|
||||
Expect(tags).ToNot(HaveKey("producer"))
|
||||
Expect(tags["engineer"]).To(ConsistOf("Pat Stapley", "Chris Blair"))
|
||||
})
|
||||
})
|
||||
|
||||
Context("when the TIPL string is empty", func() {
|
||||
It("does nothing", func() {
|
||||
tags["tipl"] = []string{""}
|
||||
parseTIPL(tags)
|
||||
Expect(tags).To(BeEmpty())
|
||||
})
|
||||
})
|
||||
|
||||
Context("when the TIPL is not present", func() {
|
||||
It("does nothing", func() {
|
||||
parseTIPL(tags)
|
||||
Expect(tags).To(BeEmpty())
|
||||
})
|
||||
})
|
||||
})
|
||||
|
||||
})
|
||||
@@ -1,434 +0,0 @@
package lastfm

import (
	"context"
	"errors"
	"fmt"
	"net/http"
	"regexp"
	"strconv"
	"strings"
	"sync"

	"github.com/andybalholm/cascadia"
	"github.com/navidrome/navidrome/conf"
	"github.com/navidrome/navidrome/consts"
	"github.com/navidrome/navidrome/core/agents"
	"github.com/navidrome/navidrome/core/scrobbler"
	"github.com/navidrome/navidrome/log"
	"github.com/navidrome/navidrome/model"
	"github.com/navidrome/navidrome/utils/cache"
	"golang.org/x/net/html"
)

const (
	lastFMAgentName    = "lastfm"
	sessionKeyProperty = "LastFMSessionKey"
)

var ignoredContent = []string{
	// Empty Artist/Album
	`<a href="https://www.last.fm/music/`,
}

type lastfmAgent struct {
	ds           model.DataStore
	sessionKeys  *agents.SessionKeys
	apiKey       string
	secret       string
	languages    []string
	client       *client
	httpClient   httpDoer
	getInfoMutex sync.Mutex
}

func lastFMConstructor(ds model.DataStore) *lastfmAgent {
	if !conf.Server.LastFM.Enabled || conf.Server.LastFM.ApiKey == "" || conf.Server.LastFM.Secret == "" {
		return nil
	}
	l := &lastfmAgent{
		ds:          ds,
		languages:   conf.Server.LastFM.Languages,
		apiKey:      conf.Server.LastFM.ApiKey,
		secret:      conf.Server.LastFM.Secret,
		sessionKeys: &agents.SessionKeys{DataStore: ds, KeyName: sessionKeyProperty},
	}
	hc := &http.Client{
		Timeout: consts.DefaultHttpClientTimeOut,
	}
	chc := cache.NewHTTPClient(hc, consts.DefaultHttpClientTimeOut)
	l.httpClient = chc
	l.client = newClient(l.apiKey, l.secret, chc)
	return l
}

func (l *lastfmAgent) AgentName() string {
	return lastFMAgentName
}

var imageRegex = regexp.MustCompile(`u\/(\d+)`)

// isValidContent checks if content is non-empty and not in the ignored list
func isValidContent(content string) bool {
	content = strings.TrimSpace(content)
	if content == "" {
		return false
	}
	for _, ign := range ignoredContent {
		if strings.HasPrefix(content, ign) {
			return false
		}
	}
	return true
}
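
// Added note (not in the original change): when Last.fm has no real biography
// or description, it tends to return a boilerplate summary that starts with a
// link to the artist/album page; the ignoredContent prefix above is how such
// placeholder text gets rejected by isValidContent.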

func (l *lastfmAgent) GetAlbumInfo(ctx context.Context, name, artist, mbid string) (*agents.AlbumInfo, error) {
	var a *Album
	var resp agents.AlbumInfo
	for _, lang := range l.languages {
		var err error
		a, err = l.callAlbumGetInfo(ctx, name, artist, mbid, lang)
		if err != nil {
			return nil, err
		}
		resp.Name = a.Name
		resp.MBID = a.MBID
		resp.URL = a.URL
		if isValidContent(a.Description.Summary) {
			resp.Description = strings.TrimSpace(a.Description.Summary)
			return &resp, nil
		}
		log.Debug(ctx, "LastFM/album.getInfo returned empty/ignored description, trying next language", "album", name, "artist", artist, "lang", lang)
	}
	// This condition should not be hit (languages default to ["en"]), but just in case
	if a == nil {
		return nil, agents.ErrNotFound
	}
	return &resp, nil
}

func (l *lastfmAgent) GetAlbumImages(ctx context.Context, name, artist, mbid string) ([]agents.ExternalImage, error) {
	a, err := l.callAlbumGetInfo(ctx, name, artist, mbid, l.languages[0])
	if err != nil {
		return nil, err
	}

	// Last.fm can return duplicate sizes.
	seenSizes := map[int]bool{}
	images := make([]agents.ExternalImage, 0)

	// This assumes that Last.fm returns images with size small, medium, and large.
	// This is true as of December 29, 2022
	for _, img := range a.Image {
		size := imageRegex.FindStringSubmatch(img.URL)
		// Last.fm can return images without URL
		if len(size) == 0 || len(size[0]) < 4 {
			log.Trace(ctx, "LastFM/albuminfo image URL does not match expected regex or is empty", "url", img.URL, "size", img.Size)
			continue
		}
		numericSize, err := strconv.Atoi(size[0][2:])
		if err != nil {
			log.Error(ctx, "LastFM/albuminfo image URL does not match expected regex", "url", img.URL, "size", img.Size, err)
			return nil, err
		}
		if _, exists := seenSizes[numericSize]; !exists {
			images = append(images, agents.ExternalImage{
				Size: numericSize,
				URL:  img.URL,
			})
			seenSizes[numericSize] = true
		}
	}
	return images, nil
}

func (l *lastfmAgent) GetArtistMBID(ctx context.Context, id string, name string) (string, error) {
	a, err := l.callArtistGetInfo(ctx, name, l.languages[0])
	if err != nil {
		return "", err
	}
	if a.MBID == "" {
		return "", agents.ErrNotFound
	}
	return a.MBID, nil
}

func (l *lastfmAgent) GetArtistURL(ctx context.Context, id, name, mbid string) (string, error) {
	a, err := l.callArtistGetInfo(ctx, name, l.languages[0])
	if err != nil {
		return "", err
	}
	if a.URL == "" {
		return "", agents.ErrNotFound
	}
	return a.URL, nil
}

func (l *lastfmAgent) GetArtistBiography(ctx context.Context, id, name, mbid string) (string, error) {
	for _, lang := range l.languages {
		a, err := l.callArtistGetInfo(ctx, name, lang)
		if err != nil {
			return "", err
		}
		if isValidContent(a.Bio.Summary) {
			return strings.TrimSpace(a.Bio.Summary), nil
		}
		log.Debug(ctx, "LastFM/artist.getInfo returned empty/ignored biography, trying next language", "artist", name, "lang", lang)
	}
	return "", agents.ErrNotFound
}

func (l *lastfmAgent) GetSimilarArtists(ctx context.Context, id, name, mbid string, limit int) ([]agents.Artist, error) {
	resp, err := l.callArtistGetSimilar(ctx, name, limit)
	if err != nil {
		return nil, err
	}
	if len(resp) == 0 {
		return nil, agents.ErrNotFound
	}
	var res []agents.Artist
	for _, a := range resp {
		res = append(res, agents.Artist{
			Name: a.Name,
			MBID: a.MBID,
		})
	}
	return res, nil
}

func (l *lastfmAgent) GetArtistTopSongs(ctx context.Context, id, artistName, mbid string, count int) ([]agents.Song, error) {
	resp, err := l.callArtistGetTopTracks(ctx, artistName, count)
	if err != nil {
		return nil, err
	}
	if len(resp) == 0 {
		return nil, agents.ErrNotFound
	}
	var res []agents.Song
	for _, t := range resp {
		res = append(res, agents.Song{
			Name: t.Name,
			MBID: t.MBID,
		})
	}
	return res, nil
}

func (l *lastfmAgent) GetSimilarSongsByTrack(ctx context.Context, id, name, artist, mbid string, count int) ([]agents.Song, error) {
	resp, err := l.callTrackGetSimilar(ctx, name, artist, count)
	if err != nil {
		return nil, err
	}
	if len(resp) == 0 {
		return nil, agents.ErrNotFound
	}
	res := make([]agents.Song, 0, len(resp))
	for _, t := range resp {
		res = append(res, agents.Song{
			Name:       t.Name,
			MBID:       t.MBID,
			Artist:     t.Artist.Name,
			ArtistMBID: t.Artist.MBID,
		})
	}
	return res, nil
}

var (
	artistOpenGraphQuery = cascadia.MustCompile(`html > head > meta[property="og:image"]`)
	artistIgnoredImage   = "2a96cbd8b46e442fc41c2b86b821562f" // Last.fm artist placeholder image name
)

func (l *lastfmAgent) GetArtistImages(ctx context.Context, _, name, mbid string) ([]agents.ExternalImage, error) {
	log.Debug(ctx, "Getting artist images from Last.fm", "name", name)
	a, err := l.callArtistGetInfo(ctx, name, l.languages[0])
	if err != nil {
		return nil, fmt.Errorf("get artist info: %w", err)
	}
	req, err := http.NewRequestWithContext(ctx, http.MethodGet, a.URL, nil)
	if err != nil {
		return nil, fmt.Errorf("create artist image request: %w", err)
	}
	resp, err := l.httpClient.Do(req)
	if err != nil {
		return nil, fmt.Errorf("get artist url: %w", err)
	}
	defer resp.Body.Close()

	node, err := html.Parse(resp.Body)
	if err != nil {
		return nil, fmt.Errorf("parse html: %w", err)
	}

	var res []agents.ExternalImage
	n := cascadia.Query(node, artistOpenGraphQuery)
	if n == nil {
		return res, nil
	}
	for _, attr := range n.Attr {
		if attr.Key != "content" {
			continue
		}
		if strings.Contains(attr.Val, artistIgnoredImage) {
			log.Debug(ctx, "Artist image is ignored default image", "name", name, "url", attr.Val)
			return res, nil
		}

		res = []agents.ExternalImage{
			{URL: attr.Val},
		}
	}
	return res, nil
}

func (l *lastfmAgent) callAlbumGetInfo(ctx context.Context, name, artist, mbid string, lang string) (*Album, error) {
	a, err := l.client.albumGetInfo(ctx, name, artist, mbid, lang)
	var lfErr *lastFMError
	isLastFMError := errors.As(err, &lfErr)

	if mbid != "" && (isLastFMError && lfErr.Code == 6) {
		log.Debug(ctx, "LastFM/album.getInfo could not find album by mbid, trying again", "album", name, "mbid", mbid)
		return l.callAlbumGetInfo(ctx, name, artist, "", lang)
	}

	if err != nil {
		if isLastFMError && lfErr.Code == 6 {
			log.Debug(ctx, "Album not found", "album", name, "mbid", mbid, err)
		} else {
			log.Error(ctx, "Error calling LastFM/album.getInfo", "album", name, "mbid", mbid, err)
		}
		return nil, err
	}
	return a, nil
}

func (l *lastfmAgent) callArtistGetInfo(ctx context.Context, name string, lang string) (*Artist, error) {
	l.getInfoMutex.Lock()
	defer l.getInfoMutex.Unlock()

	a, err := l.client.artistGetInfo(ctx, name, lang)
	if err != nil {
		log.Error(ctx, "Error calling LastFM/artist.getInfo", "artist", name, err)
		return nil, err
	}
	return a, nil
}

func (l *lastfmAgent) callArtistGetSimilar(ctx context.Context, name string, limit int) ([]Artist, error) {
	s, err := l.client.artistGetSimilar(ctx, name, limit)
	if err != nil {
		log.Error(ctx, "Error calling LastFM/artist.getSimilar", "artist", name, err)
		return nil, err
	}
	return s.Artists, nil
}

func (l *lastfmAgent) callArtistGetTopTracks(ctx context.Context, artistName string, count int) ([]Track, error) {
	t, err := l.client.artistGetTopTracks(ctx, artistName, count)
	if err != nil {
		log.Error(ctx, "Error calling LastFM/artist.getTopTracks", "artist", artistName, err)
		return nil, err
	}
	return t.Track, nil
}

func (l *lastfmAgent) callTrackGetSimilar(ctx context.Context, name, artist string, count int) ([]SimilarTrack, error) {
	s, err := l.client.trackGetSimilar(ctx, name, artist, count)
	if err != nil {
		log.Error(ctx, "Error calling LastFM/track.getSimilar", "track", name, "artist", artist, err)
		return nil, err
	}
	return s.Track, nil
}

func (l *lastfmAgent) getArtistForScrobble(track *model.MediaFile, role model.Role, displayName string) string {
	if conf.Server.LastFM.ScrobbleFirstArtistOnly && len(track.Participants[role]) > 0 {
		return track.Participants[role][0].Name
	}
	return displayName
}

func (l *lastfmAgent) NowPlaying(ctx context.Context, userId string, track *model.MediaFile, position int) error {
	sk, err := l.sessionKeys.Get(ctx, userId)
	if err != nil || sk == "" {
		return scrobbler.ErrNotAuthorized
	}

	err = l.client.updateNowPlaying(ctx, sk, ScrobbleInfo{
		artist:      l.getArtistForScrobble(track, model.RoleArtist, track.Artist),
		track:       track.Title,
		album:       track.Album,
		trackNumber: track.TrackNumber,
		mbid:        track.MbzRecordingID,
		duration:    int(track.Duration),
		albumArtist: l.getArtistForScrobble(track, model.RoleAlbumArtist, track.AlbumArtist),
	})
	if err != nil {
		log.Warn(ctx, "Last.fm client.updateNowPlaying returned error", "track", track.Title, err)
		return errors.Join(err, scrobbler.ErrUnrecoverable)
	}
	return nil
}

func (l *lastfmAgent) Scrobble(ctx context.Context, userId string, s scrobbler.Scrobble) error {
	sk, err := l.sessionKeys.Get(ctx, userId)
	if err != nil || sk == "" {
		return errors.Join(err, scrobbler.ErrNotAuthorized)
	}

	if s.Duration <= 30 {
		log.Debug(ctx, "Skipping Last.fm scrobble for short song", "track", s.Title, "duration", s.Duration)
		return nil
	}
	err = l.client.scrobble(ctx, sk, ScrobbleInfo{
		artist:      l.getArtistForScrobble(&s.MediaFile, model.RoleArtist, s.Artist),
		track:       s.Title,
		album:       s.Album,
		trackNumber: s.TrackNumber,
		mbid:        s.MbzRecordingID,
		duration:    int(s.Duration),
		albumArtist: l.getArtistForScrobble(&s.MediaFile, model.RoleAlbumArtist, s.AlbumArtist),
		timestamp:   s.TimeStamp,
	})
	if err == nil {
		return nil
	}
	var lfErr *lastFMError
	isLastFMError := errors.As(err, &lfErr)
	if !isLastFMError {
		log.Warn(ctx, "Last.fm client.scrobble returned error", "track", s.Title, err)
		return errors.Join(err, scrobbler.ErrRetryLater)
	}
	if lfErr.Code == 11 || lfErr.Code == 16 {
		return errors.Join(err, scrobbler.ErrRetryLater)
	}
	return errors.Join(err, scrobbler.ErrUnrecoverable)
}
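
// Added note (not in the original change): Last.fm error codes 11 ("Service
// Offline") and 16 ("temporarily unavailable") indicate transient failures, so
// they are wrapped with scrobbler.ErrRetryLater; every other Last.fm error is
// wrapped with scrobbler.ErrUnrecoverable and will not be retried.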
|
||||
func (l *lastfmAgent) IsAuthorized(ctx context.Context, userId string) bool {
|
||||
sk, err := l.sessionKeys.Get(ctx, userId)
|
||||
return err == nil && sk != ""
|
||||
}
|
||||
|
||||
func init() {
|
||||
conf.AddHook(func() {
|
||||
agents.Register(lastFMAgentName, func(ds model.DataStore) agents.Interface {
|
||||
// This is a workaround for the fact that an (Interface)(nil) is not the same as a (*lastfmAgent)(nil)
|
||||
// See https://go.dev/doc/faq#nil_error
|
||||
a := lastFMConstructor(ds)
|
||||
if a != nil {
|
||||
return a
|
||||
}
|
||||
return nil
|
||||
})
|
||||
scrobbler.Register(lastFMAgentName, func(ds model.DataStore) scrobbler.Scrobbler {
|
||||
// Same as above - this is a workaround for the fact that a (Scrobbler)(nil) is not the same as a (*lastfmAgent)(nil)
|
||||
// See https://go.dev/doc/faq#nil_error
|
||||
a := lastFMConstructor(ds)
|
||||
if a != nil {
|
||||
return a
|
||||
}
|
||||
return nil
|
||||
})
|
||||
})
|
||||
}
|
||||
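// For illustration only, a minimal sketch of the nil-interface pitfall that the workaround above avoids
// (hypothetical snippet; see https://go.dev/doc/faq#nil_error):
//
//	var a agents.Interface = (*lastfmAgent)(nil)
//	fmt.Println(a == nil) // false: the interface holds a non-nil type with a nil value
//
// Returning a plain nil when the constructor yields nil keeps callers' nil checks working.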
@@ -1,685 +0,0 @@
|
||||
package lastfm
|
||||
|
||||
import (
|
||||
"bytes"
|
||||
"context"
|
||||
"errors"
|
||||
"io"
|
||||
"net/http"
|
||||
"net/url"
|
||||
"os"
|
||||
"strconv"
|
||||
"time"
|
||||
|
||||
"github.com/navidrome/navidrome/conf"
|
||||
"github.com/navidrome/navidrome/conf/configtest"
|
||||
"github.com/navidrome/navidrome/core/agents"
|
||||
"github.com/navidrome/navidrome/core/scrobbler"
|
||||
"github.com/navidrome/navidrome/model"
|
||||
"github.com/navidrome/navidrome/tests"
|
||||
. "github.com/onsi/ginkgo/v2"
|
||||
. "github.com/onsi/gomega"
|
||||
)
|
||||
|
||||
const (
|
||||
lastfmError3 = `{"error":3,"message":"Invalid Method - No method with that name in this package","links":[]}`
|
||||
lastfmError6 = `{"error":6,"message":"The artist you supplied could not be found","links":[]}`
|
||||
)
|
||||
|
||||
var _ = Describe("lastfmAgent", func() {
|
||||
var ds model.DataStore
|
||||
var ctx context.Context
|
||||
BeforeEach(func() {
|
||||
ds = &tests.MockDataStore{}
|
||||
ctx = context.Background()
|
||||
DeferCleanup(configtest.SetupConfig())
|
||||
conf.Server.LastFM.Enabled = true
|
||||
conf.Server.LastFM.ApiKey = "123"
|
||||
conf.Server.LastFM.Secret = "secret"
|
||||
})
|
||||
Describe("lastFMConstructor", func() {
|
||||
When("Agent is properly configured", func() {
|
||||
It("uses configured api key and languages", func() {
|
||||
conf.Server.LastFM.Languages = []string{"pt", "en"}
|
||||
agent := lastFMConstructor(ds)
|
||||
Expect(agent.apiKey).To(Equal("123"))
|
||||
Expect(agent.secret).To(Equal("secret"))
|
||||
Expect(agent.languages).To(Equal([]string{"pt", "en"}))
|
||||
})
|
||||
})
|
||||
When("Agent is disabled", func() {
|
||||
It("returns nil", func() {
|
||||
conf.Server.LastFM.Enabled = false
|
||||
Expect(lastFMConstructor(ds)).To(BeNil())
|
||||
})
|
||||
})
|
||||
When("ApiKey is empty", func() {
|
||||
It("returns nil", func() {
|
||||
conf.Server.LastFM.ApiKey = ""
|
||||
Expect(lastFMConstructor(ds)).To(BeNil())
|
||||
})
|
||||
})
|
||||
When("Secret is empty", func() {
|
||||
It("returns nil", func() {
|
||||
conf.Server.LastFM.Secret = ""
|
||||
Expect(lastFMConstructor(ds)).To(BeNil())
|
||||
})
|
||||
})
|
||||
})
|
||||
|
||||
Describe("GetArtistBiography", func() {
|
||||
var agent *lastfmAgent
|
||||
var httpClient *tests.FakeHttpClient
|
||||
BeforeEach(func() {
|
||||
httpClient = &tests.FakeHttpClient{}
|
||||
client := newClient("API_KEY", "SECRET", httpClient)
|
||||
agent = lastFMConstructor(ds)
|
||||
agent.client = client
|
||||
})
|
||||
|
||||
It("returns the biography", func() {
|
||||
f, _ := os.Open("tests/fixtures/lastfm.artist.getinfo.json")
|
||||
httpClient.Res = http.Response{Body: f, StatusCode: 200}
|
||||
Expect(agent.GetArtistBiography(ctx, "123", "U2", "")).To(Equal("U2 é uma das mais importantes bandas de rock de todos os tempos. Formada em 1976 em Dublin, composta por Bono (vocalista e guitarrista), The Edge (guitarrista, pianista e backing vocal), Adam Clayton (baixista), Larry Mullen, Jr. (baterista e percussionista).\n\nDesde a década de 80, U2 é uma das bandas mais populares no mundo. Seus shows são únicos e um verdadeiro festival de efeitos especiais, além de serem um dos que mais arrecadam anualmente. <a href=\"https://www.last.fm/music/U2\">Read more on Last.fm</a>"))
|
||||
Expect(httpClient.RequestCount).To(Equal(1))
|
||||
Expect(httpClient.SavedRequest.URL.Query().Get("artist")).To(Equal("U2"))
|
||||
})
|
||||
|
||||
It("returns an error if Last.fm call fails", func() {
|
||||
httpClient.Err = errors.New("error")
|
||||
_, err := agent.GetArtistBiography(ctx, "123", "U2", "")
|
||||
Expect(err).To(HaveOccurred())
|
||||
Expect(httpClient.RequestCount).To(Equal(1))
|
||||
Expect(httpClient.SavedRequest.URL.Query().Get("artist")).To(Equal("U2"))
|
||||
})
|
||||
|
||||
It("returns an error if Last.fm call returns an error", func() {
|
||||
httpClient.Res = http.Response{Body: io.NopCloser(bytes.NewBufferString(lastfmError3)), StatusCode: 200}
|
||||
_, err := agent.GetArtistBiography(ctx, "123", "U2", "")
|
||||
Expect(err).To(HaveOccurred())
|
||||
Expect(httpClient.RequestCount).To(Equal(1))
|
||||
Expect(httpClient.SavedRequest.URL.Query().Get("artist")).To(Equal("U2"))
|
||||
})
|
||||
})
|
||||
|
||||
Describe("Language Fallback", func() {
|
||||
Describe("GetArtistBiography", func() {
|
||||
var agent *lastfmAgent
|
||||
var httpClient *langAwareHttpClient
|
||||
|
||||
BeforeEach(func() {
|
||||
httpClient = newLangAwareHttpClient()
|
||||
})
|
||||
|
||||
It("returns content in first language when available (1 API call)", func() {
|
||||
conf.Server.LastFM.Languages = []string{"pt", "en"}
|
||||
agent = lastFMConstructor(ds)
|
||||
agent.client = newClient("API_KEY", "SECRET", httpClient)
|
||||
|
||||
// Portuguese biography available
|
||||
f, _ := os.Open("tests/fixtures/lastfm.artist.getinfo.json")
|
||||
httpClient.responses["pt"] = http.Response{Body: f, StatusCode: 200}
|
||||
|
||||
bio, err := agent.GetArtistBiography(ctx, "123", "U2", "")
|
||||
|
||||
Expect(err).ToNot(HaveOccurred())
|
||||
Expect(bio).To(ContainSubstring("U2 é uma das mais importantes bandas de rock"))
|
||||
Expect(httpClient.requestCount).To(Equal(1))
|
||||
Expect(httpClient.requests[0].URL.Query().Get("lang")).To(Equal("pt"))
|
||||
})
|
||||
|
||||
It("falls back to second language when first returns empty (2 API calls)", func() {
|
||||
conf.Server.LastFM.Languages = []string{"ja", "en"}
|
||||
agent = lastFMConstructor(ds)
|
||||
agent.client = newClient("API_KEY", "SECRET", httpClient)
|
||||
|
||||
// Japanese returns empty/ignored biography (actual Last.fm response with just "Read more" link)
|
||||
fJa, _ := os.Open("tests/fixtures/lastfm.artist.getinfo.empty.json")
|
||||
httpClient.responses["ja"] = http.Response{Body: fJa, StatusCode: 200}
|
||||
// English returns full biography
|
||||
fEn, _ := os.Open("tests/fixtures/lastfm.artist.getinfo.en.json")
|
||||
httpClient.responses["en"] = http.Response{Body: fEn, StatusCode: 200}
|
||||
|
||||
bio, err := agent.GetArtistBiography(ctx, "123", "Legião Urbana", "")
|
||||
|
||||
Expect(err).ToNot(HaveOccurred())
|
||||
Expect(bio).To(ContainSubstring("Legião Urbana was a Brazilian post-punk band"))
|
||||
Expect(httpClient.requestCount).To(Equal(2))
|
||||
Expect(httpClient.requests[0].URL.Query().Get("lang")).To(Equal("ja"))
|
||||
Expect(httpClient.requests[1].URL.Query().Get("lang")).To(Equal("en"))
|
||||
})
|
||||
|
||||
It("returns ErrNotFound when all languages return empty", func() {
|
||||
conf.Server.LastFM.Languages = []string{"ja", "xx"}
|
||||
agent = lastFMConstructor(ds)
|
||||
agent.client = newClient("API_KEY", "SECRET", httpClient)
|
||||
|
||||
// Both languages return empty/ignored biography (using actual Last.fm response format)
|
||||
fJa, _ := os.Open("tests/fixtures/lastfm.artist.getinfo.empty.json")
|
||||
httpClient.responses["ja"] = http.Response{Body: fJa, StatusCode: 200}
|
||||
// Second language also returns empty
|
||||
fXx, _ := os.Open("tests/fixtures/lastfm.artist.getinfo.empty.json")
|
||||
httpClient.responses["xx"] = http.Response{Body: fXx, StatusCode: 200}
|
||||
|
||||
_, err := agent.GetArtistBiography(ctx, "123", "Legião Urbana", "")
|
||||
|
||||
Expect(err).To(MatchError(agents.ErrNotFound))
|
||||
Expect(httpClient.requestCount).To(Equal(2))
|
||||
})
|
||||
})
|
||||
|
||||
Describe("GetAlbumInfo", func() {
|
||||
var agent *lastfmAgent
|
||||
var httpClient *langAwareHttpClient
|
||||
|
||||
BeforeEach(func() {
|
||||
httpClient = newLangAwareHttpClient()
|
||||
})
|
||||
|
||||
It("falls back to second language when first returns empty description (2 API calls)", func() {
|
||||
conf.Server.LastFM.Languages = []string{"ja", "en"}
|
||||
agent = lastFMConstructor(ds)
|
||||
agent.client = newClient("API_KEY", "SECRET", httpClient)
|
||||
|
||||
// Japanese returns album without wiki/description (actual Last.fm response)
|
||||
fJa, _ := os.Open("tests/fixtures/lastfm.album.getinfo.empty.json")
|
||||
httpClient.responses["ja"] = http.Response{Body: fJa, StatusCode: 200}
|
||||
// English returns album with description
|
||||
fEn, _ := os.Open("tests/fixtures/lastfm.album.getinfo.en.json")
|
||||
httpClient.responses["en"] = http.Response{Body: fEn, StatusCode: 200}
|
||||
|
||||
albumInfo, err := agent.GetAlbumInfo(ctx, "Dois", "Legião Urbana", "")
|
||||
|
||||
Expect(err).ToNot(HaveOccurred())
|
||||
Expect(albumInfo.Name).To(Equal("Dois"))
|
||||
Expect(albumInfo.Description).To(ContainSubstring("segundo álbum de estúdio"))
|
||||
Expect(httpClient.requestCount).To(Equal(2))
|
||||
Expect(httpClient.requests[0].URL.Query().Get("lang")).To(Equal("ja"))
|
||||
Expect(httpClient.requests[1].URL.Query().Get("lang")).To(Equal("en"))
|
||||
})
|
||||
|
||||
It("returns album without description when all languages return empty", func() {
|
||||
conf.Server.LastFM.Languages = []string{"ja", "xx"}
|
||||
agent = lastFMConstructor(ds)
|
||||
agent.client = newClient("API_KEY", "SECRET", httpClient)
|
||||
|
||||
// Both languages return album without description
|
||||
fJa, _ := os.Open("tests/fixtures/lastfm.album.getinfo.empty.json")
|
||||
httpClient.responses["ja"] = http.Response{Body: fJa, StatusCode: 200}
|
||||
fXx, _ := os.Open("tests/fixtures/lastfm.album.getinfo.empty.json")
|
||||
httpClient.responses["xx"] = http.Response{Body: fXx, StatusCode: 200}
|
||||
|
||||
albumInfo, err := agent.GetAlbumInfo(ctx, "Dois", "Legião Urbana", "")
|
||||
|
||||
Expect(err).ToNot(HaveOccurred())
|
||||
Expect(albumInfo.Name).To(Equal("Dois"))
|
||||
Expect(albumInfo.Description).To(BeEmpty())
|
||||
Expect(httpClient.requestCount).To(Equal(2))
|
||||
})
|
||||
})
|
||||
})
|
||||
|
||||
Describe("GetSimilarArtists", func() {
|
||||
var agent *lastfmAgent
|
||||
var httpClient *tests.FakeHttpClient
|
||||
BeforeEach(func() {
|
||||
httpClient = &tests.FakeHttpClient{}
|
||||
client := newClient("API_KEY", "SECRET", httpClient)
|
||||
agent = lastFMConstructor(ds)
|
||||
agent.client = client
|
||||
})
|
||||
|
||||
It("returns similar artists", func() {
|
||||
f, _ := os.Open("tests/fixtures/lastfm.artist.getsimilar.json")
|
||||
httpClient.Res = http.Response{Body: f, StatusCode: 200}
|
||||
Expect(agent.GetSimilarArtists(ctx, "123", "U2", "", 2)).To(Equal([]agents.Artist{
|
||||
{Name: "Passengers", MBID: "e110c11f-1c94-4471-a350-c38f46b29389"},
|
||||
{Name: "INXS", MBID: "481bf5f9-2e7c-4c44-b08a-05b32bc7c00d"},
|
||||
}))
|
||||
Expect(httpClient.RequestCount).To(Equal(1))
|
||||
Expect(httpClient.SavedRequest.URL.Query().Get("artist")).To(Equal("U2"))
|
||||
})
|
||||
|
||||
It("returns an error if Last.fm call fails", func() {
|
||||
httpClient.Err = errors.New("error")
|
||||
_, err := agent.GetSimilarArtists(ctx, "123", "U2", "", 2)
|
||||
Expect(err).To(HaveOccurred())
|
||||
Expect(httpClient.RequestCount).To(Equal(1))
|
||||
Expect(httpClient.SavedRequest.URL.Query().Get("artist")).To(Equal("U2"))
|
||||
})
|
||||
|
||||
It("returns an error if Last.fm call returns an error", func() {
|
||||
httpClient.Res = http.Response{Body: io.NopCloser(bytes.NewBufferString(lastfmError3)), StatusCode: 200}
|
||||
_, err := agent.GetSimilarArtists(ctx, "123", "U2", "", 2)
|
||||
Expect(err).To(HaveOccurred())
|
||||
Expect(httpClient.RequestCount).To(Equal(1))
|
||||
Expect(httpClient.SavedRequest.URL.Query().Get("artist")).To(Equal("U2"))
|
||||
})
|
||||
})
|
||||
|
||||
Describe("GetArtistTopSongs", func() {
|
||||
var agent *lastfmAgent
|
||||
var httpClient *tests.FakeHttpClient
|
||||
BeforeEach(func() {
|
||||
httpClient = &tests.FakeHttpClient{}
|
||||
client := newClient("API_KEY", "SECRET", httpClient)
|
||||
agent = lastFMConstructor(ds)
|
||||
agent.client = client
|
||||
})
|
||||
|
||||
It("returns top songs", func() {
|
||||
f, _ := os.Open("tests/fixtures/lastfm.artist.gettoptracks.json")
|
||||
httpClient.Res = http.Response{Body: f, StatusCode: 200}
|
||||
Expect(agent.GetArtistTopSongs(ctx, "123", "U2", "", 2)).To(Equal([]agents.Song{
|
||||
{Name: "Beautiful Day", MBID: "f7f264d0-a89b-4682-9cd7-a4e7c37637af"},
|
||||
{Name: "With or Without You", MBID: "6b9a509f-6907-4a6e-9345-2f12da09ba4b"},
|
||||
}))
|
||||
Expect(httpClient.RequestCount).To(Equal(1))
|
||||
Expect(httpClient.SavedRequest.URL.Query().Get("artist")).To(Equal("U2"))
|
||||
})
|
||||
|
||||
It("returns an error if Last.fm call fails", func() {
|
||||
httpClient.Err = errors.New("error")
|
||||
_, err := agent.GetArtistTopSongs(ctx, "123", "U2", "", 2)
|
||||
Expect(err).To(HaveOccurred())
|
||||
Expect(httpClient.RequestCount).To(Equal(1))
|
||||
Expect(httpClient.SavedRequest.URL.Query().Get("artist")).To(Equal("U2"))
|
||||
})
|
||||
|
||||
It("returns an error if Last.fm call returns an error", func() {
|
||||
httpClient.Res = http.Response{Body: io.NopCloser(bytes.NewBufferString(lastfmError3)), StatusCode: 200}
|
||||
_, err := agent.GetArtistTopSongs(ctx, "123", "U2", "", 2)
|
||||
Expect(err).To(HaveOccurred())
|
||||
Expect(httpClient.RequestCount).To(Equal(1))
|
||||
Expect(httpClient.SavedRequest.URL.Query().Get("artist")).To(Equal("U2"))
|
||||
})
|
||||
})
|
||||
|
||||
Describe("GetSimilarSongsByTrack", func() {
|
||||
var agent *lastfmAgent
|
||||
var httpClient *tests.FakeHttpClient
|
||||
BeforeEach(func() {
|
||||
httpClient = &tests.FakeHttpClient{}
|
||||
client := newClient("API_KEY", "SECRET", httpClient)
|
||||
agent = lastFMConstructor(ds)
|
||||
agent.client = client
|
||||
})
|
||||
|
||||
It("returns similar songs", func() {
|
||||
f, _ := os.Open("tests/fixtures/lastfm.track.getsimilar.json")
|
||||
httpClient.Res = http.Response{Body: f, StatusCode: 200}
|
||||
Expect(agent.GetSimilarSongsByTrack(ctx, "123", "Just Can't Get Enough", "Depeche Mode", "", 5)).To(Equal([]agents.Song{
|
||||
{Name: "Dreaming of Me", MBID: "027b553e-7c74-3ed4-a95e-1d4fea51f174", Artist: "Depeche Mode", ArtistMBID: "8538e728-ca0b-4321-b7e5-cff6565dd4c0"},
|
||||
{Name: "Everything Counts", MBID: "5a5a3ca4-bdb8-4641-a674-9b54b9b319a6", Artist: "Depeche Mode", ArtistMBID: "8538e728-ca0b-4321-b7e5-cff6565dd4c0"},
|
||||
{Name: "Don't You Want Me", MBID: "", Artist: "The Human League", ArtistMBID: "7adaabfb-acfb-47bc-8c7c-59471c2f0db8"},
|
||||
{Name: "Tainted Love", MBID: "", Artist: "Soft Cell", ArtistMBID: "7fb50287-029d-47cc-825a-235ca28024b2"},
|
||||
{Name: "Blue Monday", MBID: "727e84c6-1b56-31dd-a958-a5f46305cec0", Artist: "New Order", ArtistMBID: "f1106b17-dcbb-45f6-b938-199ccfab50cc"},
|
||||
}))
|
||||
Expect(httpClient.RequestCount).To(Equal(1))
|
||||
Expect(httpClient.SavedRequest.URL.Query().Get("track")).To(Equal("Just Can't Get Enough"))
|
||||
Expect(httpClient.SavedRequest.URL.Query().Get("artist")).To(Equal("Depeche Mode"))
|
||||
})
|
||||
|
||||
It("returns ErrNotFound when no similar songs found", func() {
|
||||
f, _ := os.Open("tests/fixtures/lastfm.track.getsimilar.unknown.json")
|
||||
httpClient.Res = http.Response{Body: f, StatusCode: 200}
|
||||
_, err := agent.GetSimilarSongsByTrack(ctx, "123", "UnknownTrack", "UnknownArtist", "", 3)
|
||||
Expect(err).To(MatchError(agents.ErrNotFound))
|
||||
Expect(httpClient.RequestCount).To(Equal(1))
|
||||
})
|
||||
|
||||
It("returns an error if Last.fm call fails", func() {
|
||||
httpClient.Err = errors.New("error")
|
||||
_, err := agent.GetSimilarSongsByTrack(ctx, "123", "Believe", "Cher", "", 3)
|
||||
Expect(err).To(HaveOccurred())
|
||||
Expect(httpClient.RequestCount).To(Equal(1))
|
||||
})
|
||||
|
||||
It("returns an error if Last.fm call returns an error", func() {
|
||||
httpClient.Res = http.Response{Body: io.NopCloser(bytes.NewBufferString(lastfmError3)), StatusCode: 200}
|
||||
_, err := agent.GetSimilarSongsByTrack(ctx, "123", "Believe", "Cher", "", 3)
|
||||
Expect(err).To(HaveOccurred())
|
||||
Expect(httpClient.RequestCount).To(Equal(1))
|
||||
})
|
||||
})
|
||||
|
||||
Describe("Scrobbling", func() {
|
||||
var agent *lastfmAgent
|
||||
var httpClient *tests.FakeHttpClient
|
||||
var track *model.MediaFile
|
||||
BeforeEach(func() {
|
||||
_ = ds.UserProps(ctx).Put("user-1", sessionKeyProperty, "SK-1")
|
||||
httpClient = &tests.FakeHttpClient{}
|
||||
client := newClient("API_KEY", "SECRET", httpClient)
|
||||
agent = lastFMConstructor(ds)
|
||||
agent.client = client
|
||||
track = &model.MediaFile{
|
||||
ID: "123",
|
||||
Title: "Track Title",
|
||||
Album: "Track Album",
|
||||
Artist: "Track Artist",
|
||||
AlbumArtist: "Track AlbumArtist",
|
||||
TrackNumber: 1,
|
||||
Duration: 180,
|
||||
MbzRecordingID: "mbz-123",
|
||||
Participants: map[model.Role]model.ParticipantList{
|
||||
model.RoleArtist: []model.Participant{
|
||||
{Artist: model.Artist{ID: "ar-1", Name: "First Artist"}},
|
||||
{Artist: model.Artist{ID: "ar-2", Name: "Second Artist"}},
|
||||
},
|
||||
model.RoleAlbumArtist: []model.Participant{
|
||||
{Artist: model.Artist{ID: "ar-1", Name: "First Album Artist"}},
|
||||
{Artist: model.Artist{ID: "ar-2", Name: "Second Album Artist"}},
|
||||
},
|
||||
},
|
||||
}
|
||||
})
|
||||
|
||||
Describe("NowPlaying", func() {
|
||||
It("calls Last.fm with correct params", func() {
|
||||
httpClient.Res = http.Response{Body: io.NopCloser(bytes.NewBufferString("{}")), StatusCode: 200}
|
||||
|
||||
err := agent.NowPlaying(ctx, "user-1", track, 0)
|
||||
|
||||
Expect(err).ToNot(HaveOccurred())
|
||||
Expect(httpClient.SavedRequest.Method).To(Equal(http.MethodPost))
|
||||
body, _ := io.ReadAll(httpClient.SavedRequest.Body)
|
||||
sentParams, _ := url.ParseQuery(string(body))
|
||||
Expect(sentParams.Get("method")).To(Equal("track.updateNowPlaying"))
|
||||
Expect(sentParams.Get("sk")).To(Equal("SK-1"))
|
||||
Expect(sentParams.Get("track")).To(Equal(track.Title))
|
||||
Expect(sentParams.Get("album")).To(Equal(track.Album))
|
||||
Expect(sentParams.Get("artist")).To(Equal(track.Artist))
|
||||
Expect(sentParams.Get("albumArtist")).To(Equal(track.AlbumArtist))
|
||||
Expect(sentParams.Get("trackNumber")).To(Equal(strconv.Itoa(track.TrackNumber)))
|
||||
Expect(sentParams.Get("duration")).To(Equal(strconv.FormatFloat(float64(track.Duration), 'G', -1, 32)))
|
||||
Expect(sentParams.Get("mbid")).To(Equal(track.MbzRecordingID))
|
||||
})
|
||||
|
||||
It("returns ErrNotAuthorized if user is not linked", func() {
|
||||
err := agent.NowPlaying(ctx, "user-2", track, 0)
|
||||
Expect(err).To(MatchError(scrobbler.ErrNotAuthorized))
|
||||
})
|
||||
|
||||
When("ScrobbleFirstArtistOnly is true", func() {
|
||||
BeforeEach(func() {
|
||||
conf.Server.LastFM.ScrobbleFirstArtistOnly = true
|
||||
})
|
||||
|
||||
It("uses only the first artist", func() {
|
||||
httpClient.Res = http.Response{Body: io.NopCloser(bytes.NewBufferString("{}")), StatusCode: 200}
|
||||
|
||||
err := agent.NowPlaying(ctx, "user-1", track, 0)
|
||||
|
||||
Expect(err).ToNot(HaveOccurred())
|
||||
body, _ := io.ReadAll(httpClient.SavedRequest.Body)
|
||||
sentParams, _ := url.ParseQuery(string(body))
|
||||
Expect(sentParams.Get("artist")).To(Equal("First Artist"))
|
||||
Expect(sentParams.Get("albumArtist")).To(Equal("First Album Artist"))
|
||||
})
|
||||
})
|
||||
})
|
||||
|
||||
Describe("scrobble", func() {
|
||||
It("calls Last.fm with correct params", func() {
|
||||
ts := time.Now()
|
||||
httpClient.Res = http.Response{Body: io.NopCloser(bytes.NewBufferString("{}")), StatusCode: 200}
|
||||
|
||||
err := agent.Scrobble(ctx, "user-1", scrobbler.Scrobble{MediaFile: *track, TimeStamp: ts})
|
||||
|
||||
Expect(err).ToNot(HaveOccurred())
|
||||
Expect(httpClient.SavedRequest.Method).To(Equal(http.MethodPost))
|
||||
body, _ := io.ReadAll(httpClient.SavedRequest.Body)
|
||||
sentParams, _ := url.ParseQuery(string(body))
|
||||
Expect(sentParams.Get("method")).To(Equal("track.scrobble"))
|
||||
Expect(sentParams.Get("sk")).To(Equal("SK-1"))
|
||||
Expect(sentParams.Get("track")).To(Equal(track.Title))
|
||||
Expect(sentParams.Get("album")).To(Equal(track.Album))
|
||||
Expect(sentParams.Get("artist")).To(Equal(track.Artist))
|
||||
Expect(sentParams.Get("albumArtist")).To(Equal(track.AlbumArtist))
|
||||
Expect(sentParams.Get("trackNumber")).To(Equal(strconv.Itoa(track.TrackNumber)))
|
||||
Expect(sentParams.Get("duration")).To(Equal(strconv.FormatFloat(float64(track.Duration), 'G', -1, 32)))
|
||||
Expect(sentParams.Get("mbid")).To(Equal(track.MbzRecordingID))
|
||||
Expect(sentParams.Get("timestamp")).To(Equal(strconv.FormatInt(ts.Unix(), 10)))
|
||||
})
|
||||
|
||||
When("ScrobbleFirstArtistOnly is true", func() {
|
||||
BeforeEach(func() {
|
||||
conf.Server.LastFM.ScrobbleFirstArtistOnly = true
|
||||
})
|
||||
|
||||
It("uses only the first artist", func() {
|
||||
ts := time.Now()
|
||||
httpClient.Res = http.Response{Body: io.NopCloser(bytes.NewBufferString("{}")), StatusCode: 200}
|
||||
|
||||
err := agent.Scrobble(ctx, "user-1", scrobbler.Scrobble{MediaFile: *track, TimeStamp: ts})
|
||||
|
||||
Expect(err).ToNot(HaveOccurred())
|
||||
body, _ := io.ReadAll(httpClient.SavedRequest.Body)
|
||||
sentParams, _ := url.ParseQuery(string(body))
|
||||
Expect(sentParams.Get("artist")).To(Equal("First Artist"))
|
||||
Expect(sentParams.Get("albumArtist")).To(Equal("First Album Artist"))
|
||||
})
|
||||
})
|
||||
|
||||
It("skips songs with less than 31 seconds", func() {
|
||||
track.Duration = 29
|
||||
httpClient.Res = http.Response{Body: io.NopCloser(bytes.NewBufferString("{}")), StatusCode: 200}
|
||||
|
||||
err := agent.Scrobble(ctx, "user-1", scrobbler.Scrobble{MediaFile: *track, TimeStamp: time.Now()})
|
||||
|
||||
Expect(err).ToNot(HaveOccurred())
|
||||
Expect(httpClient.SavedRequest).To(BeNil())
|
||||
})
|
||||
|
||||
It("returns ErrNotAuthorized if user is not linked", func() {
|
||||
err := agent.Scrobble(ctx, "user-2", scrobbler.Scrobble{MediaFile: *track, TimeStamp: time.Now()})
|
||||
Expect(err).To(MatchError(scrobbler.ErrNotAuthorized))
|
||||
})
|
||||
|
||||
It("returns ErrRetryLater on error 11", func() {
|
||||
httpClient.Res = http.Response{
|
||||
Body: io.NopCloser(bytes.NewBufferString(`{"error":11,"message":"Service Offline - This service is temporarily offline. Try again later."}`)),
|
||||
StatusCode: 400,
|
||||
}
|
||||
|
||||
err := agent.Scrobble(ctx, "user-1", scrobbler.Scrobble{MediaFile: *track, TimeStamp: time.Now()})
|
||||
Expect(err).To(MatchError(scrobbler.ErrRetryLater))
|
||||
})
|
||||
|
||||
It("returns ErrRetryLater on error 16", func() {
|
||||
httpClient.Res = http.Response{
|
||||
Body: io.NopCloser(bytes.NewBufferString(`{"error":16,"message":"There was a temporary error processing your request. Please try again"}`)),
|
||||
StatusCode: 400,
|
||||
}
|
||||
|
||||
err := agent.Scrobble(ctx, "user-1", scrobbler.Scrobble{MediaFile: *track, TimeStamp: time.Now()})
|
||||
Expect(err).To(MatchError(scrobbler.ErrRetryLater))
|
||||
})
|
||||
|
||||
It("returns ErrRetryLater on http errors", func() {
|
||||
httpClient.Res = http.Response{
|
||||
Body: io.NopCloser(bytes.NewBufferString(`internal server error`)),
|
||||
StatusCode: 500,
|
||||
}
|
||||
|
||||
err := agent.Scrobble(ctx, "user-1", scrobbler.Scrobble{MediaFile: *track, TimeStamp: time.Now()})
|
||||
Expect(err).To(MatchError(scrobbler.ErrRetryLater))
|
||||
})
|
||||
|
||||
It("returns ErrUnrecoverable on other errors", func() {
|
||||
httpClient.Res = http.Response{
|
||||
Body: io.NopCloser(bytes.NewBufferString(`{"error":8,"message":"Operation failed - Something else went wrong"}`)),
|
||||
StatusCode: 400,
|
||||
}
|
||||
|
||||
err := agent.Scrobble(ctx, "user-1", scrobbler.Scrobble{MediaFile: *track, TimeStamp: time.Now()})
|
||||
Expect(err).To(MatchError(scrobbler.ErrUnrecoverable))
|
||||
})
|
||||
})
|
||||
})
|
||||
|
||||
Describe("GetAlbumInfo", func() {
|
||||
var agent *lastfmAgent
|
||||
var httpClient *tests.FakeHttpClient
|
||||
BeforeEach(func() {
|
||||
httpClient = &tests.FakeHttpClient{}
|
||||
client := newClient("API_KEY", "SECRET", httpClient)
|
||||
agent = lastFMConstructor(ds)
|
||||
agent.client = client
|
||||
})
|
||||
|
||||
It("returns the biography", func() {
|
||||
f, _ := os.Open("tests/fixtures/lastfm.album.getinfo.json")
|
||||
httpClient.Res = http.Response{Body: f, StatusCode: 200}
|
||||
Expect(agent.GetAlbumInfo(ctx, "Believe", "Cher", "03c91c40-49a6-44a7-90e7-a700edf97a62")).To(Equal(&agents.AlbumInfo{
|
||||
Name: "Believe",
|
||||
MBID: "03c91c40-49a6-44a7-90e7-a700edf97a62",
|
||||
Description: "Believe is the twenty-third studio album by American singer-actress Cher, released on November 10, 1998 by Warner Bros. Records. The RIAA certified it Quadruple Platinum on December 23, 1999, recognizing four million shipments in the United States; Worldwide, the album has sold more than 20 million copies, making it the biggest-selling album of her career. In 1999 the album received three Grammy Awards nominations including \"Record of the Year\", \"Best Pop Album\" and winning \"Best Dance Recording\" for the single \"Believe\". It was released by Warner Bros. Records at the end of 1998. The album was executive produced by Rob <a href=\"https://www.last.fm/music/Cher/Believe\">Read more on Last.fm</a>.",
|
||||
URL: "https://www.last.fm/music/Cher/Believe",
|
||||
}))
|
||||
Expect(httpClient.RequestCount).To(Equal(1))
|
||||
Expect(httpClient.SavedRequest.URL.Query().Get("mbid")).To(Equal("03c91c40-49a6-44a7-90e7-a700edf97a62"))
|
||||
})
|
||||
|
||||
It("returns empty images if no images are available", func() {
|
||||
f, _ := os.Open("tests/fixtures/lastfm.album.getinfo.empty_urls.json")
|
||||
httpClient.Res = http.Response{Body: f, StatusCode: 200}
|
||||
Expect(agent.GetAlbumInfo(ctx, "The Definitive Less Damage And More Joy", "The Jesus and Mary Chain", "")).To(Equal(&agents.AlbumInfo{
|
||||
Name: "The Definitive Less Damage And More Joy",
|
||||
URL: "https://www.last.fm/music/The+Jesus+and+Mary+Chain/The+Definitive+Less+Damage+And+More+Joy",
|
||||
}))
|
||||
Expect(httpClient.RequestCount).To(Equal(1))
|
||||
Expect(httpClient.SavedRequest.URL.Query().Get("album")).To(Equal("The Definitive Less Damage And More Joy"))
|
||||
})
|
||||
|
||||
It("returns an error if Last.fm call fails", func() {
|
||||
httpClient.Err = errors.New("error")
|
||||
_, err := agent.GetAlbumInfo(ctx, "123", "U2", "mbid-1234")
|
||||
Expect(err).To(HaveOccurred())
|
||||
Expect(httpClient.RequestCount).To(Equal(1))
|
||||
Expect(httpClient.SavedRequest.URL.Query().Get("mbid")).To(Equal("mbid-1234"))
|
||||
})
|
||||
|
||||
It("returns an error if Last.fm call returns an error", func() {
|
||||
httpClient.Res = http.Response{Body: io.NopCloser(bytes.NewBufferString(lastfmError3)), StatusCode: 200}
|
||||
_, err := agent.GetAlbumInfo(ctx, "123", "U2", "mbid-1234")
|
||||
Expect(err).To(HaveOccurred())
|
||||
Expect(httpClient.RequestCount).To(Equal(1))
|
||||
Expect(httpClient.SavedRequest.URL.Query().Get("mbid")).To(Equal("mbid-1234"))
|
||||
})
|
||||
|
||||
It("returns an error if Last.fm call returns an error 6 and mbid is empty", func() {
|
||||
httpClient.Res = http.Response{Body: io.NopCloser(bytes.NewBufferString(lastfmError6)), StatusCode: 200}
|
||||
_, err := agent.GetAlbumInfo(ctx, "123", "U2", "")
|
||||
Expect(err).To(HaveOccurred())
|
||||
Expect(httpClient.RequestCount).To(Equal(1))
|
||||
})
|
||||
|
||||
Context("MBID non existent in Last.fm", func() {
|
||||
It("calls again when last.fm returns an error 6", func() {
|
||||
httpClient.Res = http.Response{Body: io.NopCloser(bytes.NewBufferString(lastfmError6)), StatusCode: 200}
|
||||
_, _ = agent.GetAlbumInfo(ctx, "123", "U2", "mbid-1234")
|
||||
Expect(httpClient.RequestCount).To(Equal(2))
|
||||
Expect(httpClient.SavedRequest.URL.Query().Get("mbid")).To(BeEmpty())
|
||||
})
|
||||
})
|
||||
})
|
||||
|
||||
Describe("GetArtistImages", func() {
|
||||
var agent *lastfmAgent
|
||||
var apiClient *tests.FakeHttpClient
|
||||
var httpClient *tests.FakeHttpClient
|
||||
|
||||
BeforeEach(func() {
|
||||
apiClient = &tests.FakeHttpClient{}
|
||||
httpClient = &tests.FakeHttpClient{}
|
||||
client := newClient("API_KEY", "SECRET", apiClient)
|
||||
agent = lastFMConstructor(ds)
|
||||
agent.client = client
|
||||
agent.httpClient = httpClient
|
||||
})
|
||||
|
||||
It("returns the artist image from the page", func() {
|
||||
fApi, _ := os.Open("tests/fixtures/lastfm.artist.getinfo.json")
|
||||
apiClient.Res = http.Response{Body: fApi, StatusCode: 200}
|
||||
|
||||
fScraper, _ := os.Open("tests/fixtures/lastfm.artist.page.html")
|
||||
httpClient.Res = http.Response{Body: fScraper, StatusCode: 200}
|
||||
|
||||
images, err := agent.GetArtistImages(ctx, "123", "U2", "")
|
||||
Expect(err).ToNot(HaveOccurred())
|
||||
Expect(images).To(HaveLen(1))
|
||||
Expect(images[0].URL).To(Equal("https://lastfm.freetls.fastly.net/i/u/ar0/818148bf682d429dc21b59a73ef6f68e.png"))
|
||||
})
|
||||
|
||||
It("returns empty list if image is the ignored default image", func() {
|
||||
fApi, _ := os.Open("tests/fixtures/lastfm.artist.getinfo.json")
|
||||
apiClient.Res = http.Response{Body: fApi, StatusCode: 200}
|
||||
|
||||
fScraper, _ := os.Open("tests/fixtures/lastfm.artist.page.ignored.html")
|
||||
httpClient.Res = http.Response{Body: fScraper, StatusCode: 200}
|
||||
|
||||
images, err := agent.GetArtistImages(ctx, "123", "U2", "")
|
||||
Expect(err).ToNot(HaveOccurred())
|
||||
Expect(images).To(BeEmpty())
|
||||
})
|
||||
|
||||
It("returns empty list if page has no meta tags", func() {
|
||||
fApi, _ := os.Open("tests/fixtures/lastfm.artist.getinfo.json")
|
||||
apiClient.Res = http.Response{Body: fApi, StatusCode: 200}
|
||||
|
||||
fScraper, _ := os.Open("tests/fixtures/lastfm.artist.page.no_meta.html")
|
||||
httpClient.Res = http.Response{Body: fScraper, StatusCode: 200}
|
||||
|
||||
images, err := agent.GetArtistImages(ctx, "123", "U2", "")
|
||||
Expect(err).ToNot(HaveOccurred())
|
||||
Expect(images).To(BeEmpty())
|
||||
})
|
||||
|
||||
It("returns error if API call fails", func() {
|
||||
apiClient.Err = errors.New("api error")
|
||||
_, err := agent.GetArtistImages(ctx, "123", "U2", "")
|
||||
Expect(err).To(HaveOccurred())
|
||||
Expect(err.Error()).To(ContainSubstring("get artist info"))
|
||||
})
|
||||
|
||||
It("returns error if scraper call fails", func() {
|
||||
fApi, _ := os.Open("tests/fixtures/lastfm.artist.getinfo.json")
|
||||
apiClient.Res = http.Response{Body: fApi, StatusCode: 200}
|
||||
|
||||
httpClient.Err = errors.New("scraper error")
|
||||
_, err := agent.GetArtistImages(ctx, "123", "U2", "")
|
||||
Expect(err).To(HaveOccurred())
|
||||
Expect(err.Error()).To(ContainSubstring("get artist url"))
|
||||
})
|
||||
})
|
||||
})
|
||||
|
||||
// langAwareHttpClient is a mock HTTP client that returns different responses based on the lang parameter
type langAwareHttpClient struct {
responses map[string]http.Response
requests []*http.Request
requestCount int
}

func newLangAwareHttpClient() *langAwareHttpClient {
return &langAwareHttpClient{
responses: make(map[string]http.Response),
requests: make([]*http.Request, 0),
}
}

func (c *langAwareHttpClient) Do(req *http.Request) (*http.Response, error) {
c.requestCount++
c.requests = append(c.requests, req)
lang := req.URL.Query().Get("lang")
if resp, ok := c.responses[lang]; ok {
return &resp, nil
}
// Return default empty response if no specific response is configured
return &http.Response{
StatusCode: 200,
Body: io.NopCloser(bytes.NewBufferString(`{}`)),
}, nil
}

@@ -1,274 +0,0 @@
|
||||
package taglib
|
||||
|
||||
import (
|
||||
"io/fs"
|
||||
"os"
|
||||
"time"
|
||||
|
||||
"github.com/djherbis/times"
|
||||
"github.com/navidrome/navidrome/model"
|
||||
"github.com/navidrome/navidrome/model/metadata"
|
||||
"github.com/navidrome/navidrome/utils/gg"
|
||||
. "github.com/onsi/ginkgo/v2"
|
||||
. "github.com/onsi/gomega"
|
||||
)
|
||||
|
||||
type testFileInfo struct {
|
||||
fs.FileInfo
|
||||
}
|
||||
|
||||
func (t testFileInfo) BirthTime() time.Time {
|
||||
if ts := times.Get(t.FileInfo); ts.HasBirthTime() {
|
||||
return ts.BirthTime()
|
||||
}
|
||||
return t.FileInfo.ModTime()
|
||||
}
|
||||
|
||||
var _ = Describe("Extractor", func() {
|
||||
toP := func(name, sortName, mbid string) model.Participant {
|
||||
return model.Participant{
|
||||
Artist: model.Artist{Name: name, SortArtistName: sortName, MbzArtistID: mbid},
|
||||
}
|
||||
}
|
||||
|
||||
roles := []struct {
|
||||
model.Role
|
||||
model.ParticipantList
|
||||
}{
|
||||
{model.RoleComposer, model.ParticipantList{
|
||||
toP("coma a", "a, coma", "bf13b584-f27c-43db-8f42-32898d33d4e2"),
|
||||
toP("comb", "comb", "924039a2-09c6-4d29-9b4f-50cc54447d36"),
|
||||
}},
|
||||
{model.RoleLyricist, model.ParticipantList{
|
||||
toP("la a", "a, la", "c84f648f-68a6-40a2-a0cb-d135b25da3c2"),
|
||||
toP("lb", "lb", "0a7c582d-143a-4540-b4e9-77200835af65"),
|
||||
}},
|
||||
{model.RoleArranger, model.ParticipantList{
|
||||
toP("aa", "", "4605a1d4-8d15-42a3-bd00-9c20e42f71e6"),
|
||||
toP("ab", "", "002f0ff8-77bf-42cc-8216-61a9c43dc145"),
|
||||
}},
|
||||
{model.RoleConductor, model.ParticipantList{
|
||||
toP("cona", "", "af86879b-2141-42af-bad2-389a4dc91489"),
|
||||
toP("conb", "", "3dfa3c70-d7d3-4b97-b953-c298dd305e12"),
|
||||
}},
|
||||
{model.RoleDirector, model.ParticipantList{
|
||||
toP("dia", "", "f943187f-73de-4794-be47-88c66f0fd0f4"),
|
||||
toP("dib", "", "bceb75da-1853-4b3d-b399-b27f0cafc389"),
|
||||
}},
|
||||
{model.RoleEngineer, model.ParticipantList{
|
||||
toP("ea", "", "f634bf6d-d66a-425d-888a-28ad39392759"),
|
||||
toP("eb", "", "243d64ae-d514-44e1-901a-b918d692baee"),
|
||||
}},
|
||||
{model.RoleProducer, model.ParticipantList{
|
||||
toP("pra", "", "d971c8d7-999c-4a5f-ac31-719721ab35d6"),
|
||||
toP("prb", "", "f0a09070-9324-434f-a599-6d25ded87b69"),
|
||||
}},
|
||||
{model.RoleRemixer, model.ParticipantList{
|
||||
toP("ra", "", "c7dc6095-9534-4c72-87cc-aea0103462cf"),
|
||||
toP("rb", "", "8ebeef51-c08c-4736-992f-c37870becedd"),
|
||||
}},
|
||||
{model.RoleDJMixer, model.ParticipantList{
|
||||
toP("dja", "", "d063f13b-7589-4efc-ab7f-c60e6db17247"),
|
||||
toP("djb", "", "3636670c-385f-4212-89c8-0ff51d6bc456"),
|
||||
}},
|
||||
{model.RoleMixer, model.ParticipantList{
|
||||
toP("ma", "", "53fb5a2d-7016-427e-a563-d91819a5f35a"),
|
||||
toP("mb", "", "64c13e65-f0da-4ab9-a300-71ee53b0376a"),
|
||||
}},
|
||||
}
|
||||
|
||||
var e *extractor
|
||||
|
||||
parseTestFile := func(path string) *model.MediaFile {
|
||||
mds, err := e.Parse(path)
|
||||
Expect(err).ToNot(HaveOccurred())
|
||||
|
||||
info, ok := mds[path]
|
||||
Expect(ok).To(BeTrue())
|
||||
|
||||
fileInfo, err := os.Stat(path)
|
||||
Expect(err).ToNot(HaveOccurred())
|
||||
info.FileInfo = testFileInfo{FileInfo: fileInfo}
|
||||
|
||||
metadata := metadata.New(path, info)
|
||||
mf := metadata.ToMediaFile(1, "folderID")
|
||||
return &mf
|
||||
}
|
||||
|
||||
BeforeEach(func() {
|
||||
e = &extractor{}
|
||||
})
|
||||
|
||||
Describe("ReplayGain", func() {
|
||||
DescribeTable("test replaygain end-to-end", func(file string, trackGain, trackPeak, albumGain, albumPeak *float64) {
|
||||
mf := parseTestFile("tests/fixtures/" + file)
|
||||
|
||||
Expect(mf.RGTrackGain).To(Equal(trackGain))
|
||||
Expect(mf.RGTrackPeak).To(Equal(trackPeak))
|
||||
Expect(mf.RGAlbumGain).To(Equal(albumGain))
|
||||
Expect(mf.RGAlbumPeak).To(Equal(albumPeak))
|
||||
},
|
||||
Entry("mp3 with no replaygain", "no_replaygain.mp3", nil, nil, nil, nil),
|
||||
Entry("mp3 with no zero replaygain", "zero_replaygain.mp3", gg.P(0.0), gg.P(1.0), gg.P(0.0), gg.P(1.0)),
|
||||
)
|
||||
})
|
||||
|
||||
Describe("lyrics", func() {
|
||||
makeLyrics := func(code, secondLine string) model.Lyrics {
|
||||
return model.Lyrics{
|
||||
DisplayArtist: "",
|
||||
DisplayTitle: "",
|
||||
Lang: code,
|
||||
Line: []model.Line{
|
||||
{Start: gg.P(int64(0)), Value: "This is"},
|
||||
{Start: gg.P(int64(2500)), Value: secondLine},
|
||||
},
|
||||
Offset: nil,
|
||||
Synced: true,
|
||||
}
|
||||
}
|
||||
|
||||
It("should fetch both synced and unsynced lyrics in mixed flac", func() {
|
||||
mf := parseTestFile("tests/fixtures/mixed-lyrics.flac")
|
||||
|
||||
lyrics, err := mf.StructuredLyrics()
|
||||
Expect(err).ToNot(HaveOccurred())
|
||||
Expect(lyrics).To(HaveLen(2))
|
||||
|
||||
Expect(lyrics[0].Synced).To(BeTrue())
|
||||
Expect(lyrics[1].Synced).To(BeFalse())
|
||||
})
|
||||
|
||||
It("should handle mp3 with uslt and sylt", func() {
|
||||
mf := parseTestFile("tests/fixtures/test.mp3")
|
||||
|
||||
lyrics, err := mf.StructuredLyrics()
|
||||
Expect(err).ToNot(HaveOccurred())
|
||||
Expect(lyrics).To(HaveLen(4))
|
||||
|
||||
engSylt := makeLyrics("eng", "English SYLT")
|
||||
engUslt := makeLyrics("eng", "English")
|
||||
unsSylt := makeLyrics("xxx", "unspecified SYLT")
|
||||
unsUslt := makeLyrics("xxx", "unspecified")
|
||||
|
||||
Expect(lyrics).To(ConsistOf(engSylt, engUslt, unsSylt, unsUslt))
|
||||
})
|
||||
|
||||
DescribeTable("format-specific lyrics", func(file string, isId3 bool) {
|
||||
mf := parseTestFile("tests/fixtures/" + file)
|
||||
|
||||
lyrics, err := mf.StructuredLyrics()
|
||||
Expect(err).To(Not(HaveOccurred()))
|
||||
Expect(lyrics).To(HaveLen(2))
|
||||
|
||||
unspec := makeLyrics("xxx", "unspecified")
|
||||
eng := makeLyrics("xxx", "English")
|
||||
|
||||
if isId3 {
|
||||
eng.Lang = "eng"
|
||||
}
|
||||
|
||||
Expect(lyrics).To(Or(
|
||||
Equal(model.LyricList{unspec, eng}),
|
||||
Equal(model.LyricList{eng, unspec})))
|
||||
},
|
||||
Entry("flac", "test.flac", false),
|
||||
Entry("m4a", "test.m4a", false),
|
||||
Entry("ogg", "test.ogg", false),
|
||||
Entry("wma", "test.wma", false),
|
||||
Entry("wv", "test.wv", false),
|
||||
Entry("wav", "test.wav", true),
|
||||
Entry("aiff", "test.aiff", true),
|
||||
)
|
||||
})
|
||||
|
||||
Describe("Participants", func() {
|
||||
DescribeTable("test tags consistent across formats", func(format string) {
|
||||
mf := parseTestFile("tests/fixtures/test." + format)
|
||||
|
||||
for _, data := range roles {
|
||||
role := data.Role
|
||||
artists := data.ParticipantList
|
||||
|
||||
actual := mf.Participants[role]
|
||||
Expect(actual).To(HaveLen(len(artists)))
|
||||
|
||||
for i := range artists {
|
||||
actualArtist := actual[i]
|
||||
expectedArtist := artists[i]
|
||||
|
||||
Expect(actualArtist.Name).To(Equal(expectedArtist.Name))
|
||||
Expect(actualArtist.SortArtistName).To(Equal(expectedArtist.SortArtistName))
|
||||
Expect(actualArtist.MbzArtistID).To(Equal(expectedArtist.MbzArtistID))
|
||||
}
|
||||
}
|
||||
|
||||
if format != "m4a" {
|
||||
performers := mf.Participants[model.RolePerformer]
|
||||
Expect(performers).To(HaveLen(8))
|
||||
|
||||
rules := map[string][]string{
|
||||
"pgaa": {"2fd0b311-9fa8-4ff9-be5d-f6f3d16b835e", "Guitar"},
|
||||
"pgbb": {"223d030b-bf97-4c2a-ad26-b7f7bbe25c93", "Guitar", ""},
|
||||
"pvaa": {"cb195f72-448f-41c8-b962-3f3c13d09d38", "Vocals"},
|
||||
"pvbb": {"60a1f832-8ca2-49f6-8660-84d57f07b520", "Vocals", "Flute"},
|
||||
"pfaa": {"51fb40c-0305-4bf9-a11b-2ee615277725", "", "Flute"},
|
||||
}
|
||||
|
||||
for name, rule := range rules {
|
||||
mbid := rule[0]
|
||||
for i := 1; i < len(rule); i++ {
|
||||
found := false
|
||||
|
||||
for _, mapped := range performers {
|
||||
if mapped.Name == name && mapped.MbzArtistID == mbid && mapped.SubRole == rule[i] {
|
||||
found = true
|
||||
break
|
||||
}
|
||||
}
|
||||
|
||||
Expect(found).To(BeTrue(), "Could not find matching artist")
|
||||
}
|
||||
}
|
||||
}
|
||||
},
|
||||
Entry("FLAC format", "flac"),
|
||||
Entry("M4a format", "m4a"),
|
||||
Entry("OGG format", "ogg"),
|
||||
Entry("WV format", "wv"),
|
||||
|
||||
Entry("MP3 format", "mp3"),
|
||||
Entry("WAV format", "wav"),
|
||||
Entry("AIFF format", "aiff"),
|
||||
)
|
||||
|
||||
It("should parse wma", func() {
|
||||
mf := parseTestFile("tests/fixtures/test.wma")
|
||||
|
||||
for _, data := range roles {
|
||||
role := data.Role
|
||||
artists := data.ParticipantList
|
||||
actual := mf.Participants[role]
|
||||
|
||||
// WMA has no Arranger role
|
||||
if role == model.RoleArranger {
|
||||
Expect(actual).To(HaveLen(0))
|
||||
continue
|
||||
}
|
||||
|
||||
Expect(actual).To(HaveLen(len(artists)), role.String())
|
||||
|
||||
// For some bizarre reason, the order is inverted. We also don't get
|
||||
// sort names or MBIDs
|
||||
for i := range artists {
|
||||
idx := len(artists) - 1 - i
|
||||
|
||||
actualArtist := actual[i]
|
||||
expectedArtist := artists[idx]
|
||||
|
||||
Expect(actualArtist.Name).To(Equal(expectedArtist.Name))
|
||||
}
|
||||
}
|
||||
})
|
||||
})
|
||||
})
|
||||
@@ -1,178 +0,0 @@
|
||||
package taglib
|
||||
|
||||
import (
|
||||
"io/fs"
|
||||
"path/filepath"
|
||||
"strconv"
|
||||
"strings"
|
||||
"time"
|
||||
|
||||
"github.com/navidrome/navidrome/conf"
|
||||
"github.com/navidrome/navidrome/core/storage/local"
|
||||
"github.com/navidrome/navidrome/log"
|
||||
"github.com/navidrome/navidrome/model/metadata"
|
||||
)
|
||||
|
||||
type extractor struct {
|
||||
baseDir string
|
||||
}
|
||||
|
||||
func (e extractor) Parse(files ...string) (map[string]metadata.Info, error) {
|
||||
results := make(map[string]metadata.Info)
|
||||
for _, path := range files {
|
||||
props, err := e.extractMetadata(path)
|
||||
if err != nil {
|
||||
continue
|
||||
}
|
||||
results[path] = *props
|
||||
}
|
||||
return results, nil
|
||||
}
|
||||
|
||||
func (e extractor) Version() string {
|
||||
return Version()
|
||||
}
|
||||
|
||||
func (e extractor) extractMetadata(filePath string) (*metadata.Info, error) {
|
||||
fullPath := filepath.Join(e.baseDir, filePath)
|
||||
tags, err := Read(fullPath)
|
||||
if err != nil {
|
||||
log.Warn("extractor: Error reading metadata from file. Skipping", "filePath", fullPath, err)
|
||||
return nil, err
|
||||
}
|
||||
|
||||
// Parse audio properties
|
||||
ap := metadata.AudioProperties{}
|
||||
ap.BitRate = parseProp(tags, "__bitrate")
|
||||
ap.Channels = parseProp(tags, "__channels")
|
||||
ap.SampleRate = parseProp(tags, "__samplerate")
|
||||
ap.BitDepth = parseProp(tags, "__bitspersample")
|
||||
length := parseProp(tags, "__lengthinmilliseconds")
|
||||
ap.Duration = (time.Millisecond * time.Duration(length)).Round(time.Millisecond * 10)
|
||||
|
||||
// Extract basic tags
|
||||
parseBasicTag(tags, "__title", "title")
|
||||
parseBasicTag(tags, "__artist", "artist")
|
||||
parseBasicTag(tags, "__album", "album")
|
||||
parseBasicTag(tags, "__comment", "comment")
|
||||
parseBasicTag(tags, "__genre", "genre")
|
||||
parseBasicTag(tags, "__year", "year")
|
||||
parseBasicTag(tags, "__track", "tracknumber")
|
||||
|
||||
// Parse track/disc totals
|
||||
parseTuple := func(prop string) {
|
||||
tagName := prop + "number"
|
||||
tagTotal := prop + "total"
|
||||
if value, ok := tags[tagName]; ok && len(value) > 0 {
|
||||
parts := strings.Split(value[0], "/")
|
||||
tags[tagName] = []string{parts[0]}
|
||||
if len(parts) == 2 {
|
||||
tags[tagTotal] = []string{parts[1]}
|
||||
}
|
||||
}
|
||||
}
|
||||
parseTuple("track")
|
||||
parseTuple("disc")
|
||||
|
||||
// Adjust some ID3 tags
|
||||
parseLyrics(tags)
|
||||
parseTIPL(tags)
|
||||
delete(tags, "tmcl") // TMCL is already parsed by TagLib
|
||||
|
||||
return &metadata.Info{
|
||||
Tags: tags,
|
||||
AudioProperties: ap,
|
||||
HasPicture: tags["has_picture"] != nil && len(tags["has_picture"]) > 0 && tags["has_picture"][0] == "true",
|
||||
}, nil
|
||||
}
|
||||
|
||||
// parseLyrics makes sure lyrics tags have a language code
func parseLyrics(tags map[string][]string) {
lyrics := tags["lyrics"]
if len(lyrics) > 0 {
tags["lyrics:xxx"] = lyrics
delete(tags, "lyrics")
}
}

// These are the only roles we support, based on Picard's tag map:
// https://picard-docs.musicbrainz.org/downloads/MusicBrainz_Picard_Tag_Map.html
var tiplMapping = map[string]string{
"arranger": "arranger",
"engineer": "engineer",
"producer": "producer",
"mix": "mixer",
"DJ-mix": "djmixer",
}

// parseProp parses a numeric property from the tags map and returns it as an int.
// It also deletes the property from the tags map after parsing.
func parseProp(tags map[string][]string, prop string) int {
if value, ok := tags[prop]; ok && len(value) > 0 {
v, _ := strconv.Atoi(value[0])
delete(tags, prop)
return v
}
return 0
}

// parseBasicTag checks if a basic tag (like __title, __artist, etc.) exists in the tags map.
// If it does, it moves the value to a more appropriate tag name (like title, artist, etc.),
// and deletes the basic tag from the map. If the target tag already exists, it ignores the basic tag.
func parseBasicTag(tags map[string][]string, basicName string, tagName string) {
basicValue := tags[basicName]
if len(basicValue) == 0 {
return
}
delete(tags, basicName)
if len(tags[tagName]) == 0 {
tags[tagName] = basicValue
}
}

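// For illustration only, a hypothetical call showing the effect of parseBasicTag
// (values are made up, not taken from the test fixtures):
//
//	tags := map[string][]string{"__title": {"Song"}}
//	parseBasicTag(tags, "__title", "title")
//	// tags["title"] == []string{"Song"}; the "__title" key is removed
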
// parseTIPL parses the ID3v2.4 TIPL frame string, which is received from TagLib in the format:
//
// "arranger Andrew Powell engineer Chris Blair engineer Pat Stapley producer Eric Woolfson".
//
// and breaks it down into a map of roles and names, e.g.:
//
// {"arranger": ["Andrew Powell"], "engineer": ["Chris Blair", "Pat Stapley"], "producer": ["Eric Woolfson"]}.
func parseTIPL(tags map[string][]string) {
tipl := tags["tipl"]
if len(tipl) == 0 {
return
}

addRole := func(currentRole string, currentValue []string) {
if currentRole != "" && len(currentValue) > 0 {
role := tiplMapping[currentRole]
tags[role] = append(tags[role], strings.Join(currentValue, " "))
}
}

var currentRole string
var currentValue []string
for _, part := range strings.Split(tipl[0], " ") {
if _, ok := tiplMapping[part]; ok {
addRole(currentRole, currentValue)
currentRole = part
currentValue = nil
continue
}
currentValue = append(currentValue, part)
}
addRole(currentRole, currentValue)
delete(tags, "tipl")
}

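// For illustration only, a hypothetical call showing how parseTIPL rewrites the tags map
// (values are made up, matching the format documented above):
//
//	tags := map[string][]string{"tipl": {"engineer Chris Blair engineer Pat Stapley producer Eric Woolfson"}}
//	parseTIPL(tags)
//	// tags["engineer"] == []string{"Chris Blair", "Pat Stapley"}
//	// tags["producer"] == []string{"Eric Woolfson"}
//	// the raw "tipl" key is removed
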
var _ local.Extractor = (*extractor)(nil)
|
||||
|
||||
func init() {
|
||||
local.RegisterExtractor("legacy-taglib", func(_ fs.FS, baseDir string) local.Extractor {
|
||||
// ignores fs, as the taglib extractor only works with local files
|
||||
return &extractor{baseDir}
|
||||
})
|
||||
conf.AddHook(func() {
|
||||
log.Debug("TagLib version", "version", Version())
|
||||
})
|
||||
}
|
||||
@@ -1,295 +0,0 @@
|
||||
package taglib
|
||||
|
||||
import (
|
||||
"io/fs"
|
||||
"os"
|
||||
"strings"
|
||||
|
||||
"github.com/navidrome/navidrome/utils"
|
||||
. "github.com/onsi/ginkgo/v2"
|
||||
. "github.com/onsi/gomega"
|
||||
)
|
||||
|
||||
var _ = Describe("Extractor", func() {
|
||||
var e *extractor
|
||||
|
||||
BeforeEach(func() {
|
||||
e = &extractor{}
|
||||
})
|
||||
|
||||
Describe("Parse", func() {
|
||||
It("correctly parses metadata from all files in folder", func() {
|
||||
mds, err := e.Parse(
|
||||
"tests/fixtures/test.mp3",
|
||||
"tests/fixtures/test.ogg",
|
||||
)
|
||||
Expect(err).NotTo(HaveOccurred())
|
||||
Expect(mds).To(HaveLen(2))
|
||||
|
||||
// Test MP3
|
||||
m := mds["tests/fixtures/test.mp3"]
|
||||
Expect(m.Tags).To(HaveKeyWithValue("title", []string{"Song"}))
|
||||
Expect(m.Tags).To(HaveKeyWithValue("album", []string{"Album"}))
|
||||
Expect(m.Tags).To(HaveKeyWithValue("artist", []string{"Artist"}))
|
||||
Expect(m.Tags).To(HaveKeyWithValue("albumartist", []string{"Album Artist"}))
|
||||
|
||||
Expect(m.HasPicture).To(BeTrue())
|
||||
Expect(m.AudioProperties.Duration.String()).To(Equal("1.02s"))
|
||||
Expect(m.AudioProperties.BitRate).To(Equal(192))
|
||||
Expect(m.AudioProperties.Channels).To(Equal(2))
|
||||
Expect(m.AudioProperties.SampleRate).To(Equal(44100))
|
||||
|
||||
Expect(m.Tags).To(Or(
|
||||
HaveKeyWithValue("compilation", []string{"1"}),
|
||||
HaveKeyWithValue("tcmp", []string{"1"})),
|
||||
)
|
||||
Expect(m.Tags).To(HaveKeyWithValue("genre", []string{"Rock"}))
|
||||
Expect(m.Tags).To(HaveKeyWithValue("date", []string{"2014-05-21"}))
|
||||
Expect(m.Tags).To(HaveKeyWithValue("originaldate", []string{"1996-11-21"}))
|
||||
Expect(m.Tags).To(HaveKeyWithValue("releasedate", []string{"2020-12-31"}))
|
||||
Expect(m.Tags).To(HaveKeyWithValue("discnumber", []string{"1"}))
|
||||
Expect(m.Tags).To(HaveKeyWithValue("disctotal", []string{"2"}))
|
||||
Expect(m.Tags).To(HaveKeyWithValue("comment", []string{"Comment1\nComment2"}))
|
||||
Expect(m.Tags).To(HaveKeyWithValue("bpm", []string{"123"}))
|
||||
Expect(m.Tags).To(HaveKeyWithValue("replaygain_album_gain", []string{"+3.21518 dB"}))
|
||||
Expect(m.Tags).To(HaveKeyWithValue("replaygain_album_peak", []string{"0.9125"}))
|
||||
Expect(m.Tags).To(HaveKeyWithValue("replaygain_track_gain", []string{"-1.48 dB"}))
|
||||
Expect(m.Tags).To(HaveKeyWithValue("replaygain_track_peak", []string{"0.4512"}))
|
||||
|
||||
Expect(m.Tags).To(HaveKeyWithValue("tracknumber", []string{"2"}))
|
||||
Expect(m.Tags).To(HaveKeyWithValue("tracktotal", []string{"10"}))
|
||||
|
||||
Expect(m.Tags).ToNot(HaveKey("lyrics"))
|
||||
Expect(m.Tags).To(Or(HaveKeyWithValue("lyrics:eng", []string{
|
||||
"[00:00.00]This is\n[00:02.50]English SYLT\n",
|
||||
"[00:00.00]This is\n[00:02.50]English",
|
||||
}), HaveKeyWithValue("lyrics:eng", []string{
|
||||
"[00:00.00]This is\n[00:02.50]English",
|
||||
"[00:00.00]This is\n[00:02.50]English SYLT\n",
|
||||
})))
|
||||
Expect(m.Tags).To(Or(HaveKeyWithValue("lyrics:xxx", []string{
|
||||
"[00:00.00]This is\n[00:02.50]unspecified SYLT\n",
|
||||
"[00:00.00]This is\n[00:02.50]unspecified",
|
||||
}), HaveKeyWithValue("lyrics:xxx", []string{
|
||||
"[00:00.00]This is\n[00:02.50]unspecified",
|
||||
"[00:00.00]This is\n[00:02.50]unspecified SYLT\n",
|
||||
})))
|
||||
|
||||
// Test OGG
|
||||
m = mds["tests/fixtures/test.ogg"]
|
||||
Expect(err).To(BeNil())
|
||||
Expect(m.Tags).To(HaveKeyWithValue("fbpm", []string{"141.7"}))
|
||||
|
||||
// TagLib 1.12 returns 18, previous versions return 39.
|
||||
// See https://github.com/taglib/taglib/commit/2f238921824741b2cfe6fbfbfc9701d9827ab06b
|
||||
Expect(m.AudioProperties.BitRate).To(BeElementOf(18, 19, 39, 40, 43, 49))
|
||||
Expect(m.AudioProperties.Channels).To(BeElementOf(2))
|
||||
Expect(m.AudioProperties.SampleRate).To(BeElementOf(8000))
|
||||
Expect(m.HasPicture).To(BeTrue())
|
||||
})
|
||||
|
||||
DescribeTable("Format-Specific tests",
|
||||
func(file, duration string, channels, samplerate, bitdepth int, albumGain, albumPeak, trackGain, trackPeak string, id3Lyrics bool, image bool) {
|
||||
file = "tests/fixtures/" + file
|
||||
mds, err := e.Parse(file)
|
||||
Expect(err).NotTo(HaveOccurred())
|
||||
Expect(mds).To(HaveLen(1))
|
||||
|
||||
m := mds[file]
|
||||
|
||||
Expect(m.HasPicture).To(Equal(image))
|
||||
Expect(m.AudioProperties.Duration.String()).To(Equal(duration))
|
||||
Expect(m.AudioProperties.Channels).To(Equal(channels))
|
||||
Expect(m.AudioProperties.SampleRate).To(Equal(samplerate))
|
||||
Expect(m.AudioProperties.BitDepth).To(Equal(bitdepth))
|
||||
|
||||
Expect(m.Tags).To(Or(
|
||||
HaveKeyWithValue("replaygain_album_gain", []string{albumGain}),
|
||||
HaveKeyWithValue("----:com.apple.itunes:replaygain_album_gain", []string{albumGain}),
|
||||
))
|
||||
|
||||
Expect(m.Tags).To(Or(
|
||||
HaveKeyWithValue("replaygain_album_peak", []string{albumPeak}),
|
||||
HaveKeyWithValue("----:com.apple.itunes:replaygain_album_peak", []string{albumPeak}),
|
||||
))
|
||||
Expect(m.Tags).To(Or(
|
||||
HaveKeyWithValue("replaygain_track_gain", []string{trackGain}),
|
||||
HaveKeyWithValue("----:com.apple.itunes:replaygain_track_gain", []string{trackGain}),
|
||||
))
|
||||
Expect(m.Tags).To(Or(
|
||||
HaveKeyWithValue("replaygain_track_peak", []string{trackPeak}),
|
||||
HaveKeyWithValue("----:com.apple.itunes:replaygain_track_peak", []string{trackPeak}),
|
||||
))
|
||||
|
||||
Expect(m.Tags).To(HaveKeyWithValue("title", []string{"Title"}))
|
||||
Expect(m.Tags).To(HaveKeyWithValue("album", []string{"Album"}))
|
||||
Expect(m.Tags).To(HaveKeyWithValue("artist", []string{"Artist"}))
|
||||
Expect(m.Tags).To(HaveKeyWithValue("albumartist", []string{"Album Artist"}))
|
||||
Expect(m.Tags).To(HaveKeyWithValue("genre", []string{"Rock"}))
|
||||
Expect(m.Tags).To(HaveKeyWithValue("date", []string{"2014"}))
|
||||
|
||||
Expect(m.Tags).To(HaveKeyWithValue("bpm", []string{"123"}))
|
||||
Expect(m.Tags).To(Or(
|
||||
HaveKeyWithValue("tracknumber", []string{"3"}),
|
||||
HaveKeyWithValue("tracknumber", []string{"3/10"}),
|
||||
))
|
||||
if !strings.HasSuffix(file, "test.wma") {
|
||||
// TODO Not sure why this is not working for WMA
|
||||
Expect(m.Tags).To(HaveKeyWithValue("tracktotal", []string{"10"}))
|
||||
}
|
||||
Expect(m.Tags).To(Or(
|
||||
HaveKeyWithValue("discnumber", []string{"1"}),
|
||||
HaveKeyWithValue("discnumber", []string{"1/2"}),
|
||||
))
|
||||
Expect(m.Tags).To(HaveKeyWithValue("disctotal", []string{"2"}))
|
||||
|
||||
// WMA does not have a "compilation" tag, but "wm/iscompilation"
|
||||
Expect(m.Tags).To(Or(
|
||||
HaveKeyWithValue("compilation", []string{"1"}),
|
||||
HaveKeyWithValue("wm/iscompilation", []string{"1"})),
|
||||
)
|
||||
|
||||
if id3Lyrics {
|
||||
Expect(m.Tags).To(HaveKeyWithValue("lyrics:eng", []string{
|
||||
"[00:00.00]This is\n[00:02.50]English",
|
||||
}))
|
||||
Expect(m.Tags).To(HaveKeyWithValue("lyrics:xxx", []string{
|
||||
"[00:00.00]This is\n[00:02.50]unspecified",
|
||||
}))
|
||||
} else {
|
||||
Expect(m.Tags).To(HaveKeyWithValue("lyrics:xxx", []string{
|
||||
"[00:00.00]This is\n[00:02.50]unspecified",
|
||||
"[00:00.00]This is\n[00:02.50]English",
|
||||
}))
|
||||
}
|
||||
|
||||
Expect(m.Tags).To(HaveKeyWithValue("comment", []string{"Comment1\nComment2"}))
|
||||
},
|
||||
|
||||
// ffmpeg -f lavfi -i "sine=frequency=1200:duration=1" test.flac
|
||||
Entry("correctly parses flac tags", "test.flac", "1s", 1, 44100, 16, "+4.06 dB", "0.12496948", "+4.06 dB", "0.12496948", false, true),
|
||||
|
||||
Entry("correctly parses m4a (aac) gain tags", "01 Invisible (RED) Edit Version.m4a", "1.04s", 2, 44100, 16, "0.37", "0.48", "0.37", "0.48", false, true),
|
||||
Entry("correctly parses m4a (aac) gain tags (uppercase)", "test.m4a", "1.04s", 2, 44100, 16, "0.37", "0.48", "0.37", "0.48", false, true),
|
||||
Entry("correctly parses ogg (vorbis) tags", "test.ogg", "1.04s", 2, 8000, 0, "+7.64 dB", "0.11772506", "+7.64 dB", "0.11772506", false, true),
|
||||
|
||||
// ffmpeg -f lavfi -i "sine=frequency=900:duration=1" test.wma
|
||||
// Weird note: for the tag parsing to work, the lyrics are actually stored in the reverse order
|
||||
Entry("correctly parses wma/asf tags", "test.wma", "1.02s", 1, 44100, 16, "3.27 dB", "0.132914", "3.27 dB", "0.132914", false, true),
|
||||
|
||||
// ffmpeg -f lavfi -i "sine=frequency=800:duration=1" test.wv
|
||||
Entry("correctly parses wv (wavpak) tags", "test.wv", "1s", 1, 44100, 16, "3.43 dB", "0.125061", "3.43 dB", "0.125061", false, true),
|
||||
|
||||
// ffmpeg -f lavfi -i "sine=frequency=1000:duration=1" test.wav
|
||||
Entry("correctly parses wav tags", "test.wav", "1s", 1, 44100, 16, "3.06 dB", "0.125056", "3.06 dB", "0.125056", true, true),
|
||||
|
||||
// ffmpeg -f lavfi -i "sine=frequency=1400:duration=1" test.aiff
|
||||
Entry("correctly parses aiff tags", "test.aiff", "1s", 1, 44100, 16, "2.00 dB", "0.124972", "2.00 dB", "0.124972", true, true),
|
||||
)
|
||||
|
||||
// Skip these tests when running as root
|
||||
Context("Access Forbidden", func() {
|
||||
var accessForbiddenFile string
|
||||
var RegularUserContext = XContext
|
||||
var isRegularUser = os.Getuid() != 0
|
||||
if isRegularUser {
|
||||
RegularUserContext = Context
|
||||
}
|
||||
|
||||
// Only run permission tests if we are not root
|
||||
RegularUserContext("when run without root privileges", func() {
|
||||
BeforeEach(func() {
|
||||
accessForbiddenFile = utils.TempFileName("access_forbidden-", ".mp3")
|
||||
|
||||
f, err := os.OpenFile(accessForbiddenFile, os.O_WRONLY|os.O_CREATE, 0222)
|
||||
Expect(err).ToNot(HaveOccurred())
|
||||
|
||||
DeferCleanup(func() {
|
||||
Expect(f.Close()).To(Succeed())
|
||||
Expect(os.Remove(accessForbiddenFile)).To(Succeed())
|
||||
})
|
||||
})
|
||||
|
||||
It("correctly handle unreadable file due to insufficient read permission", func() {
|
||||
_, err := e.extractMetadata(accessForbiddenFile)
|
||||
Expect(err).To(MatchError(os.ErrPermission))
|
||||
})
|
||||
|
||||
It("skips the file if it cannot be read", func() {
|
||||
files := []string{
|
||||
"tests/fixtures/test.mp3",
|
||||
"tests/fixtures/test.ogg",
|
||||
accessForbiddenFile,
|
||||
}
|
||||
mds, err := e.Parse(files...)
|
||||
Expect(err).NotTo(HaveOccurred())
|
||||
Expect(mds).To(HaveLen(2))
|
||||
Expect(mds).ToNot(HaveKey(accessForbiddenFile))
|
||||
})
|
||||
})
|
||||
})
|
||||
|
||||
})
|
||||
|
||||
Describe("Error Checking", func() {
|
||||
It("returns a generic ErrPath if file does not exist", func() {
|
||||
testFilePath := "tests/fixtures/NON_EXISTENT.ogg"
|
||||
_, err := e.extractMetadata(testFilePath)
|
||||
Expect(err).To(MatchError(fs.ErrNotExist))
|
||||
})
|
||||
It("does not throw a SIGSEGV error when reading a file with an invalid frame", func() {
|
||||
// File has an empty TDAT frame
|
||||
md, err := e.extractMetadata("tests/fixtures/invalid-files/test-invalid-frame.mp3")
|
||||
Expect(err).ToNot(HaveOccurred())
|
||||
Expect(md.Tags).To(HaveKeyWithValue("albumartist", []string{"Elvis Presley"}))
|
||||
})
|
||||
})
|
||||
|
||||
Describe("parseTIPL", func() {
|
||||
var tags map[string][]string
|
||||
|
||||
BeforeEach(func() {
|
||||
tags = make(map[string][]string)
|
||||
})
|
||||
|
||||
Context("when the TIPL string is populated", func() {
|
||||
It("correctly parses roles and names", func() {
|
||||
tags["tipl"] = []string{"arranger Andrew Powell DJ-mix François Kevorkian DJ-mix Jane Doe engineer Chris Blair"}
|
||||
parseTIPL(tags)
|
||||
Expect(tags["arranger"]).To(ConsistOf("Andrew Powell"))
|
||||
Expect(tags["engineer"]).To(ConsistOf("Chris Blair"))
|
||||
Expect(tags["djmixer"]).To(ConsistOf("François Kevorkian", "Jane Doe"))
|
||||
})
|
||||
|
||||
It("handles multiple names for a single role", func() {
|
||||
tags["tipl"] = []string{"engineer Pat Stapley producer Eric Woolfson engineer Chris Blair"}
|
||||
parseTIPL(tags)
|
||||
Expect(tags["producer"]).To(ConsistOf("Eric Woolfson"))
|
||||
Expect(tags["engineer"]).To(ConsistOf("Pat Stapley", "Chris Blair"))
|
||||
})
|
||||
|
||||
It("discards roles without names", func() {
|
||||
tags["tipl"] = []string{"engineer Pat Stapley producer engineer Chris Blair"}
|
||||
parseTIPL(tags)
|
||||
Expect(tags).ToNot(HaveKey("producer"))
|
||||
Expect(tags["engineer"]).To(ConsistOf("Pat Stapley", "Chris Blair"))
|
||||
})
|
||||
})
|
||||
|
||||
Context("when the TIPL string is empty", func() {
|
||||
It("does nothing", func() {
|
||||
tags["tipl"] = []string{""}
|
||||
parseTIPL(tags)
|
||||
Expect(tags).To(BeEmpty())
|
||||
})
|
||||
})
|
||||
|
||||
Context("when the TIPL is not present", func() {
|
||||
It("does nothing", func() {
|
||||
parseTIPL(tags)
|
||||
Expect(tags).To(BeEmpty())
|
||||
})
|
||||
})
|
||||
})
|
||||
|
||||
})
|
||||
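The specs above exercise parseTIPL, which splits an ID3 TIPL ("Involved People List") string into role/name pairs. The following is a minimal, hypothetical Go sketch of that behavior (parseTIPLSketch and its abbreviated role table are illustrative, not Navidrome's actual implementation); it reproduces the cases the specs describe: multi-word names, repeated roles, and roles without a following name being discarded.

package main

import (
	"fmt"
	"strings"
)

// roleNames maps TIPL role tokens to the tag name used for the parsed result.
// Only the roles exercised by the specs above are listed here.
var roleNames = map[string]string{
	"arranger": "arranger",
	"engineer": "engineer",
	"producer": "producer",
	"dj-mix":   "djmixer",
}

func parseTIPLSketch(value string) map[string][]string {
	result := map[string][]string{}
	var role string
	var name []string

	flush := func() {
		// Roles that are not followed by at least one name token are discarded.
		if role != "" && len(name) > 0 {
			result[role] = append(result[role], strings.Join(name, " "))
		}
		name = nil
	}

	for _, token := range strings.Fields(value) {
		if r, ok := roleNames[strings.ToLower(token)]; ok {
			flush()
			role = r
			continue
		}
		name = append(name, token)
	}
	flush()
	return result
}

func main() {
	fmt.Println(parseTIPLSketch("engineer Pat Stapley producer Eric Woolfson engineer Chris Blair"))
	// map[engineer:[Pat Stapley Chris Blair] producer:[Eric Woolfson]]
}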
@@ -1,299 +0,0 @@
|
||||
#include <stdlib.h>
|
||||
#include <string.h>
|
||||
|
||||
#define TAGLIB_STATIC
|
||||
#include <apeproperties.h>
|
||||
#include <apetag.h>
|
||||
#include <aifffile.h>
|
||||
#include <asffile.h>
|
||||
#include <dsffile.h>
|
||||
#include <fileref.h>
|
||||
#include <flacfile.h>
|
||||
#include <id3v2tag.h>
|
||||
#include <unsynchronizedlyricsframe.h>
|
||||
#include <synchronizedlyricsframe.h>
|
||||
#include <mp4file.h>
|
||||
#include <mpegfile.h>
|
||||
#include <opusfile.h>
|
||||
#include <tpropertymap.h>
|
||||
#include <vorbisfile.h>
|
||||
#include <wavfile.h>
|
||||
#include <wavpackfile.h>
|
||||
|
||||
#include "taglib_wrapper.h"
|
||||
|
||||
char has_cover(const TagLib::FileRef f);
|
||||
|
||||
static char TAGLIB_VERSION[16];
|
||||
|
||||
char* taglib_version() {
|
||||
snprintf((char *)TAGLIB_VERSION, 16, "%d.%d.%d", TAGLIB_MAJOR_VERSION, TAGLIB_MINOR_VERSION, TAGLIB_PATCH_VERSION);
|
||||
return (char *)TAGLIB_VERSION;
|
||||
}
|
||||
|
||||
int taglib_read(const FILENAME_CHAR_T *filename, unsigned long id) {
|
||||
TagLib::FileRef f(filename, true, TagLib::AudioProperties::Fast);
|
||||
|
||||
if (f.isNull()) {
|
||||
return TAGLIB_ERR_PARSE;
|
||||
}
|
||||
|
||||
if (!f.audioProperties()) {
|
||||
return TAGLIB_ERR_AUDIO_PROPS;
|
||||
}
|
||||
|
||||
// Add audio properties to the tags
|
||||
const TagLib::AudioProperties *props(f.audioProperties());
|
||||
goPutInt(id, (char *)"__lengthinmilliseconds", props->lengthInMilliseconds());
|
||||
goPutInt(id, (char *)"__bitrate", props->bitrate());
|
||||
goPutInt(id, (char *)"__channels", props->channels());
|
||||
goPutInt(id, (char *)"__samplerate", props->sampleRate());
|
||||
|
||||
// Extract bits per sample for supported formats
|
||||
int bitsPerSample = 0;
|
||||
if (const auto* apeProperties{ dynamic_cast<const TagLib::APE::Properties*>(props) })
|
||||
bitsPerSample = apeProperties->bitsPerSample();
|
||||
else if (const auto* asfProperties{ dynamic_cast<const TagLib::ASF::Properties*>(props) })
|
||||
bitsPerSample = asfProperties->bitsPerSample();
|
||||
else if (const auto* flacProperties{ dynamic_cast<const TagLib::FLAC::Properties*>(props) })
|
||||
bitsPerSample = flacProperties->bitsPerSample();
|
||||
else if (const auto* mp4Properties{ dynamic_cast<const TagLib::MP4::Properties*>(props) })
|
||||
bitsPerSample = mp4Properties->bitsPerSample();
|
||||
else if (const auto* wavePackProperties{ dynamic_cast<const TagLib::WavPack::Properties*>(props) })
|
||||
bitsPerSample = wavePackProperties->bitsPerSample();
|
||||
else if (const auto* aiffProperties{ dynamic_cast<const TagLib::RIFF::AIFF::Properties*>(props) })
|
||||
bitsPerSample = aiffProperties->bitsPerSample();
|
||||
else if (const auto* wavProperties{ dynamic_cast<const TagLib::RIFF::WAV::Properties*>(props) })
|
||||
bitsPerSample = wavProperties->bitsPerSample();
|
||||
else if (const auto* dsfProperties{ dynamic_cast<const TagLib::DSF::Properties*>(props) })
|
||||
bitsPerSample = dsfProperties->bitsPerSample();
|
||||
|
||||
if (bitsPerSample > 0) {
|
||||
goPutInt(id, (char *)"__bitspersample", bitsPerSample);
|
||||
}
|
||||
|
||||
// Send all properties to the Go map
|
||||
TagLib::PropertyMap tags = f.file()->properties();
|
||||
|
||||
// Make sure at least the basic properties are extracted
|
||||
TagLib::Tag *basic = f.file()->tag();
|
||||
if (!basic->isEmpty()) {
|
||||
if (!basic->title().isEmpty()) {
|
||||
tags.insert("__title", basic->title());
|
||||
}
|
||||
if (!basic->artist().isEmpty()) {
|
||||
tags.insert("__artist", basic->artist());
|
||||
}
|
||||
if (!basic->album().isEmpty()) {
|
||||
tags.insert("__album", basic->album());
|
||||
}
|
||||
if (!basic->comment().isEmpty()) {
|
||||
tags.insert("__comment", basic->comment());
|
||||
}
|
||||
if (!basic->genre().isEmpty()) {
|
||||
tags.insert("__genre", basic->genre());
|
||||
}
|
||||
if (basic->year() > 0) {
|
||||
tags.insert("__year", TagLib::String::number(basic->year()));
|
||||
}
|
||||
if (basic->track() > 0) {
|
||||
tags.insert("__track", TagLib::String::number(basic->track()));
|
||||
}
|
||||
}
|
||||
|
||||
TagLib::ID3v2::Tag *id3Tags = NULL;
|
||||
|
||||
// Get some extended/non-standard ID3-only tags (ex: iTunes extended frames)
|
||||
TagLib::MPEG::File *mp3File(dynamic_cast<TagLib::MPEG::File *>(f.file()));
|
||||
if (mp3File != NULL) {
|
||||
id3Tags = mp3File->ID3v2Tag();
|
||||
}
|
||||
|
||||
if (id3Tags == NULL) {
|
||||
TagLib::RIFF::WAV::File *wavFile(dynamic_cast<TagLib::RIFF::WAV::File *>(f.file()));
|
||||
if (wavFile != NULL && wavFile->hasID3v2Tag()) {
|
||||
id3Tags = wavFile->ID3v2Tag();
|
||||
}
|
||||
}
|
||||
|
||||
if (id3Tags == NULL) {
|
||||
TagLib::RIFF::AIFF::File *aiffFile(dynamic_cast<TagLib::RIFF::AIFF::File *>(f.file()));
|
||||
if (aiffFile && aiffFile->hasID3v2Tag()) {
|
||||
id3Tags = aiffFile->tag();
|
||||
}
|
||||
}
|
||||
|
||||
// Yes, it is possible to have ID3v2 tags in FLAC. However, that can cause problems
|
||||
// with many players, so they will not be parsed
|
||||
|
||||
if (id3Tags != NULL) {
|
||||
const auto &frames = id3Tags->frameListMap();
|
||||
|
||||
for (const auto &kv: frames) {
|
||||
if (kv.first == "USLT") {
|
||||
for (const auto &tag: kv.second) {
|
||||
TagLib::ID3v2::UnsynchronizedLyricsFrame *frame = dynamic_cast<TagLib::ID3v2::UnsynchronizedLyricsFrame *>(tag);
|
||||
if (frame == NULL) continue;
|
||||
|
||||
tags.erase("LYRICS");
|
||||
|
||||
const auto bv = frame->language();
|
||||
char language[4] = {'x', 'x', 'x', '\0'};
|
||||
if (bv.size() == 3) {
|
||||
strncpy(language, bv.data(), 3);
|
||||
}
|
||||
|
||||
char *val = const_cast<char*>(frame->text().toCString(true));
|
||||
|
||||
goPutLyrics(id, language, val);
|
||||
}
|
||||
} else if (kv.first == "SYLT") {
|
||||
for (const auto &tag: kv.second) {
|
||||
TagLib::ID3v2::SynchronizedLyricsFrame *frame = dynamic_cast<TagLib::ID3v2::SynchronizedLyricsFrame *>(tag);
|
||||
if (frame == NULL) continue;
|
||||
|
||||
const auto bv = frame->language();
|
||||
char language[4] = {'x', 'x', 'x', '\0'};
|
||||
if (bv.size() == 3) {
|
||||
strncpy(language, bv.data(), 3);
|
||||
}
|
||||
|
||||
const auto format = frame->timestampFormat();
|
||||
if (format == TagLib::ID3v2::SynchronizedLyricsFrame::AbsoluteMilliseconds) {
|
||||
|
||||
for (const auto &line: frame->synchedText()) {
|
||||
char *text = const_cast<char*>(line.text.toCString(true));
|
||||
goPutLyricLine(id, language, text, line.time);
|
||||
}
|
||||
} else if (format == TagLib::ID3v2::SynchronizedLyricsFrame::AbsoluteMpegFrames) {
|
||||
const int sampleRate = props->sampleRate();
|
||||
|
||||
if (sampleRate != 0) {
|
||||
for (const auto &line: frame->synchedText()) {
|
||||
const int timeInMs = (line.time * 1000) / sampleRate;
|
||||
char *text = const_cast<char*>(line.text.toCString(true));
|
||||
goPutLyricLine(id, language, text, timeInMs);
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
} else if (kv.first == "TIPL"){
|
||||
if (!kv.second.isEmpty()) {
|
||||
tags.insert(kv.first, kv.second.front()->toString());
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// M4A may have some iTunes specific tags not captured by the PropertyMap interface
|
||||
TagLib::MP4::File *m4afile(dynamic_cast<TagLib::MP4::File *>(f.file()));
|
||||
if (m4afile != NULL) {
|
||||
const auto itemListMap = m4afile->tag()->itemMap();
|
||||
for (const auto item: itemListMap) {
|
||||
char *key = const_cast<char*>(item.first.toCString(true));
|
||||
for (const auto value: item.second.toStringList()) {
|
||||
char *val = const_cast<char*>(value.toCString(true));
|
||||
goPutM4AStr(id, key, val);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// WMA/ASF files may have additional tags not captured by the PropertyMap interface
|
||||
TagLib::ASF::File *asfFile(dynamic_cast<TagLib::ASF::File *>(f.file()));
|
||||
if (asfFile != NULL) {
|
||||
const TagLib::ASF::Tag *asfTags{asfFile->tag()};
|
||||
const auto itemListMap = asfTags->attributeListMap();
|
||||
for (const auto item : itemListMap) {
|
||||
char *key = const_cast<char*>(item.first.toCString(true));
|
||||
|
||||
for (auto j = item.second.begin();
|
||||
j != item.second.end(); ++j) {
|
||||
|
||||
char *val = const_cast<char*>(j->toString().toCString(true));
|
||||
goPutStr(id, key, val);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// Send all collected tags to the Go map
|
||||
for (TagLib::PropertyMap::ConstIterator i = tags.begin(); i != tags.end();
|
||||
++i) {
|
||||
char *key = const_cast<char*>(i->first.toCString(true));
|
||||
for (TagLib::StringList::ConstIterator j = i->second.begin();
|
||||
j != i->second.end(); ++j) {
|
||||
char *val = const_cast<char*>((*j).toCString(true));
|
||||
goPutStr(id, key, val);
|
||||
}
|
||||
}
|
||||
|
||||
// Cover art has to be handled separately
|
||||
if (has_cover(f)) {
|
||||
goPutStr(id, (char *)"has_picture", (char *)"true");
|
||||
}
|
||||
|
||||
return 0;
|
||||
}
|
||||
|
||||
// Detect if the file has cover art. Returns 1 if the file has cover art, 0 otherwise.
|
||||
char has_cover(const TagLib::FileRef f) {
|
||||
char hasCover = 0;
|
||||
// ----- MP3
|
||||
if (TagLib::MPEG::File * mp3File{dynamic_cast<TagLib::MPEG::File *>(f.file())}) {
|
||||
if (mp3File->ID3v2Tag()) {
|
||||
const auto &frameListMap{mp3File->ID3v2Tag()->frameListMap()};
|
||||
hasCover = !frameListMap["APIC"].isEmpty();
|
||||
}
|
||||
}
|
||||
// ----- FLAC
|
||||
else if (TagLib::FLAC::File * flacFile{dynamic_cast<TagLib::FLAC::File *>(f.file())}) {
|
||||
hasCover = !flacFile->pictureList().isEmpty();
|
||||
}
|
||||
// ----- MP4
|
||||
else if (TagLib::MP4::File * mp4File{dynamic_cast<TagLib::MP4::File *>(f.file())}) {
|
||||
auto &coverItem{mp4File->tag()->itemMap()["covr"]};
|
||||
TagLib::MP4::CoverArtList coverArtList{coverItem.toCoverArtList()};
|
||||
hasCover = !coverArtList.isEmpty();
|
||||
}
|
||||
// ----- Ogg
|
||||
else if (TagLib::Ogg::Vorbis::File * vorbisFile{dynamic_cast<TagLib::Ogg::Vorbis::File *>(f.file())}) {
|
||||
hasCover = !vorbisFile->tag()->pictureList().isEmpty();
|
||||
}
|
||||
// ----- Opus
|
||||
else if (TagLib::Ogg::Opus::File * opusFile{dynamic_cast<TagLib::Ogg::Opus::File *>(f.file())}) {
|
||||
hasCover = !opusFile->tag()->pictureList().isEmpty();
|
||||
}
|
||||
// ----- WAV
|
||||
else if (TagLib::RIFF::WAV::File * wavFile{ dynamic_cast<TagLib::RIFF::WAV::File*>(f.file()) }) {
|
||||
if (wavFile->hasID3v2Tag()) {
|
||||
const auto& frameListMap{ wavFile->ID3v2Tag()->frameListMap() };
|
||||
hasCover = !frameListMap["APIC"].isEmpty();
|
||||
}
|
||||
}
|
||||
// ----- AIFF
|
||||
else if (TagLib::RIFF::AIFF::File * aiffFile{ dynamic_cast<TagLib::RIFF::AIFF::File *>(f.file())}) {
|
||||
if (aiffFile->hasID3v2Tag()) {
|
||||
const auto& frameListMap{ aiffFile->tag()->frameListMap() };
|
||||
hasCover = !frameListMap["APIC"].isEmpty();
|
||||
}
|
||||
}
|
||||
// ----- WMA
|
||||
else if (TagLib::ASF::File * asfFile{dynamic_cast<TagLib::ASF::File *>(f.file())}) {
|
||||
const TagLib::ASF::Tag *tag{ asfFile->tag() };
|
||||
hasCover = tag && tag->attributeListMap().contains("WM/Picture");
|
||||
}
|
||||
// ----- DSF
|
||||
else if (TagLib::DSF::File * dsffile{ dynamic_cast<TagLib::DSF::File *>(f.file())}) {
|
||||
const TagLib::ID3v2::Tag *tag { dsffile->tag() };
|
||||
hasCover = tag && !tag->frameListMap()["APIC"].isEmpty();
|
||||
}
|
||||
// ----- WavPack (APE tag)
|
||||
else if (TagLib::WavPack::File * wvFile{dynamic_cast<TagLib::WavPack::File *>(f.file())}) {
|
||||
if (wvFile->hasAPETag()) {
|
||||
// This is the particular string that Picard uses
|
||||
hasCover = !wvFile->APETag()->itemListMap()["COVER ART (FRONT)"].isEmpty();
|
||||
}
|
||||
}
|
||||
|
||||
return hasCover;
|
||||
}
|
||||
@@ -1,157 +0,0 @@
|
||||
package taglib
|
||||
|
||||
/*
|
||||
#cgo !windows pkg-config: --define-prefix taglib
|
||||
#cgo windows pkg-config: taglib
|
||||
#cgo illumos LDFLAGS: -lstdc++ -lsendfile
|
||||
#cgo linux darwin CXXFLAGS: -std=c++11
|
||||
#cgo darwin LDFLAGS: -L/opt/homebrew/opt/taglib/lib
|
||||
#include <stdio.h>
|
||||
#include <stdlib.h>
|
||||
#include <string.h>
|
||||
#include "taglib_wrapper.h"
|
||||
*/
|
||||
import "C"
|
||||
import (
|
||||
"encoding/json"
|
||||
"fmt"
|
||||
"os"
|
||||
"runtime/debug"
|
||||
"strconv"
|
||||
"strings"
|
||||
"sync"
|
||||
"sync/atomic"
|
||||
"unsafe"
|
||||
|
||||
"github.com/navidrome/navidrome/log"
|
||||
)
|
||||
|
||||
const iTunesKeyPrefix = "----:com.apple.itunes:"
|
||||
|
||||
func Version() string {
|
||||
return C.GoString(C.taglib_version())
|
||||
}
|
||||
|
||||
func Read(filename string) (tags map[string][]string, err error) {
|
||||
// Do not crash on failures in the C code/library
|
||||
debug.SetPanicOnFault(true)
|
||||
defer func() {
|
||||
if r := recover(); r != nil {
|
||||
log.Error("extractor: recovered from panic when reading tags", "file", filename, "error", r)
|
||||
err = fmt.Errorf("extractor: recovered from panic: %s", r)
|
||||
}
|
||||
}()
|
||||
|
||||
fp := getFilename(filename)
|
||||
defer C.free(unsafe.Pointer(fp))
|
||||
id, m, release := newMap()
|
||||
defer release()
|
||||
|
||||
log.Trace("extractor: reading tags", "filename", filename, "map_id", id)
|
||||
res := C.taglib_read(fp, C.ulong(id))
|
||||
switch res {
|
||||
case C.TAGLIB_ERR_PARSE:
|
||||
// Check the additional case where the file is unreadable due to insufficient permissions
|
||||
file, fileErr := os.OpenFile(filename, os.O_RDONLY, 0600)
|
||||
defer file.Close()
|
||||
|
||||
if os.IsPermission(fileErr) {
|
||||
return nil, fmt.Errorf("navidrome does not have permission: %w", fileErr)
|
||||
} else if fileErr != nil {
|
||||
return nil, fmt.Errorf("cannot parse file media file: %w", fileErr)
|
||||
} else {
|
||||
return nil, fmt.Errorf("cannot parse file media file")
|
||||
}
|
||||
case C.TAGLIB_ERR_AUDIO_PROPS:
|
||||
return nil, fmt.Errorf("can't get audio properties from file")
|
||||
}
|
||||
if log.IsGreaterOrEqualTo(log.LevelDebug) {
|
||||
j, _ := json.Marshal(m)
|
||||
log.Trace("extractor: read tags", "tags", string(j), "filename", filename, "id", id)
|
||||
} else {
|
||||
log.Trace("extractor: read tags", "tags", m, "filename", filename, "id", id)
|
||||
}
|
||||
|
||||
return m, nil
|
||||
}
|
||||
|
||||
type tagMap map[string][]string
|
||||
|
||||
var allMaps sync.Map
|
||||
var mapsNextID atomic.Uint32
|
||||
|
||||
func newMap() (uint32, tagMap, func()) {
|
||||
id := mapsNextID.Add(1)
|
||||
|
||||
m := tagMap{}
|
||||
allMaps.Store(id, m)
|
||||
|
||||
return id, m, func() {
|
||||
allMaps.Delete(id)
|
||||
}
|
||||
}
|
||||
|
||||
func doPutTag(id C.ulong, key string, val *C.char) {
|
||||
if key == "" {
|
||||
return
|
||||
}
|
||||
|
||||
r, _ := allMaps.Load(uint32(id))
|
||||
m := r.(tagMap)
|
||||
k := strings.ToLower(key)
|
||||
v := strings.TrimSpace(C.GoString(val))
|
||||
m[k] = append(m[k], v)
|
||||
}
|
||||
|
||||
//export goPutM4AStr
|
||||
func goPutM4AStr(id C.ulong, key *C.char, val *C.char) {
|
||||
k := C.GoString(key)
|
||||
|
||||
// Special case for M4A: strip the iTunes-specific prefix, keeping only the actual tag name
|
||||
k = strings.TrimPrefix(k, iTunesKeyPrefix)
|
||||
doPutTag(id, k, val)
|
||||
}
|
||||
|
||||
//export goPutStr
|
||||
func goPutStr(id C.ulong, key *C.char, val *C.char) {
|
||||
doPutTag(id, C.GoString(key), val)
|
||||
}
|
||||
|
||||
//export goPutInt
|
||||
func goPutInt(id C.ulong, key *C.char, val C.int) {
|
||||
valStr := strconv.Itoa(int(val))
|
||||
vp := C.CString(valStr)
|
||||
defer C.free(unsafe.Pointer(vp))
|
||||
goPutStr(id, key, vp)
|
||||
}
|
||||
|
||||
//export goPutLyrics
|
||||
func goPutLyrics(id C.ulong, lang *C.char, val *C.char) {
|
||||
doPutTag(id, "lyrics:"+C.GoString(lang), val)
|
||||
}
|
||||
|
||||
//export goPutLyricLine
|
||||
func goPutLyricLine(id C.ulong, lang *C.char, text *C.char, time C.int) {
|
||||
language := C.GoString(lang)
|
||||
line := C.GoString(text)
|
||||
timeGo := int64(time)
|
||||
|
||||
ms := timeGo % 1000
|
||||
timeGo /= 1000
|
||||
sec := timeGo % 60
|
||||
timeGo /= 60
|
||||
minimum := timeGo % 60
|
||||
formattedLine := fmt.Sprintf("[%02d:%02d.%02d]%s\n", minimum, sec, ms/10, line)
|
||||
|
||||
key := "lyrics:" + language
|
||||
|
||||
r, _ := allMaps.Load(uint32(id))
|
||||
m := r.(tagMap)
|
||||
k := strings.ToLower(key)
|
||||
existing, ok := m[k]
|
||||
if ok {
|
||||
existing[0] += formattedLine
|
||||
} else {
|
||||
m[k] = []string{formattedLine}
|
||||
}
|
||||
}
|
||||
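goPutLyricLine above turns an absolute time in milliseconds into an LRC-style "[mm:ss.xx]" prefix before appending the line to the lyrics tag. A standalone sketch of just that conversion (formatLRC is a hypothetical helper name; the arithmetic mirrors the code above):

package main

import "fmt"

// formatLRC converts a time in milliseconds into an LRC-style timestamp,
// e.g. 2500 -> "[00:02.50]", using the same truncating arithmetic as goPutLyricLine.
func formatLRC(timeMs int64) string {
	ms := timeMs % 1000
	timeMs /= 1000
	sec := timeMs % 60
	timeMs /= 60
	minutes := timeMs % 60
	return fmt.Sprintf("[%02d:%02d.%02d]", minutes, sec, ms/10)
}

func main() {
	fmt.Println(formatLRC(2500) + "This is English") // [00:02.50]This is English
}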
@@ -1,24 +0,0 @@
|
||||
#define TAGLIB_ERR_PARSE -1
|
||||
#define TAGLIB_ERR_AUDIO_PROPS -2
|
||||
|
||||
#ifdef __cplusplus
|
||||
extern "C" {
|
||||
#endif
|
||||
|
||||
#ifdef WIN32
|
||||
#define FILENAME_CHAR_T wchar_t
|
||||
#else
|
||||
#define FILENAME_CHAR_T char
|
||||
#endif
|
||||
|
||||
extern void goPutM4AStr(unsigned long id, char *key, char *val);
|
||||
extern void goPutStr(unsigned long id, char *key, char *val);
|
||||
extern void goPutInt(unsigned long id, char *key, int val);
|
||||
extern void goPutLyrics(unsigned long id, char *lang, char *val);
|
||||
extern void goPutLyricLine(unsigned long id, char *lang, char *text, int time);
|
||||
int taglib_read(const FILENAME_CHAR_T *filename, unsigned long id);
|
||||
char* taglib_version();
|
||||
|
||||
#ifdef __cplusplus
|
||||
}
|
||||
#endif
|
||||
28
api/parameters/_index.yml
Normal file
28
api/parameters/_index.yml
Normal file
@@ -0,0 +1,28 @@
|
||||
pageOffset:
|
||||
$ref: './query/pageOffset.yml'
|
||||
pageLimit:
|
||||
$ref: './query/pageLimit.yml'
|
||||
filterEquals:
|
||||
$ref: './query/filterEquals.yml'
|
||||
filterLessThan:
|
||||
$ref: './query/filterLessThan.yml'
|
||||
filterLessOrEqual:
|
||||
$ref: './query/filterLessOrEqual.yml'
|
||||
filterGreaterThan:
|
||||
$ref: './query/filterGreaterThan.yml'
|
||||
filterGreaterOrEqual:
|
||||
$ref: './query/filterGreaterOrEqual.yml'
|
||||
filterContains:
|
||||
$ref: './query/filterContains.yml'
|
||||
filterStartsWith:
|
||||
$ref: './query/filterStartsWith.yml'
|
||||
filterEndsWith:
|
||||
$ref: './query/filterEndsWith.yml'
|
||||
sort:
|
||||
$ref: './query/sort.yml'
|
||||
include:
|
||||
$ref: './query/include.yml'
|
||||
includeForTracks:
|
||||
$ref: './query/includeForTracks.yml'
|
||||
includeForAlbums:
|
||||
$ref: './query/includeForAlbums.yml'
|
||||
9
api/parameters/query/filterContains.yml
Normal file
9
api/parameters/query/filterContains.yml
Normal file
@@ -0,0 +1,9 @@
|
||||
name: filter[contains]
|
||||
in: query
|
||||
description: 'Filter by any property containing text. Usage: filter[contains]=property:value'
|
||||
required: false
|
||||
schema:
|
||||
type: array
|
||||
items:
|
||||
type: string
|
||||
pattern: '^\w+:\w+'
|
||||
9
api/parameters/query/filterEndsWith.yml
Normal file
9
api/parameters/query/filterEndsWith.yml
Normal file
@@ -0,0 +1,9 @@
|
||||
name: filter[endsWith]
|
||||
in: query
|
||||
description: 'Filter by any property that ends with text. Usage: filter[endsWith]=property:value'
|
||||
required: false
|
||||
schema:
|
||||
type: array
|
||||
items:
|
||||
type: string
|
||||
pattern: '^\w+:\w+'
|
||||
9
api/parameters/query/filterEquals.yml
Normal file
9
api/parameters/query/filterEquals.yml
Normal file
@@ -0,0 +1,9 @@
|
||||
name: filter[equals]
|
||||
in: query
|
||||
description: 'Filter by any property with an exact match. Usage: filter[equals]=property:value'
|
||||
required: false
|
||||
schema:
|
||||
type: array
|
||||
items:
|
||||
type: string
|
||||
pattern: '^\w+:\w+'
|
||||
9
api/parameters/query/filterGreaterOrEqual.yml
Normal file
9
api/parameters/query/filterGreaterOrEqual.yml
Normal file
@@ -0,0 +1,9 @@
|
||||
name: filter[greaterOrEqual]
|
||||
in: query
|
||||
description: 'Filter by any numeric property greater than or equal to a value. Usage: filter[greaterOrEqual]=property:value'
|
||||
required: false
|
||||
schema:
|
||||
type: array
|
||||
items:
|
||||
type: string
|
||||
pattern: '^\w+:\d+'
|
||||
9
api/parameters/query/filterGreaterThan.yml
Normal file
9
api/parameters/query/filterGreaterThan.yml
Normal file
@@ -0,0 +1,9 @@
|
||||
name: filter[greaterThan]
|
||||
in: query
|
||||
description: 'Filter by any numeric property greater than a value. Usage: filter[greaterThan]=property:value'
|
||||
required: false
|
||||
schema:
|
||||
type: array
|
||||
items:
|
||||
type: string
|
||||
pattern: '^\w+:\d+'
|
||||
9
api/parameters/query/filterLessOrEqual.yml
Normal file
9
api/parameters/query/filterLessOrEqual.yml
Normal file
@@ -0,0 +1,9 @@
|
||||
name: filter[lessOrEqual]
|
||||
in: query
|
||||
description: 'Filter by any numeric property less than or equal to a value. Usage: filter[lessOrEqual]=property:value'
|
||||
required: false
|
||||
schema:
|
||||
type: array
|
||||
items:
|
||||
type: string
|
||||
pattern: '^\w+:\d+'
|
||||
9
api/parameters/query/filterLessThan.yml
Normal file
9
api/parameters/query/filterLessThan.yml
Normal file
@@ -0,0 +1,9 @@
|
||||
name: filter[lessThan]
|
||||
in: query
|
||||
description: 'Filter by any numeric property less than a value. Usage: filter[lessThan]=property:value'
|
||||
required: false
|
||||
schema:
|
||||
type: array
|
||||
items:
|
||||
type: string
|
||||
pattern: '^\w+:\d+'
|
||||
9
api/parameters/query/filterStartsWith.yml
Normal file
9
api/parameters/query/filterStartsWith.yml
Normal file
@@ -0,0 +1,9 @@
|
||||
name: filter[startsWith]
|
||||
in: query
|
||||
description: 'Filter by any property that starts with text. Usage: filter[startsWith]=property:value'
|
||||
required: false
|
||||
schema:
|
||||
type: array
|
||||
items:
|
||||
type: string
|
||||
pattern: '^\w+:\w+'
|
||||
6
api/parameters/query/include.yml
Normal file
6
api/parameters/query/include.yml
Normal file
@@ -0,0 +1,6 @@
|
||||
name: include
|
||||
in: query
|
||||
description: Related resources to include in the response, separated by commas
|
||||
required: false
|
||||
schema:
|
||||
type: string
|
||||
13
api/parameters/query/includeForAlbum.yml
Normal file
13
api/parameters/query/includeForAlbum.yml
Normal file
@@ -0,0 +1,13 @@
|
||||
name: include
|
||||
in: query
|
||||
description: Related resources to include in the response, separated by commas
|
||||
required: false
|
||||
explode: false
|
||||
schema:
|
||||
type: array
|
||||
x-go-type: includeSlice
|
||||
items:
|
||||
type: string
|
||||
enum:
|
||||
- track
|
||||
- artist
|
||||
12
api/parameters/query/includeForAlbums.yml
Normal file
12
api/parameters/query/includeForAlbums.yml
Normal file
@@ -0,0 +1,12 @@
|
||||
name: include
|
||||
in: query
|
||||
description: Related resources to include in the response, separated by commas
|
||||
required: false
|
||||
explode: false
|
||||
schema:
|
||||
type: array
|
||||
x-go-type: includeSlice
|
||||
items:
|
||||
type: string
|
||||
enum:
|
||||
- artist
|
||||
13
api/parameters/query/includeForTracks.yml
Normal file
13
api/parameters/query/includeForTracks.yml
Normal file
@@ -0,0 +1,13 @@
|
||||
name: include
|
||||
in: query
|
||||
description: Related resources to include in the response, separated by commas
|
||||
required: false
|
||||
explode: false
|
||||
schema:
|
||||
type: array
|
||||
x-go-type: includeSlice
|
||||
items:
|
||||
type: string
|
||||
enum:
|
||||
- album
|
||||
- artist
|
||||
9
api/parameters/query/pageLimit.yml
Normal file
9
api/parameters/query/pageLimit.yml
Normal file
@@ -0,0 +1,9 @@
|
||||
name: page[limit]
|
||||
in: query
|
||||
description: The number of items per page
|
||||
required: false
|
||||
schema:
|
||||
type: integer
|
||||
format: int32
|
||||
minimum: 0
|
||||
default: 10
|
||||
9
api/parameters/query/pageOffset.yml
Normal file
9
api/parameters/query/pageOffset.yml
Normal file
@@ -0,0 +1,9 @@
|
||||
name: page[offset]
|
||||
in: query
|
||||
description: The offset for pagination
|
||||
required: false
|
||||
schema:
|
||||
type: integer
|
||||
format: int32
|
||||
minimum: 0
|
||||
default: 0
|
||||
6
api/parameters/query/sort.yml
Normal file
6
api/parameters/query/sort.yml
Normal file
@@ -0,0 +1,6 @@
|
||||
name: sort
|
||||
in: query
|
||||
description: Sort the results by one or more properties, separated by commas. Prefix the property with '-' for descending order.
|
||||
required: false
|
||||
schema:
|
||||
type: string
|
||||
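The page, sort and filter parameters defined above are plain query-string parameters, so they can be combined freely on collection endpoints. A hypothetical example (the property names and values are illustrative only) of building such a request path in Go:

package main

import (
	"fmt"
	"net/url"
)

func main() {
	q := url.Values{}
	q.Set("page[limit]", "20")
	q.Set("page[offset]", "40")
	q.Set("sort", "-year,title")
	q.Add("filter[contains]", "title:love")
	q.Add("filter[greaterOrEqual]", "year:1990")

	// Brackets and colons are percent-encoded by Encode(); the server still sees the
	// "filter[...]=property:value" shape described in the parameter files above.
	fmt.Println("/api/v2/albums?" + q.Encode())
}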
33
api/resources/album.yml
Normal file
33
api/resources/album.yml
Normal file
@@ -0,0 +1,33 @@
|
||||
get:
|
||||
summary: Retrieve an individual album
|
||||
operationId: getAlbum
|
||||
parameters:
|
||||
- $ref: '../parameters/query/includeForAlbum.yml'
|
||||
- name: albumId
|
||||
in: path
|
||||
description: The unique identifier of the album
|
||||
required: true
|
||||
schema:
|
||||
type: string
|
||||
responses:
|
||||
'200':
|
||||
description: An album object
|
||||
content:
|
||||
application/vnd.api+json:
|
||||
schema:
|
||||
type: object
|
||||
required: [data]
|
||||
properties:
|
||||
data:
|
||||
$ref: '../schemas/Album.yml'
|
||||
included:
|
||||
description: Included resources, as requested by the `include` query parameter
|
||||
type: array
|
||||
items:
|
||||
$ref: '../schemas/IncludedResource.yml'
|
||||
'403':
|
||||
$ref: '../responses/NotAuthorized.yml'
|
||||
'404':
|
||||
$ref: '../responses/NotFound.yml'
|
||||
'500':
|
||||
$ref: '../responses/InternalServerError.yml'
|
||||
44
api/resources/albums.yml
Normal file
44
api/resources/albums.yml
Normal file
@@ -0,0 +1,44 @@
|
||||
get:
|
||||
summary: Retrieve a list of albums
|
||||
operationId: getAlbums
|
||||
parameters:
|
||||
- $ref: '../parameters/query/pageLimit.yml'
|
||||
- $ref: '../parameters/query/pageOffset.yml'
|
||||
- $ref: '../parameters/query/filterEquals.yml'
|
||||
- $ref: '../parameters/query/filterContains.yml'
|
||||
- $ref: '../parameters/query/filterLessThan.yml'
|
||||
- $ref: '../parameters/query/filterLessOrEqual.yml'
|
||||
- $ref: '../parameters/query/filterGreaterThan.yml'
|
||||
- $ref: '../parameters/query/filterGreaterOrEqual.yml'
|
||||
- $ref: '../parameters/query/filterStartsWith.yml'
|
||||
- $ref: '../parameters/query/filterEndsWith.yml'
|
||||
- $ref: '../parameters/query/sort.yml'
|
||||
- $ref: '../parameters/query/includeForAlbums.yml'
|
||||
responses:
|
||||
'200':
|
||||
description: A list of albums
|
||||
content:
|
||||
application/vnd.api+json:
|
||||
schema:
|
||||
type: object
|
||||
required: [data, links]
|
||||
properties:
|
||||
data:
|
||||
type: array
|
||||
items:
|
||||
$ref: '../schemas/Album.yml'
|
||||
links:
|
||||
$ref: '../schemas/PaginationLinks.yml'
|
||||
meta:
|
||||
$ref: '../schemas/PaginationMeta.yml'
|
||||
included:
|
||||
description: Included resources, as requested by the `include` query parameter
|
||||
type: array
|
||||
items:
|
||||
$ref: '../schemas/IncludedResource.yml'
|
||||
'400':
|
||||
$ref: '../responses/BadRequest.yml'
|
||||
'403':
|
||||
$ref: '../responses/NotAuthorized.yml'
|
||||
'500':
|
||||
$ref: '../responses/InternalServerError.yml'
|
||||
28
api/resources/artist.yml
Normal file
28
api/resources/artist.yml
Normal file
@@ -0,0 +1,28 @@
|
||||
get:
|
||||
summary: Retrieve an individual artist
|
||||
operationId: getArtist
|
||||
parameters:
|
||||
- $ref: '../parameters/query/include.yml'
|
||||
- name: artistId
|
||||
in: path
|
||||
description: The unique identifier of the artist
|
||||
required: true
|
||||
schema:
|
||||
type: string
|
||||
responses:
|
||||
'200':
|
||||
description: An artist object
|
||||
content:
|
||||
application/vnd.api+json:
|
||||
schema:
|
||||
type: object
|
||||
required: [data]
|
||||
properties:
|
||||
data:
|
||||
$ref: '../schemas/Artist.yml'
|
||||
'403':
|
||||
$ref: '../responses/NotAuthorized.yml'
|
||||
'404':
|
||||
$ref: '../responses/NotFound.yml'
|
||||
'500':
|
||||
$ref: '../responses/InternalServerError.yml'
|
||||
39
api/resources/artists.yml
Normal file
39
api/resources/artists.yml
Normal file
@@ -0,0 +1,39 @@
|
||||
get:
|
||||
summary: Retrieve a list of artists
|
||||
operationId: getArtists
|
||||
parameters:
|
||||
- $ref: '../parameters/query/pageLimit.yml'
|
||||
- $ref: '../parameters/query/pageOffset.yml'
|
||||
- $ref: '../parameters/query/filterEquals.yml'
|
||||
- $ref: '../parameters/query/filterContains.yml'
|
||||
- $ref: '../parameters/query/filterLessThan.yml'
|
||||
- $ref: '../parameters/query/filterLessOrEqual.yml'
|
||||
- $ref: '../parameters/query/filterGreaterThan.yml'
|
||||
- $ref: '../parameters/query/filterGreaterOrEqual.yml'
|
||||
- $ref: '../parameters/query/filterStartsWith.yml'
|
||||
- $ref: '../parameters/query/filterEndsWith.yml'
|
||||
- $ref: '../parameters/query/sort.yml'
|
||||
- $ref: '../parameters/query/include.yml'
|
||||
responses:
|
||||
'200':
|
||||
description: A list of artists
|
||||
content:
|
||||
application/vnd.api+json:
|
||||
schema:
|
||||
type: object
|
||||
required: [data, links]
|
||||
properties:
|
||||
data:
|
||||
type: array
|
||||
items:
|
||||
$ref: '../schemas/Artist.yml'
|
||||
links:
|
||||
$ref: '../schemas/PaginationLinks.yml'
|
||||
meta:
|
||||
$ref: '../schemas/PaginationMeta.yml'
|
||||
'400':
|
||||
$ref: '../responses/BadRequest.yml'
|
||||
'403':
|
||||
$ref: '../responses/NotAuthorized.yml'
|
||||
'500':
|
||||
$ref: '../responses/InternalServerError.yml'
|
||||
18
api/resources/server.yml
Normal file
18
api/resources/server.yml
Normal file
@@ -0,0 +1,18 @@
|
||||
get:
|
||||
summary: Get server's global info
|
||||
operationId: getServerInfo
|
||||
responses:
|
||||
'200':
|
||||
description: The response’s data key maps to a resource object dictionary representing the server.
|
||||
content:
|
||||
application/vnd.api+json:
|
||||
schema:
|
||||
type: object
|
||||
required: [data]
|
||||
properties:
|
||||
data:
|
||||
$ref: '../schemas/ServerInfo.yml'
|
||||
'403':
|
||||
$ref: '../responses/NotAuthorized.yml'
|
||||
'500':
|
||||
$ref: '../responses/InternalServerError.yml'
|
||||
33
api/resources/track.yml
Normal file
33
api/resources/track.yml
Normal file
@@ -0,0 +1,33 @@
|
||||
get:
|
||||
summary: Retrieve an individual track
|
||||
operationId: getTrack
|
||||
parameters:
|
||||
- $ref: '../parameters/query/includeForTracks.yml'
|
||||
- name: trackId
|
||||
in: path
|
||||
description: The unique identifier of the track
|
||||
required: true
|
||||
schema:
|
||||
type: string
|
||||
responses:
|
||||
'200':
|
||||
description: A track object
|
||||
content:
|
||||
application/vnd.api+json:
|
||||
schema:
|
||||
type: object
|
||||
required: [data]
|
||||
properties:
|
||||
data:
|
||||
$ref: '../schemas/Track.yml'
|
||||
included:
|
||||
description: Included resources, as requested by the `include` query parameter
|
||||
type: array
|
||||
items:
|
||||
$ref: '../schemas/IncludedResource.yml'
|
||||
'403':
|
||||
$ref: '../responses/NotAuthorized.yml'
|
||||
'404':
|
||||
$ref: '../responses/NotFound.yml'
|
||||
'500':
|
||||
$ref: '../responses/InternalServerError.yml'
|
||||
44
api/resources/tracks.yml
Normal file
44
api/resources/tracks.yml
Normal file
@@ -0,0 +1,44 @@
|
||||
get:
|
||||
summary: Retrieve a list of tracks
|
||||
operationId: getTracks
|
||||
parameters:
|
||||
- $ref: '../parameters/query/pageLimit.yml'
|
||||
- $ref: '../parameters/query/pageOffset.yml'
|
||||
- $ref: '../parameters/query/filterEquals.yml'
|
||||
- $ref: '../parameters/query/filterContains.yml'
|
||||
- $ref: '../parameters/query/filterLessThan.yml'
|
||||
- $ref: '../parameters/query/filterLessOrEqual.yml'
|
||||
- $ref: '../parameters/query/filterGreaterThan.yml'
|
||||
- $ref: '../parameters/query/filterGreaterOrEqual.yml'
|
||||
- $ref: '../parameters/query/filterStartsWith.yml'
|
||||
- $ref: '../parameters/query/filterEndsWith.yml'
|
||||
- $ref: '../parameters/query/sort.yml'
|
||||
- $ref: '../parameters/query/includeForTracks.yml'
|
||||
responses:
|
||||
'200':
|
||||
description: A list of tracks
|
||||
content:
|
||||
application/vnd.api+json:
|
||||
schema:
|
||||
type: object
|
||||
required: [data, links]
|
||||
properties:
|
||||
data:
|
||||
type: array
|
||||
items:
|
||||
$ref: '../schemas/Track.yml'
|
||||
links:
|
||||
$ref: '../schemas/PaginationLinks.yml'
|
||||
meta:
|
||||
$ref: '../schemas/PaginationMeta.yml'
|
||||
included:
|
||||
description: Included resources, as requested by the `include` query parameter
|
||||
type: array
|
||||
items:
|
||||
$ref: '../schemas/IncludedResource.yml'
|
||||
'400':
|
||||
$ref: '../responses/BadRequest.yml'
|
||||
'403':
|
||||
$ref: '../responses/NotAuthorized.yml'
|
||||
'500':
|
||||
$ref: '../responses/InternalServerError.yml'
|
||||
5
api/responses/BadRequest.yml
Normal file
5
api/responses/BadRequest.yml
Normal file
@@ -0,0 +1,5 @@
|
||||
description: Bad Request
|
||||
content:
|
||||
application/vnd.api+json:
|
||||
schema:
|
||||
$ref: '../schemas/ErrorList.yml'
|
||||
5
api/responses/InternalServerError.yml
Normal file
5
api/responses/InternalServerError.yml
Normal file
@@ -0,0 +1,5 @@
|
||||
description: Internal Server Error
|
||||
content:
|
||||
application/vnd.api+json:
|
||||
schema:
|
||||
$ref: '../schemas/ErrorList.yml'
|
||||
5
api/responses/NotAuthorized.yml
Normal file
5
api/responses/NotAuthorized.yml
Normal file
@@ -0,0 +1,5 @@
|
||||
description: Not Authorized
|
||||
content:
|
||||
application/vnd.api+json:
|
||||
schema:
|
||||
$ref: '../schemas/ErrorList.yml'
|
||||
5
api/responses/NotFound.yml
Normal file
5
api/responses/NotFound.yml
Normal file
@@ -0,0 +1,5 @@
|
||||
description: Not Found
|
||||
content:
|
||||
application/vnd.api+json:
|
||||
schema:
|
||||
$ref: '../schemas/ErrorList.yml'
|
||||
8
api/responses/_index.yml
Normal file
8
api/responses/_index.yml
Normal file
@@ -0,0 +1,8 @@
|
||||
NotFound:
|
||||
$ref: './NotFound.yml'
|
||||
NotAuthorized:
|
||||
$ref: './NotAuthorized.yml'
|
||||
BadRequest:
|
||||
$ref: './BadRequest.yml'
|
||||
InternalServerError:
|
||||
$ref: './InternalServerError.yml'
|
||||
20
api/schemas/Album.yml
Normal file
20
api/schemas/Album.yml
Normal file
@@ -0,0 +1,20 @@
|
||||
allOf:
|
||||
- $ref: './ResourceObject.yml'
|
||||
- type: object
|
||||
properties:
|
||||
attributes:
|
||||
$ref: './AlbumAttributes.yml'
|
||||
relationships:
|
||||
type: object
|
||||
properties:
|
||||
artists:
|
||||
type: array
|
||||
items:
|
||||
$ref: './AlbumArtistRelationship.yml'
|
||||
tracks:
|
||||
type: array
|
||||
items:
|
||||
$ref: './AlbumTrackRelationship.yml'
|
||||
required:
|
||||
- artists
|
||||
- tracks
|
||||
9
api/schemas/AlbumArtistRelationship.yml
Normal file
9
api/schemas/AlbumArtistRelationship.yml
Normal file
@@ -0,0 +1,9 @@
|
||||
type: object
|
||||
properties:
|
||||
meta:
|
||||
$ref: './ArtistMetaObject.yml'
|
||||
data:
|
||||
$ref: './ResourceObject.yml'
|
||||
required:
|
||||
- meta
|
||||
- data
|
||||
23
api/schemas/AlbumAttributes.yml
Normal file
23
api/schemas/AlbumAttributes.yml
Normal file
@@ -0,0 +1,23 @@
|
||||
type: object
|
||||
properties:
|
||||
title:
|
||||
type: string
|
||||
description: The title of the album
|
||||
artist:
|
||||
type: string
|
||||
description: The artist of the album
|
||||
releaseDate:
|
||||
type: string
|
||||
description: The release date of the album
|
||||
tracktotal:
|
||||
type: integer
|
||||
description: The number of tracks on the album
|
||||
disctotal:
|
||||
type: integer
|
||||
description: The number of discs in the album
|
||||
genre:
|
||||
type: string
|
||||
description: The genre of the album
|
||||
required:
|
||||
- title
|
||||
- artist
|
||||
6
api/schemas/AlbumTrackRelationship.yml
Normal file
6
api/schemas/AlbumTrackRelationship.yml
Normal file
@@ -0,0 +1,6 @@
|
||||
type: object
|
||||
properties:
|
||||
data:
|
||||
$ref: './ResourceObject.yml'
|
||||
required:
|
||||
- data
|
||||
27
api/schemas/Artist.yml
Normal file
27
api/schemas/Artist.yml
Normal file
@@ -0,0 +1,27 @@
|
||||
allOf:
|
||||
- $ref: './ResourceObject.yml'
|
||||
- type: object
|
||||
properties:
|
||||
attributes:
|
||||
$ref: './ArtistAttributes.yml'
|
||||
relationships:
|
||||
type: object
|
||||
properties:
|
||||
tracks:
|
||||
type: object
|
||||
properties:
|
||||
data:
|
||||
type: array
|
||||
items:
|
||||
$ref: './ArtistTrackRelationship.yml'
|
||||
required:
|
||||
- data
|
||||
albums:
|
||||
type: object
|
||||
properties:
|
||||
data:
|
||||
type: array
|
||||
items:
|
||||
$ref: './ArtistAlbumRelationship.yml'
|
||||
required:
|
||||
- data
|
||||
9
api/schemas/ArtistAlbumRelationship.yml
Normal file
9
api/schemas/ArtistAlbumRelationship.yml
Normal file
@@ -0,0 +1,9 @@
|
||||
type: object
|
||||
properties:
|
||||
meta:
|
||||
$ref: './ArtistMetaObject.yml'
|
||||
data:
|
||||
$ref: './ResourceObject.yml'
|
||||
required:
|
||||
- meta
|
||||
- data
|
||||
10
api/schemas/ArtistAttributes.yml
Normal file
10
api/schemas/ArtistAttributes.yml
Normal file
@@ -0,0 +1,10 @@
|
||||
type: object
|
||||
properties:
|
||||
name:
|
||||
type: string
|
||||
description: The name of the artist
|
||||
bio:
|
||||
type: string
|
||||
description: A short biography of the artist
|
||||
required:
|
||||
- name
|
||||
6
api/schemas/ArtistMetaObject.yml
Normal file
6
api/schemas/ArtistMetaObject.yml
Normal file
@@ -0,0 +1,6 @@
|
||||
type: object
|
||||
properties:
|
||||
role:
|
||||
$ref: './ArtistRole.yml'
|
||||
required:
|
||||
- role
|
||||
5
api/schemas/ArtistRole.yml
Normal file
5
api/schemas/ArtistRole.yml
Normal file
@@ -0,0 +1,5 @@
|
||||
type: string
|
||||
enum:
|
||||
- artist
|
||||
- albumArtist
|
||||
description: The role of an artist in a track or album
|
||||
14
api/schemas/ArtistTrackRelationship.yml
Normal file
14
api/schemas/ArtistTrackRelationship.yml
Normal file
@@ -0,0 +1,14 @@
|
||||
type: object
|
||||
properties:
|
||||
meta:
|
||||
type: object
|
||||
properties:
|
||||
role:
|
||||
$ref: './ArtistRole.yml'
|
||||
required:
|
||||
- role
|
||||
data:
|
||||
$ref: './ResourceObject.yml'
|
||||
required:
|
||||
- meta
|
||||
- data
|
||||
7
api/schemas/ErrorList.yml
Normal file
7
api/schemas/ErrorList.yml
Normal file
@@ -0,0 +1,7 @@
|
||||
type: object
|
||||
required: [errors]
|
||||
properties:
|
||||
errors:
|
||||
type: array
|
||||
items:
|
||||
$ref: './ErrorObject.yml'
|
||||
10
api/schemas/ErrorObject.yml
Normal file
10
api/schemas/ErrorObject.yml
Normal file
@@ -0,0 +1,10 @@
|
||||
type: object
|
||||
properties:
|
||||
id:
|
||||
type: string
|
||||
status:
|
||||
type: string
|
||||
title:
|
||||
type: string
|
||||
detail:
|
||||
type: string
|
||||
10
api/schemas/IncludedResource.yml
Normal file
10
api/schemas/IncludedResource.yml
Normal file
@@ -0,0 +1,10 @@
|
||||
oneOf:
|
||||
- $ref: './Track.yml'
|
||||
- $ref: './Album.yml'
|
||||
- $ref: './Artist.yml'
|
||||
discriminator:
|
||||
propertyName: type
|
||||
mapping:
|
||||
track: './Track.yml'
|
||||
album: './Album.yml'
|
||||
artist: './Artist.yml'
|
||||
14
api/schemas/PaginationLinks.yml
Normal file
14
api/schemas/PaginationLinks.yml
Normal file
@@ -0,0 +1,14 @@
|
||||
type: object
|
||||
properties:
|
||||
first:
|
||||
type: string
|
||||
format: uri
|
||||
prev:
|
||||
type: string
|
||||
format: uri
|
||||
next:
|
||||
type: string
|
||||
format: uri
|
||||
last:
|
||||
type: string
|
||||
format: uri
|
||||
14
api/schemas/PaginationMeta.yml
Normal file
14
api/schemas/PaginationMeta.yml
Normal file
@@ -0,0 +1,14 @@
|
||||
type: object
|
||||
properties:
|
||||
currentPage:
|
||||
type: integer
|
||||
format: int32
|
||||
description: The current page in the collection
|
||||
totalPages:
|
||||
type: integer
|
||||
format: int32
|
||||
description: The total number of pages in the collection
|
||||
totalItems:
|
||||
type: integer
|
||||
format: int32
|
||||
description: The total number of items in the collection
|
||||
18
api/schemas/ResourceList.yml
Normal file
18
api/schemas/ResourceList.yml
Normal file
@@ -0,0 +1,18 @@
|
||||
type: object
|
||||
properties:
|
||||
data:
|
||||
oneOf:
|
||||
- $ref: './Track.yml'
|
||||
- $ref: './Album.yml'
|
||||
- $ref: './Artist.yml'
|
||||
- type: array
|
||||
items:
|
||||
$ref: './ResourceObject.yml'
|
||||
included:
|
||||
type: array
|
||||
items:
|
||||
$ref: './IncludedResource.yml'
|
||||
links:
|
||||
$ref: './PaginationLinks.yml'
|
||||
meta:
|
||||
$ref: './PaginationMeta.yml'
|
||||
8
api/schemas/ResourceObject.yml
Normal file
8
api/schemas/ResourceObject.yml
Normal file
@@ -0,0 +1,8 @@
|
||||
type: object
|
||||
required: [id, type]
|
||||
properties:
|
||||
id:
|
||||
type: string
|
||||
description: The unique identifier for the resource
|
||||
type:
|
||||
$ref: './ResourceType.yml'
|
||||
6
api/schemas/ResourceType.yml
Normal file
6
api/schemas/ResourceType.yml
Normal file
@@ -0,0 +1,6 @@
|
||||
type: string
|
||||
description: The type of the resource
|
||||
enum:
|
||||
- album
|
||||
- artist
|
||||
- track
|
||||
24
api/schemas/ServerInfo.yml
Normal file
24
api/schemas/ServerInfo.yml
Normal file
@@ -0,0 +1,24 @@
|
||||
type: object
|
||||
required: [server, serverVersion, authRequired, features]
|
||||
properties:
|
||||
server:
|
||||
type: string
|
||||
description: The name of the server software.
|
||||
example: "navidrome"
|
||||
serverVersion:
|
||||
type: string
|
||||
description: The version number of the server.
|
||||
example: "0.60.0"
|
||||
authRequired:
|
||||
type: boolean
|
||||
description: Whether authentication is required to access the server.
|
||||
example: true
|
||||
features:
|
||||
type: array
|
||||
description: A list of optional features the server supports.
|
||||
items:
|
||||
type: string
|
||||
enum:
|
||||
- albums
|
||||
- artists
|
||||
- images
|
||||
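For reference, a document conforming to the ServerInfo schema above could look like the output of this sketch. The values are taken from the schema's own examples, and the surrounding shape follows server.yml, where the response's data key maps directly to the ServerInfo object:

package main

import (
	"encoding/json"
	"fmt"
)

func main() {
	// "data" maps directly to the ServerInfo object, per api/resources/server.yml.
	doc := map[string]any{
		"data": map[string]any{
			"server":        "navidrome",
			"serverVersion": "0.60.0",
			"authRequired":  true,
			"features":      []string{"albums", "artists", "images"},
		},
	}
	out, _ := json.MarshalIndent(doc, "", "  ")
	fmt.Println(string(out))
}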
8
api/schemas/Track.yml
Normal file
8
api/schemas/Track.yml
Normal file
@@ -0,0 +1,8 @@
|
||||
allOf:
|
||||
- $ref: './ResourceObject.yml'
|
||||
- type: object
|
||||
properties:
|
||||
attributes:
|
||||
$ref: './TrackAttributes.yml'
|
||||
relationships:
|
||||
$ref: './TrackRelationships.yml'
|
||||
9
api/schemas/TrackArtistRelationship.yml
Normal file
9
api/schemas/TrackArtistRelationship.yml
Normal file
@@ -0,0 +1,9 @@
|
||||
type: object
|
||||
properties:
|
||||
meta:
|
||||
$ref: './ArtistMetaObject.yml'
|
||||
data:
|
||||
$ref: './ResourceObject.yml'
|
||||
required:
|
||||
- meta
|
||||
- data
|
||||
55
api/schemas/TrackAttributes.yml
Normal file
55
api/schemas/TrackAttributes.yml
Normal file
@@ -0,0 +1,55 @@
|
||||
type: object
|
||||
required: [title, artist, album, albumartist, track, mimetype, duration, channels, bitrate, size]
|
||||
properties:
|
||||
title:
|
||||
type: string
|
||||
description: The title of the track
|
||||
artist: # TODO: Remove
|
||||
type: string
|
||||
description: The name of the artist who performed the track
|
||||
albumartist: # TODO: Remove
|
||||
type: string
|
||||
description: The primary artist of the album the track belongs to.
|
||||
album: # TODO Remove
|
||||
type: string
|
||||
description: The name of the album the track belongs to
|
||||
genre: # TODO Remove
|
||||
type: string
|
||||
description: The genre of the track.
|
||||
track:
|
||||
type: integer
|
||||
description: The track number within the album.
|
||||
disc:
|
||||
type: integer
|
||||
description: The disc number within a multi-disc album.
|
||||
year:
|
||||
type: integer
|
||||
description: The release year of the track or album.
|
||||
bpm:
|
||||
type: integer
|
||||
description: The beats per minute (BPM) of the track.
|
||||
recording-mbid:
|
||||
type: string
|
||||
description: The MusicBrainz identifier for the recording of the track.
|
||||
track-mbid:
|
||||
type: string
|
||||
description: The MusicBrainz identifier for the track.
|
||||
comments:
|
||||
type: string
|
||||
description: Any additional comments or notes about the track.
|
||||
mimetype:
|
||||
type: string
|
||||
description: The MIME type of the audio file.
|
||||
duration:
|
||||
type: number
|
||||
format: float
|
||||
description: The duration of the track in seconds
|
||||
channels:
|
||||
type: integer
|
||||
description: The number of audio channels in the track.
|
||||
bitrate:
|
||||
type: integer
|
||||
description: The bitrate of the audio file in kilobits per second (kbps).
|
||||
size:
|
||||
type: integer
|
||||
description: The size of the audio file in bytes.
|
||||
12
api/schemas/TrackRelationships.yml
Normal file
12
api/schemas/TrackRelationships.yml
Normal file
@@ -0,0 +1,12 @@
|
||||
type: object
|
||||
properties:
|
||||
artists:
|
||||
type: array
|
||||
items:
|
||||
$ref: './TrackArtistRelationship.yml'
|
||||
albums:
|
||||
type: array
|
||||
items:
|
||||
$ref: './AlbumTrackRelationship.yml'
|
||||
required:
|
||||
- artists
|
||||
46
api/schemas/_index.yml
Normal file
46
api/schemas/_index.yml
Normal file
@@ -0,0 +1,46 @@
|
||||
ServerInfo:
|
||||
$ref: './ServerInfo.yml'
|
||||
ResourceObject:
|
||||
$ref: './ResourceObject.yml'
|
||||
ResourceType:
|
||||
$ref: './ResourceType.yml'
|
||||
ResourceList:
|
||||
$ref: './ResourceList.yml'
|
||||
IncludedResource:
|
||||
$ref: './IncludedResource.yml'
|
||||
Track:
|
||||
$ref: './Track.yml'
|
||||
TrackAttributes:
|
||||
$ref: './TrackAttributes.yml'
|
||||
TrackRelationships:
|
||||
$ref: './TrackRelationships.yml'
|
||||
TrackArtistRelationship:
|
||||
$ref: './TrackArtistRelationship.yml'
|
||||
ArtistRole:
|
||||
$ref: './ArtistRole.yml'
|
||||
Artist:
|
||||
$ref: './Artist.yml'
|
||||
ArtistAttributes:
|
||||
$ref: './ArtistAttributes.yml'
|
||||
ArtistAlbumRelationship:
|
||||
$ref: './ArtistAlbumRelationship.yml'
|
||||
ArtistTrackRelationship:
|
||||
$ref: './ArtistTrackRelationship.yml'
|
||||
ArtistMetaObject:
|
||||
$ref: './ArtistMetaObject.yml'
|
||||
Album:
|
||||
$ref: './Album.yml'
|
||||
AlbumAttributes:
|
||||
$ref: './AlbumAttributes.yml'
|
||||
AlbumArtistRelationship:
|
||||
$ref: './AlbumArtistRelationship.yml'
|
||||
AlbumTrackRelationship:
|
||||
$ref: './AlbumTrackRelationship.yml'
|
||||
PaginationLinks:
|
||||
$ref: './PaginationLinks.yml'
|
||||
PaginationMeta:
|
||||
$ref: './PaginationMeta.yml'
|
||||
ErrorList:
|
||||
$ref: './ErrorList.yml'
|
||||
ErrorObject:
|
||||
$ref: './ErrorObject.yml'
|
||||
52
api/spec.yml
Normal file
52
api/spec.yml
Normal file
@@ -0,0 +1,52 @@
|
||||
openapi: 3.0.0
|
||||
info:
|
||||
version: 0.2.0
|
||||
title: Navidrome API
|
||||
description: >
|
||||
This spec describes the Navidrome API, which allows users to browse and manage their music library via a JSON:API
|
||||
based interface. The API provides endpoints for albums, tracks, artists, playlists and images, along with their
|
||||
relationships. Clients can retrieve information about the items in the library, filter and sort results, and
|
||||
perform actions such as creating and deleting playlists. With this API, developers can build music apps and
|
||||
services that integrate with the Navidrome music server, providing a seamless experience for users to access and
|
||||
manage their music collection.
|
||||
contact:
|
||||
name: Navidrome
|
||||
url: https://navidrome.org
|
||||
license:
|
||||
name: GNU General Public License v3.0
|
||||
url: https://github.com/navidrome/navidrome/blob/master/LICENSE
|
||||
|
||||
servers:
|
||||
- url: /api/v2
|
||||
|
||||
security:
|
||||
- bearerAuth: []
|
||||
|
||||
paths:
|
||||
/server:
|
||||
$ref: './resources/server.yml'
|
||||
/tracks:
|
||||
$ref: './resources/tracks.yml'
|
||||
/tracks/{trackId}:
|
||||
$ref: './resources/track.yml'
|
||||
/artists:
|
||||
$ref: './resources/artists.yml'
|
||||
/artists/{artistId}:
|
||||
$ref: './resources/artist.yml'
|
||||
/albums:
|
||||
$ref: './resources/albums.yml'
|
||||
/albums/{albumId}:
|
||||
$ref: './resources/album.yml'
|
||||
|
||||
components:
|
||||
parameters:
|
||||
$ref: './parameters/_index.yml'
|
||||
schemas:
|
||||
$ref: './schemas/_index.yml'
|
||||
responses:
|
||||
$ref: './responses/_index.yml'
|
||||
securitySchemes:
|
||||
bearerAuth:
|
||||
type: http
|
||||
scheme: bearer
|
||||
bearerFormat: JWT
|
||||
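The spec above declares a single bearerAuth scheme (JWT) and serves everything under /api/v2. A hypothetical client call illustrating how a request would be authenticated; the host, port and token are placeholders, with 4533 assumed to be the default Navidrome port:

package main

import (
	"fmt"
	"io"
	"net/http"
)

func main() {
	req, err := http.NewRequest(http.MethodGet, "http://localhost:4533/api/v2/server", nil)
	if err != nil {
		panic(err)
	}
	req.Header.Set("Authorization", "Bearer <JWT>")      // bearerAuth: http bearer, JWT format
	req.Header.Set("Accept", "application/vnd.api+json") // JSON:API media type used by the spec

	resp, err := http.DefaultClient.Do(req)
	if err != nil {
		panic(err)
	}
	defer resp.Body.Close()

	body, _ := io.ReadAll(resp.Body)
	fmt.Println(resp.Status, string(body))
}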
186
cmd/backup.go
186
cmd/backup.go
@@ -1,186 +0,0 @@
|
||||
package cmd
|
||||
|
||||
import (
|
||||
"context"
|
||||
"fmt"
|
||||
"os"
|
||||
"strings"
|
||||
"time"
|
||||
|
||||
"github.com/navidrome/navidrome/conf"
|
||||
"github.com/navidrome/navidrome/db"
|
||||
"github.com/navidrome/navidrome/log"
|
||||
"github.com/spf13/cobra"
|
||||
)
|
||||
|
||||
var (
|
||||
backupCount int
|
||||
backupDir string
|
||||
force bool
|
||||
restorePath string
|
||||
)
|
||||
|
||||
func init() {
|
||||
rootCmd.AddCommand(backupRoot)
|
||||
|
||||
backupCmd.Flags().StringVarP(&backupDir, "backup-dir", "d", "", "directory to manually make backup")
|
||||
backupRoot.AddCommand(backupCmd)
|
||||
|
||||
pruneCmd.Flags().StringVarP(&backupDir, "backup-dir", "d", "", "directory holding Navidrome backups")
|
||||
pruneCmd.Flags().IntVarP(&backupCount, "keep-count", "k", -1, "specify the number of backups to keep. 0 remove ALL backups, and negative values mean to use the default from configuration")
|
||||
pruneCmd.Flags().BoolVarP(&force, "force", "f", false, "bypass warning when backup count is zero")
|
||||
backupRoot.AddCommand(pruneCmd)
|
||||
|
||||
restoreCommand.Flags().StringVarP(&restorePath, "backup-file", "b", "", "path of backup database to restore")
|
||||
restoreCommand.Flags().BoolVarP(&force, "force", "f", false, "bypass restore warning")
|
||||
_ = restoreCommand.MarkFlagRequired("backup-file")
|
||||
backupRoot.AddCommand(restoreCommand)
|
||||
}
|
||||
|
||||
var (
|
||||
backupRoot = &cobra.Command{
|
||||
Use: "backup",
|
||||
Aliases: []string{"bkp"},
|
||||
Short: "Create, restore and prune database backups",
|
||||
Long: "Create, restore and prune database backups",
|
||||
}
|
||||
|
||||
backupCmd = &cobra.Command{
|
||||
Use: "create",
|
||||
Short: "Create a backup database",
|
||||
Long: "Manually backup Navidrome database. This will ignore BackupCount",
|
||||
Run: func(cmd *cobra.Command, _ []string) {
|
||||
runBackup(cmd.Context())
|
||||
},
|
||||
}
|
||||
|
||||
pruneCmd = &cobra.Command{
|
||||
Use: "prune",
|
||||
Short: "Prune database backups",
|
||||
Long: "Manually prune database backups according to backup rules",
|
||||
Run: func(cmd *cobra.Command, _ []string) {
|
||||
runPrune(cmd.Context())
|
||||
},
|
||||
}
|
||||
|
||||
restoreCommand = &cobra.Command{
|
||||
Use: "restore",
|
||||
Short: "Restore Navidrome database",
|
||||
Long: "Restore Navidrome database from a backup. This must be done offline",
|
||||
Run: func(cmd *cobra.Command, _ []string) {
|
||||
runRestore(cmd.Context())
|
||||
},
|
||||
}
|
||||
)
|
||||
|
||||
func runBackup(ctx context.Context) {
|
||||
if backupDir != "" {
|
||||
conf.Server.Backup.Path = backupDir
|
||||
}
|
||||
|
||||
idx := strings.LastIndex(conf.Server.DbPath, "?")
|
||||
var path string
|
||||
|
||||
if idx == -1 {
|
||||
path = conf.Server.DbPath
|
||||
} else {
|
||||
path = conf.Server.DbPath[:idx]
|
||||
}
|
||||
|
||||
if _, err := os.Stat(path); os.IsNotExist(err) {
|
||||
log.Fatal("No existing database", "path", path)
|
||||
return
|
||||
}
|
||||
|
||||
start := time.Now()
|
||||
path, err := db.Backup(ctx)
|
||||
if err != nil {
|
||||
log.Fatal("Error backing up database", "backup path", conf.Server.BasePath, err)
|
||||
}
|
||||
|
||||
elapsed := time.Since(start)
|
||||
log.Info("Backup complete", "elapsed", elapsed, "path", path)
|
||||
}
|
||||
|
||||
func runPrune(ctx context.Context) {
|
||||
if backupDir != "" {
|
||||
conf.Server.Backup.Path = backupDir
|
||||
}
|
||||
|
||||
if backupCount != -1 {
|
||||
conf.Server.Backup.Count = backupCount
|
||||
}
|
||||
|
||||
if conf.Server.Backup.Count == 0 && !force {
|
||||
fmt.Println("Warning: pruning ALL backups")
|
||||
fmt.Printf("Please enter YES (all caps) to continue: ")
|
||||
var input string
|
||||
_, err := fmt.Scanln(&input)
|
||||
|
||||
if input != "YES" || err != nil {
|
||||
log.Warn("Prune cancelled")
|
||||
return
|
||||
}
|
||||
}
|
||||
|
||||
idx := strings.LastIndex(conf.Server.DbPath, "?")
|
||||
var path string
|
||||
|
||||
if idx == -1 {
|
||||
path = conf.Server.DbPath
|
||||
} else {
|
||||
path = conf.Server.DbPath[:idx]
|
||||
}
|
||||
|
||||
if _, err := os.Stat(path); os.IsNotExist(err) {
|
||||
log.Fatal("No existing database", "path", path)
|
||||
return
|
||||
}
|
||||
|
||||
start := time.Now()
|
||||
count, err := db.Prune(ctx)
|
||||
if err != nil {
|
||||
log.Fatal("Error pruning up database", "backup path", conf.Server.BasePath, err)
|
||||
}
|
||||
|
||||
elapsed := time.Since(start)
|
||||
|
||||
log.Info("Prune complete", "elapsed", elapsed, "successfully pruned", count)
|
||||
}
|
||||
|
||||
func runRestore(ctx context.Context) {
	idx := strings.LastIndex(conf.Server.DbPath, "?")
	var path string

	if idx == -1 {
		path = conf.Server.DbPath
	} else {
		path = conf.Server.DbPath[:idx]
	}

	if _, err := os.Stat(path); os.IsNotExist(err) {
		log.Fatal("No existing database", "path", path)
		return
	}

	if !force {
		fmt.Println("Warning: restoring the Navidrome database should only be done offline, especially if your backup is very old.")
		fmt.Printf("Please enter YES (all caps) to continue: ")
		var input string
		_, err := fmt.Scanln(&input)

		if input != "YES" || err != nil {
			log.Warn("Restore cancelled")
			return
		}
	}

	start := time.Now()
	err := db.Restore(ctx, restorePath)
	if err != nil {
		log.Fatal("Error restoring database", "backup path", conf.Server.BasePath, err)
	}

	elapsed := time.Since(start)
	log.Info("Restore complete", "elapsed", elapsed)
}
@@ -1,17 +0,0 @@
package cmd

import (
	"testing"

	"github.com/navidrome/navidrome/log"
	"github.com/navidrome/navidrome/tests"
	. "github.com/onsi/ginkgo/v2"
	. "github.com/onsi/gomega"
)

func TestCmd(t *testing.T) {
	tests.Init(t, false)
	log.SetLevel(log.LevelFatal)
	RegisterFailHandler(Fail)
	RunSpecs(t, "Cmd Suite")
}
@@ -1,79 +0,0 @@
package cmd

import (
	"encoding/json"
	"fmt"
	"strings"

	"github.com/navidrome/navidrome/core"
	"github.com/navidrome/navidrome/log"
	"github.com/navidrome/navidrome/model"
	"github.com/pelletier/go-toml/v2"
	"github.com/spf13/cobra"
	"gopkg.in/yaml.v3"
)

var (
	format string
)

func init() {
	inspectCmd.Flags().StringVarP(&format, "format", "f", "jsonindent", "output format (pretty, toml, yaml, json, jsonindent)")
	rootCmd.AddCommand(inspectCmd)
}

var inspectCmd = &cobra.Command{
	Use:   "inspect [files to inspect]",
	Short: "Inspect tags",
	Long:  "Show file tags as seen by Navidrome",
	Args:  cobra.MinimumNArgs(1),
	Run: func(cmd *cobra.Command, args []string) {
		runInspector(args)
	},
}

var marshalers = map[string]func(interface{}) ([]byte, error){
	"pretty":     prettyMarshal,
	"toml":       toml.Marshal,
	"yaml":       yaml.Marshal,
	"json":       json.Marshal,
	"jsonindent": func(v interface{}) ([]byte, error) {
		return json.MarshalIndent(v, "", " ")
	},
}

func prettyMarshal(v interface{}) ([]byte, error) {
	out := v.([]core.InspectOutput)
	var res strings.Builder
	for i := range out {
		res.WriteString(fmt.Sprintf("====================\nFile: %s\n\n", out[i].File))
		t, _ := toml.Marshal(out[i].RawTags)
		res.WriteString(fmt.Sprintf("Raw tags:\n%s\n\n", t))
		t, _ = toml.Marshal(out[i].MappedTags)
		res.WriteString(fmt.Sprintf("Mapped tags:\n%s\n\n", t))
	}
	return []byte(res.String()), nil
}

func runInspector(args []string) {
	marshal := marshalers[format]
	if marshal == nil {
		log.Fatal("Invalid format", "format", format)
	}
	var out []core.InspectOutput
	for _, filePath := range args {
		if !model.IsAudioFile(filePath) {
			log.Warn("Not an audio file", "file", filePath)
			continue
		}
		output, err := core.Inspect(filePath, 1, "")
		if err != nil {
			log.Warn("Unable to process file", "file", filePath, "error", err)
			continue
		}

		out = append(out, *output)
	}
	data, _ := marshal(out)
	fmt.Println(string(data))
}
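For reference, this is roughly how the (now removed) inspect command was invoked, based on the flag definition above: the format flag defaults to jsonindent and one or more audio files are accepted. The file names are placeholders:

    navidrome inspect song.flac
    navidrome inspect -f pretty song.flac
    navidrome inspect --format=yaml track01.mp3 track02.mp3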
cmd/pls.go
@@ -2,69 +2,44 @@ package cmd

import (
	"context"
	"encoding/csv"
	"encoding/json"
	"errors"
	"fmt"
	"os"
	"strconv"

	"github.com/Masterminds/squirrel"
	"github.com/navidrome/navidrome/core/auth"
	"github.com/navidrome/navidrome/db"
	"github.com/navidrome/navidrome/log"
	"github.com/navidrome/navidrome/model"
	"github.com/navidrome/navidrome/persistence"
	"github.com/spf13/cobra"
)

var (
	playlistID   string
	outputFile   string
	userID       string
	outputFormat string
	playlistID string
	outputFile string
)

type displayPlaylist struct {
	Id        string `json:"id"`
	Name      string `json:"name"`
	OwnerName string `json:"ownerName"`
	OwnerId   string `json:"ownerId"`
	Public    bool   `json:"public"`
}

type displayPlaylists []displayPlaylist

func init() {
	plsCmd.Flags().StringVarP(&playlistID, "playlist", "p", "", "playlist name or ID")
	plsCmd.Flags().StringVarP(&outputFile, "output", "o", "", "output file (default stdout)")
	_ = plsCmd.MarkFlagRequired("playlist")
	rootCmd.AddCommand(plsCmd)

	listCommand.Flags().StringVarP(&userID, "user", "u", "", "username or ID")
	listCommand.Flags().StringVarP(&outputFormat, "format", "f", "csv", "output format [supported values: csv, json]")
	plsCmd.AddCommand(listCommand)
}

var (
	plsCmd = &cobra.Command{
		Use:   "pls",
		Short: "Export playlists",
		Long:  "Export Navidrome playlists to M3U files",
		Run: func(cmd *cobra.Command, args []string) {
			runExporter(cmd.Context())
		},
	}
var plsCmd = &cobra.Command{
	Use:   "pls",
	Short: "Export playlists",
	Long:  "Export Navidrome playlists to M3U files",
	Run: func(cmd *cobra.Command, args []string) {
		runExporter()
	},
}

	listCommand = &cobra.Command{
		Use:   "list",
		Short: "List playlists",
		Run: func(cmd *cobra.Command, args []string) {
			runList(cmd.Context())
		},
	}
)
func runExporter(ctx context.Context) {
	ds, ctx := getAdminContext(ctx)
	playlist, err := ds.Playlist(ctx).GetWithTracks(playlistID, true, false)
func runExporter() {
	sqlDB := db.Db()
	ds := persistence.New(sqlDB)
	ctx := auth.WithAdminUser(context.Background(), ds)
	playlist, err := ds.Playlist(ctx).GetWithTracks(playlistID, true)
	if err != nil && !errors.Is(err, model.ErrNotFound) {
		log.Fatal("Error retrieving playlist", "name", playlistID, err)
	}
@@ -74,7 +49,7 @@ func runExporter(ctx context.Context) {
		log.Fatal("Error retrieving playlist", "name", playlistID, err)
	}
	if len(playlists) > 0 {
		playlist, err = ds.Playlist(ctx).GetWithTracks(playlists[0].ID, true, false)
		playlist, err = ds.Playlist(ctx).GetWithTracks(playlists[0].ID, true)
		if err != nil {
			log.Fatal("Error retrieving playlist", "name", playlistID, err)
		}
@@ -94,46 +69,3 @@ func runExporter(ctx context.Context) {
		log.Fatal("Error writing to the output file", "file", outputFile, err)
	}
}

func runList(ctx context.Context) {
	if outputFormat != "csv" && outputFormat != "json" {
		log.Fatal("Invalid output format. Must be one of csv, json", "format", outputFormat)
	}

	ds, ctx := getAdminContext(ctx)
	options := model.QueryOptions{Sort: "owner_name"}

	if userID != "" {
		user, err := getUser(ctx, userID, ds)
		if err != nil {
			log.Fatal(ctx, "Error retrieving user", "username or id", userID)
		}
		options.Filters = squirrel.Eq{"owner_id": user.ID}
	}

	playlists, err := ds.Playlist(ctx).GetAll(options)
	if err != nil {
		log.Fatal(ctx, "Failed to retrieve playlists", err)
	}

	if outputFormat == "csv" {
		w := csv.NewWriter(os.Stdout)
		_ = w.Write([]string{"playlist id", "playlist name", "owner id", "owner name", "public"})
		for _, playlist := range playlists {
			_ = w.Write([]string{playlist.ID, playlist.Name, playlist.OwnerID, playlist.OwnerName, strconv.FormatBool(playlist.Public)})
		}
		w.Flush()
	} else {
		display := make(displayPlaylists, len(playlists))
		for idx, playlist := range playlists {
			display[idx].Id = playlist.ID
			display[idx].Name = playlist.Name
			display[idx].OwnerId = playlist.OwnerID
			display[idx].OwnerName = playlist.OwnerName
			display[idx].Public = playlist.Public
		}

		j, _ := json.Marshal(display)
		fmt.Printf("%s\n", j)
	}
}
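Based on the CSV header and the displayPlaylist JSON tags above, the list subcommand's output has roughly this shape; the IDs and names below are made up for illustration:

    $ navidrome pls list
    playlist id,playlist name,owner id,owner name,public
    3f61c0a2,Morning Mix,9b2d11f7,admin,true

    $ navidrome pls list -f json
    [{"id":"3f61c0a2","name":"Morning Mix","ownerName":"admin","ownerId":"9b2d11f7","public":true}]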
cmd/root.go
@@ -2,35 +2,30 @@ package cmd

import (
	"context"
	"errors"
	"fmt"
	"os"
	"os/signal"
	"strings"
	"syscall"
	"time"

	"github.com/go-chi/chi/v5/middleware"
	"github.com/navidrome/navidrome/conf"
	"github.com/navidrome/navidrome/consts"
	"github.com/navidrome/navidrome/core"
	"github.com/navidrome/navidrome/db"
	"github.com/navidrome/navidrome/log"
	"github.com/navidrome/navidrome/model"
	"github.com/navidrome/navidrome/resources"
	"github.com/navidrome/navidrome/scanner"
	"github.com/navidrome/navidrome/scheduler"
	"github.com/navidrome/navidrome/server/backgrounds"
	"github.com/spf13/cobra"
	"github.com/spf13/viper"
	"github.com/prometheus/client_golang/prometheus/promhttp"
	"golang.org/x/sync/errgroup"

	// Import adapters to register them
	_ "github.com/navidrome/navidrome/adapters/deezer"
	_ "github.com/navidrome/navidrome/adapters/gotaglib"
	_ "github.com/navidrome/navidrome/adapters/lastfm"
	_ "github.com/navidrome/navidrome/adapters/listenbrainz"
	_ "github.com/navidrome/navidrome/adapters/spotify"
	_ "github.com/navidrome/navidrome/adapters/taglib"
	"github.com/spf13/cobra"
	"github.com/spf13/viper"
)

var interrupted = errors.New("service was interrupted")

var (
	cfgFile  string
	noBanner bool
@@ -44,23 +39,17 @@ Complete documentation is available at https://www.navidrome.org/docs`,
			preRun()
		},
		Run: func(cmd *cobra.Command, args []string) {
			runNavidrome(cmd.Context())
		},
		PostRun: func(cmd *cobra.Command, args []string) {
			postRun()
			runNavidrome()
		},
		Version: consts.Version,
	}
)

// Execute runs the root cobra command, which will start the Navidrome server by calling the runNavidrome function.
func Execute() {
	ctx, cancel := mainContext(context.Background())
	defer cancel()

	rootCmd.SetVersionTemplate(`{{println .Version}}`)
	if err := rootCmd.ExecuteContext(ctx); err != nil {
		log.Fatal(err)
	if err := rootCmd.Execute(); err != nil {
		fmt.Println(err)
		os.Exit(1)
	}
}
@@ -68,57 +57,34 @@ func preRun() {
	if !noBanner {
		println(resources.Banner())
	}
	conf.Load(noBanner)
	conf.Load()
}

func postRun() {
	log.Info("Navidrome stopped, bye.")
}
func runNavidrome() {
	db.Init()
	defer func() {
		if err := db.Close(); err != nil {
			log.Error("Error closing DB", err)
		}
		log.Info("Navidrome stopped, bye.")
	}()

// runNavidrome is the main entry point for the Navidrome server. It starts all the services and blocks.
// If any of the services returns an error, it will log it and exit. If the process receives a signal to exit,
// it will cancel the context and exit gracefully.
func runNavidrome(ctx context.Context) {
	defer db.Init(ctx)()

	g, ctx := errgroup.WithContext(ctx)
	g, ctx := errgroup.WithContext(context.Background())
	g.Go(startServer(ctx))
	g.Go(startSignaller(ctx))
	g.Go(startSignaler(ctx))
	g.Go(startScheduler(ctx))
	g.Go(startPlaybackServer(ctx))
	g.Go(schedulePeriodicBackup(ctx))
	g.Go(startInsightsCollector(ctx))
	g.Go(scheduleDBOptimizer(ctx))
	g.Go(startPluginManager(ctx))
	g.Go(runInitialScan(ctx))
	if conf.Server.Scanner.Enabled {
		g.Go(startScanWatcher(ctx))
		g.Go(schedulePeriodicScan(ctx))
	} else {
		log.Warn(ctx, "Automatic Scanning is DISABLED")
	}
	g.Go(schedulePeriodicScan(ctx))

	if err := g.Wait(); err != nil {
	if err := g.Wait(); err != nil && !errors.Is(err, interrupted) {
		log.Error("Fatal error in Navidrome. Aborting", err)
	}
}

// mainContext returns a context that is cancelled when the process receives a signal to exit.
func mainContext(ctx context.Context) (context.Context, context.CancelFunc) {
	return signal.NotifyContext(ctx,
		os.Interrupt,
		syscall.SIGHUP,
		syscall.SIGTERM,
		syscall.SIGABRT,
	)
}
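As a side note, here is a distilled, self-contained illustration of the startup pattern used by runNavidrome and mainContext above; it is not the actual server code, and the ticker loop merely stands in for a real subsystem:

package main

import (
	"context"
	"errors"
	"log"
	"os"
	"os/signal"
	"syscall"
	"time"

	"golang.org/x/sync/errgroup"
)

func main() {
	// Cancelled when the process receives SIGINT or SIGTERM, as in mainContext above.
	ctx, stop := signal.NotifyContext(context.Background(), os.Interrupt, syscall.SIGTERM)
	defer stop()

	g, ctx := errgroup.WithContext(ctx)
	// Each subsystem is a blocking func() error tied to the shared context.
	g.Go(func() error {
		ticker := time.NewTicker(time.Second) // stand-in for a real service loop
		defer ticker.Stop()
		for {
			select {
			case <-ticker.C:
				// periodic work would go here
			case <-ctx.Done():
				return ctx.Err()
			}
		}
	})

	// The first non-nil error (or signal-driven cancellation) unblocks Wait.
	if err := g.Wait(); err != nil && !errors.Is(err, context.Canceled) {
		log.Fatal(err)
	}
}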
// startServer starts the Navidrome web server, adding all the necessary routers.
func startServer(ctx context.Context) func() error {
	return func() error {
		a := CreateServer()
		a.MountRouter("Native API", consts.URLPathNativeAPI, CreateNativeAPIRouter(ctx))
		a.MountRouter("Subsonic API", consts.URLPathSubsonicAPI, CreateSubsonicAPIRouter(ctx))
		a := CreateServer(conf.Server.MusicFolder)
		a.MountRouter("Native API", consts.URLPathNativeAPI, CreateNativeAPIRouter())
		a.MountRouter("Subsonic API", consts.URLPathSubsonicAPI, CreateSubsonicAPIRouter())
		a.MountRouter("Public Endpoints", consts.URLPathPublic, CreatePublicRouter())
		if conf.Server.LastFM.Enabled {
			a.MountRouter("LastFM Auth", consts.URLPathNativeAPI+"/lastfm", CreateLastFMRouter())
@@ -127,230 +93,64 @@ func startServer(ctx context.Context) func() error {
			a.MountRouter("ListenBrainz Auth", consts.URLPathNativeAPI+"/listenbrainz", CreateListenBrainzRouter())
		}
		if conf.Server.Prometheus.Enabled {
			p := CreatePrometheus()
			// blocking call because takes <100ms but useful if fails
			p.WriteInitialMetrics(ctx)
			a.MountRouter("Prometheus metrics", conf.Server.Prometheus.MetricsPath, p.GetHandler())
			// blocking call because takes <1ms but useful if fails
			core.WriteInitialMetrics()
			a.MountRouter("Prometheus metrics", conf.Server.Prometheus.MetricsPath, promhttp.Handler())
		}
		if conf.Server.DevEnableProfiler {
			a.MountRouter("Profiling", "/debug", middleware.Profiler())
		}
		if strings.HasPrefix(conf.Server.UILoginBackgroundURL, "/") {
			a.MountRouter("Background images", conf.Server.UILoginBackgroundURL, backgrounds.NewHandler())
			a.MountRouter("Background images", consts.DefaultUILoginBackgroundURL, backgrounds.NewHandler())
		}
		a.MountRouter("New Native API", consts.URLPathAPI, CreateNewNativeAPIRouter())
		return a.Run(ctx, conf.Server.Address, conf.Server.Port, conf.Server.TLSCert, conf.Server.TLSKey)
	}
}

// schedulePeriodicScan schedules a periodic scan of the music library, if configured.
func schedulePeriodicScan(ctx context.Context) func() error {
	return func() error {
		schedule := conf.Server.Scanner.Schedule
		schedule := conf.Server.ScanSchedule
		if schedule == "" {
			log.Info(ctx, "Periodic scan is DISABLED")
			log.Warn("Periodic scan is DISABLED")
			return nil
		}

		s := CreateScanner(ctx)
		scanner := GetScanner()
		schedulerInstance := scheduler.GetInstance()

		log.Info("Scheduling periodic scan", "schedule", schedule)
		_, err := schedulerInstance.Add(schedule, func() {
			_, err := s.ScanAll(ctx, false)
			if err != nil {
				log.Error(ctx, "Error executing periodic scan", err)
			}
		err := schedulerInstance.Add(schedule, func() {
			_ = scanner.RescanAll(ctx, false)
		})
		if err != nil {
			log.Error(ctx, "Error scheduling periodic scan", err)
			log.Error("Error scheduling periodic scan", err)
		}
		return nil
	}
}
func pidHashChanged(ds model.DataStore) (bool, error) {
	pidAlbum, err := ds.Property(context.Background()).DefaultGet(consts.PIDAlbumKey, "")
	if err != nil {
		return false, err
	}
	pidTrack, err := ds.Property(context.Background()).DefaultGet(consts.PIDTrackKey, "")
	if err != nil {
		return false, err
	}
	return !strings.EqualFold(pidAlbum, conf.Server.PID.Album) || !strings.EqualFold(pidTrack, conf.Server.PID.Track), nil
}

// runInitialScan runs an initial scan of the music library if needed.
func runInitialScan(ctx context.Context) func() error {
	return func() error {
		ds := CreateDataStore()
		fullScanRequired, err := ds.Property(ctx).DefaultGet(consts.FullScanAfterMigrationFlagKey, "0")
		if err != nil {
			return err
		}
		inProgress, err := ds.Library(ctx).ScanInProgress()
		if err != nil {
			return err
		}
		pidHasChanged, err := pidHashChanged(ds)
		if err != nil {
			return err
		}
		scanNeeded := conf.Server.Scanner.ScanOnStartup || inProgress || fullScanRequired == "1" || pidHasChanged
		time.Sleep(2 * time.Second) // Wait 2 seconds before the initial scan
		if scanNeeded {
			s := CreateScanner(ctx)
			switch {
			case fullScanRequired == "1":
				log.Warn(ctx, "Full scan required after migration")
				_ = ds.Property(ctx).Delete(consts.FullScanAfterMigrationFlagKey)
			case pidHasChanged:
				log.Warn(ctx, "PID config changed, performing full scan")
				fullScanRequired = "1"
			case inProgress:
				log.Warn(ctx, "Resuming interrupted scan")
			default:
				log.Info("Executing initial scan")
			}

			_, err = s.ScanAll(ctx, fullScanRequired == "1")
			if err != nil {
				log.Error(ctx, "Scan failed", err)
			} else {
				log.Info(ctx, "Scan completed")
			}
		} else {
			log.Debug(ctx, "Initial scan not needed")
		log.Debug("Executing initial scan")
		if err := scanner.RescanAll(ctx, false); err != nil {
			log.Error("Error executing initial scan", err)
		}
		log.Debug("Finished initial scan")
		return nil
	}
}
func startScanWatcher(ctx context.Context) func() error {
	return func() error {
		if conf.Server.Scanner.WatcherWait == 0 {
			log.Debug("Folder watcher is DISABLED")
			return nil
		}
		w := CreateScanWatcher(ctx)
		err := w.Run(ctx)
		if err != nil {
			log.Error("Error starting watcher", err)
		}
		return nil
	}
}

func schedulePeriodicBackup(ctx context.Context) func() error {
	return func() error {
		schedule := conf.Server.Backup.Schedule
		if schedule == "" {
			log.Info(ctx, "Periodic backup is DISABLED")
			return nil
		}

		schedulerInstance := scheduler.GetInstance()

		log.Info("Scheduling periodic backup", "schedule", schedule)
		_, err := schedulerInstance.Add(schedule, func() {
			start := time.Now()
			path, err := db.Backup(ctx)
			elapsed := time.Since(start)
			if err != nil {
				log.Error(ctx, "Error backing up database", "elapsed", elapsed, err)
				return
			}
			log.Info(ctx, "Backup complete", "elapsed", elapsed, "path", path)

			count, err := db.Prune(ctx)
			if err != nil {
				log.Error(ctx, "Error pruning database", "error", err)
			} else if count > 0 {
				log.Info(ctx, "Successfully pruned old files", "count", count)
			} else {
				log.Info(ctx, "No backups pruned")
			}
		})

		return err
	}
}
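schedulePeriodicBackup reads conf.Server.Backup.Schedule, and its prune step honors the same Backup.Path and Backup.Count values used by the backup commands earlier in this diff. A hedged navidrome.toml sketch; the key names follow the config fields referenced in the code, but the exact file layout and the cron syntax are assumptions, not verified here:

    [Backup]
    Path = "/var/lib/navidrome/backups"
    Schedule = "0 3 * * *"   # assumed cron expression; an empty string disables periodic backups
    Count = 7                # backups kept when pruning; 0 would prune everything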
func scheduleDBOptimizer(ctx context.Context) func() error {
	return func() error {
		log.Info(ctx, "Scheduling DB optimizer", "schedule", consts.OptimizeDBSchedule)
		schedulerInstance := scheduler.GetInstance()
		_, err := schedulerInstance.Add(consts.OptimizeDBSchedule, func() {
			if scanner.IsScanning() {
				log.Debug(ctx, "Skipping DB optimization because a scan is in progress")
				return
			}
			db.Optimize(ctx)
		})
		return err
	}
}

// startScheduler starts the Navidrome scheduler, which is used to run periodic tasks.
func startScheduler(ctx context.Context) func() error {
	log.Info(ctx, "Starting scheduler")
	schedulerInstance := scheduler.GetInstance()

	return func() error {
		log.Info(ctx, "Starting scheduler")
		schedulerInstance := scheduler.GetInstance()
		schedulerInstance.Run(ctx)
		return nil
	}
}

// startInsightsCollector starts the Navidrome Insight Collector, if configured.
func startInsightsCollector(ctx context.Context) func() error {
	return func() error {
		if !conf.Server.EnableInsightsCollector {
			log.Info(ctx, "Insight Collector is DISABLED")
			return nil
		}
		log.Info(ctx, "Starting Insight Collector")
		select {
		case <-time.After(conf.Server.DevInsightsInitialDelay):
		case <-ctx.Done():
			return nil
		}
		ic := CreateInsights()
		ic.Run(ctx)
		return nil
	}
}

// startPlaybackServer starts the Navidrome playback server, if configured.
// It is responsible for the Jukebox functionality
func startPlaybackServer(ctx context.Context) func() error {
	return func() error {
		if !conf.Server.Jukebox.Enabled {
			log.Debug("Jukebox is DISABLED")
			return nil
		}
		log.Info(ctx, "Starting Jukebox service")
		playbackInstance := GetPlaybackServer()
		return playbackInstance.Run(ctx)
	}
}

// startPluginManager starts the plugin manager, if configured.
func startPluginManager(ctx context.Context) func() error {
	return func() error {
		manager := GetPluginManager(ctx)
		if !conf.Server.Plugins.Enabled {
			log.Debug("Plugin system is DISABLED")
			return nil
		}
		log.Info(ctx, "Starting plugin manager")
		return manager.Start(ctx)
	}
}
// TODO: Implement some struct tags to map flags to viper
func init() {
	cobra.OnInitialize(func() {
		conf.InitConfig(cfgFile, true)
		conf.InitConfig(cfgFile)
	})

	rootCmd.PersistentFlags().StringVarP(&cfgFile, "configfile", "c", "", `config file (default "./navidrome.toml")`)
@@ -359,29 +159,24 @@ func init() {
	rootCmd.PersistentFlags().String("datafolder", viper.GetString("datafolder"), "folder to store application data (DB), needs write access")
	rootCmd.PersistentFlags().String("cachefolder", viper.GetString("cachefolder"), "folder to store cache data (transcoding, images...), needs write access")
	rootCmd.PersistentFlags().StringP("loglevel", "l", viper.GetString("loglevel"), "log level, possible values: error, info, debug, trace")
	rootCmd.PersistentFlags().String("logfile", viper.GetString("logfile"), "log file path, if not set logs will be printed to stderr")

	_ = viper.BindPFlag("musicfolder", rootCmd.PersistentFlags().Lookup("musicfolder"))
	_ = viper.BindPFlag("datafolder", rootCmd.PersistentFlags().Lookup("datafolder"))
	_ = viper.BindPFlag("cachefolder", rootCmd.PersistentFlags().Lookup("cachefolder"))
	_ = viper.BindPFlag("loglevel", rootCmd.PersistentFlags().Lookup("loglevel"))
	_ = viper.BindPFlag("logfile", rootCmd.PersistentFlags().Lookup("logfile"))

	rootCmd.Flags().StringP("address", "a", viper.GetString("address"), "IP address to bind to")
	rootCmd.Flags().IntP("port", "p", viper.GetInt("port"), "HTTP port Navidrome will listen to")
	rootCmd.Flags().String("baseurl", viper.GetString("baseurl"), "base URL to configure Navidrome behind a proxy (ex: /music or http://my.server.com)")
	rootCmd.Flags().String("tlscert", viper.GetString("tlscert"), "optional path to a TLS cert file (enables HTTPS listening)")
	rootCmd.Flags().String("unixsocketperm", viper.GetString("unixsocketperm"), "optional file permission for the unix socket")
	rootCmd.Flags().String("tlskey", viper.GetString("tlskey"), "optional path to a TLS key file (enables HTTPS listening)")

	rootCmd.Flags().Duration("sessiontimeout", viper.GetDuration("sessiontimeout"), "how long Navidrome will wait before closing web ui idle sessions")
	rootCmd.Flags().Duration("scaninterval", viper.GetDuration("scaninterval"), "how frequently to scan for changes in your music library")
	rootCmd.Flags().String("uiloginbackgroundurl", viper.GetString("uiloginbackgroundurl"), "URL to a background image used in the Login page")
	rootCmd.Flags().Bool("enabletranscodingconfig", viper.GetBool("enabletranscodingconfig"), "enables transcoding configuration in the UI")
	rootCmd.Flags().Bool("enabletranscodingcancellation", viper.GetBool("enabletranscodingcancellation"), "enables transcoding context cancellation")
	rootCmd.Flags().String("transcodingcachesize", viper.GetString("transcodingcachesize"), "size of transcoding cache")
	rootCmd.Flags().String("imagecachesize", viper.GetString("imagecachesize"), "size of image (art work) cache. set to 0 to disable cache")
	rootCmd.Flags().String("albumplaycountmode", viper.GetString("albumplaycountmode"), "how to compute playcount for albums. absolute (default) or normalized")
	rootCmd.Flags().Bool("autoimportplaylists", viper.GetBool("autoimportplaylists"), "enable/disable .m3u playlist auto-import")

	rootCmd.Flags().Bool("prometheus.enabled", viper.GetBool("prometheus.enabled"), "enable/disable prometheus metrics endpoint")
@@ -390,7 +185,6 @@ func init() {
	_ = viper.BindPFlag("address", rootCmd.Flags().Lookup("address"))
	_ = viper.BindPFlag("port", rootCmd.Flags().Lookup("port"))
	_ = viper.BindPFlag("tlscert", rootCmd.Flags().Lookup("tlscert"))
	_ = viper.BindPFlag("unixsocketperm", rootCmd.Flags().Lookup("unixsocketperm"))
	_ = viper.BindPFlag("tlskey", rootCmd.Flags().Lookup("tlskey"))
	_ = viper.BindPFlag("baseurl", rootCmd.Flags().Lookup("baseurl"))

@@ -402,7 +196,6 @@ func init() {
	_ = viper.BindPFlag("prometheus.metricspath", rootCmd.Flags().Lookup("prometheus.metricspath"))

	_ = viper.BindPFlag("enabletranscodingconfig", rootCmd.Flags().Lookup("enabletranscodingconfig"))
	_ = viper.BindPFlag("enabletranscodingcancellation", rootCmd.Flags().Lookup("enabletranscodingcancellation"))
	_ = viper.BindPFlag("transcodingcachesize", rootCmd.Flags().Lookup("transcodingcachesize"))
	_ = viper.BindPFlag("imagecachesize", rootCmd.Flags().Lookup("imagecachesize"))
}
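Each flag bound through viper.BindPFlag above can equivalently be supplied in the config file; the values below are illustrative only, but the key names mirror the flag names registered in this init function:

    navidrome --musicfolder /srv/music --port 4533 --loglevel debug

    # roughly equivalent navidrome.toml entries
    MusicFolder = "/srv/music"
    Port = 4533
    LogLevel = "debug"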
Some files were not shown because too many files have changed in this diff.