Mirror of https://github.com/navidrome/navidrome.git (synced 2026-01-01 03:18:13 -05:00)

Compare commits (509 commits)
The comparison spans 509 commits, from 4994ae0aed through ee2e04b832; only the commit SHAs were captured in this view.
@@ -4,10 +4,10 @@
    "dockerfile": "Dockerfile",
    "args": {
      // Update the VARIANT arg to pick a version of Go: 1, 1.15, 1.14
      "VARIANT": "1.23",
      "VARIANT": "1.25",
      // Options
      "INSTALL_NODE": "true",
      "NODE_VERSION": "v20"
      "NODE_VERSION": "v24"
    }
  },
  "workspaceMount": "",
@@ -1,10 +1,18 @@
.DS_Store
ui/node_modules
ui/build
!ui/build/.gitkeep
Dockerfile
docker-compose*.yml
data
*.db
testDB
navidrome
navidrome.db
navidrome.toml
tmp
!tmp/taglib
dist
binaries
cache
music
!Dockerfile
.github/actions/download-taglib/action.yml (new file, 23 lines)
@@ -0,0 +1,23 @@
name: 'Download TagLib'
description: 'Downloads and extracts the TagLib library, adding it to PKG_CONFIG_PATH'
inputs:
  version:
    description: 'Version of TagLib to download'
    required: true
  platform:
    description: 'Platform to download TagLib for'
    default: 'linux-amd64'
runs:
  using: 'composite'
  steps:
    - name: Download TagLib
      shell: bash
      run: |
        mkdir -p /tmp/taglib
        cd /tmp
        FILE=taglib-${{ inputs.platform }}.tar.gz
        wget https://github.com/navidrome/cross-taglib/releases/download/v${{ inputs.version }}/${FILE}
        tar -xzf ${FILE} -C taglib
        PKG_CONFIG_PREFIX=/tmp/taglib
        echo "PKG_CONFIG_PREFIX=${PKG_CONFIG_PREFIX}" >> $GITHUB_ENV
        echo "PKG_CONFIG_PATH=${PKG_CONFIG_PATH}:${PKG_CONFIG_PREFIX}/lib/pkgconfig" >> $GITHUB_ENV
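The same download can be reproduced outside of Actions when building locally. A minimal sketch, assuming the cross-taglib version (2.1.1-1) and the default platform (linux-amd64) used elsewhere in this changeset:

```bash
# Fetch the prebuilt TagLib used by CI and point pkg-config at it (local sketch).
mkdir -p /tmp/taglib && cd /tmp
FILE=taglib-linux-amd64.tar.gz
wget "https://github.com/navidrome/cross-taglib/releases/download/v2.1.1-1/${FILE}"
tar -xzf "${FILE}" -C taglib
# After this, `go build` / `go test` can link against the extracted library.
export PKG_CONFIG_PATH="${PKG_CONFIG_PATH}:/tmp/taglib/lib/pkgconfig"
```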
.github/actions/prepare-docker/action.yml (new file, 84 lines)
@@ -0,0 +1,84 @@
name: 'Prepare Docker Buildx environment'
description: 'Downloads and extracts the TagLib library, adding it to PKG_CONFIG_PATH'
inputs:
  github_token:
    description: 'GitHub token'
    required: true
    default: ''
  hub_repository:
    description: 'Docker Hub repository to push images to'
    required: false
    default: ''
  hub_username:
    description: 'Docker Hub username'
    required: false
    default: ''
  hub_password:
    description: 'Docker Hub password'
    required: false
    default: ''
outputs:
  tags:
    description: 'Docker image tags'
    value: ${{ steps.meta.outputs.tags }}
  labels:
    description: 'Docker image labels'
    value: ${{ steps.meta.outputs.labels }}
  annotations:
    description: 'Docker image annotations'
    value: ${{ steps.meta.outputs.annotations }}
  version:
    description: 'Docker image version'
    value: ${{ steps.meta.outputs.version }}
  hub_repository:
    description: 'Docker Hub repository'
    value: ${{ env.DOCKER_HUB_REPO }}
  hub_enabled:
    description: 'Is Docker Hub enabled'
    value: ${{ env.DOCKER_HUB_ENABLED }}

runs:
  using: 'composite'
  steps:
    - name: Check Docker Hub configuration
      shell: bash
      run: |
        if [ -z "${{inputs.hub_repository}}" ]; then
          echo "DOCKER_HUB_REPO=none" >> $GITHUB_ENV
          echo "DOCKER_HUB_ENABLED=false" >> $GITHUB_ENV
        else
          echo "DOCKER_HUB_REPO=${{inputs.hub_repository}}" >> $GITHUB_ENV
          echo "DOCKER_HUB_ENABLED=true" >> $GITHUB_ENV
        fi

    - name: Login to Docker Hub
      if: inputs.hub_username != '' && inputs.hub_password != ''
      uses: docker/login-action@v3
      with:
        username: ${{ inputs.hub_username }}
        password: ${{ inputs.hub_password }}

    - name: Login to GitHub Container Registry
      uses: docker/login-action@v3
      with:
        registry: ghcr.io
        username: ${{ github.actor }}
        password: ${{ inputs.github_token }}

    - name: Set up Docker Buildx
      id: buildx
      uses: docker/setup-buildx-action@v3

    - name: Extract metadata for Docker image
      id: meta
      uses: docker/metadata-action@v5
      with:
        labels: |
          maintainer=deluan@navidrome.org
        images: |
          name=${{env.DOCKER_HUB_REPO}},enable=${{env.DOCKER_HUB_ENABLED}}
          name=ghcr.io/${{ github.repository }}
        tags: |
          type=ref,event=pr
          type=semver,pattern={{version}}
          type=raw,value=develop,enable={{is_default_branch}}
.github/dependabot.yml (10 changed lines)
@@ -10,3 +10,13 @@ updates:
    schedule:
      interval: weekly
    open-pull-requests-limit: 10
  - package-ecosystem: docker
    directory: "/"
    schedule:
      interval: weekly
    open-pull-requests-limit: 10
  - package-ecosystem: github-actions
    directory: "/.github/workflows"
    schedule:
      interval: weekly
    open-pull-requests-limit: 10
.github/pull_request_template.md (new file, 38 lines)
@@ -0,0 +1,38 @@
### Description
<!-- Please provide a clear and concise description of what this PR does and why it is needed. -->

### Related Issues
<!-- List any related issues, e.g., "Fixes #123" or "Related to #456". -->

### Type of Change
- [ ] Bug fix
- [ ] New feature
- [ ] Documentation update
- [ ] Refactor
- [ ] Other (please describe):

### Checklist
Please review and check all that apply:

- [ ] My code follows the project’s coding style
- [ ] I have tested the changes locally
- [ ] I have added or updated documentation as needed
- [ ] I have added tests that prove my fix/feature works (or explain why not)
- [ ] All existing and new tests pass

### How to Test
<!-- Describe the steps to test your changes. Include setup, commands, and expected results. -->

### Screenshots / Demos (if applicable)
<!-- Add screenshots, GIFs, or links to demos if your change includes UI updates or visual changes. -->

### Additional Notes
<!-- Anything else the maintainer should know? Potential side effects, breaking changes, or areas of concern? -->

<!--
**Tips for Contributors:**
- Be concise but thorough.
- If your PR is large, consider breaking it into smaller PRs.
- Tag the maintainer if you need a prompt review.
- Avoid force pushing to the branch after opening the PR, as it can complicate the review process.
-->
.github/workflows/pipeline.dockerfile (deleted file, 40 lines)
@@ -1,40 +0,0 @@
#####################################################
### Copy platform specific binary
FROM bash as copy-binary
ARG TARGETPLATFORM

RUN echo "Target Platform = ${TARGETPLATFORM}"

COPY dist .
RUN if [ "$TARGETPLATFORM" = "linux/amd64" ]; then cp navidrome_linux_amd64_linux_amd64_v1/navidrome /navidrome; fi
RUN if [ "$TARGETPLATFORM" = "linux/386" ]; then cp navidrome_linux_386_linux_386/navidrome /navidrome; fi
RUN if [ "$TARGETPLATFORM" = "linux/arm64" ]; then cp navidrome_linux_arm64_linux_arm64/navidrome /navidrome; fi
RUN if [ "$TARGETPLATFORM" = "linux/arm/v6" ]; then cp navidrome_linux_arm_linux_arm_6/navidrome /navidrome; fi
RUN if [ "$TARGETPLATFORM" = "linux/arm/v7" ]; then cp navidrome_linux_arm_linux_arm_7/navidrome /navidrome; fi
RUN chmod +x /navidrome


#####################################################
### Build Final Image
FROM alpine:3.18
LABEL maintainer="deluan@navidrome.org"

# Install ffmpeg and mpv
RUN apk add -U --no-cache ffmpeg mpv

# Show ffmpeg build info, for troubleshooting purposes
RUN ffmpeg -buildconf

COPY --from=copy-binary /navidrome /app/

VOLUME ["/data", "/music"]
ENV ND_MUSICFOLDER /music
ENV ND_DATAFOLDER /data
ENV ND_PORT 4533
ENV GODEBUG "asyncpreemptoff=1"

EXPOSE ${ND_PORT}
HEALTHCHECK CMD wget -O- http://localhost:${ND_PORT}/ping || exit 1
WORKDIR /app

ENTRYPOINT ["/app/navidrome"]
.github/workflows/pipeline.yml (450 changed lines)
@@ -9,29 +9,76 @@ on:
    branches:
      - master

concurrency:
  group: ${{ startsWith(github.ref, 'refs/tags/v') && 'tag' || 'branch' }}-${{ github.ref }}
  cancel-in-progress: true

env:
  CROSS_TAGLIB_VERSION: "2.1.1-1"
  IS_RELEASE: ${{ startsWith(github.ref, 'refs/tags/') && 'true' || 'false' }}

jobs:
  git-version:
    name: Get version info
    runs-on: ubuntu-latest
    outputs:
      git_tag: ${{ steps.git-version.outputs.GIT_TAG }}
      git_sha: ${{ steps.git-version.outputs.GIT_SHA }}
    steps:
      - uses: actions/checkout@v5
        with:
          fetch-depth: 0
          fetch-tags: true

      - name: Show git version info
        run: |
          echo "git describe (dirty): $(git describe --dirty --always --tags)"
          echo "git describe --tags: $(git describe --tags `git rev-list --tags --max-count=1`)"
          echo "git tag: $(git tag --sort=-committerdate | head -n 1)"
          echo "github_ref: $GITHUB_REF"
          echo "github_head_sha: ${{ github.event.pull_request.head.sha }}"
          git tag -l
      - name: Determine git current SHA and latest tag
        id: git-version
        run: |
          GIT_TAG=$(git tag --sort=-committerdate | head -n 1)
          if [ -n "$GIT_TAG" ]; then
            if [[ "$GITHUB_REF" != refs/tags/* ]]; then
              GIT_TAG=${GIT_TAG}-SNAPSHOT
            fi
            echo "GIT_TAG=$GIT_TAG" >> $GITHUB_OUTPUT
          fi
          GIT_SHA=$(git rev-parse --short HEAD)
          PR_NUM=$(jq --raw-output .pull_request.number "$GITHUB_EVENT_PATH")
          if [[ $PR_NUM != "null" ]]; then
            GIT_SHA=$(echo "${{ github.event.pull_request.head.sha }}" | cut -c1-8)
            GIT_SHA="pr-${PR_NUM}/${GIT_SHA}"
          fi
          echo "GIT_SHA=$GIT_SHA" >> $GITHUB_OUTPUT

          echo "GIT_TAG=$GIT_TAG"
          echo "GIT_SHA=$GIT_SHA"

  go-lint:
    name: Lint Go code
    runs-on: ubuntu-latest
    container: deluan/ci-goreleaser:1.23.0-1
    steps:
      - uses: actions/checkout@v4
      - uses: actions/checkout@v5

      - name: Config workspace folder as trusted
        run: git config --global --add safe.directory $GITHUB_WORKSPACE; git describe --dirty --always --tags
      - name: Download TagLib
        uses: ./.github/actions/download-taglib
        with:
          version: ${{ env.CROSS_TAGLIB_VERSION }}

      - name: golangci-lint
        uses: golangci/golangci-lint-action@v6
        uses: golangci/golangci-lint-action@v8
        with:
          version: latest
          github-token: ${{ secrets.GITHUB_TOKEN }}
          problem-matchers: true
          args: --timeout 2m

      - name: Install goimports
        run: go install golang.org/x/tools/cmd/goimports@latest

      - run: goimports -w `find . -name '*.go' | grep -v '_gen.go$'`
      - name: Run go goimports
        run: go run golang.org/x/tools/cmd/goimports@latest -w `find . -name '*.go' | grep -v '_gen.go$' | grep -v '.pb.go$'`
      - run: go mod tidy
      - name: Verify no changes from goimports and go mod tidy
        run: |
@@ -44,33 +91,33 @@
  go:
    name: Test Go code
    runs-on: ubuntu-latest
    container: deluan/ci-goreleaser:1.23.0-1
    steps:
      - name: Check out code into the Go module directory
        uses: actions/checkout@v4
        uses: actions/checkout@v5

      - name: Config workspace folder as trusted
        run: git config --global --add safe.directory $GITHUB_WORKSPACE; git describe --dirty --always --tags
      - name: Download TagLib
        uses: ./.github/actions/download-taglib
        with:
          version: ${{ env.CROSS_TAGLIB_VERSION }}

      - name: Download dependencies
        if: steps.cache-go.outputs.cache-hit != 'true'
        continue-on-error: ${{contains(matrix.go_version, 'beta') || contains(matrix.go_version, 'rc')}}
        run: go mod download

      - name: Test
        continue-on-error: ${{contains(matrix.go_version, 'beta') || contains(matrix.go_version, 'rc')}}
        run: go test -shuffle=on -race -cover ./... -v
        run: |
          pkg-config --define-prefix --cflags --libs taglib # for debugging
          go test -shuffle=on -tags netgo -race ./... -v

  js:
    name: Build JS bundle
    name: Test JS code
    runs-on: ubuntu-latest
    env:
      NODE_OPTIONS: "--max_old_space_size=4096"
    steps:
      - uses: actions/checkout@v4
      - uses: actions/setup-node@v4
      - uses: actions/checkout@v5
      - uses: actions/setup-node@v6
        with:
          node-version: 20
          node-version: 24
          cache: "npm"
          cache-dependency-path: "**/package-lock.json"
@@ -94,16 +141,11 @@
          cd ui
          npm run build

      - uses: actions/upload-artifact@v4
        with:
          name: js-bundle
          path: ui/build
          retention-days: 7
  i18n-lint:
    name: Lint i18n files
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@v4
      - uses: actions/checkout@v5
      - run: |
          set -e
          for file in resources/i18n/*.json; do
@@ -115,109 +157,275 @@
            exit 1
          fi
          done
      - run: ./.github/workflows/validate-translations.sh -v

  binaries:
    name: Build binaries
    needs: [js, go, go-lint, i18n-lint]

  check-push-enabled:
    name: Check Docker configuration
    runs-on: ubuntu-latest
    container: deluan/ci-goreleaser:1.23.0-1
    outputs:
      is_enabled: ${{ steps.check.outputs.is_enabled }}
    steps:
      - name: Checkout Code
        uses: actions/checkout@v4
        with:
          fetch-depth: 0
      - name: Check if Docker push is configured
        id: check
        run: echo "is_enabled=${{ secrets.DOCKER_HUB_USERNAME != '' }}" >> $GITHUB_OUTPUT

      - name: Config workspace folder as trusted
        run: git config --global --add safe.directory $GITHUB_WORKSPACE; git describe --dirty --always --tags

      - uses: actions/download-artifact@v4
        with:
          name: js-bundle
          path: ui/build

      - name: Run GoReleaser - SNAPSHOT
        if: startsWith(github.ref, 'refs/tags/') != true
        run: goreleaser release --clean --skip=publish --snapshot
        env:
          GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}

      - name: Run GoReleaser - RELEASE
        if: startsWith(github.ref, 'refs/tags/')
        run: goreleaser release --clean
        env:
          GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}

      - uses: actions/upload-artifact@v4
        with:
          name: binaries
          path: |
            dist
            !dist/*.tar.gz
            !dist/*.zip
          retention-days: 7

  docker:
    name: Build and publish Docker images
    needs: [binaries]
  build:
    name: Build
    needs: [js, go, go-lint, i18n-lint, git-version, check-push-enabled]
    strategy:
      matrix:
        platform: [ linux/amd64, linux/arm64, linux/arm/v5, linux/arm/v6, linux/arm/v7, linux/386, darwin/amd64, darwin/arm64, windows/amd64, windows/386 ]
    runs-on: ubuntu-latest
    env:
      DOCKER_IMAGE: ${{secrets.DOCKER_IMAGE}}
      IS_LINUX: ${{ startsWith(matrix.platform, 'linux/') && 'true' || 'false' }}
      IS_ARMV5: ${{ matrix.platform == 'linux/arm/v5' && 'true' || 'false' }}
      IS_DOCKER_PUSH_CONFIGURED: ${{ needs.check-push-enabled.outputs.is_enabled == 'true' }}
      DOCKER_BUILD_SUMMARY: false
      GIT_SHA: ${{ needs.git-version.outputs.git_sha }}
      GIT_TAG: ${{ needs.git-version.outputs.git_tag }}
    steps:
      - name: Set up QEMU
        id: qemu
        uses: docker/setup-qemu-action@v3
        if: env.DOCKER_IMAGE != ''
      - name: Sanitize platform name
        id: set-platform
        run: |
          PLATFORM=$(echo ${{ matrix.platform }} | tr '/' '_')
          echo "PLATFORM=$PLATFORM" >> $GITHUB_ENV

      - name: Set up Docker Buildx
        id: buildx
        uses: docker/setup-buildx-action@v3
        if: env.DOCKER_IMAGE != ''
      - uses: actions/checkout@v5

      - uses: actions/checkout@v4
        if: env.DOCKER_IMAGE != ''

      - uses: actions/download-artifact@v4
        if: env.DOCKER_IMAGE != ''
      - name: Prepare Docker Buildx
        uses: ./.github/actions/prepare-docker
        id: docker
        with:
          name: binaries
          path: dist
          github_token: ${{ secrets.GITHUB_TOKEN }}
          hub_repository: ${{ vars.DOCKER_HUB_REPO }}
          hub_username: ${{ secrets.DOCKER_HUB_USERNAME }}
          hub_password: ${{ secrets.DOCKER_HUB_PASSWORD }}

      - name: Login to Docker Hub
        if: env.DOCKER_IMAGE != ''
        uses: docker/login-action@v3
        with:
          username: ${{ secrets.DOCKER_USERNAME }}
          password: ${{ secrets.DOCKER_PASSWORD }}

      - name: Login to GitHub Container Registry
        if: env.DOCKER_IMAGE != ''
        uses: docker/login-action@v3
        with:
          registry: ghcr.io
          username: ${{ github.actor }}
          password: ${{ secrets.GITHUB_TOKEN }}

      - name: Extract metadata for Docker
        if: env.DOCKER_IMAGE != ''
        id: meta
        uses: docker/metadata-action@v5
        with:
          labels: |
            maintainer=deluan
          images: |
            name=${{secrets.DOCKER_IMAGE}}
            name=ghcr.io/${{ github.repository }}
          tags: |
            type=ref,event=pr
            type=semver,pattern={{version}}
            type=raw,value=develop,enable={{is_default_branch}}

      - name: Build and Push
        if: env.DOCKER_IMAGE != ''
        uses: docker/build-push-action@v5
      - name: Build Binaries
        uses: docker/build-push-action@v6
        with:
          context: .
          file: .github/workflows/pipeline.dockerfile
          platforms: linux/amd64,linux/386,linux/arm/v6,linux/arm/v7,linux/arm64
          push: true
          tags: ${{ steps.meta.outputs.tags }}
          file: Dockerfile
          platforms: ${{ matrix.platform }}
          outputs: |
            type=local,dest=./output/${{ env.PLATFORM }}
          target: binary
          build-args: |
            GIT_SHA=${{ env.GIT_SHA }}
            GIT_TAG=${{ env.GIT_TAG }}
            CROSS_TAGLIB_VERSION=${{ env.CROSS_TAGLIB_VERSION }}

      - name: Upload Binaries
        uses: actions/upload-artifact@v4
        with:
          name: navidrome-${{ env.PLATFORM }}
          path: ./output
          retention-days: 7

      - name: Build and push image by digest
        id: push-image
        if: env.IS_LINUX == 'true' && env.IS_DOCKER_PUSH_CONFIGURED == 'true' && env.IS_ARMV5 == 'false'
        uses: docker/build-push-action@v6
        with:
          context: .
          file: Dockerfile
          platforms: ${{ matrix.platform }}
          labels: ${{ steps.docker.outputs.labels }}
          build-args: |
            GIT_SHA=${{ env.GIT_SHA }}
            GIT_TAG=${{ env.GIT_TAG }}
            CROSS_TAGLIB_VERSION=${{ env.CROSS_TAGLIB_VERSION }}
          outputs: |
            type=image,name=${{ steps.docker.outputs.hub_repository }},push-by-digest=true,name-canonical=true,push=${{ steps.docker.outputs.hub_enabled }}
            type=image,name=ghcr.io/${{ github.repository }},push-by-digest=true,name-canonical=true,push=true

      - name: Export digest
        if: env.IS_LINUX == 'true' && env.IS_DOCKER_PUSH_CONFIGURED == 'true' && env.IS_ARMV5 == 'false'
        run: |
          mkdir -p /tmp/digests
          digest="${{ steps.push-image.outputs.digest }}"
          touch "/tmp/digests/${digest#sha256:}"

      - name: Upload digest
        uses: actions/upload-artifact@v4
        if: env.IS_LINUX == 'true' && env.IS_DOCKER_PUSH_CONFIGURED == 'true' && env.IS_ARMV5 == 'false'
        with:
          name: digests-${{ env.PLATFORM }}
          path: /tmp/digests/*
          if-no-files-found: error
          retention-days: 1

  push-manifest:
    name: Push Docker manifest
    runs-on: ubuntu-latest
    needs: [build, check-push-enabled]
    if: needs.check-push-enabled.outputs.is_enabled == 'true'
    env:
      REGISTRY_IMAGE: ghcr.io/${{ github.repository }}
    steps:
      - uses: actions/checkout@v5

      - name: Download digests
        uses: actions/download-artifact@v5
        with:
          path: /tmp/digests
          pattern: digests-*
          merge-multiple: true

      - name: Prepare Docker Buildx
        uses: ./.github/actions/prepare-docker
        id: docker
        with:
          github_token: ${{ secrets.GITHUB_TOKEN }}
          hub_repository: ${{ vars.DOCKER_HUB_REPO }}
          hub_username: ${{ secrets.DOCKER_HUB_USERNAME }}
          hub_password: ${{ secrets.DOCKER_HUB_PASSWORD }}

      - name: Create manifest list and push to ghcr.io
        working-directory: /tmp/digests
        run: |
          docker buildx imagetools create $(jq -cr '.tags | map("-t " + .) | join(" ")' <<< "$DOCKER_METADATA_OUTPUT_JSON") \
            $(printf '${{ env.REGISTRY_IMAGE }}@sha256:%s ' *)

      - name: Create manifest list and push to Docker Hub
        working-directory: /tmp/digests
        if: vars.DOCKER_HUB_REPO != ''
        run: |
          docker buildx imagetools create $(jq -cr '.tags | map("-t " + .) | join(" ")' <<< "$DOCKER_METADATA_OUTPUT_JSON") \
            $(printf '${{ vars.DOCKER_HUB_REPO }}@sha256:%s ' *)

      - name: Inspect image in ghcr.io
        run: |
          docker buildx imagetools inspect ${{ env.REGISTRY_IMAGE }}:${{ steps.docker.outputs.version }}

      - name: Inspect image in Docker Hub
        if: vars.DOCKER_HUB_REPO != ''
        run: |
          docker buildx imagetools inspect ${{ vars.DOCKER_HUB_REPO }}:${{ steps.docker.outputs.version }}

      - name: Delete unnecessary digest artifacts
        env:
          GH_TOKEN: ${{ github.token }}
        run: |
          for artifact in $(gh api repos/${{ github.repository }}/actions/artifacts | jq -r '.artifacts[] | select(.name | startswith("digests-")) | .id'); do
            gh api --method DELETE repos/${{ github.repository }}/actions/artifacts/$artifact
          done

  msi:
    name: Build Windows installers
    needs: [build, git-version]
    runs-on: ubuntu-24.04

    steps:
      - uses: actions/checkout@v5

      - uses: actions/download-artifact@v5
        with:
          path: ./binaries
          pattern: navidrome-windows*
          merge-multiple: true

      - name: Install Wix
        run: sudo apt-get install -y wixl jq

      - name: Build MSI
        env:
          GIT_TAG: ${{ needs.git-version.outputs.git_tag }}
        run: |
          rm -rf binaries/msi
          sudo GIT_TAG=$GIT_TAG release/wix/build_msi.sh ${GITHUB_WORKSPACE} 386
          sudo GIT_TAG=$GIT_TAG release/wix/build_msi.sh ${GITHUB_WORKSPACE} amd64
          du -h binaries/msi/*.msi

      - name: Upload MSI files
        uses: actions/upload-artifact@v4
        with:
          name: navidrome-windows-installers
          path: binaries/msi/*.msi
          retention-days: 7

  release:
    name: Package/Release
    needs: [build, msi]
    runs-on: ubuntu-latest
    outputs:
      package_list: ${{ steps.set-package-list.outputs.package_list }}
    steps:
      - uses: actions/checkout@v5
        with:
          fetch-depth: 0
          fetch-tags: true

      - uses: actions/download-artifact@v5
        with:
          path: ./binaries
          pattern: navidrome-*
          merge-multiple: true

      - run: ls -lR ./binaries

      - name: Set RELEASE_FLAGS for snapshot releases
        if: env.IS_RELEASE == 'false'
        run: echo 'RELEASE_FLAGS=--skip=publish --snapshot' >> $GITHUB_ENV

      - name: Run GoReleaser
        uses: goreleaser/goreleaser-action@v6
        with:
          version: '~> v2'
          args: "release --clean -f release/goreleaser.yml ${{ env.RELEASE_FLAGS }}"
        env:
          GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}

      - name: Remove build artifacts
        run: |
          ls -l ./dist
          rm ./dist/*.tar.gz ./dist/*.zip

      - name: Upload all-packages artifact
        uses: actions/upload-artifact@v4
        with:
          name: packages
          path: dist/navidrome_0*

      - id: set-package-list
        name: Export list of generated packages
        run: |
          cd dist
          set +x
          ITEMS=$(ls navidrome_0* | sed 's/^navidrome_0[^_]*_linux_//' | jq -R -s -c 'split("\n")[:-1]')
          echo $ITEMS
          echo "package_list=${ITEMS}" >> $GITHUB_OUTPUT

  upload-packages:
    name: Upload Linux PKG
    runs-on: ubuntu-latest
    needs: [release]
    strategy:
      matrix:
        item: ${{ fromJson(needs.release.outputs.package_list) }}
    steps:
      - name: Download all-packages artifact
        uses: actions/download-artifact@v5
        with:
          name: packages
          path: ./dist

      - name: Upload all-packages artifact
        uses: actions/upload-artifact@v4
        with:
          name: navidrome_linux_${{ matrix.item }}
          path: dist/navidrome_0*_linux_${{ matrix.item }}

#  delete-artifacts:
#    name: Delete unused artifacts
#    runs-on: ubuntu-latest
#    needs: [upload-packages]
#    steps:
#      - name: Delete all-packages artifact
#        env:
#          GH_TOKEN: ${{ github.token }}
#        run: |
#          for artifact in $(gh api repos/${{ github.repository }}/actions/artifacts | jq -r '.artifacts[] | select(.name | startswith("packages")) | .id'); do
#            gh api --method DELETE repos/${{ github.repository }}/actions/artifacts/$artifact
#          done
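The push-manifest job's `imagetools create` call is easier to read with the jq substitution expanded. A minimal sketch with hypothetical tag and digest values (the real ones come from docker/metadata-action and the digests-* artifacts):

```bash
# Hypothetical metadata JSON in the shape produced by docker/metadata-action.
DOCKER_METADATA_OUTPUT_JSON='{"tags":["ghcr.io/navidrome/navidrome:develop","ghcr.io/navidrome/navidrome:pr-123"]}'

# The jq filter turns every tag into a "-t <tag>" flag:
jq -cr '.tags | map("-t " + .) | join(" ")' <<< "$DOCKER_METADATA_OUTPUT_JSON"
# -t ghcr.io/navidrome/navidrome:develop -t ghcr.io/navidrome/navidrome:pr-123

# So the assembled command looks roughly like this (digests are placeholders):
docker buildx imagetools create \
  -t ghcr.io/navidrome/navidrome:develop -t ghcr.io/navidrome/navidrome:pr-123 \
  ghcr.io/navidrome/navidrome@sha256:aaaa... ghcr.io/navidrome/navidrome@sha256:bbbb...
```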
.github/workflows/update-translations.sh (55 changed lines)
@@ -9,6 +9,7 @@ process_json() {
  jq 'walk(if type == "object" then with_entries(select(.value != null and .value != "" and .value != [] and .value != {})) | to_entries | sort_by(.key) | from_entries else . end)' "$1"
}

# Function to check differences between local and remote translations
check_lang_diff() {
  filename=${I18N_DIR}/"$1".json
  url=$(curl -s -X POST https://poeditor.com/api/ \
@@ -35,10 +36,58 @@
  rm -f poeditor.json poeditor.tmp "$filename".tmp
}

# Function to get the list of languages
get_language_list() {
  response=$(curl -s -X POST https://api.poeditor.com/v2/languages/list \
    -d api_token="${POEDITOR_APIKEY}" \
    -d id="${POEDITOR_PROJECTID}")

  echo $response
}

# Function to get the language name from the language code
get_language_name() {
  lang_code="$1"
  lang_list="$2"

  lang_name=$(echo "$lang_list" | jq -r ".result.languages[] | select(.code == \"$lang_code\") | .name")

  if [ -z "$lang_name" ]; then
    echo "Error: Language code '$lang_code' not found" >&2
    return 1
  fi

  echo "$lang_name"
}

# Function to get the language code from the file path
get_lang_code() {
  filepath="$1"
  # Extract just the filename
  filename=$(basename "$filepath")

  # Remove the extension
  lang_code="${filename%.*}"

  echo "$lang_code"
}

lang_list=$(get_language_list)

# Check differences for each language
for file in ${I18N_DIR}/*.json; do
  name=$(basename "$file")
  code=$(echo "$name" | cut -f1 -d.)
  code=$(get_lang_code "$file")
  lang=$(jq -r .languageName < "$file")
  echo "Downloading $lang ($code)"
  lang_name=$(get_language_name "$code" "$lang_list")
  echo "Downloading $lang_name - $lang ($code)"
  check_lang_diff "$code"
done

# List changed languages to stderr
languages=""
for file in $(git diff --name-only --exit-code | grep json); do
  lang_code=$(get_lang_code "$file")
  lang_name=$(get_language_name "$lang_code" "$lang_list")
  languages="${languages}$(echo "$lang_name" | tr -d '\n'), "
done
echo "${languages%??}" 1>&2
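The `process_json` filter at the top of this script drops empty values and sorts keys recursively. A small illustration with made-up input:

```bash
# Empty strings and nulls are removed at every nesting level, remaining keys are sorted.
echo '{"b":"2","a":"","c":{"z":"9","y":null}}' | \
  jq 'walk(if type == "object" then with_entries(select(.value != null and .value != "" and .value != [] and .value != {})) | to_entries | sort_by(.key) | from_entries else . end)'
# Result (compact form): {"b":"2","c":{"z":"9"}}
```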
.github/workflows/update-translations.yml (15 changed lines)
@@ -8,21 +8,26 @@ jobs:
    runs-on: ubuntu-latest
    if: ${{ github.repository_owner == 'navidrome' }}
    steps:
      - uses: actions/checkout@v4
      - uses: actions/checkout@v5
      - name: Get updated translations
        id: poeditor
        env:
          POEDITOR_PROJECTID: ${{ secrets.POEDITOR_PROJECTID }}
          POEDITOR_APIKEY: ${{ secrets.POEDITOR_APIKEY }}
        run: |
          .github/workflows/update-translations.sh
          .github/workflows/update-translations.sh 2> title.tmp
          title=$(cat title.tmp)
          echo "::set-output name=title::$title"
          rm title.tmp
      - name: Show changes, if any
        run: |
          git status --porcelain
          git diff
      - name: Create Pull Request
        uses: peter-evans/create-pull-request@v6
        uses: peter-evans/create-pull-request@v7
        with:
          token: ${{ secrets.PAT }}
          commit-message: Update translations
          title: Update translations from POEditor
          author: "navidrome-bot <navidrome-bot@navidrome.org>"
          commit-message: "fix(ui): update ${{ steps.poeditor.outputs.title }} translations from POEditor"
          title: "fix(ui): update ${{ steps.poeditor.outputs.title }} translations from POEditor"
          branch: update-translations
.github/workflows/validate-translations.sh (new executable file, 236 lines)
@@ -0,0 +1,236 @@
#!/bin/bash

# validate-translations.sh
#
# This script validates the structure of JSON translation files by comparing them
# against the reference English translation file (ui/src/i18n/en.json).
#
# The script performs the following validations:
# 1. JSON syntax validation using jq
# 2. Structural validation - ensures all keys from English file are present
# 3. Reports missing keys (translation incomplete)
# 4. Reports extra keys (keys not in English reference, possibly deprecated)
# 5. Emits GitHub Actions annotations for CI/CD integration
#
# Usage:
#   ./validate-translations.sh
#
# Environment Variables:
#   EN_FILE - Path to reference English file (default: ui/src/i18n/en.json)
#   TRANSLATION_DIR - Directory containing translation files (default: resources/i18n)
#
# Exit codes:
#   0 - All translations are valid
#   1 - One or more translations have structural issues
#
# GitHub Actions Integration:
#   The script outputs GitHub Actions annotations using ::error and ::warning
#   format that will be displayed in PR checks and workflow summaries.

# Script to validate JSON translation files structure against en.json
set -e

# Path to the reference English translation file
EN_FILE="${EN_FILE:-ui/src/i18n/en.json}"
TRANSLATION_DIR="${TRANSLATION_DIR:-resources/i18n}"
VERBOSE=false

# Parse command line arguments
while [[ $# -gt 0 ]]; do
  case "$1" in
    -v|--verbose)
      VERBOSE=true
      shift
      ;;
    -h|--help)
      echo "Usage: $0 [options]"
      echo ""
      echo "Validates JSON translation files structure against English reference file."
      echo ""
      echo "Options:"
      echo "  -h, --help       Show this help message"
      echo "  -v, --verbose    Show detailed output (default: only show errors)"
      echo ""
      echo "Environment Variables:"
      echo "  EN_FILE          Path to reference English file (default: ui/src/i18n/en.json)"
      echo "  TRANSLATION_DIR  Directory with translation files (default: resources/i18n)"
      echo ""
      echo "Examples:"
      echo "  $0                              # Validate all translation files (quiet mode)"
      echo "  $0 -v                           # Validate with detailed output"
      echo "  EN_FILE=custom/en.json $0       # Use custom reference file"
      echo "  TRANSLATION_DIR=custom/i18n $0  # Use custom translations directory"
      exit 0
      ;;
    *)
      echo "Unknown option: $1" >&2
      echo "Use --help for usage information" >&2
      exit 1
      ;;
  esac
done

# Color codes for output
RED='\033[0;31m'
GREEN='\033[0;32m'
YELLOW='\033[1;33m'
NC='\033[0m' # No Color

if [[ "$VERBOSE" == "true" ]]; then
  echo "Validating translation files structure against ${EN_FILE}..."
fi

# Check if English reference file exists
if [[ ! -f "$EN_FILE" ]]; then
  echo "::error::Reference file $EN_FILE not found"
  exit 1
fi

# Function to extract all JSON keys from a file, creating a flat list of dot-separated paths
extract_keys() {
  local file="$1"
  jq -r 'paths(scalars) as $p | $p | join(".")' "$file" 2>/dev/null | sort
}

# Function to extract all non-empty string keys (to identify structural issues)
extract_structure_keys() {
  local file="$1"
  # Get only keys where values are not empty strings
  jq -r 'paths(scalars) as $p | select(getpath($p) != "") | $p | join(".")' "$file" 2>/dev/null | sort
}

# Function to validate a single translation file
validate_translation() {
  local translation_file="$1"
  local filename=$(basename "$translation_file")
  local has_errors=false
  local verbose=${2:-false}

  if [[ "$verbose" == "true" ]]; then
    echo "Validating $filename..."
  fi

  # First validate JSON syntax
  if ! jq empty "$translation_file" 2>/dev/null; then
    echo "::error file=$translation_file::Invalid JSON syntax"
    echo -e "${RED}✗ $filename has invalid JSON syntax${NC}"
    return 1
  fi

  # Extract all keys from both files (for statistics)
  local en_keys_file=$(mktemp)
  local translation_keys_file=$(mktemp)

  extract_keys "$EN_FILE" > "$en_keys_file"
  extract_keys "$translation_file" > "$translation_keys_file"

  # Extract only non-empty structure keys (to validate structural issues)
  local en_structure_file=$(mktemp)
  local translation_structure_file=$(mktemp)

  extract_structure_keys "$EN_FILE" > "$en_structure_file"
  extract_structure_keys "$translation_file" > "$translation_structure_file"

  # Find structural issues: keys in translation not in English (misplaced)
  local extra_keys=$(comm -13 "$en_keys_file" "$translation_keys_file")

  # Find missing keys (for statistics only)
  local missing_keys=$(comm -23 "$en_keys_file" "$translation_keys_file")

  # Count keys for statistics
  local total_en_keys=$(wc -l < "$en_keys_file")
  local total_translation_keys=$(wc -l < "$translation_keys_file")
  local missing_count=0
  local extra_count=0

  if [[ -n "$missing_keys" ]]; then
    missing_count=$(echo "$missing_keys" | grep -c '^' || echo 0)
  fi

  if [[ -n "$extra_keys" ]]; then
    extra_count=$(echo "$extra_keys" | grep -c '^' || echo 0)
    has_errors=true
  fi

  # Report extra/misplaced keys (these are structural issues)
  if [[ -n "$extra_keys" ]]; then
    if [[ "$verbose" == "true" ]]; then
      echo -e "${YELLOW}Misplaced keys in $filename ($extra_count):${NC}"
    fi

    while IFS= read -r key; do
      # Try to find the line number
      line=$(grep -n "\"$(echo "$key" | sed 's/.*\.//')" "$translation_file" | head -1 | cut -d: -f1)
      line=${line:-1}  # Default to line 1 if not found

      echo "::error file=$translation_file,line=$line::Misplaced key: $key"

      if [[ "$verbose" == "true" ]]; then
        echo "  + $key (line ~$line)"
      fi
    done <<< "$extra_keys"
  fi

  # Clean up temp files
  rm -f "$en_keys_file" "$translation_keys_file" "$en_structure_file" "$translation_structure_file"

  # Print statistics
  if [[ "$verbose" == "true" ]]; then
    echo "  Keys: $total_translation_keys/$total_en_keys (Missing: $missing_count, Extra/Misplaced: $extra_count)"

    if [[ "$has_errors" == "true" ]]; then
      echo -e "${RED}✗ $filename has structural issues${NC}"
    else
      echo -e "${GREEN}✓ $filename structure is valid${NC}"
    fi
  elif [[ "$has_errors" == "true" ]]; then
    echo -e "${RED}✗ $filename has structural issues (Extra/Misplaced: $extra_count)${NC}"
  fi

  return $([[ "$has_errors" == "true" ]] && echo 1 || echo 0)
}

# Main validation loop
validation_failed=false
total_files=0
failed_files=0
valid_files=0

for translation_file in "$TRANSLATION_DIR"/*.json; do
  if [[ -f "$translation_file" ]]; then
    total_files=$((total_files + 1))
    if ! validate_translation "$translation_file" "$VERBOSE"; then
      validation_failed=true
      failed_files=$((failed_files + 1))
    else
      valid_files=$((valid_files + 1))
    fi

    if [[ "$VERBOSE" == "true" ]]; then
      echo "" # Add spacing between files
    fi
  fi
done

# Summary
if [[ "$VERBOSE" == "true" ]]; then
  echo "========================================="
  echo "Translation Validation Summary:"
  echo "  Total files: $total_files"
  echo "  Valid files: $valid_files"
  echo "  Files with structural issues: $failed_files"
  echo "========================================="
fi

if [[ "$validation_failed" == "true" ]]; then
  if [[ "$VERBOSE" == "true" ]]; then
    echo -e "${RED}Translation validation failed - $failed_files file(s) have structural issues${NC}"
  else
    echo -e "${RED}Translation validation failed - $failed_files/$total_files file(s) have structural issues${NC}"
  fi
  exit 1
elif [[ "$VERBOSE" == "true" ]]; then
  echo -e "${GREEN}All translation files are structurally valid${NC}"
fi

exit 0
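The structural comparison works by flattening each file into sorted, dot-separated key paths and diffing the two lists with `comm`. A quick illustration of the `extract_keys` filter on a tiny, hypothetical translation file:

```bash
# Every scalar value becomes one "path.to.key" line, which makes set comparison trivial.
echo '{"menu":{"albums":"Albums","songs":"Songs"},"buttons":{"ok":"OK"}}' | \
  jq -r 'paths(scalars) as $p | $p | join(".")' | sort
# buttons.ok
# menu.albums
# menu.songs
```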
.gitignore (18 changed lines)
@@ -5,24 +5,30 @@
/navidrome
/iTunes*.xml
/tmp
/bin
data/*
vendor/*/
wiki
TODO.md
var
navidrome.toml
!release/linux/navidrome.toml
master.zip
testDB
navidrome.db
cache/*
*.swp
embedded_gen.go
dist
music
navidrome.db-shm
navidrome.db-wal
tags
*.db*
.gitinfo
docker-compose.yml
!contrib/docker-compose.yml
test-123.db
binaries
navidrome-*
AGENTS.md
.github/prompts
.github/instructions
.github/git-commit-instructions.md
*.exe
*.test
*.wasm
@@ -1,3 +1,7 @@
version: "2"
run:
  build-tags:
    - netgo
linters:
  enable:
    - asasalint
@@ -7,31 +11,48 @@ linters:
    - copyloopvar
    - dogsled
    - durationcheck
    - errcheck
    - errorlint
    - gocritic
    - gocyclo
    - goprintffuncname
    - gosec
    - gosimple
    - govet
    - ineffassign
    - misspell
    - nakedret
    - nilerr
    - rowserrcheck
    - staticcheck
    - typecheck
    - unconvert
    - unused
    - whitespace

linters-settings:
  govet:
    enable:
      - nilness
  gosec:
    excludes:
      - G501
      - G401
      - G505
      - G115 # Can't check context, where the warning is clearly a false positive. See discussion in https://github.com/securego/gosec/pull/1149
  disable:
    - staticcheck
  settings:
    gocritic:
      disable-all: true
      enabled-checks:
        - deprecatedComment
    gosec:
      excludes:
        - G501
        - G401
        - G505
        - G115
    govet:
      enable:
        - nilness
  exclusions:
    generated: lax
    presets:
      - comments
      - common-false-positives
      - legacy
      - std-error-handling
    paths:
      - third_party$
      - builtin$
      - examples$
formatters:
  exclusions:
    generated: lax
    paths:
      - third_party$
      - builtin$
      - examples$

.goreleaser.yml (deleted file, 134 lines)
@@ -1,134 +0,0 @@
# GoReleaser config
project_name: navidrome
version: 2

builds:
  - id: navidrome_linux_amd64
    env:
      - CGO_ENABLED=1
    goos:
      - linux
    goarch:
      - amd64
    flags:
      - -tags=netgo
    ldflags:
      - "-extldflags '-static -lz'"
      - -s -w -X github.com/navidrome/navidrome/consts.gitSha={{.ShortCommit}} -X github.com/navidrome/navidrome/consts.gitTag={{.Version}}

  - id: navidrome_linux_386
    env:
      - CGO_ENABLED=1
      - PKG_CONFIG_PATH=/i386/lib/pkgconfig
    goos:
      - linux
    goarch:
      - "386"
    flags:
      - -tags=netgo
    ldflags:
      - "-extldflags '-static'"
      - -s -w -X github.com/navidrome/navidrome/consts.gitSha={{.ShortCommit}} -X github.com/navidrome/navidrome/consts.gitTag={{.Version}}

  - id: navidrome_linux_arm
    env:
      - CGO_ENABLED=1
      - CC=arm-linux-gnueabi-gcc
      - CXX=arm-linux-gnueabi-g++
      - PKG_CONFIG_PATH=/arm/lib/pkgconfig
    goos:
      - linux
    goarch:
      - arm
    goarm:
      - "5"
      - "6"
      - "7"
    flags:
      - -tags=netgo
    ldflags:
      - "-extldflags '-static'"
      - -s -w -X github.com/navidrome/navidrome/consts.gitSha={{.ShortCommit}} -X github.com/navidrome/navidrome/consts.gitTag={{.Version}}

  - id: navidrome_linux_arm64
    env:
      - CGO_ENABLED=1
      - CC=aarch64-linux-gnu-gcc
      - CXX=aarch64-linux-gnu-g++
      - PKG_CONFIG_PATH=/arm64/lib/pkgconfig
    goos:
      - linux
    goarch:
      - arm64
    flags:
      - -tags=netgo
    ldflags:
      - "-extldflags '-static'"
      - -s -w -X github.com/navidrome/navidrome/consts.gitSha={{.ShortCommit}} -X github.com/navidrome/navidrome/consts.gitTag={{.Version}}

  - id: navidrome_windows_386
    env:
      - CGO_ENABLED=1
      - CC=i686-w64-mingw32-gcc
      - CXX=i686-w64-mingw32-g++
      - PKG_CONFIG_PATH=/mingw32/lib/pkgconfig
    goos:
      - windows
    goarch:
      - "386"
    flags:
      - -tags=netgo
    ldflags:
      - "-extldflags '-static'"
      - -s -w -X github.com/navidrome/navidrome/consts.gitSha={{.ShortCommit}} -X github.com/navidrome/navidrome/consts.gitTag={{.Version}}

  - id: navidrome_windows_amd64
    env:
      - CGO_ENABLED=1
      - CC=x86_64-w64-mingw32-gcc
      - CXX=x86_64-w64-mingw32-g++
      - PKG_CONFIG_PATH=/mingw64/lib/pkgconfig
    goos:
      - windows
    goarch:
      - amd64
    flags:
      - -tags=netgo
    ldflags:
      - "-extldflags '-static'"
      - -s -w -X github.com/navidrome/navidrome/consts.gitSha={{.ShortCommit}} -X github.com/navidrome/navidrome/consts.gitTag={{.Version}}

  - id: navidrome_darwin_amd64
    env:
      - CGO_ENABLED=1
      - CC=o64-clang
      - CXX=o64-clang++
      - PKG_CONFIG_PATH=/darwin/lib/pkgconfig
    goos:
      - darwin
    goarch:
      - amd64
    flags:
      - -tags=netgo
    ldflags:
      - -s -w -X github.com/navidrome/navidrome/consts.gitSha={{.ShortCommit}} -X github.com/navidrome/navidrome/consts.gitTag={{.Version}}

archives:
  - format_overrides:
      - goos: windows
        format: zip

checksum:
  name_template: "{{ .ProjectName }}_checksums.txt"

snapshot:
  version_template: "{{ .Tag }}-SNAPSHOT"

release:
  draft: true

changelog:
  # sort: asc
  filters:
    exclude:
      - "^docs:"
@@ -48,14 +48,15 @@ This improves the readability of the messages
It can be one of the following:
1. **feat**: Addition of a new feature
2. **fix**: Bug fix
3. **docs**: Documentation Changes
4. **style**: Changes to styling
5. **refactor**: Refactoring of code
6. **perf**: Code that affects performance
7. **test**: Updating or improving the current tests
8. **build**: Changes to Build process
9. **revert**: Reverting to a previous commit
10. **chore**: Updating grunt tasks etc
3. **sec**: Fixing security issues
4. **docs**: Documentation Changes
5. **style**: Changes to styling
6. **refactor**: Refactoring of code
7. **perf**: Code that affects performance
8. **test**: Updating or improving the current tests
9. **build**: Changes to Build process
10. **revert**: Reverting to a previous commit
11. **chore**: Updating grunt tasks etc

If there is a breaking change in your Pull Request, please add `BREAKING CHANGE` in the optional body section
Dockerfile (new file, 147 lines)
@@ -0,0 +1,147 @@
FROM --platform=$BUILDPLATFORM ghcr.io/crazy-max/osxcross:14.5-debian AS osxcross

########################################################################################################################
### Build xx (original image: tonistiigi/xx)
FROM --platform=$BUILDPLATFORM public.ecr.aws/docker/library/alpine:3.19 AS xx-build

# v1.5.0
ENV XX_VERSION=b4e4c451c778822e6742bfc9d9a91d7c7d885c8a

RUN apk add -U --no-cache git
RUN git clone https://github.com/tonistiigi/xx && \
    cd xx && \
    git checkout ${XX_VERSION} && \
    mkdir -p /out && \
    cp src/xx-* /out/

RUN cd /out && \
    ln -s xx-cc /out/xx-clang && \
    ln -s xx-cc /out/xx-clang++ && \
    ln -s xx-cc /out/xx-c++ && \
    ln -s xx-apt /out/xx-apt-get

# xx mimics the original tonistiigi/xx image
FROM scratch AS xx
COPY --from=xx-build /out/ /usr/bin/

########################################################################################################################
### Get TagLib
FROM --platform=$BUILDPLATFORM public.ecr.aws/docker/library/alpine:3.19 AS taglib-build
ARG TARGETPLATFORM
ARG CROSS_TAGLIB_VERSION=2.1.1-1
ENV CROSS_TAGLIB_RELEASES_URL=https://github.com/navidrome/cross-taglib/releases/download/v${CROSS_TAGLIB_VERSION}/

# wget in busybox can't follow redirects
RUN <<EOT
    apk add --no-cache wget
    PLATFORM=$(echo ${TARGETPLATFORM} | tr '/' '-')
    FILE=taglib-${PLATFORM}.tar.gz

    DOWNLOAD_URL=${CROSS_TAGLIB_RELEASES_URL}${FILE}
    wget ${DOWNLOAD_URL}

    mkdir /taglib
    tar -xzf ${FILE} -C /taglib
EOT

########################################################################################################################
### Build Navidrome UI
FROM --platform=$BUILDPLATFORM public.ecr.aws/docker/library/node:lts-alpine AS ui
WORKDIR /app

# Install node dependencies
COPY ui/package.json ui/package-lock.json ./
COPY ui/bin/ ./bin/
RUN npm ci

# Build bundle
COPY ui/ ./
RUN npm run build -- --outDir=/build

FROM scratch AS ui-bundle
COPY --from=ui /build /build

########################################################################################################################
### Build Navidrome binary
FROM --platform=$BUILDPLATFORM public.ecr.aws/docker/library/golang:1.25-bookworm AS base
RUN apt-get update && apt-get install -y clang lld
COPY --from=xx / /
WORKDIR /workspace

FROM --platform=$BUILDPLATFORM base AS build

# Install build dependencies for the target platform
ARG TARGETPLATFORM

RUN xx-apt install -y binutils gcc g++ libc6-dev zlib1g-dev
RUN xx-verify --setup

RUN --mount=type=bind,source=. \
    --mount=type=cache,target=/root/.cache \
    --mount=type=cache,target=/go/pkg/mod \
    go mod download

ARG GIT_SHA
ARG GIT_TAG

RUN --mount=type=bind,source=. \
    --mount=from=ui,source=/build,target=./ui/build,ro \
    --mount=from=osxcross,src=/osxcross/SDK,target=/xx-sdk,ro \
    --mount=type=cache,target=/root/.cache \
    --mount=type=cache,target=/go/pkg/mod \
    --mount=from=taglib-build,target=/taglib,src=/taglib,ro <<EOT

    # Setup CGO cross-compilation environment
    xx-go --wrap
    export CGO_ENABLED=1
    export PKG_CONFIG_PATH=/taglib/lib/pkgconfig
    cat $(go env GOENV)

    # Only Darwin (macOS) requires clang (default), Windows requires gcc, everything else can use any compiler.
    # So let's use gcc for everything except Darwin.
    if [ "$(xx-info os)" != "darwin" ]; then
|
||||
export CC=$(xx-info)-gcc
|
||||
export CXX=$(xx-info)-g++
|
||||
export LD_EXTRA="-extldflags '-static -latomic'"
|
||||
fi
|
||||
if [ "$(xx-info os)" = "windows" ]; then
|
||||
export EXT=".exe"
|
||||
fi
|
||||
|
||||
go build -tags=netgo -ldflags="${LD_EXTRA} -w -s \
|
||||
-X github.com/navidrome/navidrome/consts.gitSha=${GIT_SHA} \
|
||||
-X github.com/navidrome/navidrome/consts.gitTag=${GIT_TAG}" \
|
||||
-o /out/navidrome${EXT} .
|
||||
EOT
|
||||
|
||||
# Verify if the binary was built for the correct platform and it is statically linked
|
||||
RUN xx-verify --static /out/navidrome*
|
||||
|
||||
FROM scratch AS binary
|
||||
COPY --from=build /out /
|
||||
|
||||
########################################################################################################################
|
||||
### Build Final Image
|
||||
FROM public.ecr.aws/docker/library/alpine:3.19 AS final
|
||||
LABEL maintainer="deluan@navidrome.org"
|
||||
LABEL org.opencontainers.image.source="https://github.com/navidrome/navidrome"
|
||||
|
||||
# Install ffmpeg and mpv
|
||||
RUN apk add -U --no-cache ffmpeg mpv sqlite
|
||||
|
||||
# Copy navidrome binary
|
||||
COPY --from=build /out/navidrome /app/
|
||||
|
||||
VOLUME ["/data", "/music"]
|
||||
ENV ND_MUSICFOLDER=/music
|
||||
ENV ND_DATAFOLDER=/data
|
||||
ENV ND_CONFIGFILE=/data/navidrome.toml
|
||||
ENV ND_PORT=4533
|
||||
ENV GODEBUG="asyncpreemptoff=1"
|
||||
RUN touch /.nddockerenv
|
||||
|
||||
EXPOSE ${ND_PORT}
|
||||
WORKDIR /app
|
||||
|
||||
ENTRYPOINT ["/app/navidrome"]
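Taken together, the stages above expose two build targets worth calling out: `binary` (only the cross-compiled executable) and `final` (the runnable Alpine image). A rough sketch of how they might be invoked directly with BuildKit, mirroring the `docker-build` and `docker-image` targets in the Makefile shown next (platform list, tag and GIT_* values are placeholders):

# Hypothetical invocations; GIT_TAG/GIT_SHA are placeholder values
docker buildx build --platform linux/amd64 --build-arg GIT_TAG=v0.0.0-SNAPSHOT --build-arg GIT_SHA=abc1234 --output ./binaries --target binary .
docker buildx build --platform linux/amd64,linux/arm64 --build-arg GIT_TAG=v0.0.0-SNAPSHOT --build-arg GIT_SHA=abc1234 --tag deluan/navidrome:develop .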
|
||||
|
||||
191
Makefile
@@ -3,43 +3,89 @@ NODE_VERSION=$(shell cat .nvmrc)
|
||||
|
||||
ifneq ("$(wildcard .git/HEAD)","")
|
||||
GIT_SHA=$(shell git rev-parse --short HEAD)
|
||||
GIT_TAG=$(shell git describe --tags `git rev-list --tags --max-count=1`)
|
||||
GIT_TAG=$(shell git describe --tags `git rev-list --tags --max-count=1`)-SNAPSHOT
|
||||
else
|
||||
GIT_SHA=source_archive
|
||||
GIT_TAG=$(patsubst navidrome-%,v%,$(notdir $(PWD)))
|
||||
GIT_TAG=$(patsubst navidrome-%,v%,$(notdir $(PWD)))-SNAPSHOT
|
||||
endif
|
||||
|
||||
CI_RELEASER_VERSION ?= 1.23.0-1 ## https://github.com/navidrome/ci-goreleaser
|
||||
SUPPORTED_PLATFORMS ?= linux/amd64,linux/arm64,linux/arm/v5,linux/arm/v6,linux/arm/v7,linux/386,darwin/amd64,darwin/arm64,windows/amd64,windows/386
|
||||
IMAGE_PLATFORMS ?= $(shell echo $(SUPPORTED_PLATFORMS) | tr ',' '\n' | grep "linux" | grep -v "arm/v5" | tr '\n' ',' | sed 's/,$$//')
|
||||
PLATFORMS ?= $(SUPPORTED_PLATFORMS)
|
||||
DOCKER_TAG ?= deluan/navidrome:develop
|
||||
|
||||
# Taglib version to use in cross-compilation, from https://github.com/navidrome/cross-taglib
|
||||
CROSS_TAGLIB_VERSION ?= 2.1.1-1
|
||||
GOLANGCI_LINT_VERSION ?= v2.5.0
|
||||
|
||||
UI_SRC_FILES := $(shell find ui -type f -not -path "ui/build/*" -not -path "ui/node_modules/*")
|
||||
|
||||
setup: check_env download-deps setup-git ##@1_Run_First Install dependencies and prepare development environment
|
||||
setup: check_env download-deps install-golangci-lint setup-git ##@1_Run_First Install dependencies and prepare development environment
|
||||
@echo Downloading Node dependencies...
|
||||
@(cd ./ui && npm ci)
|
||||
.PHONY: setup
|
||||
|
||||
dev: check_env ##@Development Start Navidrome in development mode, with hot-reload for both frontend and backend
|
||||
npx foreman -j Procfile.dev -p 4533 start
|
||||
ND_ENABLEINSIGHTSCOLLECTOR="false" npx foreman -j Procfile.dev -p 4533 start
|
||||
.PHONY: dev
|
||||
|
||||
server: check_go_env buildjs ##@Development Start the backend in development mode
|
||||
@go run github.com/cespare/reflex@latest -d none -c reflex.conf
|
||||
@ND_ENABLEINSIGHTSCOLLECTOR="false" go tool reflex -d none -c reflex.conf
|
||||
.PHONY: server
|
||||
|
||||
stop: ##@Development Stop development servers (UI and backend)
|
||||
@echo "Stopping development servers..."
|
||||
@-pkill -f "vite"
|
||||
@-pkill -f "go tool reflex.*reflex.conf"
|
||||
@-pkill -f "go run.*netgo"
|
||||
@echo "Development servers stopped."
|
||||
.PHONY: stop
|
||||
|
||||
watch: ##@Development Start Go tests in watch mode (re-run when code changes)
|
||||
go run github.com/onsi/ginkgo/v2/ginkgo@latest watch -notify ./...
|
||||
go tool ginkgo watch -tags=netgo -notify ./...
|
||||
.PHONY: watch
|
||||
|
||||
test: ##@Development Run Go tests
|
||||
go test -race -shuffle=on ./...
|
||||
PKG ?= ./...
|
||||
test: ##@Development Run Go tests. Use PKG variable to specify packages to test, e.g. make test PKG=./server
|
||||
go test -tags netgo $(PKG)
|
||||
.PHONY: test
|
||||
|
||||
testall: test ##@Development Run Go and JS tests
|
||||
@(cd ./ui && npm test -- --watchAll=false)
|
||||
testall: test-race test-i18n test-js ##@Development Run Go and JS tests
|
||||
.PHONY: testall
|
||||
|
||||
lint: ##@Development Lint Go code
|
||||
go run github.com/golangci/golangci-lint/cmd/golangci-lint@latest run -v --timeout 5m
|
||||
test-race: ##@Development Run Go tests with race detector
|
||||
go test -tags netgo -race -shuffle=on ./...
|
||||
.PHONY: test-race
|
||||
|
||||
test-js: ##@Development Run JS tests
|
||||
@(cd ./ui && npm run test)
|
||||
.PHONY: test-js
|
||||
|
||||
test-i18n: ##@Development Validate all translations files
|
||||
./.github/workflows/validate-translations.sh
|
||||
.PHONY: test-i18n
|
||||
|
||||
install-golangci-lint: ##@Development Install golangci-lint if not present
|
||||
@INSTALL=false; \
|
||||
if PATH=$$PATH:./bin which golangci-lint > /dev/null 2>&1; then \
|
||||
CURRENT_VERSION=$$(PATH=$$PATH:./bin golangci-lint version 2>/dev/null | grep -oE '[0-9]+\.[0-9]+\.[0-9]+' | head -n1); \
|
||||
REQUIRED_VERSION=$$(echo "$(GOLANGCI_LINT_VERSION)" | sed 's/^v//'); \
|
||||
if [ "$$CURRENT_VERSION" != "$$REQUIRED_VERSION" ]; then \
|
||||
echo "Found golangci-lint $$CURRENT_VERSION, but $$REQUIRED_VERSION is required. Reinstalling..."; \
|
||||
rm -f ./bin/golangci-lint; \
|
||||
INSTALL=true; \
|
||||
fi; \
|
||||
else \
|
||||
INSTALL=true; \
|
||||
fi; \
|
||||
if [ "$$INSTALL" = "true" ]; then \
|
||||
echo "Installing golangci-lint $(GOLANGCI_LINT_VERSION)..."; \
|
||||
curl -sSfL https://raw.githubusercontent.com/golangci/golangci-lint/HEAD/install.sh | sh -s $(GOLANGCI_LINT_VERSION); \
|
||||
fi
|
||||
.PHONY: install-golangci-lint
|
||||
|
||||
lint: install-golangci-lint ##@Development Lint Go code
|
||||
PATH=$$PATH:./bin golangci-lint run -v --timeout 5m
|
||||
.PHONY: lint
|
||||
|
||||
lintall: lint ##@Development Lint Go and JS code
|
||||
@@ -49,16 +95,16 @@ lintall: lint ##@Development Lint Go and JS code
|
||||
|
||||
format: ##@Development Format code
|
||||
@(cd ./ui && npm run prettier)
|
||||
@go run golang.org/x/tools/cmd/goimports@latest -w `find . -name '*.go' | grep -v _gen.go$$`
|
||||
@go tool goimports -w `find . -name '*.go' | grep -v _gen.go$$ | grep -v .pb.go$$`
|
||||
@go mod tidy
|
||||
.PHONY: format
|
||||
|
||||
wire: check_go_env ##@Development Update Dependency Injection
|
||||
go run github.com/google/wire/cmd/wire@latest ./...
|
||||
go tool wire gen -tags=netgo ./...
|
||||
.PHONY: wire
|
||||
|
||||
snapshots: ##@Development Update (GoLang) Snapshot tests
|
||||
UPDATE_SNAPSHOTS=true go run github.com/onsi/ginkgo/v2/ginkgo@latest ./server/subsonic/...
|
||||
UPDATE_SNAPSHOTS=true go tool ginkgo ./server/subsonic/responses/...
|
||||
.PHONY: snapshots
|
||||
|
||||
migration-sql: ##@Development Create an empty SQL migration file
|
||||
@@ -81,53 +127,89 @@ setup-git: ##@Development Setup Git hooks (pre-commit and pre-push)
|
||||
.PHONY: setup-git
|
||||
|
||||
build: check_go_env buildjs ##@Build Build the project
|
||||
go build -ldflags="-X github.com/navidrome/navidrome/consts.gitSha=$(GIT_SHA) -X github.com/navidrome/navidrome/consts.gitTag=$(GIT_TAG)-SNAPSHOT" -tags=netgo
|
||||
go build -ldflags="-X github.com/navidrome/navidrome/consts.gitSha=$(GIT_SHA) -X github.com/navidrome/navidrome/consts.gitTag=$(GIT_TAG)" -tags=netgo
|
||||
.PHONY: build
|
||||
|
||||
buildall: deprecated build
|
||||
.PHONY: buildall
|
||||
|
||||
debug-build: check_go_env buildjs ##@Build Build the project (with remote debug on)
|
||||
go build -gcflags="all=-N -l" -ldflags="-X github.com/navidrome/navidrome/consts.gitSha=$(GIT_SHA) -X github.com/navidrome/navidrome/consts.gitTag=$(GIT_TAG)-SNAPSHOT" -tags=netgo
|
||||
go build -gcflags="all=-N -l" -ldflags="-X github.com/navidrome/navidrome/consts.gitSha=$(GIT_SHA) -X github.com/navidrome/navidrome/consts.gitTag=$(GIT_TAG)" -tags=netgo
|
||||
.PHONY: debug-build
|
||||
|
||||
buildjs: check_node_env ui/build/index.html ##@Build Build only frontend
|
||||
.PHONY: buildjs
|
||||
|
||||
docker-buildjs: ##@Build Build only frontend using Docker
|
||||
docker build --output "./ui" --target ui-bundle .
|
||||
.PHONY: docker-buildjs
|
||||
|
||||
ui/build/index.html: $(UI_SRC_FILES)
|
||||
@(cd ./ui && npm run build)
|
||||
|
||||
all: buildjs ##@Cross_Compilation Build binaries for all supported platforms.
|
||||
@echo "Building binaries for all platforms using builder ${CI_RELEASER_VERSION}"
|
||||
docker run -t -v $(PWD):/workspace -w /workspace deluan/ci-goreleaser:$(CI_RELEASER_VERSION) \
|
||||
goreleaser release --clean --skip=publish --snapshot
|
||||
.PHONY: all
|
||||
docker-platforms: ##@Cross_Compilation List supported platforms
|
||||
@echo "Supported platforms:"
|
||||
@echo "$(SUPPORTED_PLATFORMS)" | tr ',' '\n' | sort | sed 's/^/ /'
|
||||
@echo "\nUsage: make PLATFORMS=\"linux/amd64\" docker-build"
|
||||
@echo " make IMAGE_PLATFORMS=\"linux/amd64\" docker-image"
|
||||
.PHONY: docker-platforms
|
||||
|
||||
single: buildjs ##@Cross_Compilation Build binaries for a single supported platforms.
|
||||
@if [ -z "${GOOS}" -o -z "${GOARCH}" ]; then \
|
||||
echo "Usage: GOOS=<os> GOARCH=<arch> make single"; \
|
||||
echo "Options:"; \
|
||||
grep -- "- id: navidrome_" .goreleaser.yml | sed 's/- id: navidrome_//g'; \
|
||||
exit 1; \
|
||||
fi
|
||||
@echo "Building binaries for ${GOOS}/${GOARCH} using builder ${CI_RELEASER_VERSION}"
|
||||
docker run -t -v $(PWD):/workspace -e GOOS -e GOARCH -w /workspace deluan/ci-goreleaser:$(CI_RELEASER_VERSION) \
|
||||
goreleaser build --clean --snapshot -p 2 --single-target --id navidrome_${GOOS}_${GOARCH}
|
||||
.PHONY: single
|
||||
docker-build: ##@Cross_Compilation Cross-compile for any supported platform (check `make docker-platforms`)
|
||||
docker buildx build \
|
||||
--platform $(PLATFORMS) \
|
||||
--build-arg GIT_TAG=${GIT_TAG} \
|
||||
--build-arg GIT_SHA=${GIT_SHA} \
|
||||
--build-arg CROSS_TAGLIB_VERSION=${CROSS_TAGLIB_VERSION} \
|
||||
--output "./binaries" --target binary .
|
||||
.PHONY: docker-build
|
||||
|
||||
docker: buildjs ##@Build Build Docker linux/amd64 image (tagged as `deluan/navidrome:develop`)
|
||||
GOOS=linux GOARCH=amd64 make single
|
||||
@echo "Building Docker image"
|
||||
docker build . --platform linux/amd64 -t deluan/navidrome:develop -f .github/workflows/pipeline.dockerfile
|
||||
.PHONY: docker
|
||||
docker-image: ##@Cross_Compilation Build Docker image, tagged as `deluan/navidrome:develop`, override with DOCKER_TAG var. Use IMAGE_PLATFORMS to specify target platforms
|
||||
@echo $(IMAGE_PLATFORMS) | grep -q "windows" && echo "ERROR: Windows is not supported for Docker builds" && exit 1 || true
|
||||
@echo $(IMAGE_PLATFORMS) | grep -q "darwin" && echo "ERROR: macOS is not supported for Docker builds" && exit 1 || true
|
||||
@echo $(IMAGE_PLATFORMS) | grep -q "arm/v5" && echo "ERROR: Linux ARMv5 is not supported for Docker builds" && exit 1 || true
|
||||
docker buildx build \
|
||||
--platform $(IMAGE_PLATFORMS) \
|
||||
--build-arg GIT_TAG=${GIT_TAG} \
|
||||
--build-arg GIT_SHA=${GIT_SHA} \
|
||||
--build-arg CROSS_TAGLIB_VERSION=${CROSS_TAGLIB_VERSION} \
|
||||
--tag $(DOCKER_TAG) .
|
||||
.PHONY: docker-image
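For example, a hypothetical invocation overriding both variables (the platform list and tag below are placeholders, not defaults):

make docker-image IMAGE_PLATFORMS="linux/amd64,linux/arm64" DOCKER_TAG=myregistry/navidrome:test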
|
||||
|
||||
docker-msi: ##@Cross_Compilation Build MSI installer for Windows
|
||||
make docker-build PLATFORMS=windows/386,windows/amd64
|
||||
DOCKER_CLI_HINTS=false docker build -q -t navidrome-msi-builder -f release/wix/msitools.dockerfile .
|
||||
@rm -rf binaries/msi
|
||||
docker run -it --rm -v $(PWD):/workspace -v $(PWD)/binaries:/workspace/binaries -e GIT_TAG=${GIT_TAG} \
|
||||
navidrome-msi-builder sh -c "release/wix/build_msi.sh /workspace 386 && release/wix/build_msi.sh /workspace amd64"
|
||||
@du -h binaries/msi/*.msi
|
||||
.PHONY: docker-msi
|
||||
|
||||
run-docker: ##@Development Run a Navidrome Docker image. Usage: make run-docker tag=<tag>
|
||||
@if [ -z "$(tag)" ]; then echo "Usage: make run-docker tag=<tag>"; exit 1; fi
|
||||
@TAG_DIR="tmp/$$(echo '$(tag)' | tr '/:' '_')"; mkdir -p "$$TAG_DIR"; \
|
||||
VOLUMES="-v $(PWD)/$$TAG_DIR:/data"; \
|
||||
if [ -f navidrome.toml ]; then \
|
||||
VOLUMES="$$VOLUMES -v $(PWD)/navidrome.toml:/data/navidrome.toml:ro"; \
|
||||
MUSIC_FOLDER=$$(grep '^MusicFolder' navidrome.toml | head -n1 | sed 's/.*= *"//' | sed 's/".*//'); \
|
||||
if [ -n "$$MUSIC_FOLDER" ] && [ -d "$$MUSIC_FOLDER" ]; then \
|
||||
VOLUMES="$$VOLUMES -v $$MUSIC_FOLDER:/music:ro"; \
|
||||
fi; \
|
||||
fi; \
|
||||
echo "Running: docker run --rm -p 4533:4533 $$VOLUMES $(tag)"; docker run --rm -p 4533:4533 $$VOLUMES $(tag)
|
||||
.PHONY: run-docker
|
||||
|
||||
package: docker-build ##@Cross_Compilation Create binaries and packages for ALL supported platforms
|
||||
@if [ -z `which goreleaser` ]; then echo "Please install goreleaser first: https://goreleaser.com/install/"; exit 1; fi
|
||||
goreleaser release -f release/goreleaser.yml --clean --skip=publish --snapshot
|
||||
.PHONY: package
|
||||
|
||||
get-music: ##@Development Download some free music from Navidrome's demo instance
|
||||
mkdir -p music
|
||||
( cd music; \
|
||||
curl "https://demo.navidrome.org/rest/download?u=demo&p=demo&f=json&v=1.8.0&c=dev_download&id=ec2093ec4801402f1e17cc462195cdbb" > brock.zip; \
|
||||
curl "https://demo.navidrome.org/rest/download?u=demo&p=demo&f=json&v=1.8.0&c=dev_download&id=b376eeb4652d2498aa2b25ba0696725e" > back_on_earth.zip; \
|
||||
curl "https://demo.navidrome.org/rest/download?u=demo&p=demo&f=json&v=1.8.0&c=dev_download&id=e49c609b542fc51899ee8b53aa858cb4" > ugress.zip; \
|
||||
curl "https://demo.navidrome.org/rest/download?u=demo&p=demo&f=json&v=1.8.0&c=dev_download&id=350bcab3a4c1d93869e39ce496464f03" > voodoocuts.zip; \
|
||||
curl "https://demo.navidrome.org/rest/download?u=demo&p=demo&f=json&v=1.8.0&c=dev_download&id=2Y3qQA6zJC3ObbBrF9ZBoV" > brock.zip; \
|
||||
curl "https://demo.navidrome.org/rest/download?u=demo&p=demo&f=json&v=1.8.0&c=dev_download&id=04HrSORpypcLGNUdQp37gn" > back_on_earth.zip; \
|
||||
curl "https://demo.navidrome.org/rest/download?u=demo&p=demo&f=json&v=1.8.0&c=dev_download&id=5xcMPJdeEgNrGtnzYbzAqb" > ugress.zip; \
|
||||
curl "https://demo.navidrome.org/rest/download?u=demo&p=demo&f=json&v=1.8.0&c=dev_download&id=1jjQMAZrG3lUsJ0YH6ZRS0" > voodoocuts.zip; \
|
||||
for file in *.zip; do unzip -n $${file}; done )
|
||||
@echo "Done. Remember to set your MusicFolder to ./music"
|
||||
.PHONY: get-music
|
||||
@@ -136,6 +218,11 @@ get-music: ##@Development Download some free music from Navidrome's demo instanc
|
||||
##########################################
|
||||
#### Miscellaneous
|
||||
|
||||
clean:
|
||||
@rm -rf ./binaries ./dist ./ui/build/*
|
||||
@touch ./ui/build/.gitkeep
|
||||
.PHONY: clean
|
||||
|
||||
release:
|
||||
@if [[ ! "${V}" =~ ^[0-9]+\.[0-9]+\.[0-9]+.*$$ ]]; then echo "Usage: make release V=X.X.X"; exit 1; fi
|
||||
go mod tidy
|
||||
@@ -179,6 +266,24 @@ deprecated:
|
||||
@echo "WARNING: This target is deprecated and will be removed in future releases. Use 'make build' instead."
|
||||
.PHONY: deprecated
|
||||
|
||||
# Generate Go code from plugins/api/api.proto
|
||||
plugin-gen: check_go_env ##@Development Generate Go code from plugins protobuf files
|
||||
go generate ./plugins/...
|
||||
.PHONY: plugin-gen
|
||||
|
||||
plugin-examples: check_go_env ##@Development Build all example plugins
|
||||
$(MAKE) -C plugins/examples clean all
|
||||
.PHONY: plugin-examples
|
||||
|
||||
plugin-clean: check_go_env ##@Development Clean all plugins
|
||||
$(MAKE) -C plugins/examples clean
|
||||
$(MAKE) -C plugins/testdata clean
|
||||
.PHONY: plugin-clean
|
||||
|
||||
plugin-tests: check_go_env ##@Development Build all test plugins
|
||||
$(MAKE) -C plugins/testdata clean all
|
||||
.PHONY: plugin-tests
|
||||
|
||||
.DEFAULT_GOAL := help
|
||||
|
||||
HELP_FUN = \
|
||||
|
||||
@@ -1,2 +1,2 @@
|
||||
JS: sh -c "cd ./ui && npm start"
|
||||
GO: go run github.com/cespare/reflex@latest -d none -c reflex.conf
|
||||
GO: go tool reflex -d none -c reflex.conf
|
||||
|
||||
12
README.md
@@ -7,8 +7,9 @@
|
||||
[](https://github.com/navidrome/navidrome/releases/latest)
|
||||
[](https://hub.docker.com/r/deluan/navidrome)
|
||||
[](https://discord.gg/xh7j7yF)
|
||||
[](https://www.reddit.com/r/navidrome/)
|
||||
[](https://www.reddit.com/r/navidrome/)
|
||||
[](CODE_OF_CONDUCT.md)
|
||||
[](https://gurubase.io/g/navidrome)
|
||||
|
||||
Navidrome is an open source web-based music collection server and streamer. It gives you freedom to listen to your
|
||||
music collection from any browser or mobile device. It's like your personal Spotify!
|
||||
@@ -56,6 +57,15 @@ A share of the revenue helps fund the development of Navidrome at no additional
|
||||
- **Transcoding** on the fly. Can be set per user/player. **Opus encoding is supported**
|
||||
- Translated to **various languages**
|
||||
|
||||
## Translations
|
||||
|
||||
Navidrome uses [POEditor](https://poeditor.com/) for translations, and we are always looking
|
||||
for [more contributors](https://www.navidrome.org/docs/developers/translations/)
|
||||
|
||||
<a href="https://poeditor.com/">
|
||||
<img height="32" src="https://github.com/user-attachments/assets/c19b1d2b-01e1-4682-a007-12356c42147c">
|
||||
</a>
|
||||
|
||||
## Documentation
|
||||
All documentation can be found in the project's website: https://www.navidrome.org/docs.
|
||||
Here are some useful direct links:
|
||||
|
||||
278
adapters/taglib/end_to_end_test.go
Normal file
@@ -0,0 +1,278 @@
|
||||
package taglib
|
||||
|
||||
import (
|
||||
"io/fs"
|
||||
"os"
|
||||
"time"
|
||||
|
||||
"github.com/djherbis/times"
|
||||
"github.com/navidrome/navidrome/model"
|
||||
"github.com/navidrome/navidrome/model/metadata"
|
||||
"github.com/navidrome/navidrome/utils/gg"
|
||||
. "github.com/onsi/ginkgo/v2"
|
||||
. "github.com/onsi/gomega"
|
||||
)
|
||||
|
||||
type testFileInfo struct {
|
||||
fs.FileInfo
|
||||
}
|
||||
|
||||
func (t testFileInfo) BirthTime() time.Time {
|
||||
if ts := times.Get(t.FileInfo); ts.HasBirthTime() {
|
||||
return ts.BirthTime()
|
||||
}
|
||||
return t.FileInfo.ModTime()
|
||||
}
|
||||
|
||||
var _ = Describe("Extractor", func() {
|
||||
toP := func(name, sortName, mbid string) model.Participant {
|
||||
return model.Participant{
|
||||
Artist: model.Artist{Name: name, SortArtistName: sortName, MbzArtistID: mbid},
|
||||
}
|
||||
}
|
||||
|
||||
roles := []struct {
|
||||
model.Role
|
||||
model.ParticipantList
|
||||
}{
|
||||
{model.RoleComposer, model.ParticipantList{
|
||||
toP("coma a", "a, coma", "bf13b584-f27c-43db-8f42-32898d33d4e2"),
|
||||
toP("comb", "comb", "924039a2-09c6-4d29-9b4f-50cc54447d36"),
|
||||
}},
|
||||
{model.RoleLyricist, model.ParticipantList{
|
||||
toP("la a", "a, la", "c84f648f-68a6-40a2-a0cb-d135b25da3c2"),
|
||||
toP("lb", "lb", "0a7c582d-143a-4540-b4e9-77200835af65"),
|
||||
}},
|
||||
{model.RoleArranger, model.ParticipantList{
|
||||
toP("aa", "", "4605a1d4-8d15-42a3-bd00-9c20e42f71e6"),
|
||||
toP("ab", "", "002f0ff8-77bf-42cc-8216-61a9c43dc145"),
|
||||
}},
|
||||
{model.RoleConductor, model.ParticipantList{
|
||||
toP("cona", "", "af86879b-2141-42af-bad2-389a4dc91489"),
|
||||
toP("conb", "", "3dfa3c70-d7d3-4b97-b953-c298dd305e12"),
|
||||
}},
|
||||
{model.RoleDirector, model.ParticipantList{
|
||||
toP("dia", "", "f943187f-73de-4794-be47-88c66f0fd0f4"),
|
||||
toP("dib", "", "bceb75da-1853-4b3d-b399-b27f0cafc389"),
|
||||
}},
|
||||
{model.RoleEngineer, model.ParticipantList{
|
||||
toP("ea", "", "f634bf6d-d66a-425d-888a-28ad39392759"),
|
||||
toP("eb", "", "243d64ae-d514-44e1-901a-b918d692baee"),
|
||||
}},
|
||||
{model.RoleProducer, model.ParticipantList{
|
||||
toP("pra", "", "d971c8d7-999c-4a5f-ac31-719721ab35d6"),
|
||||
toP("prb", "", "f0a09070-9324-434f-a599-6d25ded87b69"),
|
||||
}},
|
||||
{model.RoleRemixer, model.ParticipantList{
|
||||
toP("ra", "", "c7dc6095-9534-4c72-87cc-aea0103462cf"),
|
||||
toP("rb", "", "8ebeef51-c08c-4736-992f-c37870becedd"),
|
||||
}},
|
||||
{model.RoleDJMixer, model.ParticipantList{
|
||||
toP("dja", "", "d063f13b-7589-4efc-ab7f-c60e6db17247"),
|
||||
toP("djb", "", "3636670c-385f-4212-89c8-0ff51d6bc456"),
|
||||
}},
|
||||
{model.RoleMixer, model.ParticipantList{
|
||||
toP("ma", "", "53fb5a2d-7016-427e-a563-d91819a5f35a"),
|
||||
toP("mb", "", "64c13e65-f0da-4ab9-a300-71ee53b0376a"),
|
||||
}},
|
||||
}
|
||||
|
||||
var e *extractor
|
||||
|
||||
parseTestFile := func(path string) *model.MediaFile {
|
||||
mds, err := e.Parse(path)
|
||||
Expect(err).ToNot(HaveOccurred())
|
||||
|
||||
info, ok := mds[path]
|
||||
Expect(ok).To(BeTrue())
|
||||
|
||||
fileInfo, err := os.Stat(path)
|
||||
Expect(err).ToNot(HaveOccurred())
|
||||
info.FileInfo = testFileInfo{FileInfo: fileInfo}
|
||||
|
||||
metadata := metadata.New(path, info)
|
||||
mf := metadata.ToMediaFile(1, "folderID")
|
||||
return &mf
|
||||
}
|
||||
|
||||
BeforeEach(func() {
|
||||
e = &extractor{}
|
||||
})
|
||||
|
||||
Describe("ReplayGain", func() {
|
||||
DescribeTable("test replaygain end-to-end", func(file string, trackGain, trackPeak, albumGain, albumPeak *float64) {
|
||||
mf := parseTestFile("tests/fixtures/" + file)
|
||||
|
||||
Expect(mf.RGTrackGain).To(Equal(trackGain))
|
||||
Expect(mf.RGTrackPeak).To(Equal(trackPeak))
|
||||
Expect(mf.RGAlbumGain).To(Equal(albumGain))
|
||||
Expect(mf.RGAlbumPeak).To(Equal(albumPeak))
|
||||
},
|
||||
Entry("mp3 with no replaygain", "no_replaygain.mp3", nil, nil, nil, nil),
|
||||
Entry("mp3 with zero replaygain", "zero_replaygain.mp3", gg.P(0.0), gg.P(1.0), gg.P(0.0), gg.P(1.0)),
|
||||
)
|
||||
})
|
||||
|
||||
Describe("lyrics", func() {
|
||||
makeLyrics := func(code, secondLine string) model.Lyrics {
|
||||
return model.Lyrics{
|
||||
DisplayArtist: "",
|
||||
DisplayTitle: "",
|
||||
Lang: code,
|
||||
Line: []model.Line{
|
||||
{Start: gg.P(int64(0)), Value: "This is"},
|
||||
{Start: gg.P(int64(2500)), Value: secondLine},
|
||||
},
|
||||
Offset: nil,
|
||||
Synced: true,
|
||||
}
|
||||
}
|
||||
|
||||
It("should fetch both synced and unsynced lyrics in mixed flac", func() {
|
||||
mf := parseTestFile("tests/fixtures/mixed-lyrics.flac")
|
||||
|
||||
lyrics, err := mf.StructuredLyrics()
|
||||
Expect(err).ToNot(HaveOccurred())
|
||||
Expect(lyrics).To(HaveLen(2))
|
||||
|
||||
Expect(lyrics[0].Synced).To(BeTrue())
|
||||
Expect(lyrics[1].Synced).To(BeFalse())
|
||||
})
|
||||
|
||||
It("should handle mp3 with uslt and sylt", func() {
|
||||
mf := parseTestFile("tests/fixtures/test.mp3")
|
||||
|
||||
lyrics, err := mf.StructuredLyrics()
|
||||
Expect(err).ToNot(HaveOccurred())
|
||||
Expect(lyrics).To(HaveLen(4))
|
||||
|
||||
engSylt := makeLyrics("eng", "English SYLT")
|
||||
engUslt := makeLyrics("eng", "English")
|
||||
unsSylt := makeLyrics("xxx", "unspecified SYLT")
|
||||
unsUslt := makeLyrics("xxx", "unspecified")
|
||||
|
||||
// Why is the order inconsistent between runs? Nobody knows
|
||||
Expect(lyrics).To(Or(
|
||||
Equal(model.LyricList{engSylt, engUslt, unsSylt, unsUslt}),
|
||||
Equal(model.LyricList{unsSylt, unsUslt, engSylt, engUslt}),
|
||||
))
|
||||
})
|
||||
|
||||
DescribeTable("format-specific lyrics", func(file string, isId3 bool) {
|
||||
mf := parseTestFile("tests/fixtures/" + file)
|
||||
|
||||
lyrics, err := mf.StructuredLyrics()
|
||||
Expect(err).To(Not(HaveOccurred()))
|
||||
Expect(lyrics).To(HaveLen(2))
|
||||
|
||||
unspec := makeLyrics("xxx", "unspecified")
|
||||
eng := makeLyrics("xxx", "English")
|
||||
|
||||
if isId3 {
|
||||
eng.Lang = "eng"
|
||||
}
|
||||
|
||||
Expect(lyrics).To(Or(
|
||||
Equal(model.LyricList{unspec, eng}),
|
||||
Equal(model.LyricList{eng, unspec})))
|
||||
},
|
||||
Entry("flac", "test.flac", false),
|
||||
Entry("m4a", "test.m4a", false),
|
||||
Entry("ogg", "test.ogg", false),
|
||||
Entry("wma", "test.wma", false),
|
||||
Entry("wv", "test.wv", false),
|
||||
Entry("wav", "test.wav", true),
|
||||
Entry("aiff", "test.aiff", true),
|
||||
)
|
||||
})
|
||||
|
||||
Describe("Participants", func() {
|
||||
DescribeTable("test tags consistent across formats", func(format string) {
|
||||
mf := parseTestFile("tests/fixtures/test." + format)
|
||||
|
||||
for _, data := range roles {
|
||||
role := data.Role
|
||||
artists := data.ParticipantList
|
||||
|
||||
actual := mf.Participants[role]
|
||||
Expect(actual).To(HaveLen(len(artists)))
|
||||
|
||||
for i := range artists {
|
||||
actualArtist := actual[i]
|
||||
expectedArtist := artists[i]
|
||||
|
||||
Expect(actualArtist.Name).To(Equal(expectedArtist.Name))
|
||||
Expect(actualArtist.SortArtistName).To(Equal(expectedArtist.SortArtistName))
|
||||
Expect(actualArtist.MbzArtistID).To(Equal(expectedArtist.MbzArtistID))
|
||||
}
|
||||
}
|
||||
|
||||
if format != "m4a" {
|
||||
performers := mf.Participants[model.RolePerformer]
|
||||
Expect(performers).To(HaveLen(8))
|
||||
|
||||
rules := map[string][]string{
|
||||
"pgaa": {"2fd0b311-9fa8-4ff9-be5d-f6f3d16b835e", "Guitar"},
|
||||
"pgbb": {"223d030b-bf97-4c2a-ad26-b7f7bbe25c93", "Guitar", ""},
|
||||
"pvaa": {"cb195f72-448f-41c8-b962-3f3c13d09d38", "Vocals"},
|
||||
"pvbb": {"60a1f832-8ca2-49f6-8660-84d57f07b520", "Vocals", "Flute"},
|
||||
"pfaa": {"51fb40c-0305-4bf9-a11b-2ee615277725", "", "Flute"},
|
||||
}
|
||||
|
||||
for name, rule := range rules {
|
||||
mbid := rule[0]
|
||||
for i := 1; i < len(rule); i++ {
|
||||
found := false
|
||||
|
||||
for _, mapped := range performers {
|
||||
if mapped.Name == name && mapped.MbzArtistID == mbid && mapped.SubRole == rule[i] {
|
||||
found = true
|
||||
break
|
||||
}
|
||||
}
|
||||
|
||||
Expect(found).To(BeTrue(), "Could not find matching artist")
|
||||
}
|
||||
}
|
||||
}
|
||||
},
|
||||
Entry("FLAC format", "flac"),
|
||||
Entry("M4a format", "m4a"),
|
||||
Entry("OGG format", "ogg"),
|
||||
Entry("WV format", "wv"),
|
||||
|
||||
Entry("MP3 format", "mp3"),
|
||||
Entry("WAV format", "wav"),
|
||||
Entry("AIFF format", "aiff"),
|
||||
)
|
||||
|
||||
It("should parse wma", func() {
|
||||
mf := parseTestFile("tests/fixtures/test.wma")
|
||||
|
||||
for _, data := range roles {
|
||||
role := data.Role
|
||||
artists := data.ParticipantList
|
||||
actual := mf.Participants[role]
|
||||
|
||||
// WMA has no Arranger role
|
||||
if role == model.RoleArranger {
|
||||
Expect(actual).To(HaveLen(0))
|
||||
continue
|
||||
}
|
||||
|
||||
Expect(actual).To(HaveLen(len(artists)), role.String())
|
||||
|
||||
// For some bizarre reason, the order is inverted. We also don't get
|
||||
// sort names or MBIDs
|
||||
for i := range artists {
|
||||
idx := len(artists) - 1 - i
|
||||
|
||||
actualArtist := actual[i]
|
||||
expectedArtist := artists[idx]
|
||||
|
||||
Expect(actualArtist.Name).To(Equal(expectedArtist.Name))
|
||||
}
|
||||
}
|
||||
})
|
||||
})
|
||||
})
|
||||
178
adapters/taglib/taglib.go
Normal file
@@ -0,0 +1,178 @@
|
||||
package taglib
|
||||
|
||||
import (
|
||||
"io/fs"
|
||||
"path/filepath"
|
||||
"strconv"
|
||||
"strings"
|
||||
"time"
|
||||
|
||||
"github.com/navidrome/navidrome/conf"
|
||||
"github.com/navidrome/navidrome/core/storage/local"
|
||||
"github.com/navidrome/navidrome/log"
|
||||
"github.com/navidrome/navidrome/model/metadata"
|
||||
)
|
||||
|
||||
type extractor struct {
|
||||
baseDir string
|
||||
}
|
||||
|
||||
func (e extractor) Parse(files ...string) (map[string]metadata.Info, error) {
|
||||
results := make(map[string]metadata.Info)
|
||||
for _, path := range files {
|
||||
props, err := e.extractMetadata(path)
|
||||
if err != nil {
|
||||
continue
|
||||
}
|
||||
results[path] = *props
|
||||
}
|
||||
return results, nil
|
||||
}
|
||||
|
||||
func (e extractor) Version() string {
|
||||
return Version()
|
||||
}
|
||||
|
||||
func (e extractor) extractMetadata(filePath string) (*metadata.Info, error) {
|
||||
fullPath := filepath.Join(e.baseDir, filePath)
|
||||
tags, err := Read(fullPath)
|
||||
if err != nil {
|
||||
log.Warn("extractor: Error reading metadata from file. Skipping", "filePath", fullPath, err)
|
||||
return nil, err
|
||||
}
|
||||
|
||||
// Parse audio properties
|
||||
ap := metadata.AudioProperties{}
|
||||
ap.BitRate = parseProp(tags, "__bitrate")
|
||||
ap.Channels = parseProp(tags, "__channels")
|
||||
ap.SampleRate = parseProp(tags, "__samplerate")
|
||||
ap.BitDepth = parseProp(tags, "__bitspersample")
|
||||
length := parseProp(tags, "__lengthinmilliseconds")
|
||||
ap.Duration = (time.Millisecond * time.Duration(length)).Round(time.Millisecond * 10)
|
||||
|
||||
// Extract basic tags
|
||||
parseBasicTag(tags, "__title", "title")
|
||||
parseBasicTag(tags, "__artist", "artist")
|
||||
parseBasicTag(tags, "__album", "album")
|
||||
parseBasicTag(tags, "__comment", "comment")
|
||||
parseBasicTag(tags, "__genre", "genre")
|
||||
parseBasicTag(tags, "__year", "year")
|
||||
parseBasicTag(tags, "__track", "tracknumber")
|
||||
|
||||
// Parse track/disc totals
|
||||
parseTuple := func(prop string) {
|
||||
tagName := prop + "number"
|
||||
tagTotal := prop + "total"
|
||||
if value, ok := tags[tagName]; ok && len(value) > 0 {
|
||||
parts := strings.Split(value[0], "/")
|
||||
tags[tagName] = []string{parts[0]}
|
||||
if len(parts) == 2 {
|
||||
tags[tagTotal] = []string{parts[1]}
|
||||
}
|
||||
}
|
||||
}
|
||||
parseTuple("track")
|
||||
parseTuple("disc")
|
||||
|
||||
// Adjust some ID3 tags
|
||||
parseLyrics(tags)
|
||||
parseTIPL(tags)
|
||||
delete(tags, "tmcl") // TMCL is already parsed by TagLib
|
||||
|
||||
return &metadata.Info{
|
||||
Tags: tags,
|
||||
AudioProperties: ap,
|
||||
HasPicture: tags["has_picture"] != nil && len(tags["has_picture"]) > 0 && tags["has_picture"][0] == "true",
|
||||
}, nil
|
||||
}
|
||||
|
||||
// parseLyrics make sure lyrics tags have language
|
||||
func parseLyrics(tags map[string][]string) {
|
||||
lyrics := tags["lyrics"]
|
||||
if len(lyrics) > 0 {
|
||||
tags["lyrics:xxx"] = lyrics
|
||||
delete(tags, "lyrics")
|
||||
}
|
||||
}
|
||||
|
||||
// These are the only roles we support, based on Picard's tag map:
|
||||
// https://picard-docs.musicbrainz.org/downloads/MusicBrainz_Picard_Tag_Map.html
|
||||
var tiplMapping = map[string]string{
|
||||
"arranger": "arranger",
|
||||
"engineer": "engineer",
|
||||
"producer": "producer",
|
||||
"mix": "mixer",
|
||||
"DJ-mix": "djmixer",
|
||||
}
|
||||
|
||||
// parseProp parses a property from the tags map and sets it to the target integer.
|
||||
// It also deletes the property from the tags map after parsing.
|
||||
func parseProp(tags map[string][]string, prop string) int {
|
||||
if value, ok := tags[prop]; ok && len(value) > 0 {
|
||||
v, _ := strconv.Atoi(value[0])
|
||||
delete(tags, prop)
|
||||
return v
|
||||
}
|
||||
return 0
|
||||
}
|
||||
|
||||
// parseBasicTag checks if a basic tag (like __title, __artist, etc.) exists in the tags map.
|
||||
// If it does, it moves the value to a more appropriate tag name (like title, artist, etc.),
|
||||
// and deletes the basic tag from the map. If the target tag already exists, it ignores the basic tag.
|
||||
func parseBasicTag(tags map[string][]string, basicName string, tagName string) {
|
||||
basicValue := tags[basicName]
|
||||
if len(basicValue) == 0 {
|
||||
return
|
||||
}
|
||||
delete(tags, basicName)
|
||||
if len(tags[tagName]) == 0 {
|
||||
tags[tagName] = basicValue
|
||||
}
|
||||
}
|
||||
|
||||
// parseTIPL parses the ID3v2.4 TIPL frame string, which is received from TagLib in the format:
|
||||
//
|
||||
// "arranger Andrew Powell engineer Chris Blair engineer Pat Stapley producer Eric Woolfson".
|
||||
//
|
||||
// and breaks it down into a map of roles and names, e.g.:
|
||||
//
|
||||
// {"arranger": ["Andrew Powell"], "engineer": ["Chris Blair", "Pat Stapley"], "producer": ["Eric Woolfson"]}.
|
||||
func parseTIPL(tags map[string][]string) {
|
||||
tipl := tags["tipl"]
|
||||
if len(tipl) == 0 {
|
||||
return
|
||||
}
|
||||
|
||||
addRole := func(currentRole string, currentValue []string) {
|
||||
if currentRole != "" && len(currentValue) > 0 {
|
||||
role := tiplMapping[currentRole]
|
||||
tags[role] = append(tags[role], strings.Join(currentValue, " "))
|
||||
}
|
||||
}
|
||||
|
||||
var currentRole string
|
||||
var currentValue []string
|
||||
for _, part := range strings.Split(tipl[0], " ") {
|
||||
if _, ok := tiplMapping[part]; ok {
|
||||
addRole(currentRole, currentValue)
|
||||
currentRole = part
|
||||
currentValue = nil
|
||||
continue
|
||||
}
|
||||
currentValue = append(currentValue, part)
|
||||
}
|
||||
addRole(currentRole, currentValue)
|
||||
delete(tags, "tipl")
|
||||
}
|
||||
|
||||
var _ local.Extractor = (*extractor)(nil)
|
||||
|
||||
func init() {
|
||||
local.RegisterExtractor("taglib", func(_ fs.FS, baseDir string) local.Extractor {
|
||||
// ignores fs, as taglib extractor only works with local files
|
||||
return &extractor{baseDir}
|
||||
})
|
||||
conf.AddHook(func() {
|
||||
log.Debug("TagLib version", "version", Version())
|
||||
})
|
||||
}
|
||||
296
adapters/taglib/taglib_test.go
Normal file
@@ -0,0 +1,296 @@
|
||||
package taglib
|
||||
|
||||
import (
|
||||
"io/fs"
|
||||
"os"
|
||||
"strings"
|
||||
|
||||
"github.com/navidrome/navidrome/utils"
|
||||
. "github.com/onsi/ginkgo/v2"
|
||||
. "github.com/onsi/gomega"
|
||||
)
|
||||
|
||||
var _ = Describe("Extractor", func() {
|
||||
var e *extractor
|
||||
|
||||
BeforeEach(func() {
|
||||
e = &extractor{}
|
||||
})
|
||||
|
||||
Describe("Parse", func() {
|
||||
It("correctly parses metadata from all files in folder", func() {
|
||||
mds, err := e.Parse(
|
||||
"tests/fixtures/test.mp3",
|
||||
"tests/fixtures/test.ogg",
|
||||
)
|
||||
Expect(err).NotTo(HaveOccurred())
|
||||
Expect(mds).To(HaveLen(2))
|
||||
|
||||
// Test MP3
|
||||
m := mds["tests/fixtures/test.mp3"]
|
||||
Expect(m.Tags).To(HaveKeyWithValue("title", []string{"Song"}))
|
||||
Expect(m.Tags).To(HaveKeyWithValue("album", []string{"Album"}))
|
||||
Expect(m.Tags).To(HaveKeyWithValue("artist", []string{"Artist"}))
|
||||
Expect(m.Tags).To(HaveKeyWithValue("albumartist", []string{"Album Artist"}))
|
||||
|
||||
Expect(m.HasPicture).To(BeTrue())
|
||||
Expect(m.AudioProperties.Duration.String()).To(Equal("1.02s"))
|
||||
Expect(m.AudioProperties.BitRate).To(Equal(192))
|
||||
Expect(m.AudioProperties.Channels).To(Equal(2))
|
||||
Expect(m.AudioProperties.SampleRate).To(Equal(44100))
|
||||
|
||||
Expect(m.Tags).To(Or(
|
||||
HaveKeyWithValue("compilation", []string{"1"}),
|
||||
HaveKeyWithValue("tcmp", []string{"1"})),
|
||||
)
|
||||
Expect(m.Tags).To(HaveKeyWithValue("genre", []string{"Rock"}))
|
||||
Expect(m.Tags).To(HaveKeyWithValue("date", []string{"2014-05-21"}))
|
||||
Expect(m.Tags).To(HaveKeyWithValue("originaldate", []string{"1996-11-21"}))
|
||||
Expect(m.Tags).To(HaveKeyWithValue("releasedate", []string{"2020-12-31"}))
|
||||
Expect(m.Tags).To(HaveKeyWithValue("discnumber", []string{"1"}))
|
||||
Expect(m.Tags).To(HaveKeyWithValue("disctotal", []string{"2"}))
|
||||
Expect(m.Tags).To(HaveKeyWithValue("comment", []string{"Comment1\nComment2"}))
|
||||
Expect(m.Tags).To(HaveKeyWithValue("bpm", []string{"123"}))
|
||||
Expect(m.Tags).To(HaveKeyWithValue("replaygain_album_gain", []string{"+3.21518 dB"}))
|
||||
Expect(m.Tags).To(HaveKeyWithValue("replaygain_album_peak", []string{"0.9125"}))
|
||||
Expect(m.Tags).To(HaveKeyWithValue("replaygain_track_gain", []string{"-1.48 dB"}))
|
||||
Expect(m.Tags).To(HaveKeyWithValue("replaygain_track_peak", []string{"0.4512"}))
|
||||
|
||||
Expect(m.Tags).To(HaveKeyWithValue("tracknumber", []string{"2"}))
|
||||
Expect(m.Tags).To(HaveKeyWithValue("tracktotal", []string{"10"}))
|
||||
|
||||
Expect(m.Tags).ToNot(HaveKey("lyrics"))
|
||||
Expect(m.Tags).To(Or(HaveKeyWithValue("lyrics:eng", []string{
|
||||
"[00:00.00]This is\n[00:02.50]English SYLT\n",
|
||||
"[00:00.00]This is\n[00:02.50]English",
|
||||
}), HaveKeyWithValue("lyrics:eng", []string{
|
||||
"[00:00.00]This is\n[00:02.50]English",
|
||||
"[00:00.00]This is\n[00:02.50]English SYLT\n",
|
||||
})))
|
||||
Expect(m.Tags).To(Or(HaveKeyWithValue("lyrics:xxx", []string{
|
||||
"[00:00.00]This is\n[00:02.50]unspecified SYLT\n",
|
||||
"[00:00.00]This is\n[00:02.50]unspecified",
|
||||
}), HaveKeyWithValue("lyrics:xxx", []string{
|
||||
"[00:00.00]This is\n[00:02.50]unspecified",
|
||||
"[00:00.00]This is\n[00:02.50]unspecified SYLT\n",
|
||||
})))
|
||||
|
||||
// Test OGG
|
||||
m = mds["tests/fixtures/test.ogg"]
|
||||
Expect(err).To(BeNil())
|
||||
Expect(m.Tags).To(HaveKeyWithValue("fbpm", []string{"141.7"}))
|
||||
|
||||
// TagLib 1.12 returns 18, previous versions return 39.
|
||||
// See https://github.com/taglib/taglib/commit/2f238921824741b2cfe6fbfbfc9701d9827ab06b
|
||||
Expect(m.AudioProperties.BitRate).To(BeElementOf(18, 19, 39, 40, 43, 49))
|
||||
Expect(m.AudioProperties.Channels).To(BeElementOf(2))
|
||||
Expect(m.AudioProperties.SampleRate).To(BeElementOf(8000))
|
||||
Expect(m.AudioProperties.SampleRate).To(BeElementOf(8000))
|
||||
Expect(m.HasPicture).To(BeTrue())
|
||||
})
|
||||
|
||||
DescribeTable("Format-Specific tests",
|
||||
func(file, duration string, channels, samplerate, bitdepth int, albumGain, albumPeak, trackGain, trackPeak string, id3Lyrics bool, image bool) {
|
||||
file = "tests/fixtures/" + file
|
||||
mds, err := e.Parse(file)
|
||||
Expect(err).NotTo(HaveOccurred())
|
||||
Expect(mds).To(HaveLen(1))
|
||||
|
||||
m := mds[file]
|
||||
|
||||
Expect(m.HasPicture).To(Equal(image))
|
||||
Expect(m.AudioProperties.Duration.String()).To(Equal(duration))
|
||||
Expect(m.AudioProperties.Channels).To(Equal(channels))
|
||||
Expect(m.AudioProperties.SampleRate).To(Equal(samplerate))
|
||||
Expect(m.AudioProperties.BitDepth).To(Equal(bitdepth))
|
||||
|
||||
Expect(m.Tags).To(Or(
|
||||
HaveKeyWithValue("replaygain_album_gain", []string{albumGain}),
|
||||
HaveKeyWithValue("----:com.apple.itunes:replaygain_track_gain", []string{albumGain}),
|
||||
))
|
||||
|
||||
Expect(m.Tags).To(Or(
|
||||
HaveKeyWithValue("replaygain_album_peak", []string{albumPeak}),
|
||||
HaveKeyWithValue("----:com.apple.itunes:replaygain_album_peak", []string{albumPeak}),
|
||||
))
|
||||
Expect(m.Tags).To(Or(
|
||||
HaveKeyWithValue("replaygain_track_gain", []string{trackGain}),
|
||||
HaveKeyWithValue("----:com.apple.itunes:replaygain_track_gain", []string{trackGain}),
|
||||
))
|
||||
Expect(m.Tags).To(Or(
|
||||
HaveKeyWithValue("replaygain_track_peak", []string{trackPeak}),
|
||||
HaveKeyWithValue("----:com.apple.itunes:replaygain_track_peak", []string{trackPeak}),
|
||||
))
|
||||
|
||||
Expect(m.Tags).To(HaveKeyWithValue("title", []string{"Title"}))
|
||||
Expect(m.Tags).To(HaveKeyWithValue("album", []string{"Album"}))
|
||||
Expect(m.Tags).To(HaveKeyWithValue("artist", []string{"Artist"}))
|
||||
Expect(m.Tags).To(HaveKeyWithValue("albumartist", []string{"Album Artist"}))
|
||||
Expect(m.Tags).To(HaveKeyWithValue("genre", []string{"Rock"}))
|
||||
Expect(m.Tags).To(HaveKeyWithValue("date", []string{"2014"}))
|
||||
|
||||
Expect(m.Tags).To(HaveKeyWithValue("bpm", []string{"123"}))
|
||||
Expect(m.Tags).To(Or(
|
||||
HaveKeyWithValue("tracknumber", []string{"3"}),
|
||||
HaveKeyWithValue("tracknumber", []string{"3/10"}),
|
||||
))
|
||||
if !strings.HasSuffix(file, "test.wma") {
|
||||
// TODO Not sure why this is not working for WMA
|
||||
Expect(m.Tags).To(HaveKeyWithValue("tracktotal", []string{"10"}))
|
||||
}
|
||||
Expect(m.Tags).To(Or(
|
||||
HaveKeyWithValue("discnumber", []string{"1"}),
|
||||
HaveKeyWithValue("discnumber", []string{"1/2"}),
|
||||
))
|
||||
Expect(m.Tags).To(HaveKeyWithValue("disctotal", []string{"2"}))
|
||||
|
||||
// WMA does not have a "compilation" tag, but "wm/iscompilation"
|
||||
Expect(m.Tags).To(Or(
|
||||
HaveKeyWithValue("compilation", []string{"1"}),
|
||||
HaveKeyWithValue("wm/iscompilation", []string{"1"})),
|
||||
)
|
||||
|
||||
if id3Lyrics {
|
||||
Expect(m.Tags).To(HaveKeyWithValue("lyrics:eng", []string{
|
||||
"[00:00.00]This is\n[00:02.50]English",
|
||||
}))
|
||||
Expect(m.Tags).To(HaveKeyWithValue("lyrics:xxx", []string{
|
||||
"[00:00.00]This is\n[00:02.50]unspecified",
|
||||
}))
|
||||
} else {
|
||||
Expect(m.Tags).To(HaveKeyWithValue("lyrics:xxx", []string{
|
||||
"[00:00.00]This is\n[00:02.50]unspecified",
|
||||
"[00:00.00]This is\n[00:02.50]English",
|
||||
}))
|
||||
}
|
||||
|
||||
Expect(m.Tags).To(HaveKeyWithValue("comment", []string{"Comment1\nComment2"}))
|
||||
},
|
||||
|
||||
// ffmpeg -f lavfi -i "sine=frequency=1200:duration=1" test.flac
|
||||
Entry("correctly parses flac tags", "test.flac", "1s", 1, 44100, 16, "+4.06 dB", "0.12496948", "+4.06 dB", "0.12496948", false, true),
|
||||
|
||||
Entry("correctly parses m4a (aac) gain tags", "01 Invisible (RED) Edit Version.m4a", "1.04s", 2, 44100, 16, "0.37", "0.48", "0.37", "0.48", false, true),
|
||||
Entry("correctly parses m4a (aac) gain tags (uppercase)", "test.m4a", "1.04s", 2, 44100, 16, "0.37", "0.48", "0.37", "0.48", false, true),
|
||||
Entry("correctly parses ogg (vorbis) tags", "test.ogg", "1.04s", 2, 8000, 0, "+7.64 dB", "0.11772506", "+7.64 dB", "0.11772506", false, true),
|
||||
|
||||
// ffmpeg -f lavfi -i "sine=frequency=900:duration=1" test.wma
|
||||
// Weird note: for the tag parsing to work, the lyrics are actually stored in the reverse order
|
||||
Entry("correctly parses wma/asf tags", "test.wma", "1.02s", 1, 44100, 16, "3.27 dB", "0.132914", "3.27 dB", "0.132914", false, true),
|
||||
|
||||
// ffmpeg -f lavfi -i "sine=frequency=800:duration=1" test.wv
|
||||
Entry("correctly parses wv (wavpack) tags", "test.wv", "1s", 1, 44100, 16, "3.43 dB", "0.125061", "3.43 dB", "0.125061", false, true),
|
||||
|
||||
// ffmpeg -f lavfi -i "sine=frequency=1000:duration=1" test.wav
|
||||
Entry("correctly parses wav tags", "test.wav", "1s", 1, 44100, 16, "3.06 dB", "0.125056", "3.06 dB", "0.125056", true, true),
|
||||
|
||||
// ffmpeg -f lavfi -i "sine=frequency=1400:duration=1" test.aiff
|
||||
Entry("correctly parses aiff tags", "test.aiff", "1s", 1, 44100, 16, "2.00 dB", "0.124972", "2.00 dB", "0.124972", true, true),
|
||||
)
|
||||
|
||||
// Skip these tests when running as root
|
||||
Context("Access Forbidden", func() {
|
||||
var accessForbiddenFile string
|
||||
var RegularUserContext = XContext
|
||||
var isRegularUser = os.Getuid() != 0
|
||||
if isRegularUser {
|
||||
RegularUserContext = Context
|
||||
}
|
||||
|
||||
// Only run permission tests if we are not root
|
||||
RegularUserContext("when run without root privileges", func() {
|
||||
BeforeEach(func() {
|
||||
accessForbiddenFile = utils.TempFileName("access_forbidden-", ".mp3")
|
||||
|
||||
f, err := os.OpenFile(accessForbiddenFile, os.O_WRONLY|os.O_CREATE, 0222)
|
||||
Expect(err).ToNot(HaveOccurred())
|
||||
|
||||
DeferCleanup(func() {
|
||||
Expect(f.Close()).To(Succeed())
|
||||
Expect(os.Remove(accessForbiddenFile)).To(Succeed())
|
||||
})
|
||||
})
|
||||
|
||||
It("correctly handle unreadable file due to insufficient read permission", func() {
|
||||
_, err := e.extractMetadata(accessForbiddenFile)
|
||||
Expect(err).To(MatchError(os.ErrPermission))
|
||||
})
|
||||
|
||||
It("skips the file if it cannot be read", func() {
|
||||
files := []string{
|
||||
"tests/fixtures/test.mp3",
|
||||
"tests/fixtures/test.ogg",
|
||||
accessForbiddenFile,
|
||||
}
|
||||
mds, err := e.Parse(files...)
|
||||
Expect(err).NotTo(HaveOccurred())
|
||||
Expect(mds).To(HaveLen(2))
|
||||
Expect(mds).ToNot(HaveKey(accessForbiddenFile))
|
||||
})
|
||||
})
|
||||
})
|
||||
|
||||
})
|
||||
|
||||
Describe("Error Checking", func() {
|
||||
It("returns a generic ErrPath if file does not exist", func() {
|
||||
testFilePath := "tests/fixtures/NON_EXISTENT.ogg"
|
||||
_, err := e.extractMetadata(testFilePath)
|
||||
Expect(err).To(MatchError(fs.ErrNotExist))
|
||||
})
|
||||
It("does not throw a SIGSEGV error when reading a file with an invalid frame", func() {
|
||||
// File has an empty TDAT frame
|
||||
md, err := e.extractMetadata("tests/fixtures/invalid-files/test-invalid-frame.mp3")
|
||||
Expect(err).ToNot(HaveOccurred())
|
||||
Expect(md.Tags).To(HaveKeyWithValue("albumartist", []string{"Elvis Presley"}))
|
||||
})
|
||||
})
|
||||
|
||||
Describe("parseTIPL", func() {
|
||||
var tags map[string][]string
|
||||
|
||||
BeforeEach(func() {
|
||||
tags = make(map[string][]string)
|
||||
})
|
||||
|
||||
Context("when the TIPL string is populated", func() {
|
||||
It("correctly parses roles and names", func() {
|
||||
tags["tipl"] = []string{"arranger Andrew Powell DJ-mix François Kevorkian DJ-mix Jane Doe engineer Chris Blair"}
|
||||
parseTIPL(tags)
|
||||
Expect(tags["arranger"]).To(ConsistOf("Andrew Powell"))
|
||||
Expect(tags["engineer"]).To(ConsistOf("Chris Blair"))
|
||||
Expect(tags["djmixer"]).To(ConsistOf("François Kevorkian", "Jane Doe"))
|
||||
})
|
||||
|
||||
It("handles multiple names for a single role", func() {
|
||||
tags["tipl"] = []string{"engineer Pat Stapley producer Eric Woolfson engineer Chris Blair"}
|
||||
parseTIPL(tags)
|
||||
Expect(tags["producer"]).To(ConsistOf("Eric Woolfson"))
|
||||
Expect(tags["engineer"]).To(ConsistOf("Pat Stapley", "Chris Blair"))
|
||||
})
|
||||
|
||||
It("discards roles without names", func() {
|
||||
tags["tipl"] = []string{"engineer Pat Stapley producer engineer Chris Blair"}
|
||||
parseTIPL(tags)
|
||||
Expect(tags).ToNot(HaveKey("producer"))
|
||||
Expect(tags["engineer"]).To(ConsistOf("Pat Stapley", "Chris Blair"))
|
||||
})
|
||||
})
|
||||
|
||||
Context("when the TIPL string is empty", func() {
|
||||
It("does nothing", func() {
|
||||
tags["tipl"] = []string{""}
|
||||
parseTIPL(tags)
|
||||
Expect(tags).To(BeEmpty())
|
||||
})
|
||||
})
|
||||
|
||||
Context("when the TIPL is not present", func() {
|
||||
It("does nothing", func() {
|
||||
parseTIPL(tags)
|
||||
Expect(tags).To(BeEmpty())
|
||||
})
|
||||
})
|
||||
})
|
||||
|
||||
})
|
||||
@@ -1,10 +1,12 @@
|
||||
#include <stdlib.h>
|
||||
#include <string.h>
|
||||
#include <typeinfo>
|
||||
|
||||
#define TAGLIB_STATIC
|
||||
#include <apeproperties.h>
|
||||
#include <apetag.h>
|
||||
#include <aifffile.h>
|
||||
#include <asffile.h>
|
||||
#include <dsffile.h>
|
||||
#include <fileref.h>
|
||||
#include <flacfile.h>
|
||||
#include <id3v2tag.h>
|
||||
@@ -16,6 +18,8 @@
|
||||
#include <tpropertymap.h>
|
||||
#include <vorbisfile.h>
|
||||
#include <wavfile.h>
|
||||
#include <wavpackfile.h>
|
||||
|
||||
#include "taglib_wrapper.h"
|
||||
|
||||
@@ -41,32 +45,60 @@ int taglib_read(const FILENAME_CHAR_T *filename, unsigned long id) {
|
||||
|
||||
// Add audio properties to the tags
|
||||
const TagLib::AudioProperties *props(f.audioProperties());
|
||||
go_map_put_int(id, (char *)"duration", props->lengthInSeconds());
|
||||
go_map_put_int(id, (char *)"lengthinmilliseconds", props->lengthInMilliseconds());
|
||||
go_map_put_int(id, (char *)"bitrate", props->bitrate());
|
||||
go_map_put_int(id, (char *)"channels", props->channels());
|
||||
go_map_put_int(id, (char *)"samplerate", props->sampleRate());
|
||||
goPutInt(id, (char *)"__lengthinmilliseconds", props->lengthInMilliseconds());
|
||||
goPutInt(id, (char *)"__bitrate", props->bitrate());
|
||||
goPutInt(id, (char *)"__channels", props->channels());
|
||||
goPutInt(id, (char *)"__samplerate", props->sampleRate());
|
||||
|
||||
// Create a map to collect all the tags
|
||||
// Extract bits per sample for supported formats
|
||||
int bitsPerSample = 0;
|
||||
if (const auto* apeProperties{ dynamic_cast<const TagLib::APE::Properties*>(props) })
|
||||
bitsPerSample = apeProperties->bitsPerSample();
|
||||
else if (const auto* asfProperties{ dynamic_cast<const TagLib::ASF::Properties*>(props) })
|
||||
bitsPerSample = asfProperties->bitsPerSample();
|
||||
else if (const auto* flacProperties{ dynamic_cast<const TagLib::FLAC::Properties*>(props) })
|
||||
bitsPerSample = flacProperties->bitsPerSample();
|
||||
else if (const auto* mp4Properties{ dynamic_cast<const TagLib::MP4::Properties*>(props) })
|
||||
bitsPerSample = mp4Properties->bitsPerSample();
|
||||
else if (const auto* wavePackProperties{ dynamic_cast<const TagLib::WavPack::Properties*>(props) })
|
||||
bitsPerSample = wavePackProperties->bitsPerSample();
|
||||
else if (const auto* aiffProperties{ dynamic_cast<const TagLib::RIFF::AIFF::Properties*>(props) })
|
||||
bitsPerSample = aiffProperties->bitsPerSample();
|
||||
else if (const auto* wavProperties{ dynamic_cast<const TagLib::RIFF::WAV::Properties*>(props) })
|
||||
bitsPerSample = wavProperties->bitsPerSample();
|
||||
else if (const auto* dsfProperties{ dynamic_cast<const TagLib::DSF::Properties*>(props) })
|
||||
bitsPerSample = dsfProperties->bitsPerSample();
|
||||
|
||||
if (bitsPerSample > 0) {
|
||||
goPutInt(id, (char *)"__bitspersample", bitsPerSample);
|
||||
}
|
||||
|
||||
// Send all properties to the Go map
|
||||
TagLib::PropertyMap tags = f.file()->properties();
|
||||
|
||||
// Make sure at least the basic properties are extracted
|
||||
TagLib::Tag *basic = f.file()->tag();
|
||||
if (!basic->isEmpty()) {
|
||||
if (!basic->title().isEmpty()) {
|
||||
tags.insert("title", basic->title());
|
||||
tags.insert("__title", basic->title());
|
||||
}
|
||||
if (!basic->artist().isEmpty()) {
|
||||
tags.insert("artist", basic->artist());
|
||||
tags.insert("__artist", basic->artist());
|
||||
}
|
||||
if (!basic->album().isEmpty()) {
|
||||
tags.insert("album", basic->album());
|
||||
tags.insert("__album", basic->album());
|
||||
}
|
||||
if (!basic->comment().isEmpty()) {
|
||||
tags.insert("__comment", basic->comment());
|
||||
}
|
||||
if (!basic->genre().isEmpty()) {
|
||||
tags.insert("__genre", basic->genre());
|
||||
}
|
||||
if (basic->year() > 0) {
|
||||
tags.insert("date", TagLib::String::number(basic->year()));
|
||||
tags.insert("__year", TagLib::String::number(basic->year()));
|
||||
}
|
||||
if (basic->track() > 0) {
|
||||
tags.insert("_track", TagLib::String::number(basic->track()));
|
||||
tags.insert("__track", TagLib::String::number(basic->track()));
|
||||
}
|
||||
}
|
||||
|
||||
@@ -112,9 +144,9 @@ int taglib_read(const FILENAME_CHAR_T *filename, unsigned long id) {
|
||||
strncpy(language, bv.data(), 3);
|
||||
}
|
||||
|
||||
char *val = (char *)frame->text().toCString(true);
|
||||
char *val = const_cast<char*>(frame->text().toCString(true));
|
||||
|
||||
go_map_put_lyrics(id, language, val);
|
||||
goPutLyrics(id, language, val);
|
||||
}
|
||||
} else if (kv.first == "SYLT") {
|
||||
for (const auto &tag: kv.second) {
|
||||
@@ -131,8 +163,8 @@ int taglib_read(const FILENAME_CHAR_T *filename, unsigned long id) {
|
||||
if (format == TagLib::ID3v2::SynchronizedLyricsFrame::AbsoluteMilliseconds) {
|
||||
|
||||
for (const auto &line: frame->synchedText()) {
|
||||
char *text = (char *)line.text.toCString(true);
|
||||
go_map_put_lyric_line(id, language, text, line.time);
|
||||
char *text = const_cast<char*>(line.text.toCString(true));
|
||||
goPutLyricLine(id, language, text, line.time);
|
||||
}
|
||||
} else if (format == TagLib::ID3v2::SynchronizedLyricsFrame::AbsoluteMpegFrames) {
|
||||
const int sampleRate = props->sampleRate();
|
||||
@@ -140,13 +172,13 @@ int taglib_read(const FILENAME_CHAR_T *filename, unsigned long id) {
|
||||
if (sampleRate != 0) {
|
||||
for (const auto &line: frame->synchedText()) {
|
||||
const int timeInMs = (line.time * 1000) / sampleRate;
|
||||
char *text = (char *)line.text.toCString(true);
|
||||
go_map_put_lyric_line(id, language, text, timeInMs);
|
||||
char *text = const_cast<char*>(line.text.toCString(true));
|
||||
goPutLyricLine(id, language, text, timeInMs);
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
} else {
|
||||
} else if (kv.first == "TIPL"){
|
||||
if (!kv.second.isEmpty()) {
|
||||
tags.insert(kv.first, kv.second.front()->toString());
|
||||
}
|
||||
@@ -154,43 +186,50 @@ int taglib_read(const FILENAME_CHAR_T *filename, unsigned long id) {
|
||||
}
|
||||
}
|
||||
|
||||
// M4A may have some iTunes specific tags
|
||||
// M4A may have some iTunes specific tags not captured by the PropertyMap interface
|
||||
TagLib::MP4::File *m4afile(dynamic_cast<TagLib::MP4::File *>(f.file()));
|
||||
if (m4afile != NULL) {
|
||||
const auto itemListMap = m4afile->tag()->itemMap();
|
||||
for (const auto item: itemListMap) {
|
||||
char *key = (char *)item.first.toCString(true);
|
||||
char *key = const_cast<char*>(item.first.toCString(true));
|
||||
for (const auto value: item.second.toStringList()) {
|
||||
char *val = (char *)value.toCString(true);
|
||||
go_map_put_m4a_str(id, key, val);
|
||||
char *val = const_cast<char*>(value.toCString(true));
|
||||
goPutM4AStr(id, key, val);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// WMA/ASF files may have additional tags not captured by the general iterator
|
||||
// WMA/ASF files may have additional tags not captured by the PropertyMap interface
|
||||
TagLib::ASF::File *asfFile(dynamic_cast<TagLib::ASF::File *>(f.file()));
|
||||
if (asfFile != NULL) {
|
||||
const TagLib::ASF::Tag *asfTags{asfFile->tag()};
|
||||
const auto itemListMap = asfTags->attributeListMap();
|
||||
for (const auto item : itemListMap) {
|
||||
tags.insert(item.first, item.second.front().toString());
|
||||
char *key = const_cast<char*>(item.first.toCString(true));
|
||||
|
||||
for (auto j = item.second.begin();
|
||||
j != item.second.end(); ++j) {
|
||||
|
||||
char *val = const_cast<char*>(j->toString().toCString(true));
|
||||
goPutStr(id, key, val);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// Send all collected tags to the Go map
|
||||
for (TagLib::PropertyMap::ConstIterator i = tags.begin(); i != tags.end();
|
||||
++i) {
|
||||
char *key = (char *)i->first.toCString(true);
|
||||
char *key = const_cast<char*>(i->first.toCString(true));
|
||||
for (TagLib::StringList::ConstIterator j = i->second.begin();
|
||||
j != i->second.end(); ++j) {
|
||||
char *val = (char *)(*j).toCString(true);
|
||||
go_map_put_str(id, key, val);
|
||||
char *val = const_cast<char*>((*j).toCString(true));
|
||||
goPutStr(id, key, val);
|
||||
}
|
||||
}
|
||||
|
||||
// Cover art has to be handled separately
|
||||
if (has_cover(f)) {
|
||||
go_map_put_str(id, (char *)"has_picture", (char *)"true");
|
||||
goPutStr(id, (char *)"has_picture", (char *)"true");
|
||||
}
|
||||
|
||||
return 0;
|
||||
@@ -200,41 +239,61 @@ int taglib_read(const FILENAME_CHAR_T *filename, unsigned long id) {
|
||||
char has_cover(const TagLib::FileRef f) {
|
||||
char hasCover = 0;
|
||||
// ----- MP3
|
||||
if (TagLib::MPEG::File *
|
||||
mp3File{dynamic_cast<TagLib::MPEG::File *>(f.file())}) {
|
||||
if (TagLib::MPEG::File * mp3File{dynamic_cast<TagLib::MPEG::File *>(f.file())}) {
|
||||
if (mp3File->ID3v2Tag()) {
|
||||
const auto &frameListMap{mp3File->ID3v2Tag()->frameListMap()};
|
||||
hasCover = !frameListMap["APIC"].isEmpty();
|
||||
}
|
||||
}
|
||||
// ----- FLAC
|
||||
else if (TagLib::FLAC::File *
|
||||
flacFile{dynamic_cast<TagLib::FLAC::File *>(f.file())}) {
|
||||
else if (TagLib::FLAC::File * flacFile{dynamic_cast<TagLib::FLAC::File *>(f.file())}) {
|
||||
hasCover = !flacFile->pictureList().isEmpty();
|
||||
}
|
||||
// ----- MP4
|
||||
else if (TagLib::MP4::File *
|
||||
mp4File{dynamic_cast<TagLib::MP4::File *>(f.file())}) {
|
||||
else if (TagLib::MP4::File * mp4File{dynamic_cast<TagLib::MP4::File *>(f.file())}) {
|
||||
auto &coverItem{mp4File->tag()->itemMap()["covr"]};
|
||||
TagLib::MP4::CoverArtList coverArtList{coverItem.toCoverArtList()};
|
||||
hasCover = !coverArtList.isEmpty();
|
||||
}
|
||||
// ----- Ogg
|
||||
else if (TagLib::Ogg::Vorbis::File *
|
||||
vorbisFile{dynamic_cast<TagLib::Ogg::Vorbis::File *>(f.file())}) {
|
||||
else if (TagLib::Ogg::Vorbis::File * vorbisFile{dynamic_cast<TagLib::Ogg::Vorbis::File *>(f.file())}) {
|
||||
hasCover = !vorbisFile->tag()->pictureList().isEmpty();
|
||||
}
|
||||
// ----- Opus
|
||||
else if (TagLib::Ogg::Opus::File *
|
||||
opusFile{dynamic_cast<TagLib::Ogg::Opus::File *>(f.file())}) {
|
||||
else if (TagLib::Ogg::Opus::File * opusFile{dynamic_cast<TagLib::Ogg::Opus::File *>(f.file())}) {
|
||||
hasCover = !opusFile->tag()->pictureList().isEmpty();
|
||||
}
|
||||
// ----- WAV
|
||||
else if (TagLib::RIFF::WAV::File * wavFile{ dynamic_cast<TagLib::RIFF::WAV::File*>(f.file()) }) {
|
||||
if (wavFile->hasID3v2Tag()) {
|
||||
const auto& frameListMap{ wavFile->ID3v2Tag()->frameListMap() };
|
||||
hasCover = !frameListMap["APIC"].isEmpty();
|
||||
}
|
||||
}
|
||||
// ----- AIFF
|
||||
else if (TagLib::RIFF::AIFF::File * aiffFile{ dynamic_cast<TagLib::RIFF::AIFF::File *>(f.file())}) {
|
||||
if (aiffFile->hasID3v2Tag()) {
|
||||
const auto& frameListMap{ aiffFile->tag()->frameListMap() };
|
||||
hasCover = !frameListMap["APIC"].isEmpty();
|
||||
}
|
||||
}
|
||||
// ----- WMA
|
||||
if (TagLib::ASF::File *
|
||||
asfFile{dynamic_cast<TagLib::ASF::File *>(f.file())}) {
|
||||
const TagLib::ASF::Tag *tag{asfFile->tag()};
|
||||
else if (TagLib::ASF::File * asfFile{dynamic_cast<TagLib::ASF::File *>(f.file())}) {
|
||||
const TagLib::ASF::Tag *tag{ asfFile->tag() };
|
||||
hasCover = tag && tag->attributeListMap().contains("WM/Picture");
|
||||
}
|
||||
// ----- DSF
|
||||
else if (TagLib::DSF::File * dsffile{ dynamic_cast<TagLib::DSF::File *>(f.file())}) {
|
||||
const TagLib::ID3v2::Tag *tag { dsffile->tag() };
|
||||
hasCover = tag && !tag->frameListMap()["APIC"].isEmpty();
|
||||
}
|
||||
// ----- WAVPAK (APE tag)
|
||||
else if (TagLib::WavPack::File * wvFile{dynamic_cast<TagLib::WavPack::File *>(f.file())}) {
|
||||
if (wvFile->hasAPETag()) {
|
||||
// This is the particular string that Picard uses
|
||||
hasCover = !wvFile->APETag()->itemListMap()["COVER ART (FRONT)"].isEmpty();
|
||||
}
|
||||
}
|
||||
|
||||
return hasCover;
|
||||
}
|
||||
adapters/taglib/taglib_wrapper.go | 157 (new file)
@@ -0,0 +1,157 @@
package taglib

/*
#cgo !windows pkg-config: --define-prefix taglib
#cgo windows pkg-config: taglib
#cgo illumos LDFLAGS: -lstdc++ -lsendfile
#cgo linux darwin CXXFLAGS: -std=c++11
#cgo darwin LDFLAGS: -L/opt/homebrew/opt/taglib/lib
#include <stdio.h>
#include <stdlib.h>
#include <string.h>
#include "taglib_wrapper.h"
*/
import "C"
import (
	"encoding/json"
	"fmt"
	"os"
	"runtime/debug"
	"strconv"
	"strings"
	"sync"
	"sync/atomic"
	"unsafe"

	"github.com/navidrome/navidrome/log"
)

const iTunesKeyPrefix = "----:com.apple.itunes:"

func Version() string {
	return C.GoString(C.taglib_version())
}

func Read(filename string) (tags map[string][]string, err error) {
	// Do not crash on failures in the C code/library
	debug.SetPanicOnFault(true)
	defer func() {
		if r := recover(); r != nil {
			log.Error("extractor: recovered from panic when reading tags", "file", filename, "error", r)
			err = fmt.Errorf("extractor: recovered from panic: %s", r)
		}
	}()

	fp := getFilename(filename)
	defer C.free(unsafe.Pointer(fp))
	id, m, release := newMap()
	defer release()

	log.Trace("extractor: reading tags", "filename", filename, "map_id", id)
	res := C.taglib_read(fp, C.ulong(id))
	switch res {
	case C.TAGLIB_ERR_PARSE:
		// Check additional case whether the file is unreadable due to permission
		file, fileErr := os.OpenFile(filename, os.O_RDONLY, 0600)
		defer file.Close()

		if os.IsPermission(fileErr) {
			return nil, fmt.Errorf("navidrome does not have permission: %w", fileErr)
		} else if fileErr != nil {
			return nil, fmt.Errorf("cannot parse file media file: %w", fileErr)
		} else {
			return nil, fmt.Errorf("cannot parse file media file")
		}
	case C.TAGLIB_ERR_AUDIO_PROPS:
		return nil, fmt.Errorf("can't get audio properties from file")
	}
	if log.IsGreaterOrEqualTo(log.LevelDebug) {
		j, _ := json.Marshal(m)
		log.Trace("extractor: read tags", "tags", string(j), "filename", filename, "id", id)
	} else {
		log.Trace("extractor: read tags", "tags", m, "filename", filename, "id", id)
	}

	return m, nil
}

type tagMap map[string][]string

var allMaps sync.Map
var mapsNextID atomic.Uint32

func newMap() (uint32, tagMap, func()) {
	id := mapsNextID.Add(1)

	m := tagMap{}
	allMaps.Store(id, m)

	return id, m, func() {
		allMaps.Delete(id)
	}
}

func doPutTag(id C.ulong, key string, val *C.char) {
	if key == "" {
		return
	}

	r, _ := allMaps.Load(uint32(id))
	m := r.(tagMap)
	k := strings.ToLower(key)
	v := strings.TrimSpace(C.GoString(val))
	m[k] = append(m[k], v)
}

//export goPutM4AStr
func goPutM4AStr(id C.ulong, key *C.char, val *C.char) {
	k := C.GoString(key)

	// Special for M4A, do not catch keys that have no actual name
	k = strings.TrimPrefix(k, iTunesKeyPrefix)
	doPutTag(id, k, val)
}

//export goPutStr
func goPutStr(id C.ulong, key *C.char, val *C.char) {
	doPutTag(id, C.GoString(key), val)
}

//export goPutInt
func goPutInt(id C.ulong, key *C.char, val C.int) {
	valStr := strconv.Itoa(int(val))
	vp := C.CString(valStr)
	defer C.free(unsafe.Pointer(vp))
	goPutStr(id, key, vp)
}

//export goPutLyrics
func goPutLyrics(id C.ulong, lang *C.char, val *C.char) {
	doPutTag(id, "lyrics:"+C.GoString(lang), val)
}

//export goPutLyricLine
func goPutLyricLine(id C.ulong, lang *C.char, text *C.char, time C.int) {
	language := C.GoString(lang)
	line := C.GoString(text)
	timeGo := int64(time)

	ms := timeGo % 1000
	timeGo /= 1000
	sec := timeGo % 60
	timeGo /= 60
	minimum := timeGo % 60
	formattedLine := fmt.Sprintf("[%02d:%02d.%02d]%s\n", minimum, sec, ms/10, line)

	key := "lyrics:" + language

	r, _ := allMaps.Load(uint32(id))
	m := r.(tagMap)
	k := strings.ToLower(key)
	existing, ok := m[k]
	if ok {
		existing[0] += formattedLine
	} else {
		m[k] = []string{formattedLine}
	}
}
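The file above is the complete Go side of the TagLib adapter: taglib_read on the C++ side pushes every key/value pair into the per-call map registered by newMap, goPutLyricLine accumulates synced lyrics as LRC-style "[mm:ss.xx]" lines, and Read returns the finished map. As a rough usage sketch (the import path follows the repository layout; the file path, the "eng" lyrics language, and the printed keys are only illustrative examples of values the wrapper can set):

package main

import (
	"fmt"

	"github.com/navidrome/navidrome/adapters/taglib"
)

func main() {
	// "/music/song.mp3" is an illustrative path; any file TagLib understands should work.
	tags, err := taglib.Read("/music/song.mp3")
	if err != nil {
		fmt.Println("read failed:", err)
		return
	}
	// Keys are lower-cased by doPutTag; multi-valued tags keep all values.
	fmt.Println("title:", tags["title"])
	fmt.Println("artist:", tags["artist"])
	// Set by has_cover() in the C++ wrapper when embedded art is found.
	fmt.Println("has cover art:", tags["has_picture"])
	// Synced/unsynced lyrics are keyed by "lyrics:" plus the 3-letter language code.
	fmt.Println("lyrics:", tags["lyrics:eng"])
}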
adapters/taglib/taglib_wrapper.h | 24 (new file)
@@ -0,0 +1,24 @@
#define TAGLIB_ERR_PARSE -1
#define TAGLIB_ERR_AUDIO_PROPS -2

#ifdef __cplusplus
extern "C" {
#endif

#ifdef WIN32
#define FILENAME_CHAR_T wchar_t
#else
#define FILENAME_CHAR_T char
#endif

extern void goPutM4AStr(unsigned long id, char *key, char *val);
extern void goPutStr(unsigned long id, char *key, char *val);
extern void goPutInt(unsigned long id, char *key, int val);
extern void goPutLyrics(unsigned long id, char *lang, char *val);
extern void goPutLyricLine(unsigned long id, char *lang, char *text, int time);
int taglib_read(const FILENAME_CHAR_T *filename, unsigned long id);
char* taglib_version();

#ifdef __cplusplus
}
#endif
cmd/backup.go | 187 (new file)
@@ -0,0 +1,187 @@
|
||||
package cmd
|
||||
|
||||
//
|
||||
//import (
|
||||
// "context"
|
||||
// "fmt"
|
||||
// "os"
|
||||
// "strings"
|
||||
// "time"
|
||||
//
|
||||
// "github.com/navidrome/navidrome/conf"
|
||||
// "github.com/navidrome/navidrome/db"
|
||||
// "github.com/navidrome/navidrome/log"
|
||||
// "github.com/spf13/cobra"
|
||||
//)
|
||||
//
|
||||
//var (
|
||||
// backupCount int
|
||||
// backupDir string
|
||||
// force bool
|
||||
// restorePath string
|
||||
//)
|
||||
//
|
||||
//func init() {
|
||||
// rootCmd.AddCommand(backupRoot)
|
||||
//
|
||||
// backupCmd.Flags().StringVarP(&backupDir, "backup-dir", "d", "", "directory to manually make backup")
|
||||
// backupRoot.AddCommand(backupCmd)
|
||||
//
|
||||
// pruneCmd.Flags().StringVarP(&backupDir, "backup-dir", "d", "", "directory holding Navidrome backups")
|
||||
// pruneCmd.Flags().IntVarP(&backupCount, "keep-count", "k", -1, "specify the number of backups to keep. 0 remove ALL backups, and negative values mean to use the default from configuration")
|
||||
// pruneCmd.Flags().BoolVarP(&force, "force", "f", false, "bypass warning when backup count is zero")
|
||||
// backupRoot.AddCommand(pruneCmd)
|
||||
//
|
||||
// restoreCommand.Flags().StringVarP(&restorePath, "backup-file", "b", "", "path of backup database to restore")
|
||||
// restoreCommand.Flags().BoolVarP(&force, "force", "f", false, "bypass restore warning")
|
||||
// _ = restoreCommand.MarkFlagRequired("backup-file")
|
||||
// backupRoot.AddCommand(restoreCommand)
|
||||
//}
|
||||
//
|
||||
//var (
|
||||
// backupRoot = &cobra.Command{
|
||||
// Use: "backup",
|
||||
// Aliases: []string{"bkp"},
|
||||
// Short: "Create, restore and prune database backups",
|
||||
// Long: "Create, restore and prune database backups",
|
||||
// }
|
||||
//
|
||||
// backupCmd = &cobra.Command{
|
||||
// Use: "create",
|
||||
// Short: "Create a backup database",
|
||||
// Long: "Manually backup Navidrome database. This will ignore BackupCount",
|
||||
// Run: func(cmd *cobra.Command, _ []string) {
|
||||
// runBackup(cmd.Context())
|
||||
// },
|
||||
// }
|
||||
//
|
||||
// pruneCmd = &cobra.Command{
|
||||
// Use: "prune",
|
||||
// Short: "Prune database backups",
|
||||
// Long: "Manually prune database backups according to backup rules",
|
||||
// Run: func(cmd *cobra.Command, _ []string) {
|
||||
// runPrune(cmd.Context())
|
||||
// },
|
||||
// }
|
||||
//
|
||||
// restoreCommand = &cobra.Command{
|
||||
// Use: "restore",
|
||||
// Short: "Restore Navidrome database",
|
||||
// Long: "Restore Navidrome database from a backup. This must be done offline",
|
||||
// Run: func(cmd *cobra.Command, _ []string) {
|
||||
// runRestore(cmd.Context())
|
||||
// },
|
||||
// }
|
||||
//)
|
||||
//
|
||||
//func runBackup(ctx context.Context) {
|
||||
// if backupDir != "" {
|
||||
// conf.Server.Backup.Path = backupDir
|
||||
// }
|
||||
//
|
||||
// idx := strings.LastIndex(conf.Server.DbPath, "?")
|
||||
// var path string
|
||||
//
|
||||
// if idx == -1 {
|
||||
// path = conf.Server.DbPath
|
||||
// } else {
|
||||
// path = conf.Server.DbPath[:idx]
|
||||
// }
|
||||
//
|
||||
// if _, err := os.Stat(path); os.IsNotExist(err) {
|
||||
// log.Fatal("No existing database", "path", path)
|
||||
// return
|
||||
// }
|
||||
//
|
||||
// start := time.Now()
|
||||
// path, err := db.Backup(ctx)
|
||||
// if err != nil {
|
||||
// log.Fatal("Error backing up database", "backup path", conf.Server.BasePath, err)
|
||||
// }
|
||||
//
|
||||
// elapsed := time.Since(start)
|
||||
// log.Info("Backup complete", "elapsed", elapsed, "path", path)
|
||||
//}
|
||||
//
|
||||
//func runPrune(ctx context.Context) {
|
||||
// if backupDir != "" {
|
||||
// conf.Server.Backup.Path = backupDir
|
||||
// }
|
||||
//
|
||||
// if backupCount != -1 {
|
||||
// conf.Server.Backup.Count = backupCount
|
||||
// }
|
||||
//
|
||||
// if conf.Server.Backup.Count == 0 && !force {
|
||||
// fmt.Println("Warning: pruning ALL backups")
|
||||
// fmt.Printf("Please enter YES (all caps) to continue: ")
|
||||
// var input string
|
||||
// _, err := fmt.Scanln(&input)
|
||||
//
|
||||
// if input != "YES" || err != nil {
|
||||
// log.Warn("Prune cancelled")
|
||||
// return
|
||||
// }
|
||||
// }
|
||||
//
|
||||
// idx := strings.LastIndex(conf.Server.DbPath, "?")
|
||||
// var path string
|
||||
//
|
||||
// if idx == -1 {
|
||||
// path = conf.Server.DbPath
|
||||
// } else {
|
||||
// path = conf.Server.DbPath[:idx]
|
||||
// }
|
||||
//
|
||||
// if _, err := os.Stat(path); os.IsNotExist(err) {
|
||||
// log.Fatal("No existing database", "path", path)
|
||||
// return
|
||||
// }
|
||||
//
|
||||
// start := time.Now()
|
||||
// count, err := db.Prune(ctx)
|
||||
// if err != nil {
|
||||
// log.Fatal("Error pruning up database", "backup path", conf.Server.BasePath, err)
|
||||
// }
|
||||
//
|
||||
// elapsed := time.Since(start)
|
||||
//
|
||||
// log.Info("Prune complete", "elapsed", elapsed, "successfully pruned", count)
|
||||
//}
|
||||
//
|
||||
//func runRestore(ctx context.Context) {
|
||||
// idx := strings.LastIndex(conf.Server.DbPath, "?")
|
||||
// var path string
|
||||
//
|
||||
// if idx == -1 {
|
||||
// path = conf.Server.DbPath
|
||||
// } else {
|
||||
// path = conf.Server.DbPath[:idx]
|
||||
// }
|
||||
//
|
||||
// if _, err := os.Stat(path); os.IsNotExist(err) {
|
||||
// log.Fatal("No existing database", "path", path)
|
||||
// return
|
||||
// }
|
||||
//
|
||||
// if !force {
|
||||
// fmt.Println("Warning: restoring the Navidrome database should only be done offline, especially if your backup is very old.")
|
||||
// fmt.Printf("Please enter YES (all caps) to continue: ")
|
||||
// var input string
|
||||
// _, err := fmt.Scanln(&input)
|
||||
//
|
||||
// if input != "YES" || err != nil {
|
||||
// log.Warn("Restore cancelled")
|
||||
// return
|
||||
// }
|
||||
// }
|
||||
//
|
||||
// start := time.Now()
|
||||
// err := db.Restore(ctx, restorePath)
|
||||
// if err != nil {
|
||||
// log.Fatal("Error restoring database", "backup path", conf.Server.BasePath, err)
|
||||
// }
|
||||
//
|
||||
// elapsed := time.Since(start)
|
||||
// log.Info("Restore complete", "elapsed", elapsed)
|
||||
//}
|
||||
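The backup, prune, and restore commands are still commented out here, but they all share the same way of locating the database file: strip everything after the last "?" from DbPath (SQLite connection options) before calling os.Stat. A minimal sketch of that logic, with an invented sample path:

package main

import (
	"fmt"
	"strings"
)

// stripDbOptions mirrors the path handling in the commented-out backup commands above:
// anything after the last "?" in DbPath is treated as connection options, not part of the file name.
func stripDbOptions(dbPath string) string {
	idx := strings.LastIndex(dbPath, "?")
	if idx == -1 {
		return dbPath
	}
	return dbPath[:idx]
}

func main() {
	// Sample value is illustrative, not taken from a real config.
	fmt.Println(stripDbOptions("navidrome.db?cache=shared&_busy_timeout=15000"))
	// Prints: navidrome.db
}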
cmd/cmd_suite_test.go | 17 (new file)
@@ -0,0 +1,17 @@
package cmd

import (
	"testing"

	"github.com/navidrome/navidrome/log"
	"github.com/navidrome/navidrome/tests"
	. "github.com/onsi/ginkgo/v2"
	. "github.com/onsi/gomega"
)

func TestCmd(t *testing.T) {
	tests.Init(t, false)
	log.SetLevel(log.LevelFatal)
	RegisterFailHandler(Fail)
	RunSpecs(t, "Cmd Suite")
}
@@ -5,25 +5,20 @@ import (
|
||||
"fmt"
|
||||
"strings"
|
||||
|
||||
"github.com/navidrome/navidrome/conf"
|
||||
"github.com/navidrome/navidrome/core"
|
||||
"github.com/navidrome/navidrome/log"
|
||||
"github.com/navidrome/navidrome/model"
|
||||
"github.com/navidrome/navidrome/scanner"
|
||||
"github.com/navidrome/navidrome/scanner/metadata"
|
||||
"github.com/navidrome/navidrome/tests"
|
||||
"github.com/pelletier/go-toml/v2"
|
||||
"github.com/spf13/cobra"
|
||||
"gopkg.in/yaml.v3"
|
||||
)
|
||||
|
||||
var (
|
||||
extractor string
|
||||
format string
|
||||
format string
|
||||
)
|
||||
|
||||
func init() {
|
||||
inspectCmd.Flags().StringVarP(&extractor, "extractor", "x", "", "extractor to use (ffmpeg or taglib, default: auto)")
|
||||
inspectCmd.Flags().StringVarP(&format, "format", "f", "pretty", "output format (pretty, toml, yaml, json, jsonindent)")
|
||||
inspectCmd.Flags().StringVarP(&format, "format", "f", "jsonindent", "output format (pretty, toml, yaml, json, jsonindent)")
|
||||
rootCmd.AddCommand(inspectCmd)
|
||||
}
|
||||
|
||||
@@ -48,7 +43,7 @@ var marshalers = map[string]func(interface{}) ([]byte, error){
|
||||
}
|
||||
|
||||
func prettyMarshal(v interface{}) ([]byte, error) {
|
||||
out := v.([]inspectorOutput)
|
||||
out := v.([]core.InspectOutput)
|
||||
var res strings.Builder
|
||||
for i := range out {
|
||||
res.WriteString(fmt.Sprintf("====================\nFile: %s\n\n", out[i].File))
|
||||
@@ -60,39 +55,24 @@ func prettyMarshal(v interface{}) ([]byte, error) {
|
||||
return []byte(res.String()), nil
|
||||
}
|
||||
|
||||
type inspectorOutput struct {
|
||||
File string
|
||||
RawTags metadata.ParsedTags
|
||||
MappedTags model.MediaFile
|
||||
}
|
||||
|
||||
func runInspector(args []string) {
|
||||
if extractor != "" {
|
||||
conf.Server.Scanner.Extractor = extractor
|
||||
}
|
||||
log.Info("Using extractor", "extractor", conf.Server.Scanner.Extractor)
|
||||
md, err := metadata.Extract(args...)
|
||||
if err != nil {
|
||||
log.Fatal("Error extracting tags", err)
|
||||
}
|
||||
mapper := scanner.NewMediaFileMapper(conf.Server.MusicFolder, &tests.MockedGenreRepo{})
|
||||
marshal := marshalers[format]
|
||||
if marshal == nil {
|
||||
log.Fatal("Invalid format", "format", format)
|
||||
}
|
||||
var out []inspectorOutput
|
||||
for k, v := range md {
|
||||
if !model.IsAudioFile(k) {
|
||||
var out []core.InspectOutput
|
||||
for _, filePath := range args {
|
||||
if !model.IsAudioFile(filePath) {
|
||||
log.Warn("Not an audio file", "file", filePath)
|
||||
continue
|
||||
}
|
||||
if len(v.Tags) == 0 {
|
||||
output, err := core.Inspect(filePath, 1, "")
|
||||
if err != nil {
|
||||
log.Warn("Unable to process file", "file", filePath, "error", err)
|
||||
continue
|
||||
}
|
||||
out = append(out, inspectorOutput{
|
||||
File: k,
|
||||
RawTags: v.Tags,
|
||||
MappedTags: mapper.ToMediaFile(v),
|
||||
})
|
||||
|
||||
out = append(out, *output)
|
||||
}
|
||||
data, _ := marshal(out)
|
||||
fmt.Println(string(data))
|
||||
|
||||
cmd/pls.go | 109
@@ -2,8 +2,12 @@ package cmd
|
||||
|
||||
import (
|
||||
"context"
|
||||
"encoding/csv"
|
||||
"encoding/json"
|
||||
"errors"
|
||||
"fmt"
|
||||
"os"
|
||||
"strconv"
|
||||
|
||||
"github.com/Masterminds/squirrel"
|
||||
"github.com/navidrome/navidrome/core/auth"
|
||||
@@ -15,31 +19,57 @@ import (
|
||||
)
|
||||
|
||||
var (
|
||||
playlistID string
|
||||
outputFile string
|
||||
playlistID string
|
||||
outputFile string
|
||||
userID string
|
||||
outputFormat string
|
||||
)
|
||||
|
||||
type displayPlaylist struct {
|
||||
Id string `json:"id"`
|
||||
Name string `json:"name"`
|
||||
OwnerName string `json:"ownerName"`
|
||||
OwnerId string `json:"ownerId"`
|
||||
Public bool `json:"public"`
|
||||
}
|
||||
|
||||
type displayPlaylists []displayPlaylist
|
||||
|
||||
func init() {
|
||||
plsCmd.Flags().StringVarP(&playlistID, "playlist", "p", "", "playlist name or ID")
|
||||
plsCmd.Flags().StringVarP(&outputFile, "output", "o", "", "output file (default stdout)")
|
||||
_ = plsCmd.MarkFlagRequired("playlist")
|
||||
rootCmd.AddCommand(plsCmd)
|
||||
|
||||
listCommand.Flags().StringVarP(&userID, "user", "u", "", "username or ID")
|
||||
listCommand.Flags().StringVarP(&outputFormat, "format", "f", "csv", "output format [supported values: csv, json]")
|
||||
plsCmd.AddCommand(listCommand)
|
||||
}
|
||||
|
||||
var plsCmd = &cobra.Command{
|
||||
Use: "pls",
|
||||
Short: "Export playlists",
|
||||
Long: "Export Navidrome playlists to M3U files",
|
||||
Run: func(cmd *cobra.Command, args []string) {
|
||||
runExporter()
|
||||
},
|
||||
}
|
||||
var (
|
||||
plsCmd = &cobra.Command{
|
||||
Use: "pls",
|
||||
Short: "Export playlists",
|
||||
Long: "Export Navidrome playlists to M3U files",
|
||||
Run: func(cmd *cobra.Command, args []string) {
|
||||
runExporter()
|
||||
},
|
||||
}
|
||||
|
||||
listCommand = &cobra.Command{
|
||||
Use: "list",
|
||||
Short: "List playlists",
|
||||
Run: func(cmd *cobra.Command, args []string) {
|
||||
runList()
|
||||
},
|
||||
}
|
||||
)
|
||||
|
||||
func runExporter() {
|
||||
sqlDB := db.Db()
|
||||
ds := persistence.New(sqlDB)
|
||||
ctx := auth.WithAdminUser(context.Background(), ds)
|
||||
playlist, err := ds.Playlist(ctx).GetWithTracks(playlistID, true)
|
||||
playlist, err := ds.Playlist(ctx).GetWithTracks(playlistID, true, false)
|
||||
if err != nil && !errors.Is(err, model.ErrNotFound) {
|
||||
log.Fatal("Error retrieving playlist", "name", playlistID, err)
|
||||
}
|
||||
@@ -49,7 +79,7 @@ func runExporter() {
|
||||
log.Fatal("Error retrieving playlist", "name", playlistID, err)
|
||||
}
|
||||
if len(playlists) > 0 {
|
||||
playlist, err = ds.Playlist(ctx).GetWithTracks(playlists[0].ID, true)
|
||||
playlist, err = ds.Playlist(ctx).GetWithTracks(playlists[0].ID, true, false)
|
||||
if err != nil {
|
||||
log.Fatal("Error retrieving playlist", "name", playlistID, err)
|
||||
}
|
||||
@@ -69,3 +99,58 @@ func runExporter() {
|
||||
log.Fatal("Error writing to the output file", "file", outputFile, err)
|
||||
}
|
||||
}
|
||||
|
||||
func runList() {
|
||||
if outputFormat != "csv" && outputFormat != "json" {
|
||||
log.Fatal("Invalid output format. Must be one of csv, json", "format", outputFormat)
|
||||
}
|
||||
|
||||
sqlDB := db.Db()
|
||||
ds := persistence.New(sqlDB)
|
||||
ctx := auth.WithAdminUser(context.Background(), ds)
|
||||
|
||||
options := model.QueryOptions{Sort: "owner_name"}
|
||||
|
||||
if userID != "" {
|
||||
user, err := ds.User(ctx).FindByUsername(userID)
|
||||
|
||||
if err != nil && !errors.Is(err, model.ErrNotFound) {
|
||||
log.Fatal("Error retrieving user by name", "name", userID, err)
|
||||
}
|
||||
|
||||
if errors.Is(err, model.ErrNotFound) {
|
||||
user, err = ds.User(ctx).Get(userID)
|
||||
if err != nil {
|
||||
log.Fatal("Error retrieving user by id", "id", userID, err)
|
||||
}
|
||||
}
|
||||
|
||||
options.Filters = squirrel.Eq{"owner_id": user.ID}
|
||||
}
|
||||
|
||||
playlists, err := ds.Playlist(ctx).GetAll(options)
|
||||
if err != nil {
|
||||
log.Fatal(ctx, "Failed to retrieve playlists", err)
|
||||
}
|
||||
|
||||
if outputFormat == "csv" {
|
||||
w := csv.NewWriter(os.Stdout)
|
||||
_ = w.Write([]string{"playlist id", "playlist name", "owner id", "owner name", "public"})
|
||||
for _, playlist := range playlists {
|
||||
_ = w.Write([]string{playlist.ID, playlist.Name, playlist.OwnerID, playlist.OwnerName, strconv.FormatBool(playlist.Public)})
|
||||
}
|
||||
w.Flush()
|
||||
} else {
|
||||
display := make(displayPlaylists, len(playlists))
|
||||
for idx, playlist := range playlists {
|
||||
display[idx].Id = playlist.ID
|
||||
display[idx].Name = playlist.Name
|
||||
display[idx].OwnerId = playlist.OwnerID
|
||||
display[idx].OwnerName = playlist.OwnerName
|
||||
display[idx].Public = playlist.Public
|
||||
}
|
||||
|
||||
j, _ := json.Marshal(display)
|
||||
fmt.Printf("%s\n", j)
|
||||
}
|
||||
}
|
||||
|
||||
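For reference, the JSON emitted by the new "pls list --format json" path follows directly from the displayPlaylist struct tags above. A self-contained sketch of that shape (the sample values are invented; only the field names come from the struct tags):

package main

import (
	"encoding/json"
	"fmt"
)

// Mirrors the displayPlaylist struct defined in cmd/pls.go above.
type displayPlaylist struct {
	Id        string `json:"id"`
	Name      string `json:"name"`
	OwnerName string `json:"ownerName"`
	OwnerId   string `json:"ownerId"`
	Public    bool   `json:"public"`
}

func main() {
	p := []displayPlaylist{{Id: "pl-1", Name: "Morning Mix", OwnerName: "admin", OwnerId: "u-1", Public: true}}
	j, _ := json.Marshal(p)
	fmt.Println(string(j))
	// Prints: [{"id":"pl-1","name":"Morning Mix","ownerName":"admin","ownerId":"u-1","public":true}]
}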
cmd/plugin.go | 716 (new file)
@@ -0,0 +1,716 @@
|
||||
package cmd
|
||||
|
||||
import (
|
||||
"cmp"
|
||||
"crypto/sha256"
|
||||
"encoding/hex"
|
||||
"fmt"
|
||||
"io"
|
||||
"os"
|
||||
"path/filepath"
|
||||
"strings"
|
||||
"text/tabwriter"
|
||||
"time"
|
||||
|
||||
"github.com/navidrome/navidrome/conf"
|
||||
"github.com/navidrome/navidrome/log"
|
||||
"github.com/navidrome/navidrome/plugins"
|
||||
"github.com/navidrome/navidrome/plugins/schema"
|
||||
"github.com/navidrome/navidrome/utils"
|
||||
"github.com/navidrome/navidrome/utils/slice"
|
||||
"github.com/spf13/cobra"
|
||||
)
|
||||
|
||||
const (
|
||||
pluginPackageExtension = ".ndp"
|
||||
pluginDirPermissions = 0700
|
||||
pluginFilePermissions = 0600
|
||||
)
|
||||
|
||||
func init() {
|
||||
pluginCmd := &cobra.Command{
|
||||
Use: "plugin",
|
||||
Short: "Manage Navidrome plugins",
|
||||
Long: "Commands for managing Navidrome plugins",
|
||||
}
|
||||
|
||||
listCmd := &cobra.Command{
|
||||
Use: "list",
|
||||
Short: "List installed plugins",
|
||||
Long: "List all installed plugins with their metadata",
|
||||
Run: pluginList,
|
||||
}
|
||||
|
||||
infoCmd := &cobra.Command{
|
||||
Use: "info [pluginPackage|pluginName]",
|
||||
Short: "Show details of a plugin",
|
||||
Long: "Show detailed information about a plugin package (.ndp file) or an installed plugin",
|
||||
Args: cobra.ExactArgs(1),
|
||||
Run: pluginInfo,
|
||||
}
|
||||
|
||||
installCmd := &cobra.Command{
|
||||
Use: "install [pluginPackage]",
|
||||
Short: "Install a plugin from a .ndp file",
|
||||
Long: "Install a Navidrome Plugin Package (.ndp) file",
|
||||
Args: cobra.ExactArgs(1),
|
||||
Run: pluginInstall,
|
||||
}
|
||||
|
||||
removeCmd := &cobra.Command{
|
||||
Use: "remove [pluginName]",
|
||||
Short: "Remove an installed plugin",
|
||||
Long: "Remove a plugin by name",
|
||||
Args: cobra.ExactArgs(1),
|
||||
Run: pluginRemove,
|
||||
}
|
||||
|
||||
updateCmd := &cobra.Command{
|
||||
Use: "update [pluginPackage]",
|
||||
Short: "Update an existing plugin",
|
||||
Long: "Update an installed plugin with a new version from a .ndp file",
|
||||
Args: cobra.ExactArgs(1),
|
||||
Run: pluginUpdate,
|
||||
}
|
||||
|
||||
refreshCmd := &cobra.Command{
|
||||
Use: "refresh [pluginName]",
|
||||
Short: "Reload a plugin without restarting Navidrome",
|
||||
Long: "Reload and recompile a plugin without needing to restart Navidrome",
|
||||
Args: cobra.ExactArgs(1),
|
||||
Run: pluginRefresh,
|
||||
}
|
||||
|
||||
devCmd := &cobra.Command{
|
||||
Use: "dev [folder_path]",
|
||||
Short: "Create symlink to development folder",
|
||||
Long: "Create a symlink from a plugin development folder to the plugins directory for easier development",
|
||||
Args: cobra.ExactArgs(1),
|
||||
Run: pluginDev,
|
||||
}
|
||||
|
||||
pluginCmd.AddCommand(listCmd, infoCmd, installCmd, removeCmd, updateCmd, refreshCmd, devCmd)
|
||||
rootCmd.AddCommand(pluginCmd)
|
||||
}
|
||||
|
||||
// Validation helpers
|
||||
|
||||
func validatePluginPackageFile(path string) error {
|
||||
if !utils.FileExists(path) {
|
||||
return fmt.Errorf("plugin package not found: %s", path)
|
||||
}
|
||||
if filepath.Ext(path) != pluginPackageExtension {
|
||||
return fmt.Errorf("not a valid plugin package: %s (expected %s extension)", path, pluginPackageExtension)
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
func validatePluginDirectory(pluginsDir, pluginName string) (string, error) {
|
||||
pluginDir := filepath.Join(pluginsDir, pluginName)
|
||||
if !utils.FileExists(pluginDir) {
|
||||
return "", fmt.Errorf("plugin not found: %s (path: %s)", pluginName, pluginDir)
|
||||
}
|
||||
return pluginDir, nil
|
||||
}
|
||||
|
||||
func resolvePluginPath(pluginDir string) (resolvedPath string, isSymlink bool, err error) {
|
||||
// Check if it's a directory or a symlink
|
||||
lstat, err := os.Lstat(pluginDir)
|
||||
if err != nil {
|
||||
return "", false, fmt.Errorf("failed to stat plugin: %w", err)
|
||||
}
|
||||
|
||||
isSymlink = lstat.Mode()&os.ModeSymlink != 0
|
||||
|
||||
if isSymlink {
|
||||
// Resolve the symlink target
|
||||
targetDir, err := os.Readlink(pluginDir)
|
||||
if err != nil {
|
||||
return "", true, fmt.Errorf("failed to resolve symlink: %w", err)
|
||||
}
|
||||
|
||||
// If target is a relative path, make it absolute
|
||||
if !filepath.IsAbs(targetDir) {
|
||||
targetDir = filepath.Join(filepath.Dir(pluginDir), targetDir)
|
||||
}
|
||||
|
||||
// Verify the target exists and is a directory
|
||||
targetInfo, err := os.Stat(targetDir)
|
||||
if err != nil {
|
||||
return "", true, fmt.Errorf("failed to access symlink target %s: %w", targetDir, err)
|
||||
}
|
||||
|
||||
if !targetInfo.IsDir() {
|
||||
return "", true, fmt.Errorf("symlink target is not a directory: %s", targetDir)
|
||||
}
|
||||
|
||||
return targetDir, true, nil
|
||||
} else if !lstat.IsDir() {
|
||||
return "", false, fmt.Errorf("not a valid plugin directory: %s", pluginDir)
|
||||
}
|
||||
|
||||
return pluginDir, false, nil
|
||||
}
|
||||
|
||||
// Package handling helpers
|
||||
|
||||
func loadAndValidatePackage(ndpPath string) (*plugins.PluginPackage, error) {
|
||||
if err := validatePluginPackageFile(ndpPath); err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
pkg, err := plugins.LoadPackage(ndpPath)
|
||||
if err != nil {
|
||||
return nil, fmt.Errorf("failed to load plugin package: %w", err)
|
||||
}
|
||||
|
||||
return pkg, nil
|
||||
}
|
||||
|
||||
func extractAndSetupPlugin(ndpPath, targetDir string) error {
|
||||
if err := plugins.ExtractPackage(ndpPath, targetDir); err != nil {
|
||||
return fmt.Errorf("failed to extract plugin package: %w", err)
|
||||
}
|
||||
|
||||
ensurePluginDirPermissions(targetDir)
|
||||
return nil
|
||||
}
|
||||
|
||||
// Display helpers
|
||||
|
||||
func displayPluginTableRow(w *tabwriter.Writer, discovery plugins.PluginDiscoveryEntry) {
|
||||
if discovery.Error != nil {
|
||||
// Handle global errors (like directory read failure)
|
||||
if discovery.ID == "" {
|
||||
log.Error("Failed to read plugins directory", "folder", conf.Server.Plugins.Folder, discovery.Error)
|
||||
return
|
||||
}
|
||||
// Handle individual plugin errors - show them in the table
|
||||
fmt.Fprintf(w, "%s\tERROR\tERROR\tERROR\tERROR\t%v\n", discovery.ID, discovery.Error)
|
||||
return
|
||||
}
|
||||
|
||||
// Mark symlinks with an indicator
|
||||
nameDisplay := discovery.Manifest.Name
|
||||
if discovery.IsSymlink {
|
||||
nameDisplay = nameDisplay + " (dev)"
|
||||
}
|
||||
|
||||
// Convert capabilities to strings
|
||||
capabilities := slice.Map(discovery.Manifest.Capabilities, func(cap schema.PluginManifestCapabilitiesElem) string {
|
||||
return string(cap)
|
||||
})
|
||||
|
||||
fmt.Fprintf(w, "%s\t%s\t%s\t%s\t%s\t%s\n",
|
||||
discovery.ID,
|
||||
nameDisplay,
|
||||
cmp.Or(discovery.Manifest.Author, "-"),
|
||||
cmp.Or(discovery.Manifest.Version, "-"),
|
||||
strings.Join(capabilities, ", "),
|
||||
cmp.Or(discovery.Manifest.Description, "-"))
|
||||
}
|
||||
|
||||
func displayTypedPermissions(permissions schema.PluginManifestPermissions, indent string) {
|
||||
if permissions.Http != nil {
|
||||
fmt.Printf("%shttp:\n", indent)
|
||||
fmt.Printf("%s Reason: %s\n", indent, permissions.Http.Reason)
|
||||
fmt.Printf("%s Allow Local Network: %t\n", indent, permissions.Http.AllowLocalNetwork)
|
||||
fmt.Printf("%s Allowed URLs:\n", indent)
|
||||
for urlPattern, methodEnums := range permissions.Http.AllowedUrls {
|
||||
methods := make([]string, len(methodEnums))
|
||||
for i, methodEnum := range methodEnums {
|
||||
methods[i] = string(methodEnum)
|
||||
}
|
||||
fmt.Printf("%s %s: [%s]\n", indent, urlPattern, strings.Join(methods, ", "))
|
||||
}
|
||||
fmt.Println()
|
||||
}
|
||||
|
||||
if permissions.Config != nil {
|
||||
fmt.Printf("%sconfig:\n", indent)
|
||||
fmt.Printf("%s Reason: %s\n", indent, permissions.Config.Reason)
|
||||
fmt.Println()
|
||||
}
|
||||
|
||||
if permissions.Scheduler != nil {
|
||||
fmt.Printf("%sscheduler:\n", indent)
|
||||
fmt.Printf("%s Reason: %s\n", indent, permissions.Scheduler.Reason)
|
||||
fmt.Println()
|
||||
}
|
||||
|
||||
if permissions.Websocket != nil {
|
||||
fmt.Printf("%swebsocket:\n", indent)
|
||||
fmt.Printf("%s Reason: %s\n", indent, permissions.Websocket.Reason)
|
||||
fmt.Printf("%s Allow Local Network: %t\n", indent, permissions.Websocket.AllowLocalNetwork)
|
||||
fmt.Printf("%s Allowed URLs: [%s]\n", indent, strings.Join(permissions.Websocket.AllowedUrls, ", "))
|
||||
fmt.Println()
|
||||
}
|
||||
|
||||
if permissions.Cache != nil {
|
||||
fmt.Printf("%scache:\n", indent)
|
||||
fmt.Printf("%s Reason: %s\n", indent, permissions.Cache.Reason)
|
||||
fmt.Println()
|
||||
}
|
||||
|
||||
if permissions.Artwork != nil {
|
||||
fmt.Printf("%sartwork:\n", indent)
|
||||
fmt.Printf("%s Reason: %s\n", indent, permissions.Artwork.Reason)
|
||||
fmt.Println()
|
||||
}
|
||||
|
||||
if permissions.Subsonicapi != nil {
|
||||
allowedUsers := "All Users"
|
||||
if len(permissions.Subsonicapi.AllowedUsernames) > 0 {
|
||||
allowedUsers = strings.Join(permissions.Subsonicapi.AllowedUsernames, ", ")
|
||||
}
|
||||
fmt.Printf("%ssubsonicapi:\n", indent)
|
||||
fmt.Printf("%s Reason: %s\n", indent, permissions.Subsonicapi.Reason)
|
||||
fmt.Printf("%s Allow Admins: %t\n", indent, permissions.Subsonicapi.AllowAdmins)
|
||||
fmt.Printf("%s Allowed Usernames: [%s]\n", indent, allowedUsers)
|
||||
fmt.Println()
|
||||
}
|
||||
}
|
||||
|
||||
func displayPluginDetails(manifest *schema.PluginManifest, fileInfo *pluginFileInfo, permInfo *pluginPermissionInfo) {
|
||||
fmt.Println("\nPlugin Information:")
|
||||
fmt.Printf(" Name: %s\n", manifest.Name)
|
||||
fmt.Printf(" Author: %s\n", manifest.Author)
|
||||
fmt.Printf(" Version: %s\n", manifest.Version)
|
||||
fmt.Printf(" Description: %s\n", manifest.Description)
|
||||
|
||||
fmt.Print(" Capabilities: ")
|
||||
capabilities := make([]string, len(manifest.Capabilities))
|
||||
for i, cap := range manifest.Capabilities {
|
||||
capabilities[i] = string(cap)
|
||||
}
|
||||
fmt.Print(strings.Join(capabilities, ", "))
|
||||
fmt.Println()
|
||||
|
||||
// Display manifest permissions using the typed permissions
|
||||
fmt.Println(" Required Permissions:")
|
||||
displayTypedPermissions(manifest.Permissions, " ")
|
||||
|
||||
// Print file information if available
|
||||
if fileInfo != nil {
|
||||
fmt.Println("Package Information:")
|
||||
fmt.Printf(" File: %s\n", fileInfo.path)
|
||||
fmt.Printf(" Size: %d bytes (%.2f KB)\n", fileInfo.size, float64(fileInfo.size)/1024)
|
||||
fmt.Printf(" SHA-256: %s\n", fileInfo.hash)
|
||||
fmt.Printf(" Modified: %s\n", fileInfo.modTime.Format(time.RFC3339))
|
||||
}
|
||||
|
||||
// Print file permissions information if available
|
||||
if permInfo != nil {
|
||||
fmt.Println("File Permissions:")
|
||||
fmt.Printf(" Plugin Directory: %s (%s)\n", permInfo.dirPath, permInfo.dirMode)
|
||||
if permInfo.isSymlink {
|
||||
fmt.Printf(" Symlink Target: %s (%s)\n", permInfo.targetPath, permInfo.targetMode)
|
||||
}
|
||||
fmt.Printf(" Manifest File: %s\n", permInfo.manifestMode)
|
||||
if permInfo.wasmMode != "" {
|
||||
fmt.Printf(" WASM File: %s\n", permInfo.wasmMode)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
type pluginFileInfo struct {
|
||||
path string
|
||||
size int64
|
||||
hash string
|
||||
modTime time.Time
|
||||
}
|
||||
|
||||
type pluginPermissionInfo struct {
|
||||
dirPath string
|
||||
dirMode string
|
||||
isSymlink bool
|
||||
targetPath string
|
||||
targetMode string
|
||||
manifestMode string
|
||||
wasmMode string
|
||||
}
|
||||
|
||||
func getFileInfo(path string) *pluginFileInfo {
|
||||
fileInfo, err := os.Stat(path)
|
||||
if err != nil {
|
||||
log.Error("Failed to get file information", err)
|
||||
return nil
|
||||
}
|
||||
|
||||
return &pluginFileInfo{
|
||||
path: path,
|
||||
size: fileInfo.Size(),
|
||||
hash: calculateSHA256(path),
|
||||
modTime: fileInfo.ModTime(),
|
||||
}
|
||||
}
|
||||
|
||||
func getPermissionInfo(pluginDir string) *pluginPermissionInfo {
|
||||
// Get plugin directory permissions
|
||||
dirInfo, err := os.Lstat(pluginDir)
|
||||
if err != nil {
|
||||
log.Error("Failed to get plugin directory permissions", err)
|
||||
return nil
|
||||
}
|
||||
|
||||
permInfo := &pluginPermissionInfo{
|
||||
dirPath: pluginDir,
|
||||
dirMode: dirInfo.Mode().String(),
|
||||
}
|
||||
|
||||
// Check if it's a symlink
|
||||
if dirInfo.Mode()&os.ModeSymlink != 0 {
|
||||
permInfo.isSymlink = true
|
||||
|
||||
// Get target path and permissions
|
||||
targetPath, err := os.Readlink(pluginDir)
|
||||
if err == nil {
|
||||
if !filepath.IsAbs(targetPath) {
|
||||
targetPath = filepath.Join(filepath.Dir(pluginDir), targetPath)
|
||||
}
|
||||
permInfo.targetPath = targetPath
|
||||
|
||||
if targetInfo, err := os.Stat(targetPath); err == nil {
|
||||
permInfo.targetMode = targetInfo.Mode().String()
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// Get manifest file permissions
|
||||
manifestPath := filepath.Join(pluginDir, "manifest.json")
|
||||
if manifestInfo, err := os.Stat(manifestPath); err == nil {
|
||||
permInfo.manifestMode = manifestInfo.Mode().String()
|
||||
}
|
||||
|
||||
// Get WASM file permissions (look for .wasm files)
|
||||
entries, err := os.ReadDir(pluginDir)
|
||||
if err == nil {
|
||||
for _, entry := range entries {
|
||||
if filepath.Ext(entry.Name()) == ".wasm" {
|
||||
wasmPath := filepath.Join(pluginDir, entry.Name())
|
||||
if wasmInfo, err := os.Stat(wasmPath); err == nil {
|
||||
permInfo.wasmMode = wasmInfo.Mode().String()
|
||||
break // Just show the first WASM file found
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
return permInfo
|
||||
}
|
||||
|
||||
// Command implementations
|
||||
|
||||
func pluginList(cmd *cobra.Command, args []string) {
|
||||
discoveries := plugins.DiscoverPlugins(conf.Server.Plugins.Folder)
|
||||
|
||||
w := tabwriter.NewWriter(os.Stdout, 0, 0, 2, ' ', 0)
|
||||
fmt.Fprintln(w, "ID\tNAME\tAUTHOR\tVERSION\tCAPABILITIES\tDESCRIPTION")
|
||||
|
||||
for _, discovery := range discoveries {
|
||||
displayPluginTableRow(w, discovery)
|
||||
}
|
||||
w.Flush()
|
||||
}
|
||||
|
||||
func pluginInfo(cmd *cobra.Command, args []string) {
|
||||
path := args[0]
|
||||
pluginsDir := conf.Server.Plugins.Folder
|
||||
|
||||
var manifest *schema.PluginManifest
|
||||
var fileInfo *pluginFileInfo
|
||||
var permInfo *pluginPermissionInfo
|
||||
|
||||
if filepath.Ext(path) == pluginPackageExtension {
|
||||
// It's a package file
|
||||
pkg, err := loadAndValidatePackage(path)
|
||||
if err != nil {
|
||||
log.Fatal("Failed to load plugin package", err)
|
||||
}
|
||||
manifest = pkg.Manifest
|
||||
fileInfo = getFileInfo(path)
|
||||
// No permission info for package files
|
||||
} else {
|
||||
// It's a plugin name
|
||||
pluginDir, err := validatePluginDirectory(pluginsDir, path)
|
||||
if err != nil {
|
||||
log.Fatal("Plugin validation failed", err)
|
||||
}
|
||||
|
||||
manifest, err = plugins.LoadManifest(pluginDir)
|
||||
if err != nil {
|
||||
log.Fatal("Failed to load plugin manifest", err)
|
||||
}
|
||||
|
||||
// Get permission info for installed plugins
|
||||
permInfo = getPermissionInfo(pluginDir)
|
||||
}
|
||||
|
||||
displayPluginDetails(manifest, fileInfo, permInfo)
|
||||
}
|
||||
|
||||
func pluginInstall(cmd *cobra.Command, args []string) {
|
||||
ndpPath := args[0]
|
||||
pluginsDir := conf.Server.Plugins.Folder
|
||||
|
||||
pkg, err := loadAndValidatePackage(ndpPath)
|
||||
if err != nil {
|
||||
log.Fatal("Package validation failed", err)
|
||||
}
|
||||
|
||||
// Create target directory based on plugin name
|
||||
targetDir := filepath.Join(pluginsDir, pkg.Manifest.Name)
|
||||
|
||||
// Check if plugin already exists
|
||||
if utils.FileExists(targetDir) {
|
||||
log.Fatal("Plugin already installed", "name", pkg.Manifest.Name, "path", targetDir,
|
||||
"use", "navidrome plugin update")
|
||||
}
|
||||
|
||||
if err := extractAndSetupPlugin(ndpPath, targetDir); err != nil {
|
||||
log.Fatal("Plugin installation failed", err)
|
||||
}
|
||||
|
||||
fmt.Printf("Plugin '%s' v%s installed successfully\n", pkg.Manifest.Name, pkg.Manifest.Version)
|
||||
}
|
||||
|
||||
func pluginRemove(cmd *cobra.Command, args []string) {
|
||||
pluginName := args[0]
|
||||
pluginsDir := conf.Server.Plugins.Folder
|
||||
|
||||
pluginDir, err := validatePluginDirectory(pluginsDir, pluginName)
|
||||
if err != nil {
|
||||
log.Fatal("Plugin validation failed", err)
|
||||
}
|
||||
|
||||
_, isSymlink, err := resolvePluginPath(pluginDir)
|
||||
if err != nil {
|
||||
log.Fatal("Failed to resolve plugin path", err)
|
||||
}
|
||||
|
||||
if isSymlink {
|
||||
// For symlinked plugins (dev mode), just remove the symlink
|
||||
if err := os.Remove(pluginDir); err != nil {
|
||||
log.Fatal("Failed to remove plugin symlink", "name", pluginName, err)
|
||||
}
|
||||
fmt.Printf("Development plugin symlink '%s' removed successfully (target directory preserved)\n", pluginName)
|
||||
} else {
|
||||
// For regular plugins, remove the entire directory
|
||||
if err := os.RemoveAll(pluginDir); err != nil {
|
||||
log.Fatal("Failed to remove plugin directory", "name", pluginName, err)
|
||||
}
|
||||
fmt.Printf("Plugin '%s' removed successfully\n", pluginName)
|
||||
}
|
||||
}
|
||||
|
||||
func pluginUpdate(cmd *cobra.Command, args []string) {
|
||||
ndpPath := args[0]
|
||||
pluginsDir := conf.Server.Plugins.Folder
|
||||
|
||||
pkg, err := loadAndValidatePackage(ndpPath)
|
||||
if err != nil {
|
||||
log.Fatal("Package validation failed", err)
|
||||
}
|
||||
|
||||
// Check if plugin exists
|
||||
targetDir := filepath.Join(pluginsDir, pkg.Manifest.Name)
|
||||
if !utils.FileExists(targetDir) {
|
||||
log.Fatal("Plugin not found", "name", pkg.Manifest.Name, "path", targetDir,
|
||||
"use", "navidrome plugin install")
|
||||
}
|
||||
|
||||
// Create a backup of the existing plugin
|
||||
backupDir := targetDir + ".bak." + time.Now().Format("20060102150405")
|
||||
if err := os.Rename(targetDir, backupDir); err != nil {
|
||||
log.Fatal("Failed to backup existing plugin", err)
|
||||
}
|
||||
|
||||
// Extract the new package
|
||||
if err := extractAndSetupPlugin(ndpPath, targetDir); err != nil {
|
||||
// Restore backup if extraction failed
|
||||
os.RemoveAll(targetDir)
|
||||
_ = os.Rename(backupDir, targetDir) // Ignore error as we're already in a fatal path
|
||||
log.Fatal("Plugin update failed", err)
|
||||
}
|
||||
|
||||
// Remove the backup
|
||||
os.RemoveAll(backupDir)
|
||||
|
||||
fmt.Printf("Plugin '%s' updated to v%s successfully\n", pkg.Manifest.Name, pkg.Manifest.Version)
|
||||
}
|
||||
|
||||
func pluginRefresh(cmd *cobra.Command, args []string) {
|
||||
pluginName := args[0]
|
||||
pluginsDir := conf.Server.Plugins.Folder
|
||||
|
||||
pluginDir, err := validatePluginDirectory(pluginsDir, pluginName)
|
||||
if err != nil {
|
||||
log.Fatal("Plugin validation failed", err)
|
||||
}
|
||||
|
||||
resolvedPath, isSymlink, err := resolvePluginPath(pluginDir)
|
||||
if err != nil {
|
||||
log.Fatal("Failed to resolve plugin path", err)
|
||||
}
|
||||
|
||||
if isSymlink {
|
||||
log.Debug("Processing symlinked plugin", "name", pluginName, "link", pluginDir, "target", resolvedPath)
|
||||
}
|
||||
|
||||
fmt.Printf("Refreshing plugin '%s'...\n", pluginName)
|
||||
|
||||
// Get the plugin manager and refresh
|
||||
mgr := GetPluginManager(cmd.Context())
|
||||
log.Debug("Scanning plugins directory", "path", pluginsDir)
|
||||
mgr.ScanPlugins()
|
||||
|
||||
log.Info("Waiting for plugin compilation to complete", "name", pluginName)
|
||||
|
||||
// Wait for compilation to complete
|
||||
if err := mgr.EnsureCompiled(pluginName); err != nil {
|
||||
log.Fatal("Failed to compile refreshed plugin", "name", pluginName, err)
|
||||
}
|
||||
|
||||
log.Info("Plugin compilation completed successfully", "name", pluginName)
|
||||
fmt.Printf("Plugin '%s' refreshed successfully\n", pluginName)
|
||||
}
|
||||
|
||||
func pluginDev(cmd *cobra.Command, args []string) {
|
||||
sourcePath, err := filepath.Abs(args[0])
|
||||
if err != nil {
|
||||
log.Fatal("Invalid path", "path", args[0], err)
|
||||
}
|
||||
pluginsDir := conf.Server.Plugins.Folder
|
||||
|
||||
// Validate source directory and manifest
|
||||
if err := validateDevSource(sourcePath); err != nil {
|
||||
log.Fatal("Source validation failed", err)
|
||||
}
|
||||
|
||||
// Load manifest to get plugin name
|
||||
manifest, err := plugins.LoadManifest(sourcePath)
|
||||
if err != nil {
|
||||
log.Fatal("Failed to load plugin manifest", "path", filepath.Join(sourcePath, "manifest.json"), err)
|
||||
}
|
||||
|
||||
pluginName := cmp.Or(manifest.Name, filepath.Base(sourcePath))
|
||||
targetPath := filepath.Join(pluginsDir, pluginName)
|
||||
|
||||
// Handle existing target
|
||||
if err := handleExistingTarget(targetPath, sourcePath); err != nil {
|
||||
log.Fatal("Failed to handle existing target", err)
|
||||
}
|
||||
|
||||
// Create target directory if needed
|
||||
if err := os.MkdirAll(filepath.Dir(targetPath), 0755); err != nil {
|
||||
log.Fatal("Failed to create plugins directory", "path", filepath.Dir(targetPath), err)
|
||||
}
|
||||
|
||||
// Create the symlink
|
||||
if err := os.Symlink(sourcePath, targetPath); err != nil {
|
||||
log.Fatal("Failed to create symlink", "source", sourcePath, "target", targetPath, err)
|
||||
}
|
||||
|
||||
fmt.Printf("Development symlink created: '%s' -> '%s'\n", targetPath, sourcePath)
|
||||
fmt.Println("Plugin can be refreshed with: navidrome plugin refresh", pluginName)
|
||||
}
|
||||
|
||||
// Utility functions
|
||||
|
||||
func validateDevSource(sourcePath string) error {
|
||||
sourceInfo, err := os.Stat(sourcePath)
|
||||
if err != nil {
|
||||
return fmt.Errorf("source folder not found: %s (%w)", sourcePath, err)
|
||||
}
|
||||
if !sourceInfo.IsDir() {
|
||||
return fmt.Errorf("source path is not a directory: %s", sourcePath)
|
||||
}
|
||||
|
||||
manifestPath := filepath.Join(sourcePath, "manifest.json")
|
||||
if !utils.FileExists(manifestPath) {
|
||||
return fmt.Errorf("source folder missing manifest.json: %s", sourcePath)
|
||||
}
|
||||
|
||||
return nil
|
||||
}
|
||||
|
||||
func handleExistingTarget(targetPath, sourcePath string) error {
|
||||
if !utils.FileExists(targetPath) {
|
||||
return nil // Nothing to handle
|
||||
}
|
||||
|
||||
// Check if it's already a symlink to our source
|
||||
existingLink, err := os.Readlink(targetPath)
|
||||
if err == nil && existingLink == sourcePath {
|
||||
fmt.Printf("Symlink already exists and points to the correct source\n")
|
||||
return fmt.Errorf("symlink already exists") // This will cause early return in caller
|
||||
}
|
||||
|
||||
// Handle case where target exists but is not a symlink to our source
|
||||
fmt.Printf("Target path '%s' already exists.\n", targetPath)
|
||||
fmt.Print("Do you want to replace it? (y/N): ")
|
||||
var response string
|
||||
_, err = fmt.Scanln(&response)
|
||||
if err != nil || strings.ToLower(response) != "y" {
|
||||
if err != nil {
|
||||
log.Debug("Error reading input, assuming 'no'", err)
|
||||
}
|
||||
return fmt.Errorf("operation canceled")
|
||||
}
|
||||
|
||||
// Remove existing target
|
||||
if err := os.RemoveAll(targetPath); err != nil {
|
||||
return fmt.Errorf("failed to remove existing target %s: %w", targetPath, err)
|
||||
}
|
||||
|
||||
return nil
|
||||
}
|
||||
|
||||
func ensurePluginDirPermissions(dir string) {
|
||||
if err := os.Chmod(dir, pluginDirPermissions); err != nil {
|
||||
log.Error("Failed to set plugin directory permissions", "dir", dir, err)
|
||||
}
|
||||
|
||||
// Apply permissions to all files in the directory
|
||||
entries, err := os.ReadDir(dir)
|
||||
if err != nil {
|
||||
log.Error("Failed to read plugin directory", "dir", dir, err)
|
||||
return
|
||||
}
|
||||
|
||||
for _, entry := range entries {
|
||||
path := filepath.Join(dir, entry.Name())
|
||||
info, err := os.Stat(path)
|
||||
if err != nil {
|
||||
log.Error("Failed to stat file", "path", path, err)
|
||||
continue
|
||||
}
|
||||
|
||||
mode := os.FileMode(pluginFilePermissions) // Files
|
||||
if info.IsDir() {
|
||||
mode = os.FileMode(pluginDirPermissions) // Directories
|
||||
ensurePluginDirPermissions(path) // Recursive
|
||||
}
|
||||
|
||||
if err := os.Chmod(path, mode); err != nil {
|
||||
log.Error("Failed to set file permissions", "path", path, err)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
func calculateSHA256(filePath string) string {
|
||||
file, err := os.Open(filePath)
|
||||
if err != nil {
|
||||
log.Error("Failed to open file for hashing", err)
|
||||
return "N/A"
|
||||
}
|
||||
defer file.Close()
|
||||
|
||||
hasher := sha256.New()
|
||||
if _, err := io.Copy(hasher, file); err != nil {
|
||||
log.Error("Failed to calculate hash", err)
|
||||
return "N/A"
|
||||
}
|
||||
|
||||
return hex.EncodeToString(hasher.Sum(nil))
|
||||
}
|
||||
cmd/plugin_test.go | 193 (new file)
@@ -0,0 +1,193 @@
|
||||
package cmd
|
||||
|
||||
import (
|
||||
"io"
|
||||
"os"
|
||||
"path/filepath"
|
||||
"strings"
|
||||
|
||||
"github.com/navidrome/navidrome/conf"
|
||||
"github.com/navidrome/navidrome/conf/configtest"
|
||||
. "github.com/onsi/ginkgo/v2"
|
||||
. "github.com/onsi/gomega"
|
||||
"github.com/spf13/cobra"
|
||||
)
|
||||
|
||||
var _ = Describe("Plugin CLI Commands", func() {
|
||||
var tempDir string
|
||||
var cmd *cobra.Command
|
||||
var stdOut *os.File
|
||||
var origStdout *os.File
|
||||
var outReader *os.File
|
||||
|
||||
// Helper to create a test plugin with the given name and details
|
||||
createTestPlugin := func(name, author, version string, capabilities []string) string {
|
||||
pluginDir := filepath.Join(tempDir, name)
|
||||
Expect(os.MkdirAll(pluginDir, 0755)).To(Succeed())
|
||||
|
||||
// Create a properly formatted capabilities JSON array
|
||||
capabilitiesJSON := `"` + strings.Join(capabilities, `", "`) + `"`
|
||||
|
||||
manifest := `{
|
||||
"name": "` + name + `",
|
||||
"author": "` + author + `",
|
||||
"version": "` + version + `",
|
||||
"description": "Plugin for testing",
|
||||
"website": "https://test.navidrome.org/` + name + `",
|
||||
"capabilities": [` + capabilitiesJSON + `],
|
||||
"permissions": {}
|
||||
}`
|
||||
|
||||
Expect(os.WriteFile(filepath.Join(pluginDir, "manifest.json"), []byte(manifest), 0600)).To(Succeed())
|
||||
|
||||
// Create a dummy WASM file
|
||||
wasmContent := []byte("dummy wasm content for testing")
|
||||
Expect(os.WriteFile(filepath.Join(pluginDir, "plugin.wasm"), wasmContent, 0600)).To(Succeed())
|
||||
|
||||
return pluginDir
|
||||
}
|
||||
|
||||
// Helper to execute a command and return captured output
|
||||
captureOutput := func(reader io.Reader) string {
|
||||
stdOut.Close()
|
||||
outputBytes, err := io.ReadAll(reader)
|
||||
Expect(err).NotTo(HaveOccurred())
|
||||
return string(outputBytes)
|
||||
}
|
||||
|
||||
BeforeEach(func() {
|
||||
DeferCleanup(configtest.SetupConfig())
|
||||
tempDir = GinkgoT().TempDir()
|
||||
|
||||
// Setup config
|
||||
conf.Server.Plugins.Enabled = true
|
||||
conf.Server.Plugins.Folder = tempDir
|
||||
|
||||
// Create a command for testing
|
||||
cmd = &cobra.Command{Use: "test"}
|
||||
|
||||
// Setup stdout capture
|
||||
origStdout = os.Stdout
|
||||
var err error
|
||||
        outReader, stdOut, err = os.Pipe()
        Expect(err).NotTo(HaveOccurred())
        os.Stdout = stdOut

        DeferCleanup(func() {
            os.Stdout = origStdout
        })
    })

    AfterEach(func() {
        os.Stdout = origStdout
        if stdOut != nil {
            stdOut.Close()
        }
        if outReader != nil {
            outReader.Close()
        }
    })

    Describe("Plugin list command", func() {
        It("should list installed plugins", func() {
            // Create test plugins
            createTestPlugin("plugin1", "Test Author", "1.0.0", []string{"MetadataAgent"})
            createTestPlugin("plugin2", "Another Author", "2.1.0", []string{"Scrobbler"})

            // Execute command
            pluginList(cmd, []string{})

            // Verify output
            output := captureOutput(outReader)

            Expect(output).To(ContainSubstring("plugin1"))
            Expect(output).To(ContainSubstring("Test Author"))
            Expect(output).To(ContainSubstring("1.0.0"))
            Expect(output).To(ContainSubstring("MetadataAgent"))

            Expect(output).To(ContainSubstring("plugin2"))
            Expect(output).To(ContainSubstring("Another Author"))
            Expect(output).To(ContainSubstring("2.1.0"))
            Expect(output).To(ContainSubstring("Scrobbler"))
        })
    })

    Describe("Plugin info command", func() {
        It("should display information about an installed plugin", func() {
            // Create test plugin with multiple capabilities
            createTestPlugin("test-plugin", "Test Author", "1.0.0",
                []string{"MetadataAgent", "Scrobbler"})

            // Execute command
            pluginInfo(cmd, []string{"test-plugin"})

            // Verify output
            output := captureOutput(outReader)

            Expect(output).To(ContainSubstring("Name: test-plugin"))
            Expect(output).To(ContainSubstring("Author: Test Author"))
            Expect(output).To(ContainSubstring("Version: 1.0.0"))
            Expect(output).To(ContainSubstring("Description: Plugin for testing"))
            Expect(output).To(ContainSubstring("Capabilities: MetadataAgent, Scrobbler"))
        })
    })

    Describe("Plugin remove command", func() {
        It("should remove a regular plugin directory", func() {
            // Create test plugin
            pluginDir := createTestPlugin("regular-plugin", "Test Author", "1.0.0",
                []string{"MetadataAgent"})

            // Execute command
            pluginRemove(cmd, []string{"regular-plugin"})

            // Verify output
            output := captureOutput(outReader)
            Expect(output).To(ContainSubstring("Plugin 'regular-plugin' removed successfully"))

            // Verify directory is actually removed
            _, err := os.Stat(pluginDir)
            Expect(os.IsNotExist(err)).To(BeTrue())
        })

        It("should remove only the symlink for a development plugin", func() {
            // Create a real source directory
            sourceDir := filepath.Join(GinkgoT().TempDir(), "dev-plugin-source")
            Expect(os.MkdirAll(sourceDir, 0755)).To(Succeed())

            manifest := `{
                "name": "dev-plugin",
                "author": "Dev Author",
                "version": "0.1.0",
                "description": "Development plugin for testing",
                "website": "https://test.navidrome.org/dev-plugin",
                "capabilities": ["Scrobbler"],
                "permissions": {}
            }`
            Expect(os.WriteFile(filepath.Join(sourceDir, "manifest.json"), []byte(manifest), 0600)).To(Succeed())

            // Create a dummy WASM file
            wasmContent := []byte("dummy wasm content for testing")
            Expect(os.WriteFile(filepath.Join(sourceDir, "plugin.wasm"), wasmContent, 0600)).To(Succeed())

            // Create a symlink in the plugins directory
            symlinkPath := filepath.Join(tempDir, "dev-plugin")
            Expect(os.Symlink(sourceDir, symlinkPath)).To(Succeed())

            // Execute command
            pluginRemove(cmd, []string{"dev-plugin"})

            // Verify output
            output := captureOutput(outReader)
            Expect(output).To(ContainSubstring("Development plugin symlink 'dev-plugin' removed successfully"))
            Expect(output).To(ContainSubstring("target directory preserved"))

            // Verify the symlink is removed but source directory exists
            _, err := os.Lstat(symlinkPath)
            Expect(os.IsNotExist(err)).To(BeTrue())

            _, err = os.Stat(sourceDir)
            Expect(err).NotTo(HaveOccurred())
        })
    })
})
cmd/root.go (218 changed lines)
@@ -2,7 +2,6 @@ package cmd

import (
    "context"
    "fmt"
    "os"
    "os/signal"
    "strings"
@@ -10,15 +9,15 @@ import (
    "time"

    "github.com/go-chi/chi/v5/middleware"
    _ "github.com/navidrome/navidrome/adapters/taglib"
    "github.com/navidrome/navidrome/conf"
    "github.com/navidrome/navidrome/consts"
    "github.com/navidrome/navidrome/core"
    "github.com/navidrome/navidrome/db"
    "github.com/navidrome/navidrome/log"
    "github.com/navidrome/navidrome/model"
    "github.com/navidrome/navidrome/resources"
    "github.com/navidrome/navidrome/scheduler"
    "github.com/navidrome/navidrome/server/backgrounds"
    "github.com/prometheus/client_golang/prometheus/promhttp"
    "github.com/spf13/cobra"
    "github.com/spf13/viper"
    "golang.org/x/sync/errgroup"
@@ -37,7 +36,7 @@ Complete documentation is available at https://www.navidrome.org/docs`,
        preRun()
    },
    Run: func(cmd *cobra.Command, args []string) {
        runNavidrome()
        runNavidrome(cmd.Context())
    },
    PostRun: func(cmd *cobra.Command, args []string) {
        postRun()
@@ -48,10 +47,12 @@ Complete documentation is available at https://www.navidrome.org/docs`,

// Execute runs the root cobra command, which will start the Navidrome server by calling the runNavidrome function.
func Execute() {
    ctx, cancel := mainContext(context.Background())
    defer cancel()

    rootCmd.SetVersionTemplate(`{{println .Version}}`)
    if err := rootCmd.Execute(); err != nil {
        fmt.Println(err)
        os.Exit(1)
    if err := rootCmd.ExecuteContext(ctx); err != nil {
        log.Fatal(err)
    }
}

@@ -59,7 +60,7 @@ func preRun() {
    if !noBanner {
        println(resources.Banner())
    }
    conf.Load()
    conf.Load(noBanner)
}

func postRun() {
@@ -69,18 +70,24 @@ func postRun() {
// runNavidrome is the main entry point for the Navidrome server. It starts all the services and blocks.
// If any of the services returns an error, it will log it and exit. If the process receives a signal to exit,
// it will cancel the context and exit gracefully.
func runNavidrome() {
    defer db.Init()()

    ctx, cancel := mainContext()
    defer cancel()
func runNavidrome(ctx context.Context) {
    defer db.Init(ctx)()

    g, ctx := errgroup.WithContext(ctx)
    g.Go(startServer(ctx))
    g.Go(startSignaller(ctx))
    g.Go(startScheduler(ctx))
    g.Go(startPlaybackServer(ctx))
    g.Go(schedulePeriodicScan(ctx))
    g.Go(schedulePeriodicBackup(ctx))
    g.Go(startInsightsCollector(ctx))
    g.Go(startPluginManager(ctx))
    g.Go(runInitialScan(ctx))
    if conf.Server.Scanner.Enabled {
        g.Go(startScanWatcher(ctx))
        g.Go(schedulePeriodicScan(ctx))
    } else {
        log.Warn(ctx, "Automatic Scanning is DISABLED")
    }

    if err := g.Wait(); err != nil {
        log.Error("Fatal error in Navidrome. Aborting", err)
@@ -88,8 +95,8 @@ func runNavidrome() {
}

// mainContext returns a context that is cancelled when the process receives a signal to exit.
func mainContext() (context.Context, context.CancelFunc) {
    return signal.NotifyContext(context.Background(),
func mainContext(ctx context.Context) (context.Context, context.CancelFunc) {
    return signal.NotifyContext(ctx,
        os.Interrupt,
        syscall.SIGHUP,
        syscall.SIGTERM,
@@ -100,9 +107,9 @@ func mainContext() (context.Context, context.CancelFunc) {
// startServer starts the Navidrome web server, adding all the necessary routers.
func startServer(ctx context.Context) func() error {
    return func() error {
        a := CreateServer(conf.Server.MusicFolder)
        a.MountRouter("Native API", consts.URLPathNativeAPI, CreateNativeAPIRouter())
        a.MountRouter("Subsonic API", consts.URLPathSubsonicAPI, CreateSubsonicAPIRouter())
        a := CreateServer()
        a.MountRouter("Native API", consts.URLPathNativeAPI, CreateNativeAPIRouter(ctx))
        a.MountRouter("Subsonic API", consts.URLPathSubsonicAPI, CreateSubsonicAPIRouter(ctx))
        a.MountRouter("Public Endpoints", consts.URLPathPublic, CreatePublicRouter())
        if conf.Server.LastFM.Enabled {
            a.MountRouter("LastFM Auth", consts.URLPathNativeAPI+"/lastfm", CreateLastFMRouter())
@@ -111,15 +118,16 @@ func startServer(ctx context.Context) func() error {
            a.MountRouter("ListenBrainz Auth", consts.URLPathNativeAPI+"/listenbrainz", CreateListenBrainzRouter())
        }
        if conf.Server.Prometheus.Enabled {
            // blocking call because takes <1ms but useful if fails
            core.WriteInitialMetrics()
            a.MountRouter("Prometheus metrics", conf.Server.Prometheus.MetricsPath, promhttp.Handler())
            p := CreatePrometheus()
            // blocking call because takes <100ms but useful if fails
            p.WriteInitialMetrics(ctx)
            a.MountRouter("Prometheus metrics", conf.Server.Prometheus.MetricsPath, p.GetHandler())
        }
        if conf.Server.DevEnableProfiler {
            a.MountRouter("Profiling", "/debug", middleware.Profiler())
        }
        if strings.HasPrefix(conf.Server.UILoginBackgroundURL, "/") {
            a.MountRouter("Background images", consts.DefaultUILoginBackgroundURL, backgrounds.NewHandler())
            a.MountRouter("Background images", conf.Server.UILoginBackgroundURL, backgrounds.NewHandler())
        }
        return a.Run(ctx, conf.Server.Address, conf.Server.Port, conf.Server.TLSCert, conf.Server.TLSKey)
    }
@@ -128,29 +136,134 @@ func startServer(ctx context.Context) func() error {
// schedulePeriodicScan schedules a periodic scan of the music library, if configured.
func schedulePeriodicScan(ctx context.Context) func() error {
    return func() error {
        schedule := conf.Server.ScanSchedule
        schedule := conf.Server.Scanner.Schedule
        if schedule == "" {
            log.Warn("Periodic scan is DISABLED")
            log.Info(ctx, "Periodic scan is DISABLED")
            return nil
        }

        scanner := GetScanner()
        s := CreateScanner(ctx)
        schedulerInstance := scheduler.GetInstance()

        log.Info("Scheduling periodic scan", "schedule", schedule)
        err := schedulerInstance.Add(schedule, func() {
            _ = scanner.RescanAll(ctx, false)
        _, err := schedulerInstance.Add(schedule, func() {
            _, err := s.ScanAll(ctx, false)
            if err != nil {
                log.Error(ctx, "Error executing periodic scan", err)
            }
        })
        if err != nil {
            log.Error("Error scheduling periodic scan", err)
            log.Error(ctx, "Error scheduling periodic scan", err)
        }
        return nil
    }
}

        time.Sleep(2 * time.Second) // Wait 2 seconds before the initial scan
        log.Debug("Executing initial scan")
        if err := scanner.RescanAll(ctx, false); err != nil {
            log.Error("Error executing initial scan", err)
func pidHashChanged(ds model.DataStore) (bool, error) {
    pidAlbum, err := ds.Property(context.Background()).DefaultGet(consts.PIDAlbumKey, "")
    if err != nil {
        return false, err
    }
    pidTrack, err := ds.Property(context.Background()).DefaultGet(consts.PIDTrackKey, "")
    if err != nil {
        return false, err
    }
    return !strings.EqualFold(pidAlbum, conf.Server.PID.Album) || !strings.EqualFold(pidTrack, conf.Server.PID.Track), nil
}

// runInitialScan runs an initial scan of the music library if needed.
func runInitialScan(ctx context.Context) func() error {
    return func() error {
        ds := CreateDataStore()
        fullScanRequired, err := ds.Property(ctx).DefaultGet(consts.FullScanAfterMigrationFlagKey, "0")
        if err != nil {
            return err
        }
        log.Debug("Finished initial scan")
        inProgress, err := ds.Library(ctx).ScanInProgress()
        if err != nil {
            return err
        }
        pidHasChanged, err := pidHashChanged(ds)
        if err != nil {
            return err
        }
        scanNeeded := conf.Server.Scanner.ScanOnStartup || inProgress || fullScanRequired == "1" || pidHasChanged
        time.Sleep(2 * time.Second) // Wait 2 seconds before the initial scan
        if scanNeeded {
            s := CreateScanner(ctx)
            switch {
            case fullScanRequired == "1":
                log.Warn(ctx, "Full scan required after migration")
                _ = ds.Property(ctx).Delete(consts.FullScanAfterMigrationFlagKey)
            case pidHasChanged:
                log.Warn(ctx, "PID config changed, performing full scan")
                fullScanRequired = "1"
            case inProgress:
                log.Warn(ctx, "Resuming interrupted scan")
            default:
                log.Info("Executing initial scan")
            }

            _, err = s.ScanAll(ctx, fullScanRequired == "1")
            if err != nil {
                log.Error(ctx, "Scan failed", err)
            } else {
                log.Info(ctx, "Scan completed")
            }
        } else {
            log.Debug(ctx, "Initial scan not needed")
        }
        return nil
    }
}

func startScanWatcher(ctx context.Context) func() error {
    return func() error {
        if conf.Server.Scanner.WatcherWait == 0 {
            log.Debug("Folder watcher is DISABLED")
            return nil
        }
        w := CreateScanWatcher(ctx)
        err := w.Run(ctx)
        if err != nil {
            log.Error("Error starting watcher", err)
        }
        return nil
    }
}

func schedulePeriodicBackup(ctx context.Context) func() error {
    return func() error {
        //schedule := conf.Server.Backup.Schedule
        //if schedule == "" {
        // log.Info(ctx, "Periodic backup is DISABLED")
        // return nil
        //}
        //
        //schedulerInstance := scheduler.GetInstance()
        //
        //log.Info("Scheduling periodic backup", "schedule", schedule)
        //_, err := schedulerInstance.Add(schedule, func() {
        // start := time.Now()
        // path, err := db.Backup(ctx)
        // elapsed := time.Since(start)
        // if err != nil {
        // log.Error(ctx, "Error backing up database", "elapsed", elapsed, err)
        // return
        // }
        // log.Info(ctx, "Backup complete", "elapsed", elapsed, "path", path)
        //
        // count, err := db.Prune(ctx)
        // if err != nil {
        // log.Error(ctx, "Error pruning database", "error", err)
        // } else if count > 0 {
        // log.Info(ctx, "Successfully pruned old files", "count", count)
        // } else {
        // log.Info(ctx, "No backups pruned")
        // }
        //})
        //
        //return err
        return nil
    }
}
@@ -165,6 +278,25 @@ func startScheduler(ctx context.Context) func() error {
    }
}

// startInsightsCollector starts the Navidrome Insight Collector, if configured.
func startInsightsCollector(ctx context.Context) func() error {
    return func() error {
        if !conf.Server.EnableInsightsCollector {
            log.Info(ctx, "Insight Collector is DISABLED")
            return nil
        }
        log.Info(ctx, "Starting Insight Collector")
        select {
        case <-time.After(conf.Server.DevInsightsInitialDelay):
        case <-ctx.Done():
            return nil
        }
        ic := CreateInsights()
        ic.Run(ctx)
        return nil
    }
}

// startPlaybackServer starts the Navidrome playback server, if configured.
// It is responsible for the Jukebox functionality
func startPlaybackServer(ctx context.Context) func() error {
@@ -179,6 +311,22 @@ func startPlaybackServer(ctx context.Context) func() error {
    }
}

// startPluginManager starts the plugin manager, if configured.
func startPluginManager(ctx context.Context) func() error {
    return func() error {
        if !conf.Server.Plugins.Enabled {
            log.Debug("Plugins are DISABLED")
            return nil
        }
        log.Info(ctx, "Starting plugin manager")
        // Get the manager instance and scan for plugins
        manager := GetPluginManager(ctx)
        manager.ScanPlugins()

        return nil
    }
}

// TODO: Implement some struct tags to map flags to viper
func init() {
    cobra.OnInitialize(func() {
@@ -191,11 +339,13 @@ func init() {
    rootCmd.PersistentFlags().String("datafolder", viper.GetString("datafolder"), "folder to store application data (DB), needs write access")
    rootCmd.PersistentFlags().String("cachefolder", viper.GetString("cachefolder"), "folder to store cache data (transcoding, images...), needs write access")
    rootCmd.PersistentFlags().StringP("loglevel", "l", viper.GetString("loglevel"), "log level, possible values: error, info, debug, trace")
    rootCmd.PersistentFlags().String("logfile", viper.GetString("logfile"), "log file path, if not set logs will be printed to stderr")

    _ = viper.BindPFlag("musicfolder", rootCmd.PersistentFlags().Lookup("musicfolder"))
    _ = viper.BindPFlag("datafolder", rootCmd.PersistentFlags().Lookup("datafolder"))
    _ = viper.BindPFlag("cachefolder", rootCmd.PersistentFlags().Lookup("cachefolder"))
    _ = viper.BindPFlag("loglevel", rootCmd.PersistentFlags().Lookup("loglevel"))
    _ = viper.BindPFlag("logfile", rootCmd.PersistentFlags().Lookup("logfile"))

    rootCmd.Flags().StringP("address", "a", viper.GetString("address"), "IP address to bind to")
    rootCmd.Flags().IntP("port", "p", viper.GetInt("port"), "HTTP port Navidrome will listen to")
cmd/scan.go (64 changed lines)
@@ -2,15 +2,26 @@ package cmd

import (
    "context"
    "encoding/gob"
    "os"

    "github.com/navidrome/navidrome/core"
    "github.com/navidrome/navidrome/db"
    "github.com/navidrome/navidrome/log"
    "github.com/navidrome/navidrome/persistence"
    "github.com/navidrome/navidrome/scanner"
    "github.com/navidrome/navidrome/utils/pl"
    "github.com/spf13/cobra"
)

var fullRescan bool
var (
    fullScan bool
    subprocess bool
)

func init() {
    scanCmd.Flags().BoolVarP(&fullRescan, "full", "f", false, "check all subfolders, ignoring timestamps")
    scanCmd.Flags().BoolVarP(&fullScan, "full", "f", false, "check all subfolders, ignoring timestamps")
    scanCmd.Flags().BoolVarP(&subprocess, "subprocess", "", false, "run as subprocess (internal use)")
    rootCmd.AddCommand(scanCmd)
}

@@ -19,16 +30,55 @@ var scanCmd = &cobra.Command{
    Short: "Scan music folder",
    Long: "Scan music folder for updates",
    Run: func(cmd *cobra.Command, args []string) {
        runScanner()
        runScanner(cmd.Context())
    },
}

func runScanner() {
    scanner := GetScanner()
    _ = scanner.RescanAll(context.Background(), fullRescan)
    if fullRescan {
func trackScanInteractively(ctx context.Context, progress <-chan *scanner.ProgressInfo) {
    for status := range pl.ReadOrDone(ctx, progress) {
        if status.Warning != "" {
            log.Warn(ctx, "Scan warning", "error", status.Warning)
        }
        if status.Error != "" {
            log.Error(ctx, "Scan error", "error", status.Error)
        }
        // Discard the progress status, we only care about errors
    }

    if fullScan {
        log.Info("Finished full rescan")
    } else {
        log.Info("Finished rescan")
    }
}

func trackScanAsSubprocess(ctx context.Context, progress <-chan *scanner.ProgressInfo) {
    encoder := gob.NewEncoder(os.Stdout)
    for status := range pl.ReadOrDone(ctx, progress) {
        err := encoder.Encode(status)
        if err != nil {
            log.Error(ctx, "Failed to encode status", err)
        }
    }
}

func runScanner(ctx context.Context) {
    defer db.Init(ctx)()

    sqlDB := db.Db()
    defer db.Db().Close()
    ds := persistence.New(sqlDB)
    pls := core.NewPlaylists(ds)

    progress, err := scanner.CallScan(ctx, ds, pls, fullScan)
    if err != nil {
        log.Fatal(ctx, "Failed to scan", err)
    }

    // Wait for the scanner to finish
    if subprocess {
        trackScanAsSubprocess(ctx, progress)
    } else {
        trackScanInteractively(ctx, progress)
    }
}

@@ -16,7 +16,7 @@ const triggerScanSignal = syscall.SIGUSR1

func startSignaller(ctx context.Context) func() error {
    log.Info(ctx, "Starting signaler")
    scanner := GetScanner()
    scanner := CreateScanner(ctx)

    return func() error {
        var sigChan = make(chan os.Signal, 1)
@@ -27,11 +27,11 @@ func startSignaller(ctx context.Context) func() error {
        case sig := <-sigChan:
            log.Info(ctx, "Received signal, triggering a new scan", "signal", sig)
            start := time.Now()
            err := scanner.RescanAll(ctx, false)
            _, err := scanner.ScanAll(ctx, false)
            if err != nil {
                log.Error(ctx, "Error scanning", err)
            }
            log.Info(ctx, "Triggered scan complete", "elapsed", time.Since(start).Round(100*time.Millisecond))
            log.Info(ctx, "Triggered scan complete", "elapsed", time.Since(start))
        case <-ctx.Done():
            return nil
        }
cmd/svc.go (new file, 267 lines)
@@ -0,0 +1,267 @@
package cmd

import (
    "context"
    "fmt"
    "os"
    "path/filepath"
    "sync"
    "time"

    "github.com/kardianos/service"
    "github.com/navidrome/navidrome/conf"
    "github.com/navidrome/navidrome/log"
    "github.com/spf13/cobra"
)

var (
    svcStatusLabels = map[service.Status]string{
        service.StatusUnknown: "Unknown",
        service.StatusStopped: "Stopped",
        service.StatusRunning: "Running",
    }

    installUser string
    workingDirectory string
)

func init() {
    svcCmd.AddCommand(buildInstallCmd())
    svcCmd.AddCommand(buildUninstallCmd())
    svcCmd.AddCommand(buildStartCmd())
    svcCmd.AddCommand(buildStopCmd())
    svcCmd.AddCommand(buildStatusCmd())
    svcCmd.AddCommand(buildExecuteCmd())
    rootCmd.AddCommand(svcCmd)
}

var svcCmd = &cobra.Command{
    Use: "service",
    Aliases: []string{"svc"},
    Short: "Manage Navidrome as a service",
    Long: fmt.Sprintf("Manage Navidrome as a service, using the OS service manager (%s)", service.Platform()),
    Run: runServiceCmd,
}

type svcControl struct {
    ctx context.Context
    cancel context.CancelFunc
    done chan struct{}
}

func (p *svcControl) Start(service.Service) error {
    p.done = make(chan struct{})
    p.ctx, p.cancel = context.WithCancel(context.Background())
    go func() {
        runNavidrome(p.ctx)
        close(p.done)
    }()
    return nil
}

func (p *svcControl) Stop(service.Service) error {
    log.Info("Stopping service")
    p.cancel()
    select {
    case <-p.done:
        log.Info("Service stopped gracefully")
    case <-time.After(10 * time.Second):
        log.Error("Service did not stop in time. Killing it.")
    }
    return nil
}

var svcInstance = sync.OnceValue(func() service.Service {
    options := make(service.KeyValue)
    options["Restart"] = "on-failure"
    options["SuccessExitStatus"] = "1 2 8 SIGKILL"
    options["UserService"] = false
    options["LogDirectory"] = conf.Server.DataFolder
    options["SystemdScript"] = systemdScript
    if conf.Server.LogFile != "" {
        options["LogOutput"] = false
    } else {
        options["LogOutput"] = true
        options["LogDirectory"] = conf.Server.DataFolder
    }
    svcConfig := &service.Config{
        UserName: installUser,
        Name: "navidrome",
        DisplayName: "Navidrome",
        Description: "Your Personal Streaming Service",
        Dependencies: []string{
            "After=remote-fs.target network.target",
        },
        WorkingDirectory: executablePath(),
        Option: options,
    }
    arguments := []string{"service", "execute"}
    if conf.Server.ConfigFile != "" {
        arguments = append(arguments, "-c", conf.Server.ConfigFile)
    }
    svcConfig.Arguments = arguments

    prg := &svcControl{}
    svc, err := service.New(prg, svcConfig)
    if err != nil {
        log.Fatal(err)
    }
    return svc
})

func runServiceCmd(cmd *cobra.Command, _ []string) {
    _ = cmd.Help()
}

func executablePath() string {
    if workingDirectory != "" {
        return workingDirectory
    }

    ex, err := os.Executable()
    if err != nil {
        log.Fatal(err)
    }
    return filepath.Dir(ex)
}

func buildInstallCmd() *cobra.Command {
    runInstallCmd := func(_ *cobra.Command, _ []string) {
        var err error
        println("Installing service with:")
        println(" working directory: " + executablePath())
        println(" music folder: " + conf.Server.MusicFolder)
        println(" data folder: " + conf.Server.DataFolder)
        if conf.Server.LogFile != "" {
            println(" log file: " + conf.Server.LogFile)
        } else {
            println(" logs folder: " + conf.Server.DataFolder)
        }
        if cfgFile != "" {
            conf.Server.ConfigFile, err = filepath.Abs(cfgFile)
            if err != nil {
                log.Fatal(err)
            }
            println(" config file: " + conf.Server.ConfigFile)
        }
        err = svcInstance().Install()
        if err != nil {
            log.Fatal(err)
        }
        println("Service installed. Use 'navidrome svc start' to start it.")
    }

    cmd := &cobra.Command{
        Use: "install",
        Short: "Install Navidrome service.",
        Run: runInstallCmd,
    }
    cmd.Flags().StringVarP(&installUser, "user", "u", "", "user to run service")
    cmd.Flags().StringVarP(&workingDirectory, "working-directory", "w", "", "working directory of service")

    return cmd
}

func buildUninstallCmd() *cobra.Command {
    return &cobra.Command{
        Use: "uninstall",
        Short: "Uninstall Navidrome service. Does not delete the music or data folders",
        Run: func(cmd *cobra.Command, args []string) {
            err := svcInstance().Uninstall()
            if err != nil {
                log.Fatal(err)
            }
            println("Service uninstalled. Music and data folders are still intact.")
        },
    }
}

func buildStartCmd() *cobra.Command {
    return &cobra.Command{
        Use: "start",
        Short: "Start Navidrome service",
        Run: func(cmd *cobra.Command, args []string) {
            err := svcInstance().Start()
            if err != nil {
                log.Fatal(err)
            }
            println("Service started. Use 'navidrome svc status' to check its status.")
        },
    }
}

func buildStopCmd() *cobra.Command {
    return &cobra.Command{
        Use: "stop",
        Short: "Stop Navidrome service",
        Run: func(cmd *cobra.Command, args []string) {
            err := svcInstance().Stop()
            if err != nil {
                log.Fatal(err)
            }
            println("Service stopped. Use 'navidrome svc status' to check its status.")
        },
    }
}

func buildStatusCmd() *cobra.Command {
    return &cobra.Command{
        Use: "status",
        Short: "Show Navidrome service status",
        Run: func(cmd *cobra.Command, args []string) {
            status, err := svcInstance().Status()
            if err != nil {
                log.Fatal(err)
            }
            fmt.Printf("Navidrome is %s.\n", svcStatusLabels[status])
        },
    }
}

func buildExecuteCmd() *cobra.Command {
    return &cobra.Command{
        Use: "execute",
        Short: "Run navidrome as a service in the foreground (it is very unlikely you want to run this, you are better off running just navidrome)",
        Run: func(cmd *cobra.Command, args []string) {
            err := svcInstance().Run()
            if err != nil {
                log.Fatal(err)
            }
        },
    }
}

const systemdScript = `[Unit]
Description={{.Description}}
ConditionFileIsExecutable={{.Path|cmdEscape}}
{{range $i, $dep := .Dependencies}}
{{$dep}} {{end}}

[Service]
StartLimitInterval=5
StartLimitBurst=10
ExecStart={{.Path|cmdEscape}}{{range .Arguments}} {{.|cmd}}{{end}}
{{if .WorkingDirectory}}WorkingDirectory={{.WorkingDirectory|cmdEscape}}{{end}}
{{if .UserName}}User={{.UserName}}{{end}}
{{if .Restart}}Restart={{.Restart}}{{end}}
{{if .SuccessExitStatus}}SuccessExitStatus={{.SuccessExitStatus}}{{end}}
TimeoutStopSec=20
RestartSec=120
EnvironmentFile=-/etc/sysconfig/{{.Name}}

DevicePolicy=closed
NoNewPrivileges=yes
PrivateTmp=yes
ProtectControlGroups=yes
ProtectKernelModules=yes
ProtectKernelTunables=yes
RestrictAddressFamilies=AF_UNIX AF_INET AF_INET6
RestrictNamespaces=yes
RestrictRealtime=yes
SystemCallFilter=~@clock @debug @module @mount @obsolete @reboot @setuid @swap
{{if .WorkingDirectory}}ReadWritePaths={{.WorkingDirectory|cmdEscape}}{{end}}
ProtectSystem=full

[Install]
WantedBy=multi-user.target
`
cmd/wire_gen.go (163 changed lines)
@@ -1,23 +1,28 @@
|
||||
// Code generated by Wire. DO NOT EDIT.
|
||||
|
||||
//go:generate go run -mod=mod github.com/google/wire/cmd/wire
|
||||
//go:generate go run -mod=mod github.com/google/wire/cmd/wire gen -tags "netgo"
|
||||
//go:build !wireinject
|
||||
// +build !wireinject
|
||||
|
||||
package cmd
|
||||
|
||||
import (
|
||||
"context"
|
||||
"github.com/google/wire"
|
||||
"github.com/navidrome/navidrome/core"
|
||||
"github.com/navidrome/navidrome/core/agents"
|
||||
"github.com/navidrome/navidrome/core/agents/lastfm"
|
||||
"github.com/navidrome/navidrome/core/agents/listenbrainz"
|
||||
"github.com/navidrome/navidrome/core/artwork"
|
||||
"github.com/navidrome/navidrome/core/external"
|
||||
"github.com/navidrome/navidrome/core/ffmpeg"
|
||||
"github.com/navidrome/navidrome/core/metrics"
|
||||
"github.com/navidrome/navidrome/core/playback"
|
||||
"github.com/navidrome/navidrome/core/scrobbler"
|
||||
"github.com/navidrome/navidrome/db"
|
||||
"github.com/navidrome/navidrome/model"
|
||||
"github.com/navidrome/navidrome/persistence"
|
||||
"github.com/navidrome/navidrome/plugins"
|
||||
"github.com/navidrome/navidrome/scanner"
|
||||
"github.com/navidrome/navidrome/server"
|
||||
"github.com/navidrome/navidrome/server/events"
|
||||
@@ -26,56 +31,86 @@ import (
|
||||
"github.com/navidrome/navidrome/server/subsonic"
|
||||
)
|
||||
|
||||
import (
|
||||
_ "github.com/navidrome/navidrome/adapters/taglib"
|
||||
)
|
||||
|
||||
// Injectors from wire_injectors.go:
|
||||
|
||||
func CreateServer(musicFolder string) *server.Server {
|
||||
dbDB := db.Db()
|
||||
dataStore := persistence.New(dbDB)
|
||||
func CreateDataStore() model.DataStore {
|
||||
sqlDB := db.Db()
|
||||
dataStore := persistence.New(sqlDB)
|
||||
return dataStore
|
||||
}
|
||||
|
||||
func CreateServer() *server.Server {
|
||||
sqlDB := db.Db()
|
||||
dataStore := persistence.New(sqlDB)
|
||||
broker := events.GetBroker()
|
||||
serverServer := server.New(dataStore, broker)
|
||||
metricsMetrics := metrics.GetPrometheusInstance(dataStore)
|
||||
manager := plugins.GetManager(dataStore, metricsMetrics)
|
||||
insights := metrics.GetInstance(dataStore, manager)
|
||||
serverServer := server.New(dataStore, broker, insights)
|
||||
return serverServer
|
||||
}
|
||||
|
||||
func CreateNativeAPIRouter() *nativeapi.Router {
|
||||
dbDB := db.Db()
|
||||
dataStore := persistence.New(dbDB)
|
||||
func CreateNativeAPIRouter(ctx context.Context) *nativeapi.Router {
|
||||
sqlDB := db.Db()
|
||||
dataStore := persistence.New(sqlDB)
|
||||
share := core.NewShare(dataStore)
|
||||
playlists := core.NewPlaylists(dataStore)
|
||||
router := nativeapi.New(dataStore, share, playlists)
|
||||
metricsMetrics := metrics.GetPrometheusInstance(dataStore)
|
||||
manager := plugins.GetManager(dataStore, metricsMetrics)
|
||||
insights := metrics.GetInstance(dataStore, manager)
|
||||
fileCache := artwork.GetImageCache()
|
||||
fFmpeg := ffmpeg.New()
|
||||
agentsAgents := agents.GetAgents(dataStore, manager)
|
||||
provider := external.NewProvider(dataStore, agentsAgents)
|
||||
artworkArtwork := artwork.NewArtwork(dataStore, fileCache, fFmpeg, provider)
|
||||
cacheWarmer := artwork.NewCacheWarmer(artworkArtwork, fileCache)
|
||||
broker := events.GetBroker()
|
||||
scannerScanner := scanner.New(ctx, dataStore, cacheWarmer, broker, playlists, metricsMetrics)
|
||||
watcher := scanner.GetWatcher(dataStore, scannerScanner)
|
||||
library := core.NewLibrary(dataStore, scannerScanner, watcher, broker)
|
||||
router := nativeapi.New(dataStore, share, playlists, insights, library)
|
||||
return router
|
||||
}
|
||||
|
||||
func CreateSubsonicAPIRouter() *subsonic.Router {
|
||||
dbDB := db.Db()
|
||||
dataStore := persistence.New(dbDB)
|
||||
func CreateSubsonicAPIRouter(ctx context.Context) *subsonic.Router {
|
||||
sqlDB := db.Db()
|
||||
dataStore := persistence.New(sqlDB)
|
||||
fileCache := artwork.GetImageCache()
|
||||
fFmpeg := ffmpeg.New()
|
||||
agentsAgents := agents.New(dataStore)
|
||||
externalMetadata := core.NewExternalMetadata(dataStore, agentsAgents)
|
||||
artworkArtwork := artwork.NewArtwork(dataStore, fileCache, fFmpeg, externalMetadata)
|
||||
metricsMetrics := metrics.GetPrometheusInstance(dataStore)
|
||||
manager := plugins.GetManager(dataStore, metricsMetrics)
|
||||
agentsAgents := agents.GetAgents(dataStore, manager)
|
||||
provider := external.NewProvider(dataStore, agentsAgents)
|
||||
artworkArtwork := artwork.NewArtwork(dataStore, fileCache, fFmpeg, provider)
|
||||
transcodingCache := core.GetTranscodingCache()
|
||||
mediaStreamer := core.NewMediaStreamer(dataStore, fFmpeg, transcodingCache)
|
||||
share := core.NewShare(dataStore)
|
||||
archiver := core.NewArchiver(mediaStreamer, dataStore, share)
|
||||
players := core.NewPlayers(dataStore)
|
||||
playlists := core.NewPlaylists(dataStore)
|
||||
cacheWarmer := artwork.NewCacheWarmer(artworkArtwork, fileCache)
|
||||
broker := events.GetBroker()
|
||||
scannerScanner := scanner.GetInstance(dataStore, playlists, cacheWarmer, broker)
|
||||
playTracker := scrobbler.GetPlayTracker(dataStore, broker)
|
||||
playlists := core.NewPlaylists(dataStore)
|
||||
scannerScanner := scanner.New(ctx, dataStore, cacheWarmer, broker, playlists, metricsMetrics)
|
||||
playTracker := scrobbler.GetPlayTracker(dataStore, broker, manager)
|
||||
playbackServer := playback.GetInstance(dataStore)
|
||||
router := subsonic.New(dataStore, artworkArtwork, mediaStreamer, archiver, players, externalMetadata, scannerScanner, broker, playlists, playTracker, share, playbackServer)
|
||||
router := subsonic.New(dataStore, artworkArtwork, mediaStreamer, archiver, players, provider, scannerScanner, broker, playlists, playTracker, share, playbackServer, metricsMetrics)
|
||||
return router
|
||||
}
|
||||
|
||||
func CreatePublicRouter() *public.Router {
|
||||
dbDB := db.Db()
|
||||
dataStore := persistence.New(dbDB)
|
||||
sqlDB := db.Db()
|
||||
dataStore := persistence.New(sqlDB)
|
||||
fileCache := artwork.GetImageCache()
|
||||
fFmpeg := ffmpeg.New()
|
||||
agentsAgents := agents.New(dataStore)
|
||||
externalMetadata := core.NewExternalMetadata(dataStore, agentsAgents)
|
||||
artworkArtwork := artwork.NewArtwork(dataStore, fileCache, fFmpeg, externalMetadata)
|
||||
metricsMetrics := metrics.GetPrometheusInstance(dataStore)
|
||||
manager := plugins.GetManager(dataStore, metricsMetrics)
|
||||
agentsAgents := agents.GetAgents(dataStore, manager)
|
||||
provider := external.NewProvider(dataStore, agentsAgents)
|
||||
artworkArtwork := artwork.NewArtwork(dataStore, fileCache, fFmpeg, provider)
|
||||
transcodingCache := core.GetTranscodingCache()
|
||||
mediaStreamer := core.NewMediaStreamer(dataStore, fFmpeg, transcodingCache)
|
||||
share := core.NewShare(dataStore)
|
||||
@@ -85,41 +120,91 @@ func CreatePublicRouter() *public.Router {
|
||||
}
|
||||
|
||||
func CreateLastFMRouter() *lastfm.Router {
|
||||
dbDB := db.Db()
|
||||
dataStore := persistence.New(dbDB)
|
||||
sqlDB := db.Db()
|
||||
dataStore := persistence.New(sqlDB)
|
||||
router := lastfm.NewRouter(dataStore)
|
||||
return router
|
||||
}
|
||||
|
||||
func CreateListenBrainzRouter() *listenbrainz.Router {
|
||||
dbDB := db.Db()
|
||||
dataStore := persistence.New(dbDB)
|
||||
sqlDB := db.Db()
|
||||
dataStore := persistence.New(sqlDB)
|
||||
router := listenbrainz.NewRouter(dataStore)
|
||||
return router
|
||||
}
|
||||
|
||||
func GetScanner() scanner.Scanner {
|
||||
dbDB := db.Db()
|
||||
dataStore := persistence.New(dbDB)
|
||||
playlists := core.NewPlaylists(dataStore)
|
||||
func CreateInsights() metrics.Insights {
|
||||
sqlDB := db.Db()
|
||||
dataStore := persistence.New(sqlDB)
|
||||
metricsMetrics := metrics.GetPrometheusInstance(dataStore)
|
||||
manager := plugins.GetManager(dataStore, metricsMetrics)
|
||||
insights := metrics.GetInstance(dataStore, manager)
|
||||
return insights
|
||||
}
|
||||
|
||||
func CreatePrometheus() metrics.Metrics {
|
||||
sqlDB := db.Db()
|
||||
dataStore := persistence.New(sqlDB)
|
||||
metricsMetrics := metrics.GetPrometheusInstance(dataStore)
|
||||
return metricsMetrics
|
||||
}
|
||||
|
||||
func CreateScanner(ctx context.Context) scanner.Scanner {
|
||||
sqlDB := db.Db()
|
||||
dataStore := persistence.New(sqlDB)
|
||||
fileCache := artwork.GetImageCache()
|
||||
fFmpeg := ffmpeg.New()
|
||||
agentsAgents := agents.New(dataStore)
|
||||
externalMetadata := core.NewExternalMetadata(dataStore, agentsAgents)
|
||||
artworkArtwork := artwork.NewArtwork(dataStore, fileCache, fFmpeg, externalMetadata)
|
||||
metricsMetrics := metrics.GetPrometheusInstance(dataStore)
|
||||
manager := plugins.GetManager(dataStore, metricsMetrics)
|
||||
agentsAgents := agents.GetAgents(dataStore, manager)
|
||||
provider := external.NewProvider(dataStore, agentsAgents)
|
||||
artworkArtwork := artwork.NewArtwork(dataStore, fileCache, fFmpeg, provider)
|
||||
cacheWarmer := artwork.NewCacheWarmer(artworkArtwork, fileCache)
|
||||
broker := events.GetBroker()
|
||||
scannerScanner := scanner.GetInstance(dataStore, playlists, cacheWarmer, broker)
|
||||
playlists := core.NewPlaylists(dataStore)
|
||||
scannerScanner := scanner.New(ctx, dataStore, cacheWarmer, broker, playlists, metricsMetrics)
|
||||
return scannerScanner
|
||||
}
|
||||
|
||||
func CreateScanWatcher(ctx context.Context) scanner.Watcher {
|
||||
sqlDB := db.Db()
|
||||
dataStore := persistence.New(sqlDB)
|
||||
fileCache := artwork.GetImageCache()
|
||||
fFmpeg := ffmpeg.New()
|
||||
metricsMetrics := metrics.GetPrometheusInstance(dataStore)
|
||||
manager := plugins.GetManager(dataStore, metricsMetrics)
|
||||
agentsAgents := agents.GetAgents(dataStore, manager)
|
||||
provider := external.NewProvider(dataStore, agentsAgents)
|
||||
artworkArtwork := artwork.NewArtwork(dataStore, fileCache, fFmpeg, provider)
|
||||
cacheWarmer := artwork.NewCacheWarmer(artworkArtwork, fileCache)
|
||||
broker := events.GetBroker()
|
||||
playlists := core.NewPlaylists(dataStore)
|
||||
scannerScanner := scanner.New(ctx, dataStore, cacheWarmer, broker, playlists, metricsMetrics)
|
||||
watcher := scanner.GetWatcher(dataStore, scannerScanner)
|
||||
return watcher
|
||||
}
|
||||
|
||||
func GetPlaybackServer() playback.PlaybackServer {
|
||||
dbDB := db.Db()
|
||||
dataStore := persistence.New(dbDB)
|
||||
sqlDB := db.Db()
|
||||
dataStore := persistence.New(sqlDB)
|
||||
playbackServer := playback.GetInstance(dataStore)
|
||||
return playbackServer
|
||||
}
|
||||
|
||||
func getPluginManager() plugins.Manager {
|
||||
sqlDB := db.Db()
|
||||
dataStore := persistence.New(sqlDB)
|
||||
metricsMetrics := metrics.GetPrometheusInstance(dataStore)
|
||||
manager := plugins.GetManager(dataStore, metricsMetrics)
|
||||
return manager
|
||||
}
|
||||
|
||||
// wire_injectors.go:
|
||||
|
||||
var allProviders = wire.NewSet(core.Set, artwork.Set, server.New, subsonic.New, nativeapi.New, public.New, persistence.New, lastfm.NewRouter, listenbrainz.NewRouter, events.GetBroker, scanner.GetInstance, db.Db)
|
||||
var allProviders = wire.NewSet(core.Set, artwork.Set, server.New, subsonic.New, nativeapi.New, public.New, persistence.New, lastfm.NewRouter, listenbrainz.NewRouter, events.GetBroker, scanner.New, scanner.GetWatcher, plugins.GetManager, metrics.GetPrometheusInstance, db.Db, wire.Bind(new(agents.PluginLoader), new(plugins.Manager)), wire.Bind(new(scrobbler.PluginLoader), new(plugins.Manager)), wire.Bind(new(metrics.PluginLoader), new(plugins.Manager)), wire.Bind(new(core.Scanner), new(scanner.Scanner)), wire.Bind(new(core.Watcher), new(scanner.Watcher)))
|
||||
|
||||
func GetPluginManager(ctx context.Context) plugins.Manager {
|
||||
manager := getPluginManager()
|
||||
manager.SetSubsonicRouter(CreateSubsonicAPIRouter(ctx))
|
||||
return manager
|
||||
}
|
||||
|
||||
@@ -3,14 +3,21 @@
|
||||
package cmd
|
||||
|
||||
import (
|
||||
"context"
|
||||
|
||||
"github.com/google/wire"
|
||||
"github.com/navidrome/navidrome/core"
|
||||
"github.com/navidrome/navidrome/core/agents"
|
||||
"github.com/navidrome/navidrome/core/agents/lastfm"
|
||||
"github.com/navidrome/navidrome/core/agents/listenbrainz"
|
||||
"github.com/navidrome/navidrome/core/artwork"
|
||||
"github.com/navidrome/navidrome/core/metrics"
|
||||
"github.com/navidrome/navidrome/core/playback"
|
||||
"github.com/navidrome/navidrome/core/scrobbler"
|
||||
"github.com/navidrome/navidrome/db"
|
||||
"github.com/navidrome/navidrome/model"
|
||||
"github.com/navidrome/navidrome/persistence"
|
||||
"github.com/navidrome/navidrome/plugins"
|
||||
"github.com/navidrome/navidrome/scanner"
|
||||
"github.com/navidrome/navidrome/server"
|
||||
"github.com/navidrome/navidrome/server/events"
|
||||
@@ -30,23 +37,37 @@ var allProviders = wire.NewSet(
|
||||
lastfm.NewRouter,
|
||||
listenbrainz.NewRouter,
|
||||
events.GetBroker,
|
||||
scanner.GetInstance,
|
||||
scanner.New,
|
||||
scanner.GetWatcher,
|
||||
plugins.GetManager,
|
||||
metrics.GetPrometheusInstance,
|
||||
db.Db,
|
||||
wire.Bind(new(agents.PluginLoader), new(plugins.Manager)),
|
||||
wire.Bind(new(scrobbler.PluginLoader), new(plugins.Manager)),
|
||||
wire.Bind(new(metrics.PluginLoader), new(plugins.Manager)),
|
||||
wire.Bind(new(core.Scanner), new(scanner.Scanner)),
|
||||
wire.Bind(new(core.Watcher), new(scanner.Watcher)),
|
||||
)
|
||||
|
||||
func CreateServer(musicFolder string) *server.Server {
|
||||
func CreateDataStore() model.DataStore {
|
||||
panic(wire.Build(
|
||||
allProviders,
|
||||
))
|
||||
}
|
||||
|
||||
func CreateNativeAPIRouter() *nativeapi.Router {
|
||||
func CreateServer() *server.Server {
|
||||
panic(wire.Build(
|
||||
allProviders,
|
||||
))
|
||||
}
|
||||
|
||||
func CreateSubsonicAPIRouter() *subsonic.Router {
|
||||
func CreateNativeAPIRouter(ctx context.Context) *nativeapi.Router {
|
||||
panic(wire.Build(
|
||||
allProviders,
|
||||
))
|
||||
}
|
||||
|
||||
func CreateSubsonicAPIRouter(ctx context.Context) *subsonic.Router {
|
||||
panic(wire.Build(
|
||||
allProviders,
|
||||
))
|
||||
@@ -70,7 +91,25 @@ func CreateListenBrainzRouter() *listenbrainz.Router {
|
||||
))
|
||||
}
|
||||
|
||||
func GetScanner() scanner.Scanner {
|
||||
func CreateInsights() metrics.Insights {
|
||||
panic(wire.Build(
|
||||
allProviders,
|
||||
))
|
||||
}
|
||||
|
||||
func CreatePrometheus() metrics.Metrics {
|
||||
panic(wire.Build(
|
||||
allProviders,
|
||||
))
|
||||
}
|
||||
|
||||
func CreateScanner(ctx context.Context) scanner.Scanner {
|
||||
panic(wire.Build(
|
||||
allProviders,
|
||||
))
|
||||
}
|
||||
|
||||
func CreateScanWatcher(ctx context.Context) scanner.Watcher {
|
||||
panic(wire.Build(
|
||||
allProviders,
|
||||
))
|
||||
@@ -81,3 +120,15 @@ func GetPlaybackServer() playback.PlaybackServer {
|
||||
allProviders,
|
||||
))
|
||||
}
|
||||
|
||||
func getPluginManager() plugins.Manager {
|
||||
panic(wire.Build(
|
||||
allProviders,
|
||||
))
|
||||
}
|
||||
|
||||
func GetPluginManager(ctx context.Context) plugins.Manager {
|
||||
manager := getPluginManager()
|
||||
manager.SetSubsonicRouter(CreateSubsonicAPIRouter(ctx))
|
||||
return manager
|
||||
}
|
||||
|
||||
conf/buildtags/buildtags.go (new file, 4 lines)
@@ -0,0 +1,4 @@
package buildtags

// This file is left intentionally empty. It is used to make sure the package is not empty, in the case all
// required build tags are disabled.
conf/buildtags/netgo.go (new file, 11 lines)
@@ -0,0 +1,11 @@
//go:build netgo

package buildtags

// NOTICE: This file was created to force the inclusion of the `netgo` tag when compiling the project.
// If the tag is not included, the compilation will fail because this variable won't be defined, and the `main.go`
// file requires it.

// Why this tag is required? See https://github.com/navidrome/navidrome/issues/700

var NETGO = true
@@ -9,9 +9,12 @@ import (
|
||||
"strings"
|
||||
"time"
|
||||
|
||||
"github.com/bmatcuk/doublestar/v4"
|
||||
"github.com/go-viper/encoding/ini"
|
||||
"github.com/kr/pretty"
|
||||
"github.com/navidrome/navidrome/consts"
|
||||
"github.com/navidrome/navidrome/log"
|
||||
"github.com/navidrome/navidrome/utils/run"
|
||||
"github.com/robfig/cron/v3"
|
||||
"github.com/spf13/viper"
|
||||
)
|
||||
@@ -26,8 +29,7 @@ type configOptions struct {
|
||||
CacheFolder string
|
||||
DbPath string
|
||||
LogLevel string
|
||||
ScanInterval time.Duration
|
||||
ScanSchedule string
|
||||
LogFile string
|
||||
SessionTimeout time.Duration
|
||||
BaseURL string
|
||||
BasePath string
|
||||
@@ -41,6 +43,7 @@ type configOptions struct {
|
||||
EnableTranscodingConfig bool
|
||||
EnableDownloads bool
|
||||
EnableExternalServices bool
|
||||
EnableInsightsCollector bool
|
||||
EnableMediaFileCoverArt bool
|
||||
TranscodingCacheSize string
|
||||
ImageCacheSize string
|
||||
@@ -57,25 +60,27 @@ type configOptions struct {
|
||||
PreferSortTags bool
|
||||
IgnoredArticles string
|
||||
IndexGroups string
|
||||
SubsonicArtistParticipations bool
|
||||
FFmpegPath string
|
||||
MPVPath string
|
||||
MPVCmdTemplate string
|
||||
CoverArtPriority string
|
||||
CoverJpegQuality int
|
||||
ArtistArtPriority string
|
||||
LyricsPriority string
|
||||
EnableGravatar bool
|
||||
EnableFavourites bool
|
||||
EnableStarRating bool
|
||||
EnableUserEditing bool
|
||||
EnableSharing bool
|
||||
ShareURL string
|
||||
DefaultShareExpiration time.Duration
|
||||
DefaultDownloadableShare bool
|
||||
DefaultTheme string
|
||||
DefaultLanguage string
|
||||
DefaultUIVolume int
|
||||
EnableReplayGain bool
|
||||
EnableCoverAnimation bool
|
||||
EnableNowPlaying bool
|
||||
GATrackingID string
|
||||
EnableLogRedacting bool
|
||||
AuthRequestLimit int
|
||||
@@ -83,45 +88,85 @@ type configOptions struct {
|
||||
PasswordEncryptionKey string
|
||||
ReverseProxyUserHeader string
|
||||
ReverseProxyWhitelist string
|
||||
HTTPSecurityHeaders secureOptions
|
||||
Prometheus prometheusOptions
|
||||
Scanner scannerOptions
|
||||
Jukebox jukeboxOptions
|
||||
|
||||
Agents string
|
||||
LastFM lastfmOptions
|
||||
Spotify spotifyOptions
|
||||
ListenBrainz listenBrainzOptions
|
||||
Plugins pluginsOptions
|
||||
PluginConfig map[string]map[string]string
|
||||
HTTPSecurityHeaders secureOptions `json:",omitzero"`
|
||||
Prometheus prometheusOptions `json:",omitzero"`
|
||||
Scanner scannerOptions `json:",omitzero"`
|
||||
Jukebox jukeboxOptions `json:",omitzero"`
|
||||
Backup backupOptions `json:",omitzero"`
|
||||
PID pidOptions `json:",omitzero"`
|
||||
Inspect inspectOptions `json:",omitzero"`
|
||||
Subsonic subsonicOptions `json:",omitzero"`
|
||||
LastFM lastfmOptions `json:",omitzero"`
|
||||
Spotify spotifyOptions `json:",omitzero"`
|
||||
Deezer deezerOptions `json:",omitzero"`
|
||||
ListenBrainz listenBrainzOptions `json:",omitzero"`
|
||||
Tags map[string]TagConf `json:",omitempty"`
|
||||
Agents string
|
||||
|
||||
// DevFlags. These are used to enable/disable debugging and incomplete features
|
||||
DevLogLevels map[string]string `json:",omitempty"`
|
||||
DevLogSourceLine bool
|
||||
DevLogLevels map[string]string
|
||||
DevEnableProfiler bool
|
||||
DevAutoCreateAdminPassword string
|
||||
DevAutoLoginUsername string
|
||||
DevActivityPanel bool
|
||||
DevActivityPanelUpdateRate time.Duration
|
||||
DevSidebarPlaylists bool
|
||||
DevEnableBufferedScrobble bool
|
||||
DevShowArtistPage bool
|
||||
DevUIShowConfig bool
|
||||
DevNewEventStream bool
|
||||
DevOffsetOptimize int
|
||||
DevArtworkMaxRequests int
|
||||
DevArtworkThrottleBacklogLimit int
|
||||
DevArtworkThrottleBacklogTimeout time.Duration
|
||||
DevArtistInfoTimeToLive time.Duration
|
||||
DevAlbumInfoTimeToLive time.Duration
|
||||
DevExternalScanner bool
|
||||
DevScannerThreads uint
|
||||
DevInsightsInitialDelay time.Duration
|
||||
DevEnablePlayerInsights bool
|
||||
DevEnablePluginsInsights bool
|
||||
DevPluginCompilationTimeout time.Duration
|
||||
DevExternalArtistFetchMultiplier float64
|
||||
}
|
||||
|
||||
type scannerOptions struct {
|
||||
Enabled bool
|
||||
Schedule string
|
||||
WatcherWait time.Duration
|
||||
ScanOnStartup bool
|
||||
Extractor string
|
||||
GenreSeparators string
|
||||
GroupAlbumReleases bool
|
||||
ArtistJoiner string
|
||||
GenreSeparators string // Deprecated: Use Tags.genre.Split instead
|
||||
GroupAlbumReleases bool // Deprecated: Use PID.Album instead
|
||||
FollowSymlinks bool // Whether to follow symlinks when scanning directories
|
||||
PurgeMissing string // Values: "never", "always", "full"
|
||||
}
|
||||
|
||||
type subsonicOptions struct {
|
||||
AppendSubtitle bool
|
||||
ArtistParticipations bool
|
||||
DefaultReportRealPath bool
|
||||
LegacyClients string
|
||||
}
|
||||
|
||||
type TagConf struct {
|
||||
Ignore bool `yaml:"ignore" json:",omitempty"`
|
||||
Aliases []string `yaml:"aliases" json:",omitempty"`
|
||||
Type string `yaml:"type" json:",omitempty"`
|
||||
MaxLength int `yaml:"maxLength" json:",omitempty"`
|
||||
Split []string `yaml:"split" json:",omitempty"`
|
||||
Album bool `yaml:"album" json:",omitempty"`
|
||||
}
|
||||
|
||||
type lastfmOptions struct {
|
||||
Enabled bool
|
||||
ApiKey string
|
||||
Secret string
|
||||
Language string
|
||||
Enabled bool
|
||||
ApiKey string
|
||||
Secret string
|
||||
Language string
|
||||
ScrobbleFirstArtistOnly bool
|
||||
}
|
||||
|
||||
type spotifyOptions struct {
|
||||
@@ -129,6 +174,10 @@ type spotifyOptions struct {
|
||||
Secret string
|
||||
}
|
||||
|
||||
type deezerOptions struct {
|
||||
Enabled bool
|
||||
}
|
||||
|
||||
type listenBrainzOptions struct {
|
||||
Enabled bool
|
||||
BaseURL string
|
||||
@@ -141,6 +190,7 @@ type secureOptions struct {
|
||||
type prometheusOptions struct {
|
||||
Enabled bool
|
||||
MetricsPath string
|
||||
Password string
|
||||
}
|
||||
|
||||
type AudioDeviceDefinition []string
|
||||
@@ -152,6 +202,30 @@ type jukeboxOptions struct {
|
||||
AdminOnly bool
|
||||
}
|
||||
|
||||
type backupOptions struct {
|
||||
Count int
|
||||
Path string
|
||||
Schedule string
|
||||
}
|
||||
|
||||
type pidOptions struct {
|
||||
Track string
|
||||
Album string
|
||||
}
|
||||
|
||||
type inspectOptions struct {
|
||||
Enabled bool
|
||||
MaxRequests int
|
||||
BacklogLimit int
|
||||
BacklogTimeout int
|
||||
}
|
||||
|
||||
type pluginsOptions struct {
|
||||
Enabled bool
|
||||
Folder string
|
||||
CacheSize string
|
||||
}
|
||||
|
||||
var (
|
||||
Server = &configOptions{}
|
||||
hooks []func()
|
||||
@@ -164,18 +238,21 @@ func LoadFromFile(confFile string) {
|
||||
_, _ = fmt.Fprintln(os.Stderr, "FATAL: Error reading config file:", err)
|
||||
os.Exit(1)
|
||||
}
|
||||
Load()
|
||||
Load(true)
|
||||
}
|
||||
|
||||
func Load() {
|
||||
func Load(noConfigDump bool) {
|
||||
parseIniFileConfiguration()
|
||||
|
||||
err := viper.Unmarshal(&Server)
|
||||
if err != nil {
|
||||
_, _ = fmt.Fprintln(os.Stderr, "FATAL: Error parsing config:", err)
|
||||
os.Exit(1)
|
||||
}
|
||||
|
||||
err = os.MkdirAll(Server.DataFolder, os.ModePerm)
|
||||
if err != nil {
|
||||
_, _ = fmt.Fprintln(os.Stderr, "FATAL: Error creating data path:", "path", Server.DataFolder, err)
|
||||
_, _ = fmt.Fprintln(os.Stderr, "FATAL: Error creating data path:", err)
|
||||
os.Exit(1)
|
||||
}
|
||||
|
||||
@@ -184,28 +261,63 @@ func Load() {
|
||||
}
|
||||
err = os.MkdirAll(Server.CacheFolder, os.ModePerm)
|
||||
if err != nil {
|
||||
_, _ = fmt.Fprintln(os.Stderr, "FATAL: Error creating cache path:", "path", Server.CacheFolder, err)
|
||||
_, _ = fmt.Fprintln(os.Stderr, "FATAL: Error creating cache path:", err)
|
||||
os.Exit(1)
|
||||
}
|
||||
|
||||
if Server.Plugins.Enabled {
|
||||
if Server.Plugins.Folder == "" {
|
||||
Server.Plugins.Folder = filepath.Join(Server.DataFolder, "plugins")
|
||||
}
|
||||
err = os.MkdirAll(Server.Plugins.Folder, 0700)
|
||||
if err != nil {
|
||||
_, _ = fmt.Fprintln(os.Stderr, "FATAL: Error creating plugins path:", err)
|
||||
os.Exit(1)
|
||||
}
|
||||
}
|
||||
|
||||
Server.ConfigFile = viper.GetViper().ConfigFileUsed()
|
||||
if Server.DbPath == "" {
|
||||
Server.DbPath = filepath.Join(Server.DataFolder, consts.DefaultDbPath)
|
||||
}
|
||||
|
||||
if Server.Backup.Path != "" {
|
||||
err = os.MkdirAll(Server.Backup.Path, os.ModePerm)
|
||||
if err != nil {
|
||||
_, _ = fmt.Fprintln(os.Stderr, "FATAL: Error creating backup path:", err)
|
||||
os.Exit(1)
|
||||
}
|
||||
}
|
||||
|
||||
out := os.Stderr
|
||||
if Server.LogFile != "" {
|
||||
out, err = os.OpenFile(Server.LogFile, os.O_APPEND|os.O_CREATE|os.O_WRONLY, 0644)
|
||||
if err != nil {
|
||||
_, _ = fmt.Fprintf(os.Stderr, "FATAL: Error opening log file %s: %s\n", Server.LogFile, err.Error())
|
||||
os.Exit(1)
|
||||
}
|
||||
log.SetOutput(out)
|
||||
}
|
||||
|
||||
log.SetLevelString(Server.LogLevel)
|
||||
log.SetLogLevels(Server.DevLogLevels)
|
||||
log.SetLogSourceLine(Server.DevLogSourceLine)
|
||||
log.SetRedacting(Server.EnableLogRedacting)
|
||||
|
||||
if err := validateScanSchedule(); err != nil {
|
||||
err = run.Sequentially(
|
||||
validateScanSchedule,
|
||||
validateBackupSchedule,
|
||||
validatePlaylistsPath,
|
||||
validatePurgeMissingOption,
|
||||
)
|
||||
if err != nil {
|
||||
os.Exit(1)
|
||||
}
|
||||
|
||||
if Server.BaseURL != "" {
|
||||
u, err := url.Parse(Server.BaseURL)
|
||||
if err != nil {
|
||||
_, _ = fmt.Fprintf(os.Stderr, "FATAL: Invalid BaseURL %s: %s\n", Server.BaseURL, err.Error())
|
||||
_, _ = fmt.Fprintln(os.Stderr, "FATAL: Invalid BaseURL:", err)
|
||||
os.Exit(1)
|
||||
}
|
||||
Server.BasePath = u.Path
|
||||
@@ -216,28 +328,75 @@ func Load() {
|
||||
}
|
||||
|
||||
// Print current configuration if log level is Debug
|
||||
if log.IsGreaterOrEqualTo(log.LevelDebug) {
|
||||
if log.IsGreaterOrEqualTo(log.LevelDebug) && !noConfigDump {
|
||||
prettyConf := pretty.Sprintf("Loaded configuration from '%s': %# v", Server.ConfigFile, Server)
|
||||
if Server.EnableLogRedacting {
|
||||
prettyConf = log.Redact(prettyConf)
|
||||
}
|
||||
_, _ = fmt.Fprintln(os.Stderr, prettyConf)
|
||||
_, _ = fmt.Fprintln(out, prettyConf)
|
||||
}
|
||||
|
||||
if !Server.EnableExternalServices {
|
||||
disableExternalServices()
|
||||
}
|
||||
|
||||
if Server.Scanner.Extractor != consts.DefaultScannerExtractor {
|
||||
log.Warn(fmt.Sprintf("Extractor '%s' is not implemented, using 'taglib'", Server.Scanner.Extractor))
|
||||
Server.Scanner.Extractor = consts.DefaultScannerExtractor
|
||||
}
|
||||
logDeprecatedOptions("Scanner.GenreSeparators")
|
||||
logDeprecatedOptions("Scanner.GroupAlbumReleases")
|
||||
logDeprecatedOptions("DevEnableBufferedScrobble") // Deprecated: Buffered scrobbling is now always enabled and this option is ignored
|
||||
|
||||
// Call init hooks
|
||||
for _, hook := range hooks {
|
||||
hook()
|
||||
}
|
||||
}
|
||||
|
||||
func logDeprecatedOptions(options ...string) {
|
||||
for _, option := range options {
|
||||
envVar := "ND_" + strings.ToUpper(strings.ReplaceAll(option, ".", "_"))
|
||||
if os.Getenv(envVar) != "" {
|
||||
log.Warn(fmt.Sprintf("Option '%s' is deprecated and will be ignored in a future release", envVar))
|
||||
}
|
||||
if viper.InConfig(option) {
|
||||
log.Warn(fmt.Sprintf("Option '%s' is deprecated and will be ignored in a future release", option))
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// parseIniFileConfiguration is used to parse the config file when it is in INI format. For INI files, it
|
||||
// would require a nested structure, so instead we unmarshal it to a map and then merge the nested [default]
|
||||
// section into the root level.
|
||||
func parseIniFileConfiguration() {
|
||||
cfgFile := viper.ConfigFileUsed()
|
||||
if strings.ToLower(filepath.Ext(cfgFile)) == ".ini" {
|
||||
var iniConfig map[string]interface{}
|
||||
err := viper.Unmarshal(&iniConfig)
|
||||
if err != nil {
|
||||
_, _ = fmt.Fprintln(os.Stderr, "FATAL: Error parsing config:", err)
|
||||
os.Exit(1)
|
||||
}
|
||||
cfg, ok := iniConfig["default"].(map[string]any)
|
||||
if !ok {
|
||||
_, _ = fmt.Fprintln(os.Stderr, "FATAL: Error parsing config: missing [default] section:", iniConfig)
|
||||
os.Exit(1)
|
||||
}
|
||||
err = viper.MergeConfigMap(cfg)
|
||||
if err != nil {
|
||||
_, _ = fmt.Fprintln(os.Stderr, "FATAL: Error parsing config:", err)
|
||||
os.Exit(1)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
func disableExternalServices() {
|
||||
log.Info("All external integrations are DISABLED!")
|
||||
Server.EnableInsightsCollector = false
|
||||
Server.LastFM.Enabled = false
|
||||
Server.Spotify.ID = ""
|
||||
Server.Deezer.Enabled = false
|
||||
Server.ListenBrainz.Enabled = false
|
||||
Server.Agents = ""
|
||||
if Server.UILoginBackgroundURL == consts.DefaultUILoginBackgroundURL {
|
||||
@@ -245,33 +404,67 @@ func disableExternalServices() {
|
||||
}
|
||||
}
|
||||
|
||||
func validateScanSchedule() error {
|
||||
if Server.ScanInterval != -1 {
|
||||
log.Warn("ScanInterval is DEPRECATED. Please use ScanSchedule. See docs at https://navidrome.org/docs/usage/configuration-options/")
|
||||
if Server.ScanSchedule != "@every 1m" {
|
||||
log.Error("You cannot specify both ScanInterval and ScanSchedule, ignoring ScanInterval")
|
||||
} else {
|
||||
if Server.ScanInterval == 0 {
|
||||
Server.ScanSchedule = ""
|
||||
} else {
|
||||
Server.ScanSchedule = fmt.Sprintf("@every %s", Server.ScanInterval)
|
||||
}
|
||||
log.Warn("Setting ScanSchedule", "schedule", Server.ScanSchedule)
|
||||
func validatePlaylistsPath() error {
|
||||
for _, path := range strings.Split(Server.PlaylistsPath, string(filepath.ListSeparator)) {
|
||||
_, err := doublestar.Match(path, "")
|
||||
if err != nil {
|
||||
log.Error("Invalid PlaylistsPath", "path", path, err)
|
||||
return err
|
||||
}
|
||||
}
|
||||
if Server.ScanSchedule == "0" || Server.ScanSchedule == "" {
|
||||
Server.ScanSchedule = ""
|
||||
return nil
|
||||
}
|
||||
|
||||
func validatePurgeMissingOption() error {
|
||||
allowedValues := []string{consts.PurgeMissingNever, consts.PurgeMissingAlways, consts.PurgeMissingFull}
|
||||
valid := false
|
||||
for _, v := range allowedValues {
|
||||
if v == Server.Scanner.PurgeMissing {
|
||||
valid = true
|
||||
break
|
||||
}
|
||||
}
|
||||
if !valid {
|
||||
err := fmt.Errorf("Invalid Scanner.PurgeMissing value: '%s'. Must be one of: %v", Server.Scanner.PurgeMissing, allowedValues)
|
||||
log.Error(err.Error())
|
||||
Server.Scanner.PurgeMissing = consts.PurgeMissingNever
|
||||
return err
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
func validateScanSchedule() error {
|
||||
if Server.Scanner.Schedule == "0" || Server.Scanner.Schedule == "" {
|
||||
Server.Scanner.Schedule = ""
|
||||
return nil
|
||||
}
|
||||
if _, err := time.ParseDuration(Server.ScanSchedule); err == nil {
|
||||
Server.ScanSchedule = "@every " + Server.ScanSchedule
|
||||
var err error
|
||||
Server.Scanner.Schedule, err = validateSchedule(Server.Scanner.Schedule, "Scanner.Schedule")
|
||||
return err
|
||||
}
|
||||
|
||||
func validateBackupSchedule() error {
|
||||
if Server.Backup.Path == "" || Server.Backup.Schedule == "" || Server.Backup.Count == 0 {
|
||||
Server.Backup.Schedule = ""
|
||||
return nil
|
||||
}
|
||||
var err error
|
||||
Server.Backup.Schedule, err = validateSchedule(Server.Backup.Schedule, "Backup.Schedule")
|
||||
return err
|
||||
}
|
||||
|
||||
func validateSchedule(schedule, field string) (string, error) {
|
||||
if _, err := time.ParseDuration(schedule); err == nil {
|
||||
schedule = "@every " + schedule
|
||||
}
|
||||
c := cron.New()
|
||||
_, err := c.AddFunc(Server.ScanSchedule, func() {})
|
||||
id, err := c.AddFunc(schedule, func() {})
|
||||
if err != nil {
|
||||
log.Error("Invalid ScanSchedule. Please read format spec at https://pkg.go.dev/github.com/robfig/cron#hdr-CRON_Expression_Format", "schedule", Server.ScanSchedule, err)
|
||||
log.Error(fmt.Sprintf("Invalid %s. Please read format spec at https://pkg.go.dev/github.com/robfig/cron#hdr-CRON_Expression_Format", field), "schedule", schedule, err)
|
||||
} else {
|
||||
c.Remove(id)
|
||||
}
|
||||
return err
|
||||
return schedule, err
|
||||
}
|
||||
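validateSchedule accepts either a plain Go duration or a cron expression; durations are normalized to the cron library's "@every" form before being validated. A minimal sketch of the same normalization, assuming the robfig/cron v3 package referenced in the error message above:

package main

import (
    "fmt"
    "time"

    "github.com/robfig/cron/v3"
)

func main() {
    for _, schedule := range []string{"1h", "0 3 * * *"} {
        // Plain durations ("1h") become "@every 1h"; cron specs pass through unchanged.
        if _, err := time.ParseDuration(schedule); err == nil {
            schedule = "@every " + schedule
        }
        c := cron.New()
        id, err := c.AddFunc(schedule, func() {})
        fmt.Println(schedule, err) // both forms validate with a nil error
        if err == nil {
            c.Remove(id)
        }
    }
}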
|
||||
// AddHook is used to register initialization code that should run as soon as the config is loaded
|
||||
@@ -279,17 +472,16 @@ func AddHook(hook func()) {
|
||||
hooks = append(hooks, hook)
|
||||
}
|
||||
|
||||
func init() {
|
||||
func setViperDefaults() {
|
||||
viper.SetDefault("musicfolder", filepath.Join(".", "music"))
|
||||
viper.SetDefault("cachefolder", "")
|
||||
viper.SetDefault("datafolder", ".")
|
||||
viper.SetDefault("loglevel", "info")
|
||||
viper.SetDefault("logfile", "")
|
||||
viper.SetDefault("address", "0.0.0.0")
|
||||
viper.SetDefault("port", 4533)
|
||||
viper.SetDefault("unixsocketperm", "0660")
|
||||
viper.SetDefault("sessiontimeout", consts.DefaultSessionTimeout)
|
||||
viper.SetDefault("scaninterval", -1)
|
||||
viper.SetDefault("scanschedule", "@every 1m")
|
||||
viper.SetDefault("baseurl", "")
|
||||
viper.SetDefault("tlscert", "")
|
||||
viper.SetDefault("tlskey", "")
|
||||
@@ -303,7 +495,7 @@ func init() {
|
||||
viper.SetDefault("enableartworkprecache", true)
|
||||
viper.SetDefault("autoimportplaylists", true)
|
||||
viper.SetDefault("defaultplaylistpublicvisibility", false)
|
||||
viper.SetDefault("playlistspath", consts.DefaultPlaylistsPath)
|
||||
viper.SetDefault("playlistspath", "")
|
||||
viper.SetDefault("smartPlaylistRefreshDelay", 5*time.Second)
|
||||
viper.SetDefault("enabledownloads", true)
|
||||
viper.SetDefault("enableexternalservices", true)
|
||||
@@ -315,13 +507,12 @@ func init() {
|
||||
viper.SetDefault("prefersorttags", false)
|
||||
viper.SetDefault("ignoredarticles", "The El La Los Las Le Les Os As O A")
|
||||
viper.SetDefault("indexgroups", "A B C D E F G H I J K L M N O P Q R S T U V W X-Z(XYZ) [Unknown]([)")
|
||||
viper.SetDefault("subsonicartistparticipations", false)
|
||||
viper.SetDefault("ffmpegpath", "")
|
||||
viper.SetDefault("mpvcmdtemplate", "mpv --audio-device=%d --no-audio-display --pause %f --input-ipc-server=%s")
|
||||
|
||||
viper.SetDefault("mpvcmdtemplate", "mpv --audio-device=%d --no-audio-display %f --input-ipc-server=%s")
|
||||
viper.SetDefault("coverartpriority", "cover.*, folder.*, front.*, embedded, external")
|
||||
viper.SetDefault("coverjpegquality", 75)
|
||||
viper.SetDefault("artistartpriority", "artist.*, album/artist.*, external")
|
||||
viper.SetDefault("lyricspriority", ".lrc,.txt,embedded")
|
||||
viper.SetDefault("enablegravatar", false)
|
||||
viper.SetDefault("enablefavourites", true)
|
||||
viper.SetDefault("enablestarrating", true)
|
||||
@@ -331,38 +522,64 @@ func init() {
|
||||
viper.SetDefault("defaultuivolume", consts.DefaultUIVolume)
|
||||
viper.SetDefault("enablereplaygain", true)
|
||||
viper.SetDefault("enablecoveranimation", true)
|
||||
viper.SetDefault("enablenowplaying", true)
|
||||
viper.SetDefault("enablesharing", false)
|
||||
viper.SetDefault("shareurl", "")
|
||||
viper.SetDefault("defaultshareexpiration", 8760*time.Hour)
|
||||
viper.SetDefault("defaultdownloadableshare", false)
|
||||
viper.SetDefault("gatrackingid", "")
|
||||
viper.SetDefault("enableinsightscollector", true)
|
||||
viper.SetDefault("enablelogredacting", true)
|
||||
viper.SetDefault("authrequestlimit", 5)
|
||||
viper.SetDefault("authwindowlength", 20*time.Second)
|
||||
viper.SetDefault("passwordencryptionkey", "")
|
||||
|
||||
viper.SetDefault("reverseproxyuserheader", "Remote-User")
|
||||
viper.SetDefault("reverseproxywhitelist", "")
|
||||
|
||||
viper.SetDefault("prometheus.enabled", false)
|
||||
viper.SetDefault("prometheus.metricspath", "/metrics")
|
||||
|
||||
viper.SetDefault("prometheus.metricspath", consts.PrometheusDefaultPath)
|
||||
viper.SetDefault("prometheus.password", "")
|
||||
viper.SetDefault("jukebox.enabled", false)
|
||||
viper.SetDefault("jukebox.devices", []AudioDeviceDefinition{})
|
||||
viper.SetDefault("jukebox.default", "")
|
||||
viper.SetDefault("jukebox.adminonly", true)
|
||||
|
||||
viper.SetDefault("scanner.enabled", true)
|
||||
viper.SetDefault("scanner.schedule", "0")
|
||||
viper.SetDefault("scanner.extractor", consts.DefaultScannerExtractor)
|
||||
viper.SetDefault("scanner.genreseparators", ";/,")
|
||||
viper.SetDefault("scanner.watcherwait", consts.DefaultWatcherWait)
|
||||
viper.SetDefault("scanner.scanonstartup", true)
|
||||
viper.SetDefault("scanner.artistjoiner", consts.ArtistJoiner)
|
||||
viper.SetDefault("scanner.genreseparators", "")
|
||||
viper.SetDefault("scanner.groupalbumreleases", false)
|
||||
|
||||
viper.SetDefault("agents", "lastfm,spotify")
|
||||
viper.SetDefault("scanner.followsymlinks", true)
|
||||
viper.SetDefault("scanner.purgemissing", consts.PurgeMissingNever)
|
||||
viper.SetDefault("subsonic.appendsubtitle", true)
|
||||
viper.SetDefault("subsonic.artistparticipations", false)
|
||||
viper.SetDefault("subsonic.defaultreportrealpath", false)
|
||||
viper.SetDefault("subsonic.legacyclients", "DSub")
|
||||
viper.SetDefault("agents", "lastfm,spotify,deezer")
|
||||
viper.SetDefault("lastfm.enabled", true)
|
||||
viper.SetDefault("lastfm.language", "en")
|
||||
viper.SetDefault("lastfm.apikey", "")
|
||||
viper.SetDefault("lastfm.secret", "")
|
||||
viper.SetDefault("lastfm.scrobblefirstartistonly", false)
|
||||
viper.SetDefault("spotify.id", "")
|
||||
viper.SetDefault("spotify.secret", "")
|
||||
viper.SetDefault("deezer.enabled", true)
|
||||
viper.SetDefault("listenbrainz.enabled", true)
|
||||
viper.SetDefault("listenbrainz.baseurl", "https://api.listenbrainz.org/1/")
|
||||
|
||||
viper.SetDefault("httpsecurityheaders.customframeoptionsvalue", "DENY")
|
||||
viper.SetDefault("backup.path", "")
|
||||
viper.SetDefault("backup.schedule", "")
|
||||
viper.SetDefault("backup.count", 0)
|
||||
viper.SetDefault("pid.track", consts.DefaultTrackPID)
|
||||
viper.SetDefault("pid.album", consts.DefaultAlbumPID)
|
||||
viper.SetDefault("inspect.enabled", true)
|
||||
viper.SetDefault("inspect.maxrequests", 1)
|
||||
viper.SetDefault("inspect.backloglimit", consts.RequestThrottleBacklogLimit)
|
||||
viper.SetDefault("inspect.backlogtimeout", consts.RequestThrottleBacklogTimeout)
|
||||
viper.SetDefault("plugins.folder", "")
|
||||
viper.SetDefault("plugins.enabled", false)
|
||||
viper.SetDefault("plugins.cachesize", "100MB")
|
||||
|
||||
// DevFlags. These are used to enable/disable debugging and incomplete features
|
||||
viper.SetDefault("devlogsourceline", false)
|
||||
@@ -370,21 +587,35 @@ func init() {
|
||||
viper.SetDefault("devautocreateadminpassword", "")
|
||||
viper.SetDefault("devautologinusername", "")
|
||||
viper.SetDefault("devactivitypanel", true)
|
||||
viper.SetDefault("enablesharing", false)
|
||||
viper.SetDefault("shareurl", "")
|
||||
viper.SetDefault("defaultdownloadableshare", false)
|
||||
viper.SetDefault("devenablebufferedscrobble", true)
|
||||
viper.SetDefault("devactivitypanelupdaterate", 300*time.Millisecond)
|
||||
viper.SetDefault("devsidebarplaylists", true)
|
||||
viper.SetDefault("devshowartistpage", true)
|
||||
viper.SetDefault("devuishowconfig", true)
|
||||
viper.SetDefault("devneweventstream", true)
|
||||
viper.SetDefault("devoffsetoptimize", 50000)
|
||||
viper.SetDefault("devartworkmaxrequests", max(2, runtime.NumCPU()/3))
|
||||
viper.SetDefault("devartworkthrottlebackloglimit", consts.RequestThrottleBacklogLimit)
|
||||
viper.SetDefault("devartworkthrottlebacklogtimeout", consts.RequestThrottleBacklogTimeout)
|
||||
viper.SetDefault("devartistinfotimetolive", consts.ArtistInfoTimeToLive)
|
||||
viper.SetDefault("devalbuminfotimetolive", consts.AlbumInfoTimeToLive)
|
||||
viper.SetDefault("devexternalscanner", true)
|
||||
viper.SetDefault("devscannerthreads", 5)
|
||||
viper.SetDefault("devinsightsinitialdelay", consts.InsightsInitialDelay)
|
||||
viper.SetDefault("devenableplayerinsights", true)
|
||||
viper.SetDefault("devenablepluginsinsights", true)
|
||||
viper.SetDefault("devplugincompilationtimeout", time.Minute)
|
||||
viper.SetDefault("devexternalartistfetchmultiplier", 1.5)
|
||||
}
|
||||
|
||||
func init() {
|
||||
setViperDefaults()
|
||||
}
|
||||
|
||||
func InitConfig(cfgFile string) {
|
||||
codecRegistry := viper.NewCodecRegistry()
|
||||
_ = codecRegistry.RegisterCodec("ini", ini.Codec{})
|
||||
viper.SetOptions(viper.WithCodecRegistry(codecRegistry))
|
||||
|
||||
cfgFile = getConfigFile(cfgFile)
|
||||
if cfgFile != "" {
|
||||
// Use config file from the flag.
|
||||
@@ -408,9 +639,17 @@ func InitConfig(cfgFile string) {
|
||||
}
|
||||
}
|
||||
|
||||
// getConfigFile returns the path to the config file, either from the flag or from the environment variable.
|
||||
// If it is defined in the environment variable, it will check if the file exists.
|
||||
func getConfigFile(cfgFile string) string {
|
||||
if cfgFile != "" {
|
||||
return cfgFile
|
||||
}
|
||||
return os.Getenv("ND_CONFIGFILE")
|
||||
cfgFile = os.Getenv("ND_CONFIGFILE")
|
||||
if cfgFile != "" {
|
||||
if _, err := os.Stat(cfgFile); err == nil {
|
||||
return cfgFile
|
||||
}
|
||||
}
|
||||
return ""
|
||||
}
|
||||
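In practice the config file can therefore be supplied either on the command line or via the environment, e.g. navidrome --configfile "/etc/navidrome/navidrome.toml" or ND_CONFIGFILE=/etc/navidrome/navidrome.toml navidrome (the path here is only an example, matching the systemd unit further down); the environment variable is ignored if the file it points to does not exist.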
|
||||
51
conf/configuration_test.go
Normal file
@@ -0,0 +1,51 @@
|
||||
package conf_test
|
||||
|
||||
import (
|
||||
"fmt"
|
||||
"path/filepath"
|
||||
"testing"
|
||||
|
||||
"github.com/navidrome/navidrome/conf"
|
||||
. "github.com/onsi/ginkgo/v2"
|
||||
. "github.com/onsi/gomega"
|
||||
"github.com/spf13/viper"
|
||||
)
|
||||
|
||||
func TestConfiguration(t *testing.T) {
|
||||
RegisterFailHandler(Fail)
|
||||
RunSpecs(t, "Configuration Suite")
|
||||
}
|
||||
|
||||
var _ = Describe("Configuration", func() {
|
||||
BeforeEach(func() {
|
||||
// Reset viper configuration
|
||||
viper.Reset()
|
||||
conf.SetViperDefaults()
|
||||
viper.SetDefault("datafolder", GinkgoT().TempDir())
|
||||
viper.SetDefault("loglevel", "error")
|
||||
conf.ResetConf()
|
||||
})
|
||||
|
||||
DescribeTable("should load configuration from",
|
||||
func(format string) {
|
||||
filename := filepath.Join("testdata", "cfg."+format)
|
||||
|
||||
// Initialize config with the test file
|
||||
conf.InitConfig(filename)
|
||||
// Load the configuration (with noConfigDump=true)
|
||||
conf.Load(true)
|
||||
|
||||
// Execute the format-specific assertions
|
||||
Expect(conf.Server.MusicFolder).To(Equal(fmt.Sprintf("/%s/music", format)))
|
||||
Expect(conf.Server.UIWelcomeMessage).To(Equal("Welcome " + format))
|
||||
Expect(conf.Server.Tags["custom"].Aliases).To(Equal([]string{format, "test"}))
|
||||
|
||||
// The config file used should be the one we created
|
||||
Expect(conf.Server.ConfigFile).To(Equal(filename))
|
||||
},
|
||||
Entry("TOML format", "toml"),
|
||||
Entry("YAML format", "yaml"),
|
||||
Entry("INI format", "ini"),
|
||||
Entry("JSON format", "json"),
|
||||
)
|
||||
})
|
||||
7
conf/export_test.go
Normal file
@@ -0,0 +1,7 @@
|
||||
package conf
|
||||
|
||||
func ResetConf() {
|
||||
Server = &configOptions{}
|
||||
}
|
||||
|
||||
var SetViperDefaults = setViperDefaults
|
||||
@@ -21,6 +21,7 @@ func initMimeTypes() {
|
||||
// In some circumstances, Windows sets JS mime-type to `text/plain`!
|
||||
_ = mime.AddExtensionType(".js", "text/javascript")
|
||||
_ = mime.AddExtensionType(".css", "text/css")
|
||||
_ = mime.AddExtensionType(".webmanifest", "application/manifest+json")
|
||||
|
||||
f, err := resources.FS().Open("mime_types.yaml")
|
||||
if err != nil {
|
||||
|
||||
6
conf/testdata/cfg.ini
vendored
Normal file
@@ -0,0 +1,6 @@
|
||||
[default]
|
||||
MusicFolder = /ini/music
|
||||
UIWelcomeMessage = Welcome ini
|
||||
|
||||
[Tags]
|
||||
Custom.Aliases = ini,test
|
||||
12
conf/testdata/cfg.json
vendored
Normal file
@@ -0,0 +1,12 @@
|
||||
{
|
||||
"musicFolder": "/json/music",
|
||||
"uiWelcomeMessage": "Welcome json",
|
||||
"Tags": {
|
||||
"custom": {
|
||||
"aliases": [
|
||||
"json",
|
||||
"test"
|
||||
]
|
||||
}
|
||||
}
|
||||
}
|
||||
5
conf/testdata/cfg.toml
vendored
Normal file
@@ -0,0 +1,5 @@
|
||||
musicFolder = "/toml/music"
|
||||
uiWelcomeMessage = "Welcome toml"
|
||||
|
||||
[Tags.custom]
|
||||
aliases = ["toml", "test"]
|
||||
7
conf/testdata/cfg.yaml
vendored
Normal file
@@ -0,0 +1,7 @@
|
||||
musicFolder: "/yaml/music"
|
||||
uiWelcomeMessage: "Welcome yaml"
|
||||
Tags:
|
||||
custom:
|
||||
aliases:
|
||||
- yaml
|
||||
- test
|
||||
@@ -1,26 +1,32 @@
|
||||
package consts
|
||||
|
||||
import (
|
||||
"crypto/md5"
|
||||
"fmt"
|
||||
"path/filepath"
|
||||
"os"
|
||||
"strings"
|
||||
"time"
|
||||
|
||||
"github.com/navidrome/navidrome/model/id"
|
||||
)
|
||||
|
||||
const (
|
||||
AppName = "navidrome"
|
||||
|
||||
DefaultDbPath = "navidrome.db?cache=shared&_cache_size=1000000000&_busy_timeout=5000&_journal_mode=WAL&_synchronous=NORMAL&_foreign_keys=on&_txlock=immediate"
|
||||
InitialSetupFlagKey = "InitialSetup"
|
||||
DefaultDbPath = "navidrome.db?cache=shared&_busy_timeout=15000&_journal_mode=WAL&_foreign_keys=on&synchronous=normal"
|
||||
InitialSetupFlagKey = "InitialSetup"
|
||||
FullScanAfterMigrationFlagKey = "FullScanAfterMigration"
|
||||
LastScanErrorKey = "LastScanError"
|
||||
LastScanTypeKey = "LastScanType"
|
||||
LastScanStartTimeKey = "LastScanStartTime"
|
||||
|
||||
UIAuthorizationHeader = "X-ND-Authorization"
|
||||
UIClientUniqueIDHeader = "X-ND-Client-Unique-Id"
|
||||
JWTSecretKey = "JWTSecret"
|
||||
JWTIssuer = "ND"
|
||||
DefaultSessionTimeout = 24 * time.Hour
|
||||
DefaultSessionTimeout = 48 * time.Hour
|
||||
CookieExpiry = 365 * 24 * 3600 // One year
|
||||
|
||||
OptimizeDBSchedule = "@every 24h"
|
||||
|
||||
// DefaultEncryptionKey This is the encryption key used if none is specified in the `PasswordEncryptionKey` option
|
||||
// Never ever change this! Or it will break all Navidrome installations that don't set the config option
|
||||
DefaultEncryptionKey = "just for obfuscation"
|
||||
@@ -50,14 +56,16 @@ const (
|
||||
|
||||
ServerReadHeaderTimeout = 3 * time.Second
|
||||
|
||||
ArtistInfoTimeToLive = 24 * time.Hour
|
||||
AlbumInfoTimeToLive = 7 * 24 * time.Hour
|
||||
ArtistInfoTimeToLive = 24 * time.Hour
|
||||
AlbumInfoTimeToLive = 7 * 24 * time.Hour
|
||||
UpdateLastAccessFrequency = time.Minute
|
||||
UpdatePlayerFrequency = time.Minute
|
||||
|
||||
I18nFolder = "i18n"
|
||||
SkipScanFile = ".ndignore"
|
||||
I18nFolder = "i18n"
|
||||
ScanIgnoreFile = ".ndignore"
|
||||
|
||||
PlaceholderArtistArt = "artist-placeholder.webp"
|
||||
PlaceholderAlbumArt = "placeholder.png"
|
||||
PlaceholderAlbumArt = "album-placeholder.webp"
|
||||
PlaceholderAvatar = "logo-192x192.png"
|
||||
UICoverArtSize = 300
|
||||
DefaultUIVolume = 100
|
||||
@@ -65,8 +73,14 @@ const (
|
||||
DefaultHttpClientTimeOut = 10 * time.Second
|
||||
|
||||
DefaultScannerExtractor = "taglib"
|
||||
DefaultWatcherWait = 5 * time.Second
|
||||
Zwsp = string('\u200b')
|
||||
)
|
||||
|
||||
Zwsp = string('\u200b')
|
||||
// Prometheus options
|
||||
const (
|
||||
PrometheusDefaultPath = "/metrics"
|
||||
PrometheusAuthUser = "navidrome"
|
||||
)
|
||||
|
||||
// Cache options
|
||||
@@ -86,6 +100,27 @@ const (
|
||||
AlbumPlayCountModeNormalized = "normalized"
|
||||
)
|
||||
|
||||
const (
|
||||
//DefaultAlbumPID = "album_legacy"
|
||||
DefaultAlbumPID = "musicbrainz_albumid|albumartistid,album,albumversion,releasedate"
|
||||
DefaultTrackPID = "musicbrainz_trackid|albumid,discnumber,tracknumber,title"
|
||||
PIDAlbumKey = "PIDAlbum"
|
||||
PIDTrackKey = "PIDTrack"
|
||||
)
|
||||
|
||||
const (
|
||||
InsightsIDKey = "InsightsID"
|
||||
InsightsEndpoint = "https://insights.navidrome.org/collect"
|
||||
InsightsUpdateInterval = 24 * time.Hour
|
||||
InsightsInitialDelay = 30 * time.Minute
|
||||
)
|
||||
|
||||
const (
|
||||
PurgeMissingNever = "never"
|
||||
PurgeMissingAlways = "always"
|
||||
PurgeMissingFull = "full"
|
||||
)
|
||||
|
||||
var (
|
||||
DefaultDownsamplingFormat = "opus"
|
||||
DefaultTranscodings = []struct {
|
||||
@@ -113,17 +148,29 @@ var (
|
||||
Command: "ffmpeg -i %s -ss %t -map 0:a:0 -b:a %bk -v 0 -c:a aac -f adts -",
|
||||
},
|
||||
}
|
||||
|
||||
DefaultPlaylistsPath = strings.Join([]string{".", "**/**"}, string(filepath.ListSeparator))
|
||||
)
|
||||
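On Linux and macOS, where filepath.ListSeparator is ':', the DefaultPlaylistsPath expression above evaluates to ".:**/**"; on Windows the separator is ';'. Each entry in that list is a glob pattern, which is what validatePlaylistsPath (earlier in this diff) checks with doublestar.Match.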
|
||||
var (
|
||||
VariousArtists = "Various Artists"
|
||||
VariousArtistsID = fmt.Sprintf("%x", md5.Sum([]byte(strings.ToLower(VariousArtists))))
|
||||
UnknownAlbum = "[Unknown Album]"
|
||||
UnknownArtist = "[Unknown Artist]"
|
||||
UnknownArtistID = fmt.Sprintf("%x", md5.Sum([]byte(strings.ToLower(UnknownArtist))))
|
||||
VariousArtists = "Various Artists"
|
||||
// TODO This will be dynamic when using disambiguation
|
||||
VariousArtistsID = "63sqASlAfjbGMuLP4JhnZU"
|
||||
UnknownAlbum = "[Unknown Album]"
|
||||
UnknownArtist = "[Unknown Artist]"
|
||||
// TODO This will be dynamic when using disambiguation
|
||||
UnknownArtistID = id.NewHash(strings.ToLower(UnknownArtist))
|
||||
VariousArtistsMbzId = "89ad4ac3-39f7-470e-963a-56509c546377"
|
||||
|
||||
ServerStart = time.Now()
|
||||
ArtistJoiner = " • "
|
||||
)
|
||||
|
||||
var (
|
||||
ServerStart = time.Now()
|
||||
|
||||
InContainer = func() bool {
|
||||
// Check if the /.nddockerenv file exists
|
||||
if _, err := os.Stat("/.nddockerenv"); err == nil {
|
||||
return true
|
||||
}
|
||||
return false
|
||||
}()
|
||||
)
|
||||
|
||||
@@ -11,15 +11,13 @@ WantedBy=multi-user.target
|
||||
User=navidrome
|
||||
Group=navidrome
|
||||
Type=simple
|
||||
ExecStart=/usr/bin/navidrome
|
||||
ExecStart=/usr/bin/navidrome --configfile "/etc/navidrome/navidrome.toml"
|
||||
StateDirectory=navidrome
|
||||
WorkingDirectory=/var/lib/navidrome
|
||||
TimeoutStopSec=20
|
||||
KillMode=process
|
||||
Restart=on-failure
|
||||
|
||||
EnvironmentFile=-/etc/sysconfig/navidrome
|
||||
|
||||
# See https://www.freedesktop.org/software/systemd/man/systemd.exec.html
|
||||
CapabilityBoundingSet=
|
||||
DevicePolicy=closed
|
||||
|
||||
@@ -2,6 +2,7 @@ package agents
|
||||
|
||||
import (
|
||||
"context"
|
||||
"slices"
|
||||
"strings"
|
||||
"time"
|
||||
|
||||
@@ -10,31 +11,110 @@ import (
|
||||
"github.com/navidrome/navidrome/log"
|
||||
"github.com/navidrome/navidrome/model"
|
||||
"github.com/navidrome/navidrome/utils"
|
||||
"github.com/navidrome/navidrome/utils/singleton"
|
||||
)
|
||||
|
||||
type Agents struct {
|
||||
ds model.DataStore
|
||||
agents []Interface
|
||||
// PluginLoader defines an interface for loading plugins
|
||||
type PluginLoader interface {
|
||||
// PluginNames returns the names of all plugins that implement a particular service
|
||||
PluginNames(capability string) []string
|
||||
// LoadMediaAgent loads and returns a media agent plugin
|
||||
LoadMediaAgent(name string) (Interface, bool)
|
||||
}
|
||||
|
||||
func New(ds model.DataStore) *Agents {
|
||||
var order []string
|
||||
if conf.Server.Agents != "" {
|
||||
order = strings.Split(conf.Server.Agents, ",")
|
||||
type Agents struct {
|
||||
ds model.DataStore
|
||||
pluginLoader PluginLoader
|
||||
}
|
||||
|
||||
// GetAgents returns the singleton instance of Agents
|
||||
func GetAgents(ds model.DataStore, pluginLoader PluginLoader) *Agents {
|
||||
return singleton.GetInstance(func() *Agents {
|
||||
return createAgents(ds, pluginLoader)
|
||||
})
|
||||
}
|
||||
|
||||
// createAgents creates a new Agents instance. Used in tests
|
||||
func createAgents(ds model.DataStore, pluginLoader PluginLoader) *Agents {
|
||||
return &Agents{
|
||||
ds: ds,
|
||||
pluginLoader: pluginLoader,
|
||||
}
|
||||
order = append(order, LocalAgentName)
|
||||
var res []Interface
|
||||
for _, name := range order {
|
||||
init, ok := Map[name]
|
||||
if !ok {
|
||||
log.Error("Agent not available. Check configuration", "name", name)
|
||||
continue
|
||||
}
|
||||
|
||||
// enabledAgent represents an enabled agent with its type information
|
||||
type enabledAgent struct {
|
||||
name string
|
||||
isPlugin bool
|
||||
}
|
||||
|
||||
// getEnabledAgentNames returns the current list of enabled agents, including:
|
||||
// 1. Built-in agents and plugins from config (in the specified order)
|
||||
// 2. Always include LocalAgentName
|
||||
// 3. If config is empty, include ONLY LocalAgentName
|
||||
// Each enabledAgent contains the name and whether it's a plugin (true) or built-in (false)
|
||||
func (a *Agents) getEnabledAgentNames() []enabledAgent {
|
||||
// If no agents configured, ONLY use the local agent
|
||||
if conf.Server.Agents == "" {
|
||||
return []enabledAgent{{name: LocalAgentName, isPlugin: false}}
|
||||
}
|
||||
|
||||
// Get all available plugin names
|
||||
var availablePlugins []string
|
||||
if a.pluginLoader != nil {
|
||||
availablePlugins = a.pluginLoader.PluginNames("MetadataAgent")
|
||||
}
|
||||
|
||||
configuredAgents := strings.Split(conf.Server.Agents, ",")
|
||||
|
||||
// Always add LocalAgentName if not already included
|
||||
hasLocalAgent := slices.Contains(configuredAgents, LocalAgentName)
|
||||
if !hasLocalAgent {
|
||||
configuredAgents = append(configuredAgents, LocalAgentName)
|
||||
}
|
||||
|
||||
// Filter to only include valid agents (built-in or plugins)
|
||||
var validAgents []enabledAgent
|
||||
for _, name := range configuredAgents {
|
||||
// Check if it's a built-in agent
|
||||
isBuiltIn := Map[name] != nil
|
||||
|
||||
// Check if it's a plugin
|
||||
isPlugin := slices.Contains(availablePlugins, name)
|
||||
|
||||
if isBuiltIn {
|
||||
validAgents = append(validAgents, enabledAgent{name: name, isPlugin: false})
|
||||
} else if isPlugin {
|
||||
validAgents = append(validAgents, enabledAgent{name: name, isPlugin: true})
|
||||
} else {
|
||||
log.Warn("Unknown agent ignored", "name", name)
|
||||
}
|
||||
}
|
||||
return validAgents
|
||||
}
|
||||
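An illustrative resolution of the rules above (the plugin name is hypothetical): with conf.Server.Agents = "lastfm,myplugin" and a plugin loader that reports "myplugin" as a MetadataAgent plugin, getEnabledAgentNames would return, in order:

// {name: "lastfm",   isPlugin: false}  // built-in agent
// {name: "myplugin", isPlugin: true}   // WASM plugin
// {name: "local",    isPlugin: false}  // LocalAgentName is always appended last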
|
||||
res = append(res, init(ds))
|
||||
func (a *Agents) getAgent(ea enabledAgent) Interface {
|
||||
if ea.isPlugin {
|
||||
// Try to load WASM plugin agent (if plugin loader is available)
|
||||
if a.pluginLoader != nil {
|
||||
agent, ok := a.pluginLoader.LoadMediaAgent(ea.name)
|
||||
if ok && agent != nil {
|
||||
return agent
|
||||
}
|
||||
}
|
||||
} else {
|
||||
// Try to get built-in agent
|
||||
constructor, ok := Map[ea.name]
|
||||
if ok {
|
||||
agent := constructor(a.ds)
|
||||
if agent != nil {
|
||||
return agent
|
||||
}
|
||||
log.Debug("Built-in agent not available. Missing configuration?", "name", ea.name)
|
||||
}
|
||||
}
|
||||
|
||||
return &Agents{ds: ds, agents: res}
|
||||
return nil
|
||||
}
|
||||
|
||||
func (a *Agents) AgentName() string {
|
||||
@@ -49,15 +129,19 @@ func (a *Agents) GetArtistMBID(ctx context.Context, id string, name string) (str
|
||||
return "", nil
|
||||
}
|
||||
start := time.Now()
|
||||
for _, ag := range a.agents {
|
||||
for _, enabledAgent := range a.getEnabledAgentNames() {
|
||||
ag := a.getAgent(enabledAgent)
|
||||
if ag == nil {
|
||||
continue
|
||||
}
|
||||
if utils.IsCtxDone(ctx) {
|
||||
break
|
||||
}
|
||||
agent, ok := ag.(ArtistMBIDRetriever)
|
||||
retriever, ok := ag.(ArtistMBIDRetriever)
|
||||
if !ok {
|
||||
continue
|
||||
}
|
||||
mbid, err := agent.GetArtistMBID(ctx, id, name)
|
||||
mbid, err := retriever.GetArtistMBID(ctx, id, name)
|
||||
if mbid != "" && err == nil {
|
||||
log.Debug(ctx, "Got MBID", "agent", ag.AgentName(), "artist", name, "mbid", mbid, "elapsed", time.Since(start))
|
||||
return mbid, nil
|
||||
@@ -74,15 +158,19 @@ func (a *Agents) GetArtistURL(ctx context.Context, id, name, mbid string) (strin
|
||||
return "", nil
|
||||
}
|
||||
start := time.Now()
|
||||
for _, ag := range a.agents {
|
||||
for _, enabledAgent := range a.getEnabledAgentNames() {
|
||||
ag := a.getAgent(enabledAgent)
|
||||
if ag == nil {
|
||||
continue
|
||||
}
|
||||
if utils.IsCtxDone(ctx) {
|
||||
break
|
||||
}
|
||||
agent, ok := ag.(ArtistURLRetriever)
|
||||
retriever, ok := ag.(ArtistURLRetriever)
|
||||
if !ok {
|
||||
continue
|
||||
}
|
||||
url, err := agent.GetArtistURL(ctx, id, name, mbid)
|
||||
url, err := retriever.GetArtistURL(ctx, id, name, mbid)
|
||||
if url != "" && err == nil {
|
||||
log.Debug(ctx, "Got External Url", "agent", ag.AgentName(), "artist", name, "url", url, "elapsed", time.Since(start))
|
||||
return url, nil
|
||||
@@ -99,15 +187,19 @@ func (a *Agents) GetArtistBiography(ctx context.Context, id, name, mbid string)
|
||||
return "", nil
|
||||
}
|
||||
start := time.Now()
|
||||
for _, ag := range a.agents {
|
||||
for _, enabledAgent := range a.getEnabledAgentNames() {
|
||||
ag := a.getAgent(enabledAgent)
|
||||
if ag == nil {
|
||||
continue
|
||||
}
|
||||
if utils.IsCtxDone(ctx) {
|
||||
break
|
||||
}
|
||||
agent, ok := ag.(ArtistBiographyRetriever)
|
||||
retriever, ok := ag.(ArtistBiographyRetriever)
|
||||
if !ok {
|
||||
continue
|
||||
}
|
||||
bio, err := agent.GetArtistBiography(ctx, id, name, mbid)
|
||||
bio, err := retriever.GetArtistBiography(ctx, id, name, mbid)
|
||||
if err == nil {
|
||||
log.Debug(ctx, "Got Biography", "agent", ag.AgentName(), "artist", name, "len", len(bio), "elapsed", time.Since(start))
|
||||
return bio, nil
|
||||
@@ -116,6 +208,8 @@ func (a *Agents) GetArtistBiography(ctx context.Context, id, name, mbid string)
|
||||
return "", ErrNotFound
|
||||
}
|
||||
|
||||
// GetSimilarArtists returns similar artists by id, name, and/or mbid. Because some artists returned from an enabled
|
||||
// agent may not exist in the database, return at most limit * conf.Server.DevExternalArtistFetchMultiplier items.
|
||||
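// For example, with limit = 20 and the default DevExternalArtistFetchMultiplier of 1.5,
// at most int(20*1.5) = 30 similar artists are requested from each agent.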
func (a *Agents) GetSimilarArtists(ctx context.Context, id, name, mbid string, limit int) ([]Artist, error) {
|
||||
switch id {
|
||||
case consts.UnknownArtistID:
|
||||
@@ -123,16 +217,23 @@ func (a *Agents) GetSimilarArtists(ctx context.Context, id, name, mbid string, l
|
||||
case consts.VariousArtistsID:
|
||||
return nil, nil
|
||||
}
|
||||
|
||||
overLimit := int(float64(limit) * conf.Server.DevExternalArtistFetchMultiplier)
|
||||
|
||||
start := time.Now()
|
||||
for _, ag := range a.agents {
|
||||
for _, enabledAgent := range a.getEnabledAgentNames() {
|
||||
ag := a.getAgent(enabledAgent)
|
||||
if ag == nil {
|
||||
continue
|
||||
}
|
||||
if utils.IsCtxDone(ctx) {
|
||||
break
|
||||
}
|
||||
agent, ok := ag.(ArtistSimilarRetriever)
|
||||
retriever, ok := ag.(ArtistSimilarRetriever)
|
||||
if !ok {
|
||||
continue
|
||||
}
|
||||
similar, err := agent.GetSimilarArtists(ctx, id, name, mbid, limit)
|
||||
similar, err := retriever.GetSimilarArtists(ctx, id, name, mbid, overLimit)
|
||||
if len(similar) > 0 && err == nil {
|
||||
if log.IsGreaterOrEqualTo(log.LevelTrace) {
|
||||
log.Debug(ctx, "Got Similar Artists", "agent", ag.AgentName(), "artist", name, "similar", similar, "elapsed", time.Since(start))
|
||||
@@ -153,15 +254,19 @@ func (a *Agents) GetArtistImages(ctx context.Context, id, name, mbid string) ([]
|
||||
return nil, nil
|
||||
}
|
||||
start := time.Now()
|
||||
for _, ag := range a.agents {
|
||||
for _, enabledAgent := range a.getEnabledAgentNames() {
|
||||
ag := a.getAgent(enabledAgent)
|
||||
if ag == nil {
|
||||
continue
|
||||
}
|
||||
if utils.IsCtxDone(ctx) {
|
||||
break
|
||||
}
|
||||
agent, ok := ag.(ArtistImageRetriever)
|
||||
retriever, ok := ag.(ArtistImageRetriever)
|
||||
if !ok {
|
||||
continue
|
||||
}
|
||||
images, err := agent.GetArtistImages(ctx, id, name, mbid)
|
||||
images, err := retriever.GetArtistImages(ctx, id, name, mbid)
|
||||
if len(images) > 0 && err == nil {
|
||||
log.Debug(ctx, "Got Images", "agent", ag.AgentName(), "artist", name, "images", images, "elapsed", time.Since(start))
|
||||
return images, nil
|
||||
@@ -170,6 +275,8 @@ func (a *Agents) GetArtistImages(ctx context.Context, id, name, mbid string) ([]
|
||||
return nil, ErrNotFound
|
||||
}
|
||||
|
||||
// GetArtistTopSongs returns top songs by id, name, and/or mbid. Because some songs returned from an enabled
|
||||
// agent may not exist in the database, return at most limit * conf.Server.DevExternalArtistFetchMultiplier items.
|
||||
func (a *Agents) GetArtistTopSongs(ctx context.Context, id, artistName, mbid string, count int) ([]Song, error) {
|
||||
switch id {
|
||||
case consts.UnknownArtistID:
|
||||
@@ -177,16 +284,23 @@ func (a *Agents) GetArtistTopSongs(ctx context.Context, id, artistName, mbid str
|
||||
case consts.VariousArtistsID:
|
||||
return nil, nil
|
||||
}
|
||||
|
||||
overLimit := int(float64(count) * conf.Server.DevExternalArtistFetchMultiplier)
|
||||
|
||||
start := time.Now()
|
||||
for _, ag := range a.agents {
|
||||
for _, enabledAgent := range a.getEnabledAgentNames() {
|
||||
ag := a.getAgent(enabledAgent)
|
||||
if ag == nil {
|
||||
continue
|
||||
}
|
||||
if utils.IsCtxDone(ctx) {
|
||||
break
|
||||
}
|
||||
agent, ok := ag.(ArtistTopSongsRetriever)
|
||||
retriever, ok := ag.(ArtistTopSongsRetriever)
|
||||
if !ok {
|
||||
continue
|
||||
}
|
||||
songs, err := agent.GetArtistTopSongs(ctx, id, artistName, mbid, count)
|
||||
songs, err := retriever.GetArtistTopSongs(ctx, id, artistName, mbid, overLimit)
|
||||
if len(songs) > 0 && err == nil {
|
||||
log.Debug(ctx, "Got Top Songs", "agent", ag.AgentName(), "artist", artistName, "songs", songs, "elapsed", time.Since(start))
|
||||
return songs, nil
|
||||
@@ -200,15 +314,19 @@ func (a *Agents) GetAlbumInfo(ctx context.Context, name, artist, mbid string) (*
|
||||
return nil, ErrNotFound
|
||||
}
|
||||
start := time.Now()
|
||||
for _, ag := range a.agents {
|
||||
for _, enabledAgent := range a.getEnabledAgentNames() {
|
||||
ag := a.getAgent(enabledAgent)
|
||||
if ag == nil {
|
||||
continue
|
||||
}
|
||||
if utils.IsCtxDone(ctx) {
|
||||
break
|
||||
}
|
||||
agent, ok := ag.(AlbumInfoRetriever)
|
||||
retriever, ok := ag.(AlbumInfoRetriever)
|
||||
if !ok {
|
||||
continue
|
||||
}
|
||||
album, err := agent.GetAlbumInfo(ctx, name, artist, mbid)
|
||||
album, err := retriever.GetAlbumInfo(ctx, name, artist, mbid)
|
||||
if err == nil {
|
||||
log.Debug(ctx, "Got Album Info", "agent", ag.AgentName(), "album", name, "artist", artist,
|
||||
"mbid", mbid, "elapsed", time.Since(start))
|
||||
@@ -218,6 +336,33 @@ func (a *Agents) GetAlbumInfo(ctx context.Context, name, artist, mbid string) (*
|
||||
return nil, ErrNotFound
|
||||
}
|
||||
|
||||
func (a *Agents) GetAlbumImages(ctx context.Context, name, artist, mbid string) ([]ExternalImage, error) {
|
||||
if name == consts.UnknownAlbum {
|
||||
return nil, ErrNotFound
|
||||
}
|
||||
start := time.Now()
|
||||
for _, enabledAgent := range a.getEnabledAgentNames() {
|
||||
ag := a.getAgent(enabledAgent)
|
||||
if ag == nil {
|
||||
continue
|
||||
}
|
||||
if utils.IsCtxDone(ctx) {
|
||||
break
|
||||
}
|
||||
retriever, ok := ag.(AlbumImageRetriever)
|
||||
if !ok {
|
||||
continue
|
||||
}
|
||||
images, err := retriever.GetAlbumImages(ctx, name, artist, mbid)
|
||||
if len(images) > 0 && err == nil {
|
||||
log.Debug(ctx, "Got Album Images", "agent", ag.AgentName(), "album", name, "artist", artist,
|
||||
"mbid", mbid, "elapsed", time.Since(start))
|
||||
return images, nil
|
||||
}
|
||||
}
|
||||
return nil, ErrNotFound
|
||||
}
|
||||
|
||||
var _ Interface = (*Agents)(nil)
|
||||
var _ ArtistMBIDRetriever = (*Agents)(nil)
|
||||
var _ ArtistURLRetriever = (*Agents)(nil)
|
||||
@@ -226,3 +371,4 @@ var _ ArtistSimilarRetriever = (*Agents)(nil)
|
||||
var _ ArtistImageRetriever = (*Agents)(nil)
|
||||
var _ ArtistTopSongsRetriever = (*Agents)(nil)
|
||||
var _ AlbumInfoRetriever = (*Agents)(nil)
|
||||
var _ AlbumImageRetriever = (*Agents)(nil)
|
||||
|
||||
281
core/agents/agents_plugin_test.go
Normal file
@@ -0,0 +1,281 @@
|
||||
package agents
|
||||
|
||||
import (
|
||||
"context"
|
||||
|
||||
"github.com/navidrome/navidrome/conf"
|
||||
"github.com/navidrome/navidrome/model"
|
||||
"github.com/navidrome/navidrome/utils/slice"
|
||||
. "github.com/onsi/ginkgo/v2"
|
||||
. "github.com/onsi/gomega"
|
||||
)
|
||||
|
||||
// MockPluginLoader implements PluginLoader for testing
|
||||
type MockPluginLoader struct {
|
||||
pluginNames []string
|
||||
loadedAgents map[string]*MockAgent
|
||||
pluginCallCount map[string]int
|
||||
}
|
||||
|
||||
func NewMockPluginLoader() *MockPluginLoader {
|
||||
return &MockPluginLoader{
|
||||
pluginNames: []string{},
|
||||
loadedAgents: make(map[string]*MockAgent),
|
||||
pluginCallCount: make(map[string]int),
|
||||
}
|
||||
}
|
||||
|
||||
func (m *MockPluginLoader) PluginNames(serviceName string) []string {
|
||||
return m.pluginNames
|
||||
}
|
||||
|
||||
func (m *MockPluginLoader) LoadMediaAgent(name string) (Interface, bool) {
|
||||
m.pluginCallCount[name]++
|
||||
agent, exists := m.loadedAgents[name]
|
||||
return agent, exists
|
||||
}
|
||||
|
||||
// MockAgent is a mock agent implementation for testing
|
||||
type MockAgent struct {
|
||||
name string
|
||||
mbid string
|
||||
}
|
||||
|
||||
func (m *MockAgent) AgentName() string {
|
||||
return m.name
|
||||
}
|
||||
|
||||
func (m *MockAgent) GetArtistMBID(ctx context.Context, id string, name string) (string, error) {
|
||||
return m.mbid, nil
|
||||
}
|
||||
|
||||
var _ Interface = (*MockAgent)(nil)
|
||||
var _ ArtistMBIDRetriever = (*MockAgent)(nil)
|
||||
|
||||
var _ PluginLoader = (*MockPluginLoader)(nil)
|
||||
|
||||
var _ = Describe("Agents with Plugin Loading", func() {
|
||||
var mockLoader *MockPluginLoader
|
||||
var agents *Agents
|
||||
|
||||
BeforeEach(func() {
|
||||
mockLoader = NewMockPluginLoader()
|
||||
|
||||
// Create the agents instance with our mock loader
|
||||
agents = createAgents(nil, mockLoader)
|
||||
})
|
||||
|
||||
Context("Dynamic agent discovery", func() {
|
||||
It("should include ONLY local agent when no config is specified", func() {
|
||||
// Ensure no specific agents are configured
|
||||
conf.Server.Agents = ""
|
||||
|
||||
// Add some plugin agents that should be ignored
|
||||
mockLoader.pluginNames = append(mockLoader.pluginNames, "plugin_agent", "another_plugin")
|
||||
|
||||
// Should only include the local agent
|
||||
enabledAgents := agents.getEnabledAgentNames()
|
||||
Expect(enabledAgents).To(HaveLen(1))
|
||||
Expect(enabledAgents[0].name).To(Equal(LocalAgentName))
|
||||
Expect(enabledAgents[0].isPlugin).To(BeFalse()) // LocalAgent is built-in, not plugin
|
||||
})
|
||||
|
||||
It("should NOT include plugin agents when no config is specified", func() {
|
||||
// Ensure no specific agents are configured
|
||||
conf.Server.Agents = ""
|
||||
|
||||
// Add a plugin agent
|
||||
mockLoader.pluginNames = append(mockLoader.pluginNames, "plugin_agent")
|
||||
|
||||
// Should only include the local agent
|
||||
enabledAgents := agents.getEnabledAgentNames()
|
||||
Expect(enabledAgents).To(HaveLen(1))
|
||||
Expect(enabledAgents[0].name).To(Equal(LocalAgentName))
|
||||
Expect(enabledAgents[0].isPlugin).To(BeFalse()) // LocalAgent is built-in, not plugin
|
||||
})
|
||||
|
||||
It("should include plugin agents in the enabled agents list ONLY when explicitly configured", func() {
|
||||
// Add a plugin agent
|
||||
mockLoader.pluginNames = append(mockLoader.pluginNames, "plugin_agent")
|
||||
|
||||
// With no config, should not include plugin
|
||||
conf.Server.Agents = ""
|
||||
enabledAgents := agents.getEnabledAgentNames()
|
||||
Expect(enabledAgents).To(HaveLen(1))
|
||||
Expect(enabledAgents[0].name).To(Equal(LocalAgentName))
|
||||
|
||||
// When explicitly configured, should include plugin
|
||||
conf.Server.Agents = "plugin_agent"
|
||||
enabledAgents = agents.getEnabledAgentNames()
|
||||
var agentNames []string
|
||||
var pluginAgentFound bool
|
||||
for _, agent := range enabledAgents {
|
||||
agentNames = append(agentNames, agent.name)
|
||||
if agent.name == "plugin_agent" {
|
||||
pluginAgentFound = true
|
||||
Expect(agent.isPlugin).To(BeTrue()) // plugin_agent is a plugin
|
||||
}
|
||||
}
|
||||
Expect(agentNames).To(ContainElements(LocalAgentName, "plugin_agent"))
|
||||
Expect(pluginAgentFound).To(BeTrue())
|
||||
})
|
||||
|
||||
It("should only include configured plugin agents when config is specified", func() {
|
||||
// Add two plugin agents
|
||||
mockLoader.pluginNames = append(mockLoader.pluginNames, "plugin_one", "plugin_two")
|
||||
|
||||
// Configure only one of them
|
||||
conf.Server.Agents = "plugin_one"
|
||||
|
||||
// Verify only the configured one is included
|
||||
enabledAgents := agents.getEnabledAgentNames()
|
||||
var agentNames []string
|
||||
var pluginOneFound bool
|
||||
for _, agent := range enabledAgents {
|
||||
agentNames = append(agentNames, agent.name)
|
||||
if agent.name == "plugin_one" {
|
||||
pluginOneFound = true
|
||||
Expect(agent.isPlugin).To(BeTrue()) // plugin_one is a plugin
|
||||
}
|
||||
}
|
||||
Expect(agentNames).To(ContainElements(LocalAgentName, "plugin_one"))
|
||||
Expect(agentNames).NotTo(ContainElement("plugin_two"))
|
||||
Expect(pluginOneFound).To(BeTrue())
|
||||
})
|
||||
|
||||
It("should load plugin agents on demand", func() {
|
||||
ctx := context.Background()
|
||||
|
||||
// Configure to use our plugin
|
||||
conf.Server.Agents = "plugin_agent"
|
||||
|
||||
// Add a plugin agent
|
||||
mockLoader.pluginNames = append(mockLoader.pluginNames, "plugin_agent")
|
||||
mockLoader.loadedAgents["plugin_agent"] = &MockAgent{
|
||||
name: "plugin_agent",
|
||||
mbid: "plugin-mbid",
|
||||
}
|
||||
|
||||
// Try to get data from it
|
||||
mbid, err := agents.GetArtistMBID(ctx, "123", "Artist")
|
||||
|
||||
Expect(err).ToNot(HaveOccurred())
|
||||
Expect(mbid).To(Equal("plugin-mbid"))
|
||||
Expect(mockLoader.pluginCallCount["plugin_agent"]).To(Equal(1))
|
||||
})
|
||||
|
||||
It("should try both built-in and plugin agents", func() {
|
||||
// Create a mock built-in agent
|
||||
Register("built_in", func(ds model.DataStore) Interface {
|
||||
return &MockAgent{
|
||||
name: "built_in",
|
||||
mbid: "built-in-mbid",
|
||||
}
|
||||
})
|
||||
defer func() {
|
||||
delete(Map, "built_in")
|
||||
}()
|
||||
|
||||
// Configure to use both built-in and plugin
|
||||
conf.Server.Agents = "built_in,plugin_agent"
|
||||
|
||||
// Add a plugin agent
|
||||
mockLoader.pluginNames = append(mockLoader.pluginNames, "plugin_agent")
|
||||
mockLoader.loadedAgents["plugin_agent"] = &MockAgent{
|
||||
name: "plugin_agent",
|
||||
mbid: "plugin-mbid",
|
||||
}
|
||||
|
||||
// Verify that both are in the enabled list
|
||||
enabledAgents := agents.getEnabledAgentNames()
|
||||
var agentNames []string
|
||||
var builtInFound, pluginFound bool
|
||||
for _, agent := range enabledAgents {
|
||||
agentNames = append(agentNames, agent.name)
|
||||
if agent.name == "built_in" {
|
||||
builtInFound = true
|
||||
Expect(agent.isPlugin).To(BeFalse()) // built-in agent
|
||||
}
|
||||
if agent.name == "plugin_agent" {
|
||||
pluginFound = true
|
||||
Expect(agent.isPlugin).To(BeTrue()) // plugin agent
|
||||
}
|
||||
}
|
||||
Expect(agentNames).To(ContainElements("built_in", "plugin_agent", LocalAgentName))
|
||||
Expect(builtInFound).To(BeTrue())
|
||||
Expect(pluginFound).To(BeTrue())
|
||||
})
|
||||
|
||||
It("should respect the order specified in configuration", func() {
|
||||
// Create mock built-in agents
|
||||
Register("agent_a", func(ds model.DataStore) Interface {
|
||||
return &MockAgent{name: "agent_a"}
|
||||
})
|
||||
Register("agent_b", func(ds model.DataStore) Interface {
|
||||
return &MockAgent{name: "agent_b"}
|
||||
})
|
||||
defer func() {
|
||||
delete(Map, "agent_a")
|
||||
delete(Map, "agent_b")
|
||||
}()
|
||||
|
||||
// Add plugin agents
|
||||
mockLoader.pluginNames = append(mockLoader.pluginNames, "plugin_x", "plugin_y")
|
||||
|
||||
// Configure specific order - plugin first, then built-ins
|
||||
conf.Server.Agents = "plugin_y,agent_b,plugin_x,agent_a"
|
||||
|
||||
// Get the agent names
|
||||
enabledAgents := agents.getEnabledAgentNames()
|
||||
|
||||
// Extract just the names to verify the order
|
||||
agentNames := slice.Map(enabledAgents, func(a enabledAgent) string { return a.name })
|
||||
|
||||
// Verify the order matches configuration, with LocalAgentName at the end
|
||||
Expect(agentNames).To(HaveExactElements("plugin_y", "agent_b", "plugin_x", "agent_a", LocalAgentName))
|
||||
})
|
||||
|
||||
It("should NOT call LoadMediaAgent for built-in agents", func() {
|
||||
ctx := context.Background()
|
||||
|
||||
// Create a mock built-in agent
|
||||
Register("builtin_agent", func(ds model.DataStore) Interface {
|
||||
return &MockAgent{
|
||||
name: "builtin_agent",
|
||||
mbid: "builtin-mbid",
|
||||
}
|
||||
})
|
||||
defer func() {
|
||||
delete(Map, "builtin_agent")
|
||||
}()
|
||||
|
||||
// Configure to use only built-in agents
|
||||
conf.Server.Agents = "builtin_agent"
|
||||
|
||||
// Call GetArtistMBID which should only use the built-in agent
|
||||
mbid, err := agents.GetArtistMBID(ctx, "123", "Artist")
|
||||
|
||||
Expect(err).ToNot(HaveOccurred())
|
||||
Expect(mbid).To(Equal("builtin-mbid"))
|
||||
|
||||
// Verify LoadMediaAgent was NEVER called (no plugin loading for built-in agents)
|
||||
Expect(mockLoader.pluginCallCount).To(BeEmpty())
|
||||
})
|
||||
|
||||
It("should NOT call LoadMediaAgent for invalid agent names", func() {
|
||||
ctx := context.Background()
|
||||
|
||||
// Configure with an invalid agent name (not built-in, not a plugin)
|
||||
conf.Server.Agents = "invalid_agent"
|
||||
|
||||
// This should only result in using the local agent (as the invalid one is ignored)
|
||||
_, err := agents.GetArtistMBID(ctx, "123", "Artist")
|
||||
|
||||
// Should get ErrNotFound since only local agent is available and it returns not found for this operation
|
||||
Expect(err).To(MatchError(ErrNotFound))
|
||||
|
||||
// Verify LoadMediaAgent was NEVER called for the invalid agent
|
||||
Expect(mockLoader.pluginCallCount).To(BeEmpty())
|
||||
})
|
||||
})
|
||||
})
|
||||
@@ -4,6 +4,7 @@ import (
|
||||
"context"
|
||||
"errors"
|
||||
|
||||
"github.com/navidrome/navidrome/conf/configtest"
|
||||
"github.com/navidrome/navidrome/consts"
|
||||
"github.com/navidrome/navidrome/model"
|
||||
"github.com/navidrome/navidrome/tests"
|
||||
@@ -19,6 +20,7 @@ var _ = Describe("Agents", func() {
|
||||
var ds model.DataStore
|
||||
var mfRepo *tests.MockMediaFileRepo
|
||||
BeforeEach(func() {
|
||||
DeferCleanup(configtest.SetupConfig())
|
||||
ctx, cancel = context.WithCancel(context.Background())
|
||||
mfRepo = tests.CreateMockMediaFileRepo()
|
||||
ds = &tests.MockDataStore{MockedMediaFile: mfRepo}
|
||||
@@ -28,7 +30,7 @@ var _ = Describe("Agents", func() {
|
||||
var ag *Agents
|
||||
BeforeEach(func() {
|
||||
conf.Server.Agents = ""
|
||||
ag = New(ds)
|
||||
ag = createAgents(ds, nil)
|
||||
})
|
||||
|
||||
It("calls the placeholder GetArtistImages", func() {
|
||||
@@ -44,19 +46,27 @@ var _ = Describe("Agents", func() {
|
||||
var mock *mockAgent
|
||||
BeforeEach(func() {
|
||||
mock = &mockAgent{}
|
||||
Register("fake", func(ds model.DataStore) Interface {
|
||||
return mock
|
||||
})
|
||||
Register("empty", func(ds model.DataStore) Interface {
|
||||
return struct {
|
||||
Interface
|
||||
}{}
|
||||
})
|
||||
conf.Server.Agents = "empty,fake"
|
||||
ag = New(ds)
|
||||
Register("fake", func(model.DataStore) Interface { return mock })
|
||||
Register("disabled", func(model.DataStore) Interface { return nil })
|
||||
Register("empty", func(model.DataStore) Interface { return &emptyAgent{} })
|
||||
conf.Server.Agents = "empty,fake,disabled"
|
||||
ag = createAgents(ds, nil)
|
||||
Expect(ag.AgentName()).To(Equal("agents"))
|
||||
})
|
||||
|
||||
It("does not register disabled agents", func() {
|
||||
var ags []string
|
||||
for _, enabledAgent := range ag.getEnabledAgentNames() {
|
||||
agent := ag.getAgent(enabledAgent)
|
||||
if agent != nil {
|
||||
ags = append(ags, agent.AgentName())
|
||||
}
|
||||
}
|
||||
// local agent is always appended to the end of the agents list
|
||||
Expect(ags).To(HaveExactElements("empty", "fake", "local"))
|
||||
Expect(ags).ToNot(ContainElement("disabled"))
|
||||
})
|
||||
|
||||
Describe("GetArtistMBID", func() {
|
||||
It("returns on first match", func() {
|
||||
Expect(ag.GetArtistMBID(ctx, "123", "test")).To(Equal("mbid"))
|
||||
@@ -170,6 +180,42 @@ var _ = Describe("Agents", func() {
|
||||
Expect(err).To(MatchError(ErrNotFound))
|
||||
Expect(mock.Args).To(BeEmpty())
|
||||
})
|
||||
|
||||
Context("with multiple image agents", func() {
|
||||
var first *testImageAgent
|
||||
var second *testImageAgent
|
||||
|
||||
BeforeEach(func() {
|
||||
first = &testImageAgent{Name: "imgFail", Err: errors.New("fail")}
|
||||
second = &testImageAgent{Name: "imgOk", Images: []ExternalImage{{URL: "ok", Size: 1}}}
|
||||
Register("imgFail", func(model.DataStore) Interface { return first })
|
||||
Register("imgOk", func(model.DataStore) Interface { return second })
|
||||
})
|
||||
|
||||
It("falls back to the next agent on error", func() {
|
||||
conf.Server.Agents = "imgFail,imgOk"
|
||||
ag = createAgents(ds, nil)
|
||||
|
||||
images, err := ag.GetArtistImages(ctx, "id", "artist", "mbid")
|
||||
Expect(err).ToNot(HaveOccurred())
|
||||
Expect(images).To(Equal([]ExternalImage{{URL: "ok", Size: 1}}))
|
||||
Expect(first.Args).To(HaveExactElements("id", "artist", "mbid"))
|
||||
Expect(second.Args).To(HaveExactElements("id", "artist", "mbid"))
|
||||
})
|
||||
|
||||
It("falls back if the first agent returns no images", func() {
|
||||
first.Err = nil
|
||||
first.Images = []ExternalImage{}
|
||||
conf.Server.Agents = "imgFail,imgOk"
|
||||
ag = createAgents(ds, nil)
|
||||
|
||||
images, err := ag.GetArtistImages(ctx, "id", "artist", "mbid")
|
||||
Expect(err).ToNot(HaveOccurred())
|
||||
Expect(images).To(Equal([]ExternalImage{{URL: "ok", Size: 1}}))
|
||||
Expect(first.Args).To(HaveExactElements("id", "artist", "mbid"))
|
||||
Expect(second.Args).To(HaveExactElements("id", "artist", "mbid"))
|
||||
})
|
||||
})
|
||||
})
|
||||
|
||||
Describe("GetSimilarArtists", func() {
|
||||
@@ -196,6 +242,7 @@ var _ = Describe("Agents", func() {
|
||||
|
||||
Describe("GetArtistTopSongs", func() {
|
||||
It("returns on first match", func() {
|
||||
conf.Server.DevExternalArtistFetchMultiplier = 1
|
||||
Expect(ag.GetArtistTopSongs(ctx, "123", "test", "mb123", 2)).To(Equal([]Song{{
|
||||
Name: "A Song",
|
||||
MBID: "mbid444",
|
||||
@@ -203,6 +250,7 @@ var _ = Describe("Agents", func() {
|
||||
Expect(mock.Args).To(HaveExactElements("123", "test", "mb123", 2))
|
||||
})
|
||||
It("skips the agent if it returns an error", func() {
|
||||
conf.Server.DevExternalArtistFetchMultiplier = 1
|
||||
mock.Err = errors.New("error")
|
||||
_, err := ag.GetArtistTopSongs(ctx, "123", "test", "mb123", 2)
|
||||
Expect(err).To(MatchError(ErrNotFound))
|
||||
@@ -214,6 +262,14 @@ var _ = Describe("Agents", func() {
|
||||
Expect(err).To(MatchError(ErrNotFound))
|
||||
Expect(mock.Args).To(BeEmpty())
|
||||
})
|
||||
It("fetches with multiplier", func() {
|
||||
conf.Server.DevExternalArtistFetchMultiplier = 2
|
||||
Expect(ag.GetArtistTopSongs(ctx, "123", "test", "mb123", 2)).To(Equal([]Song{{
|
||||
Name: "A Song",
|
||||
MBID: "mbid444",
|
||||
}}))
|
||||
Expect(mock.Args).To(HaveExactElements("123", "test", "mb123", 4))
|
||||
})
|
||||
})
|
||||
|
||||
Describe("GetAlbumInfo", func() {
|
||||
@@ -223,18 +279,6 @@ var _ = Describe("Agents", func() {
|
||||
MBID: "mbid444",
|
||||
Description: "A Description",
|
||||
URL: "External URL",
|
||||
Images: []ExternalImage{
|
||||
{
|
||||
Size: 174,
|
||||
URL: "https://lastfm.freetls.fastly.net/i/u/174s/00000000000000000000000000000000.png",
|
||||
}, {
|
||||
Size: 64,
|
||||
URL: "https://lastfm.freetls.fastly.net/i/u/64s/00000000000000000000000000000000.png",
|
||||
}, {
|
||||
Size: 34,
|
||||
URL: "https://lastfm.freetls.fastly.net/i/u/34s/00000000000000000000000000000000.png",
|
||||
},
|
||||
},
|
||||
}))
|
||||
Expect(mock.Args).To(HaveExactElements("album", "artist", "mbid"))
|
||||
})
|
||||
@@ -330,17 +374,27 @@ func (a *mockAgent) GetAlbumInfo(ctx context.Context, name, artist, mbid string)
|
||||
MBID: "mbid444",
|
||||
Description: "A Description",
|
||||
URL: "External URL",
|
||||
Images: []ExternalImage{
|
||||
{
|
||||
Size: 174,
|
||||
URL: "https://lastfm.freetls.fastly.net/i/u/174s/00000000000000000000000000000000.png",
|
||||
}, {
|
||||
Size: 64,
|
||||
URL: "https://lastfm.freetls.fastly.net/i/u/64s/00000000000000000000000000000000.png",
|
||||
}, {
|
||||
Size: 34,
|
||||
URL: "https://lastfm.freetls.fastly.net/i/u/34s/00000000000000000000000000000000.png",
|
||||
},
|
||||
},
|
||||
}, nil
|
||||
}
|
||||
|
||||
type emptyAgent struct {
|
||||
Interface
|
||||
}
|
||||
|
||||
func (e *emptyAgent) AgentName() string {
|
||||
return "empty"
|
||||
}
|
||||
|
||||
type testImageAgent struct {
|
||||
Name string
|
||||
Images []ExternalImage
|
||||
Err error
|
||||
Args []interface{}
|
||||
}
|
||||
|
||||
func (t *testImageAgent) AgentName() string { return t.Name }
|
||||
|
||||
func (t *testImageAgent) GetArtistImages(_ context.Context, id, name, mbid string) ([]ExternalImage, error) {
|
||||
t.Args = []interface{}{id, name, mbid}
|
||||
return t.Images, t.Err
|
||||
}
|
||||
|
||||
83
core/agents/deezer/client.go
Normal file
@@ -0,0 +1,83 @@
|
||||
package deezer
|
||||
|
||||
import (
|
||||
"context"
|
||||
"encoding/json"
|
||||
"errors"
|
||||
"fmt"
|
||||
"io"
|
||||
"net/http"
|
||||
"net/url"
|
||||
"strconv"
|
||||
|
||||
"github.com/navidrome/navidrome/log"
|
||||
)
|
||||
|
||||
const apiBaseURL = "https://api.deezer.com"
|
||||
|
||||
var (
|
||||
ErrNotFound = errors.New("deezer: not found")
|
||||
)
|
||||
|
||||
type httpDoer interface {
|
||||
Do(req *http.Request) (*http.Response, error)
|
||||
}
|
||||
|
||||
type client struct {
|
||||
httpDoer httpDoer
|
||||
}
|
||||
|
||||
func newClient(hc httpDoer) *client {
|
||||
return &client{hc}
|
||||
}
|
||||
|
||||
func (c *client) searchArtists(ctx context.Context, name string, limit int) ([]Artist, error) {
|
||||
params := url.Values{}
|
||||
params.Add("q", name)
|
||||
params.Add("limit", strconv.Itoa(limit))
|
||||
req, err := http.NewRequestWithContext(ctx, "GET", apiBaseURL+"/search/artist", nil)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
req.URL.RawQuery = params.Encode()
|
||||
|
||||
var results SearchArtistResults
|
||||
err = c.makeRequest(req, &results)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
if len(results.Data) == 0 {
|
||||
return nil, ErrNotFound
|
||||
}
|
||||
return results.Data, nil
|
||||
}
|
||||
|
||||
func (c *client) makeRequest(req *http.Request, response interface{}) error {
|
||||
log.Trace(req.Context(), fmt.Sprintf("Sending Deezer %s request", req.Method), "url", req.URL)
|
||||
resp, err := c.httpDoer.Do(req)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
defer resp.Body.Close()
|
||||
data, err := io.ReadAll(resp.Body)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
if resp.StatusCode != 200 {
|
||||
return c.parseError(data)
|
||||
}
|
||||
|
||||
return json.Unmarshal(data, response)
|
||||
}
|
||||
|
||||
func (c *client) parseError(data []byte) error {
|
||||
var deezerError Error
|
||||
err := json.Unmarshal(data, &deezerError)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
return fmt.Errorf("deezer error(%d): %s", deezerError.Error.Code, deezerError.Error.Message)
|
||||
}
|
||||
68
core/agents/deezer/client_test.go
Normal file
@@ -0,0 +1,68 @@
|
||||
package deezer
|
||||
|
||||
import (
|
||||
"bytes"
|
||||
"context"
|
||||
"io"
|
||||
"net/http"
|
||||
"os"
|
||||
|
||||
. "github.com/onsi/ginkgo/v2"
|
||||
. "github.com/onsi/gomega"
|
||||
)
|
||||
|
||||
var _ = Describe("client", func() {
|
||||
var httpClient *fakeHttpClient
|
||||
var client *client
|
||||
|
||||
BeforeEach(func() {
|
||||
httpClient = &fakeHttpClient{}
|
||||
client = newClient(httpClient)
|
||||
})
|
||||
|
||||
Describe("ArtistImages", func() {
|
||||
It("returns artist images from a successful request", func() {
|
||||
f, err := os.Open("tests/fixtures/deezer.search.artist.json")
|
||||
Expect(err).To(BeNil())
|
||||
httpClient.mock("https://api.deezer.com/search/artist", http.Response{Body: f, StatusCode: 200})
|
||||
|
||||
artists, err := client.searchArtists(context.TODO(), "Michael Jackson", 20)
|
||||
Expect(err).To(BeNil())
|
||||
Expect(artists).To(HaveLen(17))
|
||||
Expect(artists[0].Name).To(Equal("Michael Jackson"))
|
||||
Expect(artists[0].PictureXl).To(Equal("https://cdn-images.dzcdn.net/images/artist/97fae13b2b30e4aec2e8c9e0c7839d92/1000x1000-000000-80-0-0.jpg"))
|
||||
})
|
||||
|
||||
It("fails if artist was not found", func() {
|
||||
httpClient.mock("https://api.deezer.com/search/artist", http.Response{
|
||||
StatusCode: 200,
|
||||
Body: io.NopCloser(bytes.NewBufferString(`{"data":[],"total":0}`)),
|
||||
})
|
||||
|
||||
_, err := client.searchArtists(context.TODO(), "Michael Jackson", 20)
|
||||
Expect(err).To(MatchError(ErrNotFound))
|
||||
})
|
||||
})
|
||||
})
|
||||
|
||||
type fakeHttpClient struct {
|
||||
responses map[string]*http.Response
|
||||
lastRequest *http.Request
|
||||
}
|
||||
|
||||
func (c *fakeHttpClient) mock(url string, response http.Response) {
|
||||
if c.responses == nil {
|
||||
c.responses = make(map[string]*http.Response)
|
||||
}
|
||||
c.responses[url] = &response
|
||||
}
|
||||
|
||||
func (c *fakeHttpClient) Do(req *http.Request) (*http.Response, error) {
|
||||
c.lastRequest = req
|
||||
u := req.URL
|
||||
u.RawQuery = ""
|
||||
if resp, ok := c.responses[u.String()]; ok {
|
||||
return resp, nil
|
||||
}
|
||||
panic("URL not mocked: " + u.String())
|
||||
}
|
||||
97
core/agents/deezer/deezer.go
Normal file
@@ -0,0 +1,97 @@
|
||||
package deezer
|
||||
|
||||
import (
|
||||
"context"
|
||||
"errors"
|
||||
"net/http"
|
||||
"strings"
|
||||
|
||||
"github.com/navidrome/navidrome/conf"
|
||||
"github.com/navidrome/navidrome/consts"
|
||||
"github.com/navidrome/navidrome/core/agents"
|
||||
"github.com/navidrome/navidrome/log"
|
||||
"github.com/navidrome/navidrome/model"
|
||||
"github.com/navidrome/navidrome/utils/cache"
|
||||
)
|
||||
|
||||
const deezerAgentName = "deezer"
|
||||
const deezerApiPictureXlSize = 1000
|
||||
const deezerApiPictureBigSize = 500
|
||||
const deezerApiPictureMediumSize = 250
|
||||
const deezerApiPictureSmallSize = 56
|
||||
const deezerArtistSearchLimit = 50
|
||||
|
||||
type deezerAgent struct {
|
||||
dataStore model.DataStore
|
||||
client *client
|
||||
}
|
||||
|
||||
func deezerConstructor(dataStore model.DataStore) agents.Interface {
|
||||
agent := &deezerAgent{dataStore: dataStore}
|
||||
httpClient := &http.Client{
|
||||
Timeout: consts.DefaultHttpClientTimeOut,
|
||||
}
|
||||
cachedHttpClient := cache.NewHTTPClient(httpClient, consts.DefaultHttpClientTimeOut)
|
||||
agent.client = newClient(cachedHttpClient)
|
||||
return agent
|
||||
}
|
||||
|
||||
func (s *deezerAgent) AgentName() string {
|
||||
return deezerAgentName
|
||||
}
|
||||
|
||||
func (s *deezerAgent) GetArtistImages(ctx context.Context, _, name, _ string) ([]agents.ExternalImage, error) {
|
||||
artist, err := s.searchArtist(ctx, name)
|
||||
if err != nil {
|
||||
if errors.Is(err, agents.ErrNotFound) {
|
||||
log.Warn(ctx, "Artist not found in deezer", "artist", name)
|
||||
} else {
|
||||
log.Error(ctx, "Error calling deezer", "artist", name, err)
|
||||
}
|
||||
return nil, err
|
||||
}
|
||||
|
||||
var res []agents.ExternalImage
|
||||
possibleImages := []struct {
|
||||
URL string
|
||||
Size int
|
||||
}{
|
||||
{artist.PictureXl, deezerApiPictureXlSize},
|
||||
{artist.PictureBig, deezerApiPictureBigSize},
|
||||
{artist.PictureMedium, deezerApiPictureMediumSize},
|
||||
{artist.PictureSmall, deezerApiPictureSmallSize},
|
||||
}
|
||||
for _, imgData := range possibleImages {
|
||||
if imgData.URL != "" {
|
||||
res = append(res, agents.ExternalImage{
|
||||
URL: imgData.URL,
|
||||
Size: imgData.Size,
|
||||
})
|
||||
}
|
||||
}
|
||||
return res, nil
|
||||
}
|
||||
|
||||
func (s *deezerAgent) searchArtist(ctx context.Context, name string) (*Artist, error) {
|
||||
artists, err := s.client.searchArtists(ctx, name, deezerArtistSearchLimit)
|
||||
if errors.Is(err, ErrNotFound) || len(artists) == 0 {
|
||||
return nil, agents.ErrNotFound
|
||||
}
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
// If the first one has the same name, that's the one
|
||||
if !strings.EqualFold(artists[0].Name, name) {
|
||||
return nil, agents.ErrNotFound
|
||||
}
|
||||
return &artists[0], err
|
||||
}
|
||||
|
||||
func init() {
|
||||
conf.AddHook(func() {
|
||||
if conf.Server.Deezer.Enabled {
|
||||
agents.Register(deezerAgentName, deezerConstructor)
|
||||
}
|
||||
})
|
||||
}
|
||||
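GetArtistImages returns one entry per non-empty picture URL, largest size first. How a consumer picks from that list is up to the caller; the following is a small self-contained sketch of one plausible strategy (choosing the size closest to a target), not Navidrome's actual selection code, and it assumes a non-empty slice:

package main

import "fmt"

// ExternalImage mirrors the shape returned by GetArtistImages: a URL plus its pixel size.
type ExternalImage struct {
	URL  string
	Size int
}

// closest returns the candidate whose size is nearest to want.
func closest(images []ExternalImage, want int) ExternalImage {
	best := images[0]
	for _, img := range images[1:] {
		if abs(img.Size-want) < abs(best.Size-want) {
			best = img
		}
	}
	return best
}

func abs(n int) int {
	if n < 0 {
		return -n
	}
	return n
}

func main() {
	imgs := []ExternalImage{{"xl.jpg", 1000}, {"big.jpg", 500}, {"md.jpg", 250}, {"sm.jpg", 56}}
	fmt.Println(closest(imgs, 300).URL) // md.jpg
}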
core/agents/deezer/deezer_suite_test.go (new file, 17 lines)
@@ -0,0 +1,17 @@
package deezer

import (
	"testing"

	"github.com/navidrome/navidrome/log"
	"github.com/navidrome/navidrome/tests"
	. "github.com/onsi/ginkgo/v2"
	. "github.com/onsi/gomega"
)

func TestDeezer(t *testing.T) {
	tests.Init(t, false)
	log.SetLevel(log.LevelFatal)
	RegisterFailHandler(Fail)
	RunSpecs(t, "Deezer Test Suite")
}
core/agents/deezer/responses.go (new file, 31 lines)
@@ -0,0 +1,31 @@
package deezer

type SearchArtistResults struct {
	Data  []Artist `json:"data"`
	Total int      `json:"total"`
	Next  string   `json:"next"`
}

type Artist struct {
	ID            int    `json:"id"`
	Name          string `json:"name"`
	Link          string `json:"link"`
	Picture       string `json:"picture"`
	PictureSmall  string `json:"picture_small"`
	PictureMedium string `json:"picture_medium"`
	PictureBig    string `json:"picture_big"`
	PictureXl     string `json:"picture_xl"`
	NbAlbum       int    `json:"nb_album"`
	NbFan         int    `json:"nb_fan"`
	Radio         bool   `json:"radio"`
	Tracklist     string `json:"tracklist"`
	Type          string `json:"type"`
}

type Error struct {
	Error struct {
		Type    string `json:"type"`
		Message string `json:"message"`
		Code    int    `json:"code"`
	} `json:"error"`
}
core/agents/deezer/responses_test.go (new file, 38 lines)
@@ -0,0 +1,38 @@
package deezer

import (
	"encoding/json"
	"os"

	. "github.com/onsi/ginkgo/v2"
	. "github.com/onsi/gomega"
)

var _ = Describe("Responses", func() {
	Describe("Search type=artist", func() {
		It("parses the artist search result correctly", func() {
			var resp SearchArtistResults
			body, err := os.ReadFile("tests/fixtures/deezer.search.artist.json")
			Expect(err).To(BeNil())
			err = json.Unmarshal(body, &resp)
			Expect(err).To(BeNil())

			Expect(resp.Data).To(HaveLen(17))
			michael := resp.Data[0]
			Expect(michael.Name).To(Equal("Michael Jackson"))
			Expect(michael.PictureXl).To(Equal("https://cdn-images.dzcdn.net/images/artist/97fae13b2b30e4aec2e8c9e0c7839d92/1000x1000-000000-80-0-0.jpg"))
		})
	})

	Describe("Error", func() {
		It("parses the error response correctly", func() {
			var errorResp Error
			body := []byte(`{"error":{"type":"MissingParameterException","message":"Missing parameters: q","code":501}}`)
			err := json.Unmarshal(body, &errorResp)
			Expect(err).To(BeNil())

			Expect(errorResp.Error.Code).To(Equal(501))
			Expect(errorResp.Error.Message).To(Equal("Missing parameters: q"))
		})
	})
})
@@ -13,12 +13,12 @@ type Interface interface {
	AgentName() string
}

// AlbumInfo contains album metadata (no images)
type AlbumInfo struct {
	Name        string
	MBID        string
	Description string
	URL         string
	Images      []ExternalImage
}

type Artist struct {
@@ -40,11 +40,16 @@ var (
	ErrNotFound = errors.New("not found")
)

// TODO Break up this interface in more specific methods, like artists
// AlbumInfoRetriever provides album info (no images)
type AlbumInfoRetriever interface {
	GetAlbumInfo(ctx context.Context, name, artist, mbid string) (*AlbumInfo, error)
}

// AlbumImageRetriever provides album images
type AlbumImageRetriever interface {
	GetAlbumImages(ctx context.Context, name, artist, mbid string) ([]ExternalImage, error)
}

type ArtistMBIDRetriever interface {
	GetArtistMBID(ctx context.Context, id string, name string) (string, error)
}
@@ -3,11 +3,14 @@ package lastfm
import (
	"context"
	"errors"
	"fmt"
	"net/http"
	"regexp"
	"strconv"
	"strings"
	"sync"

	"github.com/andybalholm/cascadia"
	"github.com/navidrome/navidrome/conf"
	"github.com/navidrome/navidrome/consts"
	"github.com/navidrome/navidrome/core/agents"
@@ -15,6 +18,7 @@ import (
	"github.com/navidrome/navidrome/log"
	"github.com/navidrome/navidrome/model"
	"github.com/navidrome/navidrome/utils/cache"
	"golang.org/x/net/html"
)

const (
@@ -28,15 +32,19 @@ var ignoredBiographies = []string{
}

type lastfmAgent struct {
	ds          model.DataStore
	sessionKeys *agents.SessionKeys
	apiKey      string
	secret      string
	lang        string
	client      *client
	ds           model.DataStore
	sessionKeys  *agents.SessionKeys
	apiKey       string
	secret       string
	lang         string
	client       *client
	getInfoMutex sync.Mutex
}

func lastFMConstructor(ds model.DataStore) *lastfmAgent {
	if !conf.Server.LastFM.Enabled || conf.Server.LastFM.ApiKey == "" || conf.Server.LastFM.Secret == "" {
		return nil
	}
	l := &lastfmAgent{
		ds:   ds,
		lang: conf.Server.LastFM.Language,
@@ -64,16 +72,23 @@ func (l *lastfmAgent) GetAlbumInfo(ctx context.Context, name, artist, mbid strin
		return nil, err
	}

	response := agents.AlbumInfo{
	return &agents.AlbumInfo{
		Name:        a.Name,
		MBID:        a.MBID,
		Description: a.Description.Summary,
		URL:         a.URL,
		Images:      make([]agents.ExternalImage, 0),
	}, nil
}

func (l *lastfmAgent) GetAlbumImages(ctx context.Context, name, artist, mbid string) ([]agents.ExternalImage, error) {
	a, err := l.callAlbumGetInfo(ctx, name, artist, mbid)
	if err != nil {
		return nil, err
	}

	// Last.fm can return duplicate sizes.
	seenSizes := map[int]bool{}
	images := make([]agents.ExternalImage, 0)

	// This assumes that Last.fm returns images with size small, medium, and large.
	// This is true as of December 29, 2022
@@ -84,27 +99,24 @@ func (l *lastfmAgent) GetAlbumInfo(ctx context.Context, name, artist, mbid strin
			log.Trace(ctx, "LastFM/albuminfo image URL does not match expected regex or is empty", "url", img.URL, "size", img.Size)
			continue
		}

		numericSize, err := strconv.Atoi(size[0][2:])
		if err != nil {
			log.Error(ctx, "LastFM/albuminfo image URL does not match expected regex", "url", img.URL, "size", img.Size, err)
			return nil, err
		} else {
			if _, exists := seenSizes[numericSize]; !exists {
				response.Images = append(response.Images, agents.ExternalImage{
					Size: numericSize,
					URL:  img.URL,
				})
				seenSizes[numericSize] = true
			}
		}
		if _, exists := seenSizes[numericSize]; !exists {
			images = append(images, agents.ExternalImage{
				Size: numericSize,
				URL:  img.URL,
			})
			seenSizes[numericSize] = true
		}
	}

	return &response, nil
	return images, nil
}

func (l *lastfmAgent) GetArtistMBID(ctx context.Context, id string, name string) (string, error) {
	a, err := l.callArtistGetInfo(ctx, name, "")
	a, err := l.callArtistGetInfo(ctx, name)
	if err != nil {
		return "", err
	}
@@ -115,7 +127,7 @@ func (l *lastfmAgent) GetArtistMBID(ctx context.Context, id string, name string)
}

func (l *lastfmAgent) GetArtistURL(ctx context.Context, id, name, mbid string) (string, error) {
	a, err := l.callArtistGetInfo(ctx, name, mbid)
	a, err := l.callArtistGetInfo(ctx, name)
	if err != nil {
		return "", err
	}
@@ -126,7 +138,7 @@ func (l *lastfmAgent) GetArtistURL(ctx context.Context, id, name, mbid string) (
}

func (l *lastfmAgent) GetArtistBiography(ctx context.Context, id, name, mbid string) (string, error) {
	a, err := l.callArtistGetInfo(ctx, name, mbid)
	a, err := l.callArtistGetInfo(ctx, name)
	if err != nil {
		return "", err
	}
@@ -143,7 +155,7 @@ func (l *lastfmAgent) GetArtistBiography(ctx context.Context, id, name, mbid str
}

func (l *lastfmAgent) GetSimilarArtists(ctx context.Context, id, name, mbid string, limit int) ([]agents.Artist, error) {
	resp, err := l.callArtistGetSimilar(ctx, name, mbid, limit)
	resp, err := l.callArtistGetSimilar(ctx, name, limit)
	if err != nil {
		return nil, err
	}
@@ -161,7 +173,7 @@ func (l *lastfmAgent) GetSimilarArtists(ctx context.Context, id, name, mbid stri
}

func (l *lastfmAgent) GetArtistTopSongs(ctx context.Context, id, artistName, mbid string, count int) ([]agents.Song, error) {
	resp, err := l.callArtistGetTopTracks(ctx, artistName, mbid, count)
	resp, err := l.callArtistGetTopTracks(ctx, artistName, count)
	if err != nil {
		return nil, err
	}
@@ -178,13 +190,55 @@ func (l *lastfmAgent) GetArtistTopSongs(ctx context.Context, id, artistName, mbi
	return res, nil
}

var artistOpenGraphQuery = cascadia.MustCompile(`html > head > meta[property="og:image"]`)

func (l *lastfmAgent) GetArtistImages(ctx context.Context, _, name, mbid string) ([]agents.ExternalImage, error) {
	log.Debug(ctx, "Getting artist images from Last.fm", "name", name)
	hc := http.Client{
		Timeout: consts.DefaultHttpClientTimeOut,
	}
	a, err := l.callArtistGetInfo(ctx, name)
	if err != nil {
		return nil, fmt.Errorf("get artist info: %w", err)
	}
	req, err := http.NewRequestWithContext(ctx, http.MethodGet, a.URL, nil)
	if err != nil {
		return nil, fmt.Errorf("create artist image request: %w", err)
	}
	resp, err := hc.Do(req)
	if err != nil {
		return nil, fmt.Errorf("get artist url: %w", err)
	}
	defer resp.Body.Close()

	node, err := html.Parse(resp.Body)
	if err != nil {
		return nil, fmt.Errorf("parse html: %w", err)
	}

	var res []agents.ExternalImage
	n := cascadia.Query(node, artistOpenGraphQuery)
	if n == nil {
		return res, nil
	}
	for _, attr := range n.Attr {
		if attr.Key == "content" {
			res = []agents.ExternalImage{
				{URL: attr.Val},
			}
			break
		}
	}
	return res, nil
}

func (l *lastfmAgent) callAlbumGetInfo(ctx context.Context, name, artist, mbid string) (*Album, error) {
	a, err := l.client.albumGetInfo(ctx, name, artist, mbid)
	var lfErr *lastFMError
	isLastFMError := errors.As(err, &lfErr)

	if mbid != "" && (isLastFMError && lfErr.Code == 6) {
		log.Warn(ctx, "LastFM/album.getInfo could not find album by mbid, trying again", "album", name, "mbid", mbid)
		log.Debug(ctx, "LastFM/album.getInfo could not find album by mbid, trying again", "album", name, "mbid", mbid)
		return l.callAlbumGetInfo(ctx, name, artist, "")
	}

@@ -199,61 +253,51 @@ func (l *lastfmAgent) callAlbumGetInfo(ctx context.Context, name, artist, mbid s
	return a, nil
}

func (l *lastfmAgent) callArtistGetInfo(ctx context.Context, name string, mbid string) (*Artist, error) {
	a, err := l.client.artistGetInfo(ctx, name, mbid)
	var lfErr *lastFMError
	isLastFMError := errors.As(err, &lfErr)

	if mbid != "" && ((err == nil && a.Name == "[unknown]") || (isLastFMError && lfErr.Code == 6)) {
		log.Warn(ctx, "LastFM/artist.getInfo could not find artist by mbid, trying again", "artist", name, "mbid", mbid)
		return l.callArtistGetInfo(ctx, name, "")
	}
func (l *lastfmAgent) callArtistGetInfo(ctx context.Context, name string) (*Artist, error) {
	l.getInfoMutex.Lock()
	defer l.getInfoMutex.Unlock()

	a, err := l.client.artistGetInfo(ctx, name)
	if err != nil {
		log.Error(ctx, "Error calling LastFM/artist.getInfo", "artist", name, "mbid", mbid, err)
		log.Error(ctx, "Error calling LastFM/artist.getInfo", "artist", name, err)
		return nil, err
	}
	return a, nil
}

func (l *lastfmAgent) callArtistGetSimilar(ctx context.Context, name string, mbid string, limit int) ([]Artist, error) {
	s, err := l.client.artistGetSimilar(ctx, name, mbid, limit)
	var lfErr *lastFMError
	isLastFMError := errors.As(err, &lfErr)
	if mbid != "" && ((err == nil && s.Attr.Artist == "[unknown]") || (isLastFMError && lfErr.Code == 6)) {
		log.Warn(ctx, "LastFM/artist.getSimilar could not find artist by mbid, trying again", "artist", name, "mbid", mbid)
		return l.callArtistGetSimilar(ctx, name, "", limit)
	}
func (l *lastfmAgent) callArtistGetSimilar(ctx context.Context, name string, limit int) ([]Artist, error) {
	s, err := l.client.artistGetSimilar(ctx, name, limit)
	if err != nil {
		log.Error(ctx, "Error calling LastFM/artist.getSimilar", "artist", name, "mbid", mbid, err)
		log.Error(ctx, "Error calling LastFM/artist.getSimilar", "artist", name, err)
		return nil, err
	}
	return s.Artists, nil
}

func (l *lastfmAgent) callArtistGetTopTracks(ctx context.Context, artistName, mbid string, count int) ([]Track, error) {
	t, err := l.client.artistGetTopTracks(ctx, artistName, mbid, count)
	var lfErr *lastFMError
	isLastFMError := errors.As(err, &lfErr)
	if mbid != "" && ((err == nil && t.Attr.Artist == "[unknown]") || (isLastFMError && lfErr.Code == 6)) {
		log.Warn(ctx, "LastFM/artist.getTopTracks could not find artist by mbid, trying again", "artist", artistName, "mbid", mbid)
		return l.callArtistGetTopTracks(ctx, artistName, "", count)
	}
func (l *lastfmAgent) callArtistGetTopTracks(ctx context.Context, artistName string, count int) ([]Track, error) {
	t, err := l.client.artistGetTopTracks(ctx, artistName, count)
	if err != nil {
		log.Error(ctx, "Error calling LastFM/artist.getTopTracks", "artist", artistName, "mbid", mbid, err)
		log.Error(ctx, "Error calling LastFM/artist.getTopTracks", "artist", artistName, err)
		return nil, err
	}
	return t.Track, nil
}

func (l *lastfmAgent) NowPlaying(ctx context.Context, userId string, track *model.MediaFile) error {
func (l *lastfmAgent) getArtistForScrobble(track *model.MediaFile) string {
	if conf.Server.LastFM.ScrobbleFirstArtistOnly && len(track.Participants[model.RoleArtist]) > 0 {
		return track.Participants[model.RoleArtist][0].Name
	}
	return track.Artist
}

func (l *lastfmAgent) NowPlaying(ctx context.Context, userId string, track *model.MediaFile, position int) error {
	sk, err := l.sessionKeys.Get(ctx, userId)
	if err != nil || sk == "" {
		return scrobbler.ErrNotAuthorized
	}

	err = l.client.updateNowPlaying(ctx, sk, ScrobbleInfo{
		artist:      track.Artist,
		artist:      l.getArtistForScrobble(track),
		track:       track.Title,
		album:       track.Album,
		trackNumber: track.TrackNumber,
@@ -263,7 +307,7 @@ func (l *lastfmAgent) NowPlaying(ctx context.Context, userId string, track *mode
	})
	if err != nil {
		log.Warn(ctx, "Last.fm client.updateNowPlaying returned error", "track", track.Title, err)
		return scrobbler.ErrUnrecoverable
		return errors.Join(err, scrobbler.ErrUnrecoverable)
	}
	return nil
}
@@ -271,7 +315,7 @@ func (l *lastfmAgent) NowPlaying(ctx context.Context, userId string, track *mode
func (l *lastfmAgent) Scrobble(ctx context.Context, userId string, s scrobbler.Scrobble) error {
	sk, err := l.sessionKeys.Get(ctx, userId)
	if err != nil || sk == "" {
		return scrobbler.ErrNotAuthorized
		return errors.Join(err, scrobbler.ErrNotAuthorized)
	}

	if s.Duration <= 30 {
@@ -279,7 +323,7 @@ func (l *lastfmAgent) Scrobble(ctx context.Context, userId string, s scrobbler.S
		return nil
	}
	err = l.client.scrobble(ctx, sk, ScrobbleInfo{
		artist:      s.Artist,
		artist:      l.getArtistForScrobble(&s.MediaFile),
		track:       s.Title,
		album:       s.Album,
		trackNumber: s.TrackNumber,
@@ -295,12 +339,12 @@ func (l *lastfmAgent) Scrobble(ctx context.Context, userId string, s scrobbler.S
	isLastFMError := errors.As(err, &lfErr)
	if !isLastFMError {
		log.Warn(ctx, "Last.fm client.scrobble returned error", "track", s.Title, err)
		return scrobbler.ErrRetryLater
		return errors.Join(err, scrobbler.ErrRetryLater)
	}
	if lfErr.Code == 11 || lfErr.Code == 16 {
		return scrobbler.ErrRetryLater
		return errors.Join(err, scrobbler.ErrRetryLater)
	}
	return scrobbler.ErrUnrecoverable
	return errors.Join(err, scrobbler.ErrUnrecoverable)
}

func (l *lastfmAgent) IsAuthorized(ctx context.Context, userId string) bool {
@@ -310,15 +354,23 @@ func (l *lastfmAgent) IsAuthorized(ctx context.Context, userId string) bool {

func init() {
	conf.AddHook(func() {
		if conf.Server.LastFM.Enabled {
			if conf.Server.LastFM.ApiKey != "" && conf.Server.LastFM.Secret != "" {
				agents.Register(lastFMAgentName, func(ds model.DataStore) agents.Interface {
					return lastFMConstructor(ds)
				})
				scrobbler.Register(lastFMAgentName, func(ds model.DataStore) scrobbler.Scrobbler {
					return lastFMConstructor(ds)
				})
		agents.Register(lastFMAgentName, func(ds model.DataStore) agents.Interface {
			// This is a workaround for the fact that a (Interface)(nil) is not the same as a (*lastfmAgent)(nil)
			// See https://go.dev/doc/faq#nil_error
			a := lastFMConstructor(ds)
			if a != nil {
				return a
			}
		}
			return nil
		})
		scrobbler.Register(lastFMAgentName, func(ds model.DataStore) scrobbler.Scrobbler {
			// Same as above - this is a workaround for the fact that a (Scrobbler)(nil) is not the same as a (*lastfmAgent)(nil)
			// See https://go.dev/doc/faq#nil_error
			a := lastFMConstructor(ds)
			if a != nil {
				return a
			}
			return nil
		})
	})
}
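The switch from returning the bare scrobbler sentinel to errors.Join(err, sentinel) keeps the sentinel matchable with errors.Is while also preserving the underlying cause for logging. A minimal sketch of that behaviour; ErrRetryLater here is a local stand-in, not the actual scrobbler package value:

package main

import (
	"errors"
	"fmt"
)

var ErrRetryLater = errors.New("retry later") // stand-in for scrobbler.ErrRetryLater

func scrobble() error {
	cause := errors.New("last.fm error(16): service temporarily unavailable")
	// Joining keeps both the sentinel (for control flow) and the cause (for logs).
	return errors.Join(cause, ErrRetryLater)
}

func main() {
	err := scrobble()
	fmt.Println(errors.Is(err, ErrRetryLater)) // true – callers can still branch on the sentinel
	fmt.Println(err)                           // both messages, joined by a newline
}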
@@ -11,6 +11,7 @@ import (
|
||||
"time"
|
||||
|
||||
"github.com/navidrome/navidrome/conf"
|
||||
"github.com/navidrome/navidrome/conf/configtest"
|
||||
"github.com/navidrome/navidrome/core/agents"
|
||||
"github.com/navidrome/navidrome/core/scrobbler"
|
||||
"github.com/navidrome/navidrome/model"
|
||||
@@ -30,16 +31,38 @@ var _ = Describe("lastfmAgent", func() {
|
||||
BeforeEach(func() {
|
||||
ds = &tests.MockDataStore{}
|
||||
ctx = context.Background()
|
||||
DeferCleanup(configtest.SetupConfig())
|
||||
conf.Server.LastFM.Enabled = true
|
||||
conf.Server.LastFM.ApiKey = "123"
|
||||
conf.Server.LastFM.Secret = "secret"
|
||||
})
|
||||
Describe("lastFMConstructor", func() {
|
||||
It("uses configured api key and language", func() {
|
||||
conf.Server.LastFM.ApiKey = "123"
|
||||
conf.Server.LastFM.Secret = "secret"
|
||||
conf.Server.LastFM.Language = "pt"
|
||||
agent := lastFMConstructor(ds)
|
||||
Expect(agent.apiKey).To(Equal("123"))
|
||||
Expect(agent.secret).To(Equal("secret"))
|
||||
Expect(agent.lang).To(Equal("pt"))
|
||||
When("Agent is properly configured", func() {
|
||||
It("uses configured api key and language", func() {
|
||||
conf.Server.LastFM.Language = "pt"
|
||||
agent := lastFMConstructor(ds)
|
||||
Expect(agent.apiKey).To(Equal("123"))
|
||||
Expect(agent.secret).To(Equal("secret"))
|
||||
Expect(agent.lang).To(Equal("pt"))
|
||||
})
|
||||
})
|
||||
When("Agent is disabled", func() {
|
||||
It("returns nil", func() {
|
||||
conf.Server.LastFM.Enabled = false
|
||||
Expect(lastFMConstructor(ds)).To(BeNil())
|
||||
})
|
||||
})
|
||||
When("ApiKey is empty", func() {
|
||||
It("returns nil", func() {
|
||||
conf.Server.LastFM.ApiKey = ""
|
||||
Expect(lastFMConstructor(ds)).To(BeNil())
|
||||
})
|
||||
})
|
||||
When("Secret is empty", func() {
|
||||
It("returns nil", func() {
|
||||
conf.Server.LastFM.Secret = ""
|
||||
Expect(lastFMConstructor(ds)).To(BeNil())
|
||||
})
|
||||
})
|
||||
})
|
||||
|
||||
@@ -56,48 +79,25 @@ var _ = Describe("lastfmAgent", func() {
|
||||
It("returns the biography", func() {
|
||||
f, _ := os.Open("tests/fixtures/lastfm.artist.getinfo.json")
|
||||
httpClient.Res = http.Response{Body: f, StatusCode: 200}
|
||||
Expect(agent.GetArtistBiography(ctx, "123", "U2", "mbid-1234")).To(Equal("U2 é uma das mais importantes bandas de rock de todos os tempos. Formada em 1976 em Dublin, composta por Bono (vocalista e guitarrista), The Edge (guitarrista, pianista e backing vocal), Adam Clayton (baixista), Larry Mullen, Jr. (baterista e percussionista).\n\nDesde a década de 80, U2 é uma das bandas mais populares no mundo. Seus shows são únicos e um verdadeiro festival de efeitos especiais, além de serem um dos que mais arrecadam anualmente. <a href=\"https://www.last.fm/music/U2\">Read more on Last.fm</a>"))
|
||||
Expect(agent.GetArtistBiography(ctx, "123", "U2", "")).To(Equal("U2 é uma das mais importantes bandas de rock de todos os tempos. Formada em 1976 em Dublin, composta por Bono (vocalista e guitarrista), The Edge (guitarrista, pianista e backing vocal), Adam Clayton (baixista), Larry Mullen, Jr. (baterista e percussionista).\n\nDesde a década de 80, U2 é uma das bandas mais populares no mundo. Seus shows são únicos e um verdadeiro festival de efeitos especiais, além de serem um dos que mais arrecadam anualmente. <a href=\"https://www.last.fm/music/U2\">Read more on Last.fm</a>"))
|
||||
Expect(httpClient.RequestCount).To(Equal(1))
|
||||
Expect(httpClient.SavedRequest.URL.Query().Get("mbid")).To(Equal("mbid-1234"))
|
||||
Expect(httpClient.SavedRequest.URL.Query().Get("artist")).To(Equal("U2"))
|
||||
})
|
||||
|
||||
It("returns an error if Last.fm call fails", func() {
|
||||
httpClient.Err = errors.New("error")
|
||||
_, err := agent.GetArtistBiography(ctx, "123", "U2", "mbid-1234")
|
||||
_, err := agent.GetArtistBiography(ctx, "123", "U2", "")
|
||||
Expect(err).To(HaveOccurred())
|
||||
Expect(httpClient.RequestCount).To(Equal(1))
|
||||
Expect(httpClient.SavedRequest.URL.Query().Get("mbid")).To(Equal("mbid-1234"))
|
||||
Expect(httpClient.SavedRequest.URL.Query().Get("artist")).To(Equal("U2"))
|
||||
})
|
||||
|
||||
It("returns an error if Last.fm call returns an error", func() {
|
||||
httpClient.Res = http.Response{Body: io.NopCloser(bytes.NewBufferString(lastfmError3)), StatusCode: 200}
|
||||
_, err := agent.GetArtistBiography(ctx, "123", "U2", "mbid-1234")
|
||||
Expect(err).To(HaveOccurred())
|
||||
Expect(httpClient.RequestCount).To(Equal(1))
|
||||
Expect(httpClient.SavedRequest.URL.Query().Get("mbid")).To(Equal("mbid-1234"))
|
||||
})
|
||||
|
||||
It("returns an error if Last.fm call returns an error 6 and mbid is empty", func() {
|
||||
httpClient.Res = http.Response{Body: io.NopCloser(bytes.NewBufferString(lastfmError6)), StatusCode: 200}
|
||||
_, err := agent.GetArtistBiography(ctx, "123", "U2", "")
|
||||
Expect(err).To(HaveOccurred())
|
||||
Expect(httpClient.RequestCount).To(Equal(1))
|
||||
})
|
||||
|
||||
Context("MBID non existent in Last.fm", func() {
|
||||
It("calls again when the response is artist == [unknown]", func() {
|
||||
f, _ := os.Open("tests/fixtures/lastfm.artist.getinfo.unknown.json")
|
||||
httpClient.Res = http.Response{Body: f, StatusCode: 200}
|
||||
_, _ = agent.GetArtistBiography(ctx, "123", "U2", "mbid-1234")
|
||||
Expect(httpClient.RequestCount).To(Equal(2))
|
||||
Expect(httpClient.SavedRequest.URL.Query().Get("mbid")).To(BeEmpty())
|
||||
})
|
||||
It("calls again when last.fm returns an error 6", func() {
|
||||
httpClient.Res = http.Response{Body: io.NopCloser(bytes.NewBufferString(lastfmError6)), StatusCode: 200}
|
||||
_, _ = agent.GetArtistBiography(ctx, "123", "U2", "mbid-1234")
|
||||
Expect(httpClient.RequestCount).To(Equal(2))
|
||||
Expect(httpClient.SavedRequest.URL.Query().Get("mbid")).To(BeEmpty())
|
||||
})
|
||||
Expect(httpClient.SavedRequest.URL.Query().Get("artist")).To(Equal("U2"))
|
||||
})
|
||||
})
|
||||
|
||||
@@ -114,51 +114,28 @@ var _ = Describe("lastfmAgent", func() {
|
||||
It("returns similar artists", func() {
|
||||
f, _ := os.Open("tests/fixtures/lastfm.artist.getsimilar.json")
|
||||
httpClient.Res = http.Response{Body: f, StatusCode: 200}
|
||||
Expect(agent.GetSimilarArtists(ctx, "123", "U2", "mbid-1234", 2)).To(Equal([]agents.Artist{
|
||||
Expect(agent.GetSimilarArtists(ctx, "123", "U2", "", 2)).To(Equal([]agents.Artist{
|
||||
{Name: "Passengers", MBID: "e110c11f-1c94-4471-a350-c38f46b29389"},
|
||||
{Name: "INXS", MBID: "481bf5f9-2e7c-4c44-b08a-05b32bc7c00d"},
|
||||
}))
|
||||
Expect(httpClient.RequestCount).To(Equal(1))
|
||||
Expect(httpClient.SavedRequest.URL.Query().Get("mbid")).To(Equal("mbid-1234"))
|
||||
Expect(httpClient.SavedRequest.URL.Query().Get("artist")).To(Equal("U2"))
|
||||
})
|
||||
|
||||
It("returns an error if Last.fm call fails", func() {
|
||||
httpClient.Err = errors.New("error")
|
||||
_, err := agent.GetSimilarArtists(ctx, "123", "U2", "mbid-1234", 2)
|
||||
_, err := agent.GetSimilarArtists(ctx, "123", "U2", "", 2)
|
||||
Expect(err).To(HaveOccurred())
|
||||
Expect(httpClient.RequestCount).To(Equal(1))
|
||||
Expect(httpClient.SavedRequest.URL.Query().Get("mbid")).To(Equal("mbid-1234"))
|
||||
Expect(httpClient.SavedRequest.URL.Query().Get("artist")).To(Equal("U2"))
|
||||
})
|
||||
|
||||
It("returns an error if Last.fm call returns an error", func() {
|
||||
httpClient.Res = http.Response{Body: io.NopCloser(bytes.NewBufferString(lastfmError3)), StatusCode: 200}
|
||||
_, err := agent.GetSimilarArtists(ctx, "123", "U2", "mbid-1234", 2)
|
||||
Expect(err).To(HaveOccurred())
|
||||
Expect(httpClient.RequestCount).To(Equal(1))
|
||||
Expect(httpClient.SavedRequest.URL.Query().Get("mbid")).To(Equal("mbid-1234"))
|
||||
})
|
||||
|
||||
It("returns an error if Last.fm call returns an error 6 and mbid is empty", func() {
|
||||
httpClient.Res = http.Response{Body: io.NopCloser(bytes.NewBufferString(lastfmError6)), StatusCode: 200}
|
||||
_, err := agent.GetSimilarArtists(ctx, "123", "U2", "", 2)
|
||||
Expect(err).To(HaveOccurred())
|
||||
Expect(httpClient.RequestCount).To(Equal(1))
|
||||
})
|
||||
|
||||
Context("MBID non existent in Last.fm", func() {
|
||||
It("calls again when the response is artist == [unknown]", func() {
|
||||
f, _ := os.Open("tests/fixtures/lastfm.artist.getsimilar.unknown.json")
|
||||
httpClient.Res = http.Response{Body: f, StatusCode: 200}
|
||||
_, _ = agent.GetSimilarArtists(ctx, "123", "U2", "mbid-1234", 2)
|
||||
Expect(httpClient.RequestCount).To(Equal(2))
|
||||
Expect(httpClient.SavedRequest.URL.Query().Get("mbid")).To(BeEmpty())
|
||||
})
|
||||
It("calls again when last.fm returns an error 6", func() {
|
||||
httpClient.Res = http.Response{Body: io.NopCloser(bytes.NewBufferString(lastfmError6)), StatusCode: 200}
|
||||
_, _ = agent.GetSimilarArtists(ctx, "123", "U2", "mbid-1234", 2)
|
||||
Expect(httpClient.RequestCount).To(Equal(2))
|
||||
Expect(httpClient.SavedRequest.URL.Query().Get("mbid")).To(BeEmpty())
|
||||
})
|
||||
Expect(httpClient.SavedRequest.URL.Query().Get("artist")).To(Equal("U2"))
|
||||
})
|
||||
})
|
||||
|
||||
@@ -175,51 +152,28 @@ var _ = Describe("lastfmAgent", func() {
|
||||
It("returns top songs", func() {
|
||||
f, _ := os.Open("tests/fixtures/lastfm.artist.gettoptracks.json")
|
||||
httpClient.Res = http.Response{Body: f, StatusCode: 200}
|
||||
Expect(agent.GetArtistTopSongs(ctx, "123", "U2", "mbid-1234", 2)).To(Equal([]agents.Song{
|
||||
Expect(agent.GetArtistTopSongs(ctx, "123", "U2", "", 2)).To(Equal([]agents.Song{
|
||||
{Name: "Beautiful Day", MBID: "f7f264d0-a89b-4682-9cd7-a4e7c37637af"},
|
||||
{Name: "With or Without You", MBID: "6b9a509f-6907-4a6e-9345-2f12da09ba4b"},
|
||||
}))
|
||||
Expect(httpClient.RequestCount).To(Equal(1))
|
||||
Expect(httpClient.SavedRequest.URL.Query().Get("mbid")).To(Equal("mbid-1234"))
|
||||
Expect(httpClient.SavedRequest.URL.Query().Get("artist")).To(Equal("U2"))
|
||||
})
|
||||
|
||||
It("returns an error if Last.fm call fails", func() {
|
||||
httpClient.Err = errors.New("error")
|
||||
_, err := agent.GetArtistTopSongs(ctx, "123", "U2", "mbid-1234", 2)
|
||||
_, err := agent.GetArtistTopSongs(ctx, "123", "U2", "", 2)
|
||||
Expect(err).To(HaveOccurred())
|
||||
Expect(httpClient.RequestCount).To(Equal(1))
|
||||
Expect(httpClient.SavedRequest.URL.Query().Get("mbid")).To(Equal("mbid-1234"))
|
||||
Expect(httpClient.SavedRequest.URL.Query().Get("artist")).To(Equal("U2"))
|
||||
})
|
||||
|
||||
It("returns an error if Last.fm call returns an error", func() {
|
||||
httpClient.Res = http.Response{Body: io.NopCloser(bytes.NewBufferString(lastfmError3)), StatusCode: 200}
|
||||
_, err := agent.GetArtistTopSongs(ctx, "123", "U2", "mbid-1234", 2)
|
||||
Expect(err).To(HaveOccurred())
|
||||
Expect(httpClient.RequestCount).To(Equal(1))
|
||||
Expect(httpClient.SavedRequest.URL.Query().Get("mbid")).To(Equal("mbid-1234"))
|
||||
})
|
||||
|
||||
It("returns an error if Last.fm call returns an error 6 and mbid is empty", func() {
|
||||
httpClient.Res = http.Response{Body: io.NopCloser(bytes.NewBufferString(lastfmError6)), StatusCode: 200}
|
||||
_, err := agent.GetArtistTopSongs(ctx, "123", "U2", "", 2)
|
||||
Expect(err).To(HaveOccurred())
|
||||
Expect(httpClient.RequestCount).To(Equal(1))
|
||||
})
|
||||
|
||||
Context("MBID non existent in Last.fm", func() {
|
||||
It("calls again when the response is artist == [unknown]", func() {
|
||||
f, _ := os.Open("tests/fixtures/lastfm.artist.gettoptracks.unknown.json")
|
||||
httpClient.Res = http.Response{Body: f, StatusCode: 200}
|
||||
_, _ = agent.GetArtistTopSongs(ctx, "123", "U2", "mbid-1234", 2)
|
||||
Expect(httpClient.RequestCount).To(Equal(2))
|
||||
Expect(httpClient.SavedRequest.URL.Query().Get("mbid")).To(BeEmpty())
|
||||
})
|
||||
It("calls again when last.fm returns an error 6", func() {
|
||||
httpClient.Res = http.Response{Body: io.NopCloser(bytes.NewBufferString(lastfmError6)), StatusCode: 200}
|
||||
_, _ = agent.GetArtistTopSongs(ctx, "123", "U2", "mbid-1234", 2)
|
||||
Expect(httpClient.RequestCount).To(Equal(2))
|
||||
Expect(httpClient.SavedRequest.URL.Query().Get("mbid")).To(BeEmpty())
|
||||
})
|
||||
Expect(httpClient.SavedRequest.URL.Query().Get("artist")).To(Equal("U2"))
|
||||
})
|
||||
})
|
||||
|
||||
@@ -242,6 +196,12 @@ var _ = Describe("lastfmAgent", func() {
|
||||
TrackNumber: 1,
|
||||
Duration: 180,
|
||||
MbzRecordingID: "mbz-123",
|
||||
Participants: map[model.Role]model.ParticipantList{
|
||||
model.RoleArtist: []model.Participant{
|
||||
{Artist: model.Artist{ID: "ar-1", Name: "First Artist"}},
|
||||
{Artist: model.Artist{ID: "ar-2", Name: "Second Artist"}},
|
||||
},
|
||||
},
|
||||
}
|
||||
})
|
||||
|
||||
@@ -249,7 +209,7 @@ var _ = Describe("lastfmAgent", func() {
|
||||
It("calls Last.fm with correct params", func() {
|
||||
httpClient.Res = http.Response{Body: io.NopCloser(bytes.NewBufferString("{}")), StatusCode: 200}
|
||||
|
||||
err := agent.NowPlaying(ctx, "user-1", track)
|
||||
err := agent.NowPlaying(ctx, "user-1", track, 0)
|
||||
|
||||
Expect(err).ToNot(HaveOccurred())
|
||||
Expect(httpClient.SavedRequest.Method).To(Equal(http.MethodPost))
|
||||
@@ -266,7 +226,7 @@ var _ = Describe("lastfmAgent", func() {
|
||||
})
|
||||
|
||||
It("returns ErrNotAuthorized if user is not linked", func() {
|
||||
err := agent.NowPlaying(ctx, "user-2", track)
|
||||
err := agent.NowPlaying(ctx, "user-2", track, 0)
|
||||
Expect(err).To(MatchError(scrobbler.ErrNotAuthorized))
|
||||
})
|
||||
})
|
||||
@@ -293,6 +253,23 @@ var _ = Describe("lastfmAgent", func() {
|
||||
Expect(sentParams.Get("timestamp")).To(Equal(strconv.FormatInt(ts.Unix(), 10)))
|
||||
})
|
||||
|
||||
When("ScrobbleFirstArtistOnly is true", func() {
|
||||
BeforeEach(func() {
|
||||
conf.Server.LastFM.ScrobbleFirstArtistOnly = true
|
||||
})
|
||||
|
||||
It("uses only the first artist", func() {
|
||||
ts := time.Now()
|
||||
httpClient.Res = http.Response{Body: io.NopCloser(bytes.NewBufferString("{}")), StatusCode: 200}
|
||||
|
||||
err := agent.Scrobble(ctx, "user-1", scrobbler.Scrobble{MediaFile: *track, TimeStamp: ts})
|
||||
|
||||
Expect(err).ToNot(HaveOccurred())
|
||||
sentParams := httpClient.SavedRequest.URL.Query()
|
||||
Expect(sentParams.Get("artist")).To(Equal("First Artist"))
|
||||
})
|
||||
})
|
||||
|
||||
It("skips songs with less than 31 seconds", func() {
|
||||
track.Duration = 29
|
||||
httpClient.Res = http.Response{Body: io.NopCloser(bytes.NewBufferString("{}")), StatusCode: 200}
|
||||
@@ -368,24 +345,6 @@ var _ = Describe("lastfmAgent", func() {
|
||||
MBID: "03c91c40-49a6-44a7-90e7-a700edf97a62",
|
||||
Description: "Believe is the twenty-third studio album by American singer-actress Cher, released on November 10, 1998 by Warner Bros. Records. The RIAA certified it Quadruple Platinum on December 23, 1999, recognizing four million shipments in the United States; Worldwide, the album has sold more than 20 million copies, making it the biggest-selling album of her career. In 1999 the album received three Grammy Awards nominations including \"Record of the Year\", \"Best Pop Album\" and winning \"Best Dance Recording\" for the single \"Believe\". It was released by Warner Bros. Records at the end of 1998. The album was executive produced by Rob <a href=\"https://www.last.fm/music/Cher/Believe\">Read more on Last.fm</a>.",
|
||||
URL: "https://www.last.fm/music/Cher/Believe",
|
||||
Images: []agents.ExternalImage{
|
||||
{
|
||||
URL: "https://lastfm.freetls.fastly.net/i/u/34s/3b54885952161aaea4ce2965b2db1638.png",
|
||||
Size: 34,
|
||||
},
|
||||
{
|
||||
URL: "https://lastfm.freetls.fastly.net/i/u/64s/3b54885952161aaea4ce2965b2db1638.png",
|
||||
Size: 64,
|
||||
},
|
||||
{
|
||||
URL: "https://lastfm.freetls.fastly.net/i/u/174s/3b54885952161aaea4ce2965b2db1638.png",
|
||||
Size: 174,
|
||||
},
|
||||
{
|
||||
URL: "https://lastfm.freetls.fastly.net/i/u/300x300/3b54885952161aaea4ce2965b2db1638.png",
|
||||
Size: 300,
|
||||
},
|
||||
},
|
||||
}))
|
||||
Expect(httpClient.RequestCount).To(Equal(1))
|
||||
Expect(httpClient.SavedRequest.URL.Query().Get("mbid")).To(Equal("03c91c40-49a6-44a7-90e7-a700edf97a62"))
|
||||
@@ -395,9 +354,8 @@ var _ = Describe("lastfmAgent", func() {
|
||||
f, _ := os.Open("tests/fixtures/lastfm.album.getinfo.empty_urls.json")
|
||||
httpClient.Res = http.Response{Body: f, StatusCode: 200}
|
||||
Expect(agent.GetAlbumInfo(ctx, "The Definitive Less Damage And More Joy", "The Jesus and Mary Chain", "")).To(Equal(&agents.AlbumInfo{
|
||||
Name: "The Definitive Less Damage And More Joy",
|
||||
URL: "https://www.last.fm/music/The+Jesus+and+Mary+Chain/The+Definitive+Less+Damage+And+More+Joy",
|
||||
Images: []agents.ExternalImage{},
|
||||
Name: "The Definitive Less Damage And More Joy",
|
||||
URL: "https://www.last.fm/music/The+Jesus+and+Mary+Chain/The+Definitive+Less+Damage+And+More+Joy",
|
||||
}))
|
||||
Expect(httpClient.RequestCount).To(Equal(1))
|
||||
Expect(httpClient.SavedRequest.URL.Query().Get("album")).To(Equal("The Definitive Less Damage And More Joy"))
|
||||
|
||||
@@ -65,7 +65,9 @@ func (s *Router) routes() http.Handler {
}

func (s *Router) getLinkStatus(w http.ResponseWriter, r *http.Request) {
	resp := map[string]interface{}{}
	resp := map[string]interface{}{
		"apiKey": s.apiKey,
	}
	u, _ := request.UserFrom(r.Context())
	key, err := s.sessionKeys.Get(r.Context(), u.ID)
	if err != nil && !errors.Is(err, model.ErrNotFound) {
@@ -59,11 +59,10 @@ func (c *client) albumGetInfo(ctx context.Context, name string, artist string, m
	return &response.Album, nil
}

func (c *client) artistGetInfo(ctx context.Context, name string, mbid string) (*Artist, error) {
func (c *client) artistGetInfo(ctx context.Context, name string) (*Artist, error) {
	params := url.Values{}
	params.Add("method", "artist.getInfo")
	params.Add("artist", name)
	params.Add("mbid", mbid)
	params.Add("lang", c.lang)
	response, err := c.makeRequest(ctx, http.MethodGet, params, false)
	if err != nil {
@@ -72,11 +71,10 @@ func (c *client) artistGetInfo(ctx context.Context, name string, mbid string) (*
	return &response.Artist, nil
}

func (c *client) artistGetSimilar(ctx context.Context, name string, mbid string, limit int) (*SimilarArtists, error) {
func (c *client) artistGetSimilar(ctx context.Context, name string, limit int) (*SimilarArtists, error) {
	params := url.Values{}
	params.Add("method", "artist.getSimilar")
	params.Add("artist", name)
	params.Add("mbid", mbid)
	params.Add("limit", strconv.Itoa(limit))
	response, err := c.makeRequest(ctx, http.MethodGet, params, false)
	if err != nil {
@@ -85,11 +83,10 @@ func (c *client) artistGetSimilar(ctx context.Context, name string, mbid string,
	return &response.SimilarArtists, nil
}

func (c *client) artistGetTopTracks(ctx context.Context, name string, mbid string, limit int) (*TopTracks, error) {
func (c *client) artistGetTopTracks(ctx context.Context, name string, limit int) (*TopTracks, error) {
	params := url.Values{}
	params.Add("method", "artist.getTopTracks")
	params.Add("artist", name)
	params.Add("mbid", mbid)
	params.Add("limit", strconv.Itoa(limit))
	response, err := c.makeRequest(ctx, http.MethodGet, params, false)
	if err != nil {
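Dropping the mbid parameter changes only one segment of the request URL because url.Values.Encode emits parameters sorted alphabetically by key. A small sketch of that standard-library behaviour, which is why the expected URLs in the Last.fm client tests read api_key, artist, format, lang, method:

package main

import (
	"fmt"
	"net/url"
)

func main() {
	params := url.Values{}
	params.Add("method", "artist.getInfo")
	params.Add("artist", "U2")
	params.Add("lang", "pt")
	params.Add("format", "json")
	params.Add("api_key", "API_KEY")
	// Encode sorts keys alphabetically; removing "mbid" simply removes
	// "&mbid=..." from the middle of the encoded string.
	fmt.Println(params.Encode())
	// api_key=API_KEY&artist=U2&format=json&lang=pt&method=artist.getInfo
}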
@@ -42,10 +42,10 @@ var _ = Describe("client", func() {
|
||||
f, _ := os.Open("tests/fixtures/lastfm.artist.getinfo.json")
|
||||
httpClient.Res = http.Response{Body: f, StatusCode: 200}
|
||||
|
||||
artist, err := client.artistGetInfo(context.Background(), "U2", "123")
|
||||
artist, err := client.artistGetInfo(context.Background(), "U2")
|
||||
Expect(err).To(BeNil())
|
||||
Expect(artist.Name).To(Equal("U2"))
|
||||
Expect(httpClient.SavedRequest.URL.String()).To(Equal(apiBaseUrl + "?api_key=API_KEY&artist=U2&format=json&lang=pt&mbid=123&method=artist.getInfo"))
|
||||
Expect(httpClient.SavedRequest.URL.String()).To(Equal(apiBaseUrl + "?api_key=API_KEY&artist=U2&format=json&lang=pt&method=artist.getInfo"))
|
||||
})
|
||||
|
||||
It("fails if Last.fm returns an http status != 200", func() {
|
||||
@@ -54,7 +54,7 @@ var _ = Describe("client", func() {
|
||||
StatusCode: 500,
|
||||
}
|
||||
|
||||
_, err := client.artistGetInfo(context.Background(), "U2", "123")
|
||||
_, err := client.artistGetInfo(context.Background(), "U2")
|
||||
Expect(err).To(MatchError("last.fm http status: (500)"))
|
||||
})
|
||||
|
||||
@@ -64,7 +64,7 @@ var _ = Describe("client", func() {
|
||||
StatusCode: 400,
|
||||
}
|
||||
|
||||
_, err := client.artistGetInfo(context.Background(), "U2", "123")
|
||||
_, err := client.artistGetInfo(context.Background(), "U2")
|
||||
Expect(err).To(MatchError(&lastFMError{Code: 3, Message: "Invalid Method - No method with that name in this package"}))
|
||||
})
|
||||
|
||||
@@ -74,14 +74,14 @@ var _ = Describe("client", func() {
|
||||
StatusCode: 200,
|
||||
}
|
||||
|
||||
_, err := client.artistGetInfo(context.Background(), "U2", "123")
|
||||
_, err := client.artistGetInfo(context.Background(), "U2")
|
||||
Expect(err).To(MatchError(&lastFMError{Code: 6, Message: "The artist you supplied could not be found"}))
|
||||
})
|
||||
|
||||
It("fails if HttpClient.Do() returns error", func() {
|
||||
httpClient.Err = errors.New("generic error")
|
||||
|
||||
_, err := client.artistGetInfo(context.Background(), "U2", "123")
|
||||
_, err := client.artistGetInfo(context.Background(), "U2")
|
||||
Expect(err).To(MatchError("generic error"))
|
||||
})
|
||||
|
||||
@@ -91,7 +91,7 @@ var _ = Describe("client", func() {
|
||||
StatusCode: 200,
|
||||
}
|
||||
|
||||
_, err := client.artistGetInfo(context.Background(), "U2", "123")
|
||||
_, err := client.artistGetInfo(context.Background(), "U2")
|
||||
Expect(err).To(MatchError("invalid character '<' looking for beginning of value"))
|
||||
})
|
||||
|
||||
@@ -102,10 +102,10 @@ var _ = Describe("client", func() {
|
||||
f, _ := os.Open("tests/fixtures/lastfm.artist.getsimilar.json")
|
||||
httpClient.Res = http.Response{Body: f, StatusCode: 200}
|
||||
|
||||
similar, err := client.artistGetSimilar(context.Background(), "U2", "123", 2)
|
||||
similar, err := client.artistGetSimilar(context.Background(), "U2", 2)
|
||||
Expect(err).To(BeNil())
|
||||
Expect(len(similar.Artists)).To(Equal(2))
|
||||
Expect(httpClient.SavedRequest.URL.String()).To(Equal(apiBaseUrl + "?api_key=API_KEY&artist=U2&format=json&limit=2&mbid=123&method=artist.getSimilar"))
|
||||
Expect(httpClient.SavedRequest.URL.String()).To(Equal(apiBaseUrl + "?api_key=API_KEY&artist=U2&format=json&limit=2&method=artist.getSimilar"))
|
||||
})
|
||||
})
|
||||
|
||||
@@ -114,10 +114,10 @@ var _ = Describe("client", func() {
|
||||
f, _ := os.Open("tests/fixtures/lastfm.artist.gettoptracks.json")
|
||||
httpClient.Res = http.Response{Body: f, StatusCode: 200}
|
||||
|
||||
top, err := client.artistGetTopTracks(context.Background(), "U2", "123", 2)
|
||||
top, err := client.artistGetTopTracks(context.Background(), "U2", 2)
|
||||
Expect(err).To(BeNil())
|
||||
Expect(len(top.Track)).To(Equal(2))
|
||||
Expect(httpClient.SavedRequest.URL.String()).To(Equal(apiBaseUrl + "?api_key=API_KEY&artist=U2&format=json&limit=2&mbid=123&method=artist.getTopTracks"))
|
||||
Expect(httpClient.SavedRequest.URL.String()).To(Equal(apiBaseUrl + "?api_key=API_KEY&artist=U2&format=json&limit=2&method=artist.getTopTracks"))
|
||||
})
|
||||
})
|
||||
|
||||
|
||||
@@ -12,6 +12,7 @@ import (
|
||||
"github.com/navidrome/navidrome/log"
|
||||
"github.com/navidrome/navidrome/model"
|
||||
"github.com/navidrome/navidrome/utils/cache"
|
||||
"github.com/navidrome/navidrome/utils/slice"
|
||||
)
|
||||
|
||||
const (
|
||||
@@ -45,6 +46,12 @@ func (l *listenBrainzAgent) AgentName() string {
|
||||
}
|
||||
|
||||
func (l *listenBrainzAgent) formatListen(track *model.MediaFile) listenInfo {
|
||||
artistMBIDs := slice.Map(track.Participants[model.RoleArtist], func(p model.Participant) string {
|
||||
return p.MbzArtistID
|
||||
})
|
||||
artistNames := slice.Map(track.Participants[model.RoleArtist], func(p model.Participant) string {
|
||||
return p.Name
|
||||
})
|
||||
li := listenInfo{
|
||||
TrackMetadata: trackMetadata{
|
||||
ArtistName: track.Artist,
|
||||
@@ -54,9 +61,11 @@ func (l *listenBrainzAgent) formatListen(track *model.MediaFile) listenInfo {
|
||||
SubmissionClient: consts.AppName,
|
||||
SubmissionClientVersion: consts.Version,
|
||||
TrackNumber: track.TrackNumber,
|
||||
ArtistMbzIDs: []string{track.MbzArtistID},
|
||||
RecordingMbzID: track.MbzRecordingID,
|
||||
ReleaseMbID: track.MbzAlbumID,
|
||||
ArtistNames: artistNames,
|
||||
ArtistMBIDs: artistMBIDs,
|
||||
RecordingMBID: track.MbzRecordingID,
|
||||
ReleaseMBID: track.MbzAlbumID,
|
||||
ReleaseGroupMBID: track.MbzReleaseGroupID,
|
||||
DurationMs: int(track.Duration * 1000),
|
||||
},
|
||||
},
|
||||
@@ -64,17 +73,17 @@ func (l *listenBrainzAgent) formatListen(track *model.MediaFile) listenInfo {
|
||||
return li
|
||||
}
|
||||
|
||||
func (l *listenBrainzAgent) NowPlaying(ctx context.Context, userId string, track *model.MediaFile) error {
|
||||
func (l *listenBrainzAgent) NowPlaying(ctx context.Context, userId string, track *model.MediaFile, position int) error {
|
||||
sk, err := l.sessionKeys.Get(ctx, userId)
|
||||
if err != nil || sk == "" {
|
||||
return scrobbler.ErrNotAuthorized
|
||||
return errors.Join(err, scrobbler.ErrNotAuthorized)
|
||||
}
|
||||
|
||||
li := l.formatListen(track)
|
||||
err = l.client.updateNowPlaying(ctx, sk, li)
|
||||
if err != nil {
|
||||
log.Warn(ctx, "ListenBrainz updateNowPlaying returned error", "track", track.Title, err)
|
||||
return scrobbler.ErrUnrecoverable
|
||||
return errors.Join(err, scrobbler.ErrUnrecoverable)
|
||||
}
|
||||
return nil
|
||||
}
|
||||
@@ -82,7 +91,7 @@ func (l *listenBrainzAgent) NowPlaying(ctx context.Context, userId string, track
|
||||
func (l *listenBrainzAgent) Scrobble(ctx context.Context, userId string, s scrobbler.Scrobble) error {
|
||||
sk, err := l.sessionKeys.Get(ctx, userId)
|
||||
if err != nil || sk == "" {
|
||||
return scrobbler.ErrNotAuthorized
|
||||
return errors.Join(err, scrobbler.ErrNotAuthorized)
|
||||
}
|
||||
|
||||
li := l.formatListen(&s.MediaFile)
|
||||
@@ -96,12 +105,12 @@ func (l *listenBrainzAgent) Scrobble(ctx context.Context, userId string, s scrob
|
||||
isListenBrainzError := errors.As(err, &lbErr)
|
||||
if !isListenBrainzError {
|
||||
log.Warn(ctx, "ListenBrainz Scrobble returned HTTP error", "track", s.Title, err)
|
||||
return scrobbler.ErrRetryLater
|
||||
return errors.Join(err, scrobbler.ErrRetryLater)
|
||||
}
|
||||
if lbErr.Code == 500 || lbErr.Code == 503 {
|
||||
return scrobbler.ErrRetryLater
|
||||
return errors.Join(err, scrobbler.ErrRetryLater)
|
||||
}
|
||||
return scrobbler.ErrUnrecoverable
|
||||
return errors.Join(err, scrobbler.ErrUnrecoverable)
|
||||
}
|
||||
|
||||
func (l *listenBrainzAgent) IsAuthorized(ctx context.Context, userId string) bool {
|
||||
|
||||
@@ -32,24 +32,26 @@ var _ = Describe("listenBrainzAgent", func() {
|
||||
agent = listenBrainzConstructor(ds)
|
||||
agent.client = newClient("http://localhost:8080", httpClient)
|
||||
track = &model.MediaFile{
|
||||
ID: "123",
|
||||
Title: "Track Title",
|
||||
Album: "Track Album",
|
||||
Artist: "Track Artist",
|
||||
TrackNumber: 1,
|
||||
MbzRecordingID: "mbz-123",
|
||||
MbzAlbumID: "mbz-456",
|
||||
MbzArtistID: "mbz-789",
|
||||
Duration: 142.2,
|
||||
ID: "123",
|
||||
Title: "Track Title",
|
||||
Album: "Track Album",
|
||||
Artist: "Track Artist",
|
||||
TrackNumber: 1,
|
||||
MbzRecordingID: "mbz-123",
|
||||
MbzAlbumID: "mbz-456",
|
||||
MbzReleaseGroupID: "mbz-789",
|
||||
Duration: 142.2,
|
||||
Participants: map[model.Role]model.ParticipantList{
|
||||
model.RoleArtist: []model.Participant{
|
||||
{Artist: model.Artist{ID: "ar-1", Name: "Artist 1", MbzArtistID: "mbz-111"}},
|
||||
{Artist: model.Artist{ID: "ar-2", Name: "Artist 2", MbzArtistID: "mbz-222"}},
|
||||
},
|
||||
},
|
||||
}
|
||||
})
|
||||
|
||||
Describe("formatListen", func() {
|
||||
It("constructs the listenInfo properly", func() {
|
||||
var idArtistId = func(element interface{}) string {
|
||||
return element.(string)
|
||||
}
|
||||
|
||||
lr := agent.formatListen(track)
|
||||
Expect(lr).To(MatchAllFields(Fields{
|
||||
"ListenedAt": Equal(0),
|
||||
@@ -61,12 +63,12 @@ var _ = Describe("listenBrainzAgent", func() {
|
||||
"SubmissionClient": Equal(consts.AppName),
|
||||
"SubmissionClientVersion": Equal(consts.Version),
|
||||
"TrackNumber": Equal(track.TrackNumber),
|
||||
"RecordingMbzID": Equal(track.MbzRecordingID),
|
||||
"ReleaseMbID": Equal(track.MbzAlbumID),
|
||||
"ArtistMbzIDs": MatchAllElements(idArtistId, Elements{
|
||||
"mbz-789": Equal(track.MbzArtistID),
|
||||
}),
|
||||
"DurationMs": Equal(142200),
|
||||
"RecordingMBID": Equal(track.MbzRecordingID),
|
||||
"ReleaseMBID": Equal(track.MbzAlbumID),
|
||||
"ReleaseGroupMBID": Equal(track.MbzReleaseGroupID),
|
||||
"ArtistNames": ConsistOf("Artist 1", "Artist 2"),
|
||||
"ArtistMBIDs": ConsistOf("mbz-111", "mbz-222"),
|
||||
"DurationMs": Equal(142200),
|
||||
}),
|
||||
}),
|
||||
}))
|
||||
@@ -77,12 +79,12 @@ var _ = Describe("listenBrainzAgent", func() {
|
||||
It("updates NowPlaying successfully", func() {
|
||||
httpClient.Res = http.Response{Body: io.NopCloser(bytes.NewBufferString(`{"status": "ok"}`)), StatusCode: 200}
|
||||
|
||||
err := agent.NowPlaying(ctx, "user-1", track)
|
||||
err := agent.NowPlaying(ctx, "user-1", track, 0)
|
||||
Expect(err).ToNot(HaveOccurred())
|
||||
})
|
||||
|
||||
It("returns ErrNotAuthorized if user is not linked", func() {
|
||||
err := agent.NowPlaying(ctx, "user-2", track)
|
||||
err := agent.NowPlaying(ctx, "user-2", track, 0)
|
||||
Expect(err).To(MatchError(scrobbler.ErrNotAuthorized))
|
||||
})
|
||||
})
|
||||
|
||||
@@ -76,9 +76,11 @@ type additionalInfo struct {
	SubmissionClient        string   `json:"submission_client,omitempty"`
	SubmissionClientVersion string   `json:"submission_client_version,omitempty"`
	TrackNumber             int      `json:"tracknumber,omitempty"`
	RecordingMbzID          string   `json:"recording_mbid,omitempty"`
	ArtistMbzIDs            []string `json:"artist_mbids,omitempty"`
	ReleaseMbID             string   `json:"release_mbid,omitempty"`
	ArtistNames             []string `json:"artist_names,omitempty"`
	ArtistMBIDs             []string `json:"artist_mbids,omitempty"`
	RecordingMBID           string   `json:"recording_mbid,omitempty"`
	ReleaseMBID             string   `json:"release_mbid,omitempty"`
	ReleaseGroupMBID        string   `json:"release_group_mbid,omitempty"`
	DurationMs              int      `json:"duration_ms,omitempty"`
}
@@ -74,11 +74,12 @@ var _ = Describe("client", func() {
				TrackName:   "Track Title",
				ReleaseName: "Track Album",
				AdditionalInfo: additionalInfo{
					TrackNumber:    1,
					RecordingMbzID: "mbz-123",
					ArtistMbzIDs:   []string{"mbz-789"},
					ReleaseMbID:    "mbz-456",
					DurationMs:     142200,
					TrackNumber:   1,
					ArtistNames:   []string{"Artist 1", "Artist 2"},
					ArtistMBIDs:   []string{"mbz-789", "mbz-012"},
					RecordingMBID: "mbz-123",
					ReleaseMBID:   "mbz-456",
					DurationMs:    142200,
				},
			},
		}
@@ -27,6 +27,9 @@ type spotifyAgent struct {
}

func spotifyConstructor(ds model.DataStore) agents.Interface {
	if conf.Server.Spotify.ID == "" || conf.Server.Spotify.Secret == "" {
		return nil
	}
	l := &spotifyAgent{
		ds: ds,
		id: conf.Server.Spotify.ID,
@@ -88,8 +91,6 @@ func (s *spotifyAgent) searchArtist(ctx context.Context, name string) (*Artist,

func init() {
	conf.AddHook(func() {
		if conf.Server.Spotify.ID != "" && conf.Server.Spotify.Secret != "" {
			agents.Register(spotifyAgentName, spotifyConstructor)
		}
		agents.Register(spotifyAgentName, spotifyConstructor)
	})
}
@@ -53,11 +53,11 @@ func (a *archiver) zipAlbums(ctx context.Context, id string, format string, bitr
|
||||
})
|
||||
for _, album := range albums {
|
||||
discs := slice.Group(album, func(mf model.MediaFile) int { return mf.DiscNumber })
|
||||
isMultDisc := len(discs) > 1
|
||||
isMultiDisc := len(discs) > 1
|
||||
log.Debug(ctx, "Zipping album", "name", album[0].Album, "artist", album[0].AlbumArtist,
|
||||
"format", format, "bitrate", bitrate, "isMultDisc", isMultDisc, "numTracks", len(album))
|
||||
"format", format, "bitrate", bitrate, "isMultiDisc", isMultiDisc, "numTracks", len(album))
|
||||
for _, mf := range album {
|
||||
file := a.albumFilename(mf, format, isMultDisc)
|
||||
file := a.albumFilename(mf, format, isMultiDisc)
|
||||
_ = a.addFileToZip(ctx, z, mf, format, bitrate, file)
|
||||
}
|
||||
}
|
||||
@@ -78,12 +78,12 @@ func createZipWriter(out io.Writer, format string, bitrate int) *zip.Writer {
|
||||
return z
|
||||
}
|
||||
|
||||
func (a *archiver) albumFilename(mf model.MediaFile, format string, isMultDisc bool) string {
|
||||
func (a *archiver) albumFilename(mf model.MediaFile, format string, isMultiDisc bool) string {
|
||||
_, file := filepath.Split(mf.Path)
|
||||
if format != "raw" {
|
||||
file = strings.TrimSuffix(file, mf.Suffix) + format
|
||||
}
|
||||
if isMultDisc {
|
||||
if isMultiDisc {
|
||||
file = fmt.Sprintf("Disc %02d/%s", mf.DiscNumber, file)
|
||||
}
|
||||
return fmt.Sprintf("%s/%s", sanitizeName(mf.Album), file)
|
||||
@@ -91,33 +91,58 @@ func (a *archiver) albumFilename(mf model.MediaFile, format string, isMultDisc b

func (a *archiver) ZipShare(ctx context.Context, id string, out io.Writer) error {
    s, err := a.shares.Load(ctx, id)
    if !s.Downloadable {
        return model.ErrNotAuthorized
    }
    if err != nil {
        return err
    }
    if !s.Downloadable {
        return model.ErrNotAuthorized
    }
    log.Debug(ctx, "Zipping share", "name", s.ID, "format", s.Format, "bitrate", s.MaxBitRate, "numTracks", len(s.Tracks))
    return a.zipMediaFiles(ctx, id, s.Format, s.MaxBitRate, out, s.Tracks)
    return a.zipMediaFiles(ctx, id, s.ID, s.Format, s.MaxBitRate, out, s.Tracks, false)
}

func (a *archiver) ZipPlaylist(ctx context.Context, id string, format string, bitrate int, out io.Writer) error {
    pls, err := a.ds.Playlist(ctx).GetWithTracks(id, true)
    pls, err := a.ds.Playlist(ctx).GetWithTracks(id, true, false)
    if err != nil {
        log.Error(ctx, "Error loading mediafiles from playlist", "id", id, err)
        return err
    }
    mfs := pls.MediaFiles()
    log.Debug(ctx, "Zipping playlist", "name", pls.Name, "format", format, "bitrate", bitrate, "numTracks", len(mfs))
    return a.zipMediaFiles(ctx, id, format, bitrate, out, mfs)
    return a.zipMediaFiles(ctx, id, pls.Name, format, bitrate, out, mfs, true)
}

func (a *archiver) zipMediaFiles(ctx context.Context, id string, format string, bitrate int, out io.Writer, mfs model.MediaFiles) error {
func (a *archiver) zipMediaFiles(ctx context.Context, id, name string, format string, bitrate int, out io.Writer, mfs model.MediaFiles, addM3U bool) error {
    z := createZipWriter(out, format, bitrate)

    zippedMfs := make(model.MediaFiles, len(mfs))
    for idx, mf := range mfs {
        file := a.playlistFilename(mf, format, idx)
        _ = a.addFileToZip(ctx, z, mf, format, bitrate, file)
        mf.Path = file
        zippedMfs[idx] = mf
    }

    // Add M3U file if requested
    if addM3U && len(zippedMfs) > 0 {
        plsName := sanitizeName(name)
        w, err := z.CreateHeader(&zip.FileHeader{
            Name:     plsName + ".m3u",
            Modified: mfs[0].UpdatedAt,
            Method:   zip.Store,
        })
        if err != nil {
            log.Error(ctx, "Error creating playlist zip entry", err)
            return err
        }

        _, err = w.Write([]byte(zippedMfs.ToM3U8(plsName, false)))
        if err != nil {
            log.Error(ctx, "Error writing m3u in zip", err)
            return err
        }
    }

    err := z.Close()
    if err != nil {
        log.Error(ctx, "Error closing zip file", "id", id, err)
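
The playlist entry above is written uncompressed through the standard archive/zip API. As a rough standalone illustration of that pattern (not Navidrome code; the file name and M3U content are made up):

package main

import (
    "archive/zip"
    "os"
    "time"
)

func main() {
    f, err := os.Create("playlist.zip")
    if err != nil {
        panic(err)
    }
    defer f.Close()

    z := zip.NewWriter(f)
    // Store the playlist entry without compression, mirroring the Method: zip.Store above.
    w, err := z.CreateHeader(&zip.FileHeader{
        Name:     "Test Playlist.m3u",
        Modified: time.Now(),
        Method:   zip.Store,
    })
    if err != nil {
        panic(err)
    }
    _, _ = w.Write([]byte("#EXTM3U\n#PLAYLIST:Test Playlist\n#EXTINF:0,Artist - Track\n01 - Artist - Track.mp3\n"))
    _ = z.Close()
}
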
@@ -138,13 +163,14 @@ func sanitizeName(target string) string {
|
||||
}
|
||||
|
||||
func (a *archiver) addFileToZip(ctx context.Context, z *zip.Writer, mf model.MediaFile, format string, bitrate int, filename string) error {
|
||||
path := mf.AbsolutePath()
|
||||
w, err := z.CreateHeader(&zip.FileHeader{
|
||||
Name: filename,
|
||||
Modified: mf.UpdatedAt,
|
||||
Method: zip.Store,
|
||||
})
|
||||
if err != nil {
|
||||
log.Error(ctx, "Error creating zip entry", "file", mf.Path, err)
|
||||
log.Error(ctx, "Error creating zip entry", "file", path, err)
|
||||
return err
|
||||
}
|
||||
|
||||
@@ -152,22 +178,22 @@ func (a *archiver) addFileToZip(ctx context.Context, z *zip.Writer, mf model.Med
|
||||
if format != "raw" && format != "" {
|
||||
r, err = a.ms.DoStream(ctx, &mf, format, bitrate, 0)
|
||||
} else {
|
||||
r, err = os.Open(mf.Path)
|
||||
r, err = os.Open(path)
|
||||
}
|
||||
if err != nil {
|
||||
log.Error(ctx, "Error opening file for zipping", "file", mf.Path, "format", format, err)
|
||||
log.Error(ctx, "Error opening file for zipping", "file", path, "format", format, err)
|
||||
return err
|
||||
}
|
||||
|
||||
defer func() {
|
||||
if err := r.Close(); err != nil && log.IsGreaterOrEqualTo(log.LevelDebug) {
|
||||
log.Error(ctx, "Error closing stream", "id", mf.ID, "file", mf.Path, err)
|
||||
log.Error(ctx, "Error closing stream", "id", mf.ID, "file", path, err)
|
||||
}
|
||||
}()
|
||||
|
||||
_, err = io.Copy(w, r)
|
||||
if err != nil {
|
||||
log.Error(ctx, "Error zipping file", "file", mf.Path, err)
|
||||
log.Error(ctx, "Error zipping file", "file", path, err)
|
||||
return err
|
||||
}
|
||||
|
||||
|
||||
@@ -25,8 +25,8 @@ var _ = Describe("Archiver", func() {
|
||||
|
||||
BeforeEach(func() {
|
||||
ms = &mockMediaStreamer{}
|
||||
ds = &mockDataStore{}
|
||||
sh = &mockShare{}
|
||||
ds = &mockDataStore{}
|
||||
arch = core.NewArchiver(ms, ds, sh)
|
||||
})
|
||||
|
||||
@@ -134,7 +134,7 @@ var _ = Describe("Archiver", func() {
|
||||
}
|
||||
|
||||
plRepo := &mockPlaylistRepository{}
|
||||
plRepo.On("GetWithTracks", "1", true).Return(pls, nil)
|
||||
plRepo.On("GetWithTracks", "1", true, false).Return(pls, nil)
|
||||
ds.On("Playlist", mock.Anything).Return(plRepo)
|
||||
ms.On("DoStream", mock.Anything, mock.Anything, "mp3", 128, 0).Return(io.NopCloser(strings.NewReader("test")), nil).Times(2)
|
||||
|
||||
@@ -145,9 +145,21 @@ var _ = Describe("Archiver", func() {
|
||||
zr, err := zip.NewReader(bytes.NewReader(out.Bytes()), int64(out.Len()))
|
||||
Expect(err).To(BeNil())
|
||||
|
||||
Expect(len(zr.File)).To(Equal(2))
|
||||
Expect(len(zr.File)).To(Equal(3))
|
||||
Expect(zr.File[0].Name).To(Equal("01 - AC_DC - track1.mp3"))
|
||||
Expect(zr.File[1].Name).To(Equal("02 - Artist 2 - track2.mp3"))
|
||||
Expect(zr.File[2].Name).To(Equal("Test Playlist.m3u"))
|
||||
|
||||
// Verify M3U content
|
||||
m3uFile, err := zr.File[2].Open()
|
||||
Expect(err).To(BeNil())
|
||||
defer m3uFile.Close()
|
||||
|
||||
m3uContent, err := io.ReadAll(m3uFile)
|
||||
Expect(err).To(BeNil())
|
||||
|
||||
expectedM3U := "#EXTM3U\n#PLAYLIST:Test Playlist\n#EXTINF:0,AC/DC - track1\n01 - AC_DC - track1.mp3\n#EXTINF:0,Artist 2 - track2\n02 - Artist 2 - track2.mp3\n"
|
||||
Expect(string(m3uContent)).To(Equal(expectedM3U))
|
||||
})
|
||||
})
|
||||
})
|
||||
@@ -167,6 +179,19 @@ func (m *mockDataStore) Playlist(ctx context.Context) model.PlaylistRepository {
|
||||
return args.Get(0).(model.PlaylistRepository)
|
||||
}
|
||||
|
||||
func (m *mockDataStore) Library(context.Context) model.LibraryRepository {
|
||||
return &mockLibraryRepository{}
|
||||
}
|
||||
|
||||
type mockLibraryRepository struct {
|
||||
mock.Mock
|
||||
model.LibraryRepository
|
||||
}
|
||||
|
||||
func (m *mockLibraryRepository) GetPath(id int) (string, error) {
|
||||
return "/music", nil
|
||||
}
|
||||
|
||||
type mockMediaFileRepository struct {
|
||||
mock.Mock
|
||||
model.MediaFileRepository
|
||||
@@ -182,8 +207,8 @@ type mockPlaylistRepository struct {
|
||||
model.PlaylistRepository
|
||||
}
|
||||
|
||||
func (m *mockPlaylistRepository) GetWithTracks(id string, includeTracks bool) (*model.Playlist, error) {
|
||||
args := m.Called(id, includeTracks)
|
||||
func (m *mockPlaylistRepository) GetWithTracks(id string, refreshSmartPlaylists, includeMissing bool) (*model.Playlist, error) {
|
||||
args := m.Called(id, refreshSmartPlaylists, includeMissing)
|
||||
return args.Get(0).(*model.Playlist), args.Error(1)
|
||||
}
|
||||
|
||||
|
||||
@@ -8,7 +8,7 @@ import (
|
||||
"time"
|
||||
|
||||
"github.com/navidrome/navidrome/consts"
|
||||
"github.com/navidrome/navidrome/core"
|
||||
"github.com/navidrome/navidrome/core/external"
|
||||
"github.com/navidrome/navidrome/core/ffmpeg"
|
||||
"github.com/navidrome/navidrome/log"
|
||||
"github.com/navidrome/navidrome/model"
|
||||
@@ -24,15 +24,15 @@ type Artwork interface {
|
||||
GetOrPlaceholder(ctx context.Context, id string, size int, square bool) (io.ReadCloser, time.Time, error)
|
||||
}
|
||||
|
||||
func NewArtwork(ds model.DataStore, cache cache.FileCache, ffmpeg ffmpeg.FFmpeg, em core.ExternalMetadata) Artwork {
|
||||
return &artwork{ds: ds, cache: cache, ffmpeg: ffmpeg, em: em}
|
||||
func NewArtwork(ds model.DataStore, cache cache.FileCache, ffmpeg ffmpeg.FFmpeg, provider external.Provider) Artwork {
|
||||
return &artwork{ds: ds, cache: cache, ffmpeg: ffmpeg, provider: provider}
|
||||
}
|
||||
|
||||
type artwork struct {
|
||||
ds model.DataStore
|
||||
cache cache.FileCache
|
||||
ffmpeg ffmpeg.FFmpeg
|
||||
em core.ExternalMetadata
|
||||
ds model.DataStore
|
||||
cache cache.FileCache
|
||||
ffmpeg ffmpeg.FFmpeg
|
||||
provider external.Provider
|
||||
}
|
||||
|
||||
type artworkReader interface {
|
||||
@@ -115,9 +115,9 @@ func (a *artwork) getArtworkReader(ctx context.Context, artID model.ArtworkID, s
|
||||
} else {
|
||||
switch artID.Kind {
|
||||
case model.KindArtistArtwork:
|
||||
artReader, err = newArtistReader(ctx, a, artID, a.em)
|
||||
artReader, err = newArtistArtworkReader(ctx, a, artID, a.provider)
|
||||
case model.KindAlbumArtwork:
|
||||
artReader, err = newAlbumArtworkReader(ctx, a, artID, a.em)
|
||||
artReader, err = newAlbumArtworkReader(ctx, a, artID, a.provider)
|
||||
case model.KindMediaFileArtwork:
|
||||
artReader, err = newMediafileArtworkReader(ctx, a, artID)
|
||||
case model.KindPlaylistArtwork:
|
||||
|
||||
@@ -12,7 +12,6 @@ import (
|
||||
|
||||
"github.com/navidrome/navidrome/conf"
|
||||
"github.com/navidrome/navidrome/conf/configtest"
|
||||
"github.com/navidrome/navidrome/consts"
|
||||
"github.com/navidrome/navidrome/log"
|
||||
"github.com/navidrome/navidrome/model"
|
||||
"github.com/navidrome/navidrome/tests"
|
||||
@@ -24,6 +23,7 @@ var _ = Describe("Artwork", func() {
|
||||
var aw *artwork
|
||||
var ds model.DataStore
|
||||
var ffmpeg *tests.MockFFmpeg
|
||||
var folderRepo *fakeFolderRepo
|
||||
ctx := log.NewContext(context.TODO())
|
||||
var alOnlyEmbed, alEmbedNotFound, alOnlyExternal, alExternalNotFound, alMultipleCovers model.Album
|
||||
var arMultipleCovers model.Artist
|
||||
@@ -34,20 +34,21 @@ var _ = Describe("Artwork", func() {
|
||||
conf.Server.ImageCacheSize = "0" // Disable cache
|
||||
conf.Server.CoverArtPriority = "folder.*, cover.*, embedded , front.*"
|
||||
|
||||
ds = &tests.MockDataStore{MockedTranscoding: &tests.MockTranscodingRepo{}}
|
||||
alOnlyEmbed = model.Album{ID: "222", Name: "Only embed", EmbedArtPath: "tests/fixtures/artist/an-album/test.mp3"}
|
||||
alEmbedNotFound = model.Album{ID: "333", Name: "Embed not found", EmbedArtPath: "tests/fixtures/NON_EXISTENT.mp3"}
|
||||
alOnlyExternal = model.Album{ID: "444", Name: "Only external", ImageFiles: "tests/fixtures/artist/an-album/front.png"}
|
||||
alExternalNotFound = model.Album{ID: "555", Name: "External not found", ImageFiles: "tests/fixtures/NON_EXISTENT.png"}
|
||||
folderRepo = &fakeFolderRepo{}
|
||||
ds = &tests.MockDataStore{
|
||||
MockedTranscoding: &tests.MockTranscodingRepo{},
|
||||
MockedFolder: folderRepo,
|
||||
}
|
||||
alOnlyEmbed = model.Album{ID: "222", Name: "Only embed", EmbedArtPath: "tests/fixtures/artist/an-album/test.mp3", FolderIDs: []string{"f1"}}
|
||||
alEmbedNotFound = model.Album{ID: "333", Name: "Embed not found", EmbedArtPath: "tests/fixtures/NON_EXISTENT.mp3", FolderIDs: []string{"f1"}}
|
||||
alOnlyExternal = model.Album{ID: "444", Name: "Only external", FolderIDs: []string{"f1"}}
|
||||
alExternalNotFound = model.Album{ID: "555", Name: "External not found", FolderIDs: []string{"f2"}}
|
||||
arMultipleCovers = model.Artist{ID: "777", Name: "All options"}
|
||||
alMultipleCovers = model.Album{
|
||||
ID: "666",
|
||||
Name: "All options",
|
||||
EmbedArtPath: "tests/fixtures/artist/an-album/test.mp3",
|
||||
Paths: "tests/fixtures/artist/an-album",
|
||||
ImageFiles: "tests/fixtures/artist/an-album/cover.jpg" + consts.Zwsp +
|
||||
"tests/fixtures/artist/an-album/front.png" + consts.Zwsp +
|
||||
"tests/fixtures/artist/an-album/artist.png",
|
||||
ID: "666",
|
||||
Name: "All options",
|
||||
EmbedArtPath: "tests/fixtures/artist/an-album/test.mp3",
|
||||
FolderIDs: []string{"f1"},
|
||||
AlbumArtistID: "777",
|
||||
}
|
||||
mfWithEmbed = model.MediaFile{ID: "22", Path: "tests/fixtures/test.mp3", HasCoverArt: true, AlbumID: "222"}
|
||||
@@ -69,6 +70,7 @@ var _ = Describe("Artwork", func() {
|
||||
})
|
||||
Context("Embed images", func() {
|
||||
BeforeEach(func() {
|
||||
folderRepo.result = nil
|
||||
ds.Album(ctx).(*tests.MockAlbumRepo).SetData(model.Albums{
|
||||
alOnlyEmbed,
|
||||
alEmbedNotFound,
|
||||
@@ -91,12 +93,17 @@ var _ = Describe("Artwork", func() {
|
||||
})
|
||||
Context("External images", func() {
|
||||
BeforeEach(func() {
|
||||
folderRepo.result = []model.Folder{}
|
||||
ds.Album(ctx).(*tests.MockAlbumRepo).SetData(model.Albums{
|
||||
alOnlyExternal,
|
||||
alExternalNotFound,
|
||||
})
|
||||
})
|
||||
It("returns external cover", func() {
|
||||
folderRepo.result = []model.Folder{{
|
||||
Path: "tests/fixtures/artist/an-album",
|
||||
ImageFiles: []string{"front.png"},
|
||||
}}
|
||||
aw, err := newAlbumArtworkReader(ctx, aw, alOnlyExternal.CoverArtID(), nil)
|
||||
Expect(err).ToNot(HaveOccurred())
|
||||
_, path, err := aw.Reader(ctx)
|
||||
@@ -104,6 +111,7 @@ var _ = Describe("Artwork", func() {
|
||||
Expect(path).To(Equal("tests/fixtures/artist/an-album/front.png"))
|
||||
})
|
||||
It("returns ErrUnavailable if external file is not available", func() {
|
||||
folderRepo.result = []model.Folder{}
|
||||
aw, err := newAlbumArtworkReader(ctx, aw, alExternalNotFound.CoverArtID(), nil)
|
||||
Expect(err).ToNot(HaveOccurred())
|
||||
_, _, err = aw.Reader(ctx)
|
||||
@@ -112,6 +120,10 @@ var _ = Describe("Artwork", func() {
|
||||
})
|
||||
Context("Multiple covers", func() {
|
||||
BeforeEach(func() {
|
||||
folderRepo.result = []model.Folder{{
|
||||
Path: "tests/fixtures/artist/an-album",
|
||||
ImageFiles: []string{"cover.jpg", "front.png", "artist.png"},
|
||||
}}
|
||||
ds.Album(ctx).(*tests.MockAlbumRepo).SetData(model.Albums{
|
||||
alMultipleCovers,
|
||||
})
|
||||
@@ -134,6 +146,10 @@ var _ = Describe("Artwork", func() {
|
||||
Describe("artistArtworkReader", func() {
|
||||
Context("Multiple covers", func() {
|
||||
BeforeEach(func() {
|
||||
folderRepo.result = []model.Folder{{
|
||||
Path: "tests/fixtures/artist/an-album",
|
||||
ImageFiles: []string{"artist.png"},
|
||||
}}
|
||||
ds.Artist(ctx).(*tests.MockArtistRepo).SetData(model.Artists{
|
||||
arMultipleCovers,
|
||||
})
|
||||
@@ -147,7 +163,7 @@ var _ = Describe("Artwork", func() {
|
||||
DescribeTable("ArtistArtPriority",
|
||||
func(priority string, expected string) {
|
||||
conf.Server.ArtistArtPriority = priority
|
||||
aw, err := newArtistReader(ctx, aw, arMultipleCovers.CoverArtID(), nil)
|
||||
aw, err := newArtistArtworkReader(ctx, aw, arMultipleCovers.CoverArtID(), nil)
|
||||
Expect(err).ToNot(HaveOccurred())
|
||||
_, path, err := aw.Reader(ctx)
|
||||
Expect(err).ToNot(HaveOccurred())
|
||||
@@ -161,12 +177,16 @@ var _ = Describe("Artwork", func() {
|
||||
Describe("mediafileArtworkReader", func() {
|
||||
Context("ID not found", func() {
|
||||
It("returns ErrNotFound if mediafile is not in the DB", func() {
|
||||
_, err := newAlbumArtworkReader(ctx, aw, alMultipleCovers.CoverArtID(), nil)
|
||||
_, err := newMediafileArtworkReader(ctx, aw, model.MustParseArtworkID("mf-NOT-FOUND"))
|
||||
Expect(err).To(MatchError(model.ErrNotFound))
|
||||
})
|
||||
})
|
||||
Context("Embed images", func() {
|
||||
BeforeEach(func() {
|
||||
folderRepo.result = []model.Folder{{
|
||||
Path: "tests/fixtures/artist/an-album",
|
||||
ImageFiles: []string{"front.png"},
|
||||
}}
|
||||
ds.Album(ctx).(*tests.MockAlbumRepo).SetData(model.Albums{
|
||||
alOnlyEmbed,
|
||||
alOnlyExternal,
|
||||
@@ -189,11 +209,17 @@ var _ = Describe("Artwork", func() {
|
||||
Expect(err).ToNot(HaveOccurred())
|
||||
r, path, err := aw.Reader(ctx)
|
||||
Expect(err).ToNot(HaveOccurred())
|
||||
Expect(io.ReadAll(r)).To(Equal([]byte("content from ffmpeg")))
|
||||
data, _ := io.ReadAll(r)
|
||||
Expect(data).ToNot(BeEmpty())
|
||||
Expect(path).To(Equal("tests/fixtures/test.ogg"))
|
||||
})
|
||||
It("returns album cover if cannot read embed artwork", func() {
|
||||
// Force fromTag to fail
|
||||
mfCorruptedCover.Path = "tests/fixtures/DOES_NOT_EXIST.ogg"
|
||||
Expect(ds.MediaFile(ctx).(*tests.MockMediaFileRepo).Put(&mfCorruptedCover)).To(Succeed())
|
||||
// Simulate ffmpeg error
|
||||
ffmpeg.Error = errors.New("not available")
|
||||
|
||||
aw, err := newMediafileArtworkReader(ctx, aw, mfCorruptedCover.CoverArtID())
|
||||
Expect(err).ToNot(HaveOccurred())
|
||||
_, path, err := aw.Reader(ctx)
|
||||
@@ -211,6 +237,10 @@ var _ = Describe("Artwork", func() {
|
||||
})
|
||||
Describe("resizedArtworkReader", func() {
|
||||
BeforeEach(func() {
|
||||
folderRepo.result = []model.Folder{{
|
||||
Path: "tests/fixtures/artist/an-album",
|
||||
ImageFiles: []string{"cover.jpg", "front.png"},
|
||||
}}
|
||||
ds.Album(ctx).(*tests.MockAlbumRepo).SetData(model.Albums{
|
||||
alMultipleCovers,
|
||||
})
|
||||
@@ -247,10 +277,11 @@ var _ = Describe("Artwork", func() {
|
||||
coverFileName := "cover." + format
|
||||
dirName := createImage(format, landscape, size)
|
||||
alCover = model.Album{
|
||||
ID: "444",
|
||||
Name: "Only external",
|
||||
ImageFiles: filepath.Join(dirName, coverFileName),
|
||||
ID: "444",
|
||||
Name: "Only external",
|
||||
FolderIDs: []string{"tmp"},
|
||||
}
|
||||
folderRepo.result = []model.Folder{{Path: dirName, ImageFiles: []string{coverFileName}}}
|
||||
ds.Album(ctx).(*tests.MockAlbumRepo).SetData(model.Albums{
|
||||
alCover,
|
||||
})
|
||||
|
||||
@@ -22,12 +22,21 @@ type CacheWarmer interface {
    PreCache(artID model.ArtworkID)
}

// NewCacheWarmer creates a new CacheWarmer instance. The CacheWarmer will pre-cache Artwork images in the background
// to speed up the response time when the image is requested by the UI. The cache is pre-populated with the original
// image size, as well as the size defined in the UICoverArtSize constant.
func NewCacheWarmer(artwork Artwork, cache cache.FileCache) CacheWarmer {
    // If image cache is disabled, return a NOOP implementation
    if conf.Server.ImageCacheSize == "0" || !conf.Server.EnableArtworkPrecache {
        return &noopCacheWarmer{}
    }

    // If the file cache is disabled, return a NOOP implementation
    if cache.Disabled(context.Background()) {
        log.Debug("Image cache disabled. Cache warmer will not run")
        return &noopCacheWarmer{}
    }

    a := &cacheWarmer{
        artwork: artwork,
        cache:   cache,
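
A hedged usage sketch of this API follows; the warmAlbumArt helper and its wiring are assumptions, not code from this changeset, and the "al-" ID prefix simply follows the test IDs used further down:

import (
    "github.com/navidrome/navidrome/core/artwork"
    "github.com/navidrome/navidrome/model"
    "github.com/navidrome/navidrome/utils/cache"
)

// Hypothetical caller; artworkSvc and fileCache are assumed to be already-built
// artwork.Artwork and cache.FileCache values.
func warmAlbumArt(artworkSvc artwork.Artwork, fileCache cache.FileCache, albumIDs []string) {
    cw := artwork.NewCacheWarmer(artworkSvc, fileCache) // a no-op warmer when precaching is disabled by config
    for _, id := range albumIDs {
        cw.PreCache(model.MustParseArtworkID("al-" + id)) // buffered, deduplicated, processed in the background
    }
}
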
@@ -49,13 +58,8 @@ type cacheWarmer struct {
|
||||
wakeSignal chan struct{}
|
||||
}
|
||||
|
||||
var ignoredIds = map[string]struct{}{
|
||||
consts.VariousArtistsID: {},
|
||||
consts.UnknownArtistID: {},
|
||||
}
|
||||
|
||||
func (a *cacheWarmer) PreCache(artID model.ArtworkID) {
|
||||
if _, shouldIgnore := ignoredIds[artID.ID]; shouldIgnore {
|
||||
if a.cache.Disabled(context.Background()) {
|
||||
return
|
||||
}
|
||||
a.mutex.Lock()
|
||||
@@ -79,10 +83,24 @@ func (a *cacheWarmer) run(ctx context.Context) {
|
||||
break
|
||||
}
|
||||
|
||||
if a.cache.Disabled(ctx) {
|
||||
a.mutex.Lock()
|
||||
pending := len(a.buffer)
|
||||
a.buffer = make(map[model.ArtworkID]struct{})
|
||||
a.mutex.Unlock()
|
||||
if pending > 0 {
|
||||
log.Trace(ctx, "Cache disabled, discarding precache buffer", "bufferLen", pending)
|
||||
}
|
||||
return
|
||||
}
|
||||
|
||||
// If cache not available, keep waiting
|
||||
if !a.cache.Available(ctx) {
|
||||
if len(a.buffer) > 0 {
|
||||
log.Trace(ctx, "Cache not available, buffering precache request", "bufferLen", len(a.buffer))
|
||||
a.mutex.Lock()
|
||||
bufferLen := len(a.buffer)
|
||||
a.mutex.Unlock()
|
||||
if bufferLen > 0 {
|
||||
log.Trace(ctx, "Cache not available, buffering precache request", "bufferLen", bufferLen)
|
||||
}
|
||||
continue
|
||||
}
|
||||
@@ -104,14 +122,8 @@ func (a *cacheWarmer) run(ctx context.Context) {
|
||||
}
|
||||
|
||||
func (a *cacheWarmer) waitSignal(ctx context.Context, timeout time.Duration) {
|
||||
var to <-chan time.Time
|
||||
if !a.cache.Available(ctx) {
|
||||
tmr := time.NewTimer(timeout)
|
||||
defer tmr.Stop()
|
||||
to = tmr.C
|
||||
}
|
||||
select {
|
||||
case <-to:
|
||||
case <-time.After(timeout):
|
||||
case <-a.wakeSignal:
|
||||
case <-ctx.Done():
|
||||
}
|
||||
@@ -130,7 +142,7 @@ func (a *cacheWarmer) doCacheImage(ctx context.Context, id model.ArtworkID) erro
|
||||
ctx, cancel := context.WithTimeout(ctx, 10*time.Second)
|
||||
defer cancel()
|
||||
|
||||
r, _, err := a.artwork.Get(ctx, id, consts.UICoverArtSize, false)
|
||||
r, _, err := a.artwork.Get(ctx, id, consts.UICoverArtSize, true)
|
||||
if err != nil {
|
||||
return fmt.Errorf("caching id='%s': %w", id, err)
|
||||
}
|
||||
@@ -142,6 +154,10 @@ func (a *cacheWarmer) doCacheImage(ctx context.Context, id model.ArtworkID) erro
|
||||
return nil
|
||||
}
|
||||
|
||||
func NoopCacheWarmer() CacheWarmer {
|
||||
return &noopCacheWarmer{}
|
||||
}
|
||||
|
||||
type noopCacheWarmer struct{}
|
||||
|
||||
func (a *noopCacheWarmer) PreCache(model.ArtworkID) {}
|
||||
|
||||
core/artwork/cache_warmer_test.go (new file, 221 lines)
@@ -0,0 +1,221 @@
|
||||
package artwork
|
||||
|
||||
import (
|
||||
"context"
|
||||
"errors"
|
||||
"fmt"
|
||||
"io"
|
||||
"strings"
|
||||
"sync/atomic"
|
||||
"time"
|
||||
|
||||
"github.com/navidrome/navidrome/conf"
|
||||
"github.com/navidrome/navidrome/conf/configtest"
|
||||
"github.com/navidrome/navidrome/model"
|
||||
"github.com/navidrome/navidrome/utils/cache"
|
||||
. "github.com/onsi/ginkgo/v2"
|
||||
. "github.com/onsi/gomega"
|
||||
)
|
||||
|
||||
var _ = Describe("CacheWarmer", func() {
|
||||
var (
|
||||
fc *mockFileCache
|
||||
aw *mockArtwork
|
||||
)
|
||||
|
||||
BeforeEach(func() {
|
||||
DeferCleanup(configtest.SetupConfig())
|
||||
fc = &mockFileCache{}
|
||||
aw = &mockArtwork{}
|
||||
})
|
||||
|
||||
Context("initialization", func() {
|
||||
It("returns noop when cache is disabled", func() {
|
||||
fc.SetDisabled(true)
|
||||
cw := NewCacheWarmer(aw, fc)
|
||||
_, ok := cw.(*noopCacheWarmer)
|
||||
Expect(ok).To(BeTrue())
|
||||
})
|
||||
|
||||
It("returns noop when ImageCacheSize is 0", func() {
|
||||
conf.Server.ImageCacheSize = "0"
|
||||
cw := NewCacheWarmer(aw, fc)
|
||||
_, ok := cw.(*noopCacheWarmer)
|
||||
Expect(ok).To(BeTrue())
|
||||
})
|
||||
|
||||
It("returns noop when EnableArtworkPrecache is false", func() {
|
||||
conf.Server.EnableArtworkPrecache = false
|
||||
cw := NewCacheWarmer(aw, fc)
|
||||
_, ok := cw.(*noopCacheWarmer)
|
||||
Expect(ok).To(BeTrue())
|
||||
})
|
||||
|
||||
It("returns real implementation when properly configured", func() {
|
||||
conf.Server.ImageCacheSize = "100MB"
|
||||
conf.Server.EnableArtworkPrecache = true
|
||||
fc.SetDisabled(false)
|
||||
cw := NewCacheWarmer(aw, fc)
|
||||
_, ok := cw.(*cacheWarmer)
|
||||
Expect(ok).To(BeTrue())
|
||||
})
|
||||
})
|
||||
|
||||
Context("buffer management", func() {
|
||||
BeforeEach(func() {
|
||||
conf.Server.ImageCacheSize = "100MB"
|
||||
conf.Server.EnableArtworkPrecache = true
|
||||
fc.SetDisabled(false)
|
||||
})
|
||||
|
||||
It("drops buffered items when cache becomes disabled", func() {
|
||||
cw := NewCacheWarmer(aw, fc).(*cacheWarmer)
|
||||
cw.PreCache(model.MustParseArtworkID("al-test"))
|
||||
fc.SetDisabled(true)
|
||||
Eventually(func() int {
|
||||
cw.mutex.Lock()
|
||||
defer cw.mutex.Unlock()
|
||||
return len(cw.buffer)
|
||||
}).Should(Equal(0))
|
||||
})
|
||||
|
||||
It("adds multiple items to buffer", func() {
|
||||
fc.SetReady(false) // Make cache unavailable so items stay in buffer
|
||||
cw := NewCacheWarmer(aw, fc).(*cacheWarmer)
|
||||
cw.PreCache(model.MustParseArtworkID("al-1"))
|
||||
cw.PreCache(model.MustParseArtworkID("al-2"))
|
||||
cw.mutex.Lock()
|
||||
defer cw.mutex.Unlock()
|
||||
Expect(len(cw.buffer)).To(Equal(2))
|
||||
})
|
||||
|
||||
It("deduplicates items in buffer", func() {
|
||||
cw := NewCacheWarmer(aw, fc).(*cacheWarmer)
|
||||
cw.PreCache(model.MustParseArtworkID("al-1"))
|
||||
cw.PreCache(model.MustParseArtworkID("al-1"))
|
||||
cw.mutex.Lock()
|
||||
defer cw.mutex.Unlock()
|
||||
Expect(len(cw.buffer)).To(Equal(1))
|
||||
})
|
||||
})
|
||||
|
||||
Context("error handling", func() {
|
||||
BeforeEach(func() {
|
||||
conf.Server.ImageCacheSize = "100MB"
|
||||
conf.Server.EnableArtworkPrecache = true
|
||||
fc.SetDisabled(false)
|
||||
})
|
||||
|
||||
It("continues processing after artwork retrieval error", func() {
|
||||
aw.err = errors.New("artwork error")
|
||||
cw := NewCacheWarmer(aw, fc).(*cacheWarmer)
|
||||
cw.PreCache(model.MustParseArtworkID("al-error"))
|
||||
cw.PreCache(model.MustParseArtworkID("al-1"))
|
||||
|
||||
Eventually(func() int {
|
||||
cw.mutex.Lock()
|
||||
defer cw.mutex.Unlock()
|
||||
return len(cw.buffer)
|
||||
}).Should(Equal(0))
|
||||
})
|
||||
|
||||
It("continues processing after cache error", func() {
|
||||
fc.err = errors.New("cache error")
|
||||
cw := NewCacheWarmer(aw, fc).(*cacheWarmer)
|
||||
cw.PreCache(model.MustParseArtworkID("al-error"))
|
||||
cw.PreCache(model.MustParseArtworkID("al-1"))
|
||||
|
||||
Eventually(func() int {
|
||||
cw.mutex.Lock()
|
||||
defer cw.mutex.Unlock()
|
||||
return len(cw.buffer)
|
||||
}).Should(Equal(0))
|
||||
})
|
||||
})
|
||||
|
||||
Context("background processing", func() {
|
||||
BeforeEach(func() {
|
||||
conf.Server.ImageCacheSize = "100MB"
|
||||
conf.Server.EnableArtworkPrecache = true
|
||||
fc.SetDisabled(false)
|
||||
})
|
||||
|
||||
It("processes items in batches", func() {
|
||||
cw := NewCacheWarmer(aw, fc).(*cacheWarmer)
|
||||
for i := 0; i < 5; i++ {
|
||||
cw.PreCache(model.MustParseArtworkID(fmt.Sprintf("al-%d", i)))
|
||||
}
|
||||
|
||||
Eventually(func() int {
|
||||
cw.mutex.Lock()
|
||||
defer cw.mutex.Unlock()
|
||||
return len(cw.buffer)
|
||||
}).Should(Equal(0))
|
||||
})
|
||||
|
||||
It("wakes up on new items", func() {
|
||||
cw := NewCacheWarmer(aw, fc).(*cacheWarmer)
|
||||
|
||||
// Add first batch
|
||||
cw.PreCache(model.MustParseArtworkID("al-1"))
|
||||
Eventually(func() int {
|
||||
cw.mutex.Lock()
|
||||
defer cw.mutex.Unlock()
|
||||
return len(cw.buffer)
|
||||
}).Should(Equal(0))
|
||||
|
||||
// Add second batch
|
||||
cw.PreCache(model.MustParseArtworkID("al-2"))
|
||||
Eventually(func() int {
|
||||
cw.mutex.Lock()
|
||||
defer cw.mutex.Unlock()
|
||||
return len(cw.buffer)
|
||||
}).Should(Equal(0))
|
||||
})
|
||||
})
|
||||
})
|
||||
|
||||
type mockArtwork struct {
|
||||
err error
|
||||
}
|
||||
|
||||
func (m *mockArtwork) Get(ctx context.Context, artID model.ArtworkID, size int, square bool) (io.ReadCloser, time.Time, error) {
|
||||
if m.err != nil {
|
||||
return nil, time.Time{}, m.err
|
||||
}
|
||||
return io.NopCloser(strings.NewReader("test")), time.Now(), nil
|
||||
}
|
||||
|
||||
func (m *mockArtwork) GetOrPlaceholder(ctx context.Context, id string, size int, square bool) (io.ReadCloser, time.Time, error) {
|
||||
return m.Get(ctx, model.ArtworkID{}, size, square)
|
||||
}
|
||||
|
||||
type mockFileCache struct {
|
||||
disabled atomic.Bool
|
||||
ready atomic.Bool
|
||||
err error
|
||||
}
|
||||
|
||||
func (f *mockFileCache) Get(ctx context.Context, item cache.Item) (*cache.CachedStream, error) {
|
||||
if f.err != nil {
|
||||
return nil, f.err
|
||||
}
|
||||
return &cache.CachedStream{Reader: io.NopCloser(strings.NewReader("cached"))}, nil
|
||||
}
|
||||
|
||||
func (f *mockFileCache) Available(ctx context.Context) bool {
|
||||
return f.ready.Load() && !f.disabled.Load()
|
||||
}
|
||||
|
||||
func (f *mockFileCache) Disabled(ctx context.Context) bool {
|
||||
return f.disabled.Load()
|
||||
}
|
||||
|
||||
func (f *mockFileCache) SetDisabled(v bool) {
|
||||
f.disabled.Store(v)
|
||||
f.ready.Store(true)
|
||||
}
|
||||
|
||||
func (f *mockFileCache) SetReady(v bool) {
|
||||
f.ready.Store(v)
|
||||
}
|
||||
@@ -5,34 +5,52 @@ import (
|
||||
"crypto/md5"
|
||||
"fmt"
|
||||
"io"
|
||||
"path/filepath"
|
||||
"slices"
|
||||
"strings"
|
||||
"time"
|
||||
|
||||
"github.com/Masterminds/squirrel"
|
||||
"github.com/navidrome/navidrome/conf"
|
||||
"github.com/navidrome/navidrome/core"
|
||||
"github.com/navidrome/navidrome/core/external"
|
||||
"github.com/navidrome/navidrome/core/ffmpeg"
|
||||
"github.com/navidrome/navidrome/model"
|
||||
)
|
||||
|
||||
type albumArtworkReader struct {
|
||||
cacheKey
|
||||
a *artwork
|
||||
em core.ExternalMetadata
|
||||
album model.Album
|
||||
a *artwork
|
||||
provider external.Provider
|
||||
album model.Album
|
||||
updatedAt *time.Time
|
||||
imgFiles []string
|
||||
rootFolder string
|
||||
}
|
||||
|
||||
func newAlbumArtworkReader(ctx context.Context, artwork *artwork, artID model.ArtworkID, em core.ExternalMetadata) (*albumArtworkReader, error) {
|
||||
func newAlbumArtworkReader(ctx context.Context, artwork *artwork, artID model.ArtworkID, provider external.Provider) (*albumArtworkReader, error) {
|
||||
al, err := artwork.ds.Album(ctx).Get(artID.ID)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
_, imgFiles, imagesUpdateAt, err := loadAlbumFoldersPaths(ctx, artwork.ds, *al)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
a := &albumArtworkReader{
|
||||
a: artwork,
|
||||
em: em,
|
||||
album: *al,
|
||||
a: artwork,
|
||||
provider: provider,
|
||||
album: *al,
|
||||
updatedAt: imagesUpdateAt,
|
||||
imgFiles: imgFiles,
|
||||
rootFolder: core.AbsolutePath(ctx, artwork.ds, al.LibraryID, ""),
|
||||
}
|
||||
a.cacheKey.artID = artID
|
||||
a.cacheKey.lastUpdate = al.UpdatedAt
|
||||
if a.updatedAt != nil && a.updatedAt.After(al.UpdatedAt) {
|
||||
a.cacheKey.lastUpdate = *a.updatedAt
|
||||
} else {
|
||||
a.cacheKey.lastUpdate = al.UpdatedAt
|
||||
}
|
||||
return a, nil
|
||||
}
|
||||
|
||||
@@ -63,12 +81,43 @@ func (a *albumArtworkReader) fromCoverArtPriority(ctx context.Context, ffmpeg ff
|
||||
pattern = strings.TrimSpace(pattern)
|
||||
switch {
|
||||
case pattern == "embedded":
|
||||
ff = append(ff, fromTag(a.album.EmbedArtPath), fromFFmpegTag(ctx, ffmpeg, a.album.EmbedArtPath))
|
||||
embedArtPath := filepath.Join(a.rootFolder, a.album.EmbedArtPath)
|
||||
ff = append(ff, fromTag(ctx, embedArtPath), fromFFmpegTag(ctx, ffmpeg, embedArtPath))
|
||||
case pattern == "external":
|
||||
ff = append(ff, fromAlbumExternalSource(ctx, a.album, a.em))
|
||||
case a.album.ImageFiles != "":
|
||||
ff = append(ff, fromExternalFile(ctx, a.album.ImageFiles, pattern))
|
||||
ff = append(ff, fromAlbumExternalSource(ctx, a.album, a.provider))
|
||||
case len(a.imgFiles) > 0:
|
||||
ff = append(ff, fromExternalFile(ctx, a.imgFiles, pattern))
|
||||
}
|
||||
}
|
||||
return ff
|
||||
}
|
||||
|
||||
func loadAlbumFoldersPaths(ctx context.Context, ds model.DataStore, albums ...model.Album) ([]string, []string, *time.Time, error) {
|
||||
var folderIDs []string
|
||||
for _, album := range albums {
|
||||
folderIDs = append(folderIDs, album.FolderIDs...)
|
||||
}
|
||||
folders, err := ds.Folder(ctx).GetAll(model.QueryOptions{Filters: squirrel.Eq{"folder.id": folderIDs, "missing": false}})
|
||||
if err != nil {
|
||||
return nil, nil, nil, err
|
||||
}
|
||||
var paths []string
|
||||
var imgFiles []string
|
||||
var updatedAt time.Time
|
||||
for _, f := range folders {
|
||||
path := f.AbsolutePath()
|
||||
paths = append(paths, path)
|
||||
if f.ImagesUpdatedAt.After(updatedAt) {
|
||||
updatedAt = f.ImagesUpdatedAt
|
||||
}
|
||||
for _, img := range f.ImageFiles {
|
||||
imgFiles = append(imgFiles, filepath.Join(path, img))
|
||||
}
|
||||
}
|
||||
|
||||
// Sort image files to ensure consistent selection of cover art
|
||||
// This prioritizes files from lower-numbered disc folders by sorting the paths
|
||||
slices.Sort(imgFiles)
|
||||
|
||||
return paths, imgFiles, &updatedAt, nil
|
||||
}
|
||||
|
||||
core/artwork/reader_album_test.go (new file, 76 lines)
@@ -0,0 +1,76 @@
|
||||
package artwork
|
||||
|
||||
import (
|
||||
"context"
|
||||
"path/filepath"
|
||||
"time"
|
||||
|
||||
"github.com/navidrome/navidrome/model"
|
||||
. "github.com/onsi/ginkgo/v2"
|
||||
. "github.com/onsi/gomega"
|
||||
)
|
||||
|
||||
var _ = Describe("Album Artwork Reader", func() {
|
||||
Describe("loadAlbumFoldersPaths", func() {
|
||||
var (
|
||||
ctx context.Context
|
||||
ds *fakeDataStore
|
||||
repo *fakeFolderRepo
|
||||
album model.Album
|
||||
now time.Time
|
||||
expectedAt time.Time
|
||||
)
|
||||
|
||||
BeforeEach(func() {
|
||||
ctx = context.Background()
|
||||
now = time.Now().Truncate(time.Second)
|
||||
expectedAt = now.Add(5 * time.Minute)
|
||||
|
||||
// Set up the test folders with image files
|
||||
repo = &fakeFolderRepo{
|
||||
result: []model.Folder{
|
||||
{
|
||||
Path: "Artist/Album/Disc1",
|
||||
ImagesUpdatedAt: expectedAt,
|
||||
ImageFiles: []string{"cover.jpg", "back.jpg"},
|
||||
},
|
||||
{
|
||||
Path: "Artist/Album/Disc2",
|
||||
ImagesUpdatedAt: now,
|
||||
ImageFiles: []string{"cover.jpg"},
|
||||
},
|
||||
{
|
||||
Path: "Artist/Album/Disc10",
|
||||
ImagesUpdatedAt: now,
|
||||
ImageFiles: []string{"cover.jpg"},
|
||||
},
|
||||
},
|
||||
err: nil,
|
||||
}
|
||||
ds = &fakeDataStore{
|
||||
folderRepo: repo,
|
||||
}
|
||||
album = model.Album{
|
||||
ID: "album1",
|
||||
Name: "Album",
|
||||
FolderIDs: []string{"folder1", "folder2", "folder3"},
|
||||
}
|
||||
})
|
||||
|
||||
It("returns sorted image files", func() {
|
||||
_, imgFiles, imagesUpdatedAt, err := loadAlbumFoldersPaths(ctx, ds, album)
|
||||
|
||||
Expect(err).ToNot(HaveOccurred())
|
||||
Expect(*imagesUpdatedAt).To(Equal(expectedAt))
|
||||
|
||||
// Check that image files are sorted alphabetically
|
||||
Expect(imgFiles).To(HaveLen(4))
|
||||
|
||||
// The files should be sorted by full path
|
||||
Expect(imgFiles[0]).To(Equal(filepath.FromSlash("Artist/Album/Disc1/back.jpg")))
|
||||
Expect(imgFiles[1]).To(Equal(filepath.FromSlash("Artist/Album/Disc1/cover.jpg")))
|
||||
Expect(imgFiles[2]).To(Equal(filepath.FromSlash("Artist/Album/Disc10/cover.jpg")))
|
||||
Expect(imgFiles[3]).To(Equal(filepath.FromSlash("Artist/Album/Disc2/cover.jpg")))
|
||||
})
|
||||
})
|
||||
})
|
||||
@@ -13,52 +13,65 @@ import (
|
||||
|
||||
"github.com/Masterminds/squirrel"
|
||||
"github.com/navidrome/navidrome/conf"
|
||||
"github.com/navidrome/navidrome/consts"
|
||||
"github.com/navidrome/navidrome/core"
|
||||
"github.com/navidrome/navidrome/core/external"
|
||||
"github.com/navidrome/navidrome/log"
|
||||
"github.com/navidrome/navidrome/model"
|
||||
"github.com/navidrome/navidrome/utils/str"
|
||||
)
|
||||
|
||||
const (
|
||||
// maxArtistFolderTraversalDepth defines how many directory levels to search
|
||||
// when looking for artist images (artist folder + parent directories)
|
||||
maxArtistFolderTraversalDepth = 3
|
||||
)
|
||||
|
||||
type artistReader struct {
|
||||
cacheKey
|
||||
a *artwork
|
||||
em core.ExternalMetadata
|
||||
provider external.Provider
|
||||
artist model.Artist
|
||||
artistFolder string
|
||||
files string
|
||||
imgFiles []string
|
||||
}
|
||||
|
||||
func newArtistReader(ctx context.Context, artwork *artwork, artID model.ArtworkID, em core.ExternalMetadata) (*artistReader, error) {
|
||||
func newArtistArtworkReader(ctx context.Context, artwork *artwork, artID model.ArtworkID, provider external.Provider) (*artistReader, error) {
|
||||
ar, err := artwork.ds.Artist(ctx).Get(artID.ID)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
als, err := artwork.ds.Album(ctx).GetAll(model.QueryOptions{Filters: squirrel.Eq{"album_artist_id": artID.ID}})
|
||||
// Only consider albums where the artist is the sole album artist.
|
||||
als, err := artwork.ds.Album(ctx).GetAll(model.QueryOptions{
|
||||
Filters: squirrel.And{
|
||||
squirrel.Eq{"album_artist_id": artID.ID},
|
||||
squirrel.Eq{"jsonb_array_length(participants->'albumartist')": 1},
|
||||
},
|
||||
})
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
albumPaths, imgFiles, imagesUpdatedAt, err := loadAlbumFoldersPaths(ctx, artwork.ds, als...)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
artistFolder, artistFolderLastUpdate, err := loadArtistFolder(ctx, artwork.ds, als, albumPaths)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
a := &artistReader{
|
||||
a: artwork,
|
||||
em: em,
|
||||
artist: *ar,
|
||||
a: artwork,
|
||||
provider: provider,
|
||||
artist: *ar,
|
||||
artistFolder: artistFolder,
|
||||
imgFiles: imgFiles,
|
||||
}
|
||||
// TODO Find a way to factor in the ExternalInfoUpdatedAt in the cache key. Problem is that it can
|
||||
// change _after_ retrieving from external sources, making the key invalid
|
||||
//a.cacheKey.lastUpdate = ar.ExternalInfoUpdatedAt
|
||||
var files []string
|
||||
var paths []string
|
||||
for _, al := range als {
|
||||
files = append(files, al.ImageFiles)
|
||||
paths = append(paths, splitList(al.Paths)...)
|
||||
if a.cacheKey.lastUpdate.Before(al.UpdatedAt) {
|
||||
a.cacheKey.lastUpdate = al.UpdatedAt
|
||||
}
|
||||
}
|
||||
a.files = strings.Join(files, consts.Zwsp)
|
||||
a.artistFolder = str.LongestCommonPrefix(paths)
|
||||
if !strings.HasSuffix(a.artistFolder, string(filepath.Separator)) {
|
||||
a.artistFolder, _ = filepath.Split(a.artistFolder)
|
||||
|
||||
a.cacheKey.lastUpdate = *imagesUpdatedAt
|
||||
if artistFolderLastUpdate.After(a.cacheKey.lastUpdate) {
|
||||
a.cacheKey.lastUpdate = artistFolderLastUpdate
|
||||
}
|
||||
a.cacheKey.artID = artID
|
||||
return a, nil
|
||||
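
The albums query near the top of this constructor filters on a JSON column. As a rough, hedged illustration of what that squirrel expression renders to (the exact placeholder and parenthesis formatting may differ), a tiny standalone program:

package main

import (
    "fmt"

    sq "github.com/Masterminds/squirrel"
)

func main() {
    // Same filter shape as in newArtistArtworkReader above, with a made-up artist ID.
    filter := sq.And{
        sq.Eq{"album_artist_id": "ar-123"},
        sq.Eq{"jsonb_array_length(participants->'albumartist')": 1},
    }
    sql, args, _ := filter.ToSql()
    fmt.Println(sql)  // roughly: (album_artist_id = ? AND jsonb_array_length(participants->'albumartist') = ?)
    fmt.Println(args) // [ar-123 1]
}
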
@@ -89,9 +102,9 @@ func (a *artistReader) fromArtistArtPriority(ctx context.Context, priority strin
|
||||
pattern = strings.TrimSpace(pattern)
|
||||
switch {
|
||||
case pattern == "external":
|
||||
ff = append(ff, fromArtistExternalSource(ctx, a.artist, a.em))
|
||||
ff = append(ff, fromArtistExternalSource(ctx, a.artist, a.provider))
|
||||
case strings.HasPrefix(pattern, "album/"):
|
||||
ff = append(ff, fromExternalFile(ctx, a.files, strings.TrimPrefix(pattern, "album/")))
|
||||
ff = append(ff, fromExternalFile(ctx, a.imgFiles, strings.TrimPrefix(pattern, "album/")))
|
||||
default:
|
||||
ff = append(ff, fromArtistFolder(ctx, a.artistFolder, pattern))
|
||||
}
|
||||
@@ -101,27 +114,73 @@ func (a *artistReader) fromArtistArtPriority(ctx context.Context, priority strin
|
||||
|
||||
func fromArtistFolder(ctx context.Context, artistFolder string, pattern string) sourceFunc {
|
||||
return func() (io.ReadCloser, string, error) {
|
||||
fsys := os.DirFS(artistFolder)
|
||||
matches, err := fs.Glob(fsys, pattern)
|
||||
if err != nil {
|
||||
log.Warn(ctx, "Error matching artist image pattern", "pattern", pattern, "folder", artistFolder)
|
||||
return nil, "", err
|
||||
}
|
||||
if len(matches) == 0 {
|
||||
return nil, "", fmt.Errorf(`no matches for '%s' in '%s'`, pattern, artistFolder)
|
||||
}
|
||||
for _, m := range matches {
|
||||
filePath := filepath.Join(artistFolder, m)
|
||||
if !model.IsImageFile(m) {
|
||||
continue
|
||||
current := artistFolder
|
||||
for i := 0; i < maxArtistFolderTraversalDepth; i++ {
|
||||
if reader, path, err := findImageInFolder(ctx, current, pattern); err == nil {
|
||||
return reader, path, nil
|
||||
}
|
||||
f, err := os.Open(filePath)
|
||||
if err != nil {
|
||||
log.Warn(ctx, "Could not open cover art file", "file", filePath, err)
|
||||
return nil, "", err
|
||||
|
||||
parent := filepath.Dir(current)
|
||||
if parent == current {
|
||||
break
|
||||
}
|
||||
return f, filePath, nil
|
||||
current = parent
|
||||
}
|
||||
return nil, "", nil
|
||||
return nil, "", fmt.Errorf(`no matches for '%s' in '%s' or its parent directories`, pattern, artistFolder)
|
||||
}
|
||||
}
|
||||
|
||||
func findImageInFolder(ctx context.Context, folder, pattern string) (io.ReadCloser, string, error) {
|
||||
log.Trace(ctx, "looking for artist image", "pattern", pattern, "folder", folder)
|
||||
fsys := os.DirFS(folder)
|
||||
matches, err := fs.Glob(fsys, pattern)
|
||||
if err != nil {
|
||||
log.Warn(ctx, "Error matching artist image pattern", "pattern", pattern, "folder", folder, err)
|
||||
return nil, "", err
|
||||
}
|
||||
|
||||
for _, m := range matches {
|
||||
if !model.IsImageFile(m) {
|
||||
continue
|
||||
}
|
||||
filePath := filepath.Join(folder, m)
|
||||
f, err := os.Open(filePath)
|
||||
if err != nil {
|
||||
log.Warn(ctx, "Could not open cover art file", "file", filePath, err)
|
||||
continue
|
||||
}
|
||||
return f, filePath, nil
|
||||
}
|
||||
|
||||
return nil, "", fmt.Errorf(`no matches for '%s' in '%s'`, pattern, folder)
|
||||
}
|
||||
|
||||
func loadArtistFolder(ctx context.Context, ds model.DataStore, albums model.Albums, paths []string) (string, time.Time, error) {
|
||||
if len(albums) == 0 {
|
||||
return "", time.Time{}, nil
|
||||
}
|
||||
libID := albums[0].LibraryID // Just need one of the albums, as they should all be in the same Library - for now! TODO: Support multiple libraries
|
||||
|
||||
folderPath := str.LongestCommonPrefix(paths)
|
||||
if !strings.HasSuffix(folderPath, string(filepath.Separator)) {
|
||||
folderPath, _ = filepath.Split(folderPath)
|
||||
}
|
||||
folderPath = filepath.Dir(folderPath)
|
||||
|
||||
// Manipulate the path to get the folder ID
|
||||
// TODO: This is a bit hacky, but it's the easiest way to get the folder ID, ATM
|
||||
libPath := core.AbsolutePath(ctx, ds, libID, "")
|
||||
folderID := model.FolderID(model.Library{ID: libID, Path: libPath}, folderPath)
|
||||
|
||||
log.Trace(ctx, "Calculating artist folder details", "folderPath", folderPath, "folderID", folderID,
|
||||
"libPath", libPath, "libID", libID, "albumPaths", paths)
|
||||
|
||||
// Get the last update time for the folder
|
||||
folders, err := ds.Folder(ctx).GetAll(model.QueryOptions{Filters: squirrel.Eq{"folder.id": folderID, "missing": false}})
|
||||
if err != nil || len(folders) == 0 {
|
||||
log.Warn(ctx, "Could not find folder for artist", "folderPath", folderPath, "id", folderID,
|
||||
"libPath", libPath, "libID", libID, err)
|
||||
return "", time.Time{}, err
|
||||
}
|
||||
return folderPath, folders[0].ImagesUpdatedAt, nil
|
||||
}
|
||||
|
||||
core/artwork/reader_artist_test.go (new file, 391 lines)
@@ -0,0 +1,391 @@
|
||||
package artwork
|
||||
|
||||
import (
|
||||
"context"
|
||||
"errors"
|
||||
"io"
|
||||
"os"
|
||||
"path/filepath"
|
||||
"time"
|
||||
|
||||
"github.com/navidrome/navidrome/core"
|
||||
"github.com/navidrome/navidrome/model"
|
||||
. "github.com/onsi/ginkgo/v2"
|
||||
. "github.com/onsi/gomega"
|
||||
)
|
||||
|
||||
var _ = Describe("artistArtworkReader", func() {
|
||||
var _ = Describe("loadArtistFolder", func() {
|
||||
var (
|
||||
ctx context.Context
|
||||
fds *fakeDataStore
|
||||
repo *fakeFolderRepo
|
||||
albums model.Albums
|
||||
paths []string
|
||||
now time.Time
|
||||
expectedUpdTime time.Time
|
||||
)
|
||||
|
||||
BeforeEach(func() {
|
||||
ctx = context.Background()
|
||||
DeferCleanup(stubCoreAbsolutePath())
|
||||
|
||||
now = time.Now().Truncate(time.Second)
|
||||
expectedUpdTime = now.Add(5 * time.Minute)
|
||||
repo = &fakeFolderRepo{
|
||||
result: []model.Folder{
|
||||
{
|
||||
ImagesUpdatedAt: expectedUpdTime,
|
||||
},
|
||||
},
|
||||
err: nil,
|
||||
}
|
||||
fds = &fakeDataStore{
|
||||
folderRepo: repo,
|
||||
}
|
||||
albums = model.Albums{
|
||||
{LibraryID: 1, ID: "album1", Name: "Album 1"},
|
||||
}
|
||||
})
|
||||
|
||||
When("no albums provided", func() {
|
||||
It("returns empty and zero time", func() {
|
||||
folder, upd, err := loadArtistFolder(ctx, fds, model.Albums{}, []string{"/dummy/path"})
|
||||
Expect(err).ToNot(HaveOccurred())
|
||||
Expect(folder).To(BeEmpty())
|
||||
Expect(upd).To(BeZero())
|
||||
})
|
||||
})
|
||||
|
||||
When("artist has only one album", func() {
|
||||
It("returns the parent folder", func() {
|
||||
paths = []string{
|
||||
filepath.FromSlash("/music/artist/album1"),
|
||||
}
|
||||
folder, upd, err := loadArtistFolder(ctx, fds, albums, paths)
|
||||
Expect(err).ToNot(HaveOccurred())
|
||||
Expect(folder).To(Equal("/music/artist"))
|
||||
Expect(upd).To(Equal(expectedUpdTime))
|
||||
})
|
||||
})
|
||||
|
||||
When("the artist have multiple albums", func() {
|
||||
It("returns the common prefix for the albums paths", func() {
|
||||
paths = []string{
|
||||
filepath.FromSlash("/music/library/artist/one"),
|
||||
filepath.FromSlash("/music/library/artist/two"),
|
||||
}
|
||||
folder, upd, err := loadArtistFolder(ctx, fds, albums, paths)
|
||||
Expect(err).ToNot(HaveOccurred())
|
||||
Expect(folder).To(Equal(filepath.FromSlash("/music/library/artist")))
|
||||
Expect(upd).To(Equal(expectedUpdTime))
|
||||
})
|
||||
})
|
||||
|
||||
When("the album paths contain same prefix", func() {
|
||||
It("returns the common prefix", func() {
|
||||
paths = []string{
|
||||
filepath.FromSlash("/music/artist/album1"),
|
||||
filepath.FromSlash("/music/artist/album2"),
|
||||
}
|
||||
folder, upd, err := loadArtistFolder(ctx, fds, albums, paths)
|
||||
Expect(err).ToNot(HaveOccurred())
|
||||
Expect(folder).To(Equal("/music/artist"))
|
||||
Expect(upd).To(Equal(expectedUpdTime))
|
||||
})
|
||||
})
|
||||
|
||||
When("ds.Folder().GetAll returns an error", func() {
|
||||
It("returns an error", func() {
|
||||
paths = []string{
|
||||
filepath.FromSlash("/music/artist/album1"),
|
||||
filepath.FromSlash("/music/artist/album2"),
|
||||
}
|
||||
repo.err = errors.New("fake error")
|
||||
folder, upd, err := loadArtistFolder(ctx, fds, albums, paths)
|
||||
Expect(err).To(MatchError(ContainSubstring("fake error")))
|
||||
// Folder and time are empty on error.
|
||||
Expect(folder).To(BeEmpty())
|
||||
Expect(upd).To(BeZero())
|
||||
})
|
||||
})
|
||||
})
|
||||
|
||||
var _ = Describe("fromArtistFolder", func() {
|
||||
var (
|
||||
ctx context.Context
|
||||
tempDir string
|
||||
testFunc sourceFunc
|
||||
)
|
||||
|
||||
BeforeEach(func() {
|
||||
ctx = context.Background()
|
||||
tempDir = GinkgoT().TempDir()
|
||||
})
|
||||
|
||||
When("artist folder contains matching image", func() {
|
||||
BeforeEach(func() {
|
||||
// Create test structure: /temp/artist/artist.jpg
|
||||
artistDir := filepath.Join(tempDir, "artist")
|
||||
Expect(os.MkdirAll(artistDir, 0755)).To(Succeed())
|
||||
|
||||
artistImagePath := filepath.Join(artistDir, "artist.jpg")
|
||||
Expect(os.WriteFile(artistImagePath, []byte("fake image data"), 0600)).To(Succeed())
|
||||
|
||||
testFunc = fromArtistFolder(ctx, artistDir, "artist.*")
|
||||
})
|
||||
|
||||
It("finds and returns the image", func() {
|
||||
reader, path, err := testFunc()
|
||||
Expect(err).ToNot(HaveOccurred())
|
||||
Expect(reader).ToNot(BeNil())
|
||||
Expect(path).To(ContainSubstring("artist.jpg"))
|
||||
|
||||
// Verify we can read the content
|
||||
data, err := io.ReadAll(reader)
|
||||
Expect(err).ToNot(HaveOccurred())
|
||||
Expect(string(data)).To(Equal("fake image data"))
|
||||
reader.Close()
|
||||
})
|
||||
})
|
||||
|
||||
When("artist folder is empty but parent contains image", func() {
|
||||
BeforeEach(func() {
|
||||
// Create test structure: /temp/parent/artist.jpg and /temp/parent/artist/album/
|
||||
parentDir := filepath.Join(tempDir, "parent")
|
||||
artistDir := filepath.Join(parentDir, "artist")
|
||||
albumDir := filepath.Join(artistDir, "album")
|
||||
Expect(os.MkdirAll(albumDir, 0755)).To(Succeed())
|
||||
|
||||
// Put artist image in parent directory
|
||||
artistImagePath := filepath.Join(parentDir, "artist.jpg")
|
||||
Expect(os.WriteFile(artistImagePath, []byte("parent image"), 0600)).To(Succeed())
|
||||
|
||||
testFunc = fromArtistFolder(ctx, artistDir, "artist.*")
|
||||
})
|
||||
|
||||
It("finds image in parent directory", func() {
|
||||
reader, path, err := testFunc()
|
||||
Expect(err).ToNot(HaveOccurred())
|
||||
Expect(reader).ToNot(BeNil())
|
||||
Expect(path).To(ContainSubstring("parent" + string(filepath.Separator) + "artist.jpg"))
|
||||
|
||||
data, err := io.ReadAll(reader)
|
||||
Expect(err).ToNot(HaveOccurred())
|
||||
Expect(string(data)).To(Equal("parent image"))
|
||||
reader.Close()
|
||||
})
|
||||
})
|
||||
|
||||
When("image is two levels up", func() {
|
||||
BeforeEach(func() {
|
||||
// Create test structure: /temp/grandparent/artist.jpg and /temp/grandparent/parent/artist/
|
||||
grandparentDir := filepath.Join(tempDir, "grandparent")
|
||||
parentDir := filepath.Join(grandparentDir, "parent")
|
||||
artistDir := filepath.Join(parentDir, "artist")
|
||||
Expect(os.MkdirAll(artistDir, 0755)).To(Succeed())
|
||||
|
||||
// Put artist image in grandparent directory
|
||||
artistImagePath := filepath.Join(grandparentDir, "artist.jpg")
|
||||
Expect(os.WriteFile(artistImagePath, []byte("grandparent image"), 0600)).To(Succeed())
|
||||
|
||||
testFunc = fromArtistFolder(ctx, artistDir, "artist.*")
|
||||
})
|
||||
|
||||
It("finds image in grandparent directory", func() {
|
||||
reader, path, err := testFunc()
|
||||
Expect(err).ToNot(HaveOccurred())
|
||||
Expect(reader).ToNot(BeNil())
|
||||
Expect(path).To(ContainSubstring("grandparent" + string(filepath.Separator) + "artist.jpg"))
|
||||
|
||||
data, err := io.ReadAll(reader)
|
||||
Expect(err).ToNot(HaveOccurred())
|
||||
Expect(string(data)).To(Equal("grandparent image"))
|
||||
reader.Close()
|
||||
})
|
||||
})
|
||||
|
||||
When("images exist at multiple levels", func() {
|
||||
BeforeEach(func() {
|
||||
// Create test structure with images at multiple levels
|
||||
grandparentDir := filepath.Join(tempDir, "grandparent")
|
||||
parentDir := filepath.Join(grandparentDir, "parent")
|
||||
artistDir := filepath.Join(parentDir, "artist")
|
||||
Expect(os.MkdirAll(artistDir, 0755)).To(Succeed())
|
||||
|
||||
// Put artist images at all levels
|
||||
Expect(os.WriteFile(filepath.Join(artistDir, "artist.jpg"), []byte("artist level"), 0600)).To(Succeed())
|
||||
Expect(os.WriteFile(filepath.Join(parentDir, "artist.jpg"), []byte("parent level"), 0600)).To(Succeed())
|
||||
Expect(os.WriteFile(filepath.Join(grandparentDir, "artist.jpg"), []byte("grandparent level"), 0600)).To(Succeed())
|
||||
|
||||
testFunc = fromArtistFolder(ctx, artistDir, "artist.*")
|
||||
})
|
||||
|
||||
It("prioritizes the closest (artist folder) image", func() {
|
||||
reader, path, err := testFunc()
|
||||
Expect(err).ToNot(HaveOccurred())
|
||||
Expect(reader).ToNot(BeNil())
|
||||
Expect(path).To(ContainSubstring("artist" + string(filepath.Separator) + "artist.jpg"))
|
||||
|
||||
data, err := io.ReadAll(reader)
|
||||
Expect(err).ToNot(HaveOccurred())
|
||||
Expect(string(data)).To(Equal("artist level"))
|
||||
reader.Close()
|
||||
})
|
||||
})
|
||||
|
||||
When("pattern matches multiple files", func() {
|
||||
BeforeEach(func() {
|
||||
artistDir := filepath.Join(tempDir, "artist")
|
||||
Expect(os.MkdirAll(artistDir, 0755)).To(Succeed())
|
||||
|
||||
// Create multiple matching files
|
||||
Expect(os.WriteFile(filepath.Join(artistDir, "artist.jpg"), []byte("jpg image"), 0600)).To(Succeed())
|
||||
Expect(os.WriteFile(filepath.Join(artistDir, "artist.png"), []byte("png image"), 0600)).To(Succeed())
|
||||
Expect(os.WriteFile(filepath.Join(artistDir, "artist.txt"), []byte("text file"), 0600)).To(Succeed())
|
||||
|
||||
testFunc = fromArtistFolder(ctx, artistDir, "artist.*")
|
||||
})
|
||||
|
||||
It("returns the first valid image file", func() {
|
||||
reader, path, err := testFunc()
|
||||
Expect(err).ToNot(HaveOccurred())
|
||||
Expect(reader).ToNot(BeNil())
|
||||
|
||||
// Should return an image file, not the text file
|
||||
Expect(path).To(SatisfyAny(
|
||||
ContainSubstring("artist.jpg"),
|
||||
ContainSubstring("artist.png"),
|
||||
))
|
||||
Expect(path).ToNot(ContainSubstring("artist.txt"))
|
||||
reader.Close()
|
||||
})
|
||||
})
|
||||
|
||||
When("no matching files exist anywhere", func() {
|
||||
BeforeEach(func() {
|
||||
artistDir := filepath.Join(tempDir, "artist")
|
||||
Expect(os.MkdirAll(artistDir, 0755)).To(Succeed())
|
||||
|
||||
// Create non-matching files
|
||||
Expect(os.WriteFile(filepath.Join(artistDir, "cover.jpg"), []byte("cover image"), 0600)).To(Succeed())
|
||||
|
||||
testFunc = fromArtistFolder(ctx, artistDir, "artist.*")
|
||||
})
|
||||
|
||||
It("returns an error", func() {
|
||||
reader, path, err := testFunc()
|
||||
Expect(err).To(HaveOccurred())
|
||||
Expect(reader).To(BeNil())
|
||||
Expect(path).To(BeEmpty())
|
||||
Expect(err.Error()).To(ContainSubstring("no matches for 'artist.*'"))
|
||||
Expect(err.Error()).To(ContainSubstring("parent directories"))
|
||||
})
|
||||
})
|
||||
|
||||
When("directory traversal reaches filesystem root", func() {
|
||||
BeforeEach(func() {
|
||||
// Start from a shallow directory to test root boundary
|
||||
artistDir := filepath.Join(tempDir, "artist")
|
||||
Expect(os.MkdirAll(artistDir, 0755)).To(Succeed())
|
||||
|
||||
testFunc = fromArtistFolder(ctx, artistDir, "artist.*")
|
||||
})
|
||||
|
||||
It("handles root boundary gracefully", func() {
|
||||
reader, path, err := testFunc()
|
||||
Expect(err).To(HaveOccurred())
|
||||
Expect(reader).To(BeNil())
|
||||
Expect(path).To(BeEmpty())
|
||||
// Should not panic or cause infinite loop
|
||||
})
|
||||
})
|
||||
|
||||
When("file exists but cannot be opened", func() {
|
||||
BeforeEach(func() {
|
||||
artistDir := filepath.Join(tempDir, "artist")
|
||||
Expect(os.MkdirAll(artistDir, 0755)).To(Succeed())
|
||||
|
||||
// Create a file that cannot be opened (permission denied)
|
||||
restrictedFile := filepath.Join(artistDir, "artist.jpg")
|
||||
Expect(os.WriteFile(restrictedFile, []byte("restricted"), 0600)).To(Succeed())
|
||||
|
||||
testFunc = fromArtistFolder(ctx, artistDir, "artist.*")
|
||||
})
|
||||
|
||||
It("logs warning and continues searching", func() {
|
||||
// This test depends on the ability to restrict file permissions
|
||||
// For now, we'll just ensure it doesn't panic and returns appropriate error
|
||||
reader, _, err := testFunc()
|
||||
// The file should be readable in test environment, so this will succeed
|
||||
// In a real scenario with permission issues, it would continue searching
|
||||
if err == nil {
|
||||
Expect(reader).ToNot(BeNil())
|
||||
reader.Close()
|
||||
}
|
||||
})
|
||||
})
|
||||
|
||||
When("single album artist scenario (original issue)", func() {
|
||||
BeforeEach(func() {
|
||||
// Simulate the exact folder structure from the issue:
|
||||
// /music/artist/album1/ (single album)
|
||||
// /music/artist/artist.jpg (artist image that should be found)
|
||||
artistDir := filepath.Join(tempDir, "music", "artist")
|
||||
albumDir := filepath.Join(artistDir, "album1")
|
||||
Expect(os.MkdirAll(albumDir, 0755)).To(Succeed())
|
||||
|
||||
// Create artist.jpg in the artist folder (this was not being found before)
|
||||
artistImagePath := filepath.Join(artistDir, "artist.jpg")
|
||||
Expect(os.WriteFile(artistImagePath, []byte("single album artist image"), 0600)).To(Succeed())
|
||||
|
||||
// The fromArtistFolder is called with the artist folder path
|
||||
testFunc = fromArtistFolder(ctx, artistDir, "artist.*")
|
||||
})
|
||||
|
||||
It("finds artist.jpg in artist folder for single album artist", func() {
|
||||
reader, path, err := testFunc()
|
||||
Expect(err).ToNot(HaveOccurred())
|
||||
Expect(reader).ToNot(BeNil())
|
||||
Expect(path).To(ContainSubstring("artist.jpg"))
|
||||
Expect(path).To(ContainSubstring("artist"))
|
||||
|
||||
// Verify the content
|
||||
data, err := io.ReadAll(reader)
|
||||
Expect(err).ToNot(HaveOccurred())
|
||||
Expect(string(data)).To(Equal("single album artist image"))
|
||||
reader.Close()
|
||||
})
|
||||
})
|
||||
})
|
||||
})
|
||||
|
||||
type fakeFolderRepo struct {
|
||||
model.FolderRepository
|
||||
result []model.Folder
|
||||
err error
|
||||
}
|
||||
|
||||
func (f *fakeFolderRepo) GetAll(...model.QueryOptions) ([]model.Folder, error) {
|
||||
return f.result, f.err
|
||||
}
|
||||
|
||||
type fakeDataStore struct {
|
||||
model.DataStore
|
||||
folderRepo *fakeFolderRepo
|
||||
}
|
||||
|
||||
func (fds *fakeDataStore) Folder(_ context.Context) model.FolderRepository {
|
||||
return fds.folderRepo
|
||||
}
|
||||
|
||||
func stubCoreAbsolutePath() func() {
|
||||
// Override core.AbsolutePath to return a fixed string during tests.
|
||||
original := core.AbsolutePath
|
||||
core.AbsolutePath = func(_ context.Context, ds model.DataStore, libID int, p string) string {
|
||||
return filepath.FromSlash("/music")
|
||||
}
|
||||
return func() {
|
||||
core.AbsolutePath = original
|
||||
}
|
||||
}
|
||||
@@ -54,9 +54,10 @@ func (a *mediafileArtworkReader) LastUpdated() time.Time {
|
||||
func (a *mediafileArtworkReader) Reader(ctx context.Context) (io.ReadCloser, string, error) {
|
||||
var ff []sourceFunc
|
||||
if a.mediafile.CoverArtID().Kind == model.KindMediaFileArtwork {
|
||||
path := a.mediafile.AbsolutePath()
|
||||
ff = []sourceFunc{
|
||||
fromTag(a.mediafile.Path),
|
||||
fromFFmpegTag(ctx, a.a.ffmpeg, a.mediafile.Path),
|
||||
fromTag(ctx, path),
|
||||
fromFFmpegTag(ctx, a.a.ffmpeg, path),
|
||||
}
|
||||
}
|
||||
ff = append(ff, fromAlbum(ctx, a.a, a.mediafile.AlbumCoverArtID()))
|
||||
|
||||
@@ -61,7 +61,7 @@ func (a *playlistArtworkReader) fromGeneratedTiledCover(ctx context.Context) sou
    }
}

func toArtworkIDs(albumIDs []string) []model.ArtworkID {
func toAlbumArtworkIDs(albumIDs []string) []model.ArtworkID {
    return slice.Map(albumIDs, func(id string) model.ArtworkID {
        al := model.Album{ID: id}
        return al.CoverArtID()
@@ -75,24 +75,21 @@ func (a *playlistArtworkReader) loadTiles(ctx context.Context) ([]image.Image, e
        log.Error(ctx, "Error getting album IDs for playlist", "id", a.pl.ID, "name", a.pl.Name, err)
        return nil, err
    }
    ids := toArtworkIDs(albumIds)
    ids := toAlbumArtworkIDs(albumIds)

    var tiles []image.Image
    for len(tiles) < 4 {
        if len(ids) == 0 {
    for _, id := range ids {
        r, _, err := fromAlbum(ctx, a.a, id)()
        if err == nil {
            tile, err := a.createTile(ctx, r)
            if err == nil {
                tiles = append(tiles, tile)
            }
            _ = r.Close()
        }
        if len(tiles) == 4 {
            break
        }
        id := ids[len(ids)-1]
        ids = ids[0 : len(ids)-1]
        r, _, err := fromAlbum(ctx, a.a, id)()
        if err != nil {
            continue
        }
        tile, err := a.createTile(ctx, r)
        if err == nil {
            tiles = append(tiles, tile)
        }
        _ = r.Close()
    }
    switch len(tiles) {
    case 0:

@@ -59,25 +59,21 @@ func (a *resizedArtworkReader) Reader(ctx context.Context) (io.ReadCloser, strin
    if err != nil {
        return nil, "", err
    }

    // Keep a copy of the original data. In case we can't resize it, send it as is
    buf := new(bytes.Buffer)
    r := io.TeeReader(orig, buf)
    defer orig.Close()

    resized, origSize, err := resizeImage(r, a.size, a.square)
    resized, origSize, err := resizeImage(orig, a.size, a.square)
    if resized == nil {
        log.Trace(ctx, "Image smaller than requested size", "artID", a.artID, "original", origSize, "resized", a.size)
        log.Trace(ctx, "Image smaller than requested size", "artID", a.artID, "original", origSize, "resized", a.size, "square", a.square)
    } else {
        log.Trace(ctx, "Resizing artwork", "artID", a.artID, "original", origSize, "resized", a.size)
        log.Trace(ctx, "Resizing artwork", "artID", a.artID, "original", origSize, "resized", a.size, "square", a.square)
    }
    if err != nil {
        log.Warn(ctx, "Could not resize image. Will return image as is", "artID", a.artID, "size", a.size, err)
        log.Warn(ctx, "Could not resize image. Will return image as is", "artID", a.artID, "size", a.size, "square", a.square, err)
    }
    if err != nil || resized == nil {
        // Force finish reading any remaining data
        _, _ = io.Copy(io.Discard, r)
        return io.NopCloser(buf), "", nil //nolint:nilerr
        // if we couldn't resize the image, return the original
        orig, _, err = a.a.Get(ctx, a.artID, 0, false)
        return orig, "", err
    }
    return io.NopCloser(resized), fmt.Sprintf("%s@%d", a.artID, a.size), nil
}

@@ -10,13 +10,14 @@ import (
    "os"
    "path/filepath"
    "reflect"
    "regexp"
    "runtime"
    "strings"
    "time"

    "github.com/dhowden/tag"
    "github.com/navidrome/navidrome/consts"
    "github.com/navidrome/navidrome/core"
    "github.com/navidrome/navidrome/core/external"
    "github.com/navidrome/navidrome/core/ffmpeg"
    "github.com/navidrome/navidrome/log"
    "github.com/navidrome/navidrome/model"
@@ -52,13 +53,9 @@ func (f sourceFunc) String() string {
    return name
}

func splitList(s string) []string {
    return strings.Split(s, consts.Zwsp)
}

func fromExternalFile(ctx context.Context, files string, pattern string) sourceFunc {
func fromExternalFile(ctx context.Context, files []string, pattern string) sourceFunc {
    return func() (io.ReadCloser, string, error) {
        for _, file := range splitList(files) {
        for _, file := range files {
            _, name := filepath.Split(file)
            match, err := filepath.Match(pattern, strings.ToLower(name))
            if err != nil {
@@ -79,7 +76,14 @@ func fromExternalFile(ctx context.Context, files string, pattern string) sourceF
        }
    }

func fromTag(path string) sourceFunc {
// These regexes are used to match the picture type in the file, in the order they are listed.
var picTypeRegexes = []*regexp.Regexp{
    regexp.MustCompile(`(?i).*cover.*front.*|.*front.*cover.*`),
    regexp.MustCompile(`(?i).*front.*`),
    regexp.MustCompile(`(?i).*cover.*`),
}

func fromTag(ctx context.Context, path string) sourceFunc {
    return func() (io.ReadCloser, string, error) {
        if path == "" {
            return nil, "", nil
@@ -95,10 +99,31 @@ func fromTag(path string) sourceFunc {
            return nil, "", err
        }

        picture := m.Picture()
        if picture == nil {
        types := m.PictureTypes()
        if len(types) == 0 {
            return nil, "", fmt.Errorf("no embedded image found in %s", path)
        }

        var picture *tag.Picture
        for _, regex := range picTypeRegexes {
            for _, t := range types {
                if regex.MatchString(t) {
                    log.Trace(ctx, "Found embedded image", "type", t, "path", path)
                    picture = m.Pictures(t)
                    break
                }
            }
            if picture != nil {
                break
            }
        }
        if picture == nil {
            log.Trace(ctx, "Could not find a front image. Getting the first one", "type", types[0], "path", path)
            picture = m.Picture()
        }
        if picture == nil {
            return nil, "", fmt.Errorf("could not load embedded image from %s", path)
        }
        return io.NopCloser(bytes.NewReader(picture.Data)), path, nil
    }
}
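To make the selection order above concrete, here is a small self-contained sketch (not part of the change) showing how the same priority regexes pick a picture type; the sample type strings are made up for illustration.

    package main

    import (
        "fmt"
        "regexp"
    )

    // Same priority order as picTypeRegexes above: "front cover" combinations win,
    // then anything containing "front", then anything containing "cover".
    var picTypePriority = []*regexp.Regexp{
        regexp.MustCompile(`(?i).*cover.*front.*|.*front.*cover.*`),
        regexp.MustCompile(`(?i).*front.*`),
        regexp.MustCompile(`(?i).*cover.*`),
    }

    // pickPictureType returns the first type matched by the highest-priority regex.
    func pickPictureType(types []string) (string, bool) {
        for _, re := range picTypePriority {
            for _, t := range types {
                if re.MatchString(t) {
                    return t, true
                }
            }
        }
        return "", false
    }

    func main() {
        t, ok := pickPictureType([]string{"Artist", "Back Cover", "Cover (front)"})
        fmt.Println(t, ok) // prints: Cover (front) true
    }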
@@ -112,13 +137,7 @@ func fromFFmpegTag(ctx context.Context, ffmpeg ffmpeg.FFmpeg, path string) sourc
        if err != nil {
            return nil, "", err
        }
        defer r.Close()
        buf := new(bytes.Buffer)
        _, err = io.Copy(buf, r)
        if err != nil {
            return nil, "", err
        }
        return io.NopCloser(buf), path, nil
        return r, path, nil
    }
}

@@ -138,9 +157,9 @@ func fromAlbumPlaceholder() sourceFunc {
        return r, consts.PlaceholderAlbumArt, nil
    }
}
func fromArtistExternalSource(ctx context.Context, ar model.Artist, em core.ExternalMetadata) sourceFunc {
func fromArtistExternalSource(ctx context.Context, ar model.Artist, provider external.Provider) sourceFunc {
    return func() (io.ReadCloser, string, error) {
        imageUrl, err := em.ArtistImage(ctx, ar.ID)
        imageUrl, err := provider.ArtistImage(ctx, ar.ID)
        if err != nil {
            return nil, "", err
        }
@@ -149,9 +168,9 @@ func fromArtistExternalSource(ctx context.Context, ar model.Artist, em core.Exte
    }
}

func fromAlbumExternalSource(ctx context.Context, al model.Album, em core.ExternalMetadata) sourceFunc {
func fromAlbumExternalSource(ctx context.Context, al model.Album, provider external.Provider) sourceFunc {
    return func() (io.ReadCloser, string, error) {
        imageUrl, err := em.AlbumImage(ctx, al.ID)
        imageUrl, err := provider.AlbumImage(ctx, al.ID)
        if err != nil {
            return nil, "", err
        }
@@ -169,7 +188,7 @@ func fromURL(ctx context.Context, imageUrl *url.URL) (io.ReadCloser, string, err
    }
    if resp.StatusCode != http.StatusOK {
        resp.Body.Close()
        return nil, "", fmt.Errorf("error retrieveing artwork from %s: %s", imageUrl, resp.Status)
        return nil, "", fmt.Errorf("error retrieving artwork from %s: %s", imageUrl, resp.Status)
    }
    return resp.Body, imageUrl.String(), nil
}

@@ -1,36 +1,47 @@
package auth

import (
    "cmp"
    "context"
    "crypto/sha256"
    "sync"
    "time"

    "github.com/go-chi/jwtauth/v5"
    "github.com/google/uuid"
    "github.com/lestrrat-go/jwx/v2/jwt"
    "github.com/navidrome/navidrome/conf"
    "github.com/navidrome/navidrome/consts"
    "github.com/navidrome/navidrome/log"
    "github.com/navidrome/navidrome/model"
    "github.com/navidrome/navidrome/model/id"
    "github.com/navidrome/navidrome/model/request"
    "github.com/navidrome/navidrome/utils"
)

var (
    once      sync.Once
    Secret    []byte
    TokenAuth *jwtauth.JWTAuth
)

// Init creates a JWTAuth object from the secret stored in the DB.
// If the secret is not found, it will create a new one and store it in the DB.
func Init(ds model.DataStore) {
    once.Do(func() {
        ctx := context.TODO()
        log.Info("Setting Session Timeout", "value", conf.Server.SessionTimeout)
        secret, err := ds.Property(context.TODO()).Get(consts.JWTSecretKey)

        secret, err := ds.Property(ctx).Get(consts.JWTSecretKey)
        if err != nil || secret == "" {
            log.Error("No JWT secret found in DB. Setting a temp one, but please report this error", err)
            secret = uuid.NewString()
            log.Info(ctx, "Creating new JWT secret, used for encrypting UI sessions")
            secret = createNewSecret(ctx, ds)
        } else {
            if secret, err = utils.Decrypt(ctx, getEncKey(), secret); err != nil {
                log.Error(ctx, "Could not decrypt JWT secret, creating a new one", err)
                secret = createNewSecret(ctx, ds)
            }
        }
        Secret = []byte(secret)
        TokenAuth = jwtauth.New("HS256", Secret, nil)

        TokenAuth = jwtauth.New("HS256", []byte(secret), nil)
    })
}

@@ -112,3 +123,25 @@ func WithAdminUser(ctx context.Context, ds model.DataStore) context.Context {
    ctx = request.WithUsername(ctx, u.UserName)
    return request.WithUser(ctx, *u)
}

func createNewSecret(ctx context.Context, ds model.DataStore) string {
    secret := id.NewRandom()
    encSecret, err := utils.Encrypt(ctx, getEncKey(), secret)
    if err != nil {
        log.Error(ctx, "Could not encrypt JWT secret", err)
        return secret
    }
    if err := ds.Property(ctx).Put(consts.JWTSecretKey, encSecret); err != nil {
        log.Error(ctx, "Could not save JWT secret in DB", err)
    }
    return secret
}

func getEncKey() []byte {
    key := cmp.Or(
        conf.Server.PasswordEncryptionKey,
        consts.DefaultEncryptionKey,
    )
    sum := sha256.Sum256([]byte(key))
    return sum[:]
}

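A minimal sketch of the round trip Init now performs, assuming utils.Encrypt and utils.Decrypt return (string, error) as the calls above imply; the literal secret value here is only for illustration.

    ctx := context.TODO()
    key := getEncKey() // SHA-256 of the configured PasswordEncryptionKey (or the default key)

    enc, err := utils.Encrypt(ctx, key, "example-jwt-secret") // what gets stored under consts.JWTSecretKey
    if err != nil {
        log.Error(ctx, "Could not encrypt secret", err)
    }
    plain, err := utils.Decrypt(ctx, key, enc) // on the next startup, plain == "example-jwt-secret"
    if err != nil {
        log.Error(ctx, "Could not decrypt secret", err)
    }
    _ = plain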
@@ -4,12 +4,12 @@ import (
    "testing"
    "time"

    "github.com/go-chi/jwtauth/v5"
    "github.com/navidrome/navidrome/conf"
    "github.com/navidrome/navidrome/consts"
    "github.com/navidrome/navidrome/core/auth"
    "github.com/navidrome/navidrome/log"
    "github.com/navidrome/navidrome/model"
    "github.com/navidrome/navidrome/tests"
    . "github.com/onsi/ginkgo/v2"
    . "github.com/onsi/gomega"
)
@@ -32,8 +32,10 @@ var _ = BeforeSuite(func() {
var _ = Describe("Auth", func() {

    BeforeEach(func() {
        auth.Secret = []byte(testJWTSecret)
        auth.TokenAuth = jwtauth.New("HS256", auth.Secret, nil)
        ds := &tests.MockDataStore{
            MockedProperty: &tests.MockedPropertyRepo{},
        }
        auth.Init(ds)
    })

    Describe("Validate", func() {

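For context, a hedged sketch of issuing and checking a token with the TokenAuth that Init builds. The claim names and the use of conf.Server.SessionTimeout as the expiry are illustrative assumptions, not taken from the diff; Encode and VerifyToken are the standard go-chi/jwtauth/v5 helpers.

    claims := map[string]interface{}{
        "sub": "admin",
        "exp": time.Now().Add(conf.Server.SessionTimeout).Unix(),
    }
    _, tokenString, err := auth.TokenAuth.Encode(claims) // signed with the secret loaded by Init
    if err != nil {
        log.Error("Could not encode token", err)
        return
    }
    token, err := jwtauth.VerifyToken(auth.TokenAuth, tokenString) // rejects bad signatures and expired tokens
    if err == nil {
        log.Info("Token verified", "sub", token.Subject())
    }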
@@ -2,7 +2,9 @@ package core

import (
    "context"
    "path/filepath"

    "github.com/navidrome/navidrome/model"
    "github.com/navidrome/navidrome/model/request"
)

@@ -13,3 +15,13 @@ func userName(ctx context.Context) string {
        return user.UserName
    }
}

// BFR We should only access files through the `storage.Storage` interface. This will require changing how
// TagLib and ffmpeg access files
var AbsolutePath = func(ctx context.Context, ds model.DataStore, libId int, path string) string {
    libPath, err := ds.Library(ctx).GetPath(libId)
    if err != nil {
        return path
    }
    return filepath.Join(libPath, path)
}

55	core/common_test.go	Normal file
@@ -0,0 +1,55 @@
package core

import (
    "context"

    . "github.com/onsi/ginkgo/v2"
    . "github.com/onsi/gomega"

    "github.com/navidrome/navidrome/model"
    "github.com/navidrome/navidrome/model/request"
    "github.com/navidrome/navidrome/tests"
)

var _ = Describe("common.go", func() {
    Describe("userName", func() {
        It("returns the username from context", func() {
            ctx := request.WithUser(context.Background(), model.User{UserName: "testuser"})
            Expect(userName(ctx)).To(Equal("testuser"))
        })

        It("returns 'UNKNOWN' if no user in context", func() {
            ctx := context.Background()
            Expect(userName(ctx)).To(Equal("UNKNOWN"))
        })
    })

    Describe("AbsolutePath", func() {
        var (
            ds    *tests.MockDataStore
            libId int
            path  string
        )

        BeforeEach(func() {
            ds = &tests.MockDataStore{}
            libId = 1
            path = "music/file.mp3"
            mockLib := &tests.MockLibraryRepo{}
            mockLib.SetData(model.Libraries{{ID: libId, Path: "/library/root"}})
            ds.MockedLibrary = mockLib
        })

        It("returns the absolute path when library exists", func() {
            ctx := context.Background()
            abs := AbsolutePath(ctx, ds, libId, path)
            Expect(abs).To(Equal("/library/root/music/file.mp3"))
        })

        It("returns the original path if library not found", func() {
            ctx := context.Background()
            abs := AbsolutePath(ctx, ds, 999, path)
            Expect(abs).To(Equal(path))
        })
    })
})
284	core/external/extdata_helper_test.go	vendored	Normal file
@@ -0,0 +1,284 @@
|
||||
package external_test
|
||||
|
||||
import (
|
||||
"context"
|
||||
"errors"
|
||||
|
||||
"github.com/navidrome/navidrome/core/agents"
|
||||
"github.com/navidrome/navidrome/model"
|
||||
"github.com/stretchr/testify/mock"
|
||||
)
|
||||
|
||||
// --- Shared Mock Implementations ---
|
||||
|
||||
// mockArtistRepo mocks model.ArtistRepository
|
||||
type mockArtistRepo struct {
|
||||
mock.Mock
|
||||
model.ArtistRepository
|
||||
}
|
||||
|
||||
func newMockArtistRepo() *mockArtistRepo {
|
||||
return &mockArtistRepo{}
|
||||
}
|
||||
|
||||
// SetData sets up basic Get expectations.
|
||||
func (m *mockArtistRepo) SetData(artists model.Artists) {
|
||||
for _, a := range artists {
|
||||
artistCopy := a
|
||||
m.On("Get", artistCopy.ID).Return(&artistCopy, nil)
|
||||
}
|
||||
}
|
||||
|
||||
// Get implements model.ArtistRepository.
|
||||
func (m *mockArtistRepo) Get(id string) (*model.Artist, error) {
|
||||
args := m.Called(id)
|
||||
if args.Get(0) == nil {
|
||||
return nil, args.Error(1)
|
||||
}
|
||||
return args.Get(0).(*model.Artist), args.Error(1)
|
||||
}
|
||||
|
||||
// GetAll implements model.ArtistRepository.
|
||||
func (m *mockArtistRepo) GetAll(options ...model.QueryOptions) (model.Artists, error) {
|
||||
argsSlice := make([]interface{}, len(options))
|
||||
for i, v := range options {
|
||||
argsSlice[i] = v
|
||||
}
|
||||
args := m.Called(argsSlice...)
|
||||
if args.Get(0) == nil {
|
||||
return nil, args.Error(1)
|
||||
}
|
||||
return args.Get(0).(model.Artists), args.Error(1)
|
||||
}
|
||||
|
||||
// SetError is a helper to set up a generic error for GetAll.
|
||||
func (m *mockArtistRepo) SetError(hasError bool) {
|
||||
if hasError {
|
||||
m.On("GetAll", mock.Anything).Return(nil, errors.New("mock repo error"))
|
||||
}
|
||||
}
|
||||
|
||||
// FindByName is a helper to set up a GetAll expectation for finding by name.
|
||||
func (m *mockArtistRepo) FindByName(name string, artist model.Artist) {
|
||||
m.On("GetAll", mock.MatchedBy(func(opt model.QueryOptions) bool {
|
||||
return opt.Filters != nil
|
||||
})).Return(model.Artists{artist}, nil).Once()
|
||||
}
|
||||
|
||||
// mockMediaFileRepo mocks model.MediaFileRepository
|
||||
type mockMediaFileRepo struct {
|
||||
mock.Mock
|
||||
model.MediaFileRepository
|
||||
}
|
||||
|
||||
func newMockMediaFileRepo() *mockMediaFileRepo {
|
||||
return &mockMediaFileRepo{}
|
||||
}
|
||||
|
||||
// SetData sets up basic Get expectations.
|
||||
func (m *mockMediaFileRepo) SetData(mediaFiles model.MediaFiles) {
|
||||
for _, mf := range mediaFiles {
|
||||
mfCopy := mf
|
||||
m.On("Get", mfCopy.ID).Return(&mfCopy, nil)
|
||||
}
|
||||
}
|
||||
|
||||
// Get implements model.MediaFileRepository.
|
||||
func (m *mockMediaFileRepo) Get(id string) (*model.MediaFile, error) {
|
||||
args := m.Called(id)
|
||||
if args.Get(0) == nil {
|
||||
return nil, args.Error(1)
|
||||
}
|
||||
return args.Get(0).(*model.MediaFile), args.Error(1)
|
||||
}
|
||||
|
||||
// GetAll implements model.MediaFileRepository.
|
||||
func (m *mockMediaFileRepo) GetAll(options ...model.QueryOptions) (model.MediaFiles, error) {
|
||||
argsSlice := make([]interface{}, len(options))
|
||||
for i, v := range options {
|
||||
argsSlice[i] = v
|
||||
}
|
||||
args := m.Called(argsSlice...)
|
||||
if args.Get(0) == nil {
|
||||
return nil, args.Error(1)
|
||||
}
|
||||
return args.Get(0).(model.MediaFiles), args.Error(1)
|
||||
}
|
||||
|
||||
// SetError is a helper to set up a generic error for GetAll.
|
||||
func (m *mockMediaFileRepo) SetError(hasError bool) {
|
||||
if hasError {
|
||||
m.On("GetAll", mock.Anything).Return(nil, errors.New("mock repo error"))
|
||||
}
|
||||
}
|
||||
|
||||
// FindByMBID is a helper to set up a GetAll expectation for finding by MBID.
|
||||
func (m *mockMediaFileRepo) FindByMBID(mbid string, mediaFile model.MediaFile) {
|
||||
m.On("GetAll", mock.MatchedBy(func(opt model.QueryOptions) bool {
|
||||
return opt.Filters != nil
|
||||
})).Return(model.MediaFiles{mediaFile}, nil).Once()
|
||||
}
|
||||
|
||||
// FindByArtistAndTitle is a helper to set up a GetAll expectation for finding by artist/title.
|
||||
func (m *mockMediaFileRepo) FindByArtistAndTitle(artistID string, title string, mediaFile model.MediaFile) {
|
||||
m.On("GetAll", mock.MatchedBy(func(opt model.QueryOptions) bool {
|
||||
return opt.Filters != nil
|
||||
})).Return(model.MediaFiles{mediaFile}, nil).Once()
|
||||
}
|
||||
|
||||
// mockAlbumRepo mocks model.AlbumRepository
|
||||
type mockAlbumRepo struct {
|
||||
mock.Mock
|
||||
model.AlbumRepository
|
||||
}
|
||||
|
||||
func newMockAlbumRepo() *mockAlbumRepo {
|
||||
return &mockAlbumRepo{}
|
||||
}
|
||||
|
||||
// Get implements model.AlbumRepository.
|
||||
func (m *mockAlbumRepo) Get(id string) (*model.Album, error) {
|
||||
args := m.Called(id)
|
||||
if args.Get(0) == nil {
|
||||
return nil, args.Error(1)
|
||||
}
|
||||
return args.Get(0).(*model.Album), args.Error(1)
|
||||
}
|
||||
|
||||
// GetAll implements model.AlbumRepository.
|
||||
func (m *mockAlbumRepo) GetAll(options ...model.QueryOptions) (model.Albums, error) {
|
||||
argsSlice := make([]interface{}, len(options))
|
||||
for i, v := range options {
|
||||
argsSlice[i] = v
|
||||
}
|
||||
args := m.Called(argsSlice...)
|
||||
if args.Get(0) == nil {
|
||||
return nil, args.Error(1)
|
||||
}
|
||||
return args.Get(0).(model.Albums), args.Error(1)
|
||||
}
|
||||
|
||||
// mockSimilarArtistAgent mocks agents implementing ArtistTopSongsRetriever and ArtistSimilarRetriever
|
||||
type mockSimilarArtistAgent struct {
|
||||
mock.Mock
|
||||
agents.Interface // Embed to satisfy methods not explicitly mocked
|
||||
}
|
||||
|
||||
func (m *mockSimilarArtistAgent) AgentName() string {
|
||||
return "mockSimilar"
|
||||
}
|
||||
|
||||
func (m *mockSimilarArtistAgent) GetArtistTopSongs(ctx context.Context, id, artistName, mbid string, count int) ([]agents.Song, error) {
|
||||
args := m.Called(ctx, id, artistName, mbid, count)
|
||||
if args.Get(0) != nil {
|
||||
return args.Get(0).([]agents.Song), args.Error(1)
|
||||
}
|
||||
return nil, args.Error(1)
|
||||
}
|
||||
|
||||
func (m *mockSimilarArtistAgent) GetSimilarArtists(ctx context.Context, id, name, mbid string, limit int) ([]agents.Artist, error) {
|
||||
args := m.Called(ctx, id, name, mbid, limit)
|
||||
if args.Get(0) != nil {
|
||||
return args.Get(0).([]agents.Artist), args.Error(1)
|
||||
}
|
||||
return nil, args.Error(1)
|
||||
}
|
||||
|
||||
// mockAgents mocks the main Agents interface used by Provider
|
||||
type mockAgents struct {
|
||||
mock.Mock // Embed testify mock
|
||||
topSongsAgent agents.ArtistTopSongsRetriever
|
||||
similarAgent agents.ArtistSimilarRetriever
|
||||
imageAgent agents.ArtistImageRetriever
|
||||
albumInfoAgent interface {
|
||||
agents.AlbumInfoRetriever
|
||||
agents.AlbumImageRetriever
|
||||
}
|
||||
bioAgent agents.ArtistBiographyRetriever
|
||||
mbidAgent agents.ArtistMBIDRetriever
|
||||
urlAgent agents.ArtistURLRetriever
|
||||
agents.Interface
|
||||
}
|
||||
|
||||
func (m *mockAgents) AgentName() string {
|
||||
return "mockCombined"
|
||||
}
|
||||
|
||||
func (m *mockAgents) GetSimilarArtists(ctx context.Context, id, name, mbid string, limit int) ([]agents.Artist, error) {
|
||||
if m.similarAgent != nil {
|
||||
return m.similarAgent.GetSimilarArtists(ctx, id, name, mbid, limit)
|
||||
}
|
||||
args := m.Called(ctx, id, name, mbid, limit)
|
||||
if args.Get(0) != nil {
|
||||
return args.Get(0).([]agents.Artist), args.Error(1)
|
||||
}
|
||||
return nil, args.Error(1)
|
||||
}
|
||||
|
||||
func (m *mockAgents) GetArtistTopSongs(ctx context.Context, id, artistName, mbid string, count int) ([]agents.Song, error) {
|
||||
if m.topSongsAgent != nil {
|
||||
return m.topSongsAgent.GetArtistTopSongs(ctx, id, artistName, mbid, count)
|
||||
}
|
||||
args := m.Called(ctx, id, artistName, mbid, count)
|
||||
if args.Get(0) != nil {
|
||||
return args.Get(0).([]agents.Song), args.Error(1)
|
||||
}
|
||||
return nil, args.Error(1)
|
||||
}
|
||||
|
||||
func (m *mockAgents) GetAlbumInfo(ctx context.Context, name, artist, mbid string) (*agents.AlbumInfo, error) {
|
||||
if m.albumInfoAgent != nil {
|
||||
return m.albumInfoAgent.GetAlbumInfo(ctx, name, artist, mbid)
|
||||
}
|
||||
args := m.Called(ctx, name, artist, mbid)
|
||||
if args.Get(0) != nil {
|
||||
return args.Get(0).(*agents.AlbumInfo), args.Error(1)
|
||||
}
|
||||
return nil, args.Error(1)
|
||||
}
|
||||
|
||||
func (m *mockAgents) GetArtistMBID(ctx context.Context, id string, name string) (string, error) {
|
||||
if m.mbidAgent != nil {
|
||||
return m.mbidAgent.GetArtistMBID(ctx, id, name)
|
||||
}
|
||||
args := m.Called(ctx, id, name)
|
||||
return args.String(0), args.Error(1)
|
||||
}
|
||||
|
||||
func (m *mockAgents) GetArtistURL(ctx context.Context, id, name, mbid string) (string, error) {
|
||||
if m.urlAgent != nil {
|
||||
return m.urlAgent.GetArtistURL(ctx, id, name, mbid)
|
||||
}
|
||||
args := m.Called(ctx, id, name, mbid)
|
||||
return args.String(0), args.Error(1)
|
||||
}
|
||||
|
||||
func (m *mockAgents) GetArtistBiography(ctx context.Context, id, name, mbid string) (string, error) {
|
||||
if m.bioAgent != nil {
|
||||
return m.bioAgent.GetArtistBiography(ctx, id, name, mbid)
|
||||
}
|
||||
args := m.Called(ctx, id, name, mbid)
|
||||
return args.String(0), args.Error(1)
|
||||
}
|
||||
|
||||
func (m *mockAgents) GetArtistImages(ctx context.Context, id, name, mbid string) ([]agents.ExternalImage, error) {
|
||||
if m.imageAgent != nil {
|
||||
return m.imageAgent.GetArtistImages(ctx, id, name, mbid)
|
||||
}
|
||||
args := m.Called(ctx, id, name, mbid)
|
||||
if args.Get(0) != nil {
|
||||
return args.Get(0).([]agents.ExternalImage), args.Error(1)
|
||||
}
|
||||
return nil, args.Error(1)
|
||||
}
|
||||
|
||||
func (m *mockAgents) GetAlbumImages(ctx context.Context, name, artist, mbid string) ([]agents.ExternalImage, error) {
|
||||
if m.albumInfoAgent != nil {
|
||||
return m.albumInfoAgent.GetAlbumImages(ctx, name, artist, mbid)
|
||||
}
|
||||
args := m.Called(ctx, name, artist, mbid)
|
||||
if args.Get(0) != nil {
|
||||
return args.Get(0).([]agents.ExternalImage), args.Error(1)
|
||||
}
|
||||
return nil, args.Error(1)
|
||||
}
|
||||
17	core/external/extdata_suite_test.go	vendored	Normal file
@@ -0,0 +1,17 @@
package external

import (
    "testing"

    "github.com/navidrome/navidrome/log"
    "github.com/navidrome/navidrome/tests"
    . "github.com/onsi/ginkgo/v2"
    . "github.com/onsi/gomega"
)

func TestExternal(t *testing.T) {
    tests.Init(t, false)
    log.SetLevel(log.LevelFatal)
    RegisterFailHandler(Fail)
    RunSpecs(t, "External Suite")
}
@@ -1,8 +1,9 @@
|
||||
package core
|
||||
package external
|
||||
|
||||
import (
|
||||
"context"
|
||||
"errors"
|
||||
"fmt"
|
||||
"net/url"
|
||||
"sort"
|
||||
"strings"
|
||||
@@ -11,6 +12,7 @@ import (
|
||||
"github.com/Masterminds/squirrel"
|
||||
"github.com/navidrome/navidrome/conf"
|
||||
"github.com/navidrome/navidrome/core/agents"
|
||||
_ "github.com/navidrome/navidrome/core/agents/deezer"
|
||||
_ "github.com/navidrome/navidrome/core/agents/lastfm"
|
||||
_ "github.com/navidrome/navidrome/core/agents/listenbrainz"
|
||||
_ "github.com/navidrome/navidrome/core/agents/spotify"
|
||||
@@ -19,30 +21,30 @@ import (
|
||||
"github.com/navidrome/navidrome/utils"
|
||||
. "github.com/navidrome/navidrome/utils/gg"
|
||||
"github.com/navidrome/navidrome/utils/random"
|
||||
"github.com/navidrome/navidrome/utils/slice"
|
||||
"github.com/navidrome/navidrome/utils/str"
|
||||
"golang.org/x/sync/errgroup"
|
||||
)
|
||||
|
||||
const (
|
||||
unavailableArtistID = "-1"
|
||||
maxSimilarArtists = 100
|
||||
refreshDelay = 5 * time.Second
|
||||
refreshTimeout = 15 * time.Second
|
||||
refreshQueueLength = 2000
|
||||
maxSimilarArtists = 100
|
||||
refreshDelay = 5 * time.Second
|
||||
refreshTimeout = 15 * time.Second
|
||||
refreshQueueLength = 2000
|
||||
)
|
||||
|
||||
type ExternalMetadata interface {
|
||||
type Provider interface {
|
||||
UpdateAlbumInfo(ctx context.Context, id string) (*model.Album, error)
|
||||
UpdateArtistInfo(ctx context.Context, id string, count int, includeNotPresent bool) (*model.Artist, error)
|
||||
SimilarSongs(ctx context.Context, id string, count int) (model.MediaFiles, error)
|
||||
ArtistRadio(ctx context.Context, id string, count int) (model.MediaFiles, error)
|
||||
TopSongs(ctx context.Context, artist string, count int) (model.MediaFiles, error)
|
||||
ArtistImage(ctx context.Context, id string) (*url.URL, error)
|
||||
AlbumImage(ctx context.Context, id string) (*url.URL, error)
|
||||
}
|
||||
|
||||
type externalMetadata struct {
|
||||
type provider struct {
|
||||
ds model.DataStore
|
||||
ag *agents.Agents
|
||||
ag Agents
|
||||
artistQueue refreshQueue[auxArtist]
|
||||
albumQueue refreshQueue[auxAlbum]
|
||||
}
|
||||
@@ -57,18 +59,29 @@ type auxArtist struct {
|
||||
Name string
|
||||
}
|
||||
|
||||
func NewExternalMetadata(ds model.DataStore, agents *agents.Agents) ExternalMetadata {
|
||||
e := &externalMetadata{ds: ds, ag: agents}
|
||||
type Agents interface {
|
||||
agents.AlbumInfoRetriever
|
||||
agents.AlbumImageRetriever
|
||||
agents.ArtistBiographyRetriever
|
||||
agents.ArtistMBIDRetriever
|
||||
agents.ArtistImageRetriever
|
||||
agents.ArtistSimilarRetriever
|
||||
agents.ArtistTopSongsRetriever
|
||||
agents.ArtistURLRetriever
|
||||
}
|
||||
|
||||
func NewProvider(ds model.DataStore, agents Agents) Provider {
|
||||
e := &provider{ds: ds, ag: agents}
|
||||
e.artistQueue = newRefreshQueue(context.TODO(), e.populateArtistInfo)
|
||||
e.albumQueue = newRefreshQueue(context.TODO(), e.populateAlbumInfo)
|
||||
return e
|
||||
}
|
||||
|
||||
func (e *externalMetadata) getAlbum(ctx context.Context, id string) (*auxAlbum, error) {
|
||||
func (e *provider) getAlbum(ctx context.Context, id string) (auxAlbum, error) {
|
||||
var entity interface{}
|
||||
entity, err := model.GetEntityByID(ctx, e.ds, id)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
return auxAlbum{}, err
|
||||
}
|
||||
|
||||
var album auxAlbum
|
||||
@@ -79,12 +92,13 @@ func (e *externalMetadata) getAlbum(ctx context.Context, id string) (*auxAlbum,
|
||||
case *model.MediaFile:
|
||||
return e.getAlbum(ctx, v.AlbumID)
|
||||
default:
|
||||
return nil, model.ErrNotFound
|
||||
return auxAlbum{}, model.ErrNotFound
|
||||
}
|
||||
return &album, nil
|
||||
|
||||
return album, nil
|
||||
}
|
||||
|
||||
func (e *externalMetadata) UpdateAlbumInfo(ctx context.Context, id string) (*model.Album, error) {
|
||||
func (e *provider) UpdateAlbumInfo(ctx context.Context, id string) (*model.Album, error) {
|
||||
album, err := e.getAlbum(ctx, id)
|
||||
if err != nil {
|
||||
log.Info(ctx, "Not found", "id", id)
|
||||
@@ -94,7 +108,7 @@ func (e *externalMetadata) UpdateAlbumInfo(ctx context.Context, id string) (*mod
|
||||
updatedAt := V(album.ExternalInfoUpdatedAt)
|
||||
if updatedAt.IsZero() {
|
||||
log.Debug(ctx, "AlbumInfo not cached. Retrieving it now", "updatedAt", updatedAt, "id", id, "name", album.Name)
|
||||
err = e.populateAlbumInfo(ctx, album)
|
||||
album, err = e.populateAlbumInfo(ctx, album)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
@@ -103,22 +117,22 @@ func (e *externalMetadata) UpdateAlbumInfo(ctx context.Context, id string) (*mod
|
||||
// If info is expired, trigger a populateAlbumInfo in the background
|
||||
if time.Since(updatedAt) > conf.Server.DevAlbumInfoTimeToLive {
|
||||
log.Debug("Found expired cached AlbumInfo, refreshing in the background", "updatedAt", album.ExternalInfoUpdatedAt, "name", album.Name)
|
||||
e.albumQueue.enqueue(*album)
|
||||
e.albumQueue.enqueue(&album)
|
||||
}
|
||||
|
||||
return &album.Album, nil
|
||||
}
|
||||
|
||||
func (e *externalMetadata) populateAlbumInfo(ctx context.Context, album *auxAlbum) error {
|
||||
func (e *provider) populateAlbumInfo(ctx context.Context, album auxAlbum) (auxAlbum, error) {
|
||||
start := time.Now()
|
||||
info, err := e.ag.GetAlbumInfo(ctx, album.Name, album.AlbumArtist, album.MbzAlbumID)
|
||||
if errors.Is(err, agents.ErrNotFound) {
|
||||
return nil
|
||||
return album, nil
|
||||
}
|
||||
if err != nil {
|
||||
log.Error("Error refreshing AlbumInfo", "id", album.ID, "name", album.Name, "artist", album.AlbumArtist,
|
||||
"elapsed", time.Since(start), err)
|
||||
return err
|
||||
return album, err
|
||||
}
|
||||
|
||||
album.ExternalInfoUpdatedAt = P(time.Now())
|
||||
@@ -128,23 +142,24 @@ func (e *externalMetadata) populateAlbumInfo(ctx context.Context, album *auxAlbu
|
||||
album.Description = info.Description
|
||||
}
|
||||
|
||||
if len(info.Images) > 0 {
|
||||
sort.Slice(info.Images, func(i, j int) bool {
|
||||
return info.Images[i].Size > info.Images[j].Size
|
||||
images, err := e.ag.GetAlbumImages(ctx, album.Name, album.AlbumArtist, album.MbzAlbumID)
|
||||
if err == nil && len(images) > 0 {
|
||||
sort.Slice(images, func(i, j int) bool {
|
||||
return images[i].Size > images[j].Size
|
||||
})
|
||||
|
||||
album.LargeImageUrl = info.Images[0].URL
|
||||
album.LargeImageUrl = images[0].URL
|
||||
|
||||
if len(info.Images) >= 2 {
|
||||
album.MediumImageUrl = info.Images[1].URL
|
||||
if len(images) >= 2 {
|
||||
album.MediumImageUrl = images[1].URL
|
||||
}
|
||||
|
||||
if len(info.Images) >= 3 {
|
||||
album.SmallImageUrl = info.Images[2].URL
|
||||
if len(images) >= 3 {
|
||||
album.SmallImageUrl = images[2].URL
|
||||
}
|
||||
}
|
||||
|
||||
err = e.ds.Album(ctx).Put(&album.Album)
|
||||
err = e.ds.Album(ctx).UpdateExternalInfo(&album.Album)
|
||||
if err != nil {
|
||||
log.Error(ctx, "Error trying to update album external information", "id", album.ID, "name", album.Name,
|
||||
"elapsed", time.Since(start), err)
|
||||
@@ -152,14 +167,14 @@ func (e *externalMetadata) populateAlbumInfo(ctx context.Context, album *auxAlbu
|
||||
log.Trace(ctx, "AlbumInfo collected", "album", album, "elapsed", time.Since(start))
|
||||
}
|
||||
|
||||
return nil
|
||||
return album, nil
|
||||
}
|
||||
|
||||
func (e *externalMetadata) getArtist(ctx context.Context, id string) (*auxArtist, error) {
|
||||
func (e *provider) getArtist(ctx context.Context, id string) (auxArtist, error) {
|
||||
var entity interface{}
|
||||
entity, err := model.GetEntityByID(ctx, e.ds, id)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
return auxArtist{}, err
|
||||
}
|
||||
|
||||
var artist auxArtist
|
||||
@@ -172,46 +187,46 @@ func (e *externalMetadata) getArtist(ctx context.Context, id string) (*auxArtist
|
||||
case *model.Album:
|
||||
return e.getArtist(ctx, v.AlbumArtistID)
|
||||
default:
|
||||
return nil, model.ErrNotFound
|
||||
return auxArtist{}, model.ErrNotFound
|
||||
}
|
||||
return &artist, nil
|
||||
return artist, nil
|
||||
}
|
||||
|
||||
func (e *externalMetadata) UpdateArtistInfo(ctx context.Context, id string, similarCount int, includeNotPresent bool) (*model.Artist, error) {
|
||||
func (e *provider) UpdateArtistInfo(ctx context.Context, id string, similarCount int, includeNotPresent bool) (*model.Artist, error) {
|
||||
artist, err := e.refreshArtistInfo(ctx, id)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
err = e.loadSimilar(ctx, artist, similarCount, includeNotPresent)
|
||||
err = e.loadSimilar(ctx, &artist, similarCount, includeNotPresent)
|
||||
return &artist.Artist, err
|
||||
}
|
||||
|
||||
func (e *externalMetadata) refreshArtistInfo(ctx context.Context, id string) (*auxArtist, error) {
|
||||
func (e *provider) refreshArtistInfo(ctx context.Context, id string) (auxArtist, error) {
|
||||
artist, err := e.getArtist(ctx, id)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
return auxArtist{}, err
|
||||
}
|
||||
|
||||
// If we don't have any info, retrieves it now
|
||||
updatedAt := V(artist.ExternalInfoUpdatedAt)
|
||||
if updatedAt.IsZero() {
|
||||
log.Debug(ctx, "ArtistInfo not cached. Retrieving it now", "updatedAt", updatedAt, "id", id, "name", artist.Name)
|
||||
err := e.populateArtistInfo(ctx, artist)
|
||||
artist, err = e.populateArtistInfo(ctx, artist)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
return auxArtist{}, err
|
||||
}
|
||||
}
|
||||
|
||||
// If info is expired, trigger a populateArtistInfo in the background
|
||||
if time.Since(updatedAt) > conf.Server.DevArtistInfoTimeToLive {
|
||||
log.Debug("Found expired cached ArtistInfo, refreshing in the background", "updatedAt", updatedAt, "name", artist.Name)
|
||||
e.artistQueue.enqueue(*artist)
|
||||
e.artistQueue.enqueue(&artist)
|
||||
}
|
||||
return artist, nil
|
||||
}
|
||||
|
||||
func (e *externalMetadata) populateArtistInfo(ctx context.Context, artist *auxArtist) error {
|
||||
func (e *provider) populateArtistInfo(ctx context.Context, artist auxArtist) (auxArtist, error) {
|
||||
start := time.Now()
|
||||
// Get MBID first, if it is not yet available
|
||||
if artist.MbzArtistID == "" {
|
||||
@@ -224,44 +239,44 @@ func (e *externalMetadata) populateArtistInfo(ctx context.Context, artist *auxAr
|
||||
// Call all registered agents and collect information
|
||||
g := errgroup.Group{}
|
||||
g.SetLimit(2)
|
||||
g.Go(func() error { e.callGetImage(ctx, e.ag, artist); return nil })
|
||||
g.Go(func() error { e.callGetBiography(ctx, e.ag, artist); return nil })
|
||||
g.Go(func() error { e.callGetURL(ctx, e.ag, artist); return nil })
|
||||
g.Go(func() error { e.callGetSimilar(ctx, e.ag, artist, maxSimilarArtists, true); return nil })
|
||||
g.Go(func() error { e.callGetImage(ctx, e.ag, &artist); return nil })
|
||||
g.Go(func() error { e.callGetBiography(ctx, e.ag, &artist); return nil })
|
||||
g.Go(func() error { e.callGetURL(ctx, e.ag, &artist); return nil })
|
||||
g.Go(func() error { e.callGetSimilar(ctx, e.ag, &artist, maxSimilarArtists, true); return nil })
|
||||
_ = g.Wait()
|
||||
|
||||
if utils.IsCtxDone(ctx) {
|
||||
log.Warn(ctx, "ArtistInfo update canceled", "elapsed", "id", artist.ID, "name", artist.Name, time.Since(start), ctx.Err())
|
||||
return ctx.Err()
|
||||
return artist, ctx.Err()
|
||||
}
|
||||
|
||||
artist.ExternalInfoUpdatedAt = P(time.Now())
|
||||
err := e.ds.Artist(ctx).Put(&artist.Artist)
|
||||
err := e.ds.Artist(ctx).UpdateExternalInfo(&artist.Artist)
|
||||
if err != nil {
|
||||
log.Error(ctx, "Error trying to update artist external information", "id", artist.ID, "name", artist.Name,
|
||||
"elapsed", time.Since(start), err)
|
||||
} else {
|
||||
log.Trace(ctx, "ArtistInfo collected", "artist", artist, "elapsed", time.Since(start))
|
||||
}
|
||||
return nil
|
||||
return artist, nil
|
||||
}
|
||||
|
||||
func (e *externalMetadata) SimilarSongs(ctx context.Context, id string, count int) (model.MediaFiles, error) {
|
||||
func (e *provider) ArtistRadio(ctx context.Context, id string, count int) (model.MediaFiles, error) {
|
||||
artist, err := e.getArtist(ctx, id)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
e.callGetSimilar(ctx, e.ag, artist, 15, false)
|
||||
e.callGetSimilar(ctx, e.ag, &artist, 15, false)
|
||||
if utils.IsCtxDone(ctx) {
|
||||
log.Warn(ctx, "SimilarSongs call canceled", ctx.Err())
|
||||
log.Warn(ctx, "ArtistRadio call canceled", ctx.Err())
|
||||
return nil, ctx.Err()
|
||||
}
|
||||
|
||||
weightedSongs := random.NewWeightedChooser[model.MediaFile]()
|
||||
addArtist := func(a model.Artist, weightedSongs *random.WeightedChooser[model.MediaFile], count, artistWeight int) error {
|
||||
if utils.IsCtxDone(ctx) {
|
||||
log.Warn(ctx, "SimilarSongs call canceled", ctx.Err())
|
||||
log.Warn(ctx, "ArtistRadio call canceled", ctx.Err())
|
||||
return ctx.Err()
|
||||
}
|
||||
|
||||
@@ -304,13 +319,13 @@ func (e *externalMetadata) SimilarSongs(ctx context.Context, id string, count in
|
||||
return similarSongs, nil
|
||||
}
|
||||
|
||||
func (e *externalMetadata) ArtistImage(ctx context.Context, id string) (*url.URL, error) {
|
||||
func (e *provider) ArtistImage(ctx context.Context, id string) (*url.URL, error) {
|
||||
artist, err := e.getArtist(ctx, id)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
e.callGetImage(ctx, e.ag, artist)
|
||||
e.callGetImage(ctx, e.ag, &artist)
|
||||
if utils.IsCtxDone(ctx) {
|
||||
log.Warn(ctx, "ArtistImage call canceled", ctx.Err())
|
||||
return nil, ctx.Err()
|
||||
@@ -318,105 +333,191 @@ func (e *externalMetadata) ArtistImage(ctx context.Context, id string) (*url.URL
|
||||
|
||||
imageUrl := artist.ArtistImageUrl()
|
||||
if imageUrl == "" {
|
||||
return nil, agents.ErrNotFound
|
||||
return nil, model.ErrNotFound
|
||||
}
|
||||
return url.Parse(imageUrl)
|
||||
}
|
||||
|
||||
func (e *externalMetadata) AlbumImage(ctx context.Context, id string) (*url.URL, error) {
|
||||
func (e *provider) AlbumImage(ctx context.Context, id string) (*url.URL, error) {
|
||||
album, err := e.getAlbum(ctx, id)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
info, err := e.ag.GetAlbumInfo(ctx, album.Name, album.AlbumArtist, album.MbzAlbumID)
|
||||
if errors.Is(err, agents.ErrNotFound) {
|
||||
images, err := e.ag.GetAlbumImages(ctx, album.Name, album.AlbumArtist, album.MbzAlbumID)
|
||||
if err != nil {
|
||||
switch {
|
||||
case errors.Is(err, agents.ErrNotFound):
|
||||
log.Trace(ctx, "Album not found in agent", "albumID", id, "name", album.Name, "artist", album.AlbumArtist)
|
||||
return nil, model.ErrNotFound
|
||||
case errors.Is(err, context.Canceled):
|
||||
log.Debug(ctx, "GetAlbumImages call canceled", err)
|
||||
default:
|
||||
log.Warn(ctx, "Error getting album images from agent", "albumID", id, "name", album.Name, "artist", album.AlbumArtist, err)
|
||||
}
|
||||
return nil, err
|
||||
}
|
||||
if utils.IsCtxDone(ctx) {
|
||||
log.Warn(ctx, "AlbumImage call canceled", ctx.Err())
|
||||
return nil, ctx.Err()
|
||||
|
||||
if len(images) == 0 {
|
||||
log.Warn(ctx, "Agent returned no images without error", "albumID", id, "name", album.Name, "artist", album.AlbumArtist)
|
||||
return nil, model.ErrNotFound
|
||||
}
|
||||
|
||||
// Return the biggest image
|
||||
var img agents.ExternalImage
|
||||
for _, i := range info.Images {
|
||||
for _, i := range images {
|
||||
if img.Size <= i.Size {
|
||||
img = i
|
||||
}
|
||||
}
|
||||
if img.URL == "" {
|
||||
return nil, agents.ErrNotFound
|
||||
return nil, model.ErrNotFound
|
||||
}
|
||||
return url.Parse(img.URL)
|
||||
}
|
||||
|
||||
func (e *externalMetadata) TopSongs(ctx context.Context, artistName string, count int) (model.MediaFiles, error) {
|
||||
func (e *provider) TopSongs(ctx context.Context, artistName string, count int) (model.MediaFiles, error) {
|
||||
artist, err := e.findArtistByName(ctx, artistName)
|
||||
if err != nil {
|
||||
log.Error(ctx, "Artist not found", "name", artistName, err)
|
||||
return nil, nil
|
||||
}
|
||||
|
||||
return e.getMatchingTopSongs(ctx, e.ag, artist, count)
|
||||
}
|
||||
|
||||
func (e *externalMetadata) getMatchingTopSongs(ctx context.Context, agent agents.ArtistTopSongsRetriever, artist *auxArtist, count int) (model.MediaFiles, error) {
|
||||
songs, err := agent.GetArtistTopSongs(ctx, artist.ID, artist.Name, artist.MbzArtistID, count)
|
||||
if errors.Is(err, agents.ErrNotFound) {
|
||||
return nil, nil
|
||||
}
|
||||
songs, err := e.getMatchingTopSongs(ctx, e.ag, artist, count)
|
||||
if err != nil {
|
||||
switch {
|
||||
case errors.Is(err, agents.ErrNotFound):
|
||||
log.Trace(ctx, "TopSongs not found", "name", artistName)
|
||||
return nil, model.ErrNotFound
|
||||
case errors.Is(err, context.Canceled):
|
||||
log.Debug(ctx, "TopSongs call canceled", err)
|
||||
default:
|
||||
log.Warn(ctx, "Error getting top songs from agent", "artist", artistName, err)
|
||||
}
|
||||
|
||||
return nil, err
|
||||
}
|
||||
return songs, nil
|
||||
}
|
||||
|
||||
var mfs model.MediaFiles
|
||||
for _, t := range songs {
|
||||
mf, err := e.findMatchingTrack(ctx, t.MBID, artist.ID, t.Name)
|
||||
if err != nil {
|
||||
continue
|
||||
}
|
||||
mfs = append(mfs, *mf)
|
||||
if len(mfs) == count {
|
||||
break
|
||||
}
|
||||
func (e *provider) getMatchingTopSongs(ctx context.Context, agent agents.ArtistTopSongsRetriever, artist *auxArtist, count int) (model.MediaFiles, error) {
|
||||
songs, err := agent.GetArtistTopSongs(ctx, artist.ID, artist.Name, artist.MbzArtistID, count)
|
||||
if err != nil {
|
||||
return nil, fmt.Errorf("failed to get top songs for artist %s: %w", artist.Name, err)
|
||||
}
|
||||
|
||||
mbidMatches, err := e.loadTracksByMBID(ctx, songs)
|
||||
if err != nil {
|
||||
return nil, fmt.Errorf("failed to load tracks by MBID: %w", err)
|
||||
}
|
||||
titleMatches, err := e.loadTracksByTitle(ctx, songs, artist, mbidMatches)
|
||||
if err != nil {
|
||||
return nil, fmt.Errorf("failed to load tracks by title: %w", err)
|
||||
}
|
||||
|
||||
log.Trace(ctx, "Top Songs loaded", "name", artist.Name, "numSongs", len(songs), "numMBIDMatches", len(mbidMatches), "numTitleMatches", len(titleMatches))
|
||||
mfs := e.selectTopSongs(songs, mbidMatches, titleMatches, count)
|
||||
|
||||
if len(mfs) == 0 {
|
||||
log.Debug(ctx, "No matching top songs found", "name", artist.Name)
|
||||
} else {
|
||||
log.Debug(ctx, "Found matching top songs", "name", artist.Name, "numSongs", len(mfs))
|
||||
}
|
||||
|
||||
return mfs, nil
|
||||
}
|
||||
|
||||
func (e *externalMetadata) findMatchingTrack(ctx context.Context, mbid string, artistID, title string) (*model.MediaFile, error) {
|
||||
if mbid != "" {
|
||||
mfs, err := e.ds.MediaFile(ctx).GetAll(model.QueryOptions{
|
||||
Filters: squirrel.Eq{"mbz_recording_id": mbid},
|
||||
})
|
||||
if err == nil && len(mfs) > 0 {
|
||||
return &mfs[0], nil
|
||||
func (e *provider) loadTracksByMBID(ctx context.Context, songs []agents.Song) (map[string]model.MediaFile, error) {
|
||||
var mbids []string
|
||||
for _, s := range songs {
|
||||
if s.MBID != "" {
|
||||
mbids = append(mbids, s.MBID)
|
||||
}
|
||||
return e.findMatchingTrack(ctx, "", artistID, title)
|
||||
}
|
||||
mfs, err := e.ds.MediaFile(ctx).GetAll(model.QueryOptions{
|
||||
matches := map[string]model.MediaFile{}
|
||||
if len(mbids) == 0 {
|
||||
return matches, nil
|
||||
}
|
||||
res, err := e.ds.MediaFile(ctx).GetAll(model.QueryOptions{
|
||||
Filters: squirrel.And{
|
||||
squirrel.Or{
|
||||
squirrel.Eq{"artist_id": artistID},
|
||||
squirrel.Eq{"album_artist_id": artistID},
|
||||
},
|
||||
squirrel.Like{"order_title": str.SanitizeFieldForSorting(title)},
|
||||
squirrel.Eq{"mbz_recording_id": mbids},
|
||||
squirrel.Eq{"missing": false},
|
||||
},
|
||||
Sort: "starred desc, rating desc, year asc, compilation asc ",
|
||||
Max: 1,
|
||||
})
|
||||
if err != nil || len(mfs) == 0 {
|
||||
return nil, model.ErrNotFound
|
||||
if err != nil {
|
||||
return matches, err
|
||||
}
|
||||
return &mfs[0], nil
|
||||
for _, mf := range res {
|
||||
if id := mf.MbzRecordingID; id != "" {
|
||||
if _, ok := matches[id]; !ok {
|
||||
matches[id] = mf
|
||||
}
|
||||
}
|
||||
}
|
||||
return matches, nil
|
||||
}
|
||||
|
||||
func (e *externalMetadata) callGetURL(ctx context.Context, agent agents.ArtistURLRetriever, artist *auxArtist) {
|
||||
func (e *provider) loadTracksByTitle(ctx context.Context, songs []agents.Song, artist *auxArtist, mbidMatches map[string]model.MediaFile) (map[string]model.MediaFile, error) {
|
||||
titleMap := map[string]string{}
|
||||
for _, s := range songs {
|
||||
if s.MBID != "" && mbidMatches[s.MBID].ID != "" {
|
||||
continue
|
||||
}
|
||||
sanitized := str.SanitizeFieldForSorting(s.Name)
|
||||
titleMap[sanitized] = s.Name
|
||||
}
|
||||
matches := map[string]model.MediaFile{}
|
||||
if len(titleMap) == 0 {
|
||||
return matches, nil
|
||||
}
|
||||
titleFilters := squirrel.Or{}
|
||||
for sanitized := range titleMap {
|
||||
titleFilters = append(titleFilters, squirrel.Like{"order_title": sanitized})
|
||||
}
|
||||
|
||||
res, err := e.ds.MediaFile(ctx).GetAll(model.QueryOptions{
|
||||
Filters: squirrel.And{
|
||||
squirrel.Or{
|
||||
squirrel.Eq{"artist_id": artist.ID},
|
||||
squirrel.Eq{"album_artist_id": artist.ID},
|
||||
},
|
||||
titleFilters,
|
||||
squirrel.Eq{"missing": false},
|
||||
},
|
||||
Sort: "starred desc, rating desc, year asc, compilation asc ",
|
||||
})
|
||||
if err != nil {
|
||||
return matches, err
|
||||
}
|
||||
for _, mf := range res {
|
||||
sanitized := str.SanitizeFieldForSorting(mf.Title)
|
||||
if _, ok := matches[sanitized]; !ok {
|
||||
matches[sanitized] = mf
|
||||
}
|
||||
}
|
||||
return matches, nil
|
||||
}
|
||||
|
||||
func (e *provider) selectTopSongs(songs []agents.Song, byMBID, byTitle map[string]model.MediaFile, count int) model.MediaFiles {
|
||||
var mfs model.MediaFiles
|
||||
for _, t := range songs {
|
||||
if len(mfs) == count {
|
||||
break
|
||||
}
|
||||
if t.MBID != "" {
|
||||
if mf, ok := byMBID[t.MBID]; ok {
|
||||
mfs = append(mfs, mf)
|
||||
continue
|
||||
}
|
||||
}
|
||||
if mf, ok := byTitle[str.SanitizeFieldForSorting(t.Name)]; ok {
|
||||
mfs = append(mfs, mf)
|
||||
}
|
||||
}
|
||||
return mfs
|
||||
}
|
||||
|
||||
func (e *provider) callGetURL(ctx context.Context, agent agents.ArtistURLRetriever, artist *auxArtist) {
|
||||
artisURL, err := agent.GetArtistURL(ctx, artist.ID, artist.Name, artist.MbzArtistID)
|
||||
if err != nil {
|
||||
return
|
||||
@@ -424,7 +525,7 @@ func (e *externalMetadata) callGetURL(ctx context.Context, agent agents.ArtistUR
|
||||
artist.ExternalUrl = artisURL
|
||||
}
|
||||
|
||||
func (e *externalMetadata) callGetBiography(ctx context.Context, agent agents.ArtistBiographyRetriever, artist *auxArtist) {
|
||||
func (e *provider) callGetBiography(ctx context.Context, agent agents.ArtistBiographyRetriever, artist *auxArtist) {
|
||||
bio, err := agent.GetArtistBiography(ctx, artist.ID, str.Clear(artist.Name), artist.MbzArtistID)
|
||||
if err != nil {
|
||||
return
|
||||
@@ -434,7 +535,7 @@ func (e *externalMetadata) callGetBiography(ctx context.Context, agent agents.Ar
|
||||
artist.Biography = strings.ReplaceAll(bio, "<a ", "<a target='_blank' ")
|
||||
}
|
||||
|
||||
func (e *externalMetadata) callGetImage(ctx context.Context, agent agents.ArtistImageRetriever, artist *auxArtist) {
|
||||
func (e *provider) callGetImage(ctx context.Context, agent agents.ArtistImageRetriever, artist *auxArtist) {
|
||||
images, err := agent.GetArtistImages(ctx, artist.ID, artist.Name, artist.MbzArtistID)
|
||||
if err != nil {
|
||||
return
|
||||
@@ -452,14 +553,14 @@ func (e *externalMetadata) callGetImage(ctx context.Context, agent agents.Artist
|
||||
}
|
||||
}
|
||||
|
||||
func (e *externalMetadata) callGetSimilar(ctx context.Context, agent agents.ArtistSimilarRetriever, artist *auxArtist,
|
||||
func (e *provider) callGetSimilar(ctx context.Context, agent agents.ArtistSimilarRetriever, artist *auxArtist,
|
||||
limit int, includeNotPresent bool) {
|
||||
similar, err := agent.GetSimilarArtists(ctx, artist.ID, artist.Name, artist.MbzArtistID, limit)
|
||||
if len(similar) == 0 || err != nil {
|
||||
return
|
||||
}
|
||||
start := time.Now()
|
||||
sa, err := e.mapSimilarArtists(ctx, similar, includeNotPresent)
|
||||
sa, err := e.mapSimilarArtists(ctx, similar, limit, includeNotPresent)
|
||||
log.Debug(ctx, "Mapped Similar Artists", "artist", artist.Name, "numSimilar", len(sa), "elapsed", time.Since(start))
|
||||
if err != nil {
|
||||
return
|
||||
@@ -467,32 +568,63 @@ func (e *externalMetadata) callGetSimilar(ctx context.Context, agent agents.Arti
|
||||
artist.SimilarArtists = sa
|
||||
}
|
||||
|
||||
func (e *externalMetadata) mapSimilarArtists(ctx context.Context, similar []agents.Artist, includeNotPresent bool) (model.Artists, error) {
|
||||
func (e *provider) mapSimilarArtists(ctx context.Context, similar []agents.Artist, limit int, includeNotPresent bool) (model.Artists, error) {
|
||||
var result model.Artists
|
||||
var notPresent []string
|
||||
|
||||
// First select artists that are present.
|
||||
artistNames := slice.Map(similar, func(artist agents.Artist) string { return artist.Name })
|
||||
|
||||
// Query all artists at once
|
||||
clauses := slice.Map(artistNames, func(name string) squirrel.Sqlizer {
|
||||
return squirrel.Like{"artist.name": name}
|
||||
})
|
||||
artists, err := e.ds.Artist(ctx).GetAll(model.QueryOptions{
|
||||
Filters: squirrel.Or(clauses),
|
||||
})
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
// Create a map for quick lookup
|
||||
artistMap := make(map[string]model.Artist)
|
||||
for _, artist := range artists {
|
||||
artistMap[artist.Name] = artist
|
||||
}
|
||||
|
||||
count := 0
|
||||
|
||||
// Process the similar artists
|
||||
for _, s := range similar {
|
||||
sa, err := e.findArtistByName(ctx, s.Name)
|
||||
if err != nil {
|
||||
if artist, found := artistMap[s.Name]; found {
|
||||
result = append(result, artist)
|
||||
count++
|
||||
|
||||
if count >= limit {
|
||||
break
|
||||
}
|
||||
} else {
|
||||
notPresent = append(notPresent, s.Name)
|
||||
continue
|
||||
}
|
||||
result = append(result, sa.Artist)
|
||||
}
|
||||
|
||||
// Then fill up with non-present artists
|
||||
if includeNotPresent {
|
||||
if includeNotPresent && count < limit {
|
||||
for _, s := range notPresent {
|
||||
sa := model.Artist{ID: unavailableArtistID, Name: s}
|
||||
// Let the ID empty to indicate that the artist is not present in the DB
|
||||
sa := model.Artist{Name: s}
|
||||
result = append(result, sa)
|
||||
|
||||
count++
|
||||
if count >= limit {
|
||||
break
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
return result, nil
|
||||
}
|
||||
|
||||
func (e *externalMetadata) findArtistByName(ctx context.Context, artistName string) (*auxArtist, error) {
|
||||
func (e *provider) findArtistByName(ctx context.Context, artistName string) (*auxArtist, error) {
|
||||
artists, err := e.ds.Artist(ctx).GetAll(model.QueryOptions{
|
||||
Filters: squirrel.Like{"artist.name": artistName},
|
||||
Max: 1,
|
||||
@@ -510,10 +642,10 @@ func (e *externalMetadata) findArtistByName(ctx context.Context, artistName stri
|
||||
return artist, nil
|
||||
}
|
||||
|
||||
func (e *externalMetadata) loadSimilar(ctx context.Context, artist *auxArtist, count int, includeNotPresent bool) error {
|
||||
func (e *provider) loadSimilar(ctx context.Context, artist *auxArtist, count int, includeNotPresent bool) error {
|
||||
var ids []string
|
||||
for _, sa := range artist.SimilarArtists {
|
||||
if sa.ID == unavailableArtistID {
|
||||
if sa.ID == "" {
|
||||
continue
|
||||
}
|
||||
ids = append(ids, sa.ID)
|
||||
@@ -544,7 +676,7 @@ func (e *externalMetadata) loadSimilar(ctx context.Context, artist *auxArtist, c
|
||||
continue
|
||||
}
|
||||
la = sa
|
||||
la.ID = unavailableArtistID
|
||||
la.ID = ""
|
||||
}
|
||||
loaded = append(loaded, la)
|
||||
}
|
||||
@@ -552,28 +684,31 @@ func (e *externalMetadata) loadSimilar(ctx context.Context, artist *auxArtist, c
|
||||
return nil
|
||||
}
|
||||
|
||||
type refreshQueue[T any] chan<- T
|
||||
type refreshQueue[T any] chan<- *T
|
||||
|
||||
func newRefreshQueue[T any](ctx context.Context, processFn func(context.Context, *T) error) refreshQueue[T] {
|
||||
queue := make(chan T, refreshQueueLength)
|
||||
func newRefreshQueue[T any](ctx context.Context, processFn func(context.Context, T) (T, error)) refreshQueue[T] {
|
||||
queue := make(chan *T, refreshQueueLength)
|
||||
go func() {
|
||||
for {
|
||||
time.Sleep(refreshDelay)
|
||||
ctx, cancel := context.WithTimeout(ctx, refreshTimeout)
|
||||
select {
|
||||
case item := <-queue:
|
||||
_ = processFn(ctx, &item)
|
||||
cancel()
|
||||
case <-ctx.Done():
|
||||
cancel()
|
||||
break
|
||||
return
|
||||
case <-time.After(refreshDelay):
|
||||
ctx, cancel := context.WithTimeout(ctx, refreshTimeout)
|
||||
select {
|
||||
case item := <-queue:
|
||||
_, _ = processFn(ctx, *item)
|
||||
cancel()
|
||||
case <-ctx.Done():
|
||||
cancel()
|
||||
}
|
||||
}
|
||||
}
|
||||
}()
|
||||
return queue
|
||||
}
|
||||
|
||||
func (q *refreshQueue[T]) enqueue(item T) {
|
||||
func (q *refreshQueue[T]) enqueue(item *T) {
|
||||
select {
|
||||
case *q <- item:
|
||||
default: // It is ok to miss a refresh request
|
||||
301	core/external/provider_albumimage_test.go	vendored	Normal file
@@ -0,0 +1,301 @@
|
||||
package external_test
|
||||
|
||||
import (
|
||||
"context"
|
||||
"errors"
|
||||
"net/url"
|
||||
|
||||
"github.com/navidrome/navidrome/conf"
|
||||
"github.com/navidrome/navidrome/conf/configtest"
|
||||
"github.com/navidrome/navidrome/core/agents"
|
||||
. "github.com/navidrome/navidrome/core/external"
|
||||
"github.com/navidrome/navidrome/model"
|
||||
"github.com/navidrome/navidrome/tests"
|
||||
. "github.com/onsi/ginkgo/v2"
|
||||
. "github.com/onsi/gomega"
|
||||
"github.com/stretchr/testify/mock"
|
||||
)
|
||||
|
||||
var _ = Describe("Provider - AlbumImage", func() {
|
||||
var ds *tests.MockDataStore
|
||||
var provider Provider
|
||||
var mockArtistRepo *mockArtistRepo
|
||||
var mockAlbumRepo *mockAlbumRepo
|
||||
var mockMediaFileRepo *mockMediaFileRepo
|
||||
var mockAlbumAgent *mockAlbumInfoAgent
|
||||
var ctx context.Context
|
||||
|
||||
BeforeEach(func() {
|
||||
ctx = GinkgoT().Context()
|
||||
DeferCleanup(configtest.SetupConfig())
|
||||
conf.Server.Agents = "mockAlbum" // Configure mock agent
|
||||
|
||||
mockArtistRepo = newMockArtistRepo()
|
||||
mockAlbumRepo = newMockAlbumRepo()
|
||||
mockMediaFileRepo = newMockMediaFileRepo()
|
||||
|
||||
ds = &tests.MockDataStore{
|
||||
MockedArtist: mockArtistRepo,
|
||||
MockedAlbum: mockAlbumRepo,
|
||||
MockedMediaFile: mockMediaFileRepo,
|
||||
}
|
||||
|
||||
mockAlbumAgent = newMockAlbumInfoAgent()
|
||||
|
||||
agentsCombined := &mockAgents{albumInfoAgent: mockAlbumAgent}
|
||||
provider = NewProvider(ds, agentsCombined)
|
||||
|
||||
// Default mocks
|
||||
// Mocks for GetEntityByID sequence (initial failed lookups)
|
||||
mockArtistRepo.On("Get", "album-1").Return(nil, model.ErrNotFound).Once()
|
||||
mockArtistRepo.On("Get", "mf-1").Return(nil, model.ErrNotFound).Once()
|
||||
mockAlbumRepo.On("Get", "mf-1").Return(nil, model.ErrNotFound).Once()
|
||||
|
||||
// Default mock for non-existent entities - Use Maybe() for flexibility
|
||||
mockArtistRepo.On("Get", "not-found").Return(nil, model.ErrNotFound).Maybe()
|
||||
mockAlbumRepo.On("Get", "not-found").Return(nil, model.ErrNotFound).Maybe()
|
||||
mockMediaFileRepo.On("Get", "not-found").Return(nil, model.ErrNotFound).Maybe()
|
||||
})
|
||||
|
||||
It("returns the largest image URL when successful", func() {
|
||||
// Arrange
|
||||
mockArtistRepo.On("Get", "album-1").Return(nil, model.ErrNotFound).Once() // Expect GetEntityByID sequence
|
||||
mockAlbumRepo.On("Get", "album-1").Return(&model.Album{ID: "album-1", Name: "Album One", AlbumArtistID: "artist-1"}, nil).Once()
|
||||
// Explicitly mock agent call for this test
|
||||
mockAlbumAgent.On("GetAlbumImages", ctx, "Album One", "", "").
|
||||
Return([]agents.ExternalImage{
|
||||
{URL: "http://example.com/large.jpg", Size: 1000},
|
||||
{URL: "http://example.com/medium.jpg", Size: 500},
|
||||
{URL: "http://example.com/small.jpg", Size: 200},
|
||||
}, nil).Once()
|
||||
|
||||
expectedURL, _ := url.Parse("http://example.com/large.jpg")
|
||||
imgURL, err := provider.AlbumImage(ctx, "album-1")
|
||||
|
||||
Expect(err).ToNot(HaveOccurred())
|
||||
Expect(imgURL).To(Equal(expectedURL))
|
||||
mockArtistRepo.AssertCalled(GinkgoT(), "Get", "album-1") // From GetEntityByID
|
||||
mockAlbumRepo.AssertCalled(GinkgoT(), "Get", "album-1")
|
||||
mockArtistRepo.AssertNotCalled(GinkgoT(), "Get", "artist-1") // Artist lookup no longer happens in getAlbum
|
||||
mockAlbumAgent.AssertCalled(GinkgoT(), "GetAlbumImages", ctx, "Album One", "", "") // Expect empty artist name
|
||||
})
|
||||
|
||||
It("returns ErrNotFound if the album is not found in the DB", func() {
|
||||
// Arrange: Explicitly expect the full GetEntityByID sequence for "not-found"
|
||||
mockArtistRepo.On("Get", "not-found").Return(nil, model.ErrNotFound).Once()
|
||||
mockAlbumRepo.On("Get", "not-found").Return(nil, model.ErrNotFound).Once()
|
||||
mockMediaFileRepo.On("Get", "not-found").Return(nil, model.ErrNotFound).Once()
|
||||
|
||||
imgURL, err := provider.AlbumImage(ctx, "not-found")
|
||||
|
||||
Expect(err).To(MatchError("data not found"))
|
||||
Expect(imgURL).To(BeNil())
|
||||
mockArtistRepo.AssertCalled(GinkgoT(), "Get", "not-found")
|
||||
mockAlbumRepo.AssertCalled(GinkgoT(), "Get", "not-found")
|
||||
mockMediaFileRepo.AssertCalled(GinkgoT(), "Get", "not-found")
|
||||
mockAlbumAgent.AssertNotCalled(GinkgoT(), "GetAlbumImages", mock.Anything, mock.Anything, mock.Anything)
|
||||
})
|
||||
|
||||
It("returns the agent error if the agent fails", func() {
|
||||
// Arrange
|
||||
mockArtistRepo.On("Get", "album-1").Return(nil, model.ErrNotFound).Once() // Expect GetEntityByID sequence
|
||||
mockAlbumRepo.On("Get", "album-1").Return(&model.Album{ID: "album-1", Name: "Album One", AlbumArtistID: "artist-1"}, nil).Once()
|
||||
|
||||
agentErr := errors.New("agent failure")
|
||||
// Explicitly mock agent call for this test
|
||||
mockAlbumAgent.On("GetAlbumImages", ctx, "Album One", "", "").Return(nil, agentErr).Once() // Expect empty artist
|
||||
|
||||
imgURL, err := provider.AlbumImage(ctx, "album-1")
|
||||
|
||||
Expect(err).To(MatchError("agent failure"))
|
||||
Expect(imgURL).To(BeNil())
|
||||
mockArtistRepo.AssertCalled(GinkgoT(), "Get", "album-1")
|
||||
mockAlbumRepo.AssertCalled(GinkgoT(), "Get", "album-1")
|
||||
mockArtistRepo.AssertNotCalled(GinkgoT(), "Get", "artist-1")
|
||||
mockAlbumAgent.AssertCalled(GinkgoT(), "GetAlbumImages", ctx, "Album One", "", "") // Expect empty artist
|
||||
})
|
||||
|
||||
It("returns ErrNotFound if the agent returns ErrNotFound", func() {
|
||||
// Arrange
|
||||
mockArtistRepo.On("Get", "album-1").Return(nil, model.ErrNotFound).Once() // Expect GetEntityByID sequence
|
||||
mockAlbumRepo.On("Get", "album-1").Return(&model.Album{ID: "album-1", Name: "Album One", AlbumArtistID: "artist-1"}, nil).Once()
|
||||
|
||||
// Explicitly mock agent call for this test
|
||||
mockAlbumAgent.On("GetAlbumImages", ctx, "Album One", "", "").Return(nil, agents.ErrNotFound).Once() // Expect empty artist
|
||||
|
||||
imgURL, err := provider.AlbumImage(ctx, "album-1")
|
||||
|
||||
Expect(err).To(MatchError("data not found"))
|
||||
Expect(imgURL).To(BeNil())
|
||||
mockArtistRepo.AssertCalled(GinkgoT(), "Get", "album-1")
|
||||
mockAlbumRepo.AssertCalled(GinkgoT(), "Get", "album-1")
|
||||
mockAlbumAgent.AssertCalled(GinkgoT(), "GetAlbumImages", ctx, "Album One", "", "") // Expect empty artist
|
||||
})
|
||||
|
||||
It("returns ErrNotFound if the agent returns no images", func() {
|
||||
// Arrange
|
||||
mockArtistRepo.On("Get", "album-1").Return(nil, model.ErrNotFound).Once() // Expect GetEntityByID sequence
|
||||
mockAlbumRepo.On("Get", "album-1").Return(&model.Album{ID: "album-1", Name: "Album One", AlbumArtistID: "artist-1"}, nil).Once()
|
||||
|
||||
// Explicitly mock agent call for this test
|
||||
mockAlbumAgent.On("GetAlbumImages", ctx, "Album One", "", "").
|
||||
Return([]agents.ExternalImage{}, nil).Once() // Expect empty artist
|
||||
|
||||
imgURL, err := provider.AlbumImage(ctx, "album-1")
|
||||
|
||||
Expect(err).To(MatchError("data not found"))
|
||||
Expect(imgURL).To(BeNil())
|
||||
mockArtistRepo.AssertCalled(GinkgoT(), "Get", "album-1")
|
||||
mockAlbumRepo.AssertCalled(GinkgoT(), "Get", "album-1")
|
||||
mockAlbumAgent.AssertCalled(GinkgoT(), "GetAlbumImages", ctx, "Album One", "", "") // Expect empty artist
|
||||
})
|
||||
|
||||
It("returns context error if context is canceled", func() {
|
||||
// Arrange
|
||||
cctx, cancelCtx := context.WithCancel(ctx)
|
||||
// Mock the necessary DB calls *before* canceling the context
|
||||
mockArtistRepo.On("Get", "album-1").Return(nil, model.ErrNotFound).Once()
|
||||
mockAlbumRepo.On("Get", "album-1").Return(&model.Album{ID: "album-1", Name: "Album One", AlbumArtistID: "artist-1"}, nil).Once()
|
||||
// Expect the agent call even if context is cancelled, returning the context error
|
||||
mockAlbumAgent.On("GetAlbumImages", cctx, "Album One", "", "").Return(nil, context.Canceled).Once()
|
||||
// Cancel the context *before* calling the function under test
|
||||
cancelCtx()
|
||||
|
||||
imgURL, err := provider.AlbumImage(cctx, "album-1")
|
||||
|
||||
Expect(err).To(MatchError("context canceled"))
|
||||
Expect(imgURL).To(BeNil())
|
||||
mockArtistRepo.AssertCalled(GinkgoT(), "Get", "album-1")
|
||||
mockAlbumRepo.AssertCalled(GinkgoT(), "Get", "album-1")
|
||||
// Agent should now be called, verify this expectation
|
||||
mockAlbumAgent.AssertCalled(GinkgoT(), "GetAlbumImages", cctx, "Album One", "", "")
|
||||
})
|
||||
|
||||
It("derives album ID from MediaFile ID", func() {
|
||||
// Arrange: Mock full GetEntityByID for "mf-1" and recursive "album-1"
|
||||
mockArtistRepo.On("Get", "mf-1").Return(nil, model.ErrNotFound).Once()
|
||||
mockAlbumRepo.On("Get", "mf-1").Return(nil, model.ErrNotFound).Once()
|
||||
mockMediaFileRepo.On("Get", "mf-1").Return(&model.MediaFile{ID: "mf-1", Title: "Track One", ArtistID: "artist-1", AlbumID: "album-1"}, nil).Once()
|
||||
mockArtistRepo.On("Get", "album-1").Return(nil, model.ErrNotFound).Once()
|
||||
mockAlbumRepo.On("Get", "album-1").Return(&model.Album{ID: "album-1", Name: "Album One", AlbumArtistID: "artist-1"}, nil).Once()
|
||||
|
||||
// Explicitly mock agent call for this test
|
||||
mockAlbumAgent.On("GetAlbumImages", ctx, "Album One", "", "").
|
||||
Return([]agents.ExternalImage{
|
||||
{URL: "http://example.com/large.jpg", Size: 1000},
|
||||
{URL: "http://example.com/medium.jpg", Size: 500},
|
||||
{URL: "http://example.com/small.jpg", Size: 200},
|
||||
}, nil).Once()
|
||||
|
||||
expectedURL, _ := url.Parse("http://example.com/large.jpg")
|
||||
imgURL, err := provider.AlbumImage(ctx, "mf-1")
|
||||
|
||||
Expect(err).ToNot(HaveOccurred())
|
||||
Expect(imgURL).To(Equal(expectedURL))
|
||||
mockArtistRepo.AssertCalled(GinkgoT(), "Get", "mf-1")
|
||||
mockAlbumRepo.AssertCalled(GinkgoT(), "Get", "mf-1")
|
||||
mockMediaFileRepo.AssertCalled(GinkgoT(), "Get", "mf-1")
|
||||
mockArtistRepo.AssertCalled(GinkgoT(), "Get", "album-1")
|
||||
mockAlbumRepo.AssertCalled(GinkgoT(), "Get", "album-1")
|
||||
mockArtistRepo.AssertNotCalled(GinkgoT(), "Get", "artist-1")
|
||||
mockAlbumAgent.AssertCalled(GinkgoT(), "GetAlbumImages", ctx, "Album One", "", "")
|
||||
})
|
||||
|
||||
It("handles different image orders from agent", func() {
|
||||
// Arrange
|
||||
mockArtistRepo.On("Get", "album-1").Return(nil, model.ErrNotFound).Once() // Expect GetEntityByID sequence
|
||||
mockAlbumRepo.On("Get", "album-1").Return(&model.Album{ID: "album-1", Name: "Album One", AlbumArtistID: "artist-1"}, nil).Once()
|
||||
// Explicitly mock agent call for this test
|
||||
mockAlbumAgent.On("GetAlbumImages", ctx, "Album One", "", "").
|
||||
Return([]agents.ExternalImage{
|
||||
{URL: "http://example.com/small.jpg", Size: 200},
|
||||
{URL: "http://example.com/large.jpg", Size: 1000},
|
||||
{URL: "http://example.com/medium.jpg", Size: 500},
|
||||
}, nil).Once()
|
||||
|
||||
expectedURL, _ := url.Parse("http://example.com/large.jpg")
|
||||
imgURL, err := provider.AlbumImage(ctx, "album-1")
|
||||
|
||||
Expect(err).ToNot(HaveOccurred())
|
||||
Expect(imgURL).To(Equal(expectedURL)) // Should still pick the largest
|
||||
mockAlbumAgent.AssertCalled(GinkgoT(), "GetAlbumImages", ctx, "Album One", "", "")
|
||||
})
|
||||
|
||||
It("handles agent returning only one image", func() {
|
||||
// Arrange
|
||||
mockArtistRepo.On("Get", "album-1").Return(nil, model.ErrNotFound).Once() // Expect GetEntityByID sequence
|
||||
mockAlbumRepo.On("Get", "album-1").Return(&model.Album{ID: "album-1", Name: "Album One", AlbumArtistID: "artist-1"}, nil).Once()
|
||||
// Explicitly mock agent call for this test
|
||||
mockAlbumAgent.On("GetAlbumImages", ctx, "Album One", "", "").
|
||||
Return([]agents.ExternalImage{
|
||||
{URL: "http://example.com/single.jpg", Size: 700},
|
||||
}, nil).Once()
|
||||
|
||||
expectedURL, _ := url.Parse("http://example.com/single.jpg")
|
||||
imgURL, err := provider.AlbumImage(ctx, "album-1")
|
||||
|
||||
Expect(err).ToNot(HaveOccurred())
|
||||
Expect(imgURL).To(Equal(expectedURL))
|
||||
mockAlbumAgent.AssertCalled(GinkgoT(), "GetAlbumImages", ctx, "Album One", "", "")
|
||||
})
|
||||
|
||||
It("returns ErrNotFound if deriving album ID fails", func() {
|
||||
// Arrange: Mock full GetEntityByID for "mf-no-album" and recursive "not-found"
|
||||
mockArtistRepo.On("Get", "mf-no-album").Return(nil, model.ErrNotFound).Once()
|
||||
mockAlbumRepo.On("Get", "mf-no-album").Return(nil, model.ErrNotFound).Once()
|
||||
mockMediaFileRepo.On("Get", "mf-no-album").Return(&model.MediaFile{ID: "mf-no-album", Title: "Track No Album", ArtistID: "artist-1", AlbumID: "not-found"}, nil).Once()
|
||||
mockArtistRepo.On("Get", "not-found").Return(nil, model.ErrNotFound).Once()
|
||||
mockAlbumRepo.On("Get", "not-found").Return(nil, model.ErrNotFound).Once()
|
||||
mockMediaFileRepo.On("Get", "not-found").Return(nil, model.ErrNotFound).Once()
|
||||
|
||||
imgURL, err := provider.AlbumImage(ctx, "mf-no-album")
|
||||
|
||||
Expect(err).To(MatchError("data not found"))
|
||||
Expect(imgURL).To(BeNil())
|
||||
mockArtistRepo.AssertCalled(GinkgoT(), "Get", "mf-no-album")
|
||||
mockAlbumRepo.AssertCalled(GinkgoT(), "Get", "mf-no-album")
|
||||
mockMediaFileRepo.AssertCalled(GinkgoT(), "Get", "mf-no-album")
|
||||
mockArtistRepo.AssertCalled(GinkgoT(), "Get", "not-found")
|
||||
mockAlbumRepo.AssertCalled(GinkgoT(), "Get", "not-found")
|
||||
mockMediaFileRepo.AssertCalled(GinkgoT(), "Get", "not-found")
|
||||
mockAlbumAgent.AssertNotCalled(GinkgoT(), "GetAlbumImages", mock.Anything, mock.Anything, mock.Anything)
|
||||
})
|
||||
})

// mockAlbumInfoAgent implementation
type mockAlbumInfoAgent struct {
	mock.Mock
	agents.AlbumInfoRetriever
	agents.AlbumImageRetriever
}

func newMockAlbumInfoAgent() *mockAlbumInfoAgent {
	m := new(mockAlbumInfoAgent)
	m.On("AgentName").Return("mockAlbum").Maybe()
	return m
}

func (m *mockAlbumInfoAgent) AgentName() string {
	args := m.Called()
	return args.String(0)
}

func (m *mockAlbumInfoAgent) GetAlbumInfo(ctx context.Context, name, artist, mbid string) (*agents.AlbumInfo, error) {
	args := m.Called(ctx, name, artist, mbid)
	if args.Get(0) == nil {
		return nil, args.Error(1)
	}
	return args.Get(0).(*agents.AlbumInfo), args.Error(1)
}

func (m *mockAlbumInfoAgent) GetAlbumImages(ctx context.Context, name, artist, mbid string) ([]agents.ExternalImage, error) {
	args := m.Called(ctx, name, artist, mbid)
	if args.Get(0) == nil {
		return nil, args.Error(1)
	}
	return args.Get(0).([]agents.ExternalImage), args.Error(1)
}

// Ensure mockAgent implements the interfaces
var _ agents.AlbumInfoRetriever = (*mockAlbumInfoAgent)(nil)
var _ agents.AlbumImageRetriever = (*mockAlbumInfoAgent)(nil)
301
core/external/provider_artistimage_test.go
vendored
Normal file
@@ -0,0 +1,301 @@
|
||||
package external_test
|
||||
|
||||
import (
|
||||
"context"
|
||||
"errors"
|
||||
"net/url"
|
||||
|
||||
"github.com/navidrome/navidrome/conf"
|
||||
"github.com/navidrome/navidrome/conf/configtest"
|
||||
"github.com/navidrome/navidrome/core/agents"
|
||||
. "github.com/navidrome/navidrome/core/external"
|
||||
"github.com/navidrome/navidrome/model"
|
||||
"github.com/navidrome/navidrome/tests"
|
||||
. "github.com/onsi/ginkgo/v2"
|
||||
. "github.com/onsi/gomega"
|
||||
"github.com/stretchr/testify/mock"
|
||||
)
|
||||
|
||||
var _ = Describe("Provider - ArtistImage", func() {
|
||||
var ds *tests.MockDataStore
|
||||
var provider Provider
|
||||
var mockArtistRepo *mockArtistRepo
|
||||
var mockAlbumRepo *mockAlbumRepo
|
||||
var mockMediaFileRepo *mockMediaFileRepo
|
||||
var mockImageAgent *mockArtistImageAgent
|
||||
var agentsCombined *mockAgents
|
||||
var ctx context.Context
|
||||
|
||||
BeforeEach(func() {
|
||||
DeferCleanup(configtest.SetupConfig())
|
||||
conf.Server.Agents = "mockImage" // Configure only the mock agent
|
||||
ctx = GinkgoT().Context()
|
||||
|
||||
mockArtistRepo = newMockArtistRepo()
|
||||
mockAlbumRepo = newMockAlbumRepo()
|
||||
mockMediaFileRepo = newMockMediaFileRepo()
|
||||
|
||||
ds = &tests.MockDataStore{
|
||||
MockedArtist: mockArtistRepo,
|
||||
MockedAlbum: mockAlbumRepo,
|
||||
MockedMediaFile: mockMediaFileRepo,
|
||||
}
|
||||
|
||||
mockImageAgent = newMockArtistImageAgent()
|
||||
|
||||
// Use the mockAgents from helper, setting the specific agent
|
||||
agentsCombined = &mockAgents{
|
||||
imageAgent: mockImageAgent,
|
||||
}
|
||||
|
||||
provider = NewProvider(ds, agentsCombined)
|
||||
|
||||
// Default mocks for successful Get calls
|
||||
mockArtistRepo.On("Get", "artist-1").Return(&model.Artist{ID: "artist-1", Name: "Artist One"}, nil).Maybe()
|
||||
mockAlbumRepo.On("Get", "album-1").Return(&model.Album{ID: "album-1", Name: "Album One", AlbumArtistID: "artist-1"}, nil).Maybe()
|
||||
mockMediaFileRepo.On("Get", "mf-1").Return(&model.MediaFile{ID: "mf-1", Title: "Track One", ArtistID: "artist-1"}, nil).Maybe()
|
||||
// Default mock for non-existent entities
|
||||
mockArtistRepo.On("Get", "not-found").Return(nil, model.ErrNotFound).Maybe()
|
||||
mockAlbumRepo.On("Get", "not-found").Return(nil, model.ErrNotFound).Maybe()
|
||||
mockMediaFileRepo.On("Get", "not-found").Return(nil, model.ErrNotFound).Maybe()
|
||||
|
||||
// Default successful image agent response
|
||||
mockImageAgent.On("GetArtistImages", mock.Anything, "artist-1", "Artist One", "").
|
||||
Return([]agents.ExternalImage{
|
||||
{URL: "http://example.com/large.jpg", Size: 1000},
|
||||
{URL: "http://example.com/medium.jpg", Size: 500},
|
||||
{URL: "http://example.com/small.jpg", Size: 200},
|
||||
}, nil).Maybe()
|
||||
})
|
||||
|
||||
AfterEach(func() {
|
||||
mockArtistRepo.AssertExpectations(GinkgoT())
|
||||
mockAlbumRepo.AssertExpectations(GinkgoT())
|
||||
mockMediaFileRepo.AssertExpectations(GinkgoT())
|
||||
mockImageAgent.AssertExpectations(GinkgoT())
|
||||
})
|
||||
|
||||
It("returns the largest image URL when successful", func() {
|
||||
// Arrange
|
||||
expectedURL, _ := url.Parse("http://example.com/large.jpg")
|
||||
|
||||
// Act
|
||||
imgURL, err := provider.ArtistImage(ctx, "artist-1")
|
||||
|
||||
// Assert
|
||||
Expect(err).ToNot(HaveOccurred())
|
||||
Expect(imgURL).To(Equal(expectedURL))
|
||||
mockArtistRepo.AssertCalled(GinkgoT(), "Get", "artist-1")
|
||||
mockImageAgent.AssertCalled(GinkgoT(), "GetArtistImages", ctx, "artist-1", "Artist One", "")
|
||||
})
|
||||
|
||||
It("returns ErrNotFound if the artist is not found in the DB", func() {
|
||||
// Arrange
|
||||
|
||||
// Act
|
||||
imgURL, err := provider.ArtistImage(ctx, "not-found")
|
||||
|
||||
// Assert
|
||||
Expect(err).To(MatchError(model.ErrNotFound))
|
||||
Expect(imgURL).To(BeNil())
|
||||
mockArtistRepo.AssertCalled(GinkgoT(), "Get", "not-found")
|
||||
mockImageAgent.AssertNotCalled(GinkgoT(), "GetArtistImages", mock.Anything, mock.Anything, mock.Anything, mock.Anything)
|
||||
})
|
||||
|
||||
It("returns the agent error if the agent fails", func() {
|
||||
// Arrange
|
||||
agentErr := errors.New("agent failure")
|
||||
mockImageAgent.Mock = mock.Mock{} // Reset default expectation
|
||||
mockImageAgent.On("GetArtistImages", ctx, "artist-1", "Artist One", "").Return(nil, agentErr).Once()
|
||||
|
||||
// Act
|
||||
imgURL, err := provider.ArtistImage(ctx, "artist-1")
|
||||
|
||||
// Assert
|
||||
Expect(err).To(MatchError(model.ErrNotFound)) // Corrected Expectation: The provider maps agent errors (other than canceled) to ErrNotFound if no image was found/populated
|
||||
Expect(imgURL).To(BeNil())
|
||||
mockArtistRepo.AssertCalled(GinkgoT(), "Get", "artist-1")
|
||||
mockImageAgent.AssertCalled(GinkgoT(), "GetArtistImages", ctx, "artist-1", "Artist One", "")
|
||||
})
|
||||
|
||||
It("returns ErrNotFound if the agent returns ErrNotFound", func() {
|
||||
// Arrange
|
||||
mockImageAgent.Mock = mock.Mock{} // Reset default expectation
|
||||
mockImageAgent.On("GetArtistImages", ctx, "artist-1", "Artist One", "").Return(nil, agents.ErrNotFound).Once()
|
||||
|
||||
// Act
|
||||
imgURL, err := provider.ArtistImage(ctx, "artist-1")
|
||||
|
||||
// Assert
|
||||
Expect(err).To(MatchError(model.ErrNotFound))
|
||||
Expect(imgURL).To(BeNil())
|
||||
mockArtistRepo.AssertCalled(GinkgoT(), "Get", "artist-1")
|
||||
mockImageAgent.AssertCalled(GinkgoT(), "GetArtistImages", ctx, "artist-1", "Artist One", "")
|
||||
})
|
||||
|
||||
It("returns ErrNotFound if the agent returns no images", func() {
|
||||
// Arrange
|
||||
mockImageAgent.Mock = mock.Mock{} // Reset default expectation
|
||||
mockImageAgent.On("GetArtistImages", ctx, "artist-1", "Artist One", "").Return([]agents.ExternalImage{}, nil).Once()
|
||||
|
||||
// Act
|
||||
imgURL, err := provider.ArtistImage(ctx, "artist-1")
|
||||
|
||||
// Assert
|
||||
Expect(err).To(MatchError(model.ErrNotFound)) // Implementation maps empty result to ErrNotFound
|
||||
Expect(imgURL).To(BeNil())
|
||||
mockArtistRepo.AssertCalled(GinkgoT(), "Get", "artist-1")
|
||||
mockImageAgent.AssertCalled(GinkgoT(), "GetArtistImages", ctx, "artist-1", "Artist One", "")
|
||||
})
|
||||
|
||||
It("returns context error if context is canceled before agent call", func() {
|
||||
// Arrange
|
||||
cctx, cancelCtx := context.WithCancel(context.Background())
|
||||
mockArtistRepo.Mock = mock.Mock{} // Reset default expectation for artist repo as well
|
||||
mockArtistRepo.On("Get", "artist-1").Return(&model.Artist{ID: "artist-1", Name: "Artist One"}, nil).Run(func(args mock.Arguments) {
|
||||
cancelCtx() // Cancel context *during* the DB call simulation
|
||||
}).Once()
|
||||
|
||||
// Act
|
||||
imgURL, err := provider.ArtistImage(cctx, "artist-1")
|
||||
|
||||
// Assert
|
||||
Expect(err).To(MatchError(context.Canceled))
|
||||
Expect(imgURL).To(BeNil())
|
||||
mockArtistRepo.AssertCalled(GinkgoT(), "Get", "artist-1")
|
||||
})
|
||||
|
||||
It("derives artist ID from MediaFile ID", func() {
|
||||
// Arrange: Add mocks for the initial GetEntityByID lookups
|
||||
mockArtistRepo.On("Get", "mf-1").Return(nil, model.ErrNotFound).Once()
|
||||
mockAlbumRepo.On("Get", "mf-1").Return(nil, model.ErrNotFound).Once()
|
||||
// Default mocks for MediaFileRepo.Get("mf-1") and ArtistRepo.Get("artist-1") handle the rest
|
||||
expectedURL, _ := url.Parse("http://example.com/large.jpg")
|
||||
|
||||
// Act
|
||||
imgURL, err := provider.ArtistImage(ctx, "mf-1")
|
||||
|
||||
// Assert
|
||||
Expect(err).ToNot(HaveOccurred())
|
||||
Expect(imgURL).To(Equal(expectedURL))
|
||||
mockArtistRepo.AssertCalled(GinkgoT(), "Get", "mf-1") // GetEntityByID sequence
|
||||
mockAlbumRepo.AssertCalled(GinkgoT(), "Get", "mf-1") // GetEntityByID sequence
|
||||
mockMediaFileRepo.AssertCalled(GinkgoT(), "Get", "mf-1")
|
||||
mockArtistRepo.AssertCalled(GinkgoT(), "Get", "artist-1") // Should be called after getting MF
|
||||
mockImageAgent.AssertCalled(GinkgoT(), "GetArtistImages", ctx, "artist-1", "Artist One", "")
|
||||
})
|
||||
|
||||
It("derives artist ID from Album ID", func() {
|
||||
// Arrange: Add mock for the initial GetEntityByID lookup
|
||||
mockArtistRepo.On("Get", "album-1").Return(nil, model.ErrNotFound).Once()
|
||||
// Default mocks for AlbumRepo.Get("album-1") and ArtistRepo.Get("artist-1") handle the rest
|
||||
expectedURL, _ := url.Parse("http://example.com/large.jpg")
|
||||
|
||||
// Act
|
||||
imgURL, err := provider.ArtistImage(ctx, "album-1")
|
||||
|
||||
// Assert
|
||||
Expect(err).ToNot(HaveOccurred())
|
||||
Expect(imgURL).To(Equal(expectedURL))
|
||||
mockArtistRepo.AssertCalled(GinkgoT(), "Get", "album-1") // GetEntityByID sequence
|
||||
mockAlbumRepo.AssertCalled(GinkgoT(), "Get", "album-1")
|
||||
mockArtistRepo.AssertCalled(GinkgoT(), "Get", "artist-1") // Should be called after getting Album
|
||||
mockImageAgent.AssertCalled(GinkgoT(), "GetArtistImages", ctx, "artist-1", "Artist One", "")
|
||||
})
|
||||
|
||||
It("returns ErrNotFound if derived artist is not found", func() {
|
||||
// Arrange
|
||||
// Add mocks for the initial GetEntityByID lookups
|
||||
mockArtistRepo.On("Get", "mf-bad-artist").Return(nil, model.ErrNotFound).Once()
|
||||
mockAlbumRepo.On("Get", "mf-bad-artist").Return(nil, model.ErrNotFound).Once()
|
||||
mockMediaFileRepo.On("Get", "mf-bad-artist").Return(&model.MediaFile{ID: "mf-bad-artist", ArtistID: "not-found"}, nil).Once()
|
||||
// Add expectation for the recursive GetEntityByID call for the MediaFileRepo
|
||||
mockMediaFileRepo.On("Get", "not-found").Return(nil, model.ErrNotFound).Maybe()
|
||||
// The default mocks for ArtistRepo/AlbumRepo handle the final "not-found" lookups
|
||||
|
||||
// Act
|
||||
imgURL, err := provider.ArtistImage(ctx, "mf-bad-artist")
|
||||
|
||||
// Assert
|
||||
Expect(err).To(MatchError(model.ErrNotFound))
|
||||
Expect(imgURL).To(BeNil())
|
||||
mockArtistRepo.AssertCalled(GinkgoT(), "Get", "mf-bad-artist") // GetEntityByID sequence
|
||||
mockAlbumRepo.AssertCalled(GinkgoT(), "Get", "mf-bad-artist") // GetEntityByID sequence
|
||||
mockMediaFileRepo.AssertCalled(GinkgoT(), "Get", "mf-bad-artist")
|
||||
mockArtistRepo.AssertCalled(GinkgoT(), "Get", "not-found")
|
||||
mockImageAgent.AssertNotCalled(GinkgoT(), "GetArtistImages", mock.Anything, mock.Anything, mock.Anything, mock.Anything)
|
||||
})
|
||||
|
||||
It("handles different image orders from agent", func() {
|
||||
// Arrange
|
||||
mockImageAgent.Mock = mock.Mock{} // Reset default expectation
|
||||
mockImageAgent.On("GetArtistImages", ctx, "artist-1", "Artist One", "").
|
||||
Return([]agents.ExternalImage{
|
||||
{URL: "http://example.com/small.jpg", Size: 200},
|
||||
{URL: "http://example.com/large.jpg", Size: 1000},
|
||||
{URL: "http://example.com/medium.jpg", Size: 500},
|
||||
}, nil).Once()
|
||||
expectedURL, _ := url.Parse("http://example.com/large.jpg")
|
||||
|
||||
// Act
|
||||
imgURL, err := provider.ArtistImage(ctx, "artist-1")
|
||||
|
||||
// Assert
|
||||
Expect(err).ToNot(HaveOccurred())
|
||||
Expect(imgURL).To(Equal(expectedURL)) // Still picks the largest
|
||||
mockArtistRepo.AssertCalled(GinkgoT(), "Get", "artist-1")
|
||||
mockImageAgent.AssertCalled(GinkgoT(), "GetArtistImages", ctx, "artist-1", "Artist One", "")
|
||||
})
|
||||
|
||||
It("handles agent returning only one image", func() {
|
||||
// Arrange
|
||||
mockImageAgent.Mock = mock.Mock{} // Reset default expectation
|
||||
mockImageAgent.On("GetArtistImages", ctx, "artist-1", "Artist One", "").
|
||||
Return([]agents.ExternalImage{
|
||||
{URL: "http://example.com/medium.jpg", Size: 500},
|
||||
}, nil).Once()
|
||||
expectedURL, _ := url.Parse("http://example.com/medium.jpg")
|
||||
|
||||
// Act
|
||||
imgURL, err := provider.ArtistImage(ctx, "artist-1")
|
||||
|
||||
// Assert
|
||||
Expect(err).ToNot(HaveOccurred())
|
||||
Expect(imgURL).To(Equal(expectedURL))
|
||||
mockArtistRepo.AssertCalled(GinkgoT(), "Get", "artist-1")
|
||||
mockImageAgent.AssertCalled(GinkgoT(), "GetArtistImages", ctx, "artist-1", "Artist One", "")
|
||||
})
|
||||
})

// mockArtistImageAgent implementation using testify/mock
// This remains local as it's specific to testing the ArtistImage functionality
type mockArtistImageAgent struct {
	mock.Mock
	agents.ArtistImageRetriever // Embed interface
}

// Constructor for the mock agent
func newMockArtistImageAgent() *mockArtistImageAgent {
	mock := new(mockArtistImageAgent)
	// Set default AgentName if needed, although usually called via mockAgents
	mock.On("AgentName").Return("mockImage").Maybe()
	return mock
}

func (m *mockArtistImageAgent) AgentName() string {
	args := m.Called()
	return args.String(0)
}

func (m *mockArtistImageAgent) GetArtistImages(ctx context.Context, id, artistName, mbid string) ([]agents.ExternalImage, error) {
	args := m.Called(ctx, id, artistName, mbid)
	// Need careful type assertion for potentially nil slice
	var res []agents.ExternalImage
	if args.Get(0) != nil {
		res = args.Get(0).([]agents.ExternalImage)
	}
	return res, args.Error(1)
}

// Ensure mockAgent implements the interface
var _ agents.ArtistImageRetriever = (*mockArtistImageAgent)(nil)
196
core/external/provider_artistradio_test.go
vendored
Normal file
@@ -0,0 +1,196 @@
package external_test

import (
	"context"
	"errors"

	"github.com/navidrome/navidrome/core/agents"
	. "github.com/navidrome/navidrome/core/external"
	"github.com/navidrome/navidrome/model"
	"github.com/navidrome/navidrome/tests"
	. "github.com/onsi/ginkgo/v2"
	. "github.com/onsi/gomega"
	"github.com/stretchr/testify/mock"
)

var _ = Describe("Provider - ArtistRadio", func() {
	var ds model.DataStore
	var provider Provider
	var mockAgent *mockSimilarArtistAgent
	var mockTopAgent agents.ArtistTopSongsRetriever
	var mockSimilarAgent agents.ArtistSimilarRetriever
	var agentsCombined Agents
	var artistRepo *mockArtistRepo
	var mediaFileRepo *mockMediaFileRepo
	var ctx context.Context

	BeforeEach(func() {
		ctx = GinkgoT().Context()

		artistRepo = newMockArtistRepo()
		mediaFileRepo = newMockMediaFileRepo()

		ds = &tests.MockDataStore{
			MockedArtist:    artistRepo,
			MockedMediaFile: mediaFileRepo,
		}

		mockAgent = &mockSimilarArtistAgent{}
		mockTopAgent = mockAgent
		mockSimilarAgent = mockAgent

		agentsCombined = &mockAgents{
			topSongsAgent: mockTopAgent,
			similarAgent:  mockSimilarAgent,
		}

		provider = NewProvider(ds, agentsCombined)
	})

	It("returns similar songs from main artist and similar artists", func() {
		artist1 := model.Artist{ID: "artist-1", Name: "Artist One"}
		similarArtist := model.Artist{ID: "artist-3", Name: "Similar Artist"}
		song1 := model.MediaFile{ID: "song-1", Title: "Song One", ArtistID: "artist-1", MbzRecordingID: "mbid-1"}
		song2 := model.MediaFile{ID: "song-2", Title: "Song Two", ArtistID: "artist-1", MbzRecordingID: "mbid-2"}
		song3 := model.MediaFile{ID: "song-3", Title: "Song Three", ArtistID: "artist-3", MbzRecordingID: "mbid-3"}

		artistRepo.On("Get", "artist-1").Return(&artist1, nil).Maybe()
		artistRepo.On("Get", "artist-3").Return(&similarArtist, nil).Maybe()

		artistRepo.On("GetAll", mock.MatchedBy(func(opt model.QueryOptions) bool {
			return opt.Max == 1 && opt.Filters != nil
		})).Return(model.Artists{artist1}, nil).Once()

		similarAgentsResp := []agents.Artist{
			{Name: "Similar Artist", MBID: "similar-mbid"},
		}
		mockAgent.On("GetSimilarArtists", mock.Anything, "artist-1", "Artist One", "", 15).
			Return(similarAgentsResp, nil).Once()

		artistRepo.On("GetAll", mock.MatchedBy(func(opt model.QueryOptions) bool {
			return opt.Max == 0 && opt.Filters != nil
		})).Return(model.Artists{similarArtist}, nil).Once()

		mockAgent.On("GetArtistTopSongs", mock.Anything, "artist-1", "Artist One", "", mock.Anything).
			Return([]agents.Song{
				{Name: "Song One", MBID: "mbid-1"},
				{Name: "Song Two", MBID: "mbid-2"},
			}, nil).Once()

		mockAgent.On("GetArtistTopSongs", mock.Anything, "artist-3", "Similar Artist", "", mock.Anything).
			Return([]agents.Song{
				{Name: "Song Three", MBID: "mbid-3"},
			}, nil).Once()

		mediaFileRepo.On("GetAll", mock.AnythingOfType("model.QueryOptions")).Return(model.MediaFiles{song1, song2}, nil).Once()
		mediaFileRepo.On("GetAll", mock.AnythingOfType("model.QueryOptions")).Return(model.MediaFiles{song3}, nil).Once()

		songs, err := provider.ArtistRadio(ctx, "artist-1", 3)

		Expect(err).ToNot(HaveOccurred())
		Expect(songs).To(HaveLen(3))
		for _, song := range songs {
			Expect(song.ID).To(BeElementOf("song-1", "song-2", "song-3"))
		}
	})

	It("returns ErrNotFound when artist is not found", func() {
		artistRepo.On("Get", "artist-unknown-artist").Return(nil, model.ErrNotFound)
		mediaFileRepo.On("Get", "artist-unknown-artist").Return(nil, model.ErrNotFound)

		artistRepo.On("GetAll", mock.MatchedBy(func(opt model.QueryOptions) bool {
			return opt.Max == 1 && opt.Filters != nil
		})).Return(model.Artists{}, nil).Maybe()

		songs, err := provider.ArtistRadio(ctx, "artist-unknown-artist", 5)

		Expect(err).To(Equal(model.ErrNotFound))
		Expect(songs).To(BeNil())
	})

	It("returns songs from main artist when GetSimilarArtists returns error", func() {
		artist1 := model.Artist{ID: "artist-1", Name: "Artist One"}
		song1 := model.MediaFile{ID: "song-1", Title: "Song One", ArtistID: "artist-1", MbzRecordingID: "mbid-1"}

		artistRepo.On("Get", "artist-1").Return(&artist1, nil).Maybe()
		artistRepo.On("GetAll", mock.MatchedBy(func(opt model.QueryOptions) bool {
			return opt.Max == 1 && opt.Filters != nil
		})).Return(model.Artists{artist1}, nil).Maybe()

		mockAgent.On("GetSimilarArtists", mock.Anything, "artist-1", "Artist One", "", 15).
			Return(nil, errors.New("error getting similar artists")).Once()

		artistRepo.On("GetAll", mock.MatchedBy(func(opt model.QueryOptions) bool {
			return opt.Max == 0 && opt.Filters != nil
		})).Return(model.Artists{}, nil).Once()

		mockAgent.On("GetArtistTopSongs", mock.Anything, "artist-1", "Artist One", "", mock.Anything).
			Return([]agents.Song{
				{Name: "Song One", MBID: "mbid-1"},
			}, nil).Once()

		mediaFileRepo.On("GetAll", mock.AnythingOfType("model.QueryOptions")).Return(model.MediaFiles{song1}, nil).Once()

		songs, err := provider.ArtistRadio(ctx, "artist-1", 5)

		Expect(err).ToNot(HaveOccurred())
		Expect(songs).To(HaveLen(1))
		Expect(songs[0].ID).To(Equal("song-1"))
	})

	It("returns empty list when GetArtistTopSongs returns error", func() {
		artist1 := model.Artist{ID: "artist-1", Name: "Artist One"}

		artistRepo.On("Get", "artist-1").Return(&artist1, nil).Maybe()
		artistRepo.On("GetAll", mock.MatchedBy(func(opt model.QueryOptions) bool {
			return opt.Max == 1 && opt.Filters != nil
		})).Return(model.Artists{artist1}, nil).Maybe()

		mockAgent.On("GetSimilarArtists", mock.Anything, "artist-1", "Artist One", "", 15).
			Return([]agents.Artist{}, nil).Once()

		artistRepo.On("GetAll", mock.MatchedBy(func(opt model.QueryOptions) bool {
			return opt.Max == 0 && opt.Filters != nil
		})).Return(model.Artists{}, nil).Once()

		mockAgent.On("GetArtistTopSongs", mock.Anything, "artist-1", "Artist One", "", mock.Anything).
			Return(nil, errors.New("error getting top songs")).Once()

		songs, err := provider.ArtistRadio(ctx, "artist-1", 5)

		Expect(err).ToNot(HaveOccurred())
		Expect(songs).To(BeEmpty())
	})

	It("respects count parameter", func() {
		artist1 := model.Artist{ID: "artist-1", Name: "Artist One"}
		song1 := model.MediaFile{ID: "song-1", Title: "Song One", ArtistID: "artist-1", MbzRecordingID: "mbid-1"}
		song2 := model.MediaFile{ID: "song-2", Title: "Song Two", ArtistID: "artist-1", MbzRecordingID: "mbid-2"}

		artistRepo.On("Get", "artist-1").Return(&artist1, nil).Maybe()
		artistRepo.On("GetAll", mock.MatchedBy(func(opt model.QueryOptions) bool {
			return opt.Max == 1 && opt.Filters != nil
		})).Return(model.Artists{artist1}, nil).Maybe()

		mockAgent.On("GetSimilarArtists", mock.Anything, "artist-1", "Artist One", "", 15).
			Return([]agents.Artist{}, nil).Once()

		artistRepo.On("GetAll", mock.MatchedBy(func(opt model.QueryOptions) bool {
			return opt.Max == 0 && opt.Filters != nil
		})).Return(model.Artists{}, nil).Once()

		mockAgent.On("GetArtistTopSongs", mock.Anything, "artist-1", "Artist One", "", mock.Anything).
			Return([]agents.Song{
				{Name: "Song One", MBID: "mbid-1"},
				{Name: "Song Two", MBID: "mbid-2"},
			}, nil).Once()

		mediaFileRepo.On("GetAll", mock.AnythingOfType("model.QueryOptions")).Return(model.MediaFiles{song1, song2}, nil).Once()

		songs, err := provider.ArtistRadio(ctx, "artist-1", 1)

		Expect(err).ToNot(HaveOccurred())
		Expect(songs).To(HaveLen(1))
		Expect(songs[0].ID).To(BeElementOf("song-1", "song-2"))
	})
})
274
core/external/provider_topsongs_test.go
vendored
Normal file
@@ -0,0 +1,274 @@
|
||||
package external_test
|
||||
|
||||
import (
|
||||
"context"
|
||||
"errors"
|
||||
|
||||
"github.com/navidrome/navidrome/core/agents"
|
||||
_ "github.com/navidrome/navidrome/core/agents/lastfm"
|
||||
_ "github.com/navidrome/navidrome/core/agents/listenbrainz"
|
||||
_ "github.com/navidrome/navidrome/core/agents/spotify"
|
||||
. "github.com/navidrome/navidrome/core/external"
|
||||
"github.com/navidrome/navidrome/model"
|
||||
"github.com/navidrome/navidrome/tests"
|
||||
. "github.com/onsi/ginkgo/v2"
|
||||
. "github.com/onsi/gomega"
|
||||
"github.com/stretchr/testify/mock"
|
||||
)
|
||||
|
||||
var _ = Describe("Provider - TopSongs", func() {
|
||||
var (
|
||||
p Provider
|
||||
artistRepo *mockArtistRepo // From provider_helper_test.go
|
||||
mediaFileRepo *mockMediaFileRepo // From provider_helper_test.go
|
||||
ag *mockAgents // Consolidated mock from export_test.go
|
||||
ctx context.Context
|
||||
)
|
||||
|
||||
BeforeEach(func() {
|
||||
ctx = GinkgoT().Context()
|
||||
|
||||
artistRepo = newMockArtistRepo() // Use helper mock
|
||||
mediaFileRepo = newMockMediaFileRepo() // Use helper mock
|
||||
|
||||
// Configure tests.MockDataStore to use the testify/mock-based repos
|
||||
ds := &tests.MockDataStore{
|
||||
MockedArtist: artistRepo,
|
||||
MockedMediaFile: mediaFileRepo,
|
||||
}
|
||||
|
||||
ag = new(mockAgents)
|
||||
|
||||
p = NewProvider(ds, ag)
|
||||
})
|
||||
|
||||
It("returns top songs for a known artist", func() {
|
||||
// Mock finding the artist
|
||||
artist1 := model.Artist{ID: "artist-1", Name: "Artist One", MbzArtistID: "mbid-artist-1"}
|
||||
artistRepo.On("GetAll", mock.AnythingOfType("model.QueryOptions")).Return(model.Artists{artist1}, nil).Once()
|
||||
|
||||
// Mock agent response
|
||||
agentSongs := []agents.Song{
|
||||
{Name: "Song One", MBID: "mbid-song-1"},
|
||||
{Name: "Song Two", MBID: "mbid-song-2"},
|
||||
}
|
||||
ag.On("GetArtistTopSongs", ctx, "artist-1", "Artist One", "mbid-artist-1", 2).Return(agentSongs, nil).Once()
|
||||
|
||||
// Mock finding matching tracks (both returned in a single query)
|
||||
song1 := model.MediaFile{ID: "song-1", Title: "Song One", ArtistID: "artist-1", MbzRecordingID: "mbid-song-1"}
|
||||
song2 := model.MediaFile{ID: "song-2", Title: "Song Two", ArtistID: "artist-1", MbzRecordingID: "mbid-song-2"}
|
||||
mediaFileRepo.On("GetAll", mock.AnythingOfType("model.QueryOptions")).Return(model.MediaFiles{song1, song2}, nil).Once()
|
||||
|
||||
songs, err := p.TopSongs(ctx, "Artist One", 2)
|
||||
|
||||
Expect(err).ToNot(HaveOccurred())
|
||||
Expect(songs).To(HaveLen(2))
|
||||
Expect(songs[0].ID).To(Equal("song-1"))
|
||||
Expect(songs[1].ID).To(Equal("song-2"))
|
||||
artistRepo.AssertExpectations(GinkgoT())
|
||||
ag.AssertExpectations(GinkgoT())
|
||||
mediaFileRepo.AssertExpectations(GinkgoT())
|
||||
})
|
||||
|
||||
It("returns nil for an unknown artist", func() {
|
||||
// Mock artist not found
|
||||
artistRepo.On("GetAll", mock.AnythingOfType("model.QueryOptions")).Return(model.Artists{}, nil).Once()
|
||||
|
||||
songs, err := p.TopSongs(ctx, "Unknown Artist", 5)
|
||||
|
||||
Expect(err).ToNot(HaveOccurred()) // TopSongs returns nil error if artist not found
|
||||
Expect(songs).To(BeNil())
|
||||
artistRepo.AssertExpectations(GinkgoT())
|
||||
ag.AssertNotCalled(GinkgoT(), "GetArtistTopSongs", mock.Anything, mock.Anything, mock.Anything, mock.Anything, mock.Anything)
|
||||
})
|
||||
|
||||
It("returns error when the agent returns an error", func() {
|
||||
// Mock finding the artist
|
||||
artist1 := model.Artist{ID: "artist-1", Name: "Artist One", MbzArtistID: "mbid-artist-1"}
|
||||
artistRepo.On("GetAll", mock.AnythingOfType("model.QueryOptions")).Return(model.Artists{artist1}, nil).Once()
|
||||
|
||||
// Mock agent error
|
||||
agentErr := errors.New("agent error")
|
||||
ag.On("GetArtistTopSongs", ctx, "artist-1", "Artist One", "mbid-artist-1", 5).Return(nil, agentErr).Once()
|
||||
|
||||
songs, err := p.TopSongs(ctx, "Artist One", 5)
|
||||
|
||||
Expect(err).To(MatchError(agentErr))
|
||||
Expect(songs).To(BeNil())
|
||||
artistRepo.AssertExpectations(GinkgoT())
|
||||
ag.AssertExpectations(GinkgoT())
|
||||
})
|
||||
|
||||
It("returns ErrNotFound when the agent returns ErrNotFound", func() {
|
||||
// Mock finding the artist
|
||||
artist1 := model.Artist{ID: "artist-1", Name: "Artist One", MbzArtistID: "mbid-artist-1"}
|
||||
artistRepo.On("GetAll", mock.AnythingOfType("model.QueryOptions")).Return(model.Artists{artist1}, nil).Once()
|
||||
|
||||
// Mock agent ErrNotFound
|
||||
ag.On("GetArtistTopSongs", ctx, "artist-1", "Artist One", "mbid-artist-1", 5).Return(nil, agents.ErrNotFound).Once()
|
||||
|
||||
songs, err := p.TopSongs(ctx, "Artist One", 5)
|
||||
|
||||
Expect(err).To(MatchError(model.ErrNotFound))
|
||||
Expect(songs).To(BeNil())
|
||||
artistRepo.AssertExpectations(GinkgoT())
|
||||
ag.AssertExpectations(GinkgoT())
|
||||
})
|
||||
|
||||
It("returns fewer songs if count is less than available top songs", func() {
|
||||
// Mock finding the artist
|
||||
artist1 := model.Artist{ID: "artist-1", Name: "Artist One", MbzArtistID: "mbid-artist-1"}
|
||||
artistRepo.On("GetAll", mock.AnythingOfType("model.QueryOptions")).Return(model.Artists{artist1}, nil).Once()
|
||||
|
||||
// Mock agent response (only need 1 for the test)
|
||||
agentSongs := []agents.Song{{Name: "Song One", MBID: "mbid-song-1"}}
|
||||
ag.On("GetArtistTopSongs", ctx, "artist-1", "Artist One", "mbid-artist-1", 1).Return(agentSongs, nil).Once()
|
||||
|
||||
// Mock finding matching track
|
||||
song1 := model.MediaFile{ID: "song-1", Title: "Song One", ArtistID: "artist-1", MbzRecordingID: "mbid-song-1"}
|
||||
mediaFileRepo.On("GetAll", mock.AnythingOfType("model.QueryOptions")).Return(model.MediaFiles{song1}, nil).Once()
|
||||
|
||||
songs, err := p.TopSongs(ctx, "Artist One", 1)
|
||||
|
||||
Expect(err).ToNot(HaveOccurred())
|
||||
Expect(songs).To(HaveLen(1))
|
||||
Expect(songs[0].ID).To(Equal("song-1"))
|
||||
artistRepo.AssertExpectations(GinkgoT())
|
||||
ag.AssertExpectations(GinkgoT())
|
||||
mediaFileRepo.AssertExpectations(GinkgoT())
|
||||
})
|
||||
|
||||
It("returns fewer songs if fewer matching tracks are found", func() {
|
||||
// Mock finding the artist
|
||||
artist1 := model.Artist{ID: "artist-1", Name: "Artist One", MbzArtistID: "mbid-artist-1"}
|
||||
artistRepo.On("GetAll", mock.AnythingOfType("model.QueryOptions")).Return(model.Artists{artist1}, nil).Once()
|
||||
|
||||
// Mock agent response
|
||||
agentSongs := []agents.Song{
|
||||
{Name: "Song One", MBID: "mbid-song-1"},
|
||||
{Name: "Song Two", MBID: "mbid-song-2"},
|
||||
}
|
||||
ag.On("GetArtistTopSongs", ctx, "artist-1", "Artist One", "mbid-artist-1", 2).Return(agentSongs, nil).Once()
|
||||
|
||||
// Mock finding matching tracks (only find song 1 on bulk query)
|
||||
song1 := model.MediaFile{ID: "song-1", Title: "Song One", ArtistID: "artist-1", MbzRecordingID: "mbid-song-1"}
|
||||
mediaFileRepo.On("GetAll", mock.AnythingOfType("model.QueryOptions")).Return(model.MediaFiles{song1}, nil).Once() // bulk MBID query
|
||||
mediaFileRepo.On("GetAll", mock.AnythingOfType("model.QueryOptions")).Return(model.MediaFiles{}, nil).Once() // title fallback for song2
|
||||
|
||||
songs, err := p.TopSongs(ctx, "Artist One", 2)
|
||||
|
||||
Expect(err).ToNot(HaveOccurred())
|
||||
Expect(songs).To(HaveLen(1))
|
||||
Expect(songs[0].ID).To(Equal("song-1"))
|
||||
artistRepo.AssertExpectations(GinkgoT())
|
||||
ag.AssertExpectations(GinkgoT())
|
||||
mediaFileRepo.AssertExpectations(GinkgoT())
|
||||
})
|
||||
|
||||
It("returns error when context is canceled during agent call", func() {
|
||||
// Mock finding the artist
|
||||
artist1 := model.Artist{ID: "artist-1", Name: "Artist One", MbzArtistID: "mbid-artist-1"}
|
||||
artistRepo.On("GetAll", mock.AnythingOfType("model.QueryOptions")).Return(model.Artists{artist1}, nil).Once()
|
||||
|
||||
// Setup context that will be canceled
|
||||
canceledCtx, cancel := context.WithCancel(ctx)
|
||||
|
||||
// Mock agent call to return context canceled error
|
||||
ag.On("GetArtistTopSongs", canceledCtx, "artist-1", "Artist One", "mbid-artist-1", 5).Return(nil, context.Canceled).Once()
|
||||
|
||||
cancel() // Cancel the context before calling
|
||||
songs, err := p.TopSongs(canceledCtx, "Artist One", 5)
|
||||
|
||||
Expect(err).To(MatchError(context.Canceled))
|
||||
Expect(songs).To(BeNil())
|
||||
artistRepo.AssertExpectations(GinkgoT())
|
||||
ag.AssertExpectations(GinkgoT())
|
||||
})
|
||||
|
||||
It("falls back to title matching when MbzRecordingID is missing", func() {
|
||||
// Mock finding the artist
|
||||
artist1 := model.Artist{ID: "artist-1", Name: "Artist One", MbzArtistID: "mbid-artist-1"}
|
||||
artistRepo.On("GetAll", mock.AnythingOfType("model.QueryOptions")).Return(model.Artists{artist1}, nil).Once()
|
||||
|
||||
// Mock agent response with songs that have NO MBID (empty string)
|
||||
agentSongs := []agents.Song{
|
||||
{Name: "Song One", MBID: ""}, // No MBID, should fall back to title matching
|
||||
{Name: "Song Two", MBID: ""}, // No MBID, should fall back to title matching
|
||||
}
|
||||
ag.On("GetArtistTopSongs", ctx, "artist-1", "Artist One", "mbid-artist-1", 2).Return(agentSongs, nil).Once()
|
||||
|
||||
// Since there are no MBIDs, loadTracksByMBID should not make any database call
|
||||
// loadTracksByTitle should make a database call for title matching
|
||||
song1 := model.MediaFile{ID: "song-1", Title: "Song One", ArtistID: "artist-1", MbzRecordingID: "", OrderTitle: "song one"}
|
||||
song2 := model.MediaFile{ID: "song-2", Title: "Song Two", ArtistID: "artist-1", MbzRecordingID: "", OrderTitle: "song two"}
|
||||
mediaFileRepo.On("GetAll", mock.AnythingOfType("model.QueryOptions")).Return(model.MediaFiles{song1, song2}, nil).Once()
|
||||
|
||||
songs, err := p.TopSongs(ctx, "Artist One", 2)
|
||||
|
||||
Expect(err).ToNot(HaveOccurred())
|
||||
Expect(songs).To(HaveLen(2))
|
||||
Expect(songs[0].ID).To(Equal("song-1"))
|
||||
Expect(songs[1].ID).To(Equal("song-2"))
|
||||
artistRepo.AssertExpectations(GinkgoT())
|
||||
ag.AssertExpectations(GinkgoT())
|
||||
mediaFileRepo.AssertExpectations(GinkgoT())
|
||||
})
|
||||
|
||||
It("combines MBID and title matching when some songs have missing MbzRecordingID", func() {
|
||||
// Mock finding the artist
|
||||
artist1 := model.Artist{ID: "artist-1", Name: "Artist One", MbzArtistID: "mbid-artist-1"}
|
||||
artistRepo.On("GetAll", mock.AnythingOfType("model.QueryOptions")).Return(model.Artists{artist1}, nil).Once()
|
||||
|
||||
// Mock agent response with mixed MBID availability
|
||||
agentSongs := []agents.Song{
|
||||
{Name: "Song One", MBID: "mbid-song-1"}, // Has MBID, should match by MBID
|
||||
{Name: "Song Two", MBID: ""}, // No MBID, should fall back to title matching
|
||||
}
|
||||
ag.On("GetArtistTopSongs", ctx, "artist-1", "Artist One", "mbid-artist-1", 2).Return(agentSongs, nil).Once()
|
||||
|
||||
// Mock the MBID query (finds song1 by MBID)
|
||||
song1 := model.MediaFile{ID: "song-1", Title: "Song One", ArtistID: "artist-1", MbzRecordingID: "mbid-song-1", OrderTitle: "song one"}
|
||||
mediaFileRepo.On("GetAll", mock.AnythingOfType("model.QueryOptions")).Return(model.MediaFiles{song1}, nil).Once()
|
||||
|
||||
// Mock the title fallback query (finds song2 by title)
|
||||
song2 := model.MediaFile{ID: "song-2", Title: "Song Two", ArtistID: "artist-1", MbzRecordingID: "", OrderTitle: "song two"}
|
||||
mediaFileRepo.On("GetAll", mock.AnythingOfType("model.QueryOptions")).Return(model.MediaFiles{song2}, nil).Once()
|
||||
|
||||
songs, err := p.TopSongs(ctx, "Artist One", 2)
|
||||
|
||||
Expect(err).ToNot(HaveOccurred())
|
||||
Expect(songs).To(HaveLen(2))
|
||||
Expect(songs[0].ID).To(Equal("song-1")) // Found by MBID
|
||||
Expect(songs[1].ID).To(Equal("song-2")) // Found by title
|
||||
artistRepo.AssertExpectations(GinkgoT())
|
||||
ag.AssertExpectations(GinkgoT())
|
||||
mediaFileRepo.AssertExpectations(GinkgoT())
|
||||
})
|
||||
|
||||
It("only returns requested count when provider returns additional items", func() {
|
||||
// Mock finding the artist
|
||||
artist1 := model.Artist{ID: "artist-1", Name: "Artist One", MbzArtistID: "mbid-artist-1"}
|
||||
artistRepo.On("GetAll", mock.AnythingOfType("model.QueryOptions")).Return(model.Artists{artist1}, nil).Once()
|
||||
|
||||
// Mock agent response
|
||||
agentSongs := []agents.Song{
|
||||
{Name: "Song One", MBID: "mbid-song-1"},
|
||||
{Name: "Song Two", MBID: "mbid-song-2"},
|
||||
}
|
||||
ag.On("GetArtistTopSongs", ctx, "artist-1", "Artist One", "mbid-artist-1", 1).Return(agentSongs, nil).Once()
|
||||
|
||||
// Mock finding matching tracks (both returned in a single query)
|
||||
song1 := model.MediaFile{ID: "song-1", Title: "Song One", ArtistID: "artist-1", MbzRecordingID: "mbid-song-1"}
|
||||
song2 := model.MediaFile{ID: "song-2", Title: "Song Two", ArtistID: "artist-1", MbzRecordingID: "mbid-song-2"}
|
||||
mediaFileRepo.On("GetAll", mock.AnythingOfType("model.QueryOptions")).Return(model.MediaFiles{song1, song2}, nil).Once()
|
||||
|
||||
songs, err := p.TopSongs(ctx, "Artist One", 1)
|
||||
|
||||
Expect(err).ToNot(HaveOccurred())
|
||||
Expect(songs).To(HaveLen(1))
|
||||
Expect(songs[0].ID).To(Equal("song-1"))
|
||||
artistRepo.AssertExpectations(GinkgoT())
|
||||
ag.AssertExpectations(GinkgoT())
|
||||
mediaFileRepo.AssertExpectations(GinkgoT())
|
||||
})
|
||||
})
|
||||
167
core/external/provider_updatealbuminfo_test.go
vendored
Normal file
@@ -0,0 +1,167 @@
package external_test

import (
	"context"
	"errors"
	"time"

	"github.com/navidrome/navidrome/conf"
	"github.com/navidrome/navidrome/core/agents"
	"github.com/navidrome/navidrome/core/external"
	"github.com/navidrome/navidrome/log"
	"github.com/navidrome/navidrome/model"
	"github.com/navidrome/navidrome/tests"
	"github.com/navidrome/navidrome/utils/gg"
	. "github.com/onsi/ginkgo/v2"
	. "github.com/onsi/gomega"
	"github.com/stretchr/testify/mock"
)

func init() {
	log.SetLevel(log.LevelDebug)
}

var _ = Describe("Provider - UpdateAlbumInfo", func() {
	var (
		ctx           context.Context
		p             external.Provider
		ds            *tests.MockDataStore
		ag            *mockAgents
		mockAlbumRepo *tests.MockAlbumRepo
	)

	BeforeEach(func() {
		ctx = GinkgoT().Context()
		ds = new(tests.MockDataStore)
		ag = new(mockAgents)
		p = external.NewProvider(ds, ag)
		mockAlbumRepo = ds.Album(ctx).(*tests.MockAlbumRepo)
		conf.Server.DevAlbumInfoTimeToLive = 1 * time.Hour
	})

	It("returns error when album is not found", func() {
		album, err := p.UpdateAlbumInfo(ctx, "al-not-found")

		Expect(err).To(MatchError(model.ErrNotFound))
		Expect(album).To(BeNil())
		ag.AssertNotCalled(GinkgoT(), "GetAlbumInfo", mock.Anything, mock.Anything, mock.Anything, mock.Anything)
	})

	It("populates info when album exists but has no external info", func() {
		originalAlbum := &model.Album{
			ID:          "al-existing",
			Name:        "Test Album",
			AlbumArtist: "Test Artist",
			MbzAlbumID:  "mbid-album",
		}
		mockAlbumRepo.SetData(model.Albums{*originalAlbum})

		expectedInfo := &agents.AlbumInfo{
			URL:         "http://example.com/album",
			Description: "Album Description",
		}
		ag.On("GetAlbumInfo", ctx, "Test Album", "Test Artist", "mbid-album").Return(expectedInfo, nil)
		ag.On("GetAlbumImages", ctx, "Test Album", "Test Artist", "mbid-album").Return([]agents.ExternalImage{
			{URL: "http://example.com/large.jpg", Size: 300},
			{URL: "http://example.com/medium.jpg", Size: 200},
			{URL: "http://example.com/small.jpg", Size: 100},
		}, nil)

		updatedAlbum, err := p.UpdateAlbumInfo(ctx, "al-existing")

		Expect(err).NotTo(HaveOccurred())
		Expect(updatedAlbum).NotTo(BeNil())
		Expect(updatedAlbum.ID).To(Equal("al-existing"))
		Expect(updatedAlbum.ExternalUrl).To(Equal("http://example.com/album"))
		Expect(updatedAlbum.Description).To(Equal("Album Description"))
		Expect(updatedAlbum.ExternalInfoUpdatedAt).NotTo(BeNil())
		Expect(*updatedAlbum.ExternalInfoUpdatedAt).To(BeTemporally("~", time.Now(), time.Second))

		ag.AssertExpectations(GinkgoT())
	})

	It("returns cached info when album exists and info is not expired", func() {
		now := time.Now()
		originalAlbum := &model.Album{
			ID:                    "al-cached",
			Name:                  "Cached Album",
			AlbumArtist:           "Cached Artist",
			ExternalUrl:           "http://cached.com/album",
			Description:           "Cached Desc",
			LargeImageUrl:         "http://cached.com/large.jpg",
			ExternalInfoUpdatedAt: gg.P(now.Add(-conf.Server.DevAlbumInfoTimeToLive / 2)),
		}
		mockAlbumRepo.SetData(model.Albums{*originalAlbum})

		updatedAlbum, err := p.UpdateAlbumInfo(ctx, "al-cached")

		Expect(err).NotTo(HaveOccurred())
		Expect(updatedAlbum).NotTo(BeNil())
		Expect(*updatedAlbum).To(Equal(*originalAlbum))

		ag.AssertNotCalled(GinkgoT(), "GetAlbumInfo", mock.Anything, mock.Anything, mock.Anything, mock.Anything)
	})

	It("returns cached info and triggers background refresh when info is expired", func() {
		now := time.Now()
		expiredTime := now.Add(-conf.Server.DevAlbumInfoTimeToLive * 2)
		originalAlbum := &model.Album{
			ID:                    "al-expired",
			Name:                  "Expired Album",
			AlbumArtist:           "Expired Artist",
			ExternalUrl:           "http://expired.com/album",
			Description:           "Expired Desc",
			LargeImageUrl:         "http://expired.com/large.jpg",
			ExternalInfoUpdatedAt: gg.P(expiredTime),
		}
		mockAlbumRepo.SetData(model.Albums{*originalAlbum})

		updatedAlbum, err := p.UpdateAlbumInfo(ctx, "al-expired")

		Expect(err).NotTo(HaveOccurred())
		Expect(updatedAlbum).NotTo(BeNil())
		Expect(*updatedAlbum).To(Equal(*originalAlbum))

		ag.AssertNotCalled(GinkgoT(), "GetAlbumInfo", mock.Anything, mock.Anything, mock.Anything, mock.Anything)
	})

	It("returns error when agent fails to get album info", func() {
		originalAlbum := &model.Album{
			ID:          "al-agent-error",
			Name:        "Agent Error Album",
			AlbumArtist: "Agent Error Artist",
			MbzAlbumID:  "mbid-agent-error",
		}
		mockAlbumRepo.SetData(model.Albums{*originalAlbum})

		expectedErr := errors.New("agent communication failed")
		ag.On("GetAlbumInfo", ctx, "Agent Error Album", "Agent Error Artist", "mbid-agent-error").Return(nil, expectedErr)

		updatedAlbum, err := p.UpdateAlbumInfo(ctx, "al-agent-error")

		Expect(err).To(MatchError(expectedErr))
		Expect(updatedAlbum).To(BeNil())
		ag.AssertExpectations(GinkgoT())
	})

	It("returns original album when agent returns ErrNotFound", func() {
		originalAlbum := &model.Album{
			ID:          "al-agent-notfound",
			Name:        "Agent NotFound Album",
			AlbumArtist: "Agent NotFound Artist",
			MbzAlbumID:  "mbid-agent-notfound",
		}
		mockAlbumRepo.SetData(model.Albums{*originalAlbum})

		ag.On("GetAlbumInfo", ctx, "Agent NotFound Album", "Agent NotFound Artist", "mbid-agent-notfound").Return(nil, agents.ErrNotFound)

		updatedAlbum, err := p.UpdateAlbumInfo(ctx, "al-agent-notfound")

		Expect(err).NotTo(HaveOccurred())
		Expect(updatedAlbum).NotTo(BeNil())
		Expect(*updatedAlbum).To(Equal(*originalAlbum))
		Expect(updatedAlbum.ExternalInfoUpdatedAt).To(BeNil())

		ag.AssertExpectations(GinkgoT())
	})
})
229
core/external/provider_updateartistinfo_test.go
vendored
Normal file
@@ -0,0 +1,229 @@
|
||||
package external_test
|
||||
|
||||
import (
|
||||
"context"
|
||||
"errors"
|
||||
"time"
|
||||
|
||||
"github.com/navidrome/navidrome/conf"
|
||||
"github.com/navidrome/navidrome/conf/configtest"
|
||||
"github.com/navidrome/navidrome/core/agents"
|
||||
"github.com/navidrome/navidrome/core/external"
|
||||
"github.com/navidrome/navidrome/log"
|
||||
"github.com/navidrome/navidrome/model"
|
||||
"github.com/navidrome/navidrome/tests"
|
||||
"github.com/navidrome/navidrome/utils/gg"
|
||||
. "github.com/onsi/ginkgo/v2"
|
||||
. "github.com/onsi/gomega"
|
||||
"github.com/stretchr/testify/mock"
|
||||
)
|
||||
|
||||
func init() {
|
||||
log.SetLevel(log.LevelDebug)
|
||||
}
|
||||
|
||||
var _ = Describe("Provider - UpdateArtistInfo", func() {
|
||||
var (
|
||||
ctx context.Context
|
||||
p external.Provider
|
||||
ds *tests.MockDataStore
|
||||
ag *mockAgents
|
||||
mockArtistRepo *tests.MockArtistRepo
|
||||
)
|
||||
|
||||
BeforeEach(func() {
|
||||
DeferCleanup(configtest.SetupConfig())
|
||||
conf.Server.DevArtistInfoTimeToLive = 1 * time.Hour
|
||||
ctx = GinkgoT().Context()
|
||||
ds = new(tests.MockDataStore)
|
||||
ag = new(mockAgents)
|
||||
p = external.NewProvider(ds, ag)
|
||||
mockArtistRepo = ds.Artist(ctx).(*tests.MockArtistRepo)
|
||||
})
|
||||
|
||||
It("returns error when artist is not found", func() {
|
||||
artist, err := p.UpdateArtistInfo(ctx, "ar-not-found", 10, false)
|
||||
|
||||
Expect(err).To(MatchError(model.ErrNotFound))
|
||||
Expect(artist).To(BeNil())
|
||||
ag.AssertNotCalled(GinkgoT(), "GetArtistMBID")
|
||||
ag.AssertNotCalled(GinkgoT(), "GetArtistImages")
|
||||
ag.AssertNotCalled(GinkgoT(), "GetArtistBiography")
|
||||
ag.AssertNotCalled(GinkgoT(), "GetArtistURL")
|
||||
ag.AssertNotCalled(GinkgoT(), "GetSimilarArtists")
|
||||
})
|
||||
|
||||
It("populates info when artist exists but has no external info", func() {
|
||||
originalArtist := &model.Artist{
|
||||
ID: "ar-existing",
|
||||
Name: "Test Artist",
|
||||
}
|
||||
mockArtistRepo.SetData(model.Artists{*originalArtist})
|
||||
|
||||
expectedMBID := "mbid-artist-123"
|
||||
expectedBio := "Artist Bio"
|
||||
expectedURL := "http://artist.url"
|
||||
expectedImages := []agents.ExternalImage{
|
||||
{URL: "http://large.jpg", Size: 300},
|
||||
{URL: "http://medium.jpg", Size: 200},
|
||||
{URL: "http://small.jpg", Size: 100},
|
||||
}
|
||||
rawSimilar := []agents.Artist{
|
||||
{Name: "Similar Artist 1", MBID: "mbid-similar-1"},
|
||||
{Name: "Similar Artist 2", MBID: "mbid-similar-2"},
|
||||
{Name: "Similar Artist 3", MBID: "mbid-similar-3"},
|
||||
}
|
||||
similarInDS := model.Artist{ID: "ar-similar-2", Name: "Similar Artist 2"}
|
||||
|
||||
ag.On("GetArtistMBID", ctx, "ar-existing", "Test Artist").Return(expectedMBID, nil).Once()
|
||||
ag.On("GetArtistImages", ctx, "ar-existing", "Test Artist", expectedMBID).Return(expectedImages, nil).Once()
|
||||
ag.On("GetArtistBiography", ctx, "ar-existing", "Test Artist", expectedMBID).Return(expectedBio, nil).Once()
|
||||
ag.On("GetArtistURL", ctx, "ar-existing", "Test Artist", expectedMBID).Return(expectedURL, nil).Once()
|
||||
ag.On("GetSimilarArtists", ctx, "ar-existing", "Test Artist", expectedMBID, 100).Return(rawSimilar, nil).Once()
|
||||
|
||||
mockArtistRepo.SetData(model.Artists{*originalArtist, similarInDS})
|
||||
|
||||
updatedArtist, err := p.UpdateArtistInfo(ctx, "ar-existing", 10, false)
|
||||
|
||||
Expect(err).NotTo(HaveOccurred())
|
||||
Expect(updatedArtist).NotTo(BeNil())
|
||||
Expect(updatedArtist.ID).To(Equal("ar-existing"))
|
||||
Expect(updatedArtist.MbzArtistID).To(Equal(expectedMBID))
|
||||
Expect(updatedArtist.Biography).To(Equal("Artist Bio"))
|
||||
Expect(updatedArtist.ExternalUrl).To(Equal(expectedURL))
|
||||
Expect(updatedArtist.LargeImageUrl).To(Equal("http://large.jpg"))
|
||||
Expect(updatedArtist.MediumImageUrl).To(Equal("http://medium.jpg"))
|
||||
Expect(updatedArtist.SmallImageUrl).To(Equal("http://small.jpg"))
|
||||
Expect(updatedArtist.ExternalInfoUpdatedAt).NotTo(BeNil())
|
||||
Expect(*updatedArtist.ExternalInfoUpdatedAt).To(BeTemporally("~", time.Now(), time.Second))
|
||||
|
||||
Expect(updatedArtist.SimilarArtists).To(HaveLen(1))
|
||||
Expect(updatedArtist.SimilarArtists[0].ID).To(Equal("ar-similar-2"))
|
||||
Expect(updatedArtist.SimilarArtists[0].Name).To(Equal("Similar Artist 2"))
|
||||
|
||||
ag.AssertExpectations(GinkgoT())
|
||||
})
|
||||
|
||||
It("returns cached info when artist exists and info is not expired", func() {
|
||||
now := time.Now()
|
||||
originalArtist := &model.Artist{
|
||||
ID: "ar-cached",
|
||||
Name: "Cached Artist",
|
||||
MbzArtistID: "mbid-cached",
|
||||
ExternalUrl: "http://cached.url",
|
||||
Biography: "Cached Bio",
|
||||
LargeImageUrl: "http://cached_large.jpg",
|
||||
ExternalInfoUpdatedAt: gg.P(now.Add(-conf.Server.DevArtistInfoTimeToLive / 2)),
|
||||
SimilarArtists: model.Artists{
|
||||
{ID: "ar-similar-present", Name: "Similar Present"},
|
||||
{ID: "ar-similar-absent", Name: "Similar Absent"},
|
||||
},
|
||||
}
|
||||
similarInDS := model.Artist{ID: "ar-similar-present", Name: "Similar Present Updated"}
|
||||
mockArtistRepo.SetData(model.Artists{*originalArtist, similarInDS})
|
||||
|
||||
updatedArtist, err := p.UpdateArtistInfo(ctx, "ar-cached", 5, false)
|
||||
|
||||
Expect(err).NotTo(HaveOccurred())
|
||||
Expect(updatedArtist).NotTo(BeNil())
|
||||
Expect(updatedArtist.ID).To(Equal(originalArtist.ID))
|
||||
Expect(updatedArtist.Name).To(Equal(originalArtist.Name))
|
||||
Expect(updatedArtist.MbzArtistID).To(Equal(originalArtist.MbzArtistID))
|
||||
Expect(updatedArtist.ExternalUrl).To(Equal(originalArtist.ExternalUrl))
|
||||
Expect(updatedArtist.Biography).To(Equal(originalArtist.Biography))
|
||||
Expect(updatedArtist.LargeImageUrl).To(Equal(originalArtist.LargeImageUrl))
|
||||
Expect(updatedArtist.ExternalInfoUpdatedAt).To(Equal(originalArtist.ExternalInfoUpdatedAt))
|
||||
|
||||
Expect(updatedArtist.SimilarArtists).To(HaveLen(1))
|
||||
Expect(updatedArtist.SimilarArtists[0].ID).To(Equal(similarInDS.ID))
|
||||
Expect(updatedArtist.SimilarArtists[0].Name).To(Equal(similarInDS.Name))
|
||||
|
||||
ag.AssertNotCalled(GinkgoT(), "GetArtistMBID")
|
||||
ag.AssertNotCalled(GinkgoT(), "GetArtistImages")
|
||||
ag.AssertNotCalled(GinkgoT(), "GetArtistBiography")
|
||||
ag.AssertNotCalled(GinkgoT(), "GetArtistURL")
|
||||
})
|
||||
|
||||
It("returns cached info and triggers background refresh when info is expired", func() {
|
||||
now := time.Now()
|
||||
expiredTime := now.Add(-conf.Server.DevArtistInfoTimeToLive * 2)
|
||||
originalArtist := &model.Artist{
|
||||
ID: "ar-expired",
|
||||
Name: "Expired Artist",
|
||||
ExternalInfoUpdatedAt: gg.P(expiredTime),
|
||||
SimilarArtists: model.Artists{
|
||||
{ID: "ar-exp-similar", Name: "Expired Similar"},
|
||||
},
|
||||
}
|
||||
similarInDS := model.Artist{ID: "ar-exp-similar", Name: "Expired Similar Updated"}
|
||||
mockArtistRepo.SetData(model.Artists{*originalArtist, similarInDS})
|
||||
|
||||
updatedArtist, err := p.UpdateArtistInfo(ctx, "ar-expired", 5, false)
|
||||
|
||||
Expect(err).NotTo(HaveOccurred())
|
||||
Expect(updatedArtist).NotTo(BeNil())
|
||||
Expect(updatedArtist.ID).To(Equal(originalArtist.ID))
|
||||
Expect(updatedArtist.Name).To(Equal(originalArtist.Name))
|
||||
Expect(updatedArtist.ExternalInfoUpdatedAt).To(Equal(originalArtist.ExternalInfoUpdatedAt))
|
||||
|
||||
Expect(updatedArtist.SimilarArtists).To(HaveLen(1))
|
||||
Expect(updatedArtist.SimilarArtists[0].ID).To(Equal(similarInDS.ID))
|
||||
Expect(updatedArtist.SimilarArtists[0].Name).To(Equal(similarInDS.Name))
|
||||
|
||||
ag.AssertNotCalled(GinkgoT(), "GetArtistMBID")
|
||||
ag.AssertNotCalled(GinkgoT(), "GetArtistImages")
|
||||
ag.AssertNotCalled(GinkgoT(), "GetArtistBiography")
|
||||
ag.AssertNotCalled(GinkgoT(), "GetArtistURL")
|
||||
})
|
||||
|
||||
It("includes non-present similar artists when includeNotPresent is true", func() {
|
||||
now := time.Now()
|
||||
originalArtist := &model.Artist{
|
||||
ID: "ar-similar-test",
|
||||
Name: "Similar Test Artist",
|
||||
ExternalInfoUpdatedAt: gg.P(now.Add(-conf.Server.DevArtistInfoTimeToLive / 2)),
|
||||
SimilarArtists: model.Artists{
|
||||
{ID: "ar-sim-present", Name: "Similar Present"},
|
||||
{ID: "", Name: "Similar Absent Raw"},
|
||||
{ID: "ar-sim-absent-lookup", Name: "Similar Absent Lookup"},
|
||||
},
|
||||
}
|
||||
similarInDS := model.Artist{ID: "ar-sim-present", Name: "Similar Present Updated"}
|
||||
mockArtistRepo.SetData(model.Artists{*originalArtist, similarInDS})
|
||||
|
||||
updatedArtist, err := p.UpdateArtistInfo(ctx, "ar-similar-test", 5, true)
|
||||
|
||||
Expect(err).NotTo(HaveOccurred())
|
||||
Expect(updatedArtist).NotTo(BeNil())
|
||||
|
||||
Expect(updatedArtist.SimilarArtists).To(HaveLen(3))
|
||||
Expect(updatedArtist.SimilarArtists[0].ID).To(Equal(similarInDS.ID))
|
||||
Expect(updatedArtist.SimilarArtists[0].Name).To(Equal(similarInDS.Name))
|
||||
Expect(updatedArtist.SimilarArtists[1].ID).To(BeEmpty())
|
||||
Expect(updatedArtist.SimilarArtists[1].Name).To(Equal("Similar Absent Raw"))
|
||||
Expect(updatedArtist.SimilarArtists[2].ID).To(BeEmpty())
|
||||
Expect(updatedArtist.SimilarArtists[2].Name).To(Equal("Similar Absent Lookup"))
|
||||
})
|
||||
|
||||
It("updates ArtistInfo even if an optional agent call fails", func() {
|
||||
originalArtist := &model.Artist{
|
||||
ID: "ar-agent-fail",
|
||||
Name: "Agent Fail Artist",
|
||||
}
|
||||
mockArtistRepo.SetData(model.Artists{*originalArtist})
|
||||
|
||||
expectedErr := errors.New("agent MBID failed")
|
||||
ag.On("GetArtistMBID", ctx, "ar-agent-fail", "Agent Fail Artist").Return("", expectedErr).Once()
|
||||
ag.On("GetArtistImages", ctx, "ar-agent-fail", "Agent Fail Artist", mock.Anything).Return(nil, nil).Maybe()
|
||||
ag.On("GetArtistBiography", ctx, "ar-agent-fail", "Agent Fail Artist", mock.Anything).Return("", nil).Maybe()
|
||||
ag.On("GetArtistURL", ctx, "ar-agent-fail", "Agent Fail Artist", mock.Anything).Return("", nil).Maybe()
|
||||
ag.On("GetSimilarArtists", ctx, "ar-agent-fail", "Agent Fail Artist", mock.Anything, 100).Return(nil, nil).Maybe()
|
||||
|
||||
updatedArtist, err := p.UpdateArtistInfo(ctx, "ar-agent-fail", 10, false)
|
||||
|
||||
Expect(err).NotTo(HaveOccurred())
|
||||
Expect(updatedArtist).NotTo(BeNil())
|
||||
Expect(updatedArtist.ID).To(Equal("ar-agent-fail"))
|
||||
ag.AssertExpectations(GinkgoT())
|
||||
})
|
||||
})
|
||||
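The cached and expired cases above all reduce to a TTL comparison against ExternalInfoUpdatedAt, with conf.Server.DevArtistInfoTimeToLive set to one hour in BeforeEach. Here is a self-contained sketch of that rule; needsRefresh is a hypothetical name, not the provider's own function, and the behavior described in the comments is inferred from the assertions above.

package main

import (
	"fmt"
	"time"
)

// needsRefresh reproduces, in isolation, the staleness rule these tests revolve
// around: info younger than the TTL is served from the datastore untouched,
// while older (or missing) info is what would require a refresh.
func needsRefresh(updatedAt *time.Time, ttl time.Duration) bool {
	if updatedAt == nil {
		return true // never fetched before
	}
	return time.Since(*updatedAt) > ttl
}

func main() {
	ttl := time.Hour // mirrors conf.Server.DevArtistInfoTimeToLive in the tests
	fresh := time.Now().Add(-ttl / 2)
	stale := time.Now().Add(-ttl * 2)
	fmt.Println(needsRefresh(&fresh, ttl)) // false: cached copy returned as-is
	fmt.Println(needsRefresh(&stale, ttl)) // true: cached copy returned, refresh due
	fmt.Println(needsRefresh(nil, ttl))    // true: no cached info yet
}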
Some files were not shown because too many files have changed in this diff.