Mirror of https://github.com/advplyr/audiobookshelf.git, synced 2026-01-06 06:31:19 -05:00
Compare commits
34 Commits
| Author | SHA1 | Date |
|---|---|---|
| | 890b0b949e | |
| | b19e360bbb | |
| | 1ff7952074 | |
| | 259d93d882 | |
| | 14f60a593b | |
| | 7334580c8c | |
| | f467c44543 | |
| | 867354e59d | |
| | 67952cc577 | |
| | 079a15541c | |
| | 658ac04268 | |
| | cbee6d8f5e | |
| | 68413ae2f6 | |
| | 252a233282 | |
| | c35185fff7 | |
| | 9774b2cfa5 | |
| | 344890fb45 | |
| | 5fa0897ad7 | |
| | 95c80a5b18 | |
| | 0f1b64b883 | |
| | 615ed26f0f | |
| | 84803cef82 | |
| | 605bd73c11 | |
| | cc89db059b | |
| | a03146e09c | |
| | 33aa4f1952 | |
| | c03f18b90a | |
| | 0dedb09a07 | |
| | 2b5484243b | |
| | c496db7c95 | |
| | 9917f2d358 | |
| | 8c3ba67583 | |
| | 6d8720b404 | |
| | 843dd0b1b2 | |
@@ -120,6 +120,7 @@ export default {
         this.users = res.users.sort((a, b) => {
           return a.createdAt - b.createdAt
         })
+        this.$emit('numUsers', this.users.length)
       })
       .catch((error) => {
         console.error('Failed', error)
client/package-lock.json (generated, 4 changed lines)
@@ -1,12 +1,12 @@
 {
   "name": "audiobookshelf-client",
-  "version": "2.17.3",
+  "version": "2.17.4",
   "lockfileVersion": 3,
   "requires": true,
   "packages": {
     "": {
       "name": "audiobookshelf-client",
-      "version": "2.17.3",
+      "version": "2.17.4",
       "license": "ISC",
       "dependencies": {
         "@nuxtjs/axios": "^5.13.6",

client/package.json
@@ -1,6 +1,6 @@
 {
   "name": "audiobookshelf-client",
-  "version": "2.17.3",
+  "version": "2.17.4",
   "buildNumber": 1,
   "description": "Self-hosted audiobook and podcast client",
   "main": "index.js",
@@ -64,6 +64,20 @@
       <ui-multi-select ref="redirectUris" v-model="newAuthSettings.authOpenIDMobileRedirectURIs" :items="newAuthSettings.authOpenIDMobileRedirectURIs" :label="$strings.LabelMobileRedirectURIs" class="mb-2" :menuDisabled="true" :disabled="savingSettings" />
       <p class="sm:pl-4 text-sm text-gray-300 mb-2" v-html="$strings.LabelMobileRedirectURIsDescription" />

+      <div class="flex sm:items-center flex-col sm:flex-row pt-1 mb-2">
+        <div class="w-44">
+          <ui-dropdown v-model="newAuthSettings.authOpenIDSubfolderForRedirectURLs" small :items="subfolderOptions" :label="$strings.LabelWebRedirectURLsSubfolder" :disabled="savingSettings" />
+        </div>
+        <div class="mt-2 sm:mt-5">
+          <p class="sm:pl-4 text-sm text-gray-300">{{ $strings.LabelWebRedirectURLsDescription }}</p>
+          <p class="sm:pl-4 text-sm text-gray-300 mb-2">
+            <code>{{ webCallbackURL }}</code>
+            <br />
+            <code>{{ mobileAppCallbackURL }}</code>
+          </p>
+        </div>
+      </div>
+
       <ui-text-input-with-label ref="buttonTextInput" v-model="newAuthSettings.authOpenIDButtonText" :disabled="savingSettings" :label="$strings.LabelButtonText" class="mb-2" />

       <div class="flex sm:items-center flex-col sm:flex-row pt-1 mb-2">
@@ -164,6 +178,27 @@ export default {
           value: 'username'
         }
       ]
     },
+    subfolderOptions() {
+      const options = [
+        {
+          text: 'None',
+          value: ''
+        }
+      ]
+      if (this.$config.routerBasePath) {
+        options.push({
+          text: this.$config.routerBasePath,
+          value: this.$config.routerBasePath
+        })
+      }
+      return options
+    },
+    webCallbackURL() {
+      return `https://<your.server.com>${this.newAuthSettings.authOpenIDSubfolderForRedirectURLs ? this.newAuthSettings.authOpenIDSubfolderForRedirectURLs : ''}/auth/openid/callback`
+    },
+    mobileAppCallbackURL() {
+      return `https://<your.server.com>${this.newAuthSettings.authOpenIDSubfolderForRedirectURLs ? this.newAuthSettings.authOpenIDSubfolderForRedirectURLs : ''}/auth/openid/mobile-redirect`
+    }
   },
   methods: {
@@ -325,7 +360,8 @@ export default {
     },
     init() {
       this.newAuthSettings = {
-        ...this.authSettings
+        ...this.authSettings,
+        authOpenIDSubfolderForRedirectURLs: this.authSettings.authOpenIDSubfolderForRedirectURLs === undefined ? this.$config.routerBasePath : this.authSettings.authOpenIDSubfolderForRedirectURLs
       }
       this.enableLocalAuth = this.authMethods.includes('local')
       this.enableOpenIDAuth = this.authMethods.includes('openid')
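The two computed URLs above are exactly what an administrator has to register with the identity provider. A minimal sketch of the same string construction outside of Vue, using a placeholder server address and subfolder that are not part of this diff:

```js
// Illustrative only: mirrors the webCallbackURL / mobileAppCallbackURL computed properties.
// 'https://abs.example.com' and '/audiobookshelf' are placeholder values.
function redirectUrls(serverAddress, subfolder) {
  const prefix = subfolder || '' // the "None" dropdown option stores an empty string
  return {
    web: `${serverAddress}${prefix}/auth/openid/callback`,
    mobile: `${serverAddress}${prefix}/auth/openid/mobile-redirect`
  }
}

console.log(redirectUrls('https://abs.example.com', '/audiobookshelf'))
// { web: 'https://abs.example.com/audiobookshelf/auth/openid/callback',
//   mobile: 'https://abs.example.com/audiobookshelf/auth/openid/mobile-redirect' }
```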
@@ -2,6 +2,10 @@
   <div>
     <app-settings-content :header-text="$strings.HeaderUsers">
       <template #header-items>
+        <div v-if="numUsers" class="mx-2 px-1.5 rounded-lg bg-primary/50 text-gray-300/90 text-sm inline-flex items-center justify-center">
+          <span>{{ numUsers }}</span>
+        </div>
+
         <ui-tooltip :text="$strings.LabelClickForMoreInfo" class="inline-flex ml-2">
           <a href="https://www.audiobookshelf.org/guides/users" target="_blank" class="inline-flex">
             <span class="material-symbols text-xl w-5 text-gray-200">help_outline</span>
@@ -13,7 +17,7 @@
         <ui-btn color="primary" small @click="setShowUserModal()">{{ $strings.ButtonAddUser }}</ui-btn>
       </template>

-      <tables-users-table class="pt-2" @edit="setShowUserModal" />
+      <tables-users-table class="pt-2" @edit="setShowUserModal" @numUsers="(count) => (numUsers = count)" />
     </app-settings-content>
     <modals-account-modal ref="accountModal" v-model="showAccountModal" :account="selectedAccount" />
   </div>
@@ -29,7 +33,8 @@ export default {
   data() {
     return {
       selectedAccount: null,
-      showAccountModal: false
+      showAccountModal: false,
+      numUsers: 0
     }
   },
   computed: {},
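Taken together with the UsersTable change above, the users page now receives the user count through an event instead of querying it again. A hedged sketch of that contract with a plain callback standing in for Vue's `$emit`/`@numUsers` wiring:

```js
// Illustrative sketch: the table sorts users by createdAt and reports the count upward.
function loadUsers(fetchedUsers, onNumUsers) {
  const users = fetchedUsers.sort((a, b) => a.createdAt - b.createdAt)
  onNumUsers(users.length) // users.vue stores this in its numUsers data property
  return users
}

loadUsers([{ createdAt: 200 }, { createdAt: 100 }], (count) => console.log('numUsers:', count))
// numUsers: 2
```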
@@ -584,7 +584,7 @@
   "LabelSettingsStoreMetadataWithItemHelp": "Standardmäßig werden die Metadaten in /metadata/items gespeichert. Wenn diese Option aktiviert ist, werden die Metadaten als OPF-Datei (Textdatei) in dem gleichen Ordner gespeichert in welchem sich auch das Medium befindet",
   "LabelSettingsTimeFormat": "Zeitformat",
   "LabelShare": "Freigeben",
-  "LabelShareOpen": "Freigabe",
+  "LabelShareOpen": "Freigeben",
   "LabelShareURL": "Freigabe URL",
   "LabelShowAll": "Alles anzeigen",
   "LabelShowSeconds": "Zeige Sekunden",
@@ -728,7 +728,7 @@
   "MessageConfirmPurgeCache": "Cache leeren wird das ganze Verzeichnis <code>/metadata/cache</code> löschen. <br /><br />Bist du dir sicher, dass das Cache Verzeichnis gelöscht werden soll?",
   "MessageConfirmPurgeItemsCache": "Durch Elementcache leeren wird das gesamte Verzeichnis unter <code>/metadata/cache/items</code> gelöscht.<br />Bist du dir sicher?",
   "MessageConfirmQuickEmbed": "Warnung! Audiodateien werden bei der Schnelleinbettung nicht gesichert! Achte darauf, dass du eine Sicherungskopie der Audiodateien besitzt. <br><br>Möchtest du fortfahren?",
-  "MessageConfirmQuickMatchEpisodes": "Schnelles Zuordnen von Episoden überschreibt die Details, wenn eine Übereinstimmung gefunden wird. Nur nicht zugeordnete Episoden werden aktualisiert. Bist du sicher?",
+  "MessageConfirmQuickMatchEpisodes": "Schnellabgleich von Episoden überschreibt deren Details, wenn ein passender Eintrag gefunden wurde, wird aber nur auf bisher unbearbeitete Episoden angewendet. Wirklich fortfahren?",
   "MessageConfirmReScanLibraryItems": "{0} Elemente werden erneut gescannt! Bist du dir sicher?",
   "MessageConfirmRemoveAllChapters": "Alle Kapitel werden entfernt! Bist du dir sicher?",
   "MessageConfirmRemoveAuthor": "Autor \"{0}\" wird enfernt! Bist du dir sicher?",
@@ -833,7 +833,7 @@
   "MessageSetChaptersFromTracksDescription": "Kaitelerstellung basiert auf den existierenden einzelnen Audiodateien. Pro existierende Audiodatei wird 1 Kapitel erstellt, wobei deren Kapitelname aus dem Audiodateinamen extrahiert wird",
   "MessageShareExpirationWillBe": "Läuft am <strong>{0}</strong> ab",
   "MessageShareExpiresIn": "Läuft in {0} ab",
-  "MessageShareURLWillBe": "Der Freigabe Link wird <strong>{0}</strong> sein.",
+  "MessageShareURLWillBe": "Der Freigabe Link wird <strong>{0}</strong> sein",
   "MessageStartPlaybackAtTime": "Start der Wiedergabe für \"{0}\" bei {1}?",
   "MessageTaskAudioFileNotWritable": "Die Audiodatei \"{0}\" ist schreibgeschützt",
   "MessageTaskCanceledByUser": "Aufgabe vom Benutzer abgebrochen",
@@ -1041,7 +1041,7 @@
   "ToastRenameFailed": "Umbenennen fehlgeschlagen",
   "ToastRescanFailed": "Erneut scannen fehlgeschlagen für {0}",
   "ToastRescanRemoved": "Erneut scannen erledigt, Artikel wurde entfernt",
-  "ToastRescanUpToDate": "Erneut scannen erledigt, Artikel wahr auf dem neusten Stand",
+  "ToastRescanUpToDate": "Erneut scannen erledigt, Artikel war auf dem neusten Stand",
   "ToastRescanUpdated": "Erneut scannen erledigt, Artikel wurde verändert",
   "ToastScanFailed": "Fehler beim scannen des Artikels der Bibliothek",
   "ToastSelectAtLeastOneUser": "Wähle mindestens einen Benutzer aus",
@@ -679,6 +679,8 @@
   "LabelViewPlayerSettings": "View player settings",
   "LabelViewQueue": "View player queue",
   "LabelVolume": "Volume",
+  "LabelWebRedirectURLsDescription": "Authorize these URLs in your OAuth provider to allow redirection back to the web app after login:",
+  "LabelWebRedirectURLsSubfolder": "Subfolder for Redirect URLs",
   "LabelWeekdaysToRun": "Weekdays to run",
   "LabelXBooks": "{0} books",
   "LabelXItems": "{0} items",
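The new labels follow the same placeholder convention as the existing entries (for example "{0} books"). A small hedged sketch of how such numbered placeholders are typically filled; the helper name is illustrative and not the client's actual i18n implementation:

```js
// Replace {0}, {1}, ... in a locale string with positional arguments.
function formatString(template, ...args) {
  return template.replace(/\{(\d+)\}/g, (_, i) => (args[i] !== undefined ? String(args[i]) : `{${i}}`))
}

console.log(formatString('{0} books', 12)) // "12 books"
```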
@@ -679,6 +679,8 @@
   "LabelViewPlayerSettings": "Ver los ajustes del reproductor",
   "LabelViewQueue": "Ver Fila del Reproductor",
   "LabelVolume": "Volumen",
+  "LabelWebRedirectURLsDescription": "Autorice estas URL en su proveedor OAuth para permitir la redirección a la aplicación web después de iniciar sesión:",
+  "LabelWebRedirectURLsSubfolder": "Subcarpeta para URL de redireccionamiento",
   "LabelWeekdaysToRun": "Correr en Días de la Semana",
   "LabelXBooks": "{0} libros",
   "LabelXItems": "{0} elementos",
@@ -271,7 +271,7 @@
   "LabelCollapseSubSeries": "Podserijale prikaži sažeto",
   "LabelCollection": "Zbirka",
   "LabelCollections": "Zbirke",
-  "LabelComplete": "Dovršeno",
+  "LabelComplete": "Potpuno",
   "LabelConfirmPassword": "Potvrda zaporke",
   "LabelContinueListening": "Nastavi slušati",
   "LabelContinueReading": "Nastavi čitati",
@@ -532,7 +532,7 @@
   "LabelSelectAllEpisodes": "Označi sve nastavke",
   "LabelSelectEpisodesShowing": "Prikazujem {0} odabranih nastavaka",
   "LabelSelectUsers": "Označi korisnike",
-  "LabelSendEbookToDevice": "Pošalji e-knjigu",
+  "LabelSendEbookToDevice": "Pošalji e-knjigu …",
   "LabelSequence": "Slijed",
   "LabelSerial": "Serijal",
   "LabelSeries": "Serijal",
@@ -567,7 +567,7 @@
   "LabelSettingsLibraryMarkAsFinishedTimeRemaining": "Preostalo vrijeme je manje od (sekundi)",
   "LabelSettingsLibraryMarkAsFinishedWhen": "Označi medij dovršenim kada",
   "LabelSettingsOnlyShowLaterBooksInContinueSeries": "Preskoči ranije knjige u funkciji Nastavi serijal",
-  "LabelSettingsOnlyShowLaterBooksInContinueSeriesHelp": "Na polici početne stranice Nastavi serijal prikazuje se prva nezapočeta knjiga serijala koji imaju barem jednu dovršenu knjigu i nijednu započetu knjigu. Ako uključite ovu opciju, serijal će vam se nastaviti od zadnje dovršene knjige umjesto od prve nezapočete knjige.",
+  "LabelSettingsOnlyShowLaterBooksInContinueSeriesHelp": "Na polici početne stranice Nastavi serijal prikazuje se prva nezapočeta knjiga serijala koji imaju barem jednu dovršenu knjigu i nijednu započetu knjigu. Ako se ova opcija uključi serijal će nastaviti od zadnje dovršene knjige umjesto od prve nezapočete knjige.",
   "LabelSettingsParseSubtitles": "Raščlani podnaslove",
   "LabelSettingsParseSubtitlesHelp": "Iz naziva mape zvučne knjige raščlanjuje podnaslov.<br>Podnaslov mora biti odvojen s \" - \"<br>npr. \"Naslov knjige - Ovo je podnaslov\" imat će podnaslov \"Ovo je podnaslov\"",
   "LabelSettingsPreferMatchedMetadata": "Daj prednost meta-podatcima prepoznatih stavki",
@@ -679,6 +679,8 @@
   "LabelViewPlayerSettings": "Pogledaj postavke reproduktora",
   "LabelViewQueue": "Pogledaj redoslijed izvođenja reproduktora",
   "LabelVolume": "Glasnoća",
+  "LabelWebRedirectURLsDescription": "Autoriziraj ove URL-ove u svom pružatelju OAuth ovjere kako bi omogućio preusmjeravanje natrag na web-aplikaciju nakon prijave:",
+  "LabelWebRedirectURLsSubfolder": "Podmapa za URL-ove preusmjeravanja",
   "LabelWeekdaysToRun": "Dani u tjednu za pokretanje",
   "LabelXBooks": "{0} knjiga",
   "LabelXItems": "{0} stavki",
@@ -184,7 +184,7 @@
   "HeaderScheduleEpisodeDownloads": "Načrtovanje samodejnega prenosa epizod",
   "HeaderScheduleLibraryScans": "Načrtuj samodejno pregledovanje knjižnice",
   "HeaderSession": "Seja",
-  "HeaderSetBackupSchedule": "Nastavite urnik varnostnega kopiranja",
+  "HeaderSetBackupSchedule": "Nastavi urnik varnostnega kopiranja",
   "HeaderSettings": "Nastavitve",
   "HeaderSettingsDisplay": "Zaslon",
   "HeaderSettingsExperimental": "Eksperimentalne funkcije",
@@ -830,7 +830,7 @@
   "MessageSearchResultsFor": "Rezultati iskanja za",
   "MessageSelected": "{0} izbrano",
   "MessageServerCouldNotBeReached": "Strežnika ni bilo mogoče doseči",
-  "MessageSetChaptersFromTracksDescription": "Nastavite poglavja z uporabo vsake zvočne datoteke kot poglavja in naslova poglavja kot imena zvočne datoteke",
+  "MessageSetChaptersFromTracksDescription": "Nastavi poglavja z uporabo vsake zvočne datoteke kot poglavja in naslova poglavja kot imena zvočne datoteke",
   "MessageShareExpirationWillBe": "Potečeno bo <strong>{0}</strong>",
   "MessageShareExpiresIn": "Poteče čez {0}",
   "MessageShareURLWillBe": "URL za skupno rabo bo <strong>{0}</strong>",
@@ -663,6 +663,7 @@
   "LabelUpdateDetailsHelp": "找到匹配项时允许覆盖所选书籍存在的详细信息",
   "LabelUpdatedAt": "更新时间",
   "LabelUploaderDragAndDrop": "拖放文件或文件夹",
+  "LabelUploaderDragAndDropFilesOnly": "拖放文件",
   "LabelUploaderDropFiles": "删除文件",
   "LabelUploaderItemFetchMetadataHelp": "自动获取标题, 作者和系列",
   "LabelUseAdvancedOptions": "使用高级选项",
@@ -678,6 +679,8 @@
   "LabelViewPlayerSettings": "查看播放器设置",
   "LabelViewQueue": "查看播放列表",
   "LabelVolume": "音量",
+  "LabelWebRedirectURLsDescription": "在你的 OAuth 提供商中授权这些链接,以允许在登录后重定向回 Web 应用程序:",
+  "LabelWebRedirectURLsSubfolder": "重定向 URL 的子文件夹",
   "LabelWeekdaysToRun": "工作日运行",
   "LabelXBooks": "{0} 本书",
   "LabelXItems": "{0} 项目",
package-lock.json (generated, 4 changed lines)
@@ -1,12 +1,12 @@
 {
   "name": "audiobookshelf",
-  "version": "2.17.3",
+  "version": "2.17.4",
   "lockfileVersion": 3,
   "requires": true,
   "packages": {
     "": {
       "name": "audiobookshelf",
-      "version": "2.17.3",
+      "version": "2.17.4",
       "license": "GPL-3.0",
       "dependencies": {
         "axios": "^0.27.2",

package.json
@@ -1,6 +1,6 @@
 {
   "name": "audiobookshelf",
-  "version": "2.17.3",
+  "version": "2.17.4",
   "buildNumber": 1,
   "description": "Self-hosted audiobook and podcast server",
   "main": "index.js",
@@ -131,7 +131,7 @@ class Auth {
       {
         client: openIdClient,
         params: {
-          redirect_uri: '/auth/openid/callback',
+          redirect_uri: `${global.ServerSettings.authOpenIDSubfolderForRedirectURLs}/auth/openid/callback`,
           scope: 'openid profile email'
         }
       },
@@ -480,9 +480,9 @@ class Auth {
       // for the request to mobile-redirect and as such the session is not shared
       this.openIdAuthSession.set(state, { mobile_redirect_uri: req.query.redirect_uri })

-      redirectUri = new URL('/auth/openid/mobile-redirect', hostUrl).toString()
+      redirectUri = new URL(`${global.ServerSettings.authOpenIDSubfolderForRedirectURLs}/auth/openid/mobile-redirect`, hostUrl).toString()
     } else {
-      redirectUri = new URL('/auth/openid/callback', hostUrl).toString()
+      redirectUri = new URL(`${global.ServerSettings.authOpenIDSubfolderForRedirectURLs}/auth/openid/callback`, hostUrl).toString()

       if (req.query.state) {
         Logger.debug(`[Auth] Invalid state - not allowed on web openid flow`)
@@ -733,7 +733,7 @@ class Auth {
       const host = req.get('host')
       // TODO: ABS does currently not support subfolders for installation
       // If we want to support it we need to include a config for the serverurl
-      postLogoutRedirectUri = `${protocol}://${host}/login`
+      postLogoutRedirectUri = `${protocol}://${host}${global.RouterBasePath}/login`
     }
     // else for openid-mobile we keep postLogoutRedirectUri on null
     // nice would be to redirect to the app here, but for example Authentik does not implement
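Because the subfolder is interpolated into a path that is then resolved against hostUrl, the WHATWG URL constructor only produces a prefixed callback when a subfolder is configured. A quick illustrative check with a placeholder host and an assumed subfolder value of '/audiobookshelf':

```js
// Illustrative values; the real values come from global.ServerSettings and the request host.
const hostUrl = 'https://abs.example.com'

console.log(new URL('/audiobookshelf/auth/openid/callback', hostUrl).toString())
// https://abs.example.com/audiobookshelf/auth/openid/callback

console.log(new URL(`${''}/auth/openid/callback`, hostUrl).toString())
// https://abs.example.com/auth/openid/callback (an empty subfolder keeps the previous behavior)
```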
@@ -84,7 +84,6 @@ class Server {
     Logger.logManager = new LogManager()

     this.server = null
-    this.io = null
   }

   /**
@@ -441,18 +440,11 @@ class Server {
   async stop() {
     Logger.info('=== Stopping Server ===')
     Watcher.close()
-    Logger.info('Watcher Closed')
-
-    return new Promise((resolve) => {
-      SocketAuthority.close((err) => {
-        if (err) {
-          Logger.error('Failed to close server', err)
-        } else {
-          Logger.info('Server successfully closed')
-        }
-        resolve()
-      })
-    })
+    Logger.info('[Server] Watcher Closed')
+    await SocketAuthority.close()
+    Logger.info('[Server] Closing HTTP Server')
+    await new Promise((resolve) => this.server.close(resolve))
+    Logger.info('[Server] HTTP Server Closed')
   }
 }
 module.exports = Server
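The promisified close in the new stop() is a common pattern worth a standalone illustration. This minimal sketch (plain Node.js http, ephemeral port, illustrative names) shows why awaiting `new Promise((resolve) => server.close(resolve))` guarantees the HTTP server has finished closing before the next log line runs:

```js
// Sketch of the shutdown pattern: wrap a callback-style close() in a Promise
// so it can be awaited in sequence with other async cleanup steps.
const http = require('http')

const server = http.createServer((req, res) => res.end('ok'))
server.listen(0, async () => {
  // ... later, during shutdown:
  await new Promise((resolve) => server.close(resolve))
  console.log('HTTP server closed') // only logs after close() has completed
})
```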
@@ -14,7 +14,7 @@ const Auth = require('./Auth')
|
||||
class SocketAuthority {
|
||||
constructor() {
|
||||
this.Server = null
|
||||
this.io = null
|
||||
this.socketIoServers = []
|
||||
|
||||
/** @type {Object.<string, SocketClient>} */
|
||||
this.clients = {}
|
||||
@@ -89,82 +89,104 @@ class SocketAuthority {
|
||||
*
|
||||
* @param {Function} callback
|
||||
*/
|
||||
close(callback) {
|
||||
Logger.info('[SocketAuthority] Shutting down')
|
||||
// This will close all open socket connections, and also close the underlying http server
|
||||
if (this.io) this.io.close(callback)
|
||||
else callback()
|
||||
async close() {
|
||||
Logger.info('[SocketAuthority] closing...')
|
||||
const closePromises = this.socketIoServers.map((io) => {
|
||||
return new Promise((resolve) => {
|
||||
Logger.info(`[SocketAuthority] Closing Socket.IO server: ${io.path}`)
|
||||
io.close(() => {
|
||||
Logger.info(`[SocketAuthority] Socket.IO server closed: ${io.path}`)
|
||||
resolve()
|
||||
})
|
||||
})
|
||||
})
|
||||
await Promise.all(closePromises)
|
||||
Logger.info('[SocketAuthority] closed')
|
||||
this.socketIoServers = []
|
||||
}
|
||||
|
||||
initialize(Server) {
|
||||
this.Server = Server
|
||||
|
||||
this.io = new SocketIO.Server(this.Server.server, {
|
||||
const socketIoOptions = {
|
||||
cors: {
|
||||
origin: '*',
|
||||
methods: ['GET', 'POST']
|
||||
},
|
||||
path: `${global.RouterBasePath}/socket.io`
|
||||
})
|
||||
|
||||
this.io.on('connection', (socket) => {
|
||||
this.clients[socket.id] = {
|
||||
id: socket.id,
|
||||
socket,
|
||||
connected_at: Date.now()
|
||||
}
|
||||
socket.sheepClient = this.clients[socket.id]
|
||||
}
|
||||
|
||||
Logger.info('[SocketAuthority] Socket Connected', socket.id)
|
||||
const ioServer = new SocketIO.Server(Server.server, socketIoOptions)
|
||||
ioServer.path = '/socket.io'
|
||||
this.socketIoServers.push(ioServer)
|
||||
|
||||
// Required for associating a User with a socket
|
||||
socket.on('auth', (token) => this.authenticateSocket(socket, token))
|
||||
if (global.RouterBasePath) {
|
||||
// open a separate socket.io server for the router base path, keeping the original server open for legacy clients
|
||||
const ioBasePath = `${global.RouterBasePath}/socket.io`
|
||||
const ioBasePathServer = new SocketIO.Server(Server.server, { ...socketIoOptions, path: ioBasePath })
|
||||
ioBasePathServer.path = ioBasePath
|
||||
this.socketIoServers.push(ioBasePathServer)
|
||||
}
|
||||
|
||||
// Scanning
|
||||
socket.on('cancel_scan', (libraryId) => this.cancelScan(libraryId))
|
||||
|
||||
// Logs
|
||||
socket.on('set_log_listener', (level) => Logger.addSocketListener(socket, level))
|
||||
socket.on('remove_log_listener', () => Logger.removeSocketListener(socket.id))
|
||||
|
||||
// Sent automatically from socket.io clients
|
||||
socket.on('disconnect', (reason) => {
|
||||
Logger.removeSocketListener(socket.id)
|
||||
|
||||
const _client = this.clients[socket.id]
|
||||
if (!_client) {
|
||||
Logger.warn(`[SocketAuthority] Socket ${socket.id} disconnect, no client (Reason: ${reason})`)
|
||||
} else if (!_client.user) {
|
||||
Logger.info(`[SocketAuthority] Unauth socket ${socket.id} disconnected (Reason: ${reason})`)
|
||||
delete this.clients[socket.id]
|
||||
} else {
|
||||
Logger.debug('[SocketAuthority] User Offline ' + _client.user.username)
|
||||
this.adminEmitter('user_offline', _client.user.toJSONForPublic(this.Server.playbackSessionManager.sessions))
|
||||
|
||||
const disconnectTime = Date.now() - _client.connected_at
|
||||
Logger.info(`[SocketAuthority] Socket ${socket.id} disconnected from client "${_client.user.username}" after ${disconnectTime}ms (Reason: ${reason})`)
|
||||
delete this.clients[socket.id]
|
||||
this.socketIoServers.forEach((io) => {
|
||||
io.on('connection', (socket) => {
|
||||
this.clients[socket.id] = {
|
||||
id: socket.id,
|
||||
socket,
|
||||
connected_at: Date.now()
|
||||
}
|
||||
})
|
||||
socket.sheepClient = this.clients[socket.id]
|
||||
|
||||
//
|
||||
// Events for testing
|
||||
//
|
||||
socket.on('message_all_users', (payload) => {
|
||||
// admin user can send a message to all authenticated users
|
||||
// displays on the web app as a toast
|
||||
const client = this.clients[socket.id] || {}
|
||||
if (client.user?.isAdminOrUp) {
|
||||
this.emitter('admin_message', payload.message || '')
|
||||
} else {
|
||||
Logger.error(`[SocketAuthority] Non-admin user sent the message_all_users event`)
|
||||
}
|
||||
})
|
||||
socket.on('ping', () => {
|
||||
const client = this.clients[socket.id] || {}
|
||||
const user = client.user || {}
|
||||
Logger.debug(`[SocketAuthority] Received ping from socket ${user.username || 'No User'}`)
|
||||
socket.emit('pong')
|
||||
Logger.info(`[SocketAuthority] Socket Connected to ${io.path}`, socket.id)
|
||||
|
||||
// Required for associating a User with a socket
|
||||
socket.on('auth', (token) => this.authenticateSocket(socket, token))
|
||||
|
||||
// Scanning
|
||||
socket.on('cancel_scan', (libraryId) => this.cancelScan(libraryId))
|
||||
|
||||
// Logs
|
||||
socket.on('set_log_listener', (level) => Logger.addSocketListener(socket, level))
|
||||
socket.on('remove_log_listener', () => Logger.removeSocketListener(socket.id))
|
||||
|
||||
// Sent automatically from socket.io clients
|
||||
socket.on('disconnect', (reason) => {
|
||||
Logger.removeSocketListener(socket.id)
|
||||
|
||||
const _client = this.clients[socket.id]
|
||||
if (!_client) {
|
||||
Logger.warn(`[SocketAuthority] Socket ${socket.id} disconnect, no client (Reason: ${reason})`)
|
||||
} else if (!_client.user) {
|
||||
Logger.info(`[SocketAuthority] Unauth socket ${socket.id} disconnected (Reason: ${reason})`)
|
||||
delete this.clients[socket.id]
|
||||
} else {
|
||||
Logger.debug('[SocketAuthority] User Offline ' + _client.user.username)
|
||||
this.adminEmitter('user_offline', _client.user.toJSONForPublic(this.Server.playbackSessionManager.sessions))
|
||||
|
||||
const disconnectTime = Date.now() - _client.connected_at
|
||||
Logger.info(`[SocketAuthority] Socket ${socket.id} disconnected from client "${_client.user.username}" after ${disconnectTime}ms (Reason: ${reason})`)
|
||||
delete this.clients[socket.id]
|
||||
}
|
||||
})
|
||||
|
||||
//
|
||||
// Events for testing
|
||||
//
|
||||
socket.on('message_all_users', (payload) => {
|
||||
// admin user can send a message to all authenticated users
|
||||
// displays on the web app as a toast
|
||||
const client = this.clients[socket.id] || {}
|
||||
if (client.user?.isAdminOrUp) {
|
||||
this.emitter('admin_message', payload.message || '')
|
||||
} else {
|
||||
Logger.error(`[SocketAuthority] Non-admin user sent the message_all_users event`)
|
||||
}
|
||||
})
|
||||
socket.on('ping', () => {
|
||||
const client = this.clients[socket.id] || {}
|
||||
const user = client.user || {}
|
||||
Logger.debug(`[SocketAuthority] Received ping from socket ${user.username || 'No User'}`)
|
||||
socket.emit('pong')
|
||||
})
|
||||
})
|
||||
})
|
||||
}
|
||||
|
||||
@@ -400,19 +400,48 @@ class LibraryController {
|
||||
model: Database.podcastEpisodeModel,
|
||||
attributes: ['id']
|
||||
}
|
||||
},
|
||||
{
|
||||
model: Database.bookModel,
|
||||
attributes: ['id'],
|
||||
include: [
|
||||
{
|
||||
model: Database.bookAuthorModel,
|
||||
attributes: ['authorId']
|
||||
},
|
||||
{
|
||||
model: Database.bookSeriesModel,
|
||||
attributes: ['seriesId']
|
||||
}
|
||||
]
|
||||
}
|
||||
]
|
||||
})
|
||||
Logger.info(`[LibraryController] Removed folder "${folder.path}" from library "${req.library.name}" with ${libraryItemsInFolder.length} library items`)
|
||||
const seriesIds = []
|
||||
const authorIds = []
|
||||
for (const libraryItem of libraryItemsInFolder) {
|
||||
let mediaItemIds = []
|
||||
if (req.library.isPodcast) {
|
||||
mediaItemIds = libraryItem.media.podcastEpisodes.map((pe) => pe.id)
|
||||
} else {
|
||||
mediaItemIds.push(libraryItem.mediaId)
|
||||
if (libraryItem.media.bookAuthors.length) {
|
||||
authorIds.push(...libraryItem.media.bookAuthors.map((ba) => ba.authorId))
|
||||
}
|
||||
if (libraryItem.media.bookSeries.length) {
|
||||
seriesIds.push(...libraryItem.media.bookSeries.map((bs) => bs.seriesId))
|
||||
}
|
||||
}
|
||||
Logger.info(`[LibraryController] Removing library item "${libraryItem.id}" from folder "${folder.path}"`)
|
||||
await this.handleDeleteLibraryItem(libraryItem.mediaType, libraryItem.id, mediaItemIds)
|
||||
await this.handleDeleteLibraryItem(libraryItem.id, mediaItemIds)
|
||||
}
|
||||
|
||||
if (authorIds.length) {
|
||||
await this.checkRemoveAuthorsWithNoBooks(authorIds)
|
||||
}
|
||||
if (seriesIds.length) {
|
||||
await this.checkRemoveEmptySeries(seriesIds)
|
||||
}
|
||||
|
||||
// Remove folder
|
||||
@@ -501,7 +530,7 @@ class LibraryController {
|
||||
mediaItemIds.push(libraryItem.mediaId)
|
||||
}
|
||||
Logger.info(`[LibraryController] Removing library item "${libraryItem.id}" from library "${req.library.name}"`)
|
||||
await this.handleDeleteLibraryItem(libraryItem.mediaType, libraryItem.id, mediaItemIds)
|
||||
await this.handleDeleteLibraryItem(libraryItem.id, mediaItemIds)
|
||||
}
|
||||
|
||||
// Set PlaybackSessions libraryId to null
|
||||
@@ -580,6 +609,8 @@ class LibraryController {
|
||||
* DELETE: /api/libraries/:id/issues
|
||||
* Remove all library items missing or invalid
|
||||
*
|
||||
* @this {import('../routers/ApiRouter')}
|
||||
*
|
||||
* @param {LibraryControllerRequest} req
|
||||
* @param {Response} res
|
||||
*/
|
||||
@@ -605,6 +636,20 @@ class LibraryController {
|
||||
model: Database.podcastEpisodeModel,
|
||||
attributes: ['id']
|
||||
}
|
||||
},
|
||||
{
|
||||
model: Database.bookModel,
|
||||
attributes: ['id'],
|
||||
include: [
|
||||
{
|
||||
model: Database.bookAuthorModel,
|
||||
attributes: ['authorId']
|
||||
},
|
||||
{
|
||||
model: Database.bookSeriesModel,
|
||||
attributes: ['seriesId']
|
||||
}
|
||||
]
|
||||
}
|
||||
]
|
||||
})
|
||||
@@ -615,15 +660,30 @@ class LibraryController {
|
||||
}
|
||||
|
||||
Logger.info(`[LibraryController] Removing ${libraryItemsWithIssues.length} items with issues`)
|
||||
const authorIds = []
|
||||
const seriesIds = []
|
||||
for (const libraryItem of libraryItemsWithIssues) {
|
||||
let mediaItemIds = []
|
||||
if (req.library.isPodcast) {
|
||||
mediaItemIds = libraryItem.media.podcastEpisodes.map((pe) => pe.id)
|
||||
} else {
|
||||
mediaItemIds.push(libraryItem.mediaId)
|
||||
if (libraryItem.media.bookAuthors.length) {
|
||||
authorIds.push(...libraryItem.media.bookAuthors.map((ba) => ba.authorId))
|
||||
}
|
||||
if (libraryItem.media.bookSeries.length) {
|
||||
seriesIds.push(...libraryItem.media.bookSeries.map((bs) => bs.seriesId))
|
||||
}
|
||||
}
|
||||
Logger.info(`[LibraryController] Removing library item "${libraryItem.id}" with issue`)
|
||||
await this.handleDeleteLibraryItem(libraryItem.mediaType, libraryItem.id, mediaItemIds)
|
||||
await this.handleDeleteLibraryItem(libraryItem.id, mediaItemIds)
|
||||
}
|
||||
|
||||
if (authorIds.length) {
|
||||
await this.checkRemoveAuthorsWithNoBooks(authorIds)
|
||||
}
|
||||
if (seriesIds.length) {
|
||||
await this.checkRemoveEmptySeries(seriesIds)
|
||||
}
|
||||
|
||||
// Set numIssues to 0 for library filter data
|
||||
|
||||
@@ -96,6 +96,8 @@ class LibraryItemController {
|
||||
* Optional query params:
|
||||
* ?hard=1
|
||||
*
|
||||
* @this {import('../routers/ApiRouter')}
|
||||
*
|
||||
* @param {RequestWithUser} req
|
||||
* @param {Response} res
|
||||
*/
|
||||
@@ -103,14 +105,36 @@ class LibraryItemController {
|
||||
const hardDelete = req.query.hard == 1 // Delete from file system
|
||||
const libraryItemPath = req.libraryItem.path
|
||||
|
||||
const mediaItemIds = req.libraryItem.mediaType === 'podcast' ? req.libraryItem.media.episodes.map((ep) => ep.id) : [req.libraryItem.media.id]
|
||||
await this.handleDeleteLibraryItem(req.libraryItem.mediaType, req.libraryItem.id, mediaItemIds)
|
||||
const mediaItemIds = []
|
||||
const authorIds = []
|
||||
const seriesIds = []
|
||||
if (req.libraryItem.isPodcast) {
|
||||
mediaItemIds.push(...req.libraryItem.media.episodes.map((ep) => ep.id))
|
||||
} else {
|
||||
mediaItemIds.push(req.libraryItem.media.id)
|
||||
if (req.libraryItem.media.metadata.authors?.length) {
|
||||
authorIds.push(...req.libraryItem.media.metadata.authors.map((au) => au.id))
|
||||
}
|
||||
if (req.libraryItem.media.metadata.series?.length) {
|
||||
seriesIds.push(...req.libraryItem.media.metadata.series.map((se) => se.id))
|
||||
}
|
||||
}
|
||||
|
||||
await this.handleDeleteLibraryItem(req.libraryItem.id, mediaItemIds)
|
||||
if (hardDelete) {
|
||||
Logger.info(`[LibraryItemController] Deleting library item from file system at "${libraryItemPath}"`)
|
||||
await fs.remove(libraryItemPath).catch((error) => {
|
||||
Logger.error(`[LibraryItemController] Failed to delete library item from file system at "${libraryItemPath}"`, error)
|
||||
})
|
||||
}
|
||||
|
||||
if (authorIds.length) {
|
||||
await this.checkRemoveAuthorsWithNoBooks(authorIds)
|
||||
}
|
||||
if (seriesIds.length) {
|
||||
await this.checkRemoveEmptySeries(seriesIds)
|
||||
}
|
||||
|
||||
await Database.resetLibraryIssuesFilterData(req.libraryItem.libraryId)
|
||||
res.sendStatus(200)
|
||||
}
|
||||
@@ -212,15 +236,6 @@ class LibraryItemController {
|
||||
if (hasUpdates) {
|
||||
libraryItem.updatedAt = Date.now()
|
||||
|
||||
if (seriesRemoved.length) {
|
||||
// Check remove empty series
|
||||
Logger.debug(`[LibraryItemController] Series was removed from book. Check if series is now empty.`)
|
||||
await this.checkRemoveEmptySeries(
|
||||
libraryItem.media.id,
|
||||
seriesRemoved.map((se) => se.id)
|
||||
)
|
||||
}
|
||||
|
||||
if (isPodcastAutoDownloadUpdated) {
|
||||
this.cronManager.checkUpdatePodcastCron(libraryItem)
|
||||
}
|
||||
@@ -232,10 +247,12 @@ class LibraryItemController {
|
||||
if (authorsRemoved.length) {
|
||||
// Check remove empty authors
|
||||
Logger.debug(`[LibraryItemController] Authors were removed from book. Check if authors are now empty.`)
|
||||
await this.checkRemoveAuthorsWithNoBooks(
|
||||
libraryItem.libraryId,
|
||||
authorsRemoved.map((au) => au.id)
|
||||
)
|
||||
await this.checkRemoveAuthorsWithNoBooks(authorsRemoved.map((au) => au.id))
|
||||
}
|
||||
if (seriesRemoved.length) {
|
||||
// Check remove empty series
|
||||
Logger.debug(`[LibraryItemController] Series were removed from book. Check if series are now empty.`)
|
||||
await this.checkRemoveEmptySeries(seriesRemoved.map((se) => se.id))
|
||||
}
|
||||
}
|
||||
res.json({
|
||||
@@ -450,6 +467,8 @@ class LibraryItemController {
|
||||
* Optional query params:
|
||||
* ?hard=1
|
||||
*
|
||||
* @this {import('../routers/ApiRouter')}
|
||||
*
|
||||
* @param {RequestWithUser} req
|
||||
* @param {Response} res
|
||||
*/
|
||||
@@ -477,14 +496,33 @@ class LibraryItemController {
|
||||
for (const libraryItem of itemsToDelete) {
|
||||
const libraryItemPath = libraryItem.path
|
||||
Logger.info(`[LibraryItemController] (${hardDelete ? 'Hard' : 'Soft'}) deleting Library Item "${libraryItem.media.metadata.title}" with id "${libraryItem.id}"`)
|
||||
const mediaItemIds = libraryItem.mediaType === 'podcast' ? libraryItem.media.episodes.map((ep) => ep.id) : [libraryItem.media.id]
|
||||
await this.handleDeleteLibraryItem(libraryItem.mediaType, libraryItem.id, mediaItemIds)
|
||||
const mediaItemIds = []
|
||||
const seriesIds = []
|
||||
const authorIds = []
|
||||
if (libraryItem.isPodcast) {
|
||||
mediaItemIds.push(...libraryItem.media.episodes.map((ep) => ep.id))
|
||||
} else {
|
||||
mediaItemIds.push(libraryItem.media.id)
|
||||
if (libraryItem.media.metadata.series?.length) {
|
||||
seriesIds.push(...libraryItem.media.metadata.series.map((se) => se.id))
|
||||
}
|
||||
if (libraryItem.media.metadata.authors?.length) {
|
||||
authorIds.push(...libraryItem.media.metadata.authors.map((au) => au.id))
|
||||
}
|
||||
}
|
||||
await this.handleDeleteLibraryItem(libraryItem.id, mediaItemIds)
|
||||
if (hardDelete) {
|
||||
Logger.info(`[LibraryItemController] Deleting library item from file system at "${libraryItemPath}"`)
|
||||
await fs.remove(libraryItemPath).catch((error) => {
|
||||
Logger.error(`[LibraryItemController] Failed to delete library item from file system at "${libraryItemPath}"`, error)
|
||||
})
|
||||
}
|
||||
if (seriesIds.length) {
|
||||
await this.checkRemoveEmptySeries(seriesIds)
|
||||
}
|
||||
if (authorIds.length) {
|
||||
await this.checkRemoveAuthorsWithNoBooks(authorIds)
|
||||
}
|
||||
}
|
||||
|
||||
await Database.resetLibraryIssuesFilterData(libraryId)
|
||||
@@ -494,48 +532,74 @@ class LibraryItemController {
|
||||
/**
|
||||
* POST: /api/items/batch/update
|
||||
*
|
||||
* @this {import('../routers/ApiRouter')}
|
||||
*
|
||||
* @param {RequestWithUser} req
|
||||
* @param {Response} res
|
||||
*/
|
||||
async batchUpdate(req, res) {
|
||||
const updatePayloads = req.body
|
||||
if (!updatePayloads?.length) {
|
||||
return res.sendStatus(500)
|
||||
if (!Array.isArray(updatePayloads) || !updatePayloads.length) {
|
||||
Logger.error(`[LibraryItemController] Batch update failed. Invalid payload`)
|
||||
return res.sendStatus(400)
|
||||
}
|
||||
|
||||
// Ensure that each update payload has a unique library item id
|
||||
const libraryItemIds = [...new Set(updatePayloads.map((up) => up?.id).filter((id) => id))]
|
||||
if (!libraryItemIds.length || libraryItemIds.length !== updatePayloads.length) {
|
||||
Logger.error(`[LibraryItemController] Batch update failed. Each update payload must have a unique library item id`)
|
||||
return res.sendStatus(400)
|
||||
}
|
||||
|
||||
// Get all library items to update
|
||||
const libraryItems = await Database.libraryItemModel.getAllOldLibraryItems({
|
||||
id: libraryItemIds
|
||||
})
|
||||
if (updatePayloads.length !== libraryItems.length) {
|
||||
Logger.error(`[LibraryItemController] Batch update failed. Not all library items found`)
|
||||
return res.sendStatus(404)
|
||||
}
|
||||
|
||||
let itemsUpdated = 0
|
||||
|
||||
const seriesIdsRemoved = []
|
||||
const authorIdsRemoved = []
|
||||
|
||||
for (const updatePayload of updatePayloads) {
|
||||
const mediaPayload = updatePayload.mediaPayload
|
||||
const libraryItem = await Database.libraryItemModel.getOldById(updatePayload.id)
|
||||
if (!libraryItem) return null
|
||||
const libraryItem = libraryItems.find((li) => li.id === updatePayload.id)
|
||||
|
||||
await this.createAuthorsAndSeriesForItemUpdate(mediaPayload, libraryItem.libraryId)
|
||||
|
||||
let seriesRemoved = []
|
||||
if (libraryItem.isBook && mediaPayload.metadata?.series) {
|
||||
const seriesIdsInUpdate = (mediaPayload.metadata?.series || []).map((se) => se.id)
|
||||
seriesRemoved = libraryItem.media.metadata.series.filter((se) => !seriesIdsInUpdate.includes(se.id))
|
||||
if (libraryItem.isBook) {
|
||||
if (Array.isArray(mediaPayload.metadata?.series)) {
|
||||
const seriesIdsInUpdate = mediaPayload.metadata.series.map((se) => se.id)
|
||||
const seriesRemoved = libraryItem.media.metadata.series.filter((se) => !seriesIdsInUpdate.includes(se.id))
|
||||
seriesIdsRemoved.push(...seriesRemoved.map((se) => se.id))
|
||||
}
|
||||
if (Array.isArray(mediaPayload.metadata?.authors)) {
|
||||
const authorIdsInUpdate = mediaPayload.metadata.authors.map((au) => au.id)
|
||||
const authorsRemoved = libraryItem.media.metadata.authors.filter((au) => !authorIdsInUpdate.includes(au.id))
|
||||
authorIdsRemoved.push(...authorsRemoved.map((au) => au.id))
|
||||
}
|
||||
}
|
||||
|
||||
if (libraryItem.media.update(mediaPayload)) {
|
||||
Logger.debug(`[LibraryItemController] Updated library item media ${libraryItem.media.metadata.title}`)
|
||||
|
||||
if (seriesRemoved.length) {
|
||||
// Check remove empty series
|
||||
Logger.debug(`[LibraryItemController] Series was removed from book. Check if series is now empty.`)
|
||||
await this.checkRemoveEmptySeries(
|
||||
libraryItem.media.id,
|
||||
seriesRemoved.map((se) => se.id)
|
||||
)
|
||||
}
|
||||
|
||||
await Database.updateLibraryItem(libraryItem)
|
||||
SocketAuthority.emitter('item_updated', libraryItem.toJSONExpanded())
|
||||
itemsUpdated++
|
||||
}
|
||||
}
|
||||
|
||||
if (seriesIdsRemoved.length) {
|
||||
await this.checkRemoveEmptySeries(seriesIdsRemoved)
|
||||
}
|
||||
if (authorIdsRemoved.length) {
|
||||
await this.checkRemoveAuthorsWithNoBooks(authorIdsRemoved)
|
||||
}
|
||||
|
||||
res.json({
|
||||
success: true,
|
||||
updates: itemsUpdated
|
||||
|
||||
@@ -679,9 +679,9 @@ class MiscController {
|
||||
continue
|
||||
}
|
||||
let updatedValue = settingsUpdate[key]
|
||||
if (updatedValue === '') updatedValue = null
|
||||
if (updatedValue === '' && key != 'authOpenIDSubfolderForRedirectURLs') updatedValue = null
|
||||
let currentValue = currentAuthenticationSettings[key]
|
||||
if (currentValue === '') currentValue = null
|
||||
if (currentValue === '' && key != 'authOpenIDSubfolderForRedirectURLs') currentValue = null
|
||||
|
||||
if (updatedValue !== currentValue) {
|
||||
Logger.debug(`[MiscController] Updating auth settings key "${key}" from "${currentValue}" to "${updatedValue}"`)
|
||||
|
||||
@@ -86,6 +86,7 @@ class CacheManager {
|
||||
}
|
||||
|
||||
async purgeEntityCache(entityId, cachePath) {
|
||||
if (!entityId || !cachePath) return []
|
||||
return Promise.all(
|
||||
(await fs.readdir(cachePath)).reduce((promises, file) => {
|
||||
if (file.startsWith(entityId)) {
|
||||
|
||||
@@ -2,10 +2,11 @@
|
||||
|
||||
Please add a record of every database migration that you create to this file. This will help us keep track of changes to the database schema over time.
|
||||
|
||||
| Server Version | Migration Script Name | Description |
|
||||
| -------------- | ---------------------------- | ------------------------------------------------------------------------------------------------------------- |
|
||||
| v2.15.0 | v2.15.0-series-column-unique | Series must have unique names in the same library |
|
||||
| v2.15.1 | v2.15.1-reindex-nocase | Fix potential db corruption issues due to bad sqlite extension introduced in v2.12.0 |
|
||||
| v2.15.2 | v2.15.2-index-creation | Creates author, series, and podcast episode indexes |
|
||||
| v2.17.0 | v2.17.0-uuid-replacement | Changes the data type of columns with UUIDv4 to UUID matching the associated model |
|
||||
| v2.17.3 | v2.17.3-fk-constraints | Changes the foreign key constraints for tables due to sequelize bug dropping constraints in v2.17.0 migration |
|
||||
| Server Version | Migration Script Name | Description |
|
||||
| -------------- | -------------------------------------------- | ------------------------------------------------------------------------------------------------------------- |
|
||||
| v2.15.0 | v2.15.0-series-column-unique | Series must have unique names in the same library |
|
||||
| v2.15.1 | v2.15.1-reindex-nocase | Fix potential db corruption issues due to bad sqlite extension introduced in v2.12.0 |
|
||||
| v2.15.2 | v2.15.2-index-creation | Creates author, series, and podcast episode indexes |
|
||||
| v2.17.0 | v2.17.0-uuid-replacement | Changes the data type of columns with UUIDv4 to UUID matching the associated model |
|
||||
| v2.17.3 | v2.17.3-fk-constraints | Changes the foreign key constraints for tables due to sequelize bug dropping constraints in v2.17.0 migration |
|
||||
| v2.17.4 | v2.17.4-use-subfolder-for-oidc-redirect-uris | Save subfolder to OIDC redirect URIs to support existing installations |
|
||||
|
||||
@@ -0,0 +1,84 @@
|
||||
/**
|
||||
* @typedef MigrationContext
|
||||
* @property {import('sequelize').QueryInterface} queryInterface - a suquelize QueryInterface object.
|
||||
* @property {import('../Logger')} logger - a Logger object.
|
||||
*
|
||||
* @typedef MigrationOptions
|
||||
* @property {MigrationContext} context - an object containing the migration context.
|
||||
*/
|
||||
|
||||
/**
|
||||
* This upward migration adds an subfolder setting for OIDC redirect URIs.
|
||||
* It updates existing OIDC setups to set this option to None (empty subfolder), so they continue to work as before.
|
||||
* IF OIDC is not enabled, no action is taken (i.e. the subfolder is left undefined),
|
||||
* so that future OIDC setups will use the default subfolder.
|
||||
*
|
||||
* @param {MigrationOptions} options - an object containing the migration context.
|
||||
* @returns {Promise<void>} - A promise that resolves when the migration is complete.
|
||||
*/
|
||||
async function up({ context: { queryInterface, logger } }) {
|
||||
// Upwards migration script
|
||||
logger.info('[2.17.4 migration] UPGRADE BEGIN: 2.17.4-use-subfolder-for-oidc-redirect-uris')
|
||||
|
||||
const serverSettings = await getServerSettings(queryInterface, logger)
|
||||
if (serverSettings.authActiveAuthMethods?.includes('openid')) {
|
||||
logger.info('[2.17.4 migration] OIDC is enabled, adding authOpenIDSubfolderForRedirectURLs to server settings')
|
||||
serverSettings.authOpenIDSubfolderForRedirectURLs = ''
|
||||
await updateServerSettings(queryInterface, logger, serverSettings)
|
||||
} else {
|
||||
logger.info('[2.17.4 migration] OIDC is not enabled, no action required')
|
||||
}
|
||||
|
||||
logger.info('[2.17.4 migration] UPGRADE END: 2.17.4-use-subfolder-for-oidc-redirect-uris')
|
||||
}
|
||||
|
||||
/**
|
||||
* This downward migration script removes the subfolder setting for OIDC redirect URIs.
|
||||
*
|
||||
* @param {MigrationOptions} options - an object containing the migration context.
|
||||
* @returns {Promise<void>} - A promise that resolves when the migration is complete.
|
||||
*/
|
||||
async function down({ context: { queryInterface, logger } }) {
|
||||
// Downward migration script
|
||||
logger.info('[2.17.4 migration] DOWNGRADE BEGIN: 2.17.4-use-subfolder-for-oidc-redirect-uris ')
|
||||
|
||||
// Remove the OIDC subfolder option from the server settings
|
||||
const serverSettings = await getServerSettings(queryInterface, logger)
|
||||
if (serverSettings.authOpenIDSubfolderForRedirectURLs !== undefined) {
|
||||
logger.info('[2.17.4 migration] Removing authOpenIDSubfolderForRedirectURLs from server settings')
|
||||
delete serverSettings.authOpenIDSubfolderForRedirectURLs
|
||||
await updateServerSettings(queryInterface, logger, serverSettings)
|
||||
} else {
|
||||
logger.info('[2.17.4 migration] authOpenIDSubfolderForRedirectURLs not found in server settings, no action required')
|
||||
}
|
||||
|
||||
logger.info('[2.17.4 migration] DOWNGRADE END: 2.17.4-use-subfolder-for-oidc-redirect-uris ')
|
||||
}
|
||||
|
||||
async function getServerSettings(queryInterface, logger) {
|
||||
const result = await queryInterface.sequelize.query('SELECT value FROM settings WHERE key = "server-settings";')
|
||||
if (!result[0].length) {
|
||||
logger.error('[2.17.4 migration] Server settings not found')
|
||||
throw new Error('Server settings not found')
|
||||
}
|
||||
|
||||
let serverSettings = null
|
||||
try {
|
||||
serverSettings = JSON.parse(result[0][0].value)
|
||||
} catch (error) {
|
||||
logger.error('[2.17.4 migration] Error parsing server settings:', error)
|
||||
throw error
|
||||
}
|
||||
|
||||
return serverSettings
|
||||
}
|
||||
|
||||
async function updateServerSettings(queryInterface, logger, serverSettings) {
|
||||
await queryInterface.sequelize.query('UPDATE settings SET value = :value WHERE key = "server-settings";', {
|
||||
replacements: {
|
||||
value: JSON.stringify(serverSettings)
|
||||
}
|
||||
})
|
||||
}
|
||||
|
||||
module.exports = { up, down }
|
||||
@@ -262,7 +262,7 @@ class LibraryItem {
|
||||
* @returns {Promise<LibraryFile>} null if not saved
|
||||
*/
|
||||
async saveMetadata() {
|
||||
if (this.isSavingMetadata) return null
|
||||
if (this.isSavingMetadata || !global.MetadataPath) return null
|
||||
|
||||
this.isSavingMetadata = true
|
||||
|
||||
|
||||
@@ -78,6 +78,7 @@ class ServerSettings {
|
||||
this.authOpenIDMobileRedirectURIs = ['audiobookshelf://oauth']
|
||||
this.authOpenIDGroupClaim = ''
|
||||
this.authOpenIDAdvancedPermsClaim = ''
|
||||
this.authOpenIDSubfolderForRedirectURLs = undefined
|
||||
|
||||
if (settings) {
|
||||
this.construct(settings)
|
||||
@@ -139,6 +140,7 @@ class ServerSettings {
|
||||
this.authOpenIDMobileRedirectURIs = settings.authOpenIDMobileRedirectURIs || ['audiobookshelf://oauth']
|
||||
this.authOpenIDGroupClaim = settings.authOpenIDGroupClaim || ''
|
||||
this.authOpenIDAdvancedPermsClaim = settings.authOpenIDAdvancedPermsClaim || ''
|
||||
this.authOpenIDSubfolderForRedirectURLs = settings.authOpenIDSubfolderForRedirectURLs
|
||||
|
||||
if (!Array.isArray(this.authActiveAuthMethods)) {
|
||||
this.authActiveAuthMethods = ['local']
|
||||
@@ -240,7 +242,8 @@ class ServerSettings {
|
||||
authOpenIDMatchExistingBy: this.authOpenIDMatchExistingBy,
|
||||
authOpenIDMobileRedirectURIs: this.authOpenIDMobileRedirectURIs, // Do not return to client
|
||||
authOpenIDGroupClaim: this.authOpenIDGroupClaim, // Do not return to client
|
||||
authOpenIDAdvancedPermsClaim: this.authOpenIDAdvancedPermsClaim // Do not return to client
|
||||
authOpenIDAdvancedPermsClaim: this.authOpenIDAdvancedPermsClaim, // Do not return to client
|
||||
authOpenIDSubfolderForRedirectURLs: this.authOpenIDSubfolderForRedirectURLs
|
||||
}
|
||||
}
|
||||
|
||||
@@ -286,6 +289,7 @@ class ServerSettings {
|
||||
authOpenIDMobileRedirectURIs: this.authOpenIDMobileRedirectURIs, // Do not return to client
|
||||
authOpenIDGroupClaim: this.authOpenIDGroupClaim, // Do not return to client
|
||||
authOpenIDAdvancedPermsClaim: this.authOpenIDAdvancedPermsClaim, // Do not return to client
|
||||
authOpenIDSubfolderForRedirectURLs: this.authOpenIDSubfolderForRedirectURLs,
|
||||
|
||||
authOpenIDSamplePermissions: User.getSampleAbsPermissions()
|
||||
}
|
||||
|
||||
@@ -348,11 +348,10 @@ class ApiRouter {
|
||||
//
|
||||
/**
|
||||
* Remove library item and associated entities
|
||||
* @param {string} mediaType
|
||||
* @param {string} libraryItemId
|
||||
* @param {string[]} mediaItemIds array of bookId or podcastEpisodeId
|
||||
*/
|
||||
async handleDeleteLibraryItem(mediaType, libraryItemId, mediaItemIds) {
|
||||
async handleDeleteLibraryItem(libraryItemId, mediaItemIds) {
|
||||
const numProgressRemoved = await Database.mediaProgressModel.destroy({
|
||||
where: {
|
||||
mediaItemId: mediaItemIds
|
||||
@@ -362,29 +361,6 @@ class ApiRouter {
|
||||
Logger.info(`[ApiRouter] Removed ${numProgressRemoved} media progress entries for library item "${libraryItemId}"`)
|
||||
}
|
||||
|
||||
// TODO: Remove open sessions for library item
|
||||
|
||||
// Remove series if empty
|
||||
if (mediaType === 'book') {
|
||||
// TODO: update filter data
|
||||
const bookSeries = await Database.bookSeriesModel.findAll({
|
||||
where: {
|
||||
bookId: mediaItemIds[0]
|
||||
},
|
||||
include: {
|
||||
model: Database.seriesModel,
|
||||
include: {
|
||||
model: Database.bookModel
|
||||
}
|
||||
}
|
||||
})
|
||||
for (const bs of bookSeries) {
|
||||
if (bs.series.books.length === 1) {
|
||||
await this.removeEmptySeries(bs.series)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// remove item from playlists
|
||||
const playlistsWithItem = await Database.playlistModel.getPlaylistsForMediaItemIds(mediaItemIds)
|
||||
for (const playlist of playlistsWithItem) {
|
||||
@@ -423,10 +399,13 @@ class ApiRouter {
|
||||
// purge cover cache
|
||||
await CacheManager.purgeCoverCache(libraryItemId)
|
||||
|
||||
const itemMetadataPath = Path.join(global.MetadataPath, 'items', libraryItemId)
|
||||
if (await fs.pathExists(itemMetadataPath)) {
|
||||
Logger.info(`[ApiRouter] Removing item metadata at "${itemMetadataPath}"`)
|
||||
await fs.remove(itemMetadataPath)
|
||||
// Remove metadata file if in /metadata/items dir
|
||||
if (global.MetadataPath) {
|
||||
const itemMetadataPath = Path.join(global.MetadataPath, 'items', libraryItemId)
|
||||
if (await fs.pathExists(itemMetadataPath)) {
|
||||
Logger.info(`[ApiRouter] Removing item metadata at "${itemMetadataPath}"`)
|
||||
await fs.remove(itemMetadataPath)
|
||||
}
|
||||
}
|
||||
|
||||
await Database.libraryItemModel.removeById(libraryItemId)
|
||||
@@ -437,32 +416,27 @@ class ApiRouter {
|
||||
}
|
||||
|
||||
/**
|
||||
* Used when a series is removed from a book
|
||||
* Series is removed if it only has 1 book
|
||||
* After deleting book(s), remove empty series
|
||||
*
|
||||
* @param {string} bookId
|
||||
* @param {string[]} seriesIds
|
||||
*/
|
||||
async checkRemoveEmptySeries(bookId, seriesIds) {
|
||||
async checkRemoveEmptySeries(seriesIds) {
|
||||
if (!seriesIds?.length) return
|
||||
|
||||
const bookSeries = await Database.bookSeriesModel.findAll({
|
||||
const series = await Database.seriesModel.findAll({
|
||||
where: {
|
||||
bookId,
|
||||
seriesId: seriesIds
|
||||
id: seriesIds
|
||||
},
|
||||
include: [
|
||||
{
|
||||
model: Database.seriesModel,
|
||||
include: {
|
||||
model: Database.bookModel
|
||||
}
|
||||
}
|
||||
]
|
||||
attributes: ['id', 'name', 'libraryId'],
|
||||
include: {
|
||||
model: Database.bookModel,
|
||||
attributes: ['id']
|
||||
}
|
||||
})
|
||||
for (const bs of bookSeries) {
|
||||
if (bs.series.books.length === 1) {
|
||||
await this.removeEmptySeries(bs.series)
|
||||
|
||||
for (const s of series) {
|
||||
if (!s.books.length) {
|
||||
await this.removeEmptySeries(s)
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -471,11 +445,10 @@ class ApiRouter {
|
||||
* Remove authors with no books and unset asin, description and imagePath
|
||||
* Note: Other implementation is in BookScanner.checkAuthorsRemovedFromBooks (can be merged)
|
||||
*
|
||||
* @param {string} libraryId
|
||||
* @param {string[]} authorIds
|
||||
* @returns {Promise<void>}
|
||||
*/
|
||||
async checkRemoveAuthorsWithNoBooks(libraryId, authorIds) {
|
||||
async checkRemoveAuthorsWithNoBooks(authorIds) {
|
||||
if (!authorIds?.length) return
|
||||
|
||||
const bookAuthorsToRemove = (
|
||||
@@ -495,10 +468,10 @@ class ApiRouter {
|
||||
},
|
||||
sequelize.where(sequelize.literal('(SELECT count(*) FROM bookAuthors ba WHERE ba.authorId = author.id)'), 0)
|
||||
],
|
||||
attributes: ['id', 'name'],
|
||||
attributes: ['id', 'name', 'libraryId'],
|
||||
raw: true
|
||||
})
|
||||
).map((au) => ({ id: au.id, name: au.name }))
|
||||
).map((au) => ({ id: au.id, name: au.name, libraryId: au.libraryId }))
|
||||
|
||||
if (bookAuthorsToRemove.length) {
|
||||
await Database.authorModel.destroy({
|
||||
@@ -506,7 +479,7 @@ class ApiRouter {
|
||||
id: bookAuthorsToRemove.map((au) => au.id)
|
||||
}
|
||||
})
|
||||
bookAuthorsToRemove.forEach(({ id, name }) => {
|
||||
bookAuthorsToRemove.forEach(({ id, name, libraryId }) => {
|
||||
Database.removeAuthorFromFilterData(libraryId, id)
|
||||
// TODO: Clients were expecting full author in payload but its unnecessary
|
||||
SocketAuthority.emitter('author_removed', { id, libraryId })
|
||||
|
||||
@@ -133,8 +133,8 @@ class AudioFileScanner {
|
||||
|
||||
// Look for disc number in folder path e.g. /Book Title/CD01/audiofile.mp3
|
||||
const pathdir = Path.dirname(path).split('/').pop()
|
||||
if (pathdir && /^cd\d{1,3}$/i.test(pathdir)) {
|
||||
const discFromFolder = Number(pathdir.replace(/cd/i, ''))
|
||||
if (pathdir && /^(cd|dis[ck])\s*\d{1,3}$/i.test(pathdir)) {
|
||||
const discFromFolder = Number(pathdir.replace(/^(cd|dis[ck])\s*/i, ''))
|
||||
if (!isNaN(discFromFolder) && discFromFolder !== null) discNumber = discFromFolder
|
||||
}
|
||||
|
||||
|
||||
@@ -424,8 +424,8 @@ class LibraryScanner {
|
||||
}
|
||||
const folder = library.libraryFolders[0]
|
||||
|
||||
const relFilePaths = folderGroups[folderId].fileUpdates.map((fileUpdate) => fileUpdate.relPath)
|
||||
const fileUpdateGroup = scanUtils.groupFilesIntoLibraryItemPaths(library.mediaType, relFilePaths)
|
||||
const filePathItems = folderGroups[folderId].fileUpdates.map((fileUpdate) => fileUtils.getFilePathItemFromFileUpdate(fileUpdate))
|
||||
const fileUpdateGroup = scanUtils.groupFileItemsIntoLibraryItemDirs(library.mediaType, filePathItems, !!library.settings?.audiobooksOnly)
|
||||
|
||||
if (!Object.keys(fileUpdateGroup).length) {
|
||||
Logger.info(`[LibraryScanner] No important changes to scan for in folder "${folderId}"`)
|
||||
|
||||
@@ -131,11 +131,21 @@ async function readTextFile(path) {
|
||||
}
|
||||
module.exports.readTextFile = readTextFile
|
||||
|
||||
/**
|
||||
* @typedef FilePathItem
|
||||
* @property {string} name - file name e.g. "audiofile.m4b"
|
||||
* @property {string} path - fullpath excluding folder e.g. "Author/Book/audiofile.m4b"
|
||||
* @property {string} reldirpath - path excluding file name e.g. "Author/Book"
|
||||
* @property {string} fullpath - full path e.g. "/audiobooks/Author/Book/audiofile.m4b"
|
||||
* @property {string} extension - file extension e.g. ".m4b"
|
||||
* @property {number} deep - depth of file in directory (0 is file in folder root)
|
||||
*/
|
||||
|
||||
/**
|
||||
* Get array of files inside dir
|
||||
* @param {string} path
|
||||
* @param {string} [relPathToReplace]
|
||||
* @returns {{name:string, path:string, dirpath:string, reldirpath:string, fullpath:string, extension:string, deep:number}[]}
|
||||
* @returns {FilePathItem[]}
|
||||
*/
|
||||
async function recurseFiles(path, relPathToReplace = null) {
|
||||
path = filePathToPOSIX(path)
|
||||
@@ -213,7 +223,6 @@ async function recurseFiles(path, relPathToReplace = null) {
|
||||
return {
|
||||
name: item.name,
|
||||
path: item.fullname.replace(relPathToReplace, ''),
|
||||
dirpath: item.path,
|
||||
reldirpath: isInRoot ? '' : item.path.replace(relPathToReplace, ''),
|
||||
fullpath: item.fullname,
|
||||
extension: item.extension,
|
||||
@@ -228,6 +237,26 @@ async function recurseFiles(path, relPathToReplace = null) {
}
module.exports.recurseFiles = recurseFiles

/**
*
* @param {import('../Watcher').PendingFileUpdate} fileUpdate
* @returns {FilePathItem}
*/
module.exports.getFilePathItemFromFileUpdate = (fileUpdate) => {
let relPath = fileUpdate.relPath
if (relPath.startsWith('/')) relPath = relPath.slice(1)

const dirname = Path.dirname(relPath)
return {
name: Path.basename(relPath),
path: relPath,
reldirpath: dirname === '.' ? '' : dirname,
fullpath: fileUpdate.path,
extension: Path.extname(relPath),
deep: relPath.split('/').length - 1
}
}
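A rough sketch of how the LibraryScanner hunk above feeds a watcher update through this helper and into the new grouping function. The fileUpdate object and the require paths are invented for illustration; the two globals are set the same way the scandir test at the bottom of this diff sets them:

// Illustrative only: a pretend watcher update for a file under a /audiobooks library folder
const fileUtils = require('./server/utils/fileUtils')
const scanUtils = require('./server/utils/scandir')

global.isWin = process.platform === 'win32'
global.ServerSettings = { scannerParseSubtitle: true }

const fakeFileUpdate = { path: '/audiobooks/Author/Book/CD1/audiofile.m4b', relPath: '/Author/Book/CD1/audiofile.m4b' }
const filePathItem = fileUtils.getFilePathItemFromFileUpdate(fakeFileUpdate)
// -> { name: 'audiofile.m4b', path: 'Author/Book/CD1/audiofile.m4b', reldirpath: 'Author/Book/CD1', fullpath: '/audiobooks/Author/Book/CD1/audiofile.m4b', extension: '.m4b', deep: 3 }

const grouped = scanUtils.groupFileItemsIntoLibraryItemDirs('book', [filePathItem], false)
// -> { 'Author/Book': ['CD1/audiofile.m4b'] }
console.log(filePathItem, grouped)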
/**
* Download file from web to local file system
* Uses SSRF filter to prevent internal URLs
@@ -189,7 +189,7 @@ function parseTags(format, verbose) {
file_tag_genre: tryGrabTags(format, 'genre', 'tcon', 'tco'),
file_tag_series: tryGrabTags(format, 'series', 'show', 'mvnm'),
file_tag_seriespart: tryGrabTags(format, 'series-part', 'episode_id', 'mvin', 'part'),
file_tag_grouping: tryGrabTags(format, 'grouping'),
file_tag_grouping: tryGrabTags(format, 'grouping', 'grp1'),
file_tag_isbn: tryGrabTags(format, 'isbn'), // custom
file_tag_language: tryGrabTags(format, 'language', 'lang'),
file_tag_asin: tryGrabTags(format, 'asin', 'audible_asin'), // custom
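The only change here is the extra 'grp1' fallback for the grouping tag. Assuming tryGrabTags simply returns the first of the listed tag names present on the probe result (an assumption, since the helper itself is not part of this diff), the lookup behaves roughly like this:

// Illustrative fallback lookup; the real tryGrabTags helper may differ
function firstPresentTag(tags, ...names) {
  for (const name of names) {
    if (tags[name]) return tags[name]
  }
  return null
}

const probedTags = { grp1: 'My Grouping' } // e.g. a file whose grouping is only exposed under "grp1"
console.log(firstPresentTag(probedTags, 'grouping', 'grp1')) // 'My Grouping'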
@@ -5,7 +5,7 @@ const fsExtra = require('../../libs/fsExtra')

module.exports = {
/**
*
*
* @param {number} year YYYY
* @returns {Promise<PlaybackSession[]>}
*/
@@ -22,7 +22,7 @@ module.exports = {
},

/**
*
*
* @param {number} year YYYY
* @returns {Promise<number>}
*/
@@ -39,7 +39,7 @@ module.exports = {
},

/**
*
*
* @param {number} year YYYY
* @returns {Promise<import('../../models/Book')[]>}
*/
@@ -63,7 +63,7 @@ module.exports = {
},

/**
*
*
* @param {number} year YYYY
*/
async getStatsForYear(year) {
@@ -75,7 +75,7 @@ module.exports = {

for (const book of booksAdded) {
// Grab first 25 that have a cover
if (book.coverPath && !booksWithCovers.includes(book.libraryItem.id) && booksWithCovers.length < 25 && await fsExtra.pathExists(book.coverPath)) {
if (book.coverPath && !booksWithCovers.includes(book.libraryItem.id) && booksWithCovers.length < 25 && (await fsExtra.pathExists(book.coverPath))) {
booksWithCovers.push(book.libraryItem.id)
}
if (book.duration && !isNaN(book.duration)) {
@@ -95,45 +95,54 @@ module.exports = {
const listeningSessions = await this.getListeningSessionsForYear(year)
let totalListeningTime = 0
for (const ls of listeningSessions) {
totalListeningTime += (ls.timeListening || 0)
totalListeningTime += ls.timeListening || 0

const authors = ls.mediaMetadata.authors || []
const authors = ls.mediaMetadata?.authors || []
authors.forEach((au) => {
if (!authorListeningMap[au.name]) authorListeningMap[au.name] = 0
authorListeningMap[au.name] += (ls.timeListening || 0)
authorListeningMap[au.name] += ls.timeListening || 0
})

const narrators = ls.mediaMetadata.narrators || []
const narrators = ls.mediaMetadata?.narrators || []
narrators.forEach((narrator) => {
if (!narratorListeningMap[narrator]) narratorListeningMap[narrator] = 0
narratorListeningMap[narrator] += (ls.timeListening || 0)
narratorListeningMap[narrator] += ls.timeListening || 0
})

// Filter out bad genres like "audiobook" and "audio book"
const genres = (ls.mediaMetadata.genres || []).filter(g => g && !g.toLowerCase().includes('audiobook') && !g.toLowerCase().includes('audio book'))
const genres = (ls.mediaMetadata?.genres || []).filter((g) => g && !g.toLowerCase().includes('audiobook') && !g.toLowerCase().includes('audio book'))
genres.forEach((genre) => {
if (!genreListeningMap[genre]) genreListeningMap[genre] = 0
genreListeningMap[genre] += (ls.timeListening || 0)
genreListeningMap[genre] += ls.timeListening || 0
})
}

let topAuthors = null
topAuthors = Object.keys(authorListeningMap).map(authorName => ({
name: authorName,
time: Math.round(authorListeningMap[authorName])
})).sort((a, b) => b.time - a.time).slice(0, 3)
topAuthors = Object.keys(authorListeningMap)
.map((authorName) => ({
name: authorName,
time: Math.round(authorListeningMap[authorName])
}))
.sort((a, b) => b.time - a.time)
.slice(0, 3)

let topNarrators = null
topNarrators = Object.keys(narratorListeningMap).map(narratorName => ({
name: narratorName,
time: Math.round(narratorListeningMap[narratorName])
})).sort((a, b) => b.time - a.time).slice(0, 3)
topNarrators = Object.keys(narratorListeningMap)
.map((narratorName) => ({
name: narratorName,
time: Math.round(narratorListeningMap[narratorName])
}))
.sort((a, b) => b.time - a.time)
.slice(0, 3)

let topGenres = null
topGenres = Object.keys(genreListeningMap).map(genre => ({
genre,
time: Math.round(genreListeningMap[genre])
})).sort((a, b) => b.time - a.time).slice(0, 3)
topGenres = Object.keys(genreListeningMap)
.map((genre) => ({
genre,
time: Math.round(genreListeningMap[genre])
}))
.sort((a, b) => b.time - a.time)
.slice(0, 3)

// Stats for total books, size and duration for everything added this year or earlier
const [totalStatResultsRow] = await Database.sequelize.query(`SELECT SUM(li.size) AS totalSize, SUM(b.duration) AS totalDuration, COUNT(*) AS totalItems FROM libraryItems li, books b WHERE b.id = li.mediaId AND li.mediaType = 'book' AND li.createdAt < ":nextYear-01-01";`, {
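Most of this hunk is Prettier reformatting; the functional change is the optional chaining on ls.mediaMetadata, so a listening session without metadata no longer throws. A tiny illustration with made-up session objects:

// Illustrative only: why ls.mediaMetadata?.authors matters
const sessions = [
  { timeListening: 60, mediaMetadata: { authors: [{ name: 'Author A' }] } },
  { timeListening: 30, mediaMetadata: null } // previously this session would throw on .authors
]
let total = 0
for (const ls of sessions) {
  total += ls.timeListening || 0
  const authors = ls.mediaMetadata?.authors || []
  authors.forEach((au) => console.log(au.name)) // 'Author A'
}
console.log(total) // 90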
@@ -33,109 +33,8 @@ function checkFilepathIsAudioFile(filepath) {
module.exports.checkFilepathIsAudioFile = checkFilepathIsAudioFile

/**
* TODO: Function needs to be re-done
* @param {string} mediaType
* @param {string[]} paths array of relative file paths
* @returns {Record<string,string[]>} map of files grouped into potential libarary item dirs
*/
function groupFilesIntoLibraryItemPaths(mediaType, paths) {
// Step 1: Clean path, Remove leading "/", Filter out non-media files in root dir
var nonMediaFilePaths = []
var pathsFiltered = paths
.map((path) => {
return path.startsWith('/') ? path.slice(1) : path
})
.filter((path) => {
let parsedPath = Path.parse(path)
// Is not in root dir OR is a book media file
if (parsedPath.dir) {
if (!isMediaFile(mediaType, parsedPath.ext, false)) {
// Seperate out non-media files
nonMediaFilePaths.push(path)
return false
}
return true
} else if (mediaType === 'book' && isMediaFile(mediaType, parsedPath.ext, false)) {
// (book media type supports single file audiobooks/ebooks in root dir)
return true
}
return false
})

// Step 2: Sort by least number of directories
pathsFiltered.sort((a, b) => {
var pathsA = Path.dirname(a).split('/').length
var pathsB = Path.dirname(b).split('/').length
return pathsA - pathsB
})

// Step 3: Group files in dirs
var itemGroup = {}
pathsFiltered.forEach((path) => {
var dirparts = Path.dirname(path)
.split('/')
.filter((p) => !!p && p !== '.') // dirname returns . if no directory
var numparts = dirparts.length
var _path = ''

if (!numparts) {
// Media file in root
itemGroup[path] = path
} else {
// Iterate over directories in path
for (let i = 0; i < numparts; i++) {
var dirpart = dirparts.shift()
_path = Path.posix.join(_path, dirpart)

if (itemGroup[_path]) {
// Directory already has files, add file
var relpath = Path.posix.join(dirparts.join('/'), Path.basename(path))
itemGroup[_path].push(relpath)
return
} else if (!dirparts.length) {
// This is the last directory, create group
itemGroup[_path] = [Path.basename(path)]
return
} else if (dirparts.length === 1 && /^cd\d{1,3}$/i.test(dirparts[0])) {
// Next directory is the last and is a CD dir, create group
itemGroup[_path] = [Path.posix.join(dirparts[0], Path.basename(path))]
return
}
}
}
})

// Step 4: Add in non-media files if they fit into item group
if (nonMediaFilePaths.length) {
for (const nonMediaFilePath of nonMediaFilePaths) {
const pathDir = Path.dirname(nonMediaFilePath)
const filename = Path.basename(nonMediaFilePath)
const dirparts = pathDir.split('/')
const numparts = dirparts.length
let _path = ''

// Iterate over directories in path
for (let i = 0; i < numparts; i++) {
const dirpart = dirparts.shift()
_path = Path.posix.join(_path, dirpart)
if (itemGroup[_path]) {
// Directory is a group
const relpath = Path.posix.join(dirparts.join('/'), filename)
itemGroup[_path].push(relpath)
} else if (!dirparts.length) {
itemGroup[_path] = [filename]
}
}
}
}

return itemGroup
}
module.exports.groupFilesIntoLibraryItemPaths = groupFilesIntoLibraryItemPaths
/**
* @param {string} mediaType
* @param {{name:string, path:string, dirpath:string, reldirpath:string, fullpath:string, extension:string, deep:number}[]} fileItems (see recurseFiles)
* @param {import('./fileUtils').FilePathItem[]} fileItems
* @param {boolean} [audiobooksOnly=false]
* @returns {Record<string,string[]>} map of files grouped into potential libarary item dirs
*/
@@ -147,7 +46,9 @@ function groupFileItemsIntoLibraryItemDirs(mediaType, fileItems, audiobooksOnly

// Step 2: Seperate media files and other files
// - Directories without a media file will not be included
/** @type {import('./fileUtils').FilePathItem[]} */
const mediaFileItems = []
/** @type {import('./fileUtils').FilePathItem[]} */
const otherFileItems = []
itemsFiltered.forEach((item) => {
if (isMediaFile(mediaType, item.extension, audiobooksOnly)) mediaFileItems.push(item)
@@ -179,7 +80,7 @@ function groupFileItemsIntoLibraryItemDirs(mediaType, fileItems, audiobooksOnly
// This is the last directory, create group
libraryItemGroup[_path] = [item.name]
return
} else if (dirparts.length === 1 && /^cd\d{1,3}$/i.test(dirparts[0])) {
} else if (dirparts.length === 1 && /^(cd|dis[ck])\s*\d{1,3}$/i.test(dirparts[0])) {
// Next directory is the last and is a CD dir, create group
libraryItemGroup[_path] = [Path.posix.join(dirparts[0], item.name)]
return
202
test/server/controllers/LibraryItemController.test.js
Normal file
@@ -0,0 +1,202 @@
const { expect } = require('chai')
const { Sequelize } = require('sequelize')
const sinon = require('sinon')

const Database = require('../../../server/Database')
const ApiRouter = require('../../../server/routers/ApiRouter')
const LibraryItemController = require('../../../server/controllers/LibraryItemController')
const ApiCacheManager = require('../../../server/managers/ApiCacheManager')
const RssFeedManager = require('../../../server/managers/RssFeedManager')
const Logger = require('../../../server/Logger')

describe('LibraryItemController', () => {
/** @type {ApiRouter} */
let apiRouter

beforeEach(async () => {
global.ServerSettings = {}
Database.sequelize = new Sequelize({ dialect: 'sqlite', storage: ':memory:', logging: false })
Database.sequelize.uppercaseFirst = (str) => (str ? `${str[0].toUpperCase()}${str.substr(1)}` : '')
await Database.buildModels()

apiRouter = new ApiRouter({
apiCacheManager: new ApiCacheManager(),
rssFeedManager: new RssFeedManager()
})

sinon.stub(Logger, 'info')
})

afterEach(async () => {
sinon.restore()

// Clear all tables
await Database.sequelize.sync({ force: true })
})

describe('checkRemoveAuthorsAndSeries', () => {
let libraryItem1Id
let libraryItem2Id
let author1Id
let author2Id
let author3Id
let series1Id
let series2Id

beforeEach(async () => {
const newLibrary = await Database.libraryModel.create({ name: 'Test Library', mediaType: 'book' })
const newLibraryFolder = await Database.libraryFolderModel.create({ path: '/test', libraryId: newLibrary.id })

const newBook = await Database.bookModel.create({ title: 'Test Book', audioFiles: [], tags: [], narrators: [], genres: [], chapters: [] })
const newLibraryItem = await Database.libraryItemModel.create({ libraryFiles: [], mediaId: newBook.id, mediaType: 'book', libraryId: newLibrary.id, libraryFolderId: newLibraryFolder.id })
libraryItem1Id = newLibraryItem.id

const newBook2 = await Database.bookModel.create({ title: 'Test Book 2', audioFiles: [], tags: [], narrators: [], genres: [], chapters: [] })
const newLibraryItem2 = await Database.libraryItemModel.create({ libraryFiles: [], mediaId: newBook2.id, mediaType: 'book', libraryId: newLibrary.id, libraryFolderId: newLibraryFolder.id })
libraryItem2Id = newLibraryItem2.id

const newAuthor = await Database.authorModel.create({ name: 'Test Author', libraryId: newLibrary.id })
author1Id = newAuthor.id
const newAuthor2 = await Database.authorModel.create({ name: 'Test Author 2', libraryId: newLibrary.id })
author2Id = newAuthor2.id
const newAuthor3 = await Database.authorModel.create({ name: 'Test Author 3', imagePath: '/fake/path/author.png', libraryId: newLibrary.id })
author3Id = newAuthor3.id

// Book 1 has Author 1, Author 2 and Author 3
await Database.bookAuthorModel.create({ bookId: newBook.id, authorId: newAuthor.id })
await Database.bookAuthorModel.create({ bookId: newBook.id, authorId: newAuthor2.id })
await Database.bookAuthorModel.create({ bookId: newBook.id, authorId: newAuthor3.id })

// Book 2 has Author 2
await Database.bookAuthorModel.create({ bookId: newBook2.id, authorId: newAuthor2.id })

const newSeries = await Database.seriesModel.create({ name: 'Test Series', libraryId: newLibrary.id })
series1Id = newSeries.id
const newSeries2 = await Database.seriesModel.create({ name: 'Test Series 2', libraryId: newLibrary.id })
series2Id = newSeries2.id

// Book 1 is in Series 1 and Series 2
await Database.bookSeriesModel.create({ bookId: newBook.id, seriesId: newSeries.id })
await Database.bookSeriesModel.create({ bookId: newBook.id, seriesId: newSeries2.id })

// Book 2 is in Series 2
await Database.bookSeriesModel.create({ bookId: newBook2.id, seriesId: newSeries2.id })
})
it('should remove authors and series with no books on library item delete', async () => {
const oldLibraryItem = await Database.libraryItemModel.getOldById(libraryItem1Id)

const fakeReq = {
query: {},
libraryItem: oldLibraryItem
}
const fakeRes = {
sendStatus: sinon.spy()
}
await LibraryItemController.delete.bind(apiRouter)(fakeReq, fakeRes)

expect(fakeRes.sendStatus.calledWith(200)).to.be.true

// Author 1 should be removed because it has no books
const author1Exists = await Database.authorModel.checkExistsById(author1Id)
expect(author1Exists).to.be.false

// Author 2 should not be removed because it still has Book 2
const author2Exists = await Database.authorModel.checkExistsById(author2Id)
expect(author2Exists).to.be.true

// Author 3 should not be removed because it has an image
const author3Exists = await Database.authorModel.checkExistsById(author3Id)
expect(author3Exists).to.be.true

// Series 1 should be removed because it has no books
const series1Exists = await Database.seriesModel.checkExistsById(series1Id)
expect(series1Exists).to.be.false

// Series 2 should not be removed because it still has Book 2
const series2Exists = await Database.seriesModel.checkExistsById(series2Id)
expect(series2Exists).to.be.true
})

it('should remove authors and series with no books on library item batch delete', async () => {
// Batch delete library item 1
const fakeReq = {
query: {},
user: {
canDelete: true
},
body: {
libraryItemIds: [libraryItem1Id]
}
}
const fakeRes = {
sendStatus: sinon.spy()
}
await LibraryItemController.batchDelete.bind(apiRouter)(fakeReq, fakeRes)

expect(fakeRes.sendStatus.calledWith(200)).to.be.true

// Author 1 should be removed because it has no books
const author1Exists = await Database.authorModel.checkExistsById(author1Id)
expect(author1Exists).to.be.false

// Author 2 should not be removed because it still has Book 2
const author2Exists = await Database.authorModel.checkExistsById(author2Id)
expect(author2Exists).to.be.true

// Author 3 should not be removed because it has an image
const author3Exists = await Database.authorModel.checkExistsById(author3Id)
expect(author3Exists).to.be.true

// Series 1 should be removed because it has no books
const series1Exists = await Database.seriesModel.checkExistsById(series1Id)
expect(series1Exists).to.be.false

// Series 2 should not be removed because it still has Book 2
const series2Exists = await Database.seriesModel.checkExistsById(series2Id)
expect(series2Exists).to.be.true
})

it('should remove authors and series with no books on library item update media', async () => {
const oldLibraryItem = await Database.libraryItemModel.getOldById(libraryItem1Id)

// Update library item 1 remove all authors and series
const fakeReq = {
query: {},
body: {
metadata: {
authors: [],
series: []
}
},
libraryItem: oldLibraryItem
}
const fakeRes = {
json: sinon.spy()
}
await LibraryItemController.updateMedia.bind(apiRouter)(fakeReq, fakeRes)

expect(fakeRes.json.calledOnce).to.be.true

// Author 1 should be removed because it has no books
const author1Exists = await Database.authorModel.checkExistsById(author1Id)
expect(author1Exists).to.be.false

// Author 2 should not be removed because it still has Book 2
const author2Exists = await Database.authorModel.checkExistsById(author2Id)
expect(author2Exists).to.be.true

// Author 3 should not be removed because it has an image
const author3Exists = await Database.authorModel.checkExistsById(author3Id)
expect(author3Exists).to.be.true

// Series 1 should be removed because it has no books
const series1Exists = await Database.seriesModel.checkExistsById(series1Id)
expect(series1Exists).to.be.false

// Series 2 should not be removed because it still has Book 2
const series2Exists = await Database.seriesModel.checkExistsById(series2Id)
expect(series2Exists).to.be.true
})
})
})
@@ -0,0 +1,116 @@
const { expect } = require('chai')
const sinon = require('sinon')
const { up, down } = require('../../../server/migrations/v2.17.4-use-subfolder-for-oidc-redirect-uris')
const { Sequelize } = require('sequelize')
const Logger = require('../../../server/Logger')

describe('Migration v2.17.4-use-subfolder-for-oidc-redirect-uris', () => {
let queryInterface, logger, context

beforeEach(() => {
queryInterface = {
sequelize: {
query: sinon.stub()
}
}
logger = {
info: sinon.stub(),
error: sinon.stub()
}
context = { queryInterface, logger }
})

describe('up', () => {
it('should add authOpenIDSubfolderForRedirectURLs if OIDC is enabled', async () => {
queryInterface.sequelize.query.onFirstCall().resolves([[{ value: JSON.stringify({ authActiveAuthMethods: ['openid'] }) }]])
queryInterface.sequelize.query.onSecondCall().resolves()

await up({ context })

expect(logger.info.calledWith('[2.17.4 migration] UPGRADE BEGIN: 2.17.4-use-subfolder-for-oidc-redirect-uris')).to.be.true
expect(logger.info.calledWith('[2.17.4 migration] OIDC is enabled, adding authOpenIDSubfolderForRedirectURLs to server settings')).to.be.true
expect(queryInterface.sequelize.query.calledTwice).to.be.true
expect(queryInterface.sequelize.query.calledWith('SELECT value FROM settings WHERE key = "server-settings";')).to.be.true
expect(
queryInterface.sequelize.query.calledWith('UPDATE settings SET value = :value WHERE key = "server-settings";', {
replacements: {
value: JSON.stringify({ authActiveAuthMethods: ['openid'], authOpenIDSubfolderForRedirectURLs: '' })
}
})
).to.be.true
expect(logger.info.calledWith('[2.17.4 migration] UPGRADE END: 2.17.4-use-subfolder-for-oidc-redirect-uris')).to.be.true
})

it('should not add authOpenIDSubfolderForRedirectURLs if OIDC is not enabled', async () => {
queryInterface.sequelize.query.onFirstCall().resolves([[{ value: JSON.stringify({ authActiveAuthMethods: [] }) }]])

await up({ context })

expect(logger.info.calledWith('[2.17.4 migration] UPGRADE BEGIN: 2.17.4-use-subfolder-for-oidc-redirect-uris')).to.be.true
expect(logger.info.calledWith('[2.17.4 migration] OIDC is not enabled, no action required')).to.be.true
expect(queryInterface.sequelize.query.calledOnce).to.be.true
expect(queryInterface.sequelize.query.calledWith('SELECT value FROM settings WHERE key = "server-settings";')).to.be.true
expect(logger.info.calledWith('[2.17.4 migration] UPGRADE END: 2.17.4-use-subfolder-for-oidc-redirect-uris')).to.be.true
})

it('should throw an error if server settings cannot be parsed', async () => {
queryInterface.sequelize.query.onFirstCall().resolves([[{ value: 'invalid json' }]])

try {
await up({ context })
} catch (error) {
expect(queryInterface.sequelize.query.calledOnce).to.be.true
expect(queryInterface.sequelize.query.calledWith('SELECT value FROM settings WHERE key = "server-settings";')).to.be.true
expect(logger.error.calledWith('[2.17.4 migration] Error parsing server settings:')).to.be.true
expect(error).to.be.instanceOf(Error)
}
})

it('should throw an error if server settings are not found', async () => {
queryInterface.sequelize.query.onFirstCall().resolves([[]])

try {
await up({ context })
} catch (error) {
expect(queryInterface.sequelize.query.calledOnce).to.be.true
expect(queryInterface.sequelize.query.calledWith('SELECT value FROM settings WHERE key = "server-settings";')).to.be.true
expect(logger.error.calledWith('[2.17.4 migration] Server settings not found')).to.be.true
expect(error).to.be.instanceOf(Error)
}
})
})

describe('down', () => {
it('should remove authOpenIDSubfolderForRedirectURLs if it exists', async () => {
queryInterface.sequelize.query.onFirstCall().resolves([[{ value: JSON.stringify({ authOpenIDSubfolderForRedirectURLs: '' }) }]])
queryInterface.sequelize.query.onSecondCall().resolves()

await down({ context })

expect(logger.info.calledWith('[2.17.4 migration] DOWNGRADE BEGIN: 2.17.4-use-subfolder-for-oidc-redirect-uris ')).to.be.true
expect(logger.info.calledWith('[2.17.4 migration] Removing authOpenIDSubfolderForRedirectURLs from server settings')).to.be.true
expect(queryInterface.sequelize.query.calledTwice).to.be.true
expect(queryInterface.sequelize.query.calledWith('SELECT value FROM settings WHERE key = "server-settings";')).to.be.true
expect(
queryInterface.sequelize.query.calledWith('UPDATE settings SET value = :value WHERE key = "server-settings";', {
replacements: {
value: JSON.stringify({})
}
})
).to.be.true
expect(logger.info.calledWith('[2.17.4 migration] DOWNGRADE END: 2.17.4-use-subfolder-for-oidc-redirect-uris ')).to.be.true
})

it('should not remove authOpenIDSubfolderForRedirectURLs if it does not exist', async () => {
queryInterface.sequelize.query.onFirstCall().resolves([[{ value: JSON.stringify({}) }]])

await down({ context })

expect(logger.info.calledWith('[2.17.4 migration] DOWNGRADE BEGIN: 2.17.4-use-subfolder-for-oidc-redirect-uris ')).to.be.true
expect(logger.info.calledWith('[2.17.4 migration] authOpenIDSubfolderForRedirectURLs not found in server settings, no action required')).to.be.true
expect(queryInterface.sequelize.query.calledOnce).to.be.true
expect(queryInterface.sequelize.query.calledWith('SELECT value FROM settings WHERE key = "server-settings";')).to.be.true
expect(logger.info.calledWith('[2.17.4 migration] DOWNGRADE END: 2.17.4-use-subfolder-for-oidc-redirect-uris ')).to.be.true
})
})
})
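From these expectations, the migration's up() presumably reads the server-settings row, parses it, and only writes back when OIDC is active. A rough reconstruction inferred from the tests above (not copied from the actual migration file):

// Rough sketch of up(), inferred only from the test expectations above
async function up({ context: { queryInterface, logger } }) {
  logger.info('[2.17.4 migration] UPGRADE BEGIN: 2.17.4-use-subfolder-for-oidc-redirect-uris')
  const [rows] = await queryInterface.sequelize.query('SELECT value FROM settings WHERE key = "server-settings";')
  if (!rows.length) {
    logger.error('[2.17.4 migration] Server settings not found')
    throw new Error('Server settings not found')
  }
  let serverSettings
  try {
    serverSettings = JSON.parse(rows[0].value)
  } catch (error) {
    logger.error('[2.17.4 migration] Error parsing server settings:', error)
    throw error
  }
  if (serverSettings.authActiveAuthMethods?.includes('openid')) {
    logger.info('[2.17.4 migration] OIDC is enabled, adding authOpenIDSubfolderForRedirectURLs to server settings')
    serverSettings.authOpenIDSubfolderForRedirectURLs = ''
    await queryInterface.sequelize.query('UPDATE settings SET value = :value WHERE key = "server-settings";', {
      replacements: { value: JSON.stringify(serverSettings) }
    })
  } else {
    logger.info('[2.17.4 migration] OIDC is not enabled, no action required')
  }
  logger.info('[2.17.4 migration] UPGRADE END: 2.17.4-use-subfolder-for-oidc-redirect-uris')
}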
52
test/server/utils/scandir.test.js
Normal file
@@ -0,0 +1,52 @@
const Path = require('path')
const chai = require('chai')
const expect = chai.expect
const scanUtils = require('../../../server/utils/scandir')

describe('scanUtils', async () => {
it('should properly group files into potential book library items', async () => {
global.isWin = process.platform === 'win32'
global.ServerSettings = {
scannerParseSubtitle: true
}

const filePaths = [
'randomfile.txt', // Should be ignored because it's not a book media file
'Book1.m4b', // Root single file audiobook
'Book2/audiofile.m4b',
'Book2/disk 001/audiofile.m4b',
'Book2/disk 002/audiofile.m4b',
'Author/Book3/audiofile.mp3',
'Author/Book3/Disc 1/audiofile.mp3',
'Author/Book3/Disc 2/audiofile.mp3',
'Author/Series/Book4/cover.jpg',
'Author/Series/Book4/CD1/audiofile.mp3',
'Author/Series/Book4/CD2/audiofile.mp3',
'Author/Series2/Book5/deeply/nested/cd 01/audiofile.mp3',
'Author/Series2/Book5/deeply/nested/cd 02/audiofile.mp3',
'Author/Series2/Book5/randomfile.js' // Should be ignored because it's not a book media file
]

// Create fileItems to match the format of fileUtils.recurseFiles
const fileItems = []
for (const filePath of filePaths) {
const dirname = Path.dirname(filePath)
fileItems.push({
name: Path.basename(filePath),
reldirpath: dirname === '.' ? '' : dirname,
extension: Path.extname(filePath),
deep: filePath.split('/').length - 1
})
}

const libraryItemGrouping = scanUtils.groupFileItemsIntoLibraryItemDirs('book', fileItems, false)

expect(libraryItemGrouping).to.deep.equal({
'Book1.m4b': 'Book1.m4b',
Book2: ['audiofile.m4b', 'disk 001/audiofile.m4b', 'disk 002/audiofile.m4b'],
'Author/Book3': ['audiofile.mp3', 'Disc 1/audiofile.mp3', 'Disc 2/audiofile.mp3'],
'Author/Series/Book4': ['CD1/audiofile.mp3', 'CD2/audiofile.mp3', 'cover.jpg'],
'Author/Series2/Book5/deeply/nested': ['cd 01/audiofile.mp3', 'cd 02/audiofile.mp3']
})
})
})