Merge pull request #1907 from advplyr/sqlite_2

Migration to use sqlite3
This commit is contained in:
advplyr 2023-07-14 15:11:23 -05:00 committed by GitHub
commit b3991574c7
No known key found for this signature in database
GPG Key ID: 4AEE18F83AFDEB23
134 changed files with 7483 additions and 5284 deletions

View File

@ -14,7 +14,10 @@ RUN apk update && \
apk add --no-cache --update \ apk add --no-cache --update \
curl \ curl \
tzdata \ tzdata \
ffmpeg ffmpeg \
make \
python3 \
g++
COPY --from=tone /usr/local/bin/tone /usr/local/bin/ COPY --from=tone /usr/local/bin/tone /usr/local/bin/
COPY --from=build /client/dist /client/dist COPY --from=build /client/dist /client/dist
@ -23,6 +26,8 @@ COPY server server
RUN npm ci --only=production RUN npm ci --only=production
RUN apk del make python3 g++
EXPOSE 80 EXPOSE 80
HEALTHCHECK \ HEALTHCHECK \
--interval=30s \ --interval=30s \

View File

@ -756,6 +756,8 @@ export default {
this.store.commit('globals/setConfirmPrompt', payload) this.store.commit('globals/setConfirmPrompt', payload)
}, },
removeSeriesFromContinueListening() { removeSeriesFromContinueListening() {
if (!this.series) return
const axios = this.$axios || this.$nuxt.$axios const axios = this.$axios || this.$nuxt.$axios
this.processing = true this.processing = true
axios axios

View File

@ -271,12 +271,16 @@ export default {
let filterValue = null let filterValue = null
if (parts.length > 1) { if (parts.length > 1) {
const decoded = this.$decode(parts[1]) const decoded = this.$decode(parts[1])
if (decoded.startsWith('aut_')) { if (parts[0] === 'authors') {
const author = this.authors.find((au) => au.id == decoded) const author = this.authors.find((au) => au.id == decoded)
if (author) filterValue = author.name if (author) filterValue = author.name
} else if (decoded.startsWith('ser_')) { } else if (parts[0] === 'series') {
const series = this.series.find((se) => se.id == decoded) if (decoded === 'no-series') {
if (series) filterValue = series.name filterValue = this.$strings.MessageNoSeries
} else {
const series = this.series.find((se) => se.id == decoded)
if (series) filterValue = series.name
}
} else { } else {
filterValue = decoded filterValue = decoded
} }

View File

@ -2,7 +2,7 @@
<modals-modal v-model="show" name="listening-session-modal" :processing="processing" :width="700" :height="'unset'"> <modals-modal v-model="show" name="listening-session-modal" :processing="processing" :width="700" :height="'unset'">
<template #outer> <template #outer>
<div class="absolute top-0 left-0 p-5 w-2/3 overflow-hidden"> <div class="absolute top-0 left-0 p-5 w-2/3 overflow-hidden">
<p class="text-3xl text-white truncate">{{ $strings.HeaderSession }} {{ _session.id }}</p> <p class="text-lg md:text-2xl text-white truncate">{{ $strings.HeaderSession }} {{ _session.id }}</p>
</div> </div>
</template> </template>
<div ref="container" class="w-full rounded-lg bg-bg box-shadow-md overflow-y-auto overflow-x-hidden p-6" style="max-height: 80vh"> <div ref="container" class="w-full rounded-lg bg-bg box-shadow-md overflow-y-auto overflow-x-hidden p-6" style="max-height: 80vh">
@ -50,19 +50,19 @@
<p class="font-semibold uppercase text-xs text-gray-400 tracking-wide mt-6 mb-2">{{ $strings.LabelItem }}</p> <p class="font-semibold uppercase text-xs text-gray-400 tracking-wide mt-6 mb-2">{{ $strings.LabelItem }}</p>
<div v-if="_session.libraryId" class="flex items-center -mx-1 mb-1"> <div v-if="_session.libraryId" class="flex items-center -mx-1 mb-1">
<div class="w-40 px-1 text-gray-200">{{ $strings.LabelLibrary }} Id</div> <div class="w-40 px-1 text-gray-200">{{ $strings.LabelLibrary }} Id</div>
<div class="px-1"> <div class="px-1 text-xs">
{{ _session.libraryId }} {{ _session.libraryId }}
</div> </div>
</div> </div>
<div class="flex items-center -mx-1 mb-1"> <div class="flex items-center -mx-1 mb-1">
<div class="w-40 px-1 text-gray-200">{{ $strings.LabelLibraryItem }} Id</div> <div class="w-40 px-1 text-gray-200">{{ $strings.LabelLibraryItem }} Id</div>
<div class="px-1"> <div class="px-1 text-xs">
{{ _session.libraryItemId }} {{ _session.libraryItemId }}
</div> </div>
</div> </div>
<div v-if="_session.episodeId" class="flex items-center -mx-1 mb-1"> <div v-if="_session.episodeId" class="flex items-center -mx-1 mb-1">
<div class="w-40 px-1 text-gray-200">{{ $strings.LabelEpisode }} Id</div> <div class="w-40 px-1 text-gray-200">{{ $strings.LabelEpisode }} Id</div>
<div class="px-1"> <div class="px-1 text-xs">
{{ _session.episodeId }} {{ _session.episodeId }}
</div> </div>
</div> </div>
@ -81,7 +81,7 @@
</div> </div>
<div class="w-full md:w-1/3"> <div class="w-full md:w-1/3">
<p class="font-semibold uppercase text-xs text-gray-400 tracking-wide mb-2 mt-6 md:mt-0">{{ $strings.LabelUser }}</p> <p class="font-semibold uppercase text-xs text-gray-400 tracking-wide mb-2 mt-6 md:mt-0">{{ $strings.LabelUser }}</p>
<p class="mb-1">{{ _session.userId }}</p> <p class="mb-1 text-xs">{{ _session.userId }}</p>
<p class="font-semibold uppercase text-xs text-gray-400 tracking-wide mt-6 mb-2">{{ $strings.LabelMediaPlayer }}</p> <p class="font-semibold uppercase text-xs text-gray-400 tracking-wide mt-6 mb-2">{{ $strings.LabelMediaPlayer }}</p>
<p class="mb-1">{{ playMethodName }}</p> <p class="mb-1">{{ playMethodName }}</p>

View File

@ -21,15 +21,14 @@
<td class="hidden sm:table-cell font-mono md:text-sm text-xs">{{ $bytesPretty(backup.fileSize) }}</td> <td class="hidden sm:table-cell font-mono md:text-sm text-xs">{{ $bytesPretty(backup.fileSize) }}</td>
<td> <td>
<div class="w-full flex flex-row items-center justify-center"> <div class="w-full flex flex-row items-center justify-center">
<ui-btn v-if="backup.serverVersion" small color="primary" @click="applyBackup(backup)">{{ $strings.ButtonRestore }}</ui-btn> <ui-btn v-if="backup.serverVersion && backup.key" small color="primary" @click="applyBackup(backup)">{{ $strings.ButtonRestore }}</ui-btn>
<button v-if="backup.serverVersion" aria-label="Download Backup" class="inline-flex material-icons text-xl mx-1 mt-1 text-white/70 hover:text-white/100" @click.stop="downloadBackup(backup)">download</button>
<ui-tooltip v-else text="This backup was created with an old version of audiobookshelf no longer supported" direction="bottom" class="mx-2 flex items-center"> <ui-tooltip v-else text="This backup was created with an old version of audiobookshelf no longer supported" direction="bottom" class="mx-2 flex items-center">
<span class="material-icons-outlined text-2xl text-error">error_outline</span> <span class="material-icons-outlined text-2xl text-error">error_outline</span>
</ui-tooltip> </ui-tooltip>
<button v-if="backup.serverVersion" aria-label="Delete Backup" class="inline-flex material-icons text-xl mx-1 text-white/70 hover:text-error" @click="deleteBackupClick(backup)">delete</button> <button aria-label="Download Backup" class="inline-flex material-icons text-xl mx-1 mt-1 text-white/70 hover:text-white/100" @click.stop="downloadBackup(backup)">download</button>
<button aria-label="Delete Backup" class="inline-flex material-icons text-xl mx-1 text-white/70 hover:text-error" @click="deleteBackupClick(backup)">delete</button>
</div> </div>
</td> </td>
</tr> </tr>
@ -95,8 +94,9 @@ export default {
}) })
.catch((error) => { .catch((error) => {
this.isBackingUp = false this.isBackingUp = false
console.error('Failed', error) console.error('Failed to apply backup', error)
this.$toast.error(this.$strings.ToastBackupRestoreFailed) const errorMsg = error.response.data || this.$strings.ToastBackupRestoreFailed
this.$toast.error(errorMsg)
}) })
}, },
deleteBackupClick(backup) { deleteBackupClick(backup) {

View File

@ -107,7 +107,7 @@ export default {
this.$toast.error('Invalid number of backups to keep') this.$toast.error('Invalid number of backups to keep')
return return
} }
var updatePayload = { const updatePayload = {
backupSchedule: this.enableBackups ? this.cronExpression : false, backupSchedule: this.enableBackups ? this.cronExpression : false,
backupsToKeep: Number(this.backupsToKeep), backupsToKeep: Number(this.backupsToKeep),
maxBackupSize: Number(this.maxBackupSize) maxBackupSize: Number(this.maxBackupSize)

View File

@ -192,7 +192,6 @@
<div class="flex-grow" /> <div class="flex-grow" />
<ui-btn color="bg" small :padding-x="4" class="mr-2 text-xs md:text-sm" :loading="isPurgingCache" @click.stop="purgeCache">{{ $strings.ButtonPurgeAllCache }}</ui-btn> <ui-btn color="bg" small :padding-x="4" class="mr-2 text-xs md:text-sm" :loading="isPurgingCache" @click.stop="purgeCache">{{ $strings.ButtonPurgeAllCache }}</ui-btn>
<ui-btn color="bg" small :padding-x="4" class="mr-2 text-xs md:text-sm" :loading="isPurgingCache" @click.stop="purgeItemsCache">{{ $strings.ButtonPurgeItemsCache }}</ui-btn> <ui-btn color="bg" small :padding-x="4" class="mr-2 text-xs md:text-sm" :loading="isPurgingCache" @click.stop="purgeItemsCache">{{ $strings.ButtonPurgeItemsCache }}</ui-btn>
<ui-btn color="bg" small :padding-x="4" class="mr-2 text-xs md:text-sm" :loading="isResettingLibraryItems" @click="resetLibraryItems">{{ $strings.ButtonRemoveAllLibraryItems }}</ui-btn>
</div> </div>
<div class="flex items-center py-4"> <div class="flex items-center py-4">
@ -368,23 +367,6 @@ export default {
this.homepageUseBookshelfView = this.newServerSettings.homeBookshelfView != this.$constants.BookshelfView.DETAIL this.homepageUseBookshelfView = this.newServerSettings.homeBookshelfView != this.$constants.BookshelfView.DETAIL
this.useBookshelfView = this.newServerSettings.bookshelfView != this.$constants.BookshelfView.DETAIL this.useBookshelfView = this.newServerSettings.bookshelfView != this.$constants.BookshelfView.DETAIL
}, },
resetLibraryItems() {
if (confirm(this.$strings.MessageRemoveAllItemsWarning)) {
this.isResettingLibraryItems = true
this.$axios
.$delete('/api/items/all')
.then(() => {
this.isResettingLibraryItems = false
this.$toast.success('Successfully reset items')
location.reload()
})
.catch((error) => {
console.error('failed to reset items', error)
this.isResettingLibraryItems = false
this.$toast.error('Failed to reset items - manually remove the /config/libraryItems folder')
})
}
},
purgeCache() { purgeCache() {
this.showConfirmPurgeCache = true this.showConfirmPurgeCache = true
}, },

View File

@ -47,12 +47,6 @@
<div class="py-2"> <div class="py-2">
<h1 class="text-lg mb-2 text-white text-opacity-90 px-2 sm:px-0">{{ $strings.HeaderSavedMediaProgress }}</h1> <h1 class="text-lg mb-2 text-white text-opacity-90 px-2 sm:px-0">{{ $strings.HeaderSavedMediaProgress }}</h1>
<div v-if="mediaProgressWithoutMedia.length" class="flex items-center py-2 mb-2">
<p class="text-error">User has media progress for {{ mediaProgressWithoutMedia.length }} items that no longer exist.</p>
<div class="flex-grow" />
<ui-btn small :loading="purgingMediaProgress" @click.stop="purgeMediaProgress">{{ $strings.ButtonPurgeMediaProgress }}</ui-btn>
</div>
<table v-if="mediaProgressWithMedia.length" class="userAudiobooksTable"> <table v-if="mediaProgressWithMedia.length" class="userAudiobooksTable">
<tr class="bg-primary bg-opacity-40"> <tr class="bg-primary bg-opacity-40">
<th class="w-16 text-left">{{ $strings.LabelItem }}</th> <th class="w-16 text-left">{{ $strings.LabelItem }}</th>
@ -111,8 +105,7 @@ export default {
data() { data() {
return { return {
listeningSessions: {}, listeningSessions: {},
listeningStats: {}, listeningStats: {}
purgingMediaProgress: false
} }
}, },
computed: { computed: {
@ -134,9 +127,6 @@ export default {
mediaProgressWithMedia() { mediaProgressWithMedia() {
return this.mediaProgress.filter((mp) => mp.media) return this.mediaProgress.filter((mp) => mp.media)
}, },
mediaProgressWithoutMedia() {
return this.mediaProgress.filter((mp) => !mp.media)
},
totalListeningTime() { totalListeningTime() {
return this.listeningStats.totalTime || 0 return this.listeningStats.totalTime || 0
}, },
@ -176,24 +166,6 @@ export default {
return [] return []
}) })
console.log('Loaded user listening data', this.listeningSessions, this.listeningStats) console.log('Loaded user listening data', this.listeningSessions, this.listeningStats)
},
purgeMediaProgress() {
this.purgingMediaProgress = true
this.$axios
.$post(`/api/users/${this.user.id}/purge-media-progress`)
.then((updatedUser) => {
console.log('Updated user', updatedUser)
this.$toast.success('Media progress purged')
this.user = updatedUser
})
.catch((error) => {
console.error('Failed to purge media progress', error)
this.$toast.error('Failed to purge media progress')
})
.finally(() => {
this.purgingMediaProgress = false
})
} }
}, },
mounted() { mounted() {

View File

@ -191,6 +191,7 @@ export default class PlayerHandler {
const payload = { const payload = {
deviceInfo: { deviceInfo: {
clientName: 'Abs Web',
deviceId: this.getDeviceId() deviceId: this.getDeviceId()
}, },
supportedMimeTypes: this.player.playableMimeTypes, supportedMimeTypes: this.player.playableMimeTypes,

View File

@ -592,7 +592,6 @@
"MessagePlaylistCreateFromCollection": "Erstelle eine Wiedergabeliste aus der Sammlung", "MessagePlaylistCreateFromCollection": "Erstelle eine Wiedergabeliste aus der Sammlung",
"MessagePodcastHasNoRSSFeedForMatching": "Podcast hat keine RSS-Feed-Url welche für den Online-Abgleich verwendet werden kann", "MessagePodcastHasNoRSSFeedForMatching": "Podcast hat keine RSS-Feed-Url welche für den Online-Abgleich verwendet werden kann",
"MessageQuickMatchDescription": "Füllt leere Details und Titelbilder mit dem ersten Treffer aus '{0}'. Überschreibt keine Details, es sei denn, die Server-Einstellung \"Passende Metadaten bevorzugen\" ist aktiviert.", "MessageQuickMatchDescription": "Füllt leere Details und Titelbilder mit dem ersten Treffer aus '{0}'. Überschreibt keine Details, es sei denn, die Server-Einstellung \"Passende Metadaten bevorzugen\" ist aktiviert.",
"MessageRemoveAllItemsWarning": "WARNUNG! Bei dieser Aktion werden alle Bibliotheksobjekte aus der Datenbank entfernt, einschließlich aller Aktualisierungen oder Online-Abgleichs, die Sie vorgenommen haben. Ihre eigentlichen Dateien bleiben davon unberührt. Sind Sie sicher?",
"MessageRemoveChapter": "Kapitel löschen", "MessageRemoveChapter": "Kapitel löschen",
"MessageRemoveEpisodes": "Entferne {0} Episode(n)", "MessageRemoveEpisodes": "Entferne {0} Episode(n)",
"MessageRemoveFromPlayerQueue": "Aus der Abspielwarteliste löschen", "MessageRemoveFromPlayerQueue": "Aus der Abspielwarteliste löschen",

View File

@ -592,7 +592,6 @@
"MessagePlaylistCreateFromCollection": "Create playlist from collection", "MessagePlaylistCreateFromCollection": "Create playlist from collection",
"MessagePodcastHasNoRSSFeedForMatching": "Podcast has no RSS feed url to use for matching", "MessagePodcastHasNoRSSFeedForMatching": "Podcast has no RSS feed url to use for matching",
"MessageQuickMatchDescription": "Populate empty item details & cover with first match result from '{0}'. Does not overwrite details unless 'Prefer matched metadata' server setting is enabled.", "MessageQuickMatchDescription": "Populate empty item details & cover with first match result from '{0}'. Does not overwrite details unless 'Prefer matched metadata' server setting is enabled.",
"MessageRemoveAllItemsWarning": "WARNING! This action will remove all library items from the database including any updates or matches you have made. This does not do anything to your actual files. Are you sure?",
"MessageRemoveChapter": "Remove chapter", "MessageRemoveChapter": "Remove chapter",
"MessageRemoveEpisodes": "Remove {0} episode(s)", "MessageRemoveEpisodes": "Remove {0} episode(s)",
"MessageRemoveFromPlayerQueue": "Remove from player queue", "MessageRemoveFromPlayerQueue": "Remove from player queue",

View File

@ -592,7 +592,6 @@
"MessagePlaylistCreateFromCollection": "Crear lista de reproducción a partir de colección", "MessagePlaylistCreateFromCollection": "Crear lista de reproducción a partir de colección",
"MessagePodcastHasNoRSSFeedForMatching": "El podcast no tiene una URL de fuente RSS que pueda usar que coincida", "MessagePodcastHasNoRSSFeedForMatching": "El podcast no tiene una URL de fuente RSS que pueda usar que coincida",
"MessageQuickMatchDescription": "Rellenar detalles de elementos vacíos y portada con los primeros resultados de '{0}'. No sobrescribe los detalles a menos que la configuración 'Prefer matched metadata' del servidor este habilita.", "MessageQuickMatchDescription": "Rellenar detalles de elementos vacíos y portada con los primeros resultados de '{0}'. No sobrescribe los detalles a menos que la configuración 'Prefer matched metadata' del servidor este habilita.",
"MessageRemoveAllItemsWarning": "ADVERTENCIA! Esta acción eliminará todos los elementos de la biblioteca de la base de datos incluyendo cualquier actualización o match. Esto no hace nada a sus archivos reales. Esta seguro que desea continuar?",
"MessageRemoveChapter": "Remover capítulos", "MessageRemoveChapter": "Remover capítulos",
"MessageRemoveEpisodes": "Remover {0} episodio(s)", "MessageRemoveEpisodes": "Remover {0} episodio(s)",
"MessageRemoveFromPlayerQueue": "Romover la cola de reporduccion", "MessageRemoveFromPlayerQueue": "Romover la cola de reporduccion",

View File

@ -592,7 +592,6 @@
"MessagePlaylistCreateFromCollection": "Créer une liste de lecture depuis la collection", "MessagePlaylistCreateFromCollection": "Créer une liste de lecture depuis la collection",
"MessagePodcastHasNoRSSFeedForMatching": "Le Podcast na pas dURL de flux RSS à utiliser pour la correspondance", "MessagePodcastHasNoRSSFeedForMatching": "Le Podcast na pas dURL de flux RSS à utiliser pour la correspondance",
"MessageQuickMatchDescription": "Renseigne les détails manquants ainsi que la couverture avec la première correspondance de « {0} ». Nécrase pas les données présentes à moins que le paramètre « Préférer les Métadonnées par correspondance » soit activé.", "MessageQuickMatchDescription": "Renseigne les détails manquants ainsi que la couverture avec la première correspondance de « {0} ». Nécrase pas les données présentes à moins que le paramètre « Préférer les Métadonnées par correspondance » soit activé.",
"MessageRemoveAllItemsWarning": "ATTENTION ! Cette action supprimera toute la base de données de la bibliothèque ainsi que les mises à jour ou correspondances qui auraient été effectuées. Cela na aucune incidence sur les fichiers de la bibliothèque. Souhaitez-vous continuer ?",
"MessageRemoveChapter": "Supprimer le chapitre", "MessageRemoveChapter": "Supprimer le chapitre",
"MessageRemoveEpisodes": "Suppression de {0} épisode(s)", "MessageRemoveEpisodes": "Suppression de {0} épisode(s)",
"MessageRemoveFromPlayerQueue": "Supprimer de la liste découte", "MessageRemoveFromPlayerQueue": "Supprimer de la liste découte",

View File

@ -592,7 +592,6 @@
"MessagePlaylistCreateFromCollection": "Create playlist from collection", "MessagePlaylistCreateFromCollection": "Create playlist from collection",
"MessagePodcastHasNoRSSFeedForMatching": "Podcast has no RSS feed url to use for matching", "MessagePodcastHasNoRSSFeedForMatching": "Podcast has no RSS feed url to use for matching",
"MessageQuickMatchDescription": "Populate empty item details & cover with first match result from '{0}'. Does not overwrite details unless 'Prefer matched metadata' server setting is enabled.", "MessageQuickMatchDescription": "Populate empty item details & cover with first match result from '{0}'. Does not overwrite details unless 'Prefer matched metadata' server setting is enabled.",
"MessageRemoveAllItemsWarning": "WARNING! This action will remove all library items from the database including any updates or matches you have made. This does not do anything to your actual files. Are you sure?",
"MessageRemoveChapter": "Remove chapter", "MessageRemoveChapter": "Remove chapter",
"MessageRemoveEpisodes": "Remove {0} episode(s)", "MessageRemoveEpisodes": "Remove {0} episode(s)",
"MessageRemoveFromPlayerQueue": "Remove from player queue", "MessageRemoveFromPlayerQueue": "Remove from player queue",

View File

@ -592,7 +592,6 @@
"MessagePlaylistCreateFromCollection": "Create playlist from collection", "MessagePlaylistCreateFromCollection": "Create playlist from collection",
"MessagePodcastHasNoRSSFeedForMatching": "Podcast has no RSS feed url to use for matching", "MessagePodcastHasNoRSSFeedForMatching": "Podcast has no RSS feed url to use for matching",
"MessageQuickMatchDescription": "Populate empty item details & cover with first match result from '{0}'. Does not overwrite details unless 'Prefer matched metadata' server setting is enabled.", "MessageQuickMatchDescription": "Populate empty item details & cover with first match result from '{0}'. Does not overwrite details unless 'Prefer matched metadata' server setting is enabled.",
"MessageRemoveAllItemsWarning": "WARNING! This action will remove all library items from the database including any updates or matches you have made. This does not do anything to your actual files. Are you sure?",
"MessageRemoveChapter": "Remove chapter", "MessageRemoveChapter": "Remove chapter",
"MessageRemoveEpisodes": "Remove {0} episode(s)", "MessageRemoveEpisodes": "Remove {0} episode(s)",
"MessageRemoveFromPlayerQueue": "Remove from player queue", "MessageRemoveFromPlayerQueue": "Remove from player queue",

View File

@ -592,7 +592,6 @@
"MessagePlaylistCreateFromCollection": "Create playlist from collection", "MessagePlaylistCreateFromCollection": "Create playlist from collection",
"MessagePodcastHasNoRSSFeedForMatching": "Podcast nema RSS feed url za matchanje", "MessagePodcastHasNoRSSFeedForMatching": "Podcast nema RSS feed url za matchanje",
"MessageQuickMatchDescription": "Popuni prazne detalje stavki i cover sa prvim match rezultato iz '{0}'. Ne briše detalje osim ako 'Prefer matched metadata' server postavka nije uključena.", "MessageQuickMatchDescription": "Popuni prazne detalje stavki i cover sa prvim match rezultato iz '{0}'. Ne briše detalje osim ako 'Prefer matched metadata' server postavka nije uključena.",
"MessageRemoveAllItemsWarning": "UPOZORENJE! Ova radnja briše sve stavke iz biblioteke uključujući bilokakve aktualizacije ili matcheve. Ovo ne mjenja vaše lokalne datoteke. Jeste li sigurni?",
"MessageRemoveChapter": "Remove chapter", "MessageRemoveChapter": "Remove chapter",
"MessageRemoveEpisodes": "ukloni {0} epizoda/-e", "MessageRemoveEpisodes": "ukloni {0} epizoda/-e",
"MessageRemoveFromPlayerQueue": "Remove from player queue", "MessageRemoveFromPlayerQueue": "Remove from player queue",

View File

@ -592,7 +592,6 @@
"MessagePlaylistCreateFromCollection": "Crea playlist da una Raccolta", "MessagePlaylistCreateFromCollection": "Crea playlist da una Raccolta",
"MessagePodcastHasNoRSSFeedForMatching": "Podcast non ha l'URL del feed RSS da utilizzare per il match", "MessagePodcastHasNoRSSFeedForMatching": "Podcast non ha l'URL del feed RSS da utilizzare per il match",
"MessageQuickMatchDescription": "Compila i dettagli dell'articolo vuoto e copri con il risultato della prima corrispondenza di '{0}'. Non sovrascrive i dettagli a meno che non sia abilitata l'impostazione del server \"Preferisci metadati corrispondenti\".", "MessageQuickMatchDescription": "Compila i dettagli dell'articolo vuoto e copri con il risultato della prima corrispondenza di '{0}'. Non sovrascrive i dettagli a meno che non sia abilitata l'impostazione del server \"Preferisci metadati corrispondenti\".",
"MessageRemoveAllItemsWarning": "AVVERTIMENTO! Questa azione rimuoverà tutti gli elementi della libreria dal database, inclusi eventuali aggiornamenti o corrispondenze apportate. Questo non fa nulla ai tuoi file effettivi. Sei sicuro?",
"MessageRemoveChapter": "Rimuovi Capitolo", "MessageRemoveChapter": "Rimuovi Capitolo",
"MessageRemoveEpisodes": "rimuovi {0} episodio(i)", "MessageRemoveEpisodes": "rimuovi {0} episodio(i)",
"MessageRemoveFromPlayerQueue": "Rimuovi dalla coda di riproduzione", "MessageRemoveFromPlayerQueue": "Rimuovi dalla coda di riproduzione",

View File

@ -592,7 +592,6 @@
"MessagePlaylistCreateFromCollection": "Afspeellijst aanmaken vanuit collectie", "MessagePlaylistCreateFromCollection": "Afspeellijst aanmaken vanuit collectie",
"MessagePodcastHasNoRSSFeedForMatching": "Podcast heeft geen RSS-feed URL om te gebruiken voor matching", "MessagePodcastHasNoRSSFeedForMatching": "Podcast heeft geen RSS-feed URL om te gebruiken voor matching",
"MessageQuickMatchDescription": "Vul lege onderdeeldetails & cover met eerste matchresultaat van '{0}'. Overschrijft geen details tenzij 'Prefereer gematchte metadata' serverinstelling is ingeschakeld.", "MessageQuickMatchDescription": "Vul lege onderdeeldetails & cover met eerste matchresultaat van '{0}'. Overschrijft geen details tenzij 'Prefereer gematchte metadata' serverinstelling is ingeschakeld.",
"MessageRemoveAllItemsWarning": "WAARSCHUWING! Deze actie zal alle onderdelen in de bibliotheek verwijderen uit de database, inclusief enige bijwerkingen of matches die je hebt gemaakt. Dit doet niets met je onderliggende bestanden. Weet je het zeker?",
"MessageRemoveChapter": "Verwijder hoofdstuk", "MessageRemoveChapter": "Verwijder hoofdstuk",
"MessageRemoveEpisodes": "Verwijder {0} aflevering(en)", "MessageRemoveEpisodes": "Verwijder {0} aflevering(en)",
"MessageRemoveFromPlayerQueue": "Verwijder uit afspeelwachtrij", "MessageRemoveFromPlayerQueue": "Verwijder uit afspeelwachtrij",

View File

@ -592,7 +592,6 @@
"MessagePlaylistCreateFromCollection": "Create playlist from collection", "MessagePlaylistCreateFromCollection": "Create playlist from collection",
"MessagePodcastHasNoRSSFeedForMatching": "Podcast nie ma adresu url kanału RSS, który mógłby zostać użyty do dopasowania", "MessagePodcastHasNoRSSFeedForMatching": "Podcast nie ma adresu url kanału RSS, który mógłby zostać użyty do dopasowania",
"MessageQuickMatchDescription": "Wypełnij puste informacje i okładkę pierwszym wynikiem dopasowania z '{0}'. Nie nadpisuje szczegółów, chyba że włączone jest ustawienie serwera 'Preferuj dopasowane metadane'.", "MessageQuickMatchDescription": "Wypełnij puste informacje i okładkę pierwszym wynikiem dopasowania z '{0}'. Nie nadpisuje szczegółów, chyba że włączone jest ustawienie serwera 'Preferuj dopasowane metadane'.",
"MessageRemoveAllItemsWarning": "UWAGA! Ta akcja usunie wszystkie elementy biblioteki z bazy danych, w tym wszystkie aktualizacje lub dopasowania, które zostały wykonane. Pliki pozostaną niezmienione. Czy jesteś pewien?",
"MessageRemoveChapter": "Usuń rozdział", "MessageRemoveChapter": "Usuń rozdział",
"MessageRemoveEpisodes": "Usuń {0} odcinków", "MessageRemoveEpisodes": "Usuń {0} odcinków",
"MessageRemoveFromPlayerQueue": "Remove from player queue", "MessageRemoveFromPlayerQueue": "Remove from player queue",

View File

@ -592,7 +592,6 @@
"MessagePlaylistCreateFromCollection": "Создать плейлист из коллекции", "MessagePlaylistCreateFromCollection": "Создать плейлист из коллекции",
"MessagePodcastHasNoRSSFeedForMatching": "Подкаст не имеет URL-адреса RSS-канала, который можно использовать для поиска", "MessagePodcastHasNoRSSFeedForMatching": "Подкаст не имеет URL-адреса RSS-канала, который можно использовать для поиска",
"MessageQuickMatchDescription": "Заполняет пустые детали элемента и обложку первым результатом поиска из «{0}». Не перезаписывает сведения, если не включен параметр сервера 'Предпочитать метаданные поиска'.", "MessageQuickMatchDescription": "Заполняет пустые детали элемента и обложку первым результатом поиска из «{0}». Не перезаписывает сведения, если не включен параметр сервера 'Предпочитать метаданные поиска'.",
"MessageRemoveAllItemsWarning": "ПРЕДУПРЕЖДЕНИЕ! Это действие удалит все элементы библиотеки из базы данных, включая все сделанные обновления или совпадения. Ничего не произойдет с вашими фактическими файлами. Уверены?",
"MessageRemoveChapter": "Удалить главу", "MessageRemoveChapter": "Удалить главу",
"MessageRemoveEpisodes": "Удалить {0} эпизод(ов)", "MessageRemoveEpisodes": "Удалить {0} эпизод(ов)",
"MessageRemoveFromPlayerQueue": "Удалить из очереди воспроизведения", "MessageRemoveFromPlayerQueue": "Удалить из очереди воспроизведения",

View File

@ -592,7 +592,6 @@
"MessagePlaylistCreateFromCollection": "从收藏中创建播放列表", "MessagePlaylistCreateFromCollection": "从收藏中创建播放列表",
"MessagePodcastHasNoRSSFeedForMatching": "播客没有可用于匹配 RSS 源的 url", "MessagePodcastHasNoRSSFeedForMatching": "播客没有可用于匹配 RSS 源的 url",
"MessageQuickMatchDescription": "使用来自 '{0}' 的第一个匹配结果填充空白详细信息和封面. 除非启用 '首选匹配元数据' 服务器设置, 否则不会覆盖详细信息.", "MessageQuickMatchDescription": "使用来自 '{0}' 的第一个匹配结果填充空白详细信息和封面. 除非启用 '首选匹配元数据' 服务器设置, 否则不会覆盖详细信息.",
"MessageRemoveAllItemsWarning": "警告! 此操作将从数据库中删除所有的媒体库项, 包括您所做的任何更新或匹配. 这不会对实际文件产生任何影响. 你确定吗?",
"MessageRemoveChapter": "移除章节", "MessageRemoveChapter": "移除章节",
"MessageRemoveEpisodes": "移除 {0} 剧集", "MessageRemoveEpisodes": "移除 {0} 剧集",
"MessageRemoveFromPlayerQueue": "从播放队列中移除", "MessageRemoveFromPlayerQueue": "从播放队列中移除",

View File

@ -1,171 +0,0 @@
/*
This is an example of a fully expanded book library item
*/
const LibraryItem = require('../server/objects/LibraryItem')
new LibraryItem({
id: 'li_abai123wir',
ino: "55450570412017066",
libraryId: 'lib_1239p1d8',
folderId: 'fol_192ab8901',
path: '/audiobooks/Terry Goodkind/Sword of Truth/1 - Wizards First Rule',
relPath: '/Terry Goodkind/Sword of Truth/1 - Wizards First Rule',
mtimeMs: 1646784672127,
ctimeMs: 1646784672127,
birthtimeMs: 1646784672127,
addedAt: 1646784672127,
updatedAt: 1646784672127,
lastScan: 1646784672127,
scanVersion: 1.72,
isMissing: false,
isInvalid: false,
mediaType: 'book',
media: { // Book.js
coverPath: '/metadata/items/li_abai123wir/cover.webp',
tags: ['favorites'],
lastCoverSearch: null,
lastCoverSearchQuery: null,
metadata: { // BookMetadata.js
title: 'Wizards First Rule',
subtitle: null,
authors: [
{
id: 'au_42908lkajsfdk',
name: 'Terry Goodkind'
}
],
narrators: ['Sam Tsoutsouvas'],
series: [
{
id: 'se_902384lansf',
name: 'Sword of Truth',
sequence: 1
}
],
genres: ['Fantasy', 'Adventure'],
publishedYear: '1994',
publishedDate: '1994-01-01',
publisher: 'Brilliance Audio',
description: 'In the aftermath of the brutal murder of his father, a mysterious woman...',
isbn: '289374092834',
asin: '19023819203',
language: 'english',
explicit: false
},
audioFiles: [
{ // AudioFile.js
ino: "55450570412017066",
index: 1,
metadata: { // FileMetadata.js
filename: 'audiofile.mp3',
ext: '.mp3',
path: '/audiobooks/Terry Goodkind/Sword of Truth/1 - Wizards First Rule/CD01/audiofile.mp3',
relPath: '/CD01/audiofile.mp3',
mtimeMs: 1646784672127,
ctimeMs: 1646784672127,
birthtimeMs: 1646784672127,
size: 1197449516
},
trackNumFromMeta: 1,
discNumFromMeta: null,
trackNumFromFilename: null,
discNumFromFilename: 1,
manuallyVerified: false,
exclude: false,
invalid: false,
format: "MP2/3 (MPEG audio layer 2/3)",
duration: 2342342,
bitRate: 324234,
language: null,
codec: 'mp3',
timeBase: "1/14112000",
channels: 1,
channelLayout: "mono",
chapters: [],
embeddedCoverArt: 'jpeg', // Video stream codec ['mjpeg', 'jpeg', 'png'] or null
metaTags: { // AudioMetaTags.js
tagAlbum: '',
tagArtist: '',
tagGenre: '',
tagTitle: '',
tagSeries: '',
tagSeriesPart: '',
tagTrack: '',
tagDisc: '',
tagSubtitle: '',
tagAlbumArtist: '',
tagDate: '',
tagComposer: '',
tagPublisher: '',
tagComment: '',
tagDescription: '',
tagEncoder: '',
tagEncodedBy: '',
tagIsbn: '',
tagLanguage: '',
tagASIN: ''
},
addedAt: 1646784672127,
updatedAt: 1646784672127
}
],
chapters: [
{
id: 0,
title: 'Chapter 01',
start: 0,
end: 2467.753
}
],
missingParts: [4, 10], // Array of missing parts in tracklist
ebookFile: { // EBookFile.js
ino: "55450570412017066",
metadata: { // FileMetadata.js
filename: 'ebookfile.mobi',
ext: '.mobi',
path: '/audiobooks/Terry Goodkind/Sword of Truth/1 - Wizards First Rule/ebookfile.mobi',
relPath: '/ebookfile.mobi',
mtimeMs: 1646784672127,
ctimeMs: 1646784672127,
birthtimeMs: 1646784672127,
size: 1197449516
},
ebookFormat: 'mobi',
addedAt: 1646784672127,
updatedAt: 1646784672127
}
},
libraryFiles: [
{ // LibraryFile.js
ino: "55450570412017066",
metadata: { // FileMetadata.js
filename: 'cover.png',
ext: '.png',
path: '/audiobooks/Terry Goodkind/Sword of Truth/1 - Wizards First Rule/subfolder/cover.png',
relPath: '/subfolder/cover.png',
mtimeMs: 1646784672127,
ctimeMs: 1646784672127,
birthtimeMs: 1646784672127,
size: 1197449516
},
addedAt: 1646784672127,
updatedAt: 1646784672127
},
{ // LibraryFile.js
ino: "55450570412017066",
metadata: { // FileMetadata.js
filename: 'cover.png',
ext: '.mobi',
path: '/audiobooks/Terry Goodkind/Sword of Truth/1 - Wizards First Rule/ebookfile.mobi',
relPath: '/ebookfile.mobi',
mtimeMs: 1646784672127,
ctimeMs: 1646784672127,
birthtimeMs: 1646784672127,
size: 1197449516
},
addedAt: 1646784672127,
updatedAt: 1646784672127
}
]
})

View File

@ -1,83 +0,0 @@
/*
  This is an example of a fully expanded podcast library item (under construction)
  Reference shapes: LibraryItem.js, Podcast.js, PodcastMetadata.js, PodcastEpisode.js,
  LibraryFile.js, FileMetadata.js. Timestamps are unix epoch milliseconds.
*/
const LibraryItem = require('../server/objects/LibraryItem')

new LibraryItem({
  id: 'li_abai123wir',
  ino: "55450570412017066",
  libraryId: 'lib_1239p1d8',
  folderId: 'fol_192ab8901',
  path: '/podcasts/Great Podcast Name', // absolute path on disk
  relPath: '/Great Podcast Name', // path relative to the library folder
  mtimeMs: 1646784672127,
  ctimeMs: 1646784672127,
  birthtimeMs: 1646784672127,
  addedAt: 1646784672127,
  updatedAt: 1646784672127,
  lastScan: 1646784672127,
  scanVersion: 1.72,
  isMissing: false, // item folder no longer found on disk
  isInvalid: false, // item found but has no valid media
  mediaType: 'podcast',
  media: { // Podcast.js
    coverPath: '/metadata/items/li_abai123wir/cover.webp',
    tags: ['favorites'],
    lastCoverSearch: null,
    lastCoverSearchQuery: null,
    metadata: { // PodcastMetadata.js
      title: 'Great Podcast Name',
      artist: 'Some Artist Name',
      genres: ['Fantasy', 'Adventure'],
      publishedDate: '1994-01-01',
      description: 'In the aftermath of the brutal murder of his father, a mysterious woman...',
      feedUrl: '',
      itunesPageUrl: '',
      itunesId: '',
      itunesArtistId: '',
      explicit: false
    },
    episodes: [
      { // PodcastEpisode.js
        id: 'ep_289374asf0a98',
        index: 1,
        // TODO: podcast episode data and PodcastEpisodeMetadata
        addedAt: 1646784672127,
        updatedAt: 1646784672127
      }
    ]
  },
  libraryFiles: [
    { // LibraryFile.js
      ino: "55450570412017066",
      metadata: { // FileMetadata.js
        filename: 'cover.png',
        ext: '.png',
        path: '/podcasts/Great Podcast Name/cover.png',
        relPath: '/cover.png',
        mtimeMs: 1646784672127,
        ctimeMs: 1646784672127,
        birthtimeMs: 1646784672127,
        size: 1197449516
      },
      addedAt: 1646784672127,
      updatedAt: 1646784672127
    },
    { // LibraryFile.js
      ino: "55450570412017066",
      metadata: { // FileMetadata.js
        filename: 'episode_1.mp3',
        ext: '.mp3',
        path: '/podcasts/Great Podcast Name/episode_1.mp3',
        relPath: '/episode_1.mp3',
        mtimeMs: 1646784672127,
        ctimeMs: 1646784672127,
        birthtimeMs: 1646784672127,
        size: 1197449516
      },
      addedAt: 1646784672127,
      updatedAt: 1646784672127
    }
  ]
})

2376
package-lock.json generated

File diff suppressed because it is too large Load Diff

View File

@ -20,7 +20,7 @@
"pkg": { "pkg": {
"assets": [ "assets": [
"client/dist/**/*", "client/dist/**/*",
"server/Db.js" "node_modules/sqlite3/lib/binding/**/*.node"
], ],
"scripts": [ "scripts": [
"prod.js", "prod.js",
@ -36,7 +36,9 @@
"htmlparser2": "^8.0.1", "htmlparser2": "^8.0.1",
"node-tone": "^1.0.1", "node-tone": "^1.0.1",
"nodemailer": "^6.9.2", "nodemailer": "^6.9.2",
"sequelize": "^6.32.1",
"socket.io": "^4.5.4", "socket.io": "^4.5.4",
"sqlite3": "^5.1.6",
"xml2js": "^0.5.0" "xml2js": "^0.5.0"
}, },
"devDependencies": { "devDependencies": {

View File

@ -2,21 +2,10 @@ const bcrypt = require('./libs/bcryptjs')
const jwt = require('./libs/jsonwebtoken') const jwt = require('./libs/jsonwebtoken')
const requestIp = require('./libs/requestIp') const requestIp = require('./libs/requestIp')
const Logger = require('./Logger') const Logger = require('./Logger')
const Database = require('./Database')
class Auth { class Auth {
constructor(db) { constructor() { }
this.db = db
this.user = null
}
get username() {
return this.user ? this.user.username : 'nobody'
}
get users() {
return this.db.users
}
cors(req, res, next) { cors(req, res, next) {
res.header('Access-Control-Allow-Origin', '*') res.header('Access-Control-Allow-Origin', '*')
@ -35,20 +24,20 @@ class Auth {
async initTokenSecret() { async initTokenSecret() {
if (process.env.TOKEN_SECRET) { // User can supply their own token secret if (process.env.TOKEN_SECRET) { // User can supply their own token secret
Logger.debug(`[Auth] Setting token secret - using user passed in TOKEN_SECRET env var`) Logger.debug(`[Auth] Setting token secret - using user passed in TOKEN_SECRET env var`)
this.db.serverSettings.tokenSecret = process.env.TOKEN_SECRET Database.serverSettings.tokenSecret = process.env.TOKEN_SECRET
} else { } else {
Logger.debug(`[Auth] Setting token secret - using random bytes`) Logger.debug(`[Auth] Setting token secret - using random bytes`)
this.db.serverSettings.tokenSecret = require('crypto').randomBytes(256).toString('base64') Database.serverSettings.tokenSecret = require('crypto').randomBytes(256).toString('base64')
} }
await this.db.updateServerSettings() await Database.updateServerSettings()
// New token secret creation added in v2.1.0 so generate new API tokens for each user // New token secret creation added in v2.1.0 so generate new API tokens for each user
if (this.db.users.length) { if (Database.users.length) {
for (const user of this.db.users) { for (const user of Database.users) {
user.token = await this.generateAccessToken({ userId: user.id, username: user.username }) user.token = await this.generateAccessToken({ userId: user.id, username: user.username })
Logger.warn(`[Auth] User ${user.username} api token has been updated using new token secret`) Logger.warn(`[Auth] User ${user.username} api token has been updated using new token secret`)
} }
await this.db.updateEntities('user', this.db.users) await Database.updateBulkUsers(Database.users)
} }
} }
@ -68,7 +57,7 @@ class Auth {
return res.sendStatus(401) return res.sendStatus(401)
} }
var user = await this.verifyToken(token) const user = await this.verifyToken(token)
if (!user) { if (!user) {
Logger.error('Verify Token User Not Found', token) Logger.error('Verify Token User Not Found', token)
return res.sendStatus(404) return res.sendStatus(404)
@ -95,7 +84,7 @@ class Auth {
} }
generateAccessToken(payload) { generateAccessToken(payload) {
return jwt.sign(payload, global.ServerSettings.tokenSecret); return jwt.sign(payload, Database.serverSettings.tokenSecret)
} }
authenticateUser(token) { authenticateUser(token) {
@ -104,12 +93,12 @@ class Auth {
verifyToken(token) { verifyToken(token) {
return new Promise((resolve) => { return new Promise((resolve) => {
jwt.verify(token, global.ServerSettings.tokenSecret, (err, payload) => { jwt.verify(token, Database.serverSettings.tokenSecret, (err, payload) => {
if (!payload || err) { if (!payload || err) {
Logger.error('JWT Verify Token Failed', err) Logger.error('JWT Verify Token Failed', err)
return resolve(null) return resolve(null)
} }
const user = this.users.find(u => u.id === payload.userId && u.username === payload.username) const user = Database.users.find(u => (u.id === payload.userId || u.oldUserId === payload.userId) && u.username === payload.username)
resolve(user || null) resolve(user || null)
}) })
}) })
@ -118,9 +107,9 @@ class Auth {
getUserLoginResponsePayload(user) { getUserLoginResponsePayload(user) {
return { return {
user: user.toJSONForBrowser(), user: user.toJSONForBrowser(),
userDefaultLibraryId: user.getDefaultLibraryId(this.db.libraries), userDefaultLibraryId: user.getDefaultLibraryId(Database.libraries),
serverSettings: this.db.serverSettings.toJSONForBrowser(), serverSettings: Database.serverSettings.toJSONForBrowser(),
ereaderDevices: this.db.emailSettings.getEReaderDevices(user), ereaderDevices: Database.emailSettings.getEReaderDevices(user),
Source: global.Source Source: global.Source
} }
} }
@ -130,7 +119,7 @@ class Auth {
const username = (req.body.username || '').toLowerCase() const username = (req.body.username || '').toLowerCase()
const password = req.body.password || '' const password = req.body.password || ''
const user = this.users.find(u => u.username.toLowerCase() === username) const user = Database.users.find(u => u.username.toLowerCase() === username)
if (!user?.isActive) { if (!user?.isActive) {
Logger.warn(`[Auth] Failed login attempt ${req.rateLimit.current} of ${req.rateLimit.limit} from ${ipAddress}`) Logger.warn(`[Auth] Failed login attempt ${req.rateLimit.current} of ${req.rateLimit.limit} from ${ipAddress}`)
@ -142,7 +131,7 @@ class Auth {
} }
// Check passwordless root user // Check passwordless root user
if (user.id === 'root' && (!user.pash || user.pash === '')) { if (user.type === 'root' && (!user.pash || user.pash === '')) {
if (password) { if (password) {
return res.status(401).send('Invalid root password (hint: there is none)') return res.status(401).send('Invalid root password (hint: there is none)')
} else { } else {
@ -166,15 +155,6 @@ class Auth {
} }
} }
// Not in use now
lockUser(user) {
user.isLocked = true
return this.db.updateEntity('user', user).catch((error) => {
Logger.error('[Auth] Failed to lock user', user.username, error)
return false
})
}
comparePassword(password, user) { comparePassword(password, user) {
if (user.type === 'root' && !password && !user.pash) return true if (user.type === 'root' && !password && !user.pash) return true
if (!password || !user.pash) return false if (!password || !user.pash) return false
@ -184,7 +164,7 @@ class Auth {
async userChangePassword(req, res) { async userChangePassword(req, res) {
var { password, newPassword } = req.body var { password, newPassword } = req.body
newPassword = newPassword || '' newPassword = newPassword || ''
var matchingUser = this.users.find(u => u.id === req.user.id) const matchingUser = Database.users.find(u => u.id === req.user.id)
// Only root can have an empty password // Only root can have an empty password
if (matchingUser.type !== 'root' && !newPassword) { if (matchingUser.type !== 'root' && !newPassword) {
@ -193,14 +173,14 @@ class Auth {
}) })
} }
var compare = await this.comparePassword(password, matchingUser) const compare = await this.comparePassword(password, matchingUser)
if (!compare) { if (!compare) {
return res.json({ return res.json({
error: 'Invalid password' error: 'Invalid password'
}) })
} }
var pw = '' let pw = ''
if (newPassword) { if (newPassword) {
pw = await this.hashPass(newPassword) pw = await this.hashPass(newPassword)
if (!pw) { if (!pw) {
@ -211,7 +191,8 @@ class Auth {
} }
matchingUser.pash = pw matchingUser.pash = pw
var success = await this.db.updateEntity('user', matchingUser)
const success = await Database.updateUser(matchingUser)
if (success) { if (success) {
res.json({ res.json({
success: true success: true

520
server/Database.js Normal file
View File

@ -0,0 +1,520 @@
const Path = require('path')
const { Sequelize } = require('sequelize')
const packageJson = require('../package.json')
const fs = require('./libs/fsExtra')
const Logger = require('./Logger')
const dbMigration = require('./utils/migrations/dbMigration')
/**
 * Singleton wrapper around the sqlite database (via Sequelize).
 *
 * During the njodb -> sqlite migration period this class also keeps
 * in-memory arrays of the "old" object formats (libraryItems, users,
 * libraries, ...) loaded once in loadData(); the mutating methods below
 * write through to sqlite AND keep those arrays in sync.
 */
class Database {
  constructor() {
    this.sequelize = null
    this.dbPath = null
    this.isNew = false // New absdatabase.sqlite created

    // Temporarily using format of old DB
    // TODO: below data should be loaded from the DB as needed
    this.libraryItems = []
    this.users = []
    this.libraries = []
    this.settings = []
    this.collections = []
    this.playlists = []
    this.authors = []
    this.series = []
    this.feeds = []

    this.serverSettings = null
    this.notificationSettings = null
    this.emailSettings = null
  }

  // Sequelize model registry keyed by model name ({} before connect())
  get models() {
    return this.sequelize?.models || {}
  }

  // True when a root-type user has been loaded/created
  get hasRootUser() {
    return this.users.some(u => u.type === 'root')
  }

  // Whether absdatabase.sqlite already exists at this.dbPath
  async checkHasDb() {
    if (!await fs.pathExists(this.dbPath)) {
      Logger.info(`[Database] absdatabase.sqlite not found at ${this.dbPath}`)
      return false
    }
    return true
  }

  /**
   * Connect to sqlite, sync models and load data into memory.
   * @param {boolean} force - when true, drops & recreates all tables (passed to sequelize.sync)
   * @throws {Error} when the db connection cannot be established
   */
  async init(force = false) {
    this.dbPath = Path.join(global.ConfigPath, 'absdatabase.sqlite')

    // First check if this is a new database
    this.isNew = !(await this.checkHasDb()) || force

    if (!await this.connect()) {
      throw new Error('Database connection failed')
    }

    await this.buildModels(force)
    Logger.info(`[Database] Db initialized with models:`, Object.keys(this.sequelize.models).join(', '))

    await this.loadData()
  }

  /**
   * Create the Sequelize instance and verify connectivity.
   * @returns {Promise<boolean>} true when authenticate() succeeds
   */
  async connect() {
    Logger.info(`[Database] Initializing db at "${this.dbPath}"`)
    this.sequelize = new Sequelize({
      dialect: 'sqlite',
      storage: this.dbPath,
      logging: false
    })

    // Helper function used by models to build association/model names
    this.sequelize.uppercaseFirst = str => str ? `${str[0].toUpperCase()}${str.substr(1)}` : ''

    try {
      await this.sequelize.authenticate()
      Logger.info(`[Database] Db connection was successful`)
      return true
    } catch (error) {
      Logger.error(`[Database] Failed to connect to db`, error)
      return false
    }
  }

  async disconnect() {
    Logger.info(`[Database] Disconnecting sqlite db`)
    await this.sequelize.close()
    this.sequelize = null
  }

  async reconnect() {
    Logger.info(`[Database] Reconnecting sqlite db`)
    await this.init()
  }

  /**
   * Register all models then sync the schema.
   * Registration order matters: association targets (User, Library, Book, ...)
   * are defined before the models that reference them.
   * @param {boolean} force - drop & recreate tables
   * @returns {Promise} resolves when sequelize.sync completes
   */
  buildModels(force = false) {
    require('./models/User')(this.sequelize)
    require('./models/Library')(this.sequelize)
    require('./models/LibraryFolder')(this.sequelize)
    require('./models/Book')(this.sequelize)
    require('./models/Podcast')(this.sequelize)
    require('./models/PodcastEpisode')(this.sequelize)
    require('./models/LibraryItem')(this.sequelize)
    require('./models/MediaProgress')(this.sequelize)
    require('./models/Series')(this.sequelize)
    require('./models/BookSeries')(this.sequelize)
    require('./models/Author')(this.sequelize)
    require('./models/BookAuthor')(this.sequelize)
    require('./models/Collection')(this.sequelize)
    require('./models/CollectionBook')(this.sequelize)
    require('./models/Playlist')(this.sequelize)
    require('./models/PlaylistMediaItem')(this.sequelize)
    require('./models/Device')(this.sequelize)
    require('./models/PlaybackSession')(this.sequelize)
    require('./models/Feed')(this.sequelize)
    require('./models/FeedEpisode')(this.sequelize)
    require('./models/Setting')(this.sequelize)

    return this.sequelize.sync({ force, alter: false })
  }

  /**
   * Populate the in-memory caches from sqlite, running the old-db migration
   * first when a fresh database was just created over an existing njodb install.
   * Also bumps serverSettings.version on server upgrade.
   */
  async loadData() {
    if (this.isNew && await dbMigration.checkShouldMigrate()) {
      Logger.info(`[Database] New database was created and old database was detected - migrating old to new`)
      await dbMigration.migrate(this.models)
    }

    const startTime = Date.now()

    this.libraryItems = await this.models.libraryItem.getAllOldLibraryItems()
    this.users = await this.models.user.getOldUsers()
    this.libraries = await this.models.library.getAllOldLibraries()
    this.collections = await this.models.collection.getOldCollections()
    this.playlists = await this.models.playlist.getOldPlaylists()
    this.authors = await this.models.author.getOldAuthors()
    this.series = await this.models.series.getAllOldSeries()
    this.feeds = await this.models.feed.getOldFeeds()

    const settingsData = await this.models.setting.getOldSettings()
    this.settings = settingsData.settings
    this.emailSettings = settingsData.emailSettings
    this.serverSettings = settingsData.serverSettings
    this.notificationSettings = settingsData.notificationSettings
    global.ServerSettings = this.serverSettings.toJSON()

    Logger.info(`[Database] Db data loaded in ${Date.now() - startTime}ms`)

    if (packageJson.version !== this.serverSettings.version) {
      Logger.info(`[Database] Server upgrade detected from ${this.serverSettings.version} to ${packageJson.version}`)
      this.serverSettings.version = packageJson.version
      await this.updateServerSettings()
    }
  }

  /**
   * Create the initial root user.
   * @returns {Promise<boolean>} true when the user was created
   */
  async createRootUser(username, pash, token) {
    if (!this.sequelize) return false
    const newUser = await this.models.user.createRootUser(username, pash, token)
    if (newUser) {
      this.users.push(newUser)
      return true
    }
    return false
  }

  // Persist serverSettings and refresh the global snapshot
  updateServerSettings() {
    if (!this.sequelize) return false
    global.ServerSettings = this.serverSettings.toJSON()
    return this.updateSetting(this.serverSettings)
  }

  updateSetting(settings) {
    if (!this.sequelize) return false
    return this.models.setting.updateSettingObj(settings.toJSON())
  }

  async createUser(oldUser) {
    if (!this.sequelize) return false
    await this.models.user.createFromOld(oldUser)
    this.users.push(oldUser)
    return true
  }

  updateUser(oldUser) {
    if (!this.sequelize) return false
    return this.models.user.updateFromOld(oldUser)
  }

  updateBulkUsers(oldUsers) {
    if (!this.sequelize) return false
    return Promise.all(oldUsers.map(u => this.updateUser(u)))
  }

  async removeUser(userId) {
    if (!this.sequelize) return false
    await this.models.user.removeById(userId)
    this.users = this.users.filter(u => u.id !== userId)
  }

  upsertMediaProgress(oldMediaProgress) {
    if (!this.sequelize) return false
    return this.models.mediaProgress.upsertFromOld(oldMediaProgress)
  }

  removeMediaProgress(mediaProgressId) {
    if (!this.sequelize) return false
    return this.models.mediaProgress.removeById(mediaProgressId)
  }

  updateBulkBooks(oldBooks) {
    if (!this.sequelize) return false
    return Promise.all(oldBooks.map(oldBook => this.models.book.saveFromOld(oldBook)))
  }

  async createLibrary(oldLibrary) {
    if (!this.sequelize) return false
    await this.models.library.createFromOld(oldLibrary)
    this.libraries.push(oldLibrary)
  }

  updateLibrary(oldLibrary) {
    if (!this.sequelize) return false
    return this.models.library.updateFromOld(oldLibrary)
  }

  async removeLibrary(libraryId) {
    if (!this.sequelize) return false
    await this.models.library.removeById(libraryId)
    this.libraries = this.libraries.filter(lib => lib.id !== libraryId)
  }

  /**
   * Create a collection plus its CollectionBook join rows.
   * Old collections reference library item ids; join rows are built from the
   * corresponding book media ids.
   */
  async createCollection(oldCollection) {
    if (!this.sequelize) return false
    const newCollection = await this.models.collection.createFromOld(oldCollection)
    // Create CollectionBooks
    if (newCollection) {
      const collectionBooks = []
      oldCollection.books.forEach((libraryItemId) => {
        const libraryItem = this.libraryItems.find(li => li.id === libraryItemId)
        if (libraryItem) {
          collectionBooks.push({
            collectionId: newCollection.id,
            bookId: libraryItem.media.id
          })
        }
      })
      if (collectionBooks.length) {
        await this.createBulkCollectionBooks(collectionBooks)
      }
    }
    this.collections.push(oldCollection)
  }

  // Full replace of a collection and its ordered CollectionBook rows
  updateCollection(oldCollection) {
    if (!this.sequelize) return false
    const collectionBooks = []
    let order = 1
    oldCollection.books.forEach((libraryItemId) => {
      const libraryItem = this.getLibraryItem(libraryItemId)
      if (!libraryItem) return
      collectionBooks.push({
        collectionId: oldCollection.id,
        bookId: libraryItem.media.id,
        order: order++
      })
    })
    return this.models.collection.fullUpdateFromOld(oldCollection, collectionBooks)
  }

  async removeCollection(collectionId) {
    if (!this.sequelize) return false
    await this.models.collection.removeById(collectionId)
    this.collections = this.collections.filter(c => c.id !== collectionId)
  }

  createCollectionBook(collectionBook) {
    if (!this.sequelize) return false
    return this.models.collectionBook.create(collectionBook)
  }

  createBulkCollectionBooks(collectionBooks) {
    if (!this.sequelize) return false
    return this.models.collectionBook.bulkCreate(collectionBooks)
  }

  removeCollectionBook(collectionId, bookId) {
    if (!this.sequelize) return false
    return this.models.collectionBook.removeByIds(collectionId, bookId)
  }

  /**
   * Create a playlist plus its ordered PlaylistMediaItem join rows.
   * Items with an episodeId map to podcastEpisode media items, otherwise
   * the item's book media id is used.
   */
  async createPlaylist(oldPlaylist) {
    if (!this.sequelize) return false
    const newPlaylist = await this.models.playlist.createFromOld(oldPlaylist)
    if (newPlaylist) {
      const playlistMediaItems = []
      let order = 1
      for (const mediaItemObj of oldPlaylist.items) {
        const libraryItem = this.libraryItems.find(li => li.id === mediaItemObj.libraryItemId)
        if (!libraryItem) continue

        let mediaItemId = libraryItem.media.id // bookId
        let mediaItemType = 'book'
        if (mediaItemObj.episodeId) {
          mediaItemType = 'podcastEpisode'
          mediaItemId = mediaItemObj.episodeId
        }

        playlistMediaItems.push({
          playlistId: newPlaylist.id,
          mediaItemId,
          mediaItemType,
          order: order++
        })
      }
      if (playlistMediaItems.length) {
        await this.createBulkPlaylistMediaItems(playlistMediaItems)
      }
    }
    this.playlists.push(oldPlaylist)
  }

  // Full replace of a playlist and its ordered PlaylistMediaItem rows
  updatePlaylist(oldPlaylist) {
    if (!this.sequelize) return false
    const playlistMediaItems = []
    let order = 1
    oldPlaylist.items.forEach((item) => {
      const libraryItem = this.getLibraryItem(item.libraryItemId)
      if (!libraryItem) return
      playlistMediaItems.push({
        playlistId: oldPlaylist.id,
        mediaItemId: item.episodeId || libraryItem.media.id,
        mediaItemType: item.episodeId ? 'podcastEpisode' : 'book',
        order: order++
      })
    })
    return this.models.playlist.fullUpdateFromOld(oldPlaylist, playlistMediaItems)
  }

  async removePlaylist(playlistId) {
    if (!this.sequelize) return false
    await this.models.playlist.removeById(playlistId)
    this.playlists = this.playlists.filter(p => p.id !== playlistId)
  }

  createPlaylistMediaItem(playlistMediaItem) {
    if (!this.sequelize) return false
    return this.models.playlistMediaItem.create(playlistMediaItem)
  }

  createBulkPlaylistMediaItems(playlistMediaItems) {
    if (!this.sequelize) return false
    return this.models.playlistMediaItem.bulkCreate(playlistMediaItems)
  }

  removePlaylistMediaItem(playlistId, mediaItemId) {
    if (!this.sequelize) return false
    return this.models.playlistMediaItem.removeByIds(playlistId, mediaItemId)
  }

  // In-memory lookup (does not query sqlite)
  getLibraryItem(libraryItemId) {
    if (!this.sequelize) return false
    return this.libraryItems.find(li => li.id === libraryItemId)
  }

  async createLibraryItem(oldLibraryItem) {
    if (!this.sequelize) return false
    await this.models.libraryItem.fullCreateFromOld(oldLibraryItem)
    this.libraryItems.push(oldLibraryItem)
  }

  updateLibraryItem(oldLibraryItem) {
    if (!this.sequelize) return false
    return this.models.libraryItem.fullUpdateFromOld(oldLibraryItem)
  }

  /**
   * @returns {Promise<number>|false} number of items that actually had updates
   */
  async updateBulkLibraryItems(oldLibraryItems) {
    if (!this.sequelize) return false
    let updatesMade = 0
    for (const oldLibraryItem of oldLibraryItems) {
      const hasUpdates = await this.models.libraryItem.fullUpdateFromOld(oldLibraryItem)
      if (hasUpdates) updatesMade++
    }
    return updatesMade
  }

  async createBulkLibraryItems(oldLibraryItems) {
    if (!this.sequelize) return false
    for (const oldLibraryItem of oldLibraryItems) {
      await this.models.libraryItem.fullCreateFromOld(oldLibraryItem)
      this.libraryItems.push(oldLibraryItem)
    }
  }

  async removeLibraryItem(libraryItemId) {
    if (!this.sequelize) return false
    await this.models.libraryItem.removeById(libraryItemId)
    this.libraryItems = this.libraryItems.filter(li => li.id !== libraryItemId)
  }

  async createFeed(oldFeed) {
    if (!this.sequelize) return false
    await this.models.feed.fullCreateFromOld(oldFeed)
    this.feeds.push(oldFeed)
  }

  updateFeed(oldFeed) {
    if (!this.sequelize) return false
    return this.models.feed.fullUpdateFromOld(oldFeed)
  }

  async removeFeed(feedId) {
    if (!this.sequelize) return false
    await this.models.feed.removeById(feedId)
    this.feeds = this.feeds.filter(f => f.id !== feedId)
  }

  updateSeries(oldSeries) {
    if (!this.sequelize) return false
    return this.models.series.updateFromOld(oldSeries)
  }

  async createSeries(oldSeries) {
    if (!this.sequelize) return false
    await this.models.series.createFromOld(oldSeries)
    this.series.push(oldSeries)
  }

  async createBulkSeries(oldSeriesObjs) {
    if (!this.sequelize) return false
    await this.models.series.createBulkFromOld(oldSeriesObjs)
    this.series.push(...oldSeriesObjs)
  }

  async removeSeries(seriesId) {
    if (!this.sequelize) return false
    await this.models.series.removeById(seriesId)
    this.series = this.series.filter(se => se.id !== seriesId)
  }

  async createAuthor(oldAuthor) {
    if (!this.sequelize) return false
    // FIX: createFromOld lives on the author model, not on the model registry
    // (was `this.models.createFromOld(oldAuthor)` which throws TypeError)
    await this.models.author.createFromOld(oldAuthor)
    this.authors.push(oldAuthor)
  }

  async createBulkAuthors(oldAuthors) {
    if (!this.sequelize) return false
    await this.models.author.createBulkFromOld(oldAuthors)
    this.authors.push(...oldAuthors)
  }

  updateAuthor(oldAuthor) {
    if (!this.sequelize) return false
    return this.models.author.updateFromOld(oldAuthor)
  }

  async removeAuthor(authorId) {
    if (!this.sequelize) return false
    await this.models.author.removeById(authorId)
    this.authors = this.authors.filter(au => au.id !== authorId)
  }

  async createBulkBookAuthors(bookAuthors) {
    if (!this.sequelize) return false
    await this.models.bookAuthor.bulkCreate(bookAuthors)
    // NOTE(review): pushes BookAuthor join rows into this.authors alongside
    // Author objects; removeBulkBookAuthors below filters on authorId/bookId
    // accordingly — confirm this mixing is intended
    this.authors.push(...bookAuthors)
  }

  async removeBulkBookAuthors(authorId = null, bookId = null) {
    if (!this.sequelize) return false
    if (!authorId && !bookId) return
    await this.models.bookAuthor.removeByIds(authorId, bookId)
    this.authors = this.authors.filter(au => {
      if (authorId && au.authorId !== authorId) return true
      if (bookId && au.bookId !== bookId) return true
      return false
    })
  }

  getPlaybackSessions(where = null) {
    if (!this.sequelize) return false
    return this.models.playbackSession.getOldPlaybackSessions(where)
  }

  getPlaybackSession(sessionId) {
    if (!this.sequelize) return false
    return this.models.playbackSession.getById(sessionId)
  }

  createPlaybackSession(oldSession) {
    if (!this.sequelize) return false
    return this.models.playbackSession.createFromOld(oldSession)
  }

  updatePlaybackSession(oldSession) {
    if (!this.sequelize) return false
    return this.models.playbackSession.updateFromOld(oldSession)
  }

  getDeviceByDeviceId(deviceId) {
    if (!this.sequelize) return false
    return this.models.device.getOldDeviceByDeviceId(deviceId)
  }

  updateDevice(oldDevice) {
    if (!this.sequelize) return false
    return this.models.device.updateFromOld(oldDevice)
  }

  createDevice(oldDevice) {
    if (!this.sequelize) return false
    return this.models.device.createFromOld(oldDevice)
  }

  removePlaybackSession(sessionId) {
    if (!this.sequelize) return false
    return this.models.playbackSession.removeById(sessionId)
  }
}

module.exports = new Database()

View File

@ -1,503 +0,0 @@
const Path = require('path')
const njodb = require('./libs/njodb')
const Logger = require('./Logger')
const { version } = require('../package.json')
const filePerms = require('./utils/filePerms')
const LibraryItem = require('./objects/LibraryItem')
const User = require('./objects/user/User')
const Collection = require('./objects/Collection')
const Playlist = require('./objects/Playlist')
const Library = require('./objects/Library')
const Author = require('./objects/entities/Author')
const Series = require('./objects/entities/Series')
const ServerSettings = require('./objects/settings/ServerSettings')
const NotificationSettings = require('./objects/settings/NotificationSettings')
const EmailSettings = require('./objects/settings/EmailSettings')
const PlaybackSession = require('./objects/PlaybackSession')
class Db {
  constructor() {
    // On-disk folder for each njodb entity store (one directory per entity type)
    this.LibraryItemsPath = Path.join(global.ConfigPath, 'libraryItems')
    this.UsersPath = Path.join(global.ConfigPath, 'users')
    this.SessionsPath = Path.join(global.ConfigPath, 'sessions')
    this.LibrariesPath = Path.join(global.ConfigPath, 'libraries')
    this.SettingsPath = Path.join(global.ConfigPath, 'settings')
    this.CollectionsPath = Path.join(global.ConfigPath, 'collections')
    this.PlaylistsPath = Path.join(global.ConfigPath, 'playlists')
    this.AuthorsPath = Path.join(global.ConfigPath, 'authors')
    this.SeriesPath = Path.join(global.ConfigPath, 'series')
    this.FeedsPath = Path.join(global.ConfigPath, 'feeds')

    // One njodb database per entity type, all sharing the same lock options
    this.libraryItemsDb = new njodb.Database(this.LibraryItemsPath, this.getNjodbOptions())
    this.usersDb = new njodb.Database(this.UsersPath, this.getNjodbOptions())
    this.sessionsDb = new njodb.Database(this.SessionsPath, this.getNjodbOptions())
    this.librariesDb = new njodb.Database(this.LibrariesPath, this.getNjodbOptions())
    this.settingsDb = new njodb.Database(this.SettingsPath, this.getNjodbOptions())
    this.collectionsDb = new njodb.Database(this.CollectionsPath, this.getNjodbOptions())
    this.playlistsDb = new njodb.Database(this.PlaylistsPath, this.getNjodbOptions())
    this.authorsDb = new njodb.Database(this.AuthorsPath, this.getNjodbOptions())
    this.seriesDb = new njodb.Database(this.SeriesPath, this.getNjodbOptions())
    this.feedsDb = new njodb.Database(this.FeedsPath, this.getNjodbOptions())

    // In-memory caches populated by load()
    this.libraryItems = []
    this.users = []
    this.libraries = []
    this.settings = []
    this.collections = []
    this.playlists = []
    this.authors = []
    this.series = []

    this.serverSettings = null
    this.notificationSettings = null
    this.emailSettings = null

    // Stores previous version only if upgraded
    this.previousVersion = null
  }
get hasRootUser() {
return this.users.some(u => u.id === 'root')
}
getNjodbOptions() {
return {
lockoptions: {
stale: 1000 * 20, // 20 seconds
update: 2500,
retries: {
retries: 20,
minTimeout: 250,
maxTimeout: 5000,
factor: 1
}
}
}
}
getEntityDb(entityName) {
if (entityName === 'user') return this.usersDb
else if (entityName === 'session') return this.sessionsDb
else if (entityName === 'libraryItem') return this.libraryItemsDb
else if (entityName === 'library') return this.librariesDb
else if (entityName === 'settings') return this.settingsDb
else if (entityName === 'collection') return this.collectionsDb
else if (entityName === 'playlist') return this.playlistsDb
else if (entityName === 'author') return this.authorsDb
else if (entityName === 'series') return this.seriesDb
else if (entityName === 'feed') return this.feedsDb
return null
}
getEntityArrayKey(entityName) {
if (entityName === 'user') return 'users'
else if (entityName === 'session') return 'sessions'
else if (entityName === 'libraryItem') return 'libraryItems'
else if (entityName === 'library') return 'libraries'
else if (entityName === 'settings') return 'settings'
else if (entityName === 'collection') return 'collections'
else if (entityName === 'playlist') return 'playlists'
else if (entityName === 'author') return 'authors'
else if (entityName === 'series') return 'series'
else if (entityName === 'feed') return 'feeds'
return null
}
  // Recreate every njodb instance (drops any in-process state/locks) then re-run init()
  reinit() {
    this.libraryItemsDb = new njodb.Database(this.LibraryItemsPath, this.getNjodbOptions())
    this.usersDb = new njodb.Database(this.UsersPath, this.getNjodbOptions())
    this.sessionsDb = new njodb.Database(this.SessionsPath, this.getNjodbOptions())
    this.librariesDb = new njodb.Database(this.LibrariesPath, this.getNjodbOptions())
    this.settingsDb = new njodb.Database(this.SettingsPath, this.getNjodbOptions())
    this.collectionsDb = new njodb.Database(this.CollectionsPath, this.getNjodbOptions())
    this.playlistsDb = new njodb.Database(this.PlaylistsPath, this.getNjodbOptions())
    this.authorsDb = new njodb.Database(this.AuthorsPath, this.getNjodbOptions())
    this.seriesDb = new njodb.Database(this.SeriesPath, this.getNjodbOptions())
    this.feedsDb = new njodb.Database(this.FeedsPath, this.getNjodbOptions())
    return this.init()
  }
// Get previous server version before loading DB to check whether a db migration is required
// returns null if server was not upgraded
checkPreviousVersion() {
return this.settingsDb.select(() => true).then((results) => {
if (results.data && results.data.length) {
const serverSettings = results.data.find(s => s.id === 'server-settings')
if (serverSettings && serverSettings.version && serverSettings.version !== version) {
return serverSettings.version
}
}
return null
})
}
createRootUser(username, pash, token) {
const newRoot = new User({
id: 'root',
type: 'root',
username,
pash,
token,
isActive: true,
createdAt: Date.now()
})
return this.insertEntity('user', newRoot)
}
  // Load all entity stores, fix file ownership, and create any missing
  // settings objects on first run. Must complete before the db is used.
  async init() {
    await this.load()

    // Set file ownership for all files created by db
    await filePerms.setDefault(global.ConfigPath, true)

    if (!this.serverSettings) { // Create first load server settings
      this.serverSettings = new ServerSettings()
      await this.insertEntity('settings', this.serverSettings)
    }
    if (!this.notificationSettings) {
      // First load: create default notification settings
      this.notificationSettings = new NotificationSettings()
      await this.insertEntity('settings', this.notificationSettings)
    }
    if (!this.emailSettings) {
      // First load: create default email settings
      this.emailSettings = new EmailSettings()
      await this.insertEntity('settings', this.emailSettings)
    }
    // Expose a plain-object snapshot globally for synchronous access
    global.ServerSettings = this.serverSettings.toJSON()
  }
  // Read every njodb store in parallel into the in-memory caches.
  // Also detects a server upgrade via serverSettings.version and, for
  // upgrades from before 2.1.3, migrates coverAspectRatio into each
  // library's settings.
  async load() {
    const p1 = this.libraryItemsDb.select(() => true).then((results) => {
      this.libraryItems = results.data.map(a => new LibraryItem(a))
      Logger.info(`[DB] ${this.libraryItems.length} Library Items Loaded`)
    })
    const p2 = this.usersDb.select(() => true).then((results) => {
      this.users = results.data.map(u => new User(u))
      Logger.info(`[DB] ${this.users.length} Users Loaded`)
    })
    const p3 = this.librariesDb.select(() => true).then((results) => {
      this.libraries = results.data.map(l => new Library(l))
      // Keep libraries in user-defined display order
      this.libraries.sort((a, b) => a.displayOrder - b.displayOrder)
      Logger.info(`[DB] ${this.libraries.length} Libraries Loaded`)
    })
    const p4 = this.settingsDb.select(() => true).then(async (results) => {
      if (results.data && results.data.length) {
        this.settings = results.data
        const serverSettings = this.settings.find(s => s.id === 'server-settings')
        if (serverSettings) {
          this.serverSettings = new ServerSettings(serverSettings)

          // Check if server was upgraded
          if (!this.serverSettings.version || this.serverSettings.version !== version) {
            this.previousVersion = this.serverSettings.version || '1.0.0'

            // Library settings and server settings updated in 2.1.3 - run migration
            if (this.previousVersion.localeCompare('2.1.3') < 0) {
              Logger.info(`[Db] Running servers & library settings migration`)
              // NOTE: relies on p3 having populated this.libraries first —
              // both run inside the same Promise.all; ordering is not
              // guaranteed by await here, only by njodb timing
              for (const library of this.libraries) {
                if (library.settings.coverAspectRatio !== serverSettings.coverAspectRatio) {
                  library.settings.coverAspectRatio = serverSettings.coverAspectRatio
                  await this.updateEntity('library', library)
                  Logger.debug(`[Db] Library ${library.name} migrated`)
                }
              }
            }
          }
        }
        const notificationSettings = this.settings.find(s => s.id === 'notification-settings')
        if (notificationSettings) {
          this.notificationSettings = new NotificationSettings(notificationSettings)
        }
        const emailSettings = this.settings.find(s => s.id === 'email-settings')
        if (emailSettings) {
          this.emailSettings = new EmailSettings(emailSettings)
        }
      }
    })
    const p5 = this.collectionsDb.select(() => true).then((results) => {
      this.collections = results.data.map(l => new Collection(l))
      Logger.info(`[DB] ${this.collections.length} Collections Loaded`)
    })
    const p6 = this.playlistsDb.select(() => true).then((results) => {
      this.playlists = results.data.map(l => new Playlist(l))
      Logger.info(`[DB] ${this.playlists.length} Playlists Loaded`)
    })
    const p7 = this.authorsDb.select(() => true).then((results) => {
      this.authors = results.data.map(l => new Author(l))
      Logger.info(`[DB] ${this.authors.length} Authors Loaded`)
    })
    const p8 = this.seriesDb.select(() => true).then((results) => {
      this.series = results.data.map(l => new Series(l))
      Logger.info(`[DB] ${this.series.length} Series Loaded`)
    })
    await Promise.all([p1, p2, p3, p4, p5, p6, p7, p8])

    // Update server version in server settings after an upgrade
    if (this.previousVersion) {
      this.serverSettings.version = version
      await this.updateServerSettings()
    }
  }
getLibraryItem(id) {
return this.libraryItems.find(li => li.id === id)
}
getLibraryItemsInLibrary(libraryId) {
return this.libraryItems.filter(li => li.libraryId === libraryId)
}
async updateLibraryItem(libraryItem) {
return this.updateLibraryItems([libraryItem])
}
async updateLibraryItems(libraryItems) {
await Promise.all(libraryItems.map(async (li) => {
if (li && li.saveMetadata) return li.saveMetadata()
return null
}))
const libraryItemIds = libraryItems.map(li => li.id)
return this.libraryItemsDb.update((record) => libraryItemIds.includes(record.id), (record) => {
return libraryItems.find(li => li.id === record.id)
}).then((results) => {
Logger.debug(`[DB] Library Items updated ${results.updated}`)
return true
}).catch((error) => {
Logger.error(`[DB] Library Items update failed ${error}`)
return false
})
}
async insertLibraryItem(libraryItem) {
return this.insertLibraryItems([libraryItem])
}
async insertLibraryItems(libraryItems) {
await Promise.all(libraryItems.map(async (li) => {
if (li && li.saveMetadata) return li.saveMetadata()
return null
}))
return this.libraryItemsDb.insert(libraryItems).then((results) => {
Logger.debug(`[DB] Library Items inserted ${results.inserted}`)
this.libraryItems = this.libraryItems.concat(libraryItems)
return true
}).catch((error) => {
Logger.error(`[DB] Library Items insert failed ${error}`)
return false
})
}
removeLibraryItem(id) {
return this.libraryItemsDb.delete((record) => record.id === id).then((results) => {
Logger.debug(`[DB] Deleted Library Items: ${results.deleted}`)
this.libraryItems = this.libraryItems.filter(li => li.id !== id)
}).catch((error) => {
Logger.error(`[DB] Remove Library Items Failed: ${error}`)
})
}
updateServerSettings() {
global.ServerSettings = this.serverSettings.toJSON()
return this.updateEntity('settings', this.serverSettings)
}
getAllEntities(entityName) {
const entityDb = this.getEntityDb(entityName)
return entityDb.select(() => true).then((results) => results.data).catch((error) => {
Logger.error(`[DB] Failed to get all ${entityName}`, error)
return null
})
}
insertEntities(entityName, entities) {
var entityDb = this.getEntityDb(entityName)
return entityDb.insert(entities).then((results) => {
Logger.debug(`[DB] Inserted ${results.inserted} ${entityName}`)
var arrayKey = this.getEntityArrayKey(entityName)
if (this[arrayKey]) this[arrayKey] = this[arrayKey].concat(entities)
return true
}).catch((error) => {
Logger.error(`[DB] Failed to insert ${entityName}`, error)
return false
})
}
insertEntity(entityName, entity) {
var entityDb = this.getEntityDb(entityName)
return entityDb.insert([entity]).then((results) => {
Logger.debug(`[DB] Inserted ${results.inserted} ${entityName}`)
var arrayKey = this.getEntityArrayKey(entityName)
if (this[arrayKey]) this[arrayKey].push(entity)
return true
}).catch((error) => {
Logger.error(`[DB] Failed to insert ${entityName}`, error)
return false
})
}
async bulkInsertEntities(entityName, entities, batchSize = 500) {
// Group entities in batches of size batchSize
var entityBatches = []
var batch = []
var index = 0
entities.forEach((ent) => {
batch.push(ent)
index++
if (index >= batchSize) {
entityBatches.push(batch)
index = 0
batch = []
}
})
if (batch.length) entityBatches.push(batch)
Logger.info(`[Db] bulkInsertEntities: ${entities.length} ${entityName} to ${entityBatches.length} batches of max size ${batchSize}`)
// Start inserting batches
var batchIndex = 1
for (const entityBatch of entityBatches) {
Logger.info(`[Db] bulkInsertEntities: Start inserting batch ${batchIndex} of ${entityBatch.length} for ${entityName}`)
var success = await this.insertEntities(entityName, entityBatch)
if (success) {
Logger.info(`[Db] bulkInsertEntities: Success inserting batch ${batchIndex} for ${entityName}`)
} else {
Logger.info(`[Db] bulkInsertEntities: Failed inserting batch ${batchIndex} for ${entityName}`)
}
batchIndex++
}
return true
}
updateEntities(entityName, entities) {
var entityDb = this.getEntityDb(entityName)
var entityIds = entities.map(ent => ent.id)
return entityDb.update((record) => entityIds.includes(record.id), (record) => {
return entities.find(ent => ent.id === record.id)
}).then((results) => {
Logger.debug(`[DB] Updated ${entityName}: ${results.updated}`)
var arrayKey = this.getEntityArrayKey(entityName)
if (this[arrayKey]) {
this[arrayKey] = this[arrayKey].map(e => {
if (entityIds.includes(e.id)) return entities.find(_e => _e.id === e.id)
return e
})
}
return true
}).catch((error) => {
Logger.error(`[DB] Update ${entityName} Failed: ${error}`)
return false
})
}
updateEntity(entityName, entity) {
const entityDb = this.getEntityDb(entityName)
let jsonEntity = entity
if (entity && entity.toJSON) {
jsonEntity = entity.toJSON()
}
return entityDb.update((record) => record.id === entity.id, () => jsonEntity).then((results) => {
Logger.debug(`[DB] Updated ${entityName}: ${results.updated}`)
const arrayKey = this.getEntityArrayKey(entityName)
if (this[arrayKey]) {
this[arrayKey] = this[arrayKey].map(e => {
return e.id === entity.id ? entity : e
})
}
return true
}).catch((error) => {
Logger.error(`[DB] Update entity ${entityName} Failed: ${error}`)
return false
})
}
removeEntity(entityName, entityId) {
var entityDb = this.getEntityDb(entityName)
return entityDb.delete((record) => {
return record.id === entityId
}).then((results) => {
Logger.debug(`[DB] Deleted entity ${entityName}: ${results.deleted}`)
var arrayKey = this.getEntityArrayKey(entityName)
if (this[arrayKey]) {
this[arrayKey] = this[arrayKey].filter(e => {
return e.id !== entityId
})
}
}).catch((error) => {
Logger.error(`[DB] Remove entity ${entityName} Failed: ${error}`)
})
}
removeEntities(entityName, selectFunc, silent = false) {
var entityDb = this.getEntityDb(entityName)
return entityDb.delete(selectFunc).then((results) => {
if (!silent) Logger.debug(`[DB] Deleted entities ${entityName}: ${results.deleted}`)
var arrayKey = this.getEntityArrayKey(entityName)
if (this[arrayKey]) {
this[arrayKey] = this[arrayKey].filter(e => {
return !selectFunc(e)
})
}
return results.deleted
}).catch((error) => {
Logger.error(`[DB] Remove entities ${entityName} Failed: ${error}`)
return 0
})
}
recreateLibraryItemsDb() {
return this.libraryItemsDb.drop().then((results) => {
Logger.info(`[DB] Dropped library items db`, results)
this.libraryItemsDb = new njodb.Database(this.LibraryItemsPath)
this.libraryItems = []
return true
}).catch((error) => {
Logger.error(`[DB] Failed to drop library items db`, error)
return false
})
}
getAllSessions(selectFunc = () => true) {
return this.sessionsDb.select(selectFunc).then((results) => {
return results.data || []
}).catch((error) => {
Logger.error('[Db] Failed to select sessions', error)
return []
})
}
getPlaybackSession(id) {
return this.sessionsDb.select((pb) => pb.id == id).then((results) => {
if (results.data.length) {
return new PlaybackSession(results.data[0])
}
return null
}).catch((error) => {
Logger.error('Failed to get session', error)
return null
})
}
selectUserSessions(userId) {
return this.sessionsDb.select((session) => session.userId === userId).then((results) => {
return results.data || []
}).catch((error) => {
Logger.error(`[Db] Failed to select user sessions "${userId}"`, error)
return []
})
}
// Check if server was updated and previous version was earlier than param
/**
 * @param {string} version - version string to compare against (e.g. '2.1.3')
 * @returns {boolean} true when the server was upgraded this boot AND the
 *   version it upgraded from is less than or equal to `version`
 */
checkPreviousVersionIsBefore(version) {
  if (!this.previousVersion) return false
  // true if version >= previousVersion
  // NOTE(review): localeCompare is lexicographic, not semver-aware —
  // e.g. '2.2.0'.localeCompare('2.10.0') > 0. Confirm this is acceptable.
  return version.localeCompare(this.previousVersion) >= 0
}
}
module.exports = Db

View File

@ -3,7 +3,8 @@ const { LogLevel } = require('./utils/constants')
class Logger { class Logger {
constructor() { constructor() {
this.logLevel = process.env.NODE_ENV === 'production' ? LogLevel.INFO : LogLevel.TRACE this.isDev = process.env.NODE_ENV !== 'production'
this.logLevel = !this.isDev ? LogLevel.INFO : LogLevel.TRACE
this.socketListeners = [] this.socketListeners = []
this.logManager = null this.logManager = null
@ -86,6 +87,15 @@ class Logger {
this.debug(`Set Log Level to ${this.levelString}`) this.debug(`Set Log Level to ${this.levelString}`)
} }
/**
* Only to console and only for development
* @param {...any} args
*/
dev(...args) {
if (!this.isDev) return
console.log(`[${this.timestamp}] DEV:`, ...args)
}
trace(...args) { trace(...args) {
if (this.logLevel > LogLevel.TRACE) return if (this.logLevel > LogLevel.TRACE) return
console.trace(`[${this.timestamp}] TRACE:`, ...args) console.trace(`[${this.timestamp}] TRACE:`, ...args)

View File

@ -8,18 +8,18 @@ const rateLimit = require('./libs/expressRateLimit')
const { version } = require('../package.json') const { version } = require('../package.json')
// Utils // Utils
const dbMigration = require('./utils/dbMigration')
const filePerms = require('./utils/filePerms') const filePerms = require('./utils/filePerms')
const fileUtils = require('./utils/fileUtils') const fileUtils = require('./utils/fileUtils')
const globals = require('./utils/globals')
const Logger = require('./Logger') const Logger = require('./Logger')
const Auth = require('./Auth') const Auth = require('./Auth')
const Watcher = require('./Watcher') const Watcher = require('./Watcher')
const Scanner = require('./scanner/Scanner') const Scanner = require('./scanner/Scanner')
const Db = require('./Db') const Database = require('./Database')
const SocketAuthority = require('./SocketAuthority') const SocketAuthority = require('./SocketAuthority')
const routes = require('./routes/index')
const ApiRouter = require('./routers/ApiRouter') const ApiRouter = require('./routers/ApiRouter')
const HlsRouter = require('./routers/HlsRouter') const HlsRouter = require('./routers/HlsRouter')
@ -59,30 +59,29 @@ class Server {
filePerms.setDefaultDirSync(global.MetadataPath, false) filePerms.setDefaultDirSync(global.MetadataPath, false)
} }
this.db = new Db()
this.watcher = new Watcher() this.watcher = new Watcher()
this.auth = new Auth(this.db) this.auth = new Auth()
// Managers // Managers
this.taskManager = new TaskManager() this.taskManager = new TaskManager()
this.notificationManager = new NotificationManager(this.db) this.notificationManager = new NotificationManager()
this.emailManager = new EmailManager(this.db) this.emailManager = new EmailManager()
this.backupManager = new BackupManager(this.db) this.backupManager = new BackupManager()
this.logManager = new LogManager(this.db) this.logManager = new LogManager()
this.cacheManager = new CacheManager() this.cacheManager = new CacheManager()
this.abMergeManager = new AbMergeManager(this.db, this.taskManager) this.abMergeManager = new AbMergeManager(this.taskManager)
this.playbackSessionManager = new PlaybackSessionManager(this.db) this.playbackSessionManager = new PlaybackSessionManager()
this.coverManager = new CoverManager(this.db, this.cacheManager) this.coverManager = new CoverManager(this.cacheManager)
this.podcastManager = new PodcastManager(this.db, this.watcher, this.notificationManager, this.taskManager) this.podcastManager = new PodcastManager(this.watcher, this.notificationManager, this.taskManager)
this.audioMetadataManager = new AudioMetadataMangaer(this.db, this.taskManager) this.audioMetadataManager = new AudioMetadataMangaer(this.taskManager)
this.rssFeedManager = new RssFeedManager(this.db) this.rssFeedManager = new RssFeedManager()
this.scanner = new Scanner(this.db, this.coverManager, this.taskManager) this.scanner = new Scanner(this.coverManager, this.taskManager)
this.cronManager = new CronManager(this.db, this.scanner, this.podcastManager) this.cronManager = new CronManager(this.scanner, this.podcastManager)
// Routers // Routers
this.apiRouter = new ApiRouter(this) this.apiRouter = new ApiRouter(this)
this.hlsRouter = new HlsRouter(this.db, this.auth, this.playbackSessionManager) this.hlsRouter = new HlsRouter(this.auth, this.playbackSessionManager)
Logger.logManager = this.logManager Logger.logManager = this.logManager
@ -98,38 +97,28 @@ class Server {
Logger.info('[Server] Init v' + version) Logger.info('[Server] Init v' + version)
await this.playbackSessionManager.removeOrphanStreams() await this.playbackSessionManager.removeOrphanStreams()
const previousVersion = await this.db.checkPreviousVersion() // Returns null if same server version await Database.init(false)
if (previousVersion) {
Logger.debug(`[Server] Upgraded from previous version ${previousVersion}`)
}
if (previousVersion && previousVersion.localeCompare('2.0.0') < 0) { // Old version data model migration
Logger.debug(`[Server] Previous version was < 2.0.0 - migration required`)
await dbMigration.migrate(this.db)
} else {
await this.db.init()
}
// Create token secret if does not exist (Added v2.1.0) // Create token secret if does not exist (Added v2.1.0)
if (!this.db.serverSettings.tokenSecret) { if (!Database.serverSettings.tokenSecret) {
await this.auth.initTokenSecret() await this.auth.initTokenSecret()
} }
await this.cleanUserData() // Remove invalid user item progress await this.cleanUserData() // Remove invalid user item progress
await this.purgeMetadata() // Remove metadata folders without library item await this.purgeMetadata() // Remove metadata folders without library item
await this.playbackSessionManager.removeInvalidSessions()
await this.cacheManager.ensureCachePaths() await this.cacheManager.ensureCachePaths()
await this.backupManager.init() await this.backupManager.init()
await this.logManager.init() await this.logManager.init()
await this.apiRouter.checkRemoveEmptySeries(this.db.series) // Remove empty series await this.apiRouter.checkRemoveEmptySeries(Database.series) // Remove empty series
await this.rssFeedManager.init() await this.rssFeedManager.init()
this.cronManager.init() this.cronManager.init()
if (this.db.serverSettings.scannerDisableWatcher) { if (Database.serverSettings.scannerDisableWatcher) {
Logger.info(`[Server] Watcher is disabled`) Logger.info(`[Server] Watcher is disabled`)
this.watcher.disabled = true this.watcher.disabled = true
} else { } else {
this.watcher.initWatcher(this.db.libraries) this.watcher.initWatcher(Database.libraries)
this.watcher.on('files', this.filesChanged.bind(this)) this.watcher.on('files', this.filesChanged.bind(this))
} }
} }
@ -162,19 +151,20 @@ class Server {
// Static folder // Static folder
router.use(express.static(Path.join(global.appRoot, 'static'))) router.use(express.static(Path.join(global.appRoot, 'static')))
// router.use('/api/v1', routes) // TODO: New routes
router.use('/api', this.authMiddleware.bind(this), this.apiRouter.router) router.use('/api', this.authMiddleware.bind(this), this.apiRouter.router)
router.use('/hls', this.authMiddleware.bind(this), this.hlsRouter.router) router.use('/hls', this.authMiddleware.bind(this), this.hlsRouter.router)
// RSS Feed temp route // RSS Feed temp route
router.get('/feed/:id', (req, res) => { router.get('/feed/:slug', (req, res) => {
Logger.info(`[Server] Requesting rss feed ${req.params.id}`) Logger.info(`[Server] Requesting rss feed ${req.params.slug}`)
this.rssFeedManager.getFeed(req, res) this.rssFeedManager.getFeed(req, res)
}) })
router.get('/feed/:id/cover', (req, res) => { router.get('/feed/:slug/cover', (req, res) => {
this.rssFeedManager.getFeedCover(req, res) this.rssFeedManager.getFeedCover(req, res)
}) })
router.get('/feed/:id/item/:episodeId/*', (req, res) => { router.get('/feed/:slug/item/:episodeId/*', (req, res) => {
Logger.debug(`[Server] Requesting rss feed episode ${req.params.id}/${req.params.episodeId}`) Logger.debug(`[Server] Requesting rss feed episode ${req.params.slug}/${req.params.episodeId}`)
this.rssFeedManager.getFeedItem(req, res) this.rssFeedManager.getFeedItem(req, res)
}) })
@ -203,7 +193,7 @@ class Server {
router.post('/login', this.getLoginRateLimiter(), (req, res) => this.auth.login(req, res)) router.post('/login', this.getLoginRateLimiter(), (req, res) => this.auth.login(req, res))
router.post('/logout', this.authMiddleware.bind(this), this.logout.bind(this)) router.post('/logout', this.authMiddleware.bind(this), this.logout.bind(this))
router.post('/init', (req, res) => { router.post('/init', (req, res) => {
if (this.db.hasRootUser) { if (Database.hasRootUser) {
Logger.error(`[Server] attempt to init server when server already has a root user`) Logger.error(`[Server] attempt to init server when server already has a root user`)
return res.sendStatus(500) return res.sendStatus(500)
} }
@ -213,8 +203,8 @@ class Server {
// status check for client to see if server has been initialized // status check for client to see if server has been initialized
// server has been initialized if a root user exists // server has been initialized if a root user exists
const payload = { const payload = {
isInit: this.db.hasRootUser, isInit: Database.hasRootUser,
language: this.db.serverSettings.language language: Database.serverSettings.language
} }
if (!payload.isInit) { if (!payload.isInit) {
payload.ConfigPath = global.ConfigPath payload.ConfigPath = global.ConfigPath
@ -240,10 +230,10 @@ class Server {
async initializeServer(req, res) { async initializeServer(req, res) {
Logger.info(`[Server] Initializing new server`) Logger.info(`[Server] Initializing new server`)
const newRoot = req.body.newRoot const newRoot = req.body.newRoot
let rootPash = newRoot.password ? await this.auth.hashPass(newRoot.password) : '' const rootUsername = newRoot.username || 'root'
const rootPash = newRoot.password ? await this.auth.hashPass(newRoot.password) : ''
if (!rootPash) Logger.warn(`[Server] Creating root user with no password`) if (!rootPash) Logger.warn(`[Server] Creating root user with no password`)
let rootToken = await this.auth.generateAccessToken({ userId: 'root', username: newRoot.username }) await Database.createRootUser(rootUsername, rootPash, this.auth)
await this.db.createRootUser(newRoot.username, rootPash, rootToken)
res.sendStatus(200) res.sendStatus(200)
} }
@ -261,15 +251,19 @@ class Server {
let purged = 0 let purged = 0
await Promise.all(foldersInItemsMetadata.map(async foldername => { await Promise.all(foldersInItemsMetadata.map(async foldername => {
const hasMatchingItem = this.db.libraryItems.find(ab => ab.id === foldername) const itemFullPath = fileUtils.filePathToPOSIX(Path.join(itemsMetadata, foldername))
if (!hasMatchingItem) {
const folderPath = Path.join(itemsMetadata, foldername)
Logger.debug(`[Server] Purging unused metadata ${folderPath}`)
await fs.remove(folderPath).then(() => { const hasMatchingItem = Database.libraryItems.find(li => {
if (!li.media.coverPath) return false
return itemFullPath === fileUtils.filePathToPOSIX(Path.dirname(li.media.coverPath))
})
if (!hasMatchingItem) {
Logger.debug(`[Server] Purging unused metadata ${itemFullPath}`)
await fs.remove(itemFullPath).then(() => {
purged++ purged++
}).catch((err) => { }).catch((err) => {
Logger.error(`[Server] Failed to delete folder path ${folderPath}`, err) Logger.error(`[Server] Failed to delete folder path ${itemFullPath}`, err)
}) })
} }
})) }))
@ -281,26 +275,26 @@ class Server {
// Remove user media progress with items that no longer exist & remove seriesHideFrom that no longer exist // Remove user media progress with items that no longer exist & remove seriesHideFrom that no longer exist
async cleanUserData() { async cleanUserData() {
for (let i = 0; i < this.db.users.length; i++) { for (const _user of Database.users) {
const _user = this.db.users[i]
let hasUpdated = false
if (_user.mediaProgress.length) { if (_user.mediaProgress.length) {
const lengthBefore = _user.mediaProgress.length for (const mediaProgress of _user.mediaProgress) {
_user.mediaProgress = _user.mediaProgress.filter(mp => { const libraryItem = Database.libraryItems.find(li => li.id === mediaProgress.libraryItemId)
const libraryItem = this.db.libraryItems.find(li => li.id === mp.libraryItemId) if (libraryItem && mediaProgress.episodeId) {
if (!libraryItem) return false const episode = libraryItem.media.checkHasEpisode?.(mediaProgress.episodeId)
if (mp.episodeId && (libraryItem.mediaType !== 'podcast' || !libraryItem.media.checkHasEpisode(mp.episodeId))) return false // Episode not found if (episode) continue
return true } else {
}) continue
}
if (lengthBefore > _user.mediaProgress.length) { Logger.debug(`[Server] Removing media progress ${mediaProgress.id} data from user ${_user.username}`)
Logger.debug(`[Server] Removing ${_user.mediaProgress.length - lengthBefore} media progress data from user ${_user.username}`) await Database.removeMediaProgress(mediaProgress.id)
hasUpdated = true
} }
} }
let hasUpdated = false
if (_user.seriesHideFromContinueListening.length) { if (_user.seriesHideFromContinueListening.length) {
_user.seriesHideFromContinueListening = _user.seriesHideFromContinueListening.filter(seriesId => { _user.seriesHideFromContinueListening = _user.seriesHideFromContinueListening.filter(seriesId => {
if (!this.db.series.some(se => se.id === seriesId)) { // Series removed if (!Database.series.some(se => se.id === seriesId)) { // Series removed
hasUpdated = true hasUpdated = true
return false return false
} }
@ -308,7 +302,7 @@ class Server {
}) })
} }
if (hasUpdated) { if (hasUpdated) {
await this.db.updateEntity('user', _user) await Database.updateUser(_user)
} }
} }
} }
@ -321,8 +315,8 @@ class Server {
getLoginRateLimiter() { getLoginRateLimiter() {
return rateLimit({ return rateLimit({
windowMs: this.db.serverSettings.rateLimitLoginWindow, // 5 minutes windowMs: Database.serverSettings.rateLimitLoginWindow, // 5 minutes
max: this.db.serverSettings.rateLimitLoginRequests, max: Database.serverSettings.rateLimitLoginRequests,
skipSuccessfulRequests: true, skipSuccessfulRequests: true,
onLimitReached: this.loginLimitReached onLimitReached: this.loginLimitReached
}) })

View File

@ -1,5 +1,6 @@
const SocketIO = require('socket.io') const SocketIO = require('socket.io')
const Logger = require('./Logger') const Logger = require('./Logger')
const Database = require('./Database')
class SocketAuthority { class SocketAuthority {
constructor() { constructor() {
@ -18,7 +19,7 @@ class SocketAuthority {
onlineUsersMap[client.user.id].connections++ onlineUsersMap[client.user.id].connections++
} else { } else {
onlineUsersMap[client.user.id] = { onlineUsersMap[client.user.id] = {
...client.user.toJSONForPublic(this.Server.playbackSessionManager.sessions, this.Server.db.libraryItems), ...client.user.toJSONForPublic(this.Server.playbackSessionManager.sessions, Database.libraryItems),
connections: 1 connections: 1
} }
} }
@ -107,7 +108,7 @@ class SocketAuthority {
delete this.clients[socket.id] delete this.clients[socket.id]
} else { } else {
Logger.debug('[Server] User Offline ' + _client.user.username) Logger.debug('[Server] User Offline ' + _client.user.username)
this.adminEmitter('user_offline', _client.user.toJSONForPublic(this.Server.playbackSessionManager.sessions, this.Server.db.libraryItems)) this.adminEmitter('user_offline', _client.user.toJSONForPublic(this.Server.playbackSessionManager.sessions, Database.libraryItems))
const disconnectTime = Date.now() - _client.connected_at const disconnectTime = Date.now() - _client.connected_at
Logger.info(`[Server] Socket ${socket.id} disconnected from client "${_client.user.username}" after ${disconnectTime}ms (Reason: ${reason})`) Logger.info(`[Server] Socket ${socket.id} disconnected from client "${_client.user.username}" after ${disconnectTime}ms (Reason: ${reason})`)
@ -160,11 +161,11 @@ class SocketAuthority {
Logger.debug(`[Server] User Online ${client.user.username}`) Logger.debug(`[Server] User Online ${client.user.username}`)
this.adminEmitter('user_online', client.user.toJSONForPublic(this.Server.playbackSessionManager.sessions, this.Server.db.libraryItems)) this.adminEmitter('user_online', client.user.toJSONForPublic(this.Server.playbackSessionManager.sessions, Database.libraryItems))
// Update user lastSeen // Update user lastSeen
user.lastSeen = Date.now() user.lastSeen = Date.now()
await this.Server.db.updateEntity('user', user) await Database.updateUser(user)
const initialPayload = { const initialPayload = {
userId: client.user.id, userId: client.user.id,
@ -186,7 +187,7 @@ class SocketAuthority {
if (client.user) { if (client.user) {
Logger.debug('[Server] User Offline ' + client.user.username) Logger.debug('[Server] User Offline ' + client.user.username)
this.adminEmitter('user_offline', client.user.toJSONForPublic(null, this.Server.db.libraryItems)) this.adminEmitter('user_offline', client.user.toJSONForPublic(null, Database.libraryItems))
} }
delete this.clients[socketId].user delete this.clients[socketId].user

View File

@ -4,6 +4,7 @@ const { createNewSortInstance } = require('../libs/fastSort')
const Logger = require('../Logger') const Logger = require('../Logger')
const SocketAuthority = require('../SocketAuthority') const SocketAuthority = require('../SocketAuthority')
const Database = require('../Database')
const { reqSupportsWebp } = require('../utils/index') const { reqSupportsWebp } = require('../utils/index')
@ -21,7 +22,7 @@ class AuthorController {
// Used on author landing page to include library items and items grouped in series // Used on author landing page to include library items and items grouped in series
if (include.includes('items')) { if (include.includes('items')) {
authorJson.libraryItems = this.db.libraryItems.filter(li => { authorJson.libraryItems = Database.libraryItems.filter(li => {
if (libraryId && li.libraryId !== libraryId) return false if (libraryId && li.libraryId !== libraryId) return false
if (!req.user.checkCanAccessLibraryItem(li)) return false // filter out library items user cannot access if (!req.user.checkCanAccessLibraryItem(li)) return false // filter out library items user cannot access
return li.media.metadata.hasAuthor && li.media.metadata.hasAuthor(req.author.id) return li.media.metadata.hasAuthor && li.media.metadata.hasAuthor(req.author.id)
@ -97,23 +98,29 @@ class AuthorController {
const authorNameUpdate = payload.name !== undefined && payload.name !== req.author.name const authorNameUpdate = payload.name !== undefined && payload.name !== req.author.name
// Check if author name matches another author and merge the authors // Check if author name matches another author and merge the authors
const existingAuthor = authorNameUpdate ? this.db.authors.find(au => au.id !== req.author.id && payload.name === au.name) : false const existingAuthor = authorNameUpdate ? Database.authors.find(au => au.id !== req.author.id && payload.name === au.name) : false
if (existingAuthor) { if (existingAuthor) {
const itemsWithAuthor = this.db.libraryItems.filter(li => li.mediaType === 'book' && li.media.metadata.hasAuthor(req.author.id)) const bookAuthorsToCreate = []
const itemsWithAuthor = Database.libraryItems.filter(li => li.mediaType === 'book' && li.media.metadata.hasAuthor(req.author.id))
itemsWithAuthor.forEach(libraryItem => { // Replace old author with merging author for each book itemsWithAuthor.forEach(libraryItem => { // Replace old author with merging author for each book
libraryItem.media.metadata.replaceAuthor(req.author, existingAuthor) libraryItem.media.metadata.replaceAuthor(req.author, existingAuthor)
bookAuthorsToCreate.push({
bookId: libraryItem.media.id,
authorId: existingAuthor.id
})
}) })
if (itemsWithAuthor.length) { if (itemsWithAuthor.length) {
await this.db.updateLibraryItems(itemsWithAuthor) await Database.removeBulkBookAuthors(req.author.id) // Remove all old BookAuthor
await Database.createBulkBookAuthors(bookAuthorsToCreate) // Create all new BookAuthor
SocketAuthority.emitter('items_updated', itemsWithAuthor.map(li => li.toJSONExpanded())) SocketAuthority.emitter('items_updated', itemsWithAuthor.map(li => li.toJSONExpanded()))
} }
// Remove old author // Remove old author
await this.db.removeEntity('author', req.author.id) await Database.removeAuthor(req.author.id)
SocketAuthority.emitter('author_removed', req.author.toJSON()) SocketAuthority.emitter('author_removed', req.author.toJSON())
// Send updated num books for merged author // Send updated num books for merged author
const numBooks = this.db.libraryItems.filter(li => { const numBooks = Database.libraryItems.filter(li => {
return li.media.metadata.hasAuthor && li.media.metadata.hasAuthor(existingAuthor.id) return li.media.metadata.hasAuthor && li.media.metadata.hasAuthor(existingAuthor.id)
}).length }).length
SocketAuthority.emitter('author_updated', existingAuthor.toJSONExpanded(numBooks)) SocketAuthority.emitter('author_updated', existingAuthor.toJSONExpanded(numBooks))
@ -131,18 +138,17 @@ class AuthorController {
req.author.updatedAt = Date.now() req.author.updatedAt = Date.now()
if (authorNameUpdate) { // Update author name on all books if (authorNameUpdate) { // Update author name on all books
const itemsWithAuthor = this.db.libraryItems.filter(li => li.mediaType === 'book' && li.media.metadata.hasAuthor(req.author.id)) const itemsWithAuthor = Database.libraryItems.filter(li => li.mediaType === 'book' && li.media.metadata.hasAuthor(req.author.id))
itemsWithAuthor.forEach(libraryItem => { itemsWithAuthor.forEach(libraryItem => {
libraryItem.media.metadata.updateAuthor(req.author) libraryItem.media.metadata.updateAuthor(req.author)
}) })
if (itemsWithAuthor.length) { if (itemsWithAuthor.length) {
await this.db.updateLibraryItems(itemsWithAuthor)
SocketAuthority.emitter('items_updated', itemsWithAuthor.map(li => li.toJSONExpanded())) SocketAuthority.emitter('items_updated', itemsWithAuthor.map(li => li.toJSONExpanded()))
} }
} }
await this.db.updateEntity('author', req.author) await Database.updateAuthor(req.author)
const numBooks = this.db.libraryItems.filter(li => { const numBooks = Database.libraryItems.filter(li => {
return li.media.metadata.hasAuthor && li.media.metadata.hasAuthor(req.author.id) return li.media.metadata.hasAuthor && li.media.metadata.hasAuthor(req.author.id)
}).length }).length
SocketAuthority.emitter('author_updated', req.author.toJSONExpanded(numBooks)) SocketAuthority.emitter('author_updated', req.author.toJSONExpanded(numBooks))
@ -159,7 +165,7 @@ class AuthorController {
var q = (req.query.q || '').toLowerCase() var q = (req.query.q || '').toLowerCase()
if (!q) return res.json([]) if (!q) return res.json([])
var limit = (req.query.limit && !isNaN(req.query.limit)) ? Number(req.query.limit) : 25 var limit = (req.query.limit && !isNaN(req.query.limit)) ? Number(req.query.limit) : 25
var authors = this.db.authors.filter(au => au.name.toLowerCase().includes(q)) var authors = Database.authors.filter(au => au.name.toLowerCase().includes(q))
authors = authors.slice(0, limit) authors = authors.slice(0, limit)
res.json({ res.json({
results: authors results: authors
@ -204,8 +210,8 @@ class AuthorController {
if (hasUpdates) { if (hasUpdates) {
req.author.updatedAt = Date.now() req.author.updatedAt = Date.now()
await this.db.updateEntity('author', req.author) await Database.updateAuthor(req.author)
const numBooks = this.db.libraryItems.filter(li => { const numBooks = Database.libraryItems.filter(li => {
return li.media.metadata.hasAuthor && li.media.metadata.hasAuthor(req.author.id) return li.media.metadata.hasAuthor && li.media.metadata.hasAuthor(req.author.id)
}).length }).length
SocketAuthority.emitter('author_updated', req.author.toJSONExpanded(numBooks)) SocketAuthority.emitter('author_updated', req.author.toJSONExpanded(numBooks))
@ -238,7 +244,7 @@ class AuthorController {
} }
middleware(req, res, next) { middleware(req, res, next) {
var author = this.db.authors.find(au => au.id === req.params.id) const author = Database.authors.find(au => au.id === req.params.id)
if (!author) return res.sendStatus(404) if (!author) return res.sendStatus(404)
if (req.method == 'DELETE' && !req.user.canDelete) { if (req.method == 'DELETE' && !req.user.canDelete) {

View File

@ -43,9 +43,8 @@ class BackupController {
res.sendFile(req.backup.fullPath) res.sendFile(req.backup.fullPath)
} }
async apply(req, res) { apply(req, res) {
await this.backupManager.requestApplyBackup(req.backup) this.backupManager.requestApplyBackup(req.backup, res)
res.sendStatus(200)
} }
middleware(req, res, next) { middleware(req, res, next) {

View File

@ -8,7 +8,6 @@ class CacheController {
if (!req.user.isAdminOrUp) { if (!req.user.isAdminOrUp) {
return res.sendStatus(403) return res.sendStatus(403)
} }
Logger.info(`[MiscController] Purging all cache`)
await this.cacheManager.purgeAll() await this.cacheManager.purgeAll()
res.sendStatus(200) res.sendStatus(200)
} }
@ -18,7 +17,6 @@ class CacheController {
if (!req.user.isAdminOrUp) { if (!req.user.isAdminOrUp) {
return res.sendStatus(403) return res.sendStatus(403)
} }
Logger.info(`[MiscController] Purging items cache`)
await this.cacheManager.purgeItems() await this.cacheManager.purgeItems()
res.sendStatus(200) res.sendStatus(200)
} }

View File

@ -1,5 +1,6 @@
const Logger = require('../Logger') const Logger = require('../Logger')
const SocketAuthority = require('../SocketAuthority') const SocketAuthority = require('../SocketAuthority')
const Database = require('../Database')
const Collection = require('../objects/Collection') const Collection = require('../objects/Collection')
@ -13,22 +14,22 @@ class CollectionController {
if (!success) { if (!success) {
return res.status(500).send('Invalid collection data') return res.status(500).send('Invalid collection data')
} }
var jsonExpanded = newCollection.toJSONExpanded(this.db.libraryItems) var jsonExpanded = newCollection.toJSONExpanded(Database.libraryItems)
await this.db.insertEntity('collection', newCollection) await Database.createCollection(newCollection)
SocketAuthority.emitter('collection_added', jsonExpanded) SocketAuthority.emitter('collection_added', jsonExpanded)
res.json(jsonExpanded) res.json(jsonExpanded)
} }
findAll(req, res) { findAll(req, res) {
res.json({ res.json({
collections: this.db.collections.map(c => c.toJSONExpanded(this.db.libraryItems)) collections: Database.collections.map(c => c.toJSONExpanded(Database.libraryItems))
}) })
} }
findOne(req, res) { findOne(req, res) {
const includeEntities = (req.query.include || '').split(',') const includeEntities = (req.query.include || '').split(',')
const collectionExpanded = req.collection.toJSONExpanded(this.db.libraryItems) const collectionExpanded = req.collection.toJSONExpanded(Database.libraryItems)
if (includeEntities.includes('rssfeed')) { if (includeEntities.includes('rssfeed')) {
const feedData = this.rssFeedManager.findFeedForEntityId(collectionExpanded.id) const feedData = this.rssFeedManager.findFeedForEntityId(collectionExpanded.id)
@ -41,9 +42,9 @@ class CollectionController {
async update(req, res) { async update(req, res) {
const collection = req.collection const collection = req.collection
const wasUpdated = collection.update(req.body) const wasUpdated = collection.update(req.body)
const jsonExpanded = collection.toJSONExpanded(this.db.libraryItems) const jsonExpanded = collection.toJSONExpanded(Database.libraryItems)
if (wasUpdated) { if (wasUpdated) {
await this.db.updateEntity('collection', collection) await Database.updateCollection(collection)
SocketAuthority.emitter('collection_updated', jsonExpanded) SocketAuthority.emitter('collection_updated', jsonExpanded)
} }
res.json(jsonExpanded) res.json(jsonExpanded)
@ -51,19 +52,19 @@ class CollectionController {
async delete(req, res) { async delete(req, res) {
const collection = req.collection const collection = req.collection
const jsonExpanded = collection.toJSONExpanded(this.db.libraryItems) const jsonExpanded = collection.toJSONExpanded(Database.libraryItems)
// Close rss feed - remove from db and emit socket event // Close rss feed - remove from db and emit socket event
await this.rssFeedManager.closeFeedForEntityId(collection.id) await this.rssFeedManager.closeFeedForEntityId(collection.id)
await this.db.removeEntity('collection', collection.id) await Database.removeCollection(collection.id)
SocketAuthority.emitter('collection_removed', jsonExpanded) SocketAuthority.emitter('collection_removed', jsonExpanded)
res.sendStatus(200) res.sendStatus(200)
} }
async addBook(req, res) { async addBook(req, res) {
const collection = req.collection const collection = req.collection
const libraryItem = this.db.libraryItems.find(li => li.id === req.body.id) const libraryItem = Database.libraryItems.find(li => li.id === req.body.id)
if (!libraryItem) { if (!libraryItem) {
return res.status(500).send('Book not found') return res.status(500).send('Book not found')
} }
@ -74,8 +75,14 @@ class CollectionController {
return res.status(500).send('Book already in collection') return res.status(500).send('Book already in collection')
} }
collection.addBook(req.body.id) collection.addBook(req.body.id)
const jsonExpanded = collection.toJSONExpanded(this.db.libraryItems) const jsonExpanded = collection.toJSONExpanded(Database.libraryItems)
await this.db.updateEntity('collection', collection)
const collectionBook = {
collectionId: collection.id,
bookId: libraryItem.media.id,
order: collection.books.length
}
await Database.createCollectionBook(collectionBook)
SocketAuthority.emitter('collection_updated', jsonExpanded) SocketAuthority.emitter('collection_updated', jsonExpanded)
res.json(jsonExpanded) res.json(jsonExpanded)
} }
@ -83,13 +90,18 @@ class CollectionController {
// DELETE: api/collections/:id/book/:bookId // DELETE: api/collections/:id/book/:bookId
async removeBook(req, res) { async removeBook(req, res) {
const collection = req.collection const collection = req.collection
const libraryItem = Database.libraryItems.find(li => li.id === req.params.bookId)
if (!libraryItem) {
return res.sendStatus(404)
}
if (collection.books.includes(req.params.bookId)) { if (collection.books.includes(req.params.bookId)) {
collection.removeBook(req.params.bookId) collection.removeBook(req.params.bookId)
var jsonExpanded = collection.toJSONExpanded(this.db.libraryItems) const jsonExpanded = collection.toJSONExpanded(Database.libraryItems)
await this.db.updateEntity('collection', collection)
SocketAuthority.emitter('collection_updated', jsonExpanded) SocketAuthority.emitter('collection_updated', jsonExpanded)
await Database.updateCollection(collection)
} }
res.json(collection.toJSONExpanded(this.db.libraryItems)) res.json(collection.toJSONExpanded(Database.libraryItems))
} }
// POST: api/collections/:id/batch/add // POST: api/collections/:id/batch/add
@ -98,19 +110,30 @@ class CollectionController {
if (!req.body.books || !req.body.books.length) { if (!req.body.books || !req.body.books.length) {
return res.status(500).send('Invalid request body') return res.status(500).send('Invalid request body')
} }
var bookIdsToAdd = req.body.books const bookIdsToAdd = req.body.books
var hasUpdated = false const collectionBooksToAdd = []
for (let i = 0; i < bookIdsToAdd.length; i++) { let hasUpdated = false
if (!collection.books.includes(bookIdsToAdd[i])) {
collection.addBook(bookIdsToAdd[i]) let order = collection.books.length
for (const libraryItemId of bookIdsToAdd) {
const libraryItem = Database.libraryItems.find(li => li.id === libraryItemId)
if (!libraryItem) continue
if (!collection.books.includes(libraryItemId)) {
collection.addBook(libraryItemId)
collectionBooksToAdd.push({
collectionId: collection.id,
bookId: libraryItem.media.id,
order: order++
})
hasUpdated = true hasUpdated = true
} }
} }
if (hasUpdated) { if (hasUpdated) {
await this.db.updateEntity('collection', collection) await Database.createBulkCollectionBooks(collectionBooksToAdd)
SocketAuthority.emitter('collection_updated', collection.toJSONExpanded(this.db.libraryItems)) SocketAuthority.emitter('collection_updated', collection.toJSONExpanded(Database.libraryItems))
} }
res.json(collection.toJSONExpanded(this.db.libraryItems)) res.json(collection.toJSONExpanded(Database.libraryItems))
} }
// POST: api/collections/:id/batch/remove // POST: api/collections/:id/batch/remove
@ -120,23 +143,26 @@ class CollectionController {
return res.status(500).send('Invalid request body') return res.status(500).send('Invalid request body')
} }
var bookIdsToRemove = req.body.books var bookIdsToRemove = req.body.books
var hasUpdated = false let hasUpdated = false
for (let i = 0; i < bookIdsToRemove.length; i++) { for (const libraryItemId of bookIdsToRemove) {
if (collection.books.includes(bookIdsToRemove[i])) { const libraryItem = Database.libraryItems.find(li => li.id === libraryItemId)
collection.removeBook(bookIdsToRemove[i]) if (!libraryItem) continue
if (collection.books.includes(libraryItemId)) {
collection.removeBook(libraryItemId)
hasUpdated = true hasUpdated = true
} }
} }
if (hasUpdated) { if (hasUpdated) {
await this.db.updateEntity('collection', collection) await Database.updateCollection(collection)
SocketAuthority.emitter('collection_updated', collection.toJSONExpanded(this.db.libraryItems)) SocketAuthority.emitter('collection_updated', collection.toJSONExpanded(Database.libraryItems))
} }
res.json(collection.toJSONExpanded(this.db.libraryItems)) res.json(collection.toJSONExpanded(Database.libraryItems))
} }
middleware(req, res, next) { middleware(req, res, next) {
if (req.params.id) { if (req.params.id) {
const collection = this.db.collections.find(c => c.id === req.params.id) const collection = Database.collections.find(c => c.id === req.params.id)
if (!collection) { if (!collection) {
return res.status(404).send('Collection not found') return res.status(404).send('Collection not found')
} }

View File

@ -1,22 +1,23 @@
const Logger = require('../Logger') const Logger = require('../Logger')
const SocketAuthority = require('../SocketAuthority') const SocketAuthority = require('../SocketAuthority')
const Database = require('../Database')
class EmailController { class EmailController {
constructor() { } constructor() { }
getSettings(req, res) { getSettings(req, res) {
res.json({ res.json({
settings: this.db.emailSettings settings: Database.emailSettings
}) })
} }
async updateSettings(req, res) { async updateSettings(req, res) {
const updated = this.db.emailSettings.update(req.body) const updated = Database.emailSettings.update(req.body)
if (updated) { if (updated) {
await this.db.updateEntity('settings', this.db.emailSettings) await Database.updateSetting(Database.emailSettings)
} }
res.json({ res.json({
settings: this.db.emailSettings settings: Database.emailSettings
}) })
} }
@ -36,24 +37,24 @@ class EmailController {
} }
} }
const updated = this.db.emailSettings.update({ const updated = Database.emailSettings.update({
ereaderDevices ereaderDevices
}) })
if (updated) { if (updated) {
await this.db.updateEntity('settings', this.db.emailSettings) await Database.updateSetting(Database.emailSettings)
SocketAuthority.adminEmitter('ereader-devices-updated', { SocketAuthority.adminEmitter('ereader-devices-updated', {
ereaderDevices: this.db.emailSettings.ereaderDevices ereaderDevices: Database.emailSettings.ereaderDevices
}) })
} }
res.json({ res.json({
ereaderDevices: this.db.emailSettings.ereaderDevices ereaderDevices: Database.emailSettings.ereaderDevices
}) })
} }
async sendEBookToDevice(req, res) { async sendEBookToDevice(req, res) {
Logger.debug(`[EmailController] Send ebook to device request for libraryItemId=${req.body.libraryItemId}, deviceName=${req.body.deviceName}`) Logger.debug(`[EmailController] Send ebook to device request for libraryItemId=${req.body.libraryItemId}, deviceName=${req.body.deviceName}`)
const libraryItem = this.db.getLibraryItem(req.body.libraryItemId) const libraryItem = Database.getLibraryItem(req.body.libraryItemId)
if (!libraryItem) { if (!libraryItem) {
return res.status(404).send('Library item not found') return res.status(404).send('Library item not found')
} }
@ -67,7 +68,7 @@ class EmailController {
return res.status(404).send('EBook file not found') return res.status(404).send('EBook file not found')
} }
const device = this.db.emailSettings.getEReaderDevice(req.body.deviceName) const device = Database.emailSettings.getEReaderDevice(req.body.deviceName)
if (!device) { if (!device) {
return res.status(404).send('E-reader device not found') return res.status(404).send('E-reader device not found')
} }

View File

@ -1,5 +1,6 @@
const Path = require('path') const Path = require('path')
const Logger = require('../Logger') const Logger = require('../Logger')
const Database = require('../Database')
const fs = require('../libs/fsExtra') const fs = require('../libs/fsExtra')
class FileSystemController { class FileSystemController {
@ -16,7 +17,7 @@ class FileSystemController {
}) })
// Do not include existing mapped library paths in response // Do not include existing mapped library paths in response
this.db.libraries.forEach(lib => { Database.libraries.forEach(lib => {
lib.folders.forEach((folder) => { lib.folders.forEach((folder) => {
let dir = folder.fullPath let dir = folder.fullPath
if (dir.includes(global.appRoot)) dir = dir.replace(global.appRoot, '') if (dir.includes(global.appRoot)) dir = dir.replace(global.appRoot, '')

View File

@ -9,6 +9,9 @@ const { sort, createNewSortInstance } = require('../libs/fastSort')
const naturalSort = createNewSortInstance({ const naturalSort = createNewSortInstance({
comparer: new Intl.Collator(undefined, { numeric: true, sensitivity: 'base' }).compare comparer: new Intl.Collator(undefined, { numeric: true, sensitivity: 'base' }).compare
}) })
const Database = require('../Database')
class LibraryController { class LibraryController {
constructor() { } constructor() { }
@ -40,13 +43,14 @@ class LibraryController {
} }
const library = new Library() const library = new Library()
newLibraryPayload.displayOrder = this.db.libraries.length + 1
newLibraryPayload.displayOrder = Database.libraries.map(li => li.displayOrder).sort((a, b) => a - b).pop() + 1
library.setData(newLibraryPayload) library.setData(newLibraryPayload)
await this.db.insertEntity('library', library) await Database.createLibrary(library)
// Only emit to users with access to library // Only emit to users with access to library
const userFilter = (user) => { const userFilter = (user) => {
return user.checkCanAccessLibrary && user.checkCanAccessLibrary(library.id) return user.checkCanAccessLibrary?.(library.id)
} }
SocketAuthority.emitter('library_added', library.toJSON(), userFilter) SocketAuthority.emitter('library_added', library.toJSON(), userFilter)
@ -58,14 +62,15 @@ class LibraryController {
findAll(req, res) { findAll(req, res) {
const librariesAccessible = req.user.librariesAccessible || [] const librariesAccessible = req.user.librariesAccessible || []
if (librariesAccessible && librariesAccessible.length) { if (librariesAccessible.length) {
return res.json({ return res.json({
libraries: this.db.libraries.filter(lib => librariesAccessible.includes(lib.id)).map(lib => lib.toJSON()) libraries: Database.libraries.filter(lib => librariesAccessible.includes(lib.id)).map(lib => lib.toJSON())
}) })
} }
res.json({ res.json({
libraries: this.db.libraries.map(lib => lib.toJSON()) libraries: Database.libraries.map(lib => lib.toJSON())
// libraries: Database.libraries.map(lib => lib.toJSON())
}) })
} }
@ -75,7 +80,7 @@ class LibraryController {
return res.json({ return res.json({
filterdata: libraryHelpers.getDistinctFilterDataNew(req.libraryItems), filterdata: libraryHelpers.getDistinctFilterDataNew(req.libraryItems),
issues: req.libraryItems.filter(li => li.hasIssues).length, issues: req.libraryItems.filter(li => li.hasIssues).length,
numUserPlaylists: this.db.playlists.filter(p => p.userId === req.user.id && p.libraryId === req.library.id).length, numUserPlaylists: Database.playlists.filter(p => p.userId === req.user.id && p.libraryId === req.library.id).length,
library: req.library library: req.library
}) })
} }
@ -128,14 +133,14 @@ class LibraryController {
this.cronManager.updateLibraryScanCron(library) this.cronManager.updateLibraryScanCron(library)
// Remove libraryItems no longer in library // Remove libraryItems no longer in library
const itemsToRemove = this.db.libraryItems.filter(li => li.libraryId === library.id && !library.checkFullPathInLibrary(li.path)) const itemsToRemove = Database.libraryItems.filter(li => li.libraryId === library.id && !library.checkFullPathInLibrary(li.path))
if (itemsToRemove.length) { if (itemsToRemove.length) {
Logger.info(`[Scanner] Updating library, removing ${itemsToRemove.length} items`) Logger.info(`[Scanner] Updating library, removing ${itemsToRemove.length} items`)
for (let i = 0; i < itemsToRemove.length; i++) { for (let i = 0; i < itemsToRemove.length; i++) {
await this.handleDeleteLibraryItem(itemsToRemove[i]) await this.handleDeleteLibraryItem(itemsToRemove[i])
} }
} }
await this.db.updateEntity('library', library) await Database.updateLibrary(library)
// Only emit to users with access to library // Only emit to users with access to library
const userFilter = (user) => { const userFilter = (user) => {
@ -153,21 +158,21 @@ class LibraryController {
this.watcher.removeLibrary(library) this.watcher.removeLibrary(library)
// Remove collections for library // Remove collections for library
const collections = this.db.collections.filter(c => c.libraryId === library.id) const collections = Database.collections.filter(c => c.libraryId === library.id)
for (const collection of collections) { for (const collection of collections) {
Logger.info(`[Server] deleting collection "${collection.name}" for library "${library.name}"`) Logger.info(`[Server] deleting collection "${collection.name}" for library "${library.name}"`)
await this.db.removeEntity('collection', collection.id) await Database.removeCollection(collection.id)
} }
// Remove items in this library // Remove items in this library
const libraryItems = this.db.libraryItems.filter(li => li.libraryId === library.id) const libraryItems = Database.libraryItems.filter(li => li.libraryId === library.id)
Logger.info(`[Server] deleting library "${library.name}" with ${libraryItems.length} items"`) Logger.info(`[Server] deleting library "${library.name}" with ${libraryItems.length} items"`)
for (let i = 0; i < libraryItems.length; i++) { for (let i = 0; i < libraryItems.length; i++) {
await this.handleDeleteLibraryItem(libraryItems[i]) await this.handleDeleteLibraryItem(libraryItems[i])
} }
const libraryJson = library.toJSON() const libraryJson = library.toJSON()
await this.db.removeEntity('library', library.id) await Database.removeLibrary(library.id)
SocketAuthority.emitter('library_removed', libraryJson) SocketAuthority.emitter('library_removed', libraryJson)
return res.json(libraryJson) return res.json(libraryJson)
} }
@ -197,7 +202,7 @@ class LibraryController {
// Step 1 - Filter the retrieved library items // Step 1 - Filter the retrieved library items
let filterSeries = null let filterSeries = null
if (payload.filterBy) { if (payload.filterBy) {
libraryItems = libraryHelpers.getFilteredLibraryItems(libraryItems, payload.filterBy, req.user, this.rssFeedManager.feedsArray) libraryItems = libraryHelpers.getFilteredLibraryItems(libraryItems, payload.filterBy, req.user, Database.feeds)
payload.total = libraryItems.length payload.total = libraryItems.length
// Determining if we are filtering titles by a series, and if so, which series // Determining if we are filtering titles by a series, and if so, which series
@ -209,7 +214,7 @@ class LibraryController {
// If also filtering by series, will not collapse the filtered series as this would lead // If also filtering by series, will not collapse the filtered series as this would lead
// to series having a collapsed series that is just that series. // to series having a collapsed series that is just that series.
if (payload.collapseseries) { if (payload.collapseseries) {
let collapsedItems = libraryHelpers.collapseBookSeries(libraryItems, this.db.series, filterSeries, req.library.settings.hideSingleBookSeries) let collapsedItems = libraryHelpers.collapseBookSeries(libraryItems, Database.series, filterSeries, req.library.settings.hideSingleBookSeries)
if (!(collapsedItems.length == 1 && collapsedItems[0].collapsedSeries)) { if (!(collapsedItems.length == 1 && collapsedItems[0].collapsedSeries)) {
libraryItems = collapsedItems libraryItems = collapsedItems
@ -237,7 +242,7 @@ class LibraryController {
// If no series sequence then fallback to sorting by title (or collapsed series name for sub-series) // If no series sequence then fallback to sorting by title (or collapsed series name for sub-series)
sortArray.push({ sortArray.push({
asc: (li) => { asc: (li) => {
if (this.db.serverSettings.sortingIgnorePrefix) { if (Database.serverSettings.sortingIgnorePrefix) {
return li.collapsedSeries?.nameIgnorePrefix || li.media.metadata.titleIgnorePrefix return li.collapsedSeries?.nameIgnorePrefix || li.media.metadata.titleIgnorePrefix
} else { } else {
return li.collapsedSeries?.name || li.media.metadata.title return li.collapsedSeries?.name || li.media.metadata.title
@ -247,15 +252,11 @@ class LibraryController {
} }
if (payload.sortBy) { if (payload.sortBy) {
// old sort key TODO: should be mutated in dbMigration
let sortKey = payload.sortBy let sortKey = payload.sortBy
if (sortKey.startsWith('book.')) {
sortKey = sortKey.replace('book.', 'media.metadata.')
}
// Handle server setting sortingIgnorePrefix // Handle server setting sortingIgnorePrefix
const sortByTitle = sortKey === 'media.metadata.title' const sortByTitle = sortKey === 'media.metadata.title'
if (sortByTitle && this.db.serverSettings.sortingIgnorePrefix) { if (sortByTitle && Database.serverSettings.sortingIgnorePrefix) {
// BookMetadata.js has titleIgnorePrefix getter // BookMetadata.js has titleIgnorePrefix getter
sortKey += 'IgnorePrefix' sortKey += 'IgnorePrefix'
} }
@ -267,7 +268,7 @@ class LibraryController {
sortArray.push({ sortArray.push({
asc: (li) => { asc: (li) => {
if (li.collapsedSeries) { if (li.collapsedSeries) {
return this.db.serverSettings.sortingIgnorePrefix ? return Database.serverSettings.sortingIgnorePrefix ?
li.collapsedSeries.nameIgnorePrefix : li.collapsedSeries.nameIgnorePrefix :
li.collapsedSeries.name li.collapsedSeries.name
} else { } else {
@ -284,7 +285,7 @@ class LibraryController {
if (mediaIsBook && sortBySequence) { if (mediaIsBook && sortBySequence) {
return li.media.metadata.getSeries(filterSeries).sequence return li.media.metadata.getSeries(filterSeries).sequence
} else if (mediaIsBook && sortByTitle && li.collapsedSeries) { } else if (mediaIsBook && sortByTitle && li.collapsedSeries) {
return this.db.serverSettings.sortingIgnorePrefix ? return Database.serverSettings.sortingIgnorePrefix ?
li.collapsedSeries.nameIgnorePrefix : li.collapsedSeries.nameIgnorePrefix :
li.collapsedSeries.name li.collapsedSeries.name
} else { } else {
@ -411,7 +412,7 @@ class LibraryController {
include: include.join(',') include: include.join(',')
} }
let series = libraryHelpers.getSeriesFromBooks(libraryItems, this.db.series, null, payload.filterBy, req.user, payload.minified, req.library.settings.hideSingleBookSeries) let series = libraryHelpers.getSeriesFromBooks(libraryItems, Database.series, null, payload.filterBy, req.user, payload.minified, req.library.settings.hideSingleBookSeries)
const direction = payload.sortDesc ? 'desc' : 'asc' const direction = payload.sortDesc ? 'desc' : 'asc'
series = naturalSort(series).by([ series = naturalSort(series).by([
@ -428,7 +429,7 @@ class LibraryController {
} else if (payload.sortBy === 'lastBookAdded') { } else if (payload.sortBy === 'lastBookAdded') {
return Math.max(...(se.books).map(x => x.addedAt), 0) return Math.max(...(se.books).map(x => x.addedAt), 0)
} else { // sort by name } else { // sort by name
return this.db.serverSettings.sortingIgnorePrefix ? se.nameIgnorePrefixSort : se.name return Database.serverSettings.sortingIgnorePrefix ? se.nameIgnorePrefixSort : se.name
} }
} }
} }
@ -467,7 +468,7 @@ class LibraryController {
async getSeriesForLibrary(req, res) { async getSeriesForLibrary(req, res) {
const include = (req.query.include || '').split(',').map(v => v.trim().toLowerCase()).filter(v => !!v) const include = (req.query.include || '').split(',').map(v => v.trim().toLowerCase()).filter(v => !!v)
const series = this.db.series.find(se => se.id === req.params.seriesId) const series = Database.series.find(se => se.id === req.params.seriesId)
if (!series) return res.sendStatus(404) if (!series) return res.sendStatus(404)
const libraryItemsInSeries = req.libraryItems.filter(li => li.media.metadata.hasSeries?.(series.id)) const libraryItemsInSeries = req.libraryItems.filter(li => li.media.metadata.hasSeries?.(series.id))
@ -508,7 +509,7 @@ class LibraryController {
include: include.join(',') include: include.join(',')
} }
let collections = this.db.collections.filter(c => c.libraryId === req.library.id).map(c => { let collections = Database.collections.filter(c => c.libraryId === req.library.id).map(c => {
const expanded = c.toJSONExpanded(libraryItems, payload.minified) const expanded = c.toJSONExpanded(libraryItems, payload.minified)
// If all books restricted to user in this collection then hide this collection // If all books restricted to user in this collection then hide this collection
@ -535,7 +536,7 @@ class LibraryController {
// api/libraries/:id/playlists // api/libraries/:id/playlists
async getUserPlaylistsForLibrary(req, res) { async getUserPlaylistsForLibrary(req, res) {
let playlistsForUser = this.db.playlists.filter(p => p.userId === req.user.id && p.libraryId === req.library.id).map(p => p.toJSONExpanded(this.db.libraryItems)) let playlistsForUser = Database.playlists.filter(p => p.userId === req.user.id && p.libraryId === req.library.id).map(p => p.toJSONExpanded(Database.libraryItems))
const payload = { const payload = {
results: [], results: [],
@ -559,7 +560,7 @@ class LibraryController {
return res.status(400).send('Invalid library media type') return res.status(400).send('Invalid library media type')
} }
let libraryItems = this.db.libraryItems.filter(li => li.libraryId === req.library.id) let libraryItems = Database.libraryItems.filter(li => li.libraryId === req.library.id)
let albums = libraryHelpers.groupMusicLibraryItemsIntoAlbums(libraryItems) let albums = libraryHelpers.groupMusicLibraryItemsIntoAlbums(libraryItems)
albums = naturalSort(albums).asc(a => a.title) // Alphabetical by album title albums = naturalSort(albums).asc(a => a.title) // Alphabetical by album title
@ -603,26 +604,26 @@ class LibraryController {
var orderdata = req.body var orderdata = req.body
var hasUpdates = false var hasUpdates = false
for (let i = 0; i < orderdata.length; i++) { for (let i = 0; i < orderdata.length; i++) {
var library = this.db.libraries.find(lib => lib.id === orderdata[i].id) var library = Database.libraries.find(lib => lib.id === orderdata[i].id)
if (!library) { if (!library) {
Logger.error(`[LibraryController] Invalid library not found in reorder ${orderdata[i].id}`) Logger.error(`[LibraryController] Invalid library not found in reorder ${orderdata[i].id}`)
return res.sendStatus(500) return res.sendStatus(500)
} }
if (library.update({ displayOrder: orderdata[i].newOrder })) { if (library.update({ displayOrder: orderdata[i].newOrder })) {
hasUpdates = true hasUpdates = true
await this.db.updateEntity('library', library) await Database.updateLibrary(library)
} }
} }
if (hasUpdates) { if (hasUpdates) {
this.db.libraries.sort((a, b) => a.displayOrder - b.displayOrder) Database.libraries.sort((a, b) => a.displayOrder - b.displayOrder)
Logger.debug(`[LibraryController] Updated library display orders`) Logger.debug(`[LibraryController] Updated library display orders`)
} else { } else {
Logger.debug(`[LibraryController] Library orders were up to date`) Logger.debug(`[LibraryController] Library orders were up to date`)
} }
res.json({ res.json({
libraries: this.db.libraries.map(lib => lib.toJSON()) libraries: Database.libraries.map(lib => lib.toJSON())
}) })
} }
@ -652,7 +653,7 @@ class LibraryController {
if (queryResult.series?.length) { if (queryResult.series?.length) {
queryResult.series.forEach((se) => { queryResult.series.forEach((se) => {
if (!seriesMatches[se.id]) { if (!seriesMatches[se.id]) {
const _series = this.db.series.find(_se => _se.id === se.id) const _series = Database.series.find(_se => _se.id === se.id)
if (_series) seriesMatches[se.id] = { series: _series.toJSON(), books: [li.toJSON()] } if (_series) seriesMatches[se.id] = { series: _series.toJSON(), books: [li.toJSON()] }
} else { } else {
seriesMatches[se.id].books.push(li.toJSON()) seriesMatches[se.id].books.push(li.toJSON())
@ -662,7 +663,7 @@ class LibraryController {
if (queryResult.authors?.length) { if (queryResult.authors?.length) {
queryResult.authors.forEach((au) => { queryResult.authors.forEach((au) => {
if (!authorMatches[au.id]) { if (!authorMatches[au.id]) {
const _author = this.db.authors.find(_au => _au.id === au.id) const _author = Database.authors.find(_au => _au.id === au.id)
if (_author) { if (_author) {
authorMatches[au.id] = _author.toJSON() authorMatches[au.id] = _author.toJSON()
authorMatches[au.id].numBooks = 1 authorMatches[au.id].numBooks = 1
@ -729,7 +730,7 @@ class LibraryController {
if (li.media.metadata.authors && li.media.metadata.authors.length) { if (li.media.metadata.authors && li.media.metadata.authors.length) {
li.media.metadata.authors.forEach((au) => { li.media.metadata.authors.forEach((au) => {
if (!authors[au.id]) { if (!authors[au.id]) {
const _author = this.db.authors.find(_au => _au.id === au.id) const _author = Database.authors.find(_au => _au.id === au.id)
if (_author) { if (_author) {
authors[au.id] = _author.toJSON() authors[au.id] = _author.toJSON()
authors[au.id].numBooks = 1 authors[au.id].numBooks = 1
@ -791,7 +792,7 @@ class LibraryController {
} }
if (itemsUpdated.length) { if (itemsUpdated.length) {
await this.db.updateLibraryItems(itemsUpdated) await Database.updateBulkBooks(itemsUpdated.map(i => i.media))
SocketAuthority.emitter('items_updated', itemsUpdated.map(li => li.toJSONExpanded())) SocketAuthority.emitter('items_updated', itemsUpdated.map(li => li.toJSONExpanded()))
} }
@ -816,7 +817,7 @@ class LibraryController {
} }
if (itemsUpdated.length) { if (itemsUpdated.length) {
await this.db.updateLibraryItems(itemsUpdated) await Database.updateBulkBooks(itemsUpdated.map(i => i.media))
SocketAuthority.emitter('items_updated', itemsUpdated.map(li => li.toJSONExpanded())) SocketAuthority.emitter('items_updated', itemsUpdated.map(li => li.toJSONExpanded()))
} }
@ -900,12 +901,12 @@ class LibraryController {
return res.sendStatus(403) return res.sendStatus(403)
} }
const library = this.db.libraries.find(lib => lib.id === req.params.id) const library = Database.libraries.find(lib => lib.id === req.params.id)
if (!library) { if (!library) {
return res.status(404).send('Library not found') return res.status(404).send('Library not found')
} }
req.library = library req.library = library
req.libraryItems = this.db.libraryItems.filter(li => { req.libraryItems = Database.libraryItems.filter(li => {
return li.libraryId === library.id && req.user.checkCanAccessLibraryItem(li) return li.libraryId === library.id && req.user.checkCanAccessLibraryItem(li)
}) })
next() next()

View File

@ -2,9 +2,10 @@ const Path = require('path')
const fs = require('../libs/fsExtra') const fs = require('../libs/fsExtra')
const Logger = require('../Logger') const Logger = require('../Logger')
const SocketAuthority = require('../SocketAuthority') const SocketAuthority = require('../SocketAuthority')
const Database = require('../Database')
const zipHelpers = require('../utils/zipHelpers') const zipHelpers = require('../utils/zipHelpers')
const { reqSupportsWebp, isNullOrNaN } = require('../utils/index') const { reqSupportsWebp } = require('../utils/index')
const { ScanResult } = require('../utils/constants') const { ScanResult } = require('../utils/constants')
const { getAudioMimeTypeFromExtname } = require('../utils/fileUtils') const { getAudioMimeTypeFromExtname } = require('../utils/fileUtils')
@ -31,7 +32,7 @@ class LibraryItemController {
if (item.mediaType == 'book') { if (item.mediaType == 'book') {
if (includeEntities.includes('authors')) { if (includeEntities.includes('authors')) {
item.media.metadata.authors = item.media.metadata.authors.map(au => { item.media.metadata.authors = item.media.metadata.authors.map(au => {
var author = this.db.authors.find(_au => _au.id === au.id) var author = Database.authors.find(_au => _au.id === au.id)
if (!author) return null if (!author) return null
return { return {
...author ...author
@ -61,7 +62,7 @@ class LibraryItemController {
const hasUpdates = libraryItem.update(req.body) const hasUpdates = libraryItem.update(req.body)
if (hasUpdates) { if (hasUpdates) {
Logger.debug(`[LibraryItemController] Updated now saving`) Logger.debug(`[LibraryItemController] Updated now saving`)
await this.db.updateLibraryItem(libraryItem) await Database.updateLibraryItem(libraryItem)
SocketAuthority.emitter('item_updated', libraryItem.toJSONExpanded()) SocketAuthority.emitter('item_updated', libraryItem.toJSONExpanded())
} }
res.json(libraryItem.toJSON()) res.json(libraryItem.toJSON())
@ -104,7 +105,7 @@ class LibraryItemController {
// Book specific // Book specific
if (libraryItem.isBook) { if (libraryItem.isBook) {
await this.createAuthorsAndSeriesForItemUpdate(mediaPayload) await this.createAuthorsAndSeriesForItemUpdate(mediaPayload, libraryItem.libraryId)
} }
// Podcast specific // Podcast specific
@ -139,7 +140,7 @@ class LibraryItemController {
} }
Logger.debug(`[LibraryItemController] Updated library item media ${libraryItem.media.metadata.title}`) Logger.debug(`[LibraryItemController] Updated library item media ${libraryItem.media.metadata.title}`)
await this.db.updateLibraryItem(libraryItem) await Database.updateLibraryItem(libraryItem)
SocketAuthority.emitter('item_updated', libraryItem.toJSONExpanded()) SocketAuthority.emitter('item_updated', libraryItem.toJSONExpanded())
} }
res.json({ res.json({
@ -174,7 +175,7 @@ class LibraryItemController {
return res.status(500).send('Unknown error occurred') return res.status(500).send('Unknown error occurred')
} }
await this.db.updateLibraryItem(libraryItem) await Database.updateLibraryItem(libraryItem)
SocketAuthority.emitter('item_updated', libraryItem.toJSONExpanded()) SocketAuthority.emitter('item_updated', libraryItem.toJSONExpanded())
res.json({ res.json({
success: true, success: true,
@ -194,7 +195,7 @@ class LibraryItemController {
return res.status(500).send(validationResult.error) return res.status(500).send(validationResult.error)
} }
if (validationResult.updated) { if (validationResult.updated) {
await this.db.updateLibraryItem(libraryItem) await Database.updateLibraryItem(libraryItem)
SocketAuthority.emitter('item_updated', libraryItem.toJSONExpanded()) SocketAuthority.emitter('item_updated', libraryItem.toJSONExpanded())
} }
res.json({ res.json({
@ -210,7 +211,7 @@ class LibraryItemController {
if (libraryItem.media.coverPath) { if (libraryItem.media.coverPath) {
libraryItem.updateMediaCover('') libraryItem.updateMediaCover('')
await this.cacheManager.purgeCoverCache(libraryItem.id) await this.cacheManager.purgeCoverCache(libraryItem.id)
await this.db.updateLibraryItem(libraryItem) await Database.updateLibraryItem(libraryItem)
SocketAuthority.emitter('item_updated', libraryItem.toJSONExpanded()) SocketAuthority.emitter('item_updated', libraryItem.toJSONExpanded())
} }
@ -282,7 +283,7 @@ class LibraryItemController {
return res.sendStatus(500) return res.sendStatus(500)
} }
libraryItem.media.updateAudioTracks(orderedFileData) libraryItem.media.updateAudioTracks(orderedFileData)
await this.db.updateLibraryItem(libraryItem) await Database.updateLibraryItem(libraryItem)
SocketAuthority.emitter('item_updated', libraryItem.toJSONExpanded()) SocketAuthority.emitter('item_updated', libraryItem.toJSONExpanded())
res.json(libraryItem.toJSON()) res.json(libraryItem.toJSON())
} }
@ -309,7 +310,7 @@ class LibraryItemController {
return res.sendStatus(500) return res.sendStatus(500)
} }
const itemsToDelete = this.db.libraryItems.filter(li => libraryItemIds.includes(li.id)) const itemsToDelete = Database.libraryItems.filter(li => libraryItemIds.includes(li.id))
if (!itemsToDelete.length) { if (!itemsToDelete.length) {
return res.sendStatus(404) return res.sendStatus(404)
} }
@ -338,15 +339,15 @@ class LibraryItemController {
for (let i = 0; i < updatePayloads.length; i++) { for (let i = 0; i < updatePayloads.length; i++) {
var mediaPayload = updatePayloads[i].mediaPayload var mediaPayload = updatePayloads[i].mediaPayload
var libraryItem = this.db.libraryItems.find(_li => _li.id === updatePayloads[i].id) var libraryItem = Database.libraryItems.find(_li => _li.id === updatePayloads[i].id)
if (!libraryItem) return null if (!libraryItem) return null
await this.createAuthorsAndSeriesForItemUpdate(mediaPayload) await this.createAuthorsAndSeriesForItemUpdate(mediaPayload, libraryItem.libraryId)
var hasUpdates = libraryItem.media.update(mediaPayload) var hasUpdates = libraryItem.media.update(mediaPayload)
if (hasUpdates) { if (hasUpdates) {
Logger.debug(`[LibraryItemController] Updated library item media ${libraryItem.media.metadata.title}`) Logger.debug(`[LibraryItemController] Updated library item media ${libraryItem.media.metadata.title}`)
await this.db.updateLibraryItem(libraryItem) await Database.updateLibraryItem(libraryItem)
SocketAuthority.emitter('item_updated', libraryItem.toJSONExpanded()) SocketAuthority.emitter('item_updated', libraryItem.toJSONExpanded())
itemsUpdated++ itemsUpdated++
} }
@ -366,7 +367,7 @@ class LibraryItemController {
} }
const libraryItems = [] const libraryItems = []
libraryItemIds.forEach((lid) => { libraryItemIds.forEach((lid) => {
const li = this.db.libraryItems.find(_li => _li.id === lid) const li = Database.libraryItems.find(_li => _li.id === lid)
if (li) libraryItems.push(li.toJSONExpanded()) if (li) libraryItems.push(li.toJSONExpanded())
}) })
res.json({ res.json({
@ -389,7 +390,7 @@ class LibraryItemController {
return res.sendStatus(400) return res.sendStatus(400)
} }
const libraryItems = req.body.libraryItemIds.map(lid => this.db.getLibraryItem(lid)).filter(li => li) const libraryItems = req.body.libraryItemIds.map(lid => Database.getLibraryItem(lid)).filter(li => li)
if (!libraryItems?.length) { if (!libraryItems?.length) {
return res.sendStatus(400) return res.sendStatus(400)
} }
@ -424,7 +425,7 @@ class LibraryItemController {
return res.sendStatus(400) return res.sendStatus(400)
} }
const libraryItems = req.body.libraryItemIds.map(lid => this.db.getLibraryItem(lid)).filter(li => li) const libraryItems = req.body.libraryItemIds.map(lid => Database.getLibraryItem(lid)).filter(li => li)
if (!libraryItems?.length) { if (!libraryItems?.length) {
return res.sendStatus(400) return res.sendStatus(400)
} }
@ -440,18 +441,6 @@ class LibraryItemController {
} }
} }
// DELETE: api/items/all
async deleteAll(req, res) {
if (!req.user.isAdminOrUp) {
Logger.warn('User other than admin attempted to delete all library items', req.user)
return res.sendStatus(403)
}
Logger.info('Removing all Library Items')
var success = await this.db.recreateLibraryItemsDb()
if (success) res.sendStatus(200)
else res.sendStatus(500)
}
// POST: api/items/:id/scan (admin) // POST: api/items/:id/scan (admin)
async scan(req, res) { async scan(req, res) {
if (!req.user.isAdminOrUp) { if (!req.user.isAdminOrUp) {
@ -504,7 +493,7 @@ class LibraryItemController {
const chapters = req.body.chapters || [] const chapters = req.body.chapters || []
const wasUpdated = req.libraryItem.media.updateChapters(chapters) const wasUpdated = req.libraryItem.media.updateChapters(chapters)
if (wasUpdated) { if (wasUpdated) {
await this.db.updateLibraryItem(req.libraryItem) await Database.updateLibraryItem(req.libraryItem)
SocketAuthority.emitter('item_updated', req.libraryItem.toJSONExpanded()) SocketAuthority.emitter('item_updated', req.libraryItem.toJSONExpanded())
} }
@ -586,7 +575,7 @@ class LibraryItemController {
} }
} }
req.libraryItem.updatedAt = Date.now() req.libraryItem.updatedAt = Date.now()
await this.db.updateLibraryItem(req.libraryItem) await Database.updateLibraryItem(req.libraryItem)
SocketAuthority.emitter('item_updated', req.libraryItem.toJSONExpanded()) SocketAuthority.emitter('item_updated', req.libraryItem.toJSONExpanded())
res.sendStatus(200) res.sendStatus(200)
} }
@ -682,13 +671,13 @@ class LibraryItemController {
} }
req.libraryItem.updatedAt = Date.now() req.libraryItem.updatedAt = Date.now()
await this.db.updateLibraryItem(req.libraryItem) await Database.updateLibraryItem(req.libraryItem)
SocketAuthority.emitter('item_updated', req.libraryItem.toJSONExpanded()) SocketAuthority.emitter('item_updated', req.libraryItem.toJSONExpanded())
res.sendStatus(200) res.sendStatus(200)
} }
middleware(req, res, next) { middleware(req, res, next) {
req.libraryItem = this.db.libraryItems.find(li => li.id === req.params.id) req.libraryItem = Database.libraryItems.find(li => li.id === req.params.id)
if (!req.libraryItem?.media) return res.sendStatus(404) if (!req.libraryItem?.media) return res.sendStatus(404)
// Check user can access this library item // Check user can access this library item

View File

@ -1,7 +1,8 @@
const Logger = require('../Logger') const Logger = require('../Logger')
const SocketAuthority = require('../SocketAuthority') const SocketAuthority = require('../SocketAuthority')
const Database = require('../Database')
const { sort } = require('../libs/fastSort') const { sort } = require('../libs/fastSort')
const { isObject, toNumber } = require('../utils/index') const { toNumber } = require('../utils/index')
class MeController { class MeController {
constructor() { } constructor() { }
@ -33,7 +34,7 @@ class MeController {
// GET: api/me/listening-stats // GET: api/me/listening-stats
async getListeningStats(req, res) { async getListeningStats(req, res) {
var listeningStats = await this.getUserListeningStatsHelpers(req.user.id) const listeningStats = await this.getUserListeningStatsHelpers(req.user.id)
res.json(listeningStats) res.json(listeningStats)
} }
@ -51,21 +52,21 @@ class MeController {
if (!req.user.removeMediaProgress(req.params.id)) { if (!req.user.removeMediaProgress(req.params.id)) {
return res.sendStatus(200) return res.sendStatus(200)
} }
await this.db.updateEntity('user', req.user) await Database.removeMediaProgress(req.params.id)
SocketAuthority.clientEmitter(req.user.id, 'user_updated', req.user.toJSONForBrowser()) SocketAuthority.clientEmitter(req.user.id, 'user_updated', req.user.toJSONForBrowser())
res.sendStatus(200) res.sendStatus(200)
} }
// PATCH: api/me/progress/:id // PATCH: api/me/progress/:id
async createUpdateMediaProgress(req, res) { async createUpdateMediaProgress(req, res) {
var libraryItem = this.db.libraryItems.find(ab => ab.id === req.params.id) const libraryItem = Database.libraryItems.find(ab => ab.id === req.params.id)
if (!libraryItem) { if (!libraryItem) {
return res.status(404).send('Item not found') return res.status(404).send('Item not found')
} }
var wasUpdated = req.user.createUpdateMediaProgress(libraryItem, req.body) if (req.user.createUpdateMediaProgress(libraryItem, req.body)) {
if (wasUpdated) { const mediaProgress = req.user.getMediaProgress(libraryItem.id)
await this.db.updateEntity('user', req.user) if (mediaProgress) await Database.upsertMediaProgress(mediaProgress)
SocketAuthority.clientEmitter(req.user.id, 'user_updated', req.user.toJSONForBrowser()) SocketAuthority.clientEmitter(req.user.id, 'user_updated', req.user.toJSONForBrowser())
} }
res.sendStatus(200) res.sendStatus(200)
@ -73,8 +74,8 @@ class MeController {
// PATCH: api/me/progress/:id/:episodeId // PATCH: api/me/progress/:id/:episodeId
async createUpdateEpisodeMediaProgress(req, res) { async createUpdateEpisodeMediaProgress(req, res) {
var episodeId = req.params.episodeId const episodeId = req.params.episodeId
var libraryItem = this.db.libraryItems.find(ab => ab.id === req.params.id) const libraryItem = Database.libraryItems.find(ab => ab.id === req.params.id)
if (!libraryItem) { if (!libraryItem) {
return res.status(404).send('Item not found') return res.status(404).send('Item not found')
} }
@ -83,9 +84,9 @@ class MeController {
return res.status(404).send('Episode not found') return res.status(404).send('Episode not found')
} }
var wasUpdated = req.user.createUpdateMediaProgress(libraryItem, req.body, episodeId) if (req.user.createUpdateMediaProgress(libraryItem, req.body, episodeId)) {
if (wasUpdated) { const mediaProgress = req.user.getMediaProgress(libraryItem.id, episodeId)
await this.db.updateEntity('user', req.user) if (mediaProgress) await Database.upsertMediaProgress(mediaProgress)
SocketAuthority.clientEmitter(req.user.id, 'user_updated', req.user.toJSONForBrowser()) SocketAuthority.clientEmitter(req.user.id, 'user_updated', req.user.toJSONForBrowser())
} }
res.sendStatus(200) res.sendStatus(200)
@ -93,24 +94,26 @@ class MeController {
// PATCH: api/me/progress/batch/update // PATCH: api/me/progress/batch/update
async batchUpdateMediaProgress(req, res) { async batchUpdateMediaProgress(req, res) {
var itemProgressPayloads = req.body const itemProgressPayloads = req.body
if (!itemProgressPayloads || !itemProgressPayloads.length) { if (!itemProgressPayloads?.length) {
return res.status(400).send('Missing request payload') return res.status(400).send('Missing request payload')
} }
var shouldUpdate = false let shouldUpdate = false
itemProgressPayloads.forEach((itemProgress) => { for (const itemProgress of itemProgressPayloads) {
var libraryItem = this.db.libraryItems.find(li => li.id === itemProgress.libraryItemId) // Make sure this library item exists const libraryItem = Database.libraryItems.find(li => li.id === itemProgress.libraryItemId) // Make sure this library item exists
if (libraryItem) { if (libraryItem) {
var wasUpdated = req.user.createUpdateMediaProgress(libraryItem, itemProgress, itemProgress.episodeId) if (req.user.createUpdateMediaProgress(libraryItem, itemProgress, itemProgress.episodeId)) {
if (wasUpdated) shouldUpdate = true const mediaProgress = req.user.getMediaProgress(libraryItem.id, itemProgress.episodeId)
if (mediaProgress) await Database.upsertMediaProgress(mediaProgress)
shouldUpdate = true
}
} else { } else {
Logger.error(`[MeController] batchUpdateMediaProgress: Library Item does not exist ${itemProgress.id}`) Logger.error(`[MeController] batchUpdateMediaProgress: Library Item does not exist ${itemProgress.id}`)
} }
}) }
if (shouldUpdate) { if (shouldUpdate) {
await this.db.updateEntity('user', req.user)
SocketAuthority.clientEmitter(req.user.id, 'user_updated', req.user.toJSONForBrowser()) SocketAuthority.clientEmitter(req.user.id, 'user_updated', req.user.toJSONForBrowser())
} }
@ -119,18 +122,18 @@ class MeController {
// POST: api/me/item/:id/bookmark // POST: api/me/item/:id/bookmark
async createBookmark(req, res) { async createBookmark(req, res) {
var libraryItem = this.db.libraryItems.find(li => li.id === req.params.id) var libraryItem = Database.libraryItems.find(li => li.id === req.params.id)
if (!libraryItem) return res.sendStatus(404) if (!libraryItem) return res.sendStatus(404)
const { time, title } = req.body const { time, title } = req.body
var bookmark = req.user.createBookmark(libraryItem.id, time, title) var bookmark = req.user.createBookmark(libraryItem.id, time, title)
await this.db.updateEntity('user', req.user) await Database.updateUser(req.user)
SocketAuthority.clientEmitter(req.user.id, 'user_updated', req.user.toJSONForBrowser()) SocketAuthority.clientEmitter(req.user.id, 'user_updated', req.user.toJSONForBrowser())
res.json(bookmark) res.json(bookmark)
} }
// PATCH: api/me/item/:id/bookmark // PATCH: api/me/item/:id/bookmark
async updateBookmark(req, res) { async updateBookmark(req, res) {
var libraryItem = this.db.libraryItems.find(li => li.id === req.params.id) var libraryItem = Database.libraryItems.find(li => li.id === req.params.id)
if (!libraryItem) return res.sendStatus(404) if (!libraryItem) return res.sendStatus(404)
const { time, title } = req.body const { time, title } = req.body
if (!req.user.findBookmark(libraryItem.id, time)) { if (!req.user.findBookmark(libraryItem.id, time)) {
@ -139,14 +142,14 @@ class MeController {
} }
var bookmark = req.user.updateBookmark(libraryItem.id, time, title) var bookmark = req.user.updateBookmark(libraryItem.id, time, title)
if (!bookmark) return res.sendStatus(500) if (!bookmark) return res.sendStatus(500)
await this.db.updateEntity('user', req.user) await Database.updateUser(req.user)
SocketAuthority.clientEmitter(req.user.id, 'user_updated', req.user.toJSONForBrowser()) SocketAuthority.clientEmitter(req.user.id, 'user_updated', req.user.toJSONForBrowser())
res.json(bookmark) res.json(bookmark)
} }
// DELETE: api/me/item/:id/bookmark/:time // DELETE: api/me/item/:id/bookmark/:time
async removeBookmark(req, res) { async removeBookmark(req, res) {
var libraryItem = this.db.libraryItems.find(li => li.id === req.params.id) var libraryItem = Database.libraryItems.find(li => li.id === req.params.id)
if (!libraryItem) return res.sendStatus(404) if (!libraryItem) return res.sendStatus(404)
var time = Number(req.params.time) var time = Number(req.params.time)
if (isNaN(time)) return res.sendStatus(500) if (isNaN(time)) return res.sendStatus(500)
@ -156,7 +159,7 @@ class MeController {
return res.sendStatus(404) return res.sendStatus(404)
} }
req.user.removeBookmark(libraryItem.id, time) req.user.removeBookmark(libraryItem.id, time)
await this.db.updateEntity('user', req.user) await Database.updateUser(req.user)
SocketAuthority.clientEmitter(req.user.id, 'user_updated', req.user.toJSONForBrowser()) SocketAuthority.clientEmitter(req.user.id, 'user_updated', req.user.toJSONForBrowser())
res.sendStatus(200) res.sendStatus(200)
} }
@ -178,16 +181,16 @@ class MeController {
return res.sendStatus(500) return res.sendStatus(500)
} }
const updatedLocalMediaProgress = [] const updatedLocalMediaProgress = []
var numServerProgressUpdates = 0 let numServerProgressUpdates = 0
const updatedServerMediaProgress = [] const updatedServerMediaProgress = []
const localMediaProgress = req.body.localMediaProgress || [] const localMediaProgress = req.body.localMediaProgress || []
localMediaProgress.forEach(localProgress => { for (const localProgress of localMediaProgress) {
if (!localProgress.libraryItemId) { if (!localProgress.libraryItemId) {
Logger.error(`[MeController] syncLocalMediaProgress invalid local media progress object`, localProgress) Logger.error(`[MeController] syncLocalMediaProgress invalid local media progress object`, localProgress)
return return
} }
var libraryItem = this.db.getLibraryItem(localProgress.libraryItemId) const libraryItem = Database.getLibraryItem(localProgress.libraryItemId)
if (!libraryItem) { if (!libraryItem) {
Logger.error(`[MeController] syncLocalMediaProgress invalid local media progress object no library item`, localProgress) Logger.error(`[MeController] syncLocalMediaProgress invalid local media progress object no library item`, localProgress)
return return
@ -199,12 +202,14 @@ class MeController {
Logger.debug(`[MeController] syncLocalMediaProgress local progress is new - creating ${localProgress.id}`) Logger.debug(`[MeController] syncLocalMediaProgress local progress is new - creating ${localProgress.id}`)
req.user.createUpdateMediaProgress(libraryItem, localProgress, localProgress.episodeId) req.user.createUpdateMediaProgress(libraryItem, localProgress, localProgress.episodeId)
mediaProgress = req.user.getMediaProgress(localProgress.libraryItemId, localProgress.episodeId) mediaProgress = req.user.getMediaProgress(localProgress.libraryItemId, localProgress.episodeId)
if (mediaProgress) await Database.upsertMediaProgress(mediaProgress)
updatedServerMediaProgress.push(mediaProgress) updatedServerMediaProgress.push(mediaProgress)
numServerProgressUpdates++ numServerProgressUpdates++
} else if (mediaProgress.lastUpdate < localProgress.lastUpdate) { } else if (mediaProgress.lastUpdate < localProgress.lastUpdate) {
Logger.debug(`[MeController] syncLocalMediaProgress local progress is more recent - updating ${mediaProgress.id}`) Logger.debug(`[MeController] syncLocalMediaProgress local progress is more recent - updating ${mediaProgress.id}`)
req.user.createUpdateMediaProgress(libraryItem, localProgress, localProgress.episodeId) req.user.createUpdateMediaProgress(libraryItem, localProgress, localProgress.episodeId)
mediaProgress = req.user.getMediaProgress(localProgress.libraryItemId, localProgress.episodeId) mediaProgress = req.user.getMediaProgress(localProgress.libraryItemId, localProgress.episodeId)
if (mediaProgress) await Database.upsertMediaProgress(mediaProgress)
updatedServerMediaProgress.push(mediaProgress) updatedServerMediaProgress.push(mediaProgress)
numServerProgressUpdates++ numServerProgressUpdates++
} else if (mediaProgress.lastUpdate > localProgress.lastUpdate) { } else if (mediaProgress.lastUpdate > localProgress.lastUpdate) {
@ -222,11 +227,10 @@ class MeController {
} else { } else {
Logger.debug(`[MeController] syncLocalMediaProgress server and local are in sync - ${mediaProgress.id}`) Logger.debug(`[MeController] syncLocalMediaProgress server and local are in sync - ${mediaProgress.id}`)
} }
}) }
Logger.debug(`[MeController] syncLocalMediaProgress server updates = ${numServerProgressUpdates}, local updates = ${updatedLocalMediaProgress.length}`) Logger.debug(`[MeController] syncLocalMediaProgress server updates = ${numServerProgressUpdates}, local updates = ${updatedLocalMediaProgress.length}`)
if (numServerProgressUpdates > 0) { if (numServerProgressUpdates > 0) {
await this.db.updateEntity('user', req.user)
SocketAuthority.clientEmitter(req.user.id, 'user_updated', req.user.toJSONForBrowser()) SocketAuthority.clientEmitter(req.user.id, 'user_updated', req.user.toJSONForBrowser())
} }
@ -244,7 +248,7 @@ class MeController {
let itemsInProgress = [] let itemsInProgress = []
for (const mediaProgress of req.user.mediaProgress) { for (const mediaProgress of req.user.mediaProgress) {
if (!mediaProgress.isFinished && (mediaProgress.progress > 0 || mediaProgress.ebookProgress > 0)) { if (!mediaProgress.isFinished && (mediaProgress.progress > 0 || mediaProgress.ebookProgress > 0)) {
const libraryItem = this.db.getLibraryItem(mediaProgress.libraryItemId) const libraryItem = Database.getLibraryItem(mediaProgress.libraryItemId)
if (libraryItem) { if (libraryItem) {
if (mediaProgress.episodeId && libraryItem.mediaType === 'podcast') { if (mediaProgress.episodeId && libraryItem.mediaType === 'podcast') {
const episode = libraryItem.media.episodes.find(ep => ep.id === mediaProgress.episodeId) const episode = libraryItem.media.episodes.find(ep => ep.id === mediaProgress.episodeId)
@ -274,7 +278,7 @@ class MeController {
// GET: api/me/series/:id/remove-from-continue-listening // GET: api/me/series/:id/remove-from-continue-listening
async removeSeriesFromContinueListening(req, res) { async removeSeriesFromContinueListening(req, res) {
const series = this.db.series.find(se => se.id === req.params.id) const series = Database.series.find(se => se.id === req.params.id)
if (!series) { if (!series) {
Logger.error(`[MeController] removeSeriesFromContinueListening: Series ${req.params.id} not found`) Logger.error(`[MeController] removeSeriesFromContinueListening: Series ${req.params.id} not found`)
return res.sendStatus(404) return res.sendStatus(404)
@ -282,7 +286,7 @@ class MeController {
const hasUpdated = req.user.addSeriesToHideFromContinueListening(req.params.id) const hasUpdated = req.user.addSeriesToHideFromContinueListening(req.params.id)
if (hasUpdated) { if (hasUpdated) {
await this.db.updateEntity('user', req.user) await Database.updateUser(req.user)
SocketAuthority.clientEmitter(req.user.id, 'user_updated', req.user.toJSONForBrowser()) SocketAuthority.clientEmitter(req.user.id, 'user_updated', req.user.toJSONForBrowser())
} }
res.json(req.user.toJSONForBrowser()) res.json(req.user.toJSONForBrowser())
@ -290,7 +294,7 @@ class MeController {
// GET: api/me/series/:id/readd-to-continue-listening // GET: api/me/series/:id/readd-to-continue-listening
async readdSeriesFromContinueListening(req, res) { async readdSeriesFromContinueListening(req, res) {
const series = this.db.series.find(se => se.id === req.params.id) const series = Database.series.find(se => se.id === req.params.id)
if (!series) { if (!series) {
Logger.error(`[MeController] readdSeriesFromContinueListening: Series ${req.params.id} not found`) Logger.error(`[MeController] readdSeriesFromContinueListening: Series ${req.params.id} not found`)
return res.sendStatus(404) return res.sendStatus(404)
@ -298,7 +302,7 @@ class MeController {
const hasUpdated = req.user.removeSeriesFromHideFromContinueListening(req.params.id) const hasUpdated = req.user.removeSeriesFromHideFromContinueListening(req.params.id)
if (hasUpdated) { if (hasUpdated) {
await this.db.updateEntity('user', req.user) await Database.updateUser(req.user)
SocketAuthority.clientEmitter(req.user.id, 'user_updated', req.user.toJSONForBrowser()) SocketAuthority.clientEmitter(req.user.id, 'user_updated', req.user.toJSONForBrowser())
} }
res.json(req.user.toJSONForBrowser()) res.json(req.user.toJSONForBrowser())
@ -308,7 +312,7 @@ class MeController {
async removeItemFromContinueListening(req, res) { async removeItemFromContinueListening(req, res) {
const hasUpdated = req.user.removeProgressFromContinueListening(req.params.id) const hasUpdated = req.user.removeProgressFromContinueListening(req.params.id)
if (hasUpdated) { if (hasUpdated) {
await this.db.updateEntity('user', req.user) await Database.updateUser(req.user)
SocketAuthority.clientEmitter(req.user.id, 'user_updated', req.user.toJSONForBrowser()) SocketAuthority.clientEmitter(req.user.id, 'user_updated', req.user.toJSONForBrowser())
} }
res.json(req.user.toJSONForBrowser()) res.json(req.user.toJSONForBrowser())

View File

@ -2,6 +2,7 @@ const Path = require('path')
const fs = require('../libs/fsExtra') const fs = require('../libs/fsExtra')
const Logger = require('../Logger') const Logger = require('../Logger')
const SocketAuthority = require('../SocketAuthority') const SocketAuthority = require('../SocketAuthority')
const Database = require('../Database')
const filePerms = require('../utils/filePerms') const filePerms = require('../utils/filePerms')
const patternValidation = require('../libs/nodeCron/pattern-validation') const patternValidation = require('../libs/nodeCron/pattern-validation')
@ -30,7 +31,7 @@ class MiscController {
var libraryId = req.body.library var libraryId = req.body.library
var folderId = req.body.folder var folderId = req.body.folder
var library = this.db.libraries.find(lib => lib.id === libraryId) var library = Database.libraries.find(lib => lib.id === libraryId)
if (!library) { if (!library) {
return res.status(404).send(`Library not found with id ${libraryId}`) return res.status(404).send(`Library not found with id ${libraryId}`)
} }
@ -111,23 +112,23 @@ class MiscController {
Logger.error('User other than admin attempting to update server settings', req.user) Logger.error('User other than admin attempting to update server settings', req.user)
return res.sendStatus(403) return res.sendStatus(403)
} }
var settingsUpdate = req.body const settingsUpdate = req.body
if (!settingsUpdate || !isObject(settingsUpdate)) { if (!settingsUpdate || !isObject(settingsUpdate)) {
return res.status(500).send('Invalid settings update object') return res.status(500).send('Invalid settings update object')
} }
var madeUpdates = this.db.serverSettings.update(settingsUpdate) const madeUpdates = Database.serverSettings.update(settingsUpdate)
if (madeUpdates) { if (madeUpdates) {
await Database.updateServerSettings()
// If backup schedule is updated - update backup manager // If backup schedule is updated - update backup manager
if (settingsUpdate.backupSchedule !== undefined) { if (settingsUpdate.backupSchedule !== undefined) {
this.backupManager.updateCronSchedule() this.backupManager.updateCronSchedule()
} }
await this.db.updateServerSettings()
} }
return res.json({ return res.json({
success: true, success: true,
serverSettings: this.db.serverSettings.toJSONForBrowser() serverSettings: Database.serverSettings.toJSONForBrowser()
}) })
} }
@ -147,7 +148,7 @@ class MiscController {
return res.sendStatus(404) return res.sendStatus(404)
} }
const tags = [] const tags = []
this.db.libraryItems.forEach((li) => { Database.libraryItems.forEach((li) => {
if (li.media.tags && li.media.tags.length) { if (li.media.tags && li.media.tags.length) {
li.media.tags.forEach((tag) => { li.media.tags.forEach((tag) => {
if (!tags.includes(tag)) tags.push(tag) if (!tags.includes(tag)) tags.push(tag)
@ -176,7 +177,7 @@ class MiscController {
let tagMerged = false let tagMerged = false
let numItemsUpdated = 0 let numItemsUpdated = 0
for (const li of this.db.libraryItems) { for (const li of Database.libraryItems) {
if (!li.media.tags || !li.media.tags.length) continue if (!li.media.tags || !li.media.tags.length) continue
if (li.media.tags.includes(newTag)) tagMerged = true // new tag is an existing tag so this is a merge if (li.media.tags.includes(newTag)) tagMerged = true // new tag is an existing tag so this is a merge
@ -187,7 +188,7 @@ class MiscController {
li.media.tags.push(newTag) // Add new tag li.media.tags.push(newTag) // Add new tag
} }
Logger.debug(`[MiscController] Rename tag "${tag}" to "${newTag}" for item "${li.media.metadata.title}"`) Logger.debug(`[MiscController] Rename tag "${tag}" to "${newTag}" for item "${li.media.metadata.title}"`)
await this.db.updateLibraryItem(li) await Database.updateLibraryItem(li)
SocketAuthority.emitter('item_updated', li.toJSONExpanded()) SocketAuthority.emitter('item_updated', li.toJSONExpanded())
numItemsUpdated++ numItemsUpdated++
} }
@ -209,13 +210,13 @@ class MiscController {
const tag = Buffer.from(decodeURIComponent(req.params.tag), 'base64').toString() const tag = Buffer.from(decodeURIComponent(req.params.tag), 'base64').toString()
let numItemsUpdated = 0 let numItemsUpdated = 0
for (const li of this.db.libraryItems) { for (const li of Database.libraryItems) {
if (!li.media.tags || !li.media.tags.length) continue if (!li.media.tags || !li.media.tags.length) continue
if (li.media.tags.includes(tag)) { if (li.media.tags.includes(tag)) {
li.media.tags = li.media.tags.filter(t => t !== tag) li.media.tags = li.media.tags.filter(t => t !== tag)
Logger.debug(`[MiscController] Remove tag "${tag}" from item "${li.media.metadata.title}"`) Logger.debug(`[MiscController] Remove tag "${tag}" from item "${li.media.metadata.title}"`)
await this.db.updateLibraryItem(li) await Database.updateLibraryItem(li)
SocketAuthority.emitter('item_updated', li.toJSONExpanded()) SocketAuthority.emitter('item_updated', li.toJSONExpanded())
numItemsUpdated++ numItemsUpdated++
} }
@ -233,7 +234,7 @@ class MiscController {
return res.sendStatus(404) return res.sendStatus(404)
} }
const genres = [] const genres = []
this.db.libraryItems.forEach((li) => { Database.libraryItems.forEach((li) => {
if (li.media.metadata.genres && li.media.metadata.genres.length) { if (li.media.metadata.genres && li.media.metadata.genres.length) {
li.media.metadata.genres.forEach((genre) => { li.media.metadata.genres.forEach((genre) => {
if (!genres.includes(genre)) genres.push(genre) if (!genres.includes(genre)) genres.push(genre)
@ -262,7 +263,7 @@ class MiscController {
let genreMerged = false let genreMerged = false
let numItemsUpdated = 0 let numItemsUpdated = 0
for (const li of this.db.libraryItems) { for (const li of Database.libraryItems) {
if (!li.media.metadata.genres || !li.media.metadata.genres.length) continue if (!li.media.metadata.genres || !li.media.metadata.genres.length) continue
if (li.media.metadata.genres.includes(newGenre)) genreMerged = true // new genre is an existing genre so this is a merge if (li.media.metadata.genres.includes(newGenre)) genreMerged = true // new genre is an existing genre so this is a merge
@ -273,7 +274,7 @@ class MiscController {
li.media.metadata.genres.push(newGenre) // Add new genre li.media.metadata.genres.push(newGenre) // Add new genre
} }
Logger.debug(`[MiscController] Rename genre "${genre}" to "${newGenre}" for item "${li.media.metadata.title}"`) Logger.debug(`[MiscController] Rename genre "${genre}" to "${newGenre}" for item "${li.media.metadata.title}"`)
await this.db.updateLibraryItem(li) await Database.updateLibraryItem(li)
SocketAuthority.emitter('item_updated', li.toJSONExpanded()) SocketAuthority.emitter('item_updated', li.toJSONExpanded())
numItemsUpdated++ numItemsUpdated++
} }
@ -295,13 +296,13 @@ class MiscController {
const genre = Buffer.from(decodeURIComponent(req.params.genre), 'base64').toString() const genre = Buffer.from(decodeURIComponent(req.params.genre), 'base64').toString()
let numItemsUpdated = 0 let numItemsUpdated = 0
for (const li of this.db.libraryItems) { for (const li of Database.libraryItems) {
if (!li.media.metadata.genres || !li.media.metadata.genres.length) continue if (!li.media.metadata.genres || !li.media.metadata.genres.length) continue
if (li.media.metadata.genres.includes(genre)) { if (li.media.metadata.genres.includes(genre)) {
li.media.metadata.genres = li.media.metadata.genres.filter(t => t !== genre) li.media.metadata.genres = li.media.metadata.genres.filter(t => t !== genre)
Logger.debug(`[MiscController] Remove genre "${genre}" from item "${li.media.metadata.title}"`) Logger.debug(`[MiscController] Remove genre "${genre}" from item "${li.media.metadata.title}"`)
await this.db.updateLibraryItem(li) await Database.updateLibraryItem(li)
SocketAuthority.emitter('item_updated', li.toJSONExpanded()) SocketAuthority.emitter('item_updated', li.toJSONExpanded())
numItemsUpdated++ numItemsUpdated++
} }

View File

@ -1,4 +1,5 @@
const Logger = require('../Logger') const Logger = require('../Logger')
const Database = require('../Database')
const { version } = require('../../package.json') const { version } = require('../../package.json')
class NotificationController { class NotificationController {
@ -7,14 +8,14 @@ class NotificationController {
get(req, res) { get(req, res) {
res.json({ res.json({
data: this.notificationManager.getData(), data: this.notificationManager.getData(),
settings: this.db.notificationSettings settings: Database.notificationSettings
}) })
} }
async update(req, res) { async update(req, res) {
const updated = this.db.notificationSettings.update(req.body) const updated = Database.notificationSettings.update(req.body)
if (updated) { if (updated) {
await this.db.updateEntity('settings', this.db.notificationSettings) await Database.updateSetting(Database.notificationSettings)
} }
res.sendStatus(200) res.sendStatus(200)
} }
@ -29,31 +30,31 @@ class NotificationController {
} }
async createNotification(req, res) { async createNotification(req, res) {
const success = this.db.notificationSettings.createNotification(req.body) const success = Database.notificationSettings.createNotification(req.body)
if (success) { if (success) {
await this.db.updateEntity('settings', this.db.notificationSettings) await Database.updateSetting(Database.notificationSettings)
} }
res.json(this.db.notificationSettings) res.json(Database.notificationSettings)
} }
async deleteNotification(req, res) { async deleteNotification(req, res) {
if (this.db.notificationSettings.removeNotification(req.notification.id)) { if (Database.notificationSettings.removeNotification(req.notification.id)) {
await this.db.updateEntity('settings', this.db.notificationSettings) await Database.updateSetting(Database.notificationSettings)
} }
res.json(this.db.notificationSettings) res.json(Database.notificationSettings)
} }
async updateNotification(req, res) { async updateNotification(req, res) {
const success = this.db.notificationSettings.updateNotification(req.body) const success = Database.notificationSettings.updateNotification(req.body)
if (success) { if (success) {
await this.db.updateEntity('settings', this.db.notificationSettings) await Database.updateSetting(Database.notificationSettings)
} }
res.json(this.db.notificationSettings) res.json(Database.notificationSettings)
} }
async sendNotificationTest(req, res) { async sendNotificationTest(req, res) {
if (!this.db.notificationSettings.isUseable) return res.status(500).send('Apprise is not configured') if (!Database.notificationSettings.isUseable) return res.status(500).send('Apprise is not configured')
const success = await this.notificationManager.sendTestNotification(req.notification) const success = await this.notificationManager.sendTestNotification(req.notification)
if (success) res.sendStatus(200) if (success) res.sendStatus(200)
@ -66,7 +67,7 @@ class NotificationController {
} }
if (req.params.id) { if (req.params.id) {
const notification = this.db.notificationSettings.getNotification(req.params.id) const notification = Database.notificationSettings.getNotification(req.params.id)
if (!notification) { if (!notification) {
return res.sendStatus(404) return res.sendStatus(404)
} }

View File

@ -1,5 +1,6 @@
const Logger = require('../Logger') const Logger = require('../Logger')
const SocketAuthority = require('../SocketAuthority') const SocketAuthority = require('../SocketAuthority')
const Database = require('../Database')
const Playlist = require('../objects/Playlist') const Playlist = require('../objects/Playlist')
@ -14,8 +15,8 @@ class PlaylistController {
if (!success) { if (!success) {
return res.status(400).send('Invalid playlist request data') return res.status(400).send('Invalid playlist request data')
} }
const jsonExpanded = newPlaylist.toJSONExpanded(this.db.libraryItems) const jsonExpanded = newPlaylist.toJSONExpanded(Database.libraryItems)
await this.db.insertEntity('playlist', newPlaylist) await Database.createPlaylist(newPlaylist)
SocketAuthority.clientEmitter(newPlaylist.userId, 'playlist_added', jsonExpanded) SocketAuthority.clientEmitter(newPlaylist.userId, 'playlist_added', jsonExpanded)
res.json(jsonExpanded) res.json(jsonExpanded)
} }
@ -23,22 +24,22 @@ class PlaylistController {
// GET: api/playlists // GET: api/playlists
findAllForUser(req, res) { findAllForUser(req, res) {
res.json({ res.json({
playlists: this.db.playlists.filter(p => p.userId === req.user.id).map(p => p.toJSONExpanded(this.db.libraryItems)) playlists: Database.playlists.filter(p => p.userId === req.user.id).map(p => p.toJSONExpanded(Database.libraryItems))
}) })
} }
// GET: api/playlists/:id // GET: api/playlists/:id
findOne(req, res) { findOne(req, res) {
res.json(req.playlist.toJSONExpanded(this.db.libraryItems)) res.json(req.playlist.toJSONExpanded(Database.libraryItems))
} }
// PATCH: api/playlists/:id // PATCH: api/playlists/:id
async update(req, res) { async update(req, res) {
const playlist = req.playlist const playlist = req.playlist
let wasUpdated = playlist.update(req.body) let wasUpdated = playlist.update(req.body)
const jsonExpanded = playlist.toJSONExpanded(this.db.libraryItems) const jsonExpanded = playlist.toJSONExpanded(Database.libraryItems)
if (wasUpdated) { if (wasUpdated) {
await this.db.updateEntity('playlist', playlist) await Database.updatePlaylist(playlist)
SocketAuthority.clientEmitter(playlist.userId, 'playlist_updated', jsonExpanded) SocketAuthority.clientEmitter(playlist.userId, 'playlist_updated', jsonExpanded)
} }
res.json(jsonExpanded) res.json(jsonExpanded)
@ -47,8 +48,8 @@ class PlaylistController {
// DELETE: api/playlists/:id // DELETE: api/playlists/:id
async delete(req, res) { async delete(req, res) {
const playlist = req.playlist const playlist = req.playlist
const jsonExpanded = playlist.toJSONExpanded(this.db.libraryItems) const jsonExpanded = playlist.toJSONExpanded(Database.libraryItems)
await this.db.removeEntity('playlist', playlist.id) await Database.removePlaylist(playlist.id)
SocketAuthority.clientEmitter(playlist.userId, 'playlist_removed', jsonExpanded) SocketAuthority.clientEmitter(playlist.userId, 'playlist_removed', jsonExpanded)
res.sendStatus(200) res.sendStatus(200)
} }
@ -62,7 +63,7 @@ class PlaylistController {
return res.status(400).send('Request body has no libraryItemId') return res.status(400).send('Request body has no libraryItemId')
} }
const libraryItem = this.db.libraryItems.find(li => li.id === itemToAdd.libraryItemId) const libraryItem = Database.libraryItems.find(li => li.id === itemToAdd.libraryItemId)
if (!libraryItem) { if (!libraryItem) {
return res.status(400).send('Library item not found') return res.status(400).send('Library item not found')
} }
@ -80,8 +81,16 @@ class PlaylistController {
} }
playlist.addItem(itemToAdd.libraryItemId, itemToAdd.episodeId) playlist.addItem(itemToAdd.libraryItemId, itemToAdd.episodeId)
const jsonExpanded = playlist.toJSONExpanded(this.db.libraryItems)
await this.db.updateEntity('playlist', playlist) const playlistMediaItem = {
playlistId: playlist.id,
mediaItemId: itemToAdd.episodeId || libraryItem.media.id,
mediaItemType: itemToAdd.episodeId ? 'podcastEpisode' : 'book',
order: playlist.items.length
}
const jsonExpanded = playlist.toJSONExpanded(Database.libraryItems)
await Database.createPlaylistMediaItem(playlistMediaItem)
SocketAuthority.clientEmitter(playlist.userId, 'playlist_updated', jsonExpanded) SocketAuthority.clientEmitter(playlist.userId, 'playlist_updated', jsonExpanded)
res.json(jsonExpanded) res.json(jsonExpanded)
} }
@ -99,15 +108,15 @@ class PlaylistController {
playlist.removeItem(itemToRemove.libraryItemId, itemToRemove.episodeId) playlist.removeItem(itemToRemove.libraryItemId, itemToRemove.episodeId)
const jsonExpanded = playlist.toJSONExpanded(this.db.libraryItems) const jsonExpanded = playlist.toJSONExpanded(Database.libraryItems)
// Playlist is removed when there are no items // Playlist is removed when there are no items
if (!playlist.items.length) { if (!playlist.items.length) {
Logger.info(`[PlaylistController] Playlist "${playlist.name}" has no more items - removing it`) Logger.info(`[PlaylistController] Playlist "${playlist.name}" has no more items - removing it`)
await this.db.removeEntity('playlist', playlist.id) await Database.removePlaylist(playlist.id)
SocketAuthority.clientEmitter(playlist.userId, 'playlist_removed', jsonExpanded) SocketAuthority.clientEmitter(playlist.userId, 'playlist_removed', jsonExpanded)
} else { } else {
await this.db.updateEntity('playlist', playlist) await Database.updatePlaylist(playlist)
SocketAuthority.clientEmitter(playlist.userId, 'playlist_updated', jsonExpanded) SocketAuthority.clientEmitter(playlist.userId, 'playlist_updated', jsonExpanded)
} }
@ -122,20 +131,34 @@ class PlaylistController {
} }
const itemsToAdd = req.body.items const itemsToAdd = req.body.items
let hasUpdated = false let hasUpdated = false
let order = playlist.items.length
const playlistMediaItems = []
for (const item of itemsToAdd) { for (const item of itemsToAdd) {
if (!item.libraryItemId) { if (!item.libraryItemId) {
return res.status(400).send('Item does not have libraryItemId') return res.status(400).send('Item does not have libraryItemId')
} }
const libraryItem = Database.getLibraryItem(item.libraryItemId)
if (!libraryItem) {
return res.status(400).send('Item not found with id ' + item.libraryItemId)
}
if (!playlist.containsItem(item)) { if (!playlist.containsItem(item)) {
playlistMediaItems.push({
playlistId: playlist.id,
mediaItemId: item.episodeId || libraryItem.media.id, // podcastEpisodeId or bookId
mediaItemType: item.episodeId ? 'podcastEpisode' : 'book',
order: order++
})
playlist.addItem(item.libraryItemId, item.episodeId) playlist.addItem(item.libraryItemId, item.episodeId)
hasUpdated = true hasUpdated = true
} }
} }
const jsonExpanded = playlist.toJSONExpanded(this.db.libraryItems) const jsonExpanded = playlist.toJSONExpanded(Database.libraryItems)
if (hasUpdated) { if (hasUpdated) {
await this.db.updateEntity('playlist', playlist) await Database.createBulkPlaylistMediaItems(playlistMediaItems)
SocketAuthority.clientEmitter(playlist.userId, 'playlist_updated', jsonExpanded) SocketAuthority.clientEmitter(playlist.userId, 'playlist_updated', jsonExpanded)
} }
res.json(jsonExpanded) res.json(jsonExpanded)
@ -153,21 +176,22 @@ class PlaylistController {
if (!item.libraryItemId) { if (!item.libraryItemId) {
return res.status(400).send('Item does not have libraryItemId') return res.status(400).send('Item does not have libraryItemId')
} }
if (playlist.containsItem(item)) { if (playlist.containsItem(item)) {
playlist.removeItem(item.libraryItemId, item.episodeId) playlist.removeItem(item.libraryItemId, item.episodeId)
hasUpdated = true hasUpdated = true
} }
} }
const jsonExpanded = playlist.toJSONExpanded(this.db.libraryItems) const jsonExpanded = playlist.toJSONExpanded(Database.libraryItems)
if (hasUpdated) { if (hasUpdated) {
// Playlist is removed when there are no items // Playlist is removed when there are no items
if (!playlist.items.length) { if (!playlist.items.length) {
Logger.info(`[PlaylistController] Playlist "${playlist.name}" has no more items - removing it`) Logger.info(`[PlaylistController] Playlist "${playlist.name}" has no more items - removing it`)
await this.db.removeEntity('playlist', playlist.id) await Database.removePlaylist(playlist.id)
SocketAuthority.clientEmitter(playlist.userId, 'playlist_removed', jsonExpanded) SocketAuthority.clientEmitter(playlist.userId, 'playlist_removed', jsonExpanded)
} else { } else {
await this.db.updateEntity('playlist', playlist) await Database.updatePlaylist(playlist)
SocketAuthority.clientEmitter(playlist.userId, 'playlist_updated', jsonExpanded) SocketAuthority.clientEmitter(playlist.userId, 'playlist_updated', jsonExpanded)
} }
} }
@ -176,12 +200,12 @@ class PlaylistController {
// POST: api/playlists/collection/:collectionId // POST: api/playlists/collection/:collectionId
async createFromCollection(req, res) { async createFromCollection(req, res) {
let collection = this.db.collections.find(c => c.id === req.params.collectionId) let collection = Database.collections.find(c => c.id === req.params.collectionId)
if (!collection) { if (!collection) {
return res.status(404).send('Collection not found') return res.status(404).send('Collection not found')
} }
// Expand collection to get library items // Expand collection to get library items
collection = collection.toJSONExpanded(this.db.libraryItems) collection = collection.toJSONExpanded(Database.libraryItems)
// Filter out library items not accessible to user // Filter out library items not accessible to user
const libraryItems = collection.books.filter(item => req.user.checkCanAccessLibraryItem(item)) const libraryItems = collection.books.filter(item => req.user.checkCanAccessLibraryItem(item))
@ -201,15 +225,15 @@ class PlaylistController {
} }
newPlaylist.setData(newPlaylistData) newPlaylist.setData(newPlaylistData)
const jsonExpanded = newPlaylist.toJSONExpanded(this.db.libraryItems) const jsonExpanded = newPlaylist.toJSONExpanded(Database.libraryItems)
await this.db.insertEntity('playlist', newPlaylist) await Database.createPlaylist(newPlaylist)
SocketAuthority.clientEmitter(newPlaylist.userId, 'playlist_added', jsonExpanded) SocketAuthority.clientEmitter(newPlaylist.userId, 'playlist_added', jsonExpanded)
res.json(jsonExpanded) res.json(jsonExpanded)
} }
middleware(req, res, next) { middleware(req, res, next) {
if (req.params.id) { if (req.params.id) {
const playlist = this.db.playlists.find(p => p.id === req.params.id) const playlist = Database.playlists.find(p => p.id === req.params.id)
if (!playlist) { if (!playlist) {
return res.status(404).send('Playlist not found') return res.status(404).send('Playlist not found')
} }

View File

@ -1,5 +1,6 @@
const Logger = require('../Logger') const Logger = require('../Logger')
const SocketAuthority = require('../SocketAuthority') const SocketAuthority = require('../SocketAuthority')
const Database = require('../Database')
const fs = require('../libs/fsExtra') const fs = require('../libs/fsExtra')
@ -18,7 +19,7 @@ class PodcastController {
} }
const payload = req.body const payload = req.body
const library = this.db.libraries.find(lib => lib.id === payload.libraryId) const library = Database.libraries.find(lib => lib.id === payload.libraryId)
if (!library) { if (!library) {
Logger.error(`[PodcastController] Create: Library not found "${payload.libraryId}"`) Logger.error(`[PodcastController] Create: Library not found "${payload.libraryId}"`)
return res.status(404).send('Library not found') return res.status(404).send('Library not found')
@ -33,7 +34,7 @@ class PodcastController {
const podcastPath = filePathToPOSIX(payload.path) const podcastPath = filePathToPOSIX(payload.path)
// Check if a library item with this podcast folder exists already // Check if a library item with this podcast folder exists already
const existingLibraryItem = this.db.libraryItems.find(li => li.path === podcastPath && li.libraryId === library.id) const existingLibraryItem = Database.libraryItems.find(li => li.path === podcastPath && li.libraryId === library.id)
if (existingLibraryItem) { if (existingLibraryItem) {
Logger.error(`[PodcastController] Podcast already exists with name "${existingLibraryItem.media.metadata.title}" at path "${podcastPath}"`) Logger.error(`[PodcastController] Podcast already exists with name "${existingLibraryItem.media.metadata.title}" at path "${podcastPath}"`)
return res.status(400).send('Podcast already exists') return res.status(400).send('Podcast already exists')
@ -80,7 +81,7 @@ class PodcastController {
} }
} }
await this.db.insertLibraryItem(libraryItem) await Database.createLibraryItem(libraryItem)
SocketAuthority.emitter('item_added', libraryItem.toJSONExpanded()) SocketAuthority.emitter('item_added', libraryItem.toJSONExpanded())
res.json(libraryItem.toJSONExpanded()) res.json(libraryItem.toJSONExpanded())
@ -199,7 +200,7 @@ class PodcastController {
const overrideDetails = req.query.override === '1' const overrideDetails = req.query.override === '1'
const episodesUpdated = await this.scanner.quickMatchPodcastEpisodes(req.libraryItem, { overrideDetails }) const episodesUpdated = await this.scanner.quickMatchPodcastEpisodes(req.libraryItem, { overrideDetails })
if (episodesUpdated) { if (episodesUpdated) {
await this.db.updateLibraryItem(req.libraryItem) await Database.updateLibraryItem(req.libraryItem)
SocketAuthority.emitter('item_updated', req.libraryItem.toJSONExpanded()) SocketAuthority.emitter('item_updated', req.libraryItem.toJSONExpanded())
} }
@ -216,9 +217,8 @@ class PodcastController {
return res.status(404).send('Episode not found') return res.status(404).send('Episode not found')
} }
var wasUpdated = libraryItem.media.updateEpisode(episodeId, req.body) if (libraryItem.media.updateEpisode(episodeId, req.body)) {
if (wasUpdated) { await Database.updateLibraryItem(libraryItem)
await this.db.updateLibraryItem(libraryItem)
SocketAuthority.emitter('item_updated', libraryItem.toJSONExpanded()) SocketAuthority.emitter('item_updated', libraryItem.toJSONExpanded())
} }
@ -267,13 +267,13 @@ class PodcastController {
libraryItem.removeLibraryFile(episodeRemoved.audioFile.ino) libraryItem.removeLibraryFile(episodeRemoved.audioFile.ino)
} }
await this.db.updateLibraryItem(libraryItem) await Database.updateLibraryItem(libraryItem)
SocketAuthority.emitter('item_updated', libraryItem.toJSONExpanded()) SocketAuthority.emitter('item_updated', libraryItem.toJSONExpanded())
res.json(libraryItem.toJSON()) res.json(libraryItem.toJSON())
} }
middleware(req, res, next) { middleware(req, res, next) {
const item = this.db.libraryItems.find(li => li.id === req.params.id) const item = Database.libraryItems.find(li => li.id === req.params.id)
if (!item || !item.media) return res.sendStatus(404) if (!item || !item.media) return res.sendStatus(404)
if (!item.isPodcast) { if (!item.isPodcast) {

View File

@ -1,5 +1,5 @@
const Logger = require('../Logger') const Logger = require('../Logger')
const SocketAuthority = require('../SocketAuthority') const Database = require('../Database')
class RSSFeedController { class RSSFeedController {
constructor() { } constructor() { }
@ -8,7 +8,7 @@ class RSSFeedController {
async openRSSFeedForItem(req, res) { async openRSSFeedForItem(req, res) {
const options = req.body || {} const options = req.body || {}
const item = this.db.libraryItems.find(li => li.id === req.params.itemId) const item = Database.libraryItems.find(li => li.id === req.params.itemId)
if (!item) return res.sendStatus(404) if (!item) return res.sendStatus(404)
// Check user can access this library item // Check user can access this library item
@ -30,7 +30,7 @@ class RSSFeedController {
} }
// Check that this slug is not being used for another feed (slug will also be the Feed id) // Check that this slug is not being used for another feed (slug will also be the Feed id)
if (this.rssFeedManager.feeds[options.slug]) { if (this.rssFeedManager.findFeedBySlug(options.slug)) {
Logger.error(`[RSSFeedController] Cannot open RSS feed because slug "${options.slug}" is already in use`) Logger.error(`[RSSFeedController] Cannot open RSS feed because slug "${options.slug}" is already in use`)
return res.status(400).send('Slug already in use') return res.status(400).send('Slug already in use')
} }
@ -45,7 +45,7 @@ class RSSFeedController {
async openRSSFeedForCollection(req, res) { async openRSSFeedForCollection(req, res) {
const options = req.body || {} const options = req.body || {}
const collection = this.db.collections.find(li => li.id === req.params.collectionId) const collection = Database.collections.find(li => li.id === req.params.collectionId)
if (!collection) return res.sendStatus(404) if (!collection) return res.sendStatus(404)
// Check request body options exist // Check request body options exist
@ -55,12 +55,12 @@ class RSSFeedController {
} }
// Check that this slug is not being used for another feed (slug will also be the Feed id) // Check that this slug is not being used for another feed (slug will also be the Feed id)
if (this.rssFeedManager.feeds[options.slug]) { if (this.rssFeedManager.findFeedBySlug(options.slug)) {
Logger.error(`[RSSFeedController] Cannot open RSS feed because slug "${options.slug}" is already in use`) Logger.error(`[RSSFeedController] Cannot open RSS feed because slug "${options.slug}" is already in use`)
return res.status(400).send('Slug already in use') return res.status(400).send('Slug already in use')
} }
const collectionExpanded = collection.toJSONExpanded(this.db.libraryItems) const collectionExpanded = collection.toJSONExpanded(Database.libraryItems)
const collectionItemsWithTracks = collectionExpanded.books.filter(li => li.media.tracks.length) const collectionItemsWithTracks = collectionExpanded.books.filter(li => li.media.tracks.length)
// Check collection has audio tracks // Check collection has audio tracks
@ -79,7 +79,7 @@ class RSSFeedController {
async openRSSFeedForSeries(req, res) { async openRSSFeedForSeries(req, res) {
const options = req.body || {} const options = req.body || {}
const series = this.db.series.find(se => se.id === req.params.seriesId) const series = Database.series.find(se => se.id === req.params.seriesId)
if (!series) return res.sendStatus(404) if (!series) return res.sendStatus(404)
// Check request body options exist // Check request body options exist
@ -89,14 +89,14 @@ class RSSFeedController {
} }
// Check that this slug is not being used for another feed (slug will also be the Feed id) // Check that this slug is not being used for another feed (slug will also be the Feed id)
if (this.rssFeedManager.feeds[options.slug]) { if (this.rssFeedManager.findFeedBySlug(options.slug)) {
Logger.error(`[RSSFeedController] Cannot open RSS feed because slug "${options.slug}" is already in use`) Logger.error(`[RSSFeedController] Cannot open RSS feed because slug "${options.slug}" is already in use`)
return res.status(400).send('Slug already in use') return res.status(400).send('Slug already in use')
} }
const seriesJson = series.toJSON() const seriesJson = series.toJSON()
// Get books in series that have audio tracks // Get books in series that have audio tracks
seriesJson.books = this.db.libraryItems.filter(li => li.mediaType === 'book' && li.media.metadata.hasSeries(series.id) && li.media.tracks.length) seriesJson.books = Database.libraryItems.filter(li => li.mediaType === 'book' && li.media.metadata.hasSeries(series.id) && li.media.tracks.length)
// Check series has audio tracks // Check series has audio tracks
if (!seriesJson.books.length) { if (!seriesJson.books.length) {
@ -111,10 +111,8 @@ class RSSFeedController {
} }
// POST: api/feeds/:id/close // POST: api/feeds/:id/close
async closeRSSFeed(req, res) { closeRSSFeed(req, res) {
await this.rssFeedManager.closeRssFeed(req.params.id) this.rssFeedManager.closeRssFeed(req, res)
res.sendStatus(200)
} }
middleware(req, res, next) { middleware(req, res, next) {
@ -123,14 +121,6 @@ class RSSFeedController {
return res.sendStatus(403) return res.sendStatus(403)
} }
if (req.params.id) {
const feed = this.rssFeedManager.findFeed(req.params.id)
if (!feed) {
Logger.error(`[RSSFeedController] RSS feed not found with id "${req.params.id}"`)
return res.sendStatus(404)
}
}
next() next()
} }
} }

View File

@ -1,5 +1,6 @@
const Logger = require('../Logger') const Logger = require('../Logger')
const SocketAuthority = require('../SocketAuthority') const SocketAuthority = require('../SocketAuthority')
const Database = require('../Database')
class SeriesController { class SeriesController {
constructor() { } constructor() { }
@ -45,7 +46,7 @@ class SeriesController {
var q = (req.query.q || '').toLowerCase() var q = (req.query.q || '').toLowerCase()
if (!q) return res.json([]) if (!q) return res.json([])
var limit = (req.query.limit && !isNaN(req.query.limit)) ? Number(req.query.limit) : 25 var limit = (req.query.limit && !isNaN(req.query.limit)) ? Number(req.query.limit) : 25
var series = this.db.series.filter(se => se.name.toLowerCase().includes(q)) var series = Database.series.filter(se => se.name.toLowerCase().includes(q))
series = series.slice(0, limit) series = series.slice(0, limit)
res.json({ res.json({
results: series results: series
@ -55,20 +56,20 @@ class SeriesController {
async update(req, res) { async update(req, res) {
const hasUpdated = req.series.update(req.body) const hasUpdated = req.series.update(req.body)
if (hasUpdated) { if (hasUpdated) {
await this.db.updateEntity('series', req.series) await Database.updateSeries(req.series)
SocketAuthority.emitter('series_updated', req.series.toJSON()) SocketAuthority.emitter('series_updated', req.series.toJSON())
} }
res.json(req.series.toJSON()) res.json(req.series.toJSON())
} }
middleware(req, res, next) { middleware(req, res, next) {
const series = this.db.series.find(se => se.id === req.params.id) const series = Database.series.find(se => se.id === req.params.id)
if (!series) return res.sendStatus(404) if (!series) return res.sendStatus(404)
/** /**
* Filter out any library items not accessible to user * Filter out any library items not accessible to user
*/ */
const libraryItems = this.db.libraryItems.filter(li => li.media.metadata.hasSeries?.(series.id)) const libraryItems = Database.libraryItems.filter(li => li.media.metadata.hasSeries?.(series.id))
const libraryItemsAccessible = libraryItems.filter(req.user.checkCanAccessLibraryItem) const libraryItemsAccessible = libraryItems.filter(req.user.checkCanAccessLibraryItem)
if (libraryItems.length && !libraryItemsAccessible.length) { if (libraryItems.length && !libraryItemsAccessible.length) {
Logger.warn(`[SeriesController] User attempted to access series "${series.id}" without access to any of the books`, req.user) Logger.warn(`[SeriesController] User attempted to access series "${series.id}" without access to any of the books`, req.user)

View File

@ -1,4 +1,5 @@
const Logger = require('../Logger') const Logger = require('../Logger')
const Database = require('../Database')
const { toNumber } = require('../utils/index') const { toNumber } = require('../utils/index')
class SessionController { class SessionController {
@ -49,7 +50,7 @@ class SessionController {
} }
const openSessions = this.playbackSessionManager.sessions.map(se => { const openSessions = this.playbackSessionManager.sessions.map(se => {
const user = this.db.users.find(u => u.id === se.userId) || null const user = Database.users.find(u => u.id === se.userId) || null
return { return {
...se.toJSON(), ...se.toJSON(),
user: user ? { id: user.id, username: user.username } : null user: user ? { id: user.id, username: user.username } : null
@ -62,7 +63,7 @@ class SessionController {
} }
getOpenSession(req, res) { getOpenSession(req, res) {
var libraryItem = this.db.getLibraryItem(req.session.libraryItemId) var libraryItem = Database.getLibraryItem(req.session.libraryItemId)
var sessionForClient = req.session.toJSONForClient(libraryItem) var sessionForClient = req.session.toJSONForClient(libraryItem)
res.json(sessionForClient) res.json(sessionForClient)
} }
@ -87,7 +88,7 @@ class SessionController {
await this.playbackSessionManager.removeSession(req.session.id) await this.playbackSessionManager.removeSession(req.session.id)
} }
await this.db.removeEntity('session', req.session.id) await Database.removePlaybackSession(req.session.id)
res.sendStatus(200) res.sendStatus(200)
} }
@ -115,7 +116,7 @@ class SessionController {
} }
async middleware(req, res, next) { async middleware(req, res, next) {
const playbackSession = await this.db.getPlaybackSession(req.params.id) const playbackSession = await Database.getPlaybackSession(req.params.id)
if (!playbackSession) { if (!playbackSession) {
Logger.error(`[SessionController] Unable to find playback session with id=${req.params.id}`) Logger.error(`[SessionController] Unable to find playback session with id=${req.params.id}`)
return res.sendStatus(404) return res.sendStatus(404)

View File

@ -1,4 +1,5 @@
const Logger = require('../Logger') const Logger = require('../Logger')
const Database = require('../Database')
class ToolsController { class ToolsController {
constructor() { } constructor() { }
@ -65,7 +66,7 @@ class ToolsController {
const libraryItems = [] const libraryItems = []
for (const libraryItemId of libraryItemIds) { for (const libraryItemId of libraryItemIds) {
const libraryItem = this.db.getLibraryItem(libraryItemId) const libraryItem = Database.getLibraryItem(libraryItemId)
if (!libraryItem) { if (!libraryItem) {
Logger.error(`[ToolsController] Batch embed metadata library item (${libraryItemId}) not found`) Logger.error(`[ToolsController] Batch embed metadata library item (${libraryItemId}) not found`)
return res.sendStatus(404) return res.sendStatus(404)
@ -105,7 +106,7 @@ class ToolsController {
} }
if (req.params.id) { if (req.params.id) {
const item = this.db.libraryItems.find(li => li.id === req.params.id) const item = Database.libraryItems.find(li => li.id === req.params.id)
if (!item || !item.media) return res.sendStatus(404) if (!item || !item.media) return res.sendStatus(404)
// Check user can access this library item // Check user can access this library item

View File

@ -1,9 +1,11 @@
const uuidv4 = require("uuid").v4
const Logger = require('../Logger') const Logger = require('../Logger')
const SocketAuthority = require('../SocketAuthority') const SocketAuthority = require('../SocketAuthority')
const Database = require('../Database')
const User = require('../objects/user/User') const User = require('../objects/user/User')
const { getId, toNumber } = require('../utils/index') const { toNumber } = require('../utils/index')
class UserController { class UserController {
constructor() { } constructor() { }
@ -15,11 +17,11 @@ class UserController {
const includes = (req.query.include || '').split(',').map(i => i.trim()) const includes = (req.query.include || '').split(',').map(i => i.trim())
// Minimal toJSONForBrowser does not include mediaProgress and bookmarks // Minimal toJSONForBrowser does not include mediaProgress and bookmarks
const users = this.db.users.map(u => u.toJSONForBrowser(hideRootToken, true)) const users = Database.users.map(u => u.toJSONForBrowser(hideRootToken, true))
if (includes.includes('latestSession')) { if (includes.includes('latestSession')) {
for (const user of users) { for (const user of users) {
const userSessions = await this.db.selectUserSessions(user.id) const userSessions = await Database.getPlaybackSessions({ userId: user.id })
user.latestSession = userSessions.sort((a, b) => b.updatedAt - a.updatedAt).shift() || null user.latestSession = userSessions.sort((a, b) => b.updatedAt - a.updatedAt).shift() || null
} }
} }
@ -35,7 +37,7 @@ class UserController {
return res.sendStatus(403) return res.sendStatus(403)
} }
const user = this.db.users.find(u => u.id === req.params.id) const user = Database.users.find(u => u.id === req.params.id)
if (!user) { if (!user) {
return res.sendStatus(404) return res.sendStatus(404)
} }
@ -47,18 +49,19 @@ class UserController {
var account = req.body var account = req.body
var username = account.username var username = account.username
var usernameExists = this.db.users.find(u => u.username.toLowerCase() === username.toLowerCase()) var usernameExists = Database.users.find(u => u.username.toLowerCase() === username.toLowerCase())
if (usernameExists) { if (usernameExists) {
return res.status(500).send('Username already taken') return res.status(500).send('Username already taken')
} }
account.id = getId('usr') account.id = uuidv4()
account.pash = await this.auth.hashPass(account.password) account.pash = await this.auth.hashPass(account.password)
delete account.password delete account.password
account.token = await this.auth.generateAccessToken({ userId: account.id, username }) account.token = await this.auth.generateAccessToken({ userId: account.id, username })
account.createdAt = Date.now() account.createdAt = Date.now()
var newUser = new User(account) const newUser = new User(account)
var success = await this.db.insertEntity('user', newUser)
const success = await Database.createUser(newUser)
if (success) { if (success) {
SocketAuthority.adminEmitter('user_added', newUser.toJSONForBrowser()) SocketAuthority.adminEmitter('user_added', newUser.toJSONForBrowser())
res.json({ res.json({
@ -81,7 +84,7 @@ class UserController {
var shouldUpdateToken = false var shouldUpdateToken = false
if (account.username !== undefined && account.username !== user.username) { if (account.username !== undefined && account.username !== user.username) {
var usernameExists = this.db.users.find(u => u.username.toLowerCase() === account.username.toLowerCase()) var usernameExists = Database.users.find(u => u.username.toLowerCase() === account.username.toLowerCase())
if (usernameExists) { if (usernameExists) {
return res.status(500).send('Username already taken') return res.status(500).send('Username already taken')
} }
@ -94,13 +97,12 @@ class UserController {
delete account.password delete account.password
} }
var hasUpdated = user.update(account) if (user.update(account)) {
if (hasUpdated) {
if (shouldUpdateToken) { if (shouldUpdateToken) {
user.token = await this.auth.generateAccessToken({ userId: user.id, username: user.username }) user.token = await this.auth.generateAccessToken({ userId: user.id, username: user.username })
Logger.info(`[UserController] User ${user.username} was generated a new api token`) Logger.info(`[UserController] User ${user.username} was generated a new api token`)
} }
await this.db.updateEntity('user', user) await Database.updateUser(user)
SocketAuthority.clientEmitter(req.user.id, 'user_updated', user.toJSONForBrowser()) SocketAuthority.clientEmitter(req.user.id, 'user_updated', user.toJSONForBrowser())
} }
@ -124,13 +126,13 @@ class UserController {
// Todo: check if user is logged in and cancel streams // Todo: check if user is logged in and cancel streams
// Remove user playlists // Remove user playlists
const userPlaylists = this.db.playlists.filter(p => p.userId === user.id) const userPlaylists = Database.playlists.filter(p => p.userId === user.id)
for (const playlist of userPlaylists) { for (const playlist of userPlaylists) {
await this.db.removeEntity('playlist', playlist.id) await Database.removePlaylist(playlist.id)
} }
const userJson = user.toJSONForBrowser() const userJson = user.toJSONForBrowser()
await this.db.removeEntity('user', user.id) await Database.removeUser(user.id)
SocketAuthority.adminEmitter('user_removed', userJson) SocketAuthority.adminEmitter('user_removed', userJson)
res.json({ res.json({
success: true success: true
@ -164,40 +166,6 @@ class UserController {
res.json(listeningStats) res.json(listeningStats)
} }
// POST: api/users/:id/purge-media-progress
async purgeMediaProgress(req, res) {
const user = req.reqUser
if (user.type === 'root' && !req.user.isRoot) {
Logger.error(`[UserController] Admin user attempted to purge media progress of root user`, req.user.username)
return res.sendStatus(403)
}
var progressPurged = 0
user.mediaProgress = user.mediaProgress.filter(mp => {
const libraryItem = this.db.libraryItems.find(li => li.id === mp.libraryItemId)
if (!libraryItem) {
progressPurged++
return false
} else if (mp.episodeId) {
const episode = libraryItem.mediaType === 'podcast' ? libraryItem.media.getEpisode(mp.episodeId) : null
if (!episode) { // Episode not found
progressPurged++
return false
}
}
return true
})
if (progressPurged) {
Logger.info(`[UserController] Purged ${progressPurged} media progress for user ${user.username}`)
await this.db.updateEntity('user', user)
SocketAuthority.adminEmitter('user_updated', user.toJSONForBrowser())
}
res.json(this.userJsonWithItemProgressDetails(user, !req.user.isRoot))
}
// POST: api/users/online (admin) // POST: api/users/online (admin)
async getOnlineUsers(req, res) { async getOnlineUsers(req, res) {
if (!req.user.isAdminOrUp) { if (!req.user.isAdminOrUp) {
@ -218,7 +186,7 @@ class UserController {
} }
if (req.params.id) { if (req.params.id) {
req.reqUser = this.db.users.find(u => u.id === req.params.id) req.reqUser = Database.users.find(u => u.id === req.params.id)
if (!req.reqUser) { if (!req.reqUser) {
return res.sendStatus(404) return res.sendStatus(404)
} }

View File

@ -0,0 +1,16 @@
const itemDb = require('../db/item.db')
/**
 * GET a single library item by id.
 * `?expanded=1` returns the fully-expanded record (all relations);
 * anything else returns the minified list-view shape.
 */
const getLibraryItem = async (req, res) => {
  const wantsExpanded = req.query.expanded == 1
  const libraryItem = wantsExpanded
    ? await itemDb.getLibraryItemExpanded(req.params.id)
    : await itemDb.getLibraryItemMinified(req.params.id)
  res.json(libraryItem)
}
module.exports = {
getLibraryItem
}

View File

@ -0,0 +1,80 @@
/**
* TODO: Unused for testing
*/
const { Sequelize } = require('sequelize')
const Database = require('../Database')
/**
 * Fetch one library item with only the lightweight fields needed for list
 * views. Book rows carry author/series names (join-table attributes stripped,
 * except the series `sequence`); podcast rows carry an episode count computed
 * via a correlated subquery instead of loading the episodes themselves.
 *
 * @param {string} libraryItemId - primary key of the library item
 * @returns {Promise<Object|null>} Sequelize model instance or null if not found
 */
const getLibraryItemMinified = (libraryItemId) => {
  return Database.models.libraryItem.findByPk(libraryItemId, {
    include: [
      {
        model: Database.models.book,
        attributes: [
          'id', 'title', 'subtitle', 'publishedYear', 'publishedDate', 'publisher', 'description', 'isbn', 'asin', 'language', 'explicit', 'narrators', 'coverPath', 'genres', 'tags'
        ],
        include: [
          {
            model: Database.models.author,
            attributes: ['id', 'name'],
            through: {
              // Drop the bookAuthor join-table columns from the payload.
              attributes: []
            }
          },
          {
            model: Database.models.series,
            attributes: ['id', 'name'],
            through: {
              // Keep only the series position of this book.
              attributes: ['sequence']
            }
          }
        ]
      },
      {
        model: Database.models.podcast,
        attributes: [
          'id', 'title', 'author', 'releaseDate', 'feedURL', 'imageURL', 'description', 'itunesPageURL', 'itunesId', 'itunesArtistId', 'language', 'podcastType', 'explicit', 'autoDownloadEpisodes', 'genres', 'tags',
          // Episode count without hydrating episode rows.
          [Sequelize.literal('(SELECT COUNT(*) FROM "podcastEpisodes" WHERE "podcastEpisodes"."podcastId" = podcast.id)'), 'numPodcastEpisodes']
        ]
      }
    ]
  })
}
/**
 * Fetch one library item with every relation fully hydrated: all book columns
 * plus authors/series, or all podcast columns plus every episode, and the
 * owning library folder and library (included by association name).
 *
 * @param {string} libraryItemId - primary key of the library item
 * @returns {Promise<Object|null>} Sequelize model instance or null if not found
 */
const getLibraryItemExpanded = (libraryItemId) => {
  return Database.models.libraryItem.findByPk(libraryItemId, {
    include: [
      {
        model: Database.models.book,
        include: [
          {
            model: Database.models.author,
            through: {
              // bookAuthor join-table columns are noise; exclude them.
              attributes: []
            }
          },
          {
            model: Database.models.series,
            through: {
              attributes: ['sequence']
            }
          }
        ]
      },
      {
        model: Database.models.podcast,
        include: [
          {
            model: Database.models.podcastEpisode
          }
        ]
      },
      // String includes resolve by association alias.
      'libraryFolder',
      'library'
    ]
  })
}
module.exports = {
getLibraryItemMinified,
getLibraryItemExpanded
}

View File

@ -1,7 +0,0 @@
Copyright 2021 James BonTempo (jamesbontempo@gmail.com)
Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.

View File

@ -1,489 +0,0 @@
"use strict";
const {
existsSync,
mkdirSync,
readFileSync,
writeFileSync
} = require("graceful-fs");
const {
join,
resolve
} = require("path");
const {
aggregateStoreData,
aggregateStoreDataSync,
distributeStoreData,
distributeStoreDataSync,
deleteStoreData,
deleteStoreDataSync,
dropEverything,
dropEverythingSync,
getStoreNames,
getStoreNamesSync,
insertStoreData,
insertStoreDataSync,
insertFileData,
selectStoreData,
selectStoreDataSync,
statsStoreData,
statsStoreDataSync,
updateStoreData,
updateStoreDataSync
} = require("./njodb");
const {
Randomizer,
Reducer,
Result
} = require("./objects");
const {
validateArray,
validateFunction,
validateName,
validateObject,
validatePath,
validateSize
} = require("./validators");
// Default database properties, used when no njodb.properties file exists and
// the caller does not supply an override for a given key.
const defaults = {
    "datadir": "data",        // subdirectory (under root) holding store files
    "dataname": "data",       // base name of each store file (data.N.json)
    "datastores": 5,          // number of store files records are spread across
    "tempdir": "tmp",         // subdirectory for temporary rewrite files
    "lockoptions": {          // passed through to the file-locking layer
        "stale": 5000,
        "update": 1000,
        "retries": {
            "retries": 5000,
            "minTimeout": 250,
            "maxTimeout": 5000,
            "factor": 0.15,
            "randomize": false
        }
    }
};
/**
 * Recursively overlay user-supplied properties onto a defaults object.
 * Only keys that already exist in `defaults` are honored; unknown keys are
 * dropped. Plain sub-objects are merged key-by-key; scalars, arrays and null
 * replace the default value outright. Neither input is mutated.
 *
 * Fixes: the original recursed into any value with `typeof === 'object'`, so
 * a null override crashed (`Object.keys(null)` throws, because
 * `typeof null === 'object'`) and arrays were merged index-by-index like
 * objects instead of being assigned.
 *
 * @param {Object} defaults - base values
 * @param {Object} userProperties - overrides
 * @returns {Object} a new merged object
 */
const mergeProperties = (defaults, userProperties) => {
    const target = Object.assign({}, defaults);
    for (const key of Object.keys(userProperties)) {
        // Ignore keys the defaults don't define.
        if (!Object.prototype.hasOwnProperty.call(target, key)) continue;
        const value = userProperties[key];
        if (value !== null && typeof value === 'object' && !Array.isArray(value)) {
            target[key] = mergeProperties(target[key], value);
        } else {
            target[key] = value;
        }
    }
    return target;
}
/**
 * Persist the durable subset of database properties to
 * `<root>/njodb.properties` as pretty-printed JSON, and return that subset.
 * Derived fields (datapath, temppath, storenames) are intentionally omitted.
 */
const saveProperties = (root, properties) => {
    const persisted = {
        "datadir": properties.datadir,
        "dataname": properties.dataname,
        "datastores": properties.datastores,
        "tempdir": properties.tempdir,
        "lockoptions": properties.lockoptions
    };
    writeFileSync(join(root, "njodb.properties"), JSON.stringify(persisted, null, 4));
    return persisted;
}
// Last-resort guard: an ECOMPROMISED error (presumably raised by the
// file-lock layer when a stale lock is touched after release — confirm
// against the lockfile library) is logged instead of crashing the process;
// every other uncaught error is rethrown untouched.
process.on("uncaughtException", error => {
    if (error.code === "ECOMPROMISED") {
        console.error(Object.assign(new Error("Stale lock or attempt to update it after release"), { code: error.code }));
    } else {
        throw error;
    }
});
class Database {
constructor(root, properties = {}) {
validateObject(properties);
this.properties = {};
if (root !== undefined && root !== null) {
validateName(root);
this.properties.root = root;
} else {
this.properties.root = process.cwd();
}
if (!existsSync(this.properties.root)) mkdirSync(this.properties.root);
const propertiesFile = join(this.properties.root, "njodb.properties");
if (existsSync(propertiesFile)) {
this.setProperties(JSON.parse(readFileSync(propertiesFile)));
} else {
this.setProperties(mergeProperties(defaults, properties));
}
if (!existsSync(this.properties.datapath)) mkdirSync(this.properties.datapath);
if (!existsSync(this.properties.temppath)) mkdirSync(this.properties.temppath);
this.properties.storenames = getStoreNamesSync(this.properties.datapath, this.properties.dataname);
return this;
}
// Database management methods
getProperties() {
return this.properties;
}
setProperties(properties) {
validateObject(properties);
this.properties.datadir = (validateName(properties.datadir)) ? properties.datadir : defaults.datadir;
this.properties.dataname = (validateName(properties.dataname)) ? properties.dataname : defaults.dataname;
this.properties.datastores = (validateSize(properties.datastores)) ? properties.datastores : defaults.datastores;
this.properties.tempdir = (validateName(properties.tempdir)) ? properties.tempdir : defaults.tempdir;
this.properties.lockoptions = (validateObject(properties.lockoptions)) ? properties.lockoptions : defaults.lockoptions;
this.properties.datapath = join(this.properties.root, this.properties.datadir);
this.properties.temppath = join(this.properties.root, this.properties.tempdir);
saveProperties(this.properties.root, this.properties);
return this.properties;
}
async stats() {
var stats = {
root: resolve(this.properties.root),
data: resolve(this.properties.datapath),
temp: resolve(this.properties.temppath)
};
var promises = [];
for (const storename of this.properties.storenames) {
const storepath = join(this.properties.datapath, storename);
promises.push(statsStoreData(storepath, this.properties.lockoptions));
}
const results = await Promise.all(promises);
return Object.assign(stats, Reducer("stats", results));
}
statsSync() {
var stats = {
root: resolve(this.properties.root),
data: resolve(this.properties.datapath),
temp: resolve(this.properties.temppath)
};
var results = [];
for (const storename of this.properties.storenames) {
const storepath = join(this.properties.datapath, storename);
results.push(statsStoreDataSync(storepath));
}
return Object.assign(stats, Reducer("stats", results));
}
async grow() {
this.properties.datastores++;
const results = await distributeStoreData(this.properties);
this.properties.storenames = await getStoreNames(this.properties.datapath, this.properties.dataname);
saveProperties(this.properties.root, this.properties);
return results;
}
growSync() {
this.properties.datastores++;
const results = distributeStoreDataSync(this.properties);
this.properties.storenames = getStoreNamesSync(this.properties.datapath, this.properties.dataname);
saveProperties(this.properties.root, this.properties);
return results;
}
async shrink() {
if (this.properties.datastores > 1) {
this.properties.datastores--;
const results = await distributeStoreData(this.properties);
this.properties.storenames = await getStoreNames(this.properties.datapath, this.properties.dataname);
saveProperties(this.properties.root, this.properties);
return results;
} else {
throw new Error("Database cannot shrink any further");
}
}
shrinkSync() {
if (this.properties.datastores > 1) {
this.properties.datastores--;
const results = distributeStoreDataSync(this.properties);
this.properties.storenames = getStoreNamesSync(this.properties.datapath, this.properties.dataname);
saveProperties(this.properties.root, this.properties);
return results;
} else {
throw new Error("Database cannot shrink any further");
}
}
async resize(size) {
validateSize(size);
this.properties.datastores = size;
const results = await distributeStoreData(this.properties);
this.properties.storenames = await getStoreNames(this.properties.datapath, this.properties.dataname);
saveProperties(this.properties.root, this.properties);
return results;
}
resizeSync(size) {
validateSize(size);
this.properties.datastores = size;
const results = distributeStoreDataSync(this.properties);
this.properties.storenames = getStoreNamesSync(this.properties.datapath, this.properties.dataname);
saveProperties(this.properties.root, this.properties);
return results;
}
async drop() {
const results = await dropEverything(this.properties);
return Reducer("drop", results);
}
dropSync() {
const results = dropEverythingSync(this.properties);
return Reducer("drop", results);
}
// Data manipulation methods
async insert(data) {
validateArray(data);
var promises = [];
var records = [];
for (let i = 0; i < this.properties.datastores; i++) {
records[i] = "";
}
for (let i = 0; i < data.length; i++) {
records[i % this.properties.datastores] += JSON.stringify(data[i]) + "\n";
}
const randomizer = Randomizer(Array.from(Array(this.properties.datastores).keys()), false);
for (var j = 0; j < records.length; j++) {
if (records[j] !== "") {
const storenumber = randomizer.next();
const storename = [this.properties.dataname, storenumber, "json"].join(".");
const storepath = join(this.properties.datapath, storename)
promises.push(insertStoreData(storepath, records[j], this.properties.lockoptions));
}
}
const results = await Promise.all(promises);
this.properties.storenames = await getStoreNames(this.properties.datapath, this.properties.dataname);
return Reducer("insert", results);
}
insertSync(data) {
validateArray(data);
var results = [];
var records = [];
for (let i = 0; i < this.properties.datastores; i++) {
records[i] = "";
}
for (let i = 0; i < data.length; i++) {
records[i % this.properties.datastores] += JSON.stringify(data[i]) + "\n";
}
const randomizer = Randomizer(Array.from(Array(this.properties.datastores).keys()), false);
for (var j = 0; j < records.length; j++) {
if (records[j] !== "") {
const storenumber = randomizer.next();
const storename = [this.properties.dataname, storenumber, "json"].join(".");
const storepath = join(this.properties.datapath, storename)
results.push(insertStoreDataSync(storepath, records[j], this.properties.lockoptions));
}
}
this.properties.storenames = getStoreNamesSync(this.properties.datapath, this.properties.dataname);
return Reducer("insert", results);
}
async insertFile(file) {
validatePath(file);
const results = await insertFileData(file, this.properties.datapath, this.properties.storenames, this.properties.lockoptions);
return results;
}
insertFileSync(file) {
validatePath(file);
const data = readFileSync(file, "utf8").split("\n");
var records = [];
var results = Result("insertFile");
for (var record of data) {
record = record.trim()
results.lines++;
if (record.length > 0) {
try {
records.push(JSON.parse(record));
} catch (error) {
results.errors.push({ error: error.message, line: results.lines, data: record });
}
} else {
results.blanks++;
}
}
return Object.assign(results, this.insertSync(records));
}
async select(match, project) {
validateFunction(match);
if (project) validateFunction(project);
var promises = [];
for (const storename of this.properties.storenames) {
const storepath = join(this.properties.datapath, storename);
promises.push(selectStoreData(storepath, match, project, this.properties.lockoptions));
}
const results = await Promise.all(promises);
return Reducer("select", results);
}
selectSync(match, project) {
validateFunction(match);
if (project) validateFunction(project);
var results = [];
for (const storename of this.properties.storenames) {
const storepath = join(this.properties.datapath, storename);
results.push(selectStoreDataSync(storepath, match, project));
}
return Reducer("select", results);
}
async update(match, update) {
validateFunction(match);
validateFunction(update);
var promises = [];
for (const storename of this.properties.storenames) {
const storepath = join(this.properties.datapath, storename);
const tempstorename = [storename, Date.now(), "tmp"].join(".");
const tempstorepath = join(this.properties.temppath, tempstorename);
promises.push(updateStoreData(storepath, match, update, tempstorepath, this.properties.lockoptions));
}
const results = await Promise.all(promises);
return Reducer("update", results);
}
updateSync(match, update) {
validateFunction(match);
validateFunction(update);
var results = [];
for (const storename of this.properties.storenames) {
const storepath = join(this.properties.datapath, storename);
const tempstorename = [storename, Date.now(), "tmp"].join(".");
const tempstorepath = join(this.properties.temppath, tempstorename);
results.push(updateStoreDataSync(storepath, match, update, tempstorepath));
}
return Reducer("update", results);
}
async delete(match) {
validateFunction(match);
var promises = [];
for (const storename of this.properties.storenames) {
const storepath = join(this.properties.datapath, storename);
const tempstorename = [storename, Date.now(), "tmp"].join(".");
const tempstorepath = join(this.properties.temppath, tempstorename);
promises.push(deleteStoreData(storepath, match, tempstorepath, this.properties.lockoptions));
}
const results = await Promise.all(promises);
return Reducer("delete", results);
}
deleteSync(match) {
validateFunction(match);
var results = [];
for (const storename of this.properties.storenames) {
const storepath = join(this.properties.datapath, storename);
const tempstorename = [storename, Date.now(), "tmp"].join(".");
const tempstorepath = join(this.properties.temppath, tempstorename);
results.push(deleteStoreDataSync(storepath, match, tempstorepath));
}
return Reducer("delete", results);
}
async aggregate(match, index, project) {
validateFunction(match);
validateFunction(index);
if (project) validateFunction(project);
var promises = [];
for (const storename of this.properties.storenames) {
const storepath = join(this.properties.datapath, storename);
promises.push(aggregateStoreData(storepath, match, index, project, this.properties.lockoptions));
}
const results = await Promise.all(promises);
return Reducer("aggregate", results);
}
aggregateSync(match, index, project) {
validateFunction(match);
validateFunction(index);
if (project) validateFunction(project);
var results = [];
for (const storename of this.properties.storenames) {
const storepath = join(this.properties.datapath, storename);
results.push(aggregateStoreDataSync(storepath, match, index, project));
}
return Reducer("aggregate", results);
}
}
exports.Database = Database;

View File

@ -1,723 +0,0 @@
"use strict";
const {
appendFile,
appendFileSync,
createReadStream,
createWriteStream,
readFileSync,
readdir,
readdirSync,
stat,
statSync,
writeFile
} = require("graceful-fs");
const {
join,
resolve
} = require("path");
const { createInterface } = require("readline");
const { promisify } = require("util");
const {
check,
checkSync,
lock,
lockSync
} = require("../properLockfile");
const {
deleteFile,
deleteFileSync,
deleteDirectory,
deleteDirectorySync,
fileExists,
fileExistsSync,
moveFile,
moveFileSync,
releaseLock,
releaseLockSync,
replaceFile,
replaceFileSync
} = require("./utils");
const {
Handler,
Randomizer,
Result
} = require("./objects");
/**
 * Filter a directory listing down to this database's store files, which are
 * named `<dataname>.<number>.json` (see how stores are created elsewhere in
 * this file via `[dataname, i, "json"].join(".")`).
 *
 * Fixes: the original built the RegExp with unescaped dots and an unescaped
 * `dataname`, so `.` matched any character — e.g. "data10json" was accepted
 * as a store of "data", and regex metacharacters in `dataname` broke the
 * pattern entirely.
 *
 * @param {string[]} files - directory entries to filter
 * @param {string} dataname - base store name, matched literally
 * @returns {string[]} the matching store file names, in input order
 */
const filterStoreNames = (files, dataname) => {
    // Escape regex metacharacters so dataname and the dots match literally.
    const escaped = dataname.replace(/[.*+?^${}()|[\]\\]/g, "\\$&");
    const re = new RegExp("^" + escaped + "\\.\\d+\\.json$");
    var storenames = [];
    for (const file of files) {
        if (re.test(file)) storenames.push(file);
    }
    return storenames;
};
// Async: list the data directory and keep only this database's store files.
const getStoreNames = async (datapath, dataname) => {
    const entries = await promisify(readdir)(datapath);
    return filterStoreNames(entries, dataname);
}
// Sync: list the data directory and keep only this database's store files.
const getStoreNamesSync = (datapath, dataname) =>
    filterStoreNames(readdirSync(datapath), dataname);
// Database management
// Collect statistics for one store file (record counts via the "stats"
// Handler, plus file size and timestamps), holding the store's lock while
// the file is streamed line-by-line.
const statsStoreData = async (store, lockoptions) => {
    var release, stats, results;
    release = await lock(store, lockoptions);
    const handlerResults = await new Promise((resolve, reject) => {
        const reader = createInterface({ input: createReadStream(store), crlfDelay: Infinity });
        const handler = Handler("stats");
        reader.on("line", record => handler.next(record));
        reader.on("close", () => resolve(handler.return()));
        reader.on("error", error => reject(error));
    });
    // Release only if the lock is still held (it may have gone stale —
    // `check` semantics come from the lock layer; confirm there).
    if (await check(store, lockoptions)) await releaseLock(store, release);
    // `resolve` here is path.resolve (outer scope), not the promise resolver.
    results = Object.assign({ store: resolve(store) }, handlerResults)
    stats = await promisify(stat)(store);
    results.size = stats.size;
    results.created = stats.birthtime;
    results.modified = stats.mtime;
    results.end = Date.now()
    return results;
};
// Synchronous variant of statsStoreData: read the whole store into memory
// under lock, then feed each line to the "stats" Handler.
const statsStoreDataSync = (store) => {
    var file, release, results;
    release = lockSync(store);
    file = readFileSync(store, "utf8");
    // Release as soon as the bytes are in memory; parsing needs no lock.
    if (checkSync(store)) releaseLockSync(store, release);
    const data = file.split("\n");
    const handler = Handler("stats");
    for (var record of data) {
        handler.next(record)
    }
    results = Object.assign({ store: resolve(store) }, handler.return());
    const stats = statSync(store);
    results.size = stats.size;
    results.created = stats.birthtime;
    results.modified = stats.mtime;
    results.end = Date.now();
    return results;
};
// Redistribute every record across `properties.datastores` stores: lock all
// current stores, stream their records into fresh temp files (randomized
// round-robin), delete the old stores, then move the temp files into place.
// Used by grow/shrink/resize.
const distributeStoreData = async (properties) => {
    var results = Result("distribute");
    var storepaths = [];
    var tempstorepaths = [];
    var locks = [];
    // Lock every existing store up front so no writes interleave.
    for (let storename of properties.storenames) {
        const storepath = join(properties.datapath, storename);
        storepaths.push(storepath);
        locks.push(lock(storepath, properties.lockoptions));
    }
    const releases = await Promise.all(locks);
    var writes = [];
    var writers = [];
    // One empty temp file + write stream per target store.
    for (let i = 0; i < properties.datastores; i++) {
        const tempstorepath = join(properties.temppath, [properties.dataname, i, results.start, "json"].join("."));
        tempstorepaths.push(tempstorepath);
        await promisify(writeFile)(tempstorepath, "");
        writers.push(createWriteStream(tempstorepath, { flags: "r+" }));
    }
    for (let storename of properties.storenames) {
        writes.push(new Promise((resolve, reject) => {
            var line = 0;
            const store = join(properties.datapath, storename);
            const randomizer = Randomizer(Array.from(Array(properties.datastores).keys()), false);
            const reader = createInterface({ input: createReadStream(store), crlfDelay: Infinity });
            reader.on("line", record => {
                const storenumber = randomizer.next();
                line++;
                try {
                    // Round-trip to normalize and validate the JSON.
                    record = JSON.stringify(JSON.parse(record));
                    results.records++;
                } catch {
                    results.errors.push({ line: line, data: record });
                } finally {
                    // NOTE(review): the finally runs even on parse failure, so
                    // unparsable lines are still written through verbatim.
                    writers[storenumber].write(record + "\n");
                }
            });
            reader.on("close", () => {
                resolve(true);
            });
            reader.on("error", error => {
                reject(error);
            });
        }));
    }
    await Promise.all(writes);
    for (let writer of writers) {
        writer.end();
    }
    // Remove old stores while still holding their locks.
    var deletes = [];
    for (let storepath of storepaths) {
        deletes.push(deleteFile(storepath));
    }
    await Promise.all(deletes);
    for (const release of releases) {
        release();
    }
    // Promote temp files to their final store names.
    var moves = [];
    for (let i = 0; i < tempstorepaths.length; i++) {
        moves.push(moveFile(tempstorepaths[i], join(properties.datapath, [properties.dataname, i, "json"].join("."))))
    }
    await Promise.all(moves);
    results.stores = tempstorepaths.length,
    results.end = Date.now();
    results.elapsed = results.end - results.start;
    return results;
};
// Synchronous variant of distributeStoreData: read all stores into memory
// under lock, rebatch the records, write temp files, delete old stores,
// release locks, and move the temp files into place.
const distributeStoreDataSync = (properties) => {
    var results = Result("distribute");
    var storepaths = [];
    var tempstorepaths = [];
    var releases = [];
    var data = [];
    for (let storename of properties.storenames) {
        const storepath = join(properties.datapath, storename);
        storepaths.push(storepath);
        releases.push(lockSync(storepath));
        const file = readFileSync(storepath, "utf8").trimEnd();
        if (file.length > 0) data = data.concat(file.split("\n"));
    }
    var records = [];
    for (var i = 0; i < data.length; i++) {
        try {
            data[i] = JSON.stringify(JSON.parse(data[i]));
            results.records++;
        } catch (error) {
            results.errors.push({ line: i, data: data[i] });
        } finally {
            // NOTE(review): records[i] is seeded with [] but then appended to
            // with +=, which coerces it to a string — so each batch ends up a
            // newline-joined string, including lines that failed to parse.
            if (i === i % properties.datastores) records[i] = [];
            records[i % properties.datastores] += data[i] + "\n";
        }
    }
    const randomizer = Randomizer(Array.from(Array(properties.datastores).keys()), false);
    for (var j = 0; j < records.length; j++) {
        const storenumber = randomizer.next();
        const tempstorepath = join(properties.temppath, [properties.dataname, storenumber, results.start, "json"].join("."));
        tempstorepaths.push(tempstorepath);
        appendFileSync(tempstorepath, records[j]);
    }
    for (let storepath of storepaths) {
        deleteFileSync(storepath);
    }
    for (const release of releases) {
        release();
    }
    for (let i = 0; i < tempstorepaths.length; i++) {
        moveFileSync(tempstorepaths[i], join(properties.datapath, [properties.dataname, i, "json"].join(".")));
    }
    results.stores = tempstorepaths.length,
    results.end = Date.now();
    results.elapsed = results.end - results.start;
    return results;
};
/**
 * Destroy the database: delete every store file (under lock), release the
 * locks, then remove the temp/data directories and the njodb.properties
 * file. Returns the combined array of per-deletion results.
 */
const dropEverything = async (properties) => {
    const storePaths = properties.storenames.map((name) => join(properties.datapath, name));
    // Hold every store's lock so nothing writes while we delete.
    const releases = await Promise.all(storePaths.map((path) => lock(path, properties.lockoptions)));
    let results = await Promise.all(storePaths.map((path) => deleteFile(path)));
    for (const release of releases) {
        release();
    }
    // Finally tear down the directories and the properties file itself.
    const finalResults = await Promise.all([
        deleteDirectory(properties.temppath),
        deleteDirectory(properties.datapath),
        deleteFile(join(properties.root, "njodb.properties"))
    ]);
    results = results.concat(finalResults);
    return results;
}
/** Synchronous variant of dropEverything. */
const dropEverythingSync = (properties) => {
    const storePaths = properties.storenames.map((name) => join(properties.datapath, name));
    const releases = storePaths.map((path) => lockSync(path));
    const results = storePaths.map((path) => deleteFileSync(path));
    for (const release of releases) {
        release();
    }
    results.push(deleteDirectorySync(properties.temppath));
    results.push(deleteDirectorySync(properties.datapath));
    results.push(deleteFileSync(join(properties.root, "njodb.properties")));
    return results;
}
// Data manipulation
// Append pre-serialized newline-delimited records to a single store file.
// The lock is only taken if the file already exists — a brand-new store
// cannot be locked before its first write creates it.
const insertStoreData = async (store, data, lockoptions) => {
    let release, results;
    results = Object.assign({ store: resolve(store) }, Result("insert"));
    if (await fileExists(store)) release = await lock(store, lockoptions);
    await promisify(appendFile)(store, data, "utf8");
    if (await check(store, lockoptions)) await releaseLock(store, release);
    // One record per line; the trailing newline accounts for the -1.
    results.inserted = (data.length > 0) ? data.split("\n").length - 1 : 0;
    results.end = Date.now();
    return results;
};
// Synchronous variant of insertStoreData (same conditional-lock behavior).
const insertStoreDataSync = (store, data) => {
    let release, results;
    results = Object.assign({ store: resolve(store) }, Result("insert"));
    if (fileExistsSync(store)) release = lockSync(store);
    appendFileSync(store, data, "utf8");
    if (checkSync(store)) releaseLockSync(store, release);
    results.inserted = (data.length > 0) ? data.split("\n").length - 1 : 0;
    results.end = Date.now();
    return results;
};
// Bulk-loads a newline-delimited JSON file, scattering records randomly
// across all store files. All stores are locked and opened for writing
// before the input file is streamed line-by-line.
const insertFileData = async (file, datapath, storenames, lockoptions) => {
    let datastores, locks, releases, writers, results;
    results = Result("insertFile");
    datastores = storenames.length;
    locks = [];
    writers = [];
    for (let storename of storenames) {
        const storepath = join(datapath, storename);
        locks.push(lock(storepath, lockoptions));
        // NOTE(review): flags "r+" writes from position 0 of an existing store
        // rather than appending — confirm stores are expected to be empty here.
        writers.push(createWriteStream(storepath, { flags: "r+" }));
    }
    releases = await Promise.all(locks);
    await new Promise((resolve, reject) => {
        // Without replacement: records are dealt round-robin-ish over a
        // shuffled cycle of store numbers for an even distribution.
        const randomizer = Randomizer(Array.from(Array(datastores).keys()), false);
        const reader = createInterface({ input: createReadStream(file), crlfDelay: Infinity });
        reader.on("line", record => {
            record = record.trim();
            const storenumber = randomizer.next();
            results.lines++;
            if (record.length > 0) {
                try {
                    record = JSON.parse(record);
                    results.inserted++;
                } catch (error) {
                    results.errors.push({ error: error.message, line: results.lines, data: record });
                } finally {
                    // Written even on parse failure: the unparsed text is then
                    // serialized as a JSON string so the line is not lost.
                    writers[storenumber].write(JSON.stringify(record) + "\n");
                }
            } else {
                results.blanks++;
            }
        });
        reader.on("close", () => {
            resolve(true);
        });
        reader.on("error", error => {
            reject(error);
        });
    });
    for (const writer of writers) {
        writer.end();
    }
    for (const release of releases) {
        release();
    }
    results.end = Date.now();
    results.elapsed = results.end - results.start;
    return results;
}
// Streams one store file line-by-line, collecting records that satisfy
// `match`, optionally projected through `project`.
const selectStoreData = async (store, match, project, lockoptions) => {
    const release = await lock(store, lockoptions);
    const handler = Handler("select", match, project);
    const handlerResults = await new Promise((resolve, reject) => {
        const reader = createInterface({ input: createReadStream(store), crlfDelay: Infinity });
        reader.on("line", (line) => handler.next(line));
        reader.on("close", () => resolve(handler.return()));
        reader.on("error", (error) => reject(error));
    });
    if (await check(store, lockoptions)) await releaseLock(store, release);
    return Object.assign({ store: store }, handlerResults);
};
// Synchronous variant of selectStoreData: reads the whole store into memory
// and feeds each line through a select Handler.
const selectStoreDataSync = (store, match, project) => {
    const release = lockSync(store);
    const contents = readFileSync(store, "utf8");
    if (checkSync(store)) releaseLockSync(store, release);
    const handler = Handler("select", match, project);
    for (const line of contents.split("\n")) {
        handler.next(line);
    }
    return Object.assign({ store: store }, handler.return());
};
// Applies `update` to all records matching `match` in one store file.
// Records (changed and unchanged) are streamed into tempstore, which then
// replaces the store if anything was actually updated.
const updateStoreData = async (store, match, update, tempstore, lockoptions) => {
    let release, results;
    release = await lock(store, lockoptions);
    const handlerResults = await new Promise((resolve, reject) => {
        const writer = createWriteStream(tempstore);
        const handler = Handler("update", match, update);
        writer.on("open", () => {
            // Reader was opening and closing before writer ever opened
            const reader = createInterface({ input: createReadStream(store), crlfDelay: Infinity });
            reader.on("line", record => {
                handler.next(record, writer)
            });
            reader.on("close", () => {
                writer.end();
                resolve(handler.return());
            });
            reader.on("error", error => reject(error));
        });
        writer.on("error", error => reject(error));
    });
    results = Object.assign({ store: store, tempstore: tempstore }, handlerResults);
    if (results.updated > 0) {
        // Swap tempstore into place; if the swap fails, report the would-be
        // updated records as errors and zero the update count.
        if (!await replaceFile(store, tempstore)) {
            results.errors = [...results.records];
            results.updated = 0;
        }
    } else {
        // Nothing changed: the staged copy is redundant.
        await deleteFile(tempstore);
    }
    if (await check(store, lockoptions)) await releaseLock(store, release);
    results.end = Date.now();
    // Internal buffers are not part of the reported result.
    delete results.data;
    delete results.records;
    return results;
};
// Synchronously applies `update` to records matching `match`. Rewritten
// records are staged in tempstore and swapped into place on success.
const updateStoreDataSync = (store, match, update, tempstore) => {
    const release = lockSync(store);
    const contents = readFileSync(store, "utf8").trimEnd();
    if (checkSync(store)) releaseLockSync(store, release);
    const handler = Handler("update", match, update);
    for (const line of contents.split("\n")) {
        handler.next(line);
    }
    const results = Object.assign({ store: store, tempstore: tempstore }, handler.return());
    if (results.updated > 0) {
        let appended = false;
        let replaced = false;
        try {
            appendFileSync(tempstore, results.data.join("\n") + "\n", "utf8");
            appended = true;
        } catch {
            appended = false;
        }
        if (appended) replaced = replaceFileSync(store, tempstore);
        // NOTE(review): errors are only reported when the append itself fails;
        // an append-success/replace-failure keeps the updated count — confirm
        // whether `!(appended && replaced)` was intended.
        if (!(appended || replaced)) {
            results.errors = [...results.records];
            results.updated = 0;
        }
    }
    results.end = Date.now();
    delete results.data;
    delete results.records;
    return results;
};
// Deletes all records matching `match` from one store file. Surviving
// records are streamed into tempstore, which then replaces the store if
// anything was actually deleted.
const deleteStoreData = async (store, match, tempstore, lockoptions) => {
    let release, results;
    release = await lock(store, lockoptions);
    const handlerResults = await new Promise((resolve, reject) => {
        const writer = createWriteStream(tempstore);
        const handler = Handler("delete", match);
        writer.on("open", () => {
            // Create reader after writer opens otherwise the reader can sometimes close before the writer opens
            const reader = createInterface({ input: createReadStream(store), crlfDelay: Infinity });
            reader.on("line", record => handler.next(record, writer));
            reader.on("close", () => {
                writer.end();
                resolve(handler.return());
            });
            reader.on("error", error => reject(error));
        });
        writer.on("error", error => reject(error));
    });
    results = Object.assign({ store: store, tempstore: tempstore }, handlerResults);
    if (results.deleted > 0) {
        // Swap tempstore into place; on failure, report the would-be deleted
        // records as errors and zero the delete count.
        if (!await replaceFile(store, tempstore)) {
            results.errors = [...results.records];
            results.deleted = 0;
        }
    } else {
        // Nothing deleted: the staged copy is redundant.
        await deleteFile(tempstore);
    }
    if (await check(store, lockoptions)) await releaseLock(store, release);
    results.end = Date.now();
    // Internal buffers are not part of the reported result.
    delete results.data;
    delete results.records;
    return results;
};
// Synchronously deletes records matching `match` from one store file.
// Surviving records are staged in tempstore and swapped into place.
const deleteStoreDataSync = (store, match, tempstore) => {
    let file, release, results;
    release = lockSync(store);
    file = readFileSync(store, "utf8");
    if (checkSync(store)) releaseLockSync(store, release);
    const records = file.split("\n");
    const handler = Handler("delete", match);
    for (var record of records) {
        handler.next(record)
    }
    results = Object.assign({ store: store, tempstore: tempstore }, handler.return());
    if (results.deleted > 0) {
        let append, replace;
        try {
            // Retained records were buffered in results.data; persist them first.
            appendFileSync(tempstore, results.data.join("\n") + "\n", "utf8");
            append = true;
        } catch {
            append = false;
        }
        if (append) replace = replaceFileSync(store, tempstore);
        if (!(append || replace)) {
            results.errors = [...results.records];
            // Fix: previously zeroed results.updated (copy-paste from
            // updateStoreDataSync); the delete count is what must be reset
            // when the rewrite could not be persisted.
            results.deleted = 0;
        }
    }
    results.end = Date.now();
    delete results.data;
    delete results.records;
    return results;
};
// Streams one store file and computes per-index, per-field aggregates for
// records matching `match`.
const aggregateStoreData = async (store, match, index, project, lockoptions) => {
    let release, results;
    release = await lock(store, lockoptions);
    const handlerResults = await new Promise((resolve, reject) => {
        const reader = createInterface({ input: createReadStream(store), crlfDelay: Infinity });
        const handler = Handler("aggregate", match, index, project);
        reader.on("line", record => handler.next(record));
        reader.on("close", () => resolve(handler.return()));
        reader.on("error", error => reject(error));
    });
    // Fix: releaseLock is async and was fired without await, leaving the
    // release promise floating (every sibling operation awaits it).
    if (await check(store, lockoptions)) await releaseLock(store, release);
    results = Object.assign({ store: store }, handlerResults);
    return results;
}
// Synchronous variant of aggregateStoreData.
const aggregateStoreDataSync = (store, match, index, project) => {
    const release = lockSync(store);
    const contents = readFileSync(store, "utf8");
    if (checkSync(store)) releaseLockSync(store, release);
    const handler = Handler("aggregate", match, index, project);
    for (const line of contents.split("\n")) {
        handler.next(line);
    }
    return Object.assign({ store: store }, handler.return());
}
// Public surface of the store module (consumed by the njodb entry point).
exports.getStoreNames = getStoreNames;
exports.getStoreNamesSync = getStoreNamesSync;
// Database management
exports.statsStoreData = statsStoreData;
exports.statsStoreDataSync = statsStoreDataSync;
exports.distributeStoreData = distributeStoreData;
exports.distributeStoreDataSync = distributeStoreDataSync;
exports.dropEverything = dropEverything;
exports.dropEverythingSync = dropEverythingSync;
// Data manipulation
exports.insertStoreData = insertStoreData;
exports.insertStoreDataSync = insertStoreDataSync;
exports.insertFileData = insertFileData;
exports.selectStoreData = selectStoreData;
exports.selectStoreDataSync = selectStoreDataSync;
exports.updateStoreData = updateStoreData;
exports.updateStoreDataSync = updateStoreDataSync;
exports.deleteStoreData = deleteStoreData;
exports.deleteStoreDataSync = deleteStoreDataSync;
exports.aggregateStoreData = aggregateStoreData;
exports.aggregateStoreDataSync = aggregateStoreDataSync;

View File

@ -1,608 +0,0 @@
"use strict";
const {
convertSize,
max,
min
} = require("./utils");
// Returns an object whose next() yields a random element of `data`.
// With replacement (the default) elements may repeat freely; without
// replacement every element is yielded once per pass, after which the
// pool silently refills from the original data.
const Randomizer = (data, replacement) => {
    if (replacement === undefined || typeof replacement !== "boolean") replacement = true;
    let pool = [...data];
    const next = () => {
        const i = Math.floor(Math.random() * pool.length);
        if (replacement) {
            return pool[i];
        }
        const picked = pool.splice(i, 1)[0];
        if (pool.length === 0) pool = [...data];
        return picked;
    };
    return { next };
};
// Builds the per-operation result skeleton for one operation `type`.
// Unknown types yield undefined (matching the original switch fall-through).
// Note: "insertFile" deliberately has no `elapsed` key, and "distribute"
// initializes it to undefined rather than 0.
const Result = (type) => {
    const builders = {
        stats: () => ({
            size: 0, lines: 0, records: 0, errors: [], blanks: 0,
            created: undefined, modified: undefined,
            start: Date.now(), end: undefined, elapsed: 0
        }),
        distribute: () => ({
            stores: undefined, records: 0, errors: [],
            start: Date.now(), end: undefined, elapsed: undefined
        }),
        insert: () => ({
            inserted: 0,
            start: Date.now(), end: undefined, elapsed: 0
        }),
        insertFile: () => ({
            lines: 0, inserted: 0, errors: [], blanks: 0,
            start: Date.now(), end: undefined
        }),
        select: () => ({
            lines: 0, selected: 0, ignored: 0, errors: [], blanks: 0,
            start: Date.now(), end: undefined, elapsed: 0,
            data: []
        }),
        update: () => ({
            lines: 0, selected: 0, updated: 0, unchanged: 0, errors: [], blanks: 0,
            start: Date.now(), end: undefined, elapsed: 0,
            data: [], records: []
        }),
        delete: () => ({
            lines: 0, deleted: 0, retained: 0, errors: [], blanks: 0,
            start: Date.now(), end: undefined, elapsed: 0,
            data: [], records: []
        }),
        aggregate: () => ({
            lines: 0, aggregates: {}, indexed: 0, unindexed: 0, errors: [], blanks: 0,
            start: Date.now(), end: undefined, elapsed: 0
        })
    };
    // hasOwnProperty guard keeps inherited keys (e.g. "constructor") out.
    if (!Object.prototype.hasOwnProperty.call(builders, type)) return undefined;
    return builders[type]();
}
// Builds the accumulator used when combining per-store results of `type`.
// "stats" extends a stats Result with distribution fields, "drop" is a tiny
// standalone shape, "aggregate" adds a data buffer; everything else reuses
// the plain Result shape. A `details` slot is always present.
const Reduce = (type) => {
    let reduce;
    if (type === "stats") {
        reduce = Object.assign(Result("stats"), {
            stores: 0,
            min: undefined,
            max: undefined,
            mean: undefined,
            var: undefined,
            std: undefined,
            m2: 0
        });
    } else if (type === "drop") {
        reduce = {
            dropped: false,
            start: Date.now(),
            end: 0,
            elapsed: 0
        };
    } else if (type === "aggregate") {
        reduce = Object.assign(Result("aggregate"), { data: [] });
    } else {
        reduce = Result(type);
    }
    reduce.details = undefined;
    return reduce;
};
// Creates a streaming line handler for one operation type.
// next(line[, writer]) parses the line into a Record and dispatches it to the
// type-specific handler; return() stamps timing and yields the accumulated
// results. Unparseable lines are recorded as errors, and for mutating
// operations their raw text is passed through so no data is lost.
const Handler = (type, ...functions) => {
    const results = Result(type);
    const dispatch = (record, writer) => {
        if (type === "stats") statsHandler(record, results);
        else if (type === "select") selectHandler(record, functions[0], functions[1], results);
        else if (type === "update") updateHandler(record, functions[0], functions[1], writer, results);
        else if (type === "delete") deleteHandler(record, functions[0], writer, results);
        else if (type === "aggregate") aggregateHandler(record, functions[0], functions[1], functions[2], results);
    };
    const next = (line, writer) => {
        const record = new Record(line);
        results.lines++;
        if (record.length === 0) {
            results.blanks++;
            return;
        }
        if (record.data) {
            dispatch(record, writer);
            return;
        }
        results.errors.push({ error: record.error, line: results.lines, data: record.source });
        if (type === "update" || type === "delete") {
            if (writer) {
                writer.write(record.source + "\n");
            } else {
                results.data.push(record.source);
            }
        }
    };
    const finish = () => {
        results.end = Date.now();
        results.elapsed = results.end - results.start;
        return results;
    };
    return { next: next, return: finish };
};
// Counts one successfully parsed record; the record itself is unused.
const statsHandler = (record, results) => {
    results.records += 1;
    return results;
};
// Adds the record (optionally projected) to the results when it matches;
// non-matching records are only counted.
const selectHandler = (record, selecter, projecter, results) => {
    if (!record.select(selecter)) {
        results.ignored++;
        return;
    }
    results.data.push(projecter ? record.project(projecter) : record.data);
    results.selected++;
};
// Applies `updater` to matching records. Every record — changed or not — is
// written back via `writer`, or buffered in results.data when no writer is given.
const updateHandler = (record, selecter, updater, writer, results) => {
    const matched = record.select(selecter);
    if (matched) results.selected++;
    if (matched && record.update(updater)) {
        results.updated++;
        results.records.push(record.data);
    } else {
        results.unchanged++;
    }
    const serialized = JSON.stringify(record.data);
    if (writer) {
        writer.write(serialized + "\n");
    } else {
        results.data.push(serialized);
    }
};
// Drops matching records (keeping them in results.records for reporting)
// and passes everything else through to the writer or the data buffer.
const deleteHandler = (record, selecter, writer, results) => {
    if (record.select(selecter)) {
        results.deleted++;
        results.records.push(record.data);
        return;
    }
    results.retained++;
    const line = JSON.stringify(record.data);
    if (writer) {
        writer.write(line + "\n");
    } else {
        results.data.push(line);
    }
};
// Folds one matching record into per-index, per-field aggregate buckets.
// Records whose index is falsy are counted as unindexed; numeric fields
// additionally track sum/mean/m2 for Welford-style variance accumulation.
const aggregateHandler = (record, selecter, indexer, projecter, results) => {
    if (!record.select(selecter)) return;
    const index = record.index(indexer);
    if (!index) {
        results.unindexed++;
        return;
    }
    if (results.aggregates[index]) {
        results.aggregates[index].count++;
    } else {
        results.aggregates[index] = { count: 1, aggregates: {} };
    }
    const projection = projecter ? record.project(projecter) : record.data;
    for (const field of Object.keys(projection)) {
        const value = projection[field];
        if (value === undefined) continue;
        const bucket = results.aggregates[index].aggregates[field];
        if (bucket) {
            accumulateAggregate(bucket, value);
        } else {
            const fresh = { min: value, max: value, count: 1 };
            if (typeof value === "number") {
                fresh.sum = value;
                fresh.mean = value;
                fresh.m2 = 0;
            }
            results.aggregates[index].aggregates[field] = fresh;
        }
    }
    results.indexed++;
}
// Folds one more observation into a field aggregate: min/max/count always,
// and for numbers the running sum/mean/m2 via Welford's online algorithm.
const accumulateAggregate = (index, projection) => {
    index.min = min(index.min, projection);
    index.max = max(index.max, projection);
    index.count += 1;
    if (typeof projection === "number") {
        const before = projection - index.mean;
        index.sum += projection;
        index.mean += before / index.count;
        const after = projection - index.mean;
        index.m2 += before * after;
    }
    return index;
};
// Wraps one raw line from a store file. Parsing happens eagerly in the
// constructor: on success `data` holds the parsed value; on failure `data`
// becomes undefined and `error` carries the JSON parse message, while the
// trimmed raw text remains available in `source`.
class Record {
    constructor(record) {
        this.source = record.trim();
        this.length = this.source.length;
        this.data = {};
        this.error = "";
        try {
            this.data = JSON.parse(this.source);
        } catch (e) {
            this.data = undefined;
            this.error = e.message;
        }
    }
}
// Returns whether the record matches. A throwing selecter means "no match";
// a selecter returning a non-boolean is a programming error.
Record.prototype.select = function (selecter) {
    let outcome;
    try {
        outcome = selecter(this.data);
    } catch {
        return false;
    }
    if (typeof outcome === "boolean") {
        return outcome;
    }
    throw new TypeError("Selecter must return a boolean");
};
// Replaces `data` with the updater's result. A throwing updater leaves the
// record untouched and reports failure; a non-object result is an error.
Record.prototype.update = function (updater) {
    let outcome;
    try {
        outcome = updater(this.data);
    } catch {
        return false;
    }
    if (typeof outcome !== "object") {
        throw new TypeError("Updater must return an object");
    }
    this.data = outcome;
    return true;
};
// Maps `data` through the projecter. Throwing yields undefined; the result
// must be a plain (non-array) object.
Record.prototype.project = function (projecter) {
    let outcome;
    try {
        outcome = projecter(this.data);
    } catch {
        return undefined;
    }
    if (Array.isArray(outcome) || typeof outcome !== "object") {
        throw new TypeError("Projecter must return an object");
    }
    return outcome;
};
// Computes the aggregation bucket key; throwing yields undefined.
Record.prototype.index = function (indexer) {
    try {
        return indexer(this.data);
    } catch {
        return undefined;
    }
};
// Combines the per-store result objects of one operation into a single
// database-wide result. Dispatches per-type merge logic, accumulates the
// common counters (lines/errors/blanks) and the overall time window, then
// finalizes type-specific derived values (stats variance, aggregate output).
const Reducer = (type, results) => {
    var _reduce = Reduce(type);
    var i = 0;
    var aggregates = {};
    for (const result of results) {
        switch (type) {
            case "stats":
                statsReducer(_reduce, result, i);
                break;
            case "insert":
                insertReducer(_reduce, result);
                break;
            case "select":
                selectReducer(_reduce, result);
                break;
            case "update":
                updateReducer(_reduce, result);
                break;
            case "delete":
                deleteReducer(_reduce, result);
                break;
            case "aggregate":
                aggregateReducer(_reduce, result, aggregates);
                break
        }
        if (type === "stats") {
            _reduce.stores++;
            i++;
        }
        if (type === "drop") {
            _reduce.dropped = true;
        } else if (type !== "insert") {
            // insert results carry no per-line bookkeeping.
            _reduce.lines += result.lines;
            _reduce.errors = _reduce.errors.concat(result.errors);
            _reduce.blanks += result.blanks;
        }
        // Overall window spans the earliest start and latest end of any store.
        _reduce.start = min(_reduce.start, result.start);
        _reduce.end = max(_reduce.end, result.end);
    }
    if (type === "stats") {
        _reduce.size = convertSize(_reduce.size);
        // Population variance over the per-store record counts.
        _reduce.var = _reduce.m2 / (results.length);
        _reduce.std = Math.sqrt(_reduce.m2 / (results.length));
        delete _reduce.m2;
    } else if (type === "aggregate") {
        // Flatten the nested index->field maps into the reported array form.
        for (const index of Object.keys(aggregates)) {
            var aggregate = {
                index: index,
                count: aggregates[index].count,
                aggregates: []
            };
            for (const field of Object.keys(aggregates[index].aggregates)) {
                delete aggregates[index].aggregates[field].m2;
                aggregate.aggregates.push({ field: field, data: aggregates[index].aggregates[field] });
            }
            _reduce.data.push(aggregate);
        }
        delete _reduce.aggregates;
    }
    _reduce.elapsed = _reduce.end - _reduce.start;
    // Per-store results are preserved for callers that want the breakdown.
    _reduce.details = results;
    return _reduce;
};
// Folds one store's stats into the rolling totals. The mean/m2 of per-store
// record counts use Welford's online algorithm (m2 is later turned into
// variance by Reducer).
const statsReducer = (reduce, result, i) => {
    reduce.size += result.size;
    reduce.records += result.records;
    reduce.min = min(reduce.min, result.records);
    reduce.max = max(reduce.max, result.records);
    // Seed the running mean with the first observation (delta1 is then 0).
    if (reduce.mean === undefined) reduce.mean = result.records;
    const delta1 = result.records - reduce.mean;
    // Fix: i is 0-based, so (i + 1) samples have been seen after this result;
    // the previous divisor (i + 2) skewed both the running mean and m2
    // (compare the count-based form in accumulateAggregate).
    reduce.mean += delta1 / (i + 1);
    const delta2 = result.records - reduce.mean;
    reduce.m2 += delta1 * delta2;
    reduce.created = min(reduce.created, result.created);
    reduce.modified = max(reduce.modified, result.modified);
};
// Accumulates the inserted-record count across stores.
const insertReducer = (reduce, result) => {
    reduce.inserted = reduce.inserted + result.inserted;
};
// Merges one store's select results; the per-store data buffer is absorbed
// into the combined result and then removed to free memory.
const selectReducer = (reduce, result) => {
    reduce.selected += result.selected;
    reduce.ignored += result.ignored;
    reduce.data = [...reduce.data, ...result.data];
    delete result.data;
};
// Accumulates update counters across stores.
const updateReducer = (reduce, result) => {
    for (const key of ["selected", "updated", "unchanged"]) {
        reduce[key] += result[key];
    }
};
// Accumulates delete counters across stores.
const deleteReducer = (reduce, result) => {
    for (const key of ["deleted", "retained"]) {
        reduce[key] += result[key];
    }
};
// Merges one store's aggregate buckets into the shared `aggregates` map.
// First-seen fields derive variance/stddev figures from their m2 directly;
// already-present fields are combined via reduceAggregate. The per-store
// aggregates are deleted afterwards to free memory.
const aggregateReducer = (reduce, result, aggregates) => {
    reduce.indexed += result.indexed;
    reduce.unindexed += result.unindexed;
    for (const index of Object.keys(result.aggregates)) {
        const incoming = result.aggregates[index];
        if (aggregates[index]) {
            aggregates[index].count += incoming.count;
        } else {
            aggregates[index] = { count: incoming.count, aggregates: {} };
        }
        for (const field of Object.keys(incoming.aggregates)) {
            const source = incoming.aggregates[field];
            const target = aggregates[index].aggregates[field];
            if (target) {
                reduceAggregate(target, source);
                continue;
            }
            const merged = { min: source.min, max: source.max, count: source.count };
            if (source.m2 !== undefined) {
                merged.sum = source.sum;
                merged.mean = source.mean;
                merged.varp = source.m2 / source.count;
                merged.vars = source.m2 / (source.count - 1);
                merged.stdp = Math.sqrt(source.m2 / source.count);
                merged.stds = Math.sqrt(source.m2 / (source.count - 1));
                merged.m2 = source.m2;
            }
            aggregates[index].aggregates[field] = merged;
        }
    }
    delete result.aggregates;
};
// Combines two partial aggregates for the same field, using the parallel
// (Chan et al.) form of Welford's algorithm for the variance terms.
const reduceAggregate = (aggregate, result) => {
    const total = aggregate.count + result.count;
    aggregate.min = min(aggregate.min, result.min);
    aggregate.max = max(aggregate.max, result.max);
    if (result.m2 !== undefined) {
        const meanGap = result.mean - aggregate.mean;
        const combinedM2 = aggregate.m2 + result.m2 +
            (Math.pow(meanGap, 2) * ((aggregate.count * result.count) / total));
        aggregate.m2 = combinedM2;
        aggregate.varp = combinedM2 / total;
        aggregate.vars = combinedM2 / (total - 1);
        aggregate.stdp = Math.sqrt(combinedM2 / total);
        aggregate.stds = Math.sqrt(combinedM2 / (total - 1));
    }
    if (result.sum !== undefined) {
        // Exact combined mean from the sums (not the incremental estimate).
        aggregate.mean = (aggregate.sum + result.sum) / total;
        aggregate.sum += result.sum;
    }
    aggregate.count = total;
};
// Shared processing primitives used by the store-level operations.
exports.Randomizer = Randomizer;
exports.Result = Result;
exports.Reduce = Reduce;
exports.Handler = Handler;
exports.Reducer = Reducer;

View File

@ -1,178 +0,0 @@
"use strict";
const {
access,
constants,
existsSync,
rename,
renameSync,
rmdir,
rmdirSync,
unlink,
unlinkSync
} = require("graceful-fs");
const { promisify } = require("util");
// Returns the smaller of a and b; an undefined b means "no competitor",
// so a wins (used to seed running minimums).
const min = (a, b) => {
    return (b === undefined || a <= b) ? a : b;
};
// Returns the larger of a and b; an undefined b means "no competitor",
// so a wins (used to seed running maximums).
const max = (a, b) => {
    return (b === undefined || a > b) ? a : b;
};
// Converts a byte count into a human-readable string (bytes/KB/MB/GB),
// rounded to two decimal places.
const convertSize = (size) => {
    const sizes = ["bytes", "KB", "MB", "GB"];
    // Fix: log2 of 0 (or a negative) is -Infinity/NaN, which previously
    // produced the string "NaN undefined".
    if (!(size > 0)) return "0 bytes";
    var index = Math.floor(Math.log2(size) / 10);
    if (index > 3) index = 3;
    return Math.round(((size / Math.pow(1024, index)) + Number.EPSILON) * 100) / 100 + " " + sizes[index];
};
// Resolves true when `a` exists (F_OK probe). A missing file is an expected
// condition, so failures resolve false without logging.
const fileExists = async (a) => {
    try {
        await promisify(access)(a, constants.F_OK);
    } catch (error) {
        return false;
    }
    return true;
}
// Synchronous existence check; unexpected failures are logged and
// reported as "does not exist".
const fileExistsSync = (a) => {
    let exists = false;
    try {
        exists = existsSync(a);
    } catch (error) {
        console.error(error);
    }
    return exists;
}
// Renames `a` to `b`, resolving a success flag; failures are logged.
const moveFile = async (a, b) => {
    try {
        await promisify(rename)(a, b);
    } catch (error) {
        console.error(error);
        return false;
    }
    return true;
};
// Synchronous rename of `a` to `b`, returning a success flag; failures are logged.
const moveFileSync = (a, b) => {
    try {
        renameSync(a, b);
    } catch (error) {
        console.error(error);
        return false;
    }
    return true;
};
// Unlinks a file, resolving a success flag; failures are logged.
const deleteFile = async (filepath) => {
    try {
        await promisify(unlink)(filepath);
    } catch (error) {
        console.error(error);
        return false;
    }
    return true;
};
// Synchronously unlinks a file, returning a success flag; failures are logged.
const deleteFileSync = (filepath) => {
    try {
        unlinkSync(filepath);
    } catch (error) {
        console.error(error);
        return false;
    }
    return true;
}
// Swaps `b` into `a`'s place: back up a as a.old, move b over a, restore
// the backup if that move fails, and delete the backup on success.
const replaceFile = async (a, b) => {
    const backup = a + ".old";
    if (!await moveFile(a, backup)) return false;
    if (await moveFile(b, a)) {
        await deleteFile(backup);
        return true;
    }
    await moveFile(backup, a);
    return false;
};
// Synchronously swaps `b` into `a`'s place, keeping a backup of `a` until
// the swap succeeds and restoring it if moving `b` into place fails.
const replaceFileSync = (a, b) => {
    if (!moveFileSync(a, a + ".old")) return false;
    if (!moveFileSync(b, a)) {
        // Fix: the rollback called the async moveFile, so this function could
        // return before the original file was restored (and the returned
        // promise was left floating); use the synchronous variant.
        moveFileSync(a + ".old", a);
        return false;
    }
    deleteFileSync(a + ".old");
    return true;
};
// Removes a directory, resolving false (never throwing) on any failure.
const deleteDirectory = async (dirpath) => {
    try {
        await promisify(rmdir)(dirpath);
    } catch {
        return false;
    }
    return true;
};
// Synchronously removes a directory, returning false (never throwing) on failure.
const deleteDirectorySync = (dirpath) => {
    try {
        rmdirSync(dirpath);
    } catch {
        return false;
    }
    return true;
};
// Invokes a lock release function, swallowing "already released" /
// "not acquired" errors and re-throwing anything else tagged with the
// store path for easier diagnosis.
const releaseLock = async (store, release) => {
    try {
        await release();
    } catch (error) {
        const benign = error.code === "ERELEASED" || error.code === "ENOTACQUIRED";
        if (!benign) {
            error.store = store;
            throw error;
        }
    }
}
// Synchronous variant of releaseLock: swallow benign release errors,
// re-throw anything else tagged with the store path.
const releaseLockSync = (store, release) => {
    try {
        release();
    } catch (error) {
        const benign = error.code === "ERELEASED" || error.code === "ENOTACQUIRED";
        if (!benign) {
            error.store = store;
            throw error;
        }
    }
}
// Small shared utilities: comparisons, size formatting, tolerant fs helpers,
// and lock-release wrappers used throughout njodb.
exports.min = min;
exports.max = max;
exports.convertSize = convertSize;
exports.fileExists = fileExists;
exports.fileExistsSync = fileExistsSync;
exports.moveFile = moveFile;
exports.moveFileSync = moveFileSync;
exports.replaceFile = replaceFile;
exports.replaceFileSync = replaceFileSync;
exports.deleteFile = deleteFile;
exports.deleteFileSync = deleteFileSync;
exports.deleteDirectory = deleteDirectory;
exports.deleteDirectorySync = deleteDirectorySync;
exports.releaseLock = releaseLock;
exports.releaseLockSync = releaseLockSync;

View File

@ -1,70 +0,0 @@
"use strict";
const { existsSync } = require("graceful-fs");
// Validates a size/count value: must be a number greater than zero.
const validateSize = (s) => {
    if (typeof s !== "number") throw new TypeError("Size must be a number");
    if (s <= 0) throw new RangeError("Size must be greater than zero");
    return s;
};
// Validates a name: must be a string with non-whitespace content.
const validateName = (n) => {
    if (typeof n !== "string") throw new TypeError("Name must be a string");
    if (n.trim().length === 0) throw new Error("Name must be a non-blank string")
    return n;
};
// Validates a filesystem path: a non-blank string naming an existing location.
const validatePath = (p) => {
    if (typeof p !== "string") throw new TypeError("Path must be a string");
    if (p.trim().length === 0) throw new Error("Path must be a non-blank string");
    if (!existsSync(p)) throw new Error("Path does not exist");
    return p;
};
// Validates that the value is an array.
const validateArray = (a) => {
    if (Array.isArray(a)) return a;
    throw new TypeError("Not an array");
};
// Validates that typeof the value is "object". Note this intentionally
// mirrors the original loose check: null and arrays both pass.
const validateObject = (o) => {
    if (typeof o === "object") return o;
    throw new TypeError("Not an object");
};
// Validates that the value is callable.
const validateFunction = (f) => {
    if (typeof f === "function") return f;
    throw new TypeError("Not a function")
}
// Input validators used by the njodb public API.
exports.validateSize = validateSize;
exports.validateName = validateName;
exports.validatePath = validatePath;
exports.validateArray = validateArray;
exports.validateObject = validateObject;
exports.validateFunction = validateFunction;

View File

@ -1,21 +0,0 @@
The MIT License (MIT)
Copyright (c) 2018 Made With MOXY Lda <hello@moxy.studio>
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in
all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
THE SOFTWARE.

View File

@ -1,46 +0,0 @@
'use strict';
//
// used by njodb
// Source: https://github.com/moxystudio/node-proper-lockfile
//
const lockfile = require('./lib/lockfile');
const { toPromise, toSync, toSyncOptions } = require('./lib/adapter');
// Acquires an advisory lock on `file`; resolves to an async release function.
async function lock(file, options) {
    const acquire = toPromise(lockfile.lock);
    const release = await acquire(file, options);
    return toPromise(release);
}
// Synchronously acquires an advisory lock; returns a sync release function.
function lockSync(file, options) {
    const acquire = toSync(lockfile.lock);
    const release = acquire(file, toSyncOptions(options));
    return toSync(release);
}
// Force-releases the lock on `file` (promise-based).
function unlock(file, options) {
    const run = toPromise(lockfile.unlock);
    return run(file, options);
}
// Synchronously force-releases the lock on `file`.
function unlockSync(file, options) {
    const run = toSync(lockfile.unlock);
    return run(file, toSyncOptions(options));
}
// Resolves whether `file` is currently locked.
function check(file, options) {
    const run = toPromise(lockfile.check);
    return run(file, options);
}
// Synchronously reports whether `file` is currently locked.
function checkSync(file, options) {
    const run = toSync(lockfile.check);
    return run(file, toSyncOptions(options));
}
// The module itself is callable as lock(); named variants are attached.
module.exports = lock;
module.exports.lock = lock;
module.exports.unlock = unlock;
module.exports.lockSync = lockSync;
module.exports.unlockSync = unlockSync;
module.exports.check = check;
module.exports.checkSync = checkSync;

View File

@ -1,85 +0,0 @@
'use strict';
const fs = require('graceful-fs');
// Returns a copy of `fs` where the async-style methods the lockfile code
// relies on are backed by their *Sync counterparts, with the callback
// invoked inline (success or failure) so the flow stays synchronous.
function createSyncFs(fs) {
    const wrapped = { ...fs };
    for (const name of ['mkdir', 'realpath', 'stat', 'rmdir', 'utimes']) {
        wrapped[name] = (...args) => {
            const callback = args.pop();
            let result;
            try {
                result = fs[`${name}Sync`](...args);
            } catch (err) {
                return callback(err);
            }
            callback(null, result);
        };
    }
    return wrapped;
}
// ----------------------------------------------------------
// Wraps a callback-last function so it returns a Promise instead.
function toPromise(method) {
    return (...args) => {
        return new Promise((resolve, reject) => {
            method(...args, (err, result) => {
                if (err) {
                    reject(err);
                } else {
                    resolve(result);
                }
            });
        });
    };
}
// Runs a callback-last function whose callback is known to fire
// synchronously, surfacing the callback's error as a throw and its
// result as the return value.
function toSync(method) {
    return (...args) => {
        let failure;
        let outcome;
        method(...args, (_err, _result) => {
            failure = _err;
            outcome = _result;
        });
        if (failure) {
            throw failure;
        }
        return outcome;
    };
}
// Normalizes options for the sync API: shallow-clone (we mutate), swap in a
// sync-backed fs, and reject configurations that ask for retries — retrying
// requires an async flow, which the sync API cannot provide.
function toSyncOptions(options) {
    const cloned = { ...options };
    cloned.fs = createSyncFs(cloned.fs || fs);
    const retries = cloned.retries;
    const retryCount = typeof retries === 'number' ? retries : (retries && retries.retries);
    if (typeof retryCount === 'number' && retryCount > 0) {
        throw Object.assign(new Error('Cannot use retries with the sync api'), { code: 'ESYNC' });
    }
    return cloned;
}
// Adapters bridging the callback-style lockfile internals to promise/sync APIs.
module.exports = {
    toPromise,
    toSync,
    toSyncOptions,
};

View File

@ -1,345 +0,0 @@
'use strict';
const path = require('path');
const fs = require('graceful-fs');
const retry = require('../../retry');
const onExit = require('../../signalExit');
const mtimePrecision = require('./mtime-precision');
const locks = {};
// Lockfile path for `file`: an explicit override, or "<file>.lock".
function getLockFile(file, options) {
    return options.lockfilePath || `${file}.lock`;
}
// Resolves `file` to a canonical path: via the (injectable) fs realpath when
// options.realpath is set — which also follows symlinks — or plain
// path.resolve otherwise. Result is delivered through the callback.
function resolveCanonicalPath(file, options, callback) {
    if (options.realpath) {
        // realpath also resolves relative paths
        options.fs.realpath(file, callback);
        return;
    }
    callback(null, path.resolve(file));
}
// Attempts to acquire the lock for `file` by atomically creating its
// lockfile directory. On EEXIST, the existing lock is inspected for
// staleness; stale locks are removed and acquisition retried once with
// staleness checking disabled (stale: 0) to avoid infinite recursion.
// callback(err, mtime, mtimePrecision) on success.
function acquireLock(file, options, callback) {
    const lockfilePath = getLockFile(file, options);
    // Use mkdir to create the lockfile (atomic operation)
    options.fs.mkdir(lockfilePath, (err) => {
        if (!err) {
            // At this point, we acquired the lock!
            // Probe the mtime precision
            return mtimePrecision.probe(lockfilePath, options.fs, (err, mtime, mtimePrecision) => {
                // If it failed, try to remove the lock..
                /* istanbul ignore if */
                if (err) {
                    options.fs.rmdir(lockfilePath, () => { });
                    return callback(err);
                }
                callback(null, mtime, mtimePrecision);
            });
        }
        // If error is not EEXIST then some other error occurred while locking
        if (err.code !== 'EEXIST') {
            return callback(err);
        }
        // Otherwise, check if lock is stale by analyzing the file mtime
        if (options.stale <= 0) {
            return callback(Object.assign(new Error('Lock file is already being held'), { code: 'ELOCKED', file }));
        }
        options.fs.stat(lockfilePath, (err, stat) => {
            if (err) {
                // Retry if the lockfile has been removed (meanwhile)
                // Skip stale check to avoid recursiveness
                if (err.code === 'ENOENT') {
                    return acquireLock(file, { ...options, stale: 0 }, callback);
                }
                return callback(err);
            }
            if (!isLockStale(stat, options)) {
                return callback(Object.assign(new Error('Lock file is already being held'), { code: 'ELOCKED', file }));
            }
            // If it's stale, remove it and try again!
            // Skip stale check to avoid recursiveness
            removeLock(file, options, (err) => {
                if (err) {
                    return callback(err);
                }
                acquireLock(file, { ...options, stale: 0 }, callback);
            });
        });
    });
}
// A lock is stale when its mtime is older than the stale threshold.
function isLockStale(stat, options) {
    const cutoff = Date.now() - options.stale;
    return stat.mtime.getTime() < cutoff;
}
// Deletes the lockfile directory; a missing lockfile (ENOENT) counts as success.
function removeLock(file, options, callback) {
    options.fs.rmdir(getLockFile(file, options), (err) => {
        if (err && err.code !== 'ENOENT') {
            callback(err);
        } else {
            callback();
        }
    });
}
// Keeps an acquired lock alive by periodically refreshing the lockfile's
// mtime. If the lockfile disappears, its mtime is no longer ours, or the
// refresh cannot complete within the stale threshold, the lock is declared
// compromised and the owner is notified. Reschedules itself after every
// attempt; transient errors shorten the retry delay to 1s.
function updateLock(file, options) {
    const lock = locks[file];
    // Just for safety, should never happen
    /* istanbul ignore if */
    if (lock.updateTimeout) {
        return;
    }
    lock.updateDelay = lock.updateDelay || options.update;
    lock.updateTimeout = setTimeout(() => {
        lock.updateTimeout = null;
        // Stat the file to check if mtime is still ours
        // If it is, we can still recover from a system sleep or a busy event loop
        options.fs.stat(lock.lockfilePath, (err, stat) => {
            const isOverThreshold = lock.lastUpdate + options.stale < Date.now();
            // If it failed to update the lockfile, keep trying unless
            // the lockfile was deleted or we are over the threshold
            if (err) {
                if (err.code === 'ENOENT' || isOverThreshold) {
                    console.error(`lockfile "${file}" compromised. stat code=${err.code}, isOverThreshold=${isOverThreshold}`)
                    return setLockAsCompromised(file, lock, Object.assign(err, { code: 'ECOMPROMISED' }));
                }
                lock.updateDelay = 1000;
                return updateLock(file, options);
            }
            const isMtimeOurs = lock.mtime.getTime() === stat.mtime.getTime();
            if (!isMtimeOurs) {
                // Someone else touched the lockfile: our ownership is gone.
                console.error(`lockfile "${file}" compromised. mtime is not ours`)
                return setLockAsCompromised(
                    file,
                    lock,
                    Object.assign(
                        new Error('Unable to update lock within the stale threshold'),
                        { code: 'ECOMPROMISED' }
                    ));
            }
            const mtime = mtimePrecision.getMtime(lock.mtimePrecision);
            options.fs.utimes(lock.lockfilePath, mtime, mtime, (err) => {
                const isOverThreshold = lock.lastUpdate + options.stale < Date.now();
                // Ignore if the lock was released
                if (lock.released) {
                    return;
                }
                // If it failed to update the lockfile, keep trying unless
                // the lockfile was deleted or we are over the threshold
                if (err) {
                    if (err.code === 'ENOENT' || isOverThreshold) {
                        console.error(`lockfile "${file}" compromised. utimes code=${err.code}, isOverThreshold=${isOverThreshold}`)
                        return setLockAsCompromised(file, lock, Object.assign(err, { code: 'ECOMPROMISED' }));
                    }
                    lock.updateDelay = 1000;
                    return updateLock(file, options);
                }
                // All ok, keep updating..
                lock.mtime = mtime;
                lock.lastUpdate = Date.now();
                lock.updateDelay = null;
                updateLock(file, options);
            });
        });
    }, lock.updateDelay);
    // Unref the timer so that the nodejs process can exit freely
    // This is safe because all acquired locks will be automatically released
    // on process exit
    // We first check that `lock.updateTimeout.unref` exists because some users
    // may be using this module outside of NodeJS (e.g., in an electron app),
    // and in those cases `setTimeout` return an integer.
    /* istanbul ignore else */
    if (lock.updateTimeout.unref) {
        lock.updateTimeout.unref();
    }
}
// Marks a lock as compromised: flags it released, cancels any pending mtime
// refresh timer, drops it from the registry, and notifies the owner through
// the user-supplied onCompromised handler.
function setLockAsCompromised(file, lock, err) {
  // Signal the lock has been released
  lock.released = true;
  // Just for safety, at this point updateTimeout should be null
  /* istanbul ignore if */
  if (lock.updateTimeout) {
    clearTimeout(lock.updateTimeout);
  }
  // Only remove the registry entry if it still points at this exact lock
  const registered = locks[file];
  if (registered === lock) {
    delete locks[file];
  }
  lock.options.onCompromised(err);
}
// ----------------------------------------------------------
/**
 * Acquires a lock on `file`, retrying acquisition per `options.retries`.
 * On success, calls back with (null, release) where `release(cb)` unlocks;
 * on failure calls back with the most frequent error across attempts.
 * @param {string} file - path to lock
 * @param {object} options - stale / update / realpath / retries / fs / onCompromised
 * @param {Function} callback
 */
function lock(file, options, callback) {
  /* istanbul ignore next */
  options = {
    stale: 10000,
    update: null,
    realpath: true,
    retries: 0,
    fs,
    onCompromised: (err) => { throw err; },
    ...options,
  };
  // Normalize retries into the object form the retry module expects
  options.retries = options.retries || 0;
  options.retries = typeof options.retries === 'number' ? { retries: options.retries } : options.retries;
  // Clamp: staleness >= 2s; update interval in [1s, stale/2], default stale/2
  options.stale = Math.max(options.stale || 0, 2000);
  options.update = options.update == null ? options.stale / 2 : options.update || 0;
  options.update = Math.max(Math.min(options.update, options.stale / 2), 1000);
  // Resolve to a canonical file path
  resolveCanonicalPath(file, options, (err, file) => {
    if (err) {
      return callback(err);
    }
    // Attempt to acquire the lock
    const operation = retry.operation(options.retries);
    operation.attempt(() => {
      acquireLock(file, options, (err, mtime, mtimePrecision) => {
        if (operation.retry(err)) {
          return;
        }
        if (err) {
          return callback(operation.mainError());
        }
        // We now own the lock
        const lock = locks[file] = {
          lockfilePath: getLockFile(file, options),
          mtime,
          mtimePrecision,
          options,
          lastUpdate: Date.now(),
        };
        // We must keep the lock fresh to avoid staleness
        updateLock(file, options);
        callback(null, (releasedCallback) => {
          if (lock.released) {
            return releasedCallback &&
              releasedCallback(Object.assign(new Error('Lock is already released'), { code: 'ERELEASED' }));
          }
          // Not necessary to use realpath twice when unlocking
          unlock(file, { ...options, realpath: false }, releasedCallback);
        });
      });
    });
  });
}
/**
 * Releases a previously acquired lock on `file`.
 * Calls back with an ENOTACQUIRED error if no lock is owned for the resolved path.
 */
function unlock(file, options, callback) {
  const opts = { fs, realpath: true, ...options };
  // Resolve to a canonical file path
  resolveCanonicalPath(file, opts, (err, resolvedFile) => {
    if (err) {
      return callback(err);
    }
    // Skip if the lock is not acquired
    const lock = locks[resolvedFile];
    if (!lock) {
      const notAcquired = Object.assign(new Error('Lock is not acquired/owned by you'), { code: 'ENOTACQUIRED' });
      return callback(notAcquired);
    }
    // Cancel the mtime refresh, flag as released, forget the lock, then
    // remove the lockfile from disk.
    if (lock.updateTimeout) {
      clearTimeout(lock.updateTimeout);
    }
    lock.released = true;
    delete locks[resolvedFile];
    removeLock(resolvedFile, opts, callback);
  });
}
/**
 * Checks whether `file` is currently locked (lockfile exists and is not stale).
 * Calls back with (null, boolean) or an error.
 */
function check(file, options, callback) {
  const opts = { stale: 10000, realpath: true, fs, ...options };
  // Enforce a sane minimum staleness threshold
  opts.stale = Math.max(opts.stale || 0, 2000);
  // Resolve to a canonical file path
  resolveCanonicalPath(file, opts, (err, resolvedFile) => {
    if (err) {
      return callback(err);
    }
    // Check if lockfile exists
    opts.fs.stat(getLockFile(resolvedFile, opts), (statErr, stat) => {
      if (statErr) {
        // If it does not exist, the file is not locked; otherwise propagate the error
        return statErr.code === 'ENOENT' ? callback(null, false) : callback(statErr);
      }
      // Lockfile exists: it only counts as locked while it is not stale
      return callback(null, !isLockStale(stat, opts));
    });
  });
}
// Exposes the in-memory registry of currently held locks (file -> lock metadata),
// mainly for diagnostics and tests.
function getLocks() {
  return locks;
}
// Remove acquired locks on exit
/* istanbul ignore next */
onExit(() => {
  for (const file in locks) {
    const options = locks[file].options;
    // Best-effort synchronous cleanup; errors are ignored since the process is exiting
    try { options.fs.rmdirSync(getLockFile(file, options)); } catch (e) { /* Empty */ }
  }
});
// Public API
module.exports.lock = lock;
module.exports.unlock = unlock;
module.exports.check = check;
module.exports.getLocks = getLocks;

View File

@ -1,55 +0,0 @@
'use strict';
// Symbol used to cache the detected mtime precision directly on the `fs` object
const cacheSymbol = Symbol();
/**
 * Detects the mtime precision ('s' or 'ms') supported by the filesystem
 * backing `file`. The result is cached on the `fs` object itself, so the
 * probe write only ever happens once per fs implementation.
 * Calls back with (err) or (null, mtime, precision).
 */
function probe(file, fs, callback) {
  const cachedPrecision = fs[cacheSymbol];
  // Fast path: precision already known, just report the current mtime
  if (cachedPrecision) {
    return fs.stat(file, (err, stat) => {
      /* istanbul ignore if */
      if (err) {
        return callback(err);
      }
      callback(null, stat.mtime, cachedPrecision);
    });
  }
  // Set mtime by ceiling Date.now() to seconds + 5ms so that it's "not on the second"
  const mtime = new Date((Math.ceil(Date.now() / 1000) * 1000) + 5);
  fs.utimes(file, mtime, mtime, (utimesErr) => {
    /* istanbul ignore if */
    if (utimesErr) {
      return callback(utimesErr);
    }
    fs.stat(file, (statErr, stat) => {
      /* istanbul ignore if */
      if (statErr) {
        return callback(statErr);
      }
      // If the sub-second part survived the round-trip, the fs supports ms precision
      const precision = stat.mtime.getTime() % 1000 === 0 ? 's' : 'ms';
      // Cache the precision in a non-enumerable way
      Object.defineProperty(fs, cacheSymbol, { value: precision });
      callback(null, stat.mtime, precision);
    });
  });
}
// Returns "now" as a Date, rounded up to whole seconds when precision is 's'.
function getMtime(precision) {
  const nowMs = Date.now();
  const adjusted = precision === 's' ? Math.ceil(nowMs / 1000) * 1000 : nowMs;
  return new Date(adjusted);
}
// Public API
module.exports.probe = probe;
module.exports.getMtime = getMtime;

View File

@ -1,21 +0,0 @@
Copyright (c) 2011:
Tim Koschützki (tim@debuggable.com)
Felix Geisendörfer (felix@debuggable.com)
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in
all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
THE SOFTWARE.

View File

@ -1,105 +0,0 @@
//
// used by properLockFile
// Source: https://github.com/tim-kos/node-retry
//
var RetryOperation = require('./retry_operation');
exports.operation = function (options) {
var timeouts = exports.timeouts(options);
return new RetryOperation(timeouts, {
forever: options && options.forever,
unref: options && options.unref,
maxRetryTime: options && options.maxRetryTime
});
};
exports.timeouts = function (options) {
if (options instanceof Array) {
return [].concat(options);
}
var opts = {
retries: 10,
factor: 2,
minTimeout: 1 * 1000,
maxTimeout: Infinity,
randomize: false
};
for (var key in options) {
opts[key] = options[key];
}
if (opts.minTimeout > opts.maxTimeout) {
throw new Error('minTimeout is greater than maxTimeout');
}
var timeouts = [];
for (var i = 0; i < opts.retries; i++) {
timeouts.push(this.createTimeout(i, opts));
}
if (options && options.forever && !timeouts.length) {
timeouts.push(this.createTimeout(i, opts));
}
// sort the array numerically ascending
timeouts.sort(function (a, b) {
return a - b;
});
return timeouts;
};
exports.createTimeout = function (attempt, opts) {
var random = (opts.randomize)
? (Math.random() + 1)
: 1;
var timeout = Math.round(random * opts.minTimeout * Math.pow(opts.factor, attempt));
timeout = Math.min(timeout, opts.maxTimeout);
return timeout;
};
// Wraps the given methods of `obj` (or every function-valued property when
// `methods` is omitted) so each call is automatically retried per `options`.
// The wrapped method is assumed to take a node-style callback as its last argument.
exports.wrap = function (obj, options, methods) {
  // Support the wrap(obj, methods) call form with options omitted
  if (options instanceof Array) {
    methods = options;
    options = null;
  }
  if (!methods) {
    methods = [];
    for (var key in obj) {
      if (typeof obj[key] === 'function') {
        methods.push(key);
      }
    }
  }
  for (var i = 0; i < methods.length; i++) {
    var method = methods[i];
    var original = obj[method];
    obj[method] = function retryWrapper(original) {
      var op = exports.operation(options);
      var args = Array.prototype.slice.call(arguments, 1);
      // The last user-supplied argument is the node-style callback
      var callback = args.pop();
      args.push(function (err) {
        if (op.retry(err)) {
          return;
        }
        if (err) {
          // Surface the most frequent error seen across attempts
          arguments[0] = op.mainError();
        }
        callback.apply(this, arguments);
      });
      op.attempt(function () {
        original.apply(obj, args);
      });
    }.bind(obj, original);
    obj[method].options = options;
  }
};

View File

@ -1,158 +0,0 @@
/**
 * Tracks the state of a retryable operation: remaining timeouts, collected
 * errors, and the attempt counter.
 * @param {number[]} timeouts - retry delays in ms, consumed as attempts fail
 * @param {object|boolean} options - options object, or a boolean meaning `forever`
 */
function RetryOperation(timeouts, options) {
  // Compatibility for the old (timeouts, retryForever) signature
  let opts = options;
  if (typeof opts === 'boolean') {
    opts = { forever: opts };
  }
  this._originalTimeouts = JSON.parse(JSON.stringify(timeouts));
  this._timeouts = timeouts;
  this._options = opts || {};
  this._maxRetryTime = (opts && opts.maxRetryTime) || Infinity;
  this._fn = null;
  this._errors = [];
  this._attempts = 1;
  this._operationTimeout = null;
  this._operationTimeoutCb = null;
  this._timeout = null;
  this._operationStart = null;
  // In forever mode, keep a pristine copy of the timeouts to refill from
  if (this._options.forever) {
    this._cachedTimeouts = this._timeouts.slice(0);
  }
}
// Consumed by the vendored retry module (./retry)
module.exports = RetryOperation;
// Restores the operation to its initial state so it can be reused.
RetryOperation.prototype.reset = function () {
  this._timeouts = this._originalTimeouts;
  this._attempts = 1;
}
// Cancels any pending retry timer and prevents all further retries.
RetryOperation.prototype.stop = function () {
  const pendingTimer = this._timeout;
  if (pendingTimer) {
    clearTimeout(pendingTimer);
  }
  this._timeouts = [];
  this._cachedTimeouts = null;
};
/**
 * Registers an attempt failure. Returns true when another attempt was
 * scheduled (after the next backoff delay); returns false when no error was
 * given, retries are exhausted, or maxRetryTime has elapsed.
 * @param {Error} err - the error from the failed attempt
 * @returns {boolean}
 */
RetryOperation.prototype.retry = function(err) {
  if (this._timeout) {
    clearTimeout(this._timeout);
  }
  if (!err) {
    return false;
  }
  var currentTime = new Date().getTime();
  if (err && currentTime - this._operationStart >= this._maxRetryTime) {
    this._errors.unshift(new Error('RetryOperation timeout occurred'));
    return false;
  }
  this._errors.push(err);
  var timeout = this._timeouts.shift();
  if (timeout === undefined) {
    if (this._cachedTimeouts) {
      // retry forever, only keep last error
      this._errors.splice(this._errors.length - 1, this._errors.length);
      this._timeouts = this._cachedTimeouts.slice(0);
      timeout = this._timeouts.shift();
    } else {
      return false;
    }
  }
  var self = this;
  // Schedule the next attempt after the backoff delay; also re-arm the
  // per-attempt operation timeout callback if one was configured.
  var timer = setTimeout(function() {
    self._attempts++;
    if (self._operationTimeoutCb) {
      self._timeout = setTimeout(function() {
        self._operationTimeoutCb(self._attempts);
      }, self._operationTimeout);
      if (self._options.unref) {
        self._timeout.unref();
      }
    }
    self._fn(self._attempts);
  }, timeout);
  if (this._options.unref) {
    timer.unref();
  }
  return true;
};
/**
 * Starts the operation by invoking `fn(attemptNumber)`.
 * @param {Function} fn - the work to perform; call `retry(err)` on failure
 * @param {object} [timeoutOps] - optional { timeout, cb } pair; `cb` fires when
 *   an attempt runs longer than `timeout` ms
 */
RetryOperation.prototype.attempt = function(fn, timeoutOps) {
  this._fn = fn;
  if (timeoutOps) {
    if (timeoutOps.timeout) {
      this._operationTimeout = timeoutOps.timeout;
    }
    if (timeoutOps.cb) {
      this._operationTimeoutCb = timeoutOps.cb;
    }
  }
  var self = this;
  if (this._operationTimeoutCb) {
    this._timeout = setTimeout(function() {
      self._operationTimeoutCb();
    }, self._operationTimeout);
  }
  // Record the start time so retry() can enforce maxRetryTime
  this._operationStart = new Date().getTime();
  this._fn(this._attempts);
};
// Deprecated entry points kept for backwards compatibility; use attempt() instead.
RetryOperation.prototype.try = function (fn) {
  console.log('Using RetryOperation.try() is deprecated');
  this.attempt(fn);
};
// NOTE(review): the original code also defined a separate `start` function here,
// but it was immediately overwritten by the alias below and could never run
// (dead code). Observable behavior is unchanged: `start` delegates to `try`
// and therefore logs the try() deprecation message, exactly as before.
RetryOperation.prototype.start = RetryOperation.prototype.try;
// Returns every error collected across attempts so far.
RetryOperation.prototype.errors = function () {
  return this._errors;
};
// Returns the number of attempts performed so far (starts at 1).
RetryOperation.prototype.attempts = function () {
  return this._attempts;
};
// Returns the most frequent error (by message) seen across attempts, or null
// if no error was recorded. On a tie, the most recent occurrence wins.
RetryOperation.prototype.mainError = function () {
  if (this._errors.length === 0) {
    return null;
  }
  const counts = {};
  let mainError = null;
  let mainErrorCount = 0;
  for (const error of this._errors) {
    const message = error.message;
    const count = (counts[message] || 0) + 1;
    counts[message] = count;
    // `>=` so later errors win ties
    if (count >= mainErrorCount) {
      mainError = error;
      mainErrorCount = count;
    }
  }
  return mainError;
};

View File

@ -1,16 +0,0 @@
The ISC License
Copyright (c) 2015-2022 Benjamin Coe, Isaac Z. Schlueter, and Contributors
Permission to use, copy, modify, and/or distribute this software
for any purpose with or without fee is hereby granted, provided
that the above copyright notice and this permission notice
appear in all copies.
THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES
OF MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE
LIABLE FOR ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES
OR ANY DAMAGES WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS,
WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION,
ARISING OUT OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.

View File

@ -1,207 +0,0 @@
//
// used by properLockFile
// Source: https://github.com/tapjs/signal-exit
//
// Note: since nyc uses this module to output coverage, any lines
// that are in the direct sync flow of nyc's outputCoverage are
// ignored, since we can never get coverage for them.
// grab a reference to node's real process object right away
// grab a reference to node's real process object right away
var process = global.process
// Method names a usable process object must expose as functions
const requiredProcessMethods = ['removeListener', 'emit', 'reallyExit', 'listeners', 'kill', 'on']
// Duck-type check that `proc` looks like a real Node.js process object.
// Mirrors the original short-circuit semantics: a nullish input is returned
// as-is (falsy); anything else yields a boolean.
const processOk = function (proc) {
  return proc &&
    typeof proc === 'object' &&
    typeof proc.pid === 'number' &&
    requiredProcessMethods.every((methodName) => typeof proc[methodName] === 'function')
}
// some kind of non-node environment, just no-op
/* istanbul ignore if */
if (!processOk(process)) {
  // Outside Node (or with a mangled process object): export a no-op that
  // returns a no-op "remove listener" function.
  module.exports = function () {
    return function () { }
  }
} else {
  var assert = require('assert')
  var signals = require('./signals.js')
  var isWin = /^win/i.test(process.platform)
  var EE = require('events')
  /* istanbul ignore if */
  if (typeof EE !== 'function') {
    EE = EE.EventEmitter
  }
  // A single process-global emitter is shared across all copies of this module
  var emitter
  if (process.__signal_exit_emitter__) {
    emitter = process.__signal_exit_emitter__
  } else {
    emitter = process.__signal_exit_emitter__ = new EE()
    emitter.count = 0
    emitter.emitted = {}
  }
  // Because this emitter is a global, we have to check to see if a
  // previous version of this library failed to enable infinite listeners.
  // I know what you're about to say. But literally everything about
  // signal-exit is a compromise with evil. Get used to it.
  if (!emitter.infinite) {
    emitter.setMaxListeners(Infinity)
    emitter.infinite = true
  }
  // Registers `cb` to run on process exit. Returns a function that removes
  // the handler again. `opts.alwaysLast` defers the handler to 'afterexit'.
  module.exports = function (cb, opts) {
    /* istanbul ignore if */
    if (!processOk(global.process)) {
      return function () { }
    }
    assert.equal(typeof cb, 'function', 'a callback must be provided for exit handler')
    if (loaded === false) {
      load()
    }
    var ev = 'exit'
    if (opts && opts.alwaysLast) {
      ev = 'afterexit'
    }
    var remove = function () {
      emitter.removeListener(ev, cb)
      // Once the last handler is removed, detach all signal hooks
      if (emitter.listeners('exit').length === 0 &&
        emitter.listeners('afterexit').length === 0) {
        unload()
      }
    }
    emitter.on(ev, cb)
    return remove
  }
  // Detaches all signal listeners and restores the patched process methods
  var unload = function unload() {
    if (!loaded || !processOk(global.process)) {
      return
    }
    loaded = false
    signals.forEach(function (sig) {
      try {
        process.removeListener(sig, sigListeners[sig])
      } catch (er) { }
    })
    process.emit = originalProcessEmit
    process.reallyExit = originalProcessReallyExit
    emitter.count -= 1
  }
  module.exports.unload = unload
  // Emits an exit-family event at most once per event name
  var emit = function emit(event, code, signal) {
    /* istanbul ignore if */
    if (emitter.emitted[event]) {
      return
    }
    emitter.emitted[event] = true
    emitter.emit(event, code, signal)
  }
  // { <signal>: <listener fn>, ... }
  var sigListeners = {}
  signals.forEach(function (sig) {
    sigListeners[sig] = function listener() {
      /* istanbul ignore if */
      if (!processOk(global.process)) {
        return
      }
      // If there are no other listeners, an exit is coming!
      // Simplest way: remove us and then re-send the signal.
      // We know that this will kill the process, so we can
      // safely emit now.
      var listeners = process.listeners(sig)
      if (listeners.length === emitter.count) {
        unload()
        emit('exit', null, sig)
        /* istanbul ignore next */
        emit('afterexit', null, sig)
        /* istanbul ignore next */
        if (isWin && sig === 'SIGHUP') {
          // "SIGHUP" throws an `ENOSYS` error on Windows,
          // so use a supported signal instead
          sig = 'SIGINT'
        }
        /* istanbul ignore next */
        process.kill(process.pid, sig)
      }
    }
  })
  module.exports.signals = function () {
    return signals
  }
  var loaded = false
  // Installs the signal listeners and patches process.emit/reallyExit
  var load = function load() {
    if (loaded || !processOk(global.process)) {
      return
    }
    loaded = true
    // This is the number of onSignalExit's that are in play.
    // It's important so that we can count the correct number of
    // listeners on signals, and don't wait for the other one to
    // handle it instead of us.
    emitter.count += 1
    signals = signals.filter(function (sig) {
      try {
        process.on(sig, sigListeners[sig])
        return true
      } catch (er) {
        // Unknown signal on this platform: drop it from the list
        return false
      }
    })
    process.emit = processEmit
    process.reallyExit = processReallyExit
  }
  module.exports.load = load
  var originalProcessReallyExit = process.reallyExit
  // Wraps process.reallyExit so exit handlers fire before the real exit
  var processReallyExit = function processReallyExit(code) {
    /* istanbul ignore if */
    if (!processOk(global.process)) {
      return
    }
    process.exitCode = code || /* istanbul ignore next */ 0
    emit('exit', process.exitCode, null)
    /* istanbul ignore next */
    emit('afterexit', process.exitCode, null)
    /* istanbul ignore next */
    originalProcessReallyExit.call(process, process.exitCode)
  }
  var originalProcessEmit = process.emit
  // Wraps process.emit to intercept the 'exit' event and run our handlers
  var processEmit = function processEmit(ev, arg) {
    if (ev === 'exit' && processOk(global.process)) {
      /* istanbul ignore else */
      if (arg !== undefined) {
        process.exitCode = arg
      }
      var ret = originalProcessEmit.apply(this, arguments)
      /* istanbul ignore next */
      emit('exit', process.exitCode, null)
      /* istanbul ignore next */
      emit('afterexit', process.exitCode, null)
      /* istanbul ignore next */
      return ret
    } else {
      return originalProcessEmit.apply(this, arguments)
    }
  }
}

View File

@ -1,53 +0,0 @@
// This is not the set of all possible signals.
//
// It IS, however, the set of all signals that trigger
// an exit on either Linux or BSD systems. Linux is a
// superset of the signal names supported on BSD, and
// the unknown signals just fail to register, so we can
// catch that easily enough.
//
// Don't bother with SIGKILL. It's uncatchable, which
// means that we can't fire any callbacks anyway.
//
// If a user does happen to register a handler on a non-
// fatal signal like SIGWINCH or something, and then
// exit, it'll end up firing `process.emit('exit')`, so
// the handler will be fired anyway.
//
// SIGBUS, SIGFPE, SIGSEGV and SIGILL, when not raised
// artificially, inherently leave the process in a
// state from which it is not safe to try and enter JS
// listeners.
// Signals that terminate a process on every supported platform
module.exports = [
  'SIGABRT',
  'SIGALRM',
  'SIGHUP',
  'SIGINT',
  'SIGTERM'
]
// Additional fatal signals available on POSIX platforms (not Windows)
if (process.platform !== 'win32') {
  module.exports.push(
    'SIGVTALRM',
    'SIGXCPU',
    'SIGXFSZ',
    'SIGUSR2',
    'SIGTRAP',
    'SIGSYS',
    'SIGQUIT',
    'SIGIOT'
    // should detect profiler and enable/disable accordingly.
    // see #21
    // 'SIGPROF'
  )
}
// Linux-only fatal signals
if (process.platform === 'linux') {
  module.exports.push(
    'SIGIO',
    'SIGPOLL',
    'SIGPWR',
    'SIGSTKFLT',
    'SIGUNUSED'
  )
}

View File

@ -10,8 +10,7 @@ const { writeConcatFile } = require('../utils/ffmpegHelpers')
const toneHelpers = require('../utils/toneHelpers') const toneHelpers = require('../utils/toneHelpers')
class AbMergeManager { class AbMergeManager {
constructor(db, taskManager) { constructor(taskManager) {
this.db = db
this.taskManager = taskManager this.taskManager = taskManager
this.itemsCacheDir = Path.join(global.MetadataPath, 'cache/items') this.itemsCacheDir = Path.join(global.MetadataPath, 'cache/items')

View File

@ -10,8 +10,7 @@ const toneHelpers = require('../utils/toneHelpers')
const Task = require('../objects/Task') const Task = require('../objects/Task')
class AudioMetadataMangaer { class AudioMetadataMangaer {
constructor(db, taskManager) { constructor(taskManager) {
this.db = db
this.taskManager = taskManager this.taskManager = taskManager
this.itemsCacheDir = Path.join(global.MetadataPath, 'cache/items') this.itemsCacheDir = Path.join(global.MetadataPath, 'cache/items')

View File

@ -1,6 +1,8 @@
const sqlite3 = require('sqlite3')
const Path = require('path') const Path = require('path')
const Logger = require('../Logger') const Logger = require('../Logger')
const SocketAuthority = require('../SocketAuthority') const SocketAuthority = require('../SocketAuthority')
const Database = require('../Database')
const cron = require('../libs/nodeCron') const cron = require('../libs/nodeCron')
const fs = require('../libs/fsExtra') const fs = require('../libs/fsExtra')
@ -14,27 +16,32 @@ const filePerms = require('../utils/filePerms')
const Backup = require('../objects/Backup') const Backup = require('../objects/Backup')
class BackupManager { class BackupManager {
constructor(db) { constructor() {
this.BackupPath = Path.join(global.MetadataPath, 'backups') this.BackupPath = Path.join(global.MetadataPath, 'backups')
this.ItemsMetadataPath = Path.join(global.MetadataPath, 'items') this.ItemsMetadataPath = Path.join(global.MetadataPath, 'items')
this.AuthorsMetadataPath = Path.join(global.MetadataPath, 'authors') this.AuthorsMetadataPath = Path.join(global.MetadataPath, 'authors')
this.db = db
this.scheduleTask = null this.scheduleTask = null
this.backups = [] this.backups = []
} }
get serverSettings() { get backupSchedule() {
return this.db.serverSettings || {} return global.ServerSettings.backupSchedule
}
get backupsToKeep() {
return global.ServerSettings.backupsToKeep || 2
}
get maxBackupSize() {
return global.ServerSettings.maxBackupSize || 1
} }
async init() { async init() {
const backupsDirExists = await fs.pathExists(this.BackupPath) const backupsDirExists = await fs.pathExists(this.BackupPath)
if (!backupsDirExists) { if (!backupsDirExists) {
await fs.ensureDir(this.BackupPath) await fs.ensureDir(this.BackupPath)
await filePerms.setDefault(this.BackupPath)
} }
await this.loadBackups() await this.loadBackups()
@ -42,42 +49,42 @@ class BackupManager {
} }
scheduleCron() { scheduleCron() {
if (!this.serverSettings.backupSchedule) { if (!this.backupSchedule) {
Logger.info(`[BackupManager] Auto Backups are disabled`) Logger.info(`[BackupManager] Auto Backups are disabled`)
return return
} }
try { try {
var cronSchedule = this.serverSettings.backupSchedule var cronSchedule = this.backupSchedule
this.scheduleTask = cron.schedule(cronSchedule, this.runBackup.bind(this)) this.scheduleTask = cron.schedule(cronSchedule, this.runBackup.bind(this))
} catch (error) { } catch (error) {
Logger.error(`[BackupManager] Failed to schedule backup cron ${this.serverSettings.backupSchedule}`, error) Logger.error(`[BackupManager] Failed to schedule backup cron ${this.backupSchedule}`, error)
} }
} }
updateCronSchedule() { updateCronSchedule() {
if (this.scheduleTask && !this.serverSettings.backupSchedule) { if (this.scheduleTask && !this.backupSchedule) {
Logger.info(`[BackupManager] Disabling backup schedule`) Logger.info(`[BackupManager] Disabling backup schedule`)
if (this.scheduleTask.stop) this.scheduleTask.stop() if (this.scheduleTask.stop) this.scheduleTask.stop()
this.scheduleTask = null this.scheduleTask = null
} else if (!this.scheduleTask && this.serverSettings.backupSchedule) { } else if (!this.scheduleTask && this.backupSchedule) {
Logger.info(`[BackupManager] Starting backup schedule ${this.serverSettings.backupSchedule}`) Logger.info(`[BackupManager] Starting backup schedule ${this.backupSchedule}`)
this.scheduleCron() this.scheduleCron()
} else if (this.serverSettings.backupSchedule) { } else if (this.backupSchedule) {
Logger.info(`[BackupManager] Restarting backup schedule ${this.serverSettings.backupSchedule}`) Logger.info(`[BackupManager] Restarting backup schedule ${this.backupSchedule}`)
if (this.scheduleTask.stop) this.scheduleTask.stop() if (this.scheduleTask.stop) this.scheduleTask.stop()
this.scheduleCron() this.scheduleCron()
} }
} }
async uploadBackup(req, res) { async uploadBackup(req, res) {
var backupFile = req.files.file const backupFile = req.files.file
if (Path.extname(backupFile.name) !== '.audiobookshelf') { if (Path.extname(backupFile.name) !== '.audiobookshelf') {
Logger.error(`[BackupManager] Invalid backup file uploaded "${backupFile.name}"`) Logger.error(`[BackupManager] Invalid backup file uploaded "${backupFile.name}"`)
return res.status(500).send('Invalid backup file') return res.status(500).send('Invalid backup file')
} }
var tempPath = Path.join(this.BackupPath, backupFile.name) const tempPath = Path.join(this.BackupPath, backupFile.name)
var success = await backupFile.mv(tempPath).then(() => true).catch((error) => { const success = await backupFile.mv(tempPath).then(() => true).catch((error) => {
Logger.error('[BackupManager] Failed to move backup file', path, error) Logger.error('[BackupManager] Failed to move backup file', path, error)
return false return false
}) })
@ -86,10 +93,17 @@ class BackupManager {
} }
const zip = new StreamZip.async({ file: tempPath }) const zip = new StreamZip.async({ file: tempPath })
const data = await zip.entryData('details')
var details = data.toString('utf8').split('\n')
var backup = new Backup({ details, fullPath: tempPath }) const entries = await zip.entries()
if (!Object.keys(entries).includes('absdatabase.sqlite')) {
Logger.error(`[BackupManager] Invalid backup with no absdatabase.sqlite file - might be a backup created on an old Audiobookshelf server.`)
return res.status(500).send('Invalid backup with no absdatabase.sqlite file - might be a backup created on an old Audiobookshelf server.')
}
const data = await zip.entryData('details')
const details = data.toString('utf8').split('\n')
const backup = new Backup({ details, fullPath: tempPath })
if (!backup.serverVersion) { if (!backup.serverVersion) {
Logger.error(`[BackupManager] Invalid backup with no server version - might be a backup created before version 2.0.0`) Logger.error(`[BackupManager] Invalid backup with no server version - might be a backup created before version 2.0.0`)
@ -98,7 +112,7 @@ class BackupManager {
backup.fileSize = await getFileSize(backup.fullPath) backup.fileSize = await getFileSize(backup.fullPath)
var existingBackupIndex = this.backups.findIndex(b => b.id === backup.id) const existingBackupIndex = this.backups.findIndex(b => b.id === backup.id)
if (existingBackupIndex >= 0) { if (existingBackupIndex >= 0) {
Logger.warn(`[BackupManager] Backup already exists with id ${backup.id} - overwriting`) Logger.warn(`[BackupManager] Backup already exists with id ${backup.id} - overwriting`)
this.backups.splice(existingBackupIndex, 1, backup) this.backups.splice(existingBackupIndex, 1, backup)
@ -122,14 +136,23 @@ class BackupManager {
} }
} }
async requestApplyBackup(backup) { async requestApplyBackup(backup, res) {
const zip = new StreamZip.async({ file: backup.fullPath }) const zip = new StreamZip.async({ file: backup.fullPath })
await zip.extract('config/', global.ConfigPath)
if (backup.backupMetadataCovers) { const entries = await zip.entries()
await zip.extract('metadata-items/', this.ItemsMetadataPath) if (!Object.keys(entries).includes('absdatabase.sqlite')) {
await zip.extract('metadata-authors/', this.AuthorsMetadataPath) Logger.error(`[BackupManager] Cannot apply old backup ${backup.fullPath}`)
return res.status(500).send('Invalid backup file. Does not include absdatabase.sqlite. This might be from an older Audiobookshelf server.')
} }
await this.db.reinit()
await Database.disconnect()
await zip.extract('absdatabase.sqlite', global.ConfigPath)
await zip.extract('metadata-items/', this.ItemsMetadataPath)
await zip.extract('metadata-authors/', this.AuthorsMetadataPath)
await Database.reconnect()
SocketAuthority.emitter('backup_applied') SocketAuthority.emitter('backup_applied')
} }
@ -157,8 +180,10 @@ class BackupManager {
const backup = new Backup({ details, fullPath: fullFilePath }) const backup = new Backup({ details, fullPath: fullFilePath })
if (!backup.serverVersion) { if (!backup.serverVersion) { // Backups before v2
Logger.error(`[BackupManager] Old unsupported backup was found "${backup.fullPath}"`) Logger.error(`[BackupManager] Old unsupported backup was found "${backup.filename}"`)
} else if (!backup.key) { // Backups before sqlite migration
Logger.warn(`[BackupManager] Old unsupported backup was found "${backup.filename}" (pre sqlite migration)`)
} }
backup.fileSize = await getFileSize(backup.fullPath) backup.fileSize = await getFileSize(backup.fullPath)
@ -182,44 +207,52 @@ class BackupManager {
async runBackup() { async runBackup() {
// Check if Metadata Path is inside Config Path (otherwise there will be an infinite loop as the archiver tries to zip itself) // Check if Metadata Path is inside Config Path (otherwise there will be an infinite loop as the archiver tries to zip itself)
Logger.info(`[BackupManager] Running Backup`) Logger.info(`[BackupManager] Running Backup`)
var newBackup = new Backup() const newBackup = new Backup()
newBackup.setData(this.BackupPath)
const newBackData = { await fs.ensureDir(this.AuthorsMetadataPath)
backupMetadataCovers: this.serverSettings.backupMetadataCovers,
backupDirPath: this.BackupPath // Create backup sqlite file
const sqliteBackupPath = await this.backupSqliteDb(newBackup).catch((error) => {
Logger.error(`[BackupManager] Failed to backup sqlite db`, error)
return false
})
if (!sqliteBackupPath) {
return false
} }
newBackup.setData(newBackData)
var metadataAuthorsPath = this.AuthorsMetadataPath // Zip sqlite file, /metadata/items, and /metadata/authors folders
if (!await fs.pathExists(metadataAuthorsPath)) metadataAuthorsPath = null const zipResult = await this.zipBackup(sqliteBackupPath, newBackup).catch((error) => {
var zipResult = await this.zipBackup(metadataAuthorsPath, newBackup).then(() => true).catch((error) => {
Logger.error(`[BackupManager] Backup Failed ${error}`) Logger.error(`[BackupManager] Backup Failed ${error}`)
return false return false
}) })
if (zipResult) {
Logger.info(`[BackupManager] Backup successful ${newBackup.id}`)
await filePerms.setDefault(newBackup.fullPath)
newBackup.fileSize = await getFileSize(newBackup.fullPath)
var existingIndex = this.backups.findIndex(b => b.id === newBackup.id)
if (existingIndex >= 0) {
this.backups.splice(existingIndex, 1, newBackup)
} else {
this.backups.push(newBackup)
}
// Check remove oldest backup // Remove sqlite backup
if (this.backups.length > this.serverSettings.backupsToKeep) { await fs.remove(sqliteBackupPath)
this.backups.sort((a, b) => a.createdAt - b.createdAt)
var oldBackup = this.backups.shift() if (!zipResult) return false
Logger.debug(`[BackupManager] Removing old backup ${oldBackup.id}`)
this.removeBackup(oldBackup) Logger.info(`[BackupManager] Backup successful ${newBackup.id}`)
}
return true newBackup.fileSize = await getFileSize(newBackup.fullPath)
const existingIndex = this.backups.findIndex(b => b.id === newBackup.id)
if (existingIndex >= 0) {
this.backups.splice(existingIndex, 1, newBackup)
} else { } else {
return false this.backups.push(newBackup)
} }
// Check remove oldest backup
if (this.backups.length > this.backupsToKeep) {
this.backups.sort((a, b) => a.createdAt - b.createdAt)
const oldBackup = this.backups.shift()
Logger.debug(`[BackupManager] Removing old backup ${oldBackup.id}`)
this.removeBackup(oldBackup)
}
return true
} }
async removeBackup(backup) { async removeBackup(backup) {
@ -233,7 +266,35 @@ class BackupManager {
} }
} }
zipBackup(metadataAuthorsPath, backup) { /**
* @see https://github.com/TryGhost/node-sqlite3/pull/1116
* @param {Backup} backup
* @promise
*/
backupSqliteDb(backup) {
const db = new sqlite3.Database(Database.dbPath)
const dbFilePath = Path.join(global.ConfigPath, `absdatabase.${backup.id}.sqlite`)
return new Promise(async (resolve, reject) => {
const backup = db.backup(dbFilePath)
backup.step(-1)
backup.finish()
// Max time ~2 mins
for (let i = 0; i < 240; i++) {
if (backup.completed) {
return resolve(dbFilePath)
} else if (backup.failed) {
return reject(backup.message || 'Unknown failure reason')
}
await new Promise((r) => setTimeout(r, 500))
}
Logger.error(`[BackupManager] Backup sqlite timed out`)
reject('Backup timed out')
})
}
zipBackup(sqliteBackupPath, backup) {
return new Promise((resolve, reject) => { return new Promise((resolve, reject) => {
// create a file to stream archive data to // create a file to stream archive data to
const output = fs.createWriteStream(backup.fullPath) const output = fs.createWriteStream(backup.fullPath)
@ -245,7 +306,7 @@ class BackupManager {
// 'close' event is fired only when a file descriptor is involved // 'close' event is fired only when a file descriptor is involved
output.on('close', () => { output.on('close', () => {
Logger.info('[BackupManager]', archive.pointer() + ' total bytes') Logger.info('[BackupManager]', archive.pointer() + ' total bytes')
resolve() resolve(true)
}) })
// This event is fired when the data source is drained no matter what was the data source. // This event is fired when the data source is drained no matter what was the data source.
@ -281,7 +342,7 @@ class BackupManager {
reject(err) reject(err)
}) })
archive.on('progress', ({ fs: fsobj }) => { archive.on('progress', ({ fs: fsobj }) => {
const maxBackupSizeInBytes = this.serverSettings.maxBackupSize * 1000 * 1000 * 1000 const maxBackupSizeInBytes = this.maxBackupSize * 1000 * 1000 * 1000
if (fsobj.processedBytes > maxBackupSizeInBytes) { if (fsobj.processedBytes > maxBackupSizeInBytes) {
Logger.error(`[BackupManager] Archiver is too large - aborting to prevent endless loop, Bytes Processed: ${fsobj.processedBytes}`) Logger.error(`[BackupManager] Archiver is too large - aborting to prevent endless loop, Bytes Processed: ${fsobj.processedBytes}`)
archive.abort() archive.abort()
@ -295,26 +356,9 @@ class BackupManager {
// pipe archive data to the file // pipe archive data to the file
archive.pipe(output) archive.pipe(output)
archive.directory(Path.join(this.db.LibraryItemsPath, 'data'), 'config/libraryItems/data') archive.file(sqliteBackupPath, { name: 'absdatabase.sqlite' })
archive.directory(Path.join(this.db.UsersPath, 'data'), 'config/users/data') archive.directory(this.ItemsMetadataPath, 'metadata-items')
archive.directory(Path.join(this.db.SessionsPath, 'data'), 'config/sessions/data') archive.directory(this.AuthorsMetadataPath, 'metadata-authors')
archive.directory(Path.join(this.db.LibrariesPath, 'data'), 'config/libraries/data')
archive.directory(Path.join(this.db.SettingsPath, 'data'), 'config/settings/data')
archive.directory(Path.join(this.db.CollectionsPath, 'data'), 'config/collections/data')
archive.directory(Path.join(this.db.AuthorsPath, 'data'), 'config/authors/data')
archive.directory(Path.join(this.db.SeriesPath, 'data'), 'config/series/data')
archive.directory(Path.join(this.db.PlaylistsPath, 'data'), 'config/playlists/data')
archive.directory(Path.join(this.db.FeedsPath, 'data'), 'config/feeds/data')
if (this.serverSettings.backupMetadataCovers) {
Logger.debug(`[BackupManager] Backing up Metadata Items "${this.ItemsMetadataPath}"`)
archive.directory(this.ItemsMetadataPath, 'metadata-items')
if (metadataAuthorsPath) {
Logger.debug(`[BackupManager] Backing up Metadata Authors "${metadataAuthorsPath}"`)
archive.directory(metadataAuthorsPath, 'metadata-authors')
}
}
archive.append(backup.detailsString, { name: 'details' }) archive.append(backup.detailsString, { name: 'details' })

View File

@ -53,7 +53,7 @@ class CacheManager {
if (await fs.pathExists(path)) { if (await fs.pathExists(path)) {
if (global.XAccel) { if (global.XAccel) {
Logger.debug(`Use X-Accel to serve static file ${path}`) Logger.debug(`Use X-Accel to serve static file ${path}`)
return res.status(204).header({'X-Accel-Redirect': global.XAccel + path}).send() return res.status(204).header({ 'X-Accel-Redirect': global.XAccel + path }).send()
} }
const r = fs.createReadStream(path) const r = fs.createReadStream(path)
@ -79,7 +79,7 @@ class CacheManager {
if (global.XAccel) { if (global.XAccel) {
Logger.debug(`Use X-Accel to serve static file ${writtenFile}`) Logger.debug(`Use X-Accel to serve static file ${writtenFile}`)
return res.status(204).header({'X-Accel-Redirect': global.XAccel + writtenFile}).send() return res.status(204).header({ 'X-Accel-Redirect': global.XAccel + writtenFile }).send()
} }
var readStream = fs.createReadStream(writtenFile) var readStream = fs.createReadStream(writtenFile)
@ -116,6 +116,7 @@ class CacheManager {
} }
async purgeAll() { async purgeAll() {
Logger.info(`[CacheManager] Purging all cache at "${this.CachePath}"`)
if (await fs.pathExists(this.CachePath)) { if (await fs.pathExists(this.CachePath)) {
await fs.remove(this.CachePath).catch((error) => { await fs.remove(this.CachePath).catch((error) => {
Logger.error(`[CacheManager] Failed to remove cache dir "${this.CachePath}"`, error) Logger.error(`[CacheManager] Failed to remove cache dir "${this.CachePath}"`, error)
@ -125,6 +126,7 @@ class CacheManager {
} }
async purgeItems() { async purgeItems() {
Logger.info(`[CacheManager] Purging items cache at "${this.ItemCachePath}"`)
if (await fs.pathExists(this.ItemCachePath)) { if (await fs.pathExists(this.ItemCachePath)) {
await fs.remove(this.ItemCachePath).catch((error) => { await fs.remove(this.ItemCachePath).catch((error) => {
Logger.error(`[CacheManager] Failed to remove items cache dir "${this.ItemCachePath}"`, error) Logger.error(`[CacheManager] Failed to remove items cache dir "${this.ItemCachePath}"`, error)

View File

@ -10,15 +10,14 @@ const { downloadFile, filePathToPOSIX } = require('../utils/fileUtils')
const { extractCoverArt } = require('../utils/ffmpegHelpers') const { extractCoverArt } = require('../utils/ffmpegHelpers')
class CoverManager { class CoverManager {
constructor(db, cacheManager) { constructor(cacheManager) {
this.db = db
this.cacheManager = cacheManager this.cacheManager = cacheManager
this.ItemMetadataPath = Path.posix.join(global.MetadataPath, 'items') this.ItemMetadataPath = Path.posix.join(global.MetadataPath, 'items')
} }
getCoverDirectory(libraryItem) { getCoverDirectory(libraryItem) {
if (this.db.serverSettings.storeCoverWithItem && !libraryItem.isFile && !libraryItem.isMusic) { if (global.ServerSettings.storeCoverWithItem && !libraryItem.isFile && !libraryItem.isMusic) {
return libraryItem.path return libraryItem.path
} else { } else {
return Path.posix.join(this.ItemMetadataPath, libraryItem.id) return Path.posix.join(this.ItemMetadataPath, libraryItem.id)

View File

@ -1,9 +1,9 @@
const cron = require('../libs/nodeCron') const cron = require('../libs/nodeCron')
const Logger = require('../Logger') const Logger = require('../Logger')
const Database = require('../Database')
class CronManager { class CronManager {
constructor(db, scanner, podcastManager) { constructor(scanner, podcastManager) {
this.db = db
this.scanner = scanner this.scanner = scanner
this.podcastManager = podcastManager this.podcastManager = podcastManager
@ -19,7 +19,7 @@ class CronManager {
} }
initLibraryScanCrons() { initLibraryScanCrons() {
for (const library of this.db.libraries) { for (const library of Database.libraries) {
if (library.settings.autoScanCronExpression) { if (library.settings.autoScanCronExpression) {
this.startCronForLibrary(library) this.startCronForLibrary(library)
} }
@ -64,7 +64,7 @@ class CronManager {
initPodcastCrons() { initPodcastCrons() {
const cronExpressionMap = {} const cronExpressionMap = {}
this.db.libraryItems.forEach((li) => { Database.libraryItems.forEach((li) => {
if (li.mediaType === 'podcast' && li.media.autoDownloadEpisodes) { if (li.mediaType === 'podcast' && li.media.autoDownloadEpisodes) {
if (!li.media.autoDownloadSchedule) { if (!li.media.autoDownloadSchedule) {
Logger.error(`[CronManager] Podcast auto download schedule is not set for ${li.media.metadata.title}`) Logger.error(`[CronManager] Podcast auto download schedule is not set for ${li.media.metadata.title}`)
@ -119,7 +119,7 @@ class CronManager {
// Get podcast library items to check // Get podcast library items to check
const libraryItems = [] const libraryItems = []
for (const libraryItemId of libraryItemIds) { for (const libraryItemId of libraryItemIds) {
const libraryItem = this.db.libraryItems.find(li => li.id === libraryItemId) const libraryItem = Database.libraryItems.find(li => li.id === libraryItemId)
if (!libraryItem) { if (!libraryItem) {
Logger.error(`[CronManager] Library item ${libraryItemId} not found for episode check cron ${expression}`) Logger.error(`[CronManager] Library item ${libraryItemId} not found for episode check cron ${expression}`)
podcastCron.libraryItemIds = podcastCron.libraryItemIds.filter(lid => lid !== libraryItemId) // Filter it out podcastCron.libraryItemIds = podcastCron.libraryItemIds.filter(lid => lid !== libraryItemId) // Filter it out

View File

@ -1,14 +1,12 @@
const nodemailer = require('nodemailer') const nodemailer = require('nodemailer')
const Database = require('../Database')
const Logger = require("../Logger") const Logger = require("../Logger")
const SocketAuthority = require('../SocketAuthority')
class EmailManager { class EmailManager {
constructor(db) { constructor() { }
this.db = db
}
getTransporter() { getTransporter() {
return nodemailer.createTransport(this.db.emailSettings.getTransportObject()) return nodemailer.createTransport(Database.emailSettings.getTransportObject())
} }
async sendTest(res) { async sendTest(res) {
@ -25,8 +23,8 @@ class EmailManager {
} }
transporter.sendMail({ transporter.sendMail({
from: this.db.emailSettings.fromAddress, from: Database.emailSettings.fromAddress,
to: this.db.emailSettings.testAddress || this.db.emailSettings.fromAddress, to: Database.emailSettings.testAddress || Database.emailSettings.fromAddress,
subject: 'Test email from Audiobookshelf', subject: 'Test email from Audiobookshelf',
text: 'Success!' text: 'Success!'
}).then((result) => { }).then((result) => {
@ -52,7 +50,7 @@ class EmailManager {
} }
transporter.sendMail({ transporter.sendMail({
from: this.db.emailSettings.fromAddress, from: Database.emailSettings.fromAddress,
to: device.email, to: device.email,
subject: "Here is your Ebook!", subject: "Here is your Ebook!",
html: '<div dir="auto"></div>', html: '<div dir="auto"></div>',

View File

@ -9,9 +9,7 @@ const Logger = require('../Logger')
const TAG = '[LogManager]' const TAG = '[LogManager]'
class LogManager { class LogManager {
constructor(db) { constructor() {
this.db = db
this.DailyLogPath = Path.posix.join(global.MetadataPath, 'logs', 'daily') this.DailyLogPath = Path.posix.join(global.MetadataPath, 'logs', 'daily')
this.ScanLogPath = Path.posix.join(global.MetadataPath, 'logs', 'scans') this.ScanLogPath = Path.posix.join(global.MetadataPath, 'logs', 'scans')
@ -20,12 +18,8 @@ class LogManager {
this.dailyLogFiles = [] this.dailyLogFiles = []
} }
get serverSettings() {
return this.db.serverSettings || {}
}
get loggerDailyLogsToKeep() { get loggerDailyLogsToKeep() {
return this.serverSettings.loggerDailyLogsToKeep || 7 return global.ServerSettings.loggerDailyLogsToKeep || 7
} }
async ensureLogDirs() { async ensureLogDirs() {

View File

@ -1,12 +1,11 @@
const axios = require('axios') const axios = require('axios')
const Logger = require("../Logger") const Logger = require("../Logger")
const SocketAuthority = require('../SocketAuthority') const SocketAuthority = require('../SocketAuthority')
const Database = require('../Database')
const { notificationData } = require('../utils/notifications') const { notificationData } = require('../utils/notifications')
class NotificationManager { class NotificationManager {
constructor(db) { constructor() {
this.db = db
this.sendingNotification = false this.sendingNotification = false
this.notificationQueue = [] this.notificationQueue = []
} }
@ -16,10 +15,10 @@ class NotificationManager {
} }
onPodcastEpisodeDownloaded(libraryItem, episode) { onPodcastEpisodeDownloaded(libraryItem, episode) {
if (!this.db.notificationSettings.isUseable) return if (!Database.notificationSettings.isUseable) return
Logger.debug(`[NotificationManager] onPodcastEpisodeDownloaded: Episode "${episode.title}" for podcast ${libraryItem.media.metadata.title}`) Logger.debug(`[NotificationManager] onPodcastEpisodeDownloaded: Episode "${episode.title}" for podcast ${libraryItem.media.metadata.title}`)
const library = this.db.libraries.find(lib => lib.id === libraryItem.libraryId) const library = Database.libraries.find(lib => lib.id === libraryItem.libraryId)
const eventData = { const eventData = {
libraryItemId: libraryItem.id, libraryItemId: libraryItem.id,
libraryId: libraryItem.libraryId, libraryId: libraryItem.libraryId,
@ -42,19 +41,19 @@ class NotificationManager {
} }
async triggerNotification(eventName, eventData, intentionallyFail = false) { async triggerNotification(eventName, eventData, intentionallyFail = false) {
if (!this.db.notificationSettings.isUseable) return if (!Database.notificationSettings.isUseable) return
// Will queue the notification if sendingNotification and queue is not full // Will queue the notification if sendingNotification and queue is not full
if (!this.checkTriggerNotification(eventName, eventData)) return if (!this.checkTriggerNotification(eventName, eventData)) return
const notifications = this.db.notificationSettings.getActiveNotificationsForEvent(eventName) const notifications = Database.notificationSettings.getActiveNotificationsForEvent(eventName)
for (const notification of notifications) { for (const notification of notifications) {
Logger.debug(`[NotificationManager] triggerNotification: Sending ${eventName} notification ${notification.id}`) Logger.debug(`[NotificationManager] triggerNotification: Sending ${eventName} notification ${notification.id}`)
const success = intentionallyFail ? false : await this.sendNotification(notification, eventData) const success = intentionallyFail ? false : await this.sendNotification(notification, eventData)
notification.updateNotificationFired(success) notification.updateNotificationFired(success)
if (!success) { // Failed notification if (!success) { // Failed notification
if (notification.numConsecutiveFailedAttempts >= this.db.notificationSettings.maxFailedAttempts) { if (notification.numConsecutiveFailedAttempts >= Database.notificationSettings.maxFailedAttempts) {
Logger.error(`[NotificationManager] triggerNotification: ${notification.eventName}/${notification.id} reached max failed attempts`) Logger.error(`[NotificationManager] triggerNotification: ${notification.eventName}/${notification.id} reached max failed attempts`)
notification.enabled = false notification.enabled = false
} else { } else {
@ -63,8 +62,8 @@ class NotificationManager {
} }
} }
await this.db.updateEntity('settings', this.db.notificationSettings) await Database.updateSetting(Database.notificationSettings)
SocketAuthority.emitter('notifications_updated', this.db.notificationSettings.toJSON()) SocketAuthority.emitter('notifications_updated', Database.notificationSettings.toJSON())
this.notificationFinished() this.notificationFinished()
} }
@ -72,7 +71,7 @@ class NotificationManager {
// Return TRUE if notification should be triggered now // Return TRUE if notification should be triggered now
checkTriggerNotification(eventName, eventData) { checkTriggerNotification(eventName, eventData) {
if (this.sendingNotification) { if (this.sendingNotification) {
if (this.notificationQueue.length >= this.db.notificationSettings.maxNotificationQueue) { if (this.notificationQueue.length >= Database.notificationSettings.maxNotificationQueue) {
Logger.warn(`[NotificationManager] Notification queue is full - ignoring event ${eventName}`) Logger.warn(`[NotificationManager] Notification queue is full - ignoring event ${eventName}`)
} else { } else {
Logger.debug(`[NotificationManager] Queueing notification ${eventName} (Queue size: ${this.notificationQueue.length})`) Logger.debug(`[NotificationManager] Queueing notification ${eventName} (Queue size: ${this.notificationQueue.length})`)
@ -92,7 +91,7 @@ class NotificationManager {
const nextNotificationEvent = this.notificationQueue.shift() const nextNotificationEvent = this.notificationQueue.shift()
this.triggerNotification(nextNotificationEvent.eventName, nextNotificationEvent.eventData) this.triggerNotification(nextNotificationEvent.eventName, nextNotificationEvent.eventData)
} }
}, this.db.notificationSettings.notificationDelay) }, Database.notificationSettings.notificationDelay)
} }
sendTestNotification(notification) { sendTestNotification(notification) {
@ -107,7 +106,7 @@ class NotificationManager {
sendNotification(notification, eventData) { sendNotification(notification, eventData) {
const payload = notification.getApprisePayload(eventData) const payload = notification.getApprisePayload(eventData)
return axios.post(this.db.notificationSettings.appriseApiUrl, payload, { timeout: 6000 }).then((response) => { return axios.post(Database.notificationSettings.appriseApiUrl, payload, { timeout: 6000 }).then((response) => {
Logger.debug(`[NotificationManager] sendNotification: ${notification.eventName}/${notification.id} response=`, response.data) Logger.debug(`[NotificationManager] sendNotification: ${notification.eventName}/${notification.id} response=`, response.data)
return true return true
}).catch((error) => { }).catch((error) => {

View File

@ -2,6 +2,7 @@ const Path = require('path')
const serverVersion = require('../../package.json').version const serverVersion = require('../../package.json').version
const Logger = require('../Logger') const Logger = require('../Logger')
const SocketAuthority = require('../SocketAuthority') const SocketAuthority = require('../SocketAuthority')
const Database = require('../Database')
const date = require('../libs/dateAndTime') const date = require('../libs/dateAndTime')
const fs = require('../libs/fsExtra') const fs = require('../libs/fsExtra')
@ -15,8 +16,7 @@ const DeviceInfo = require('../objects/DeviceInfo')
const Stream = require('../objects/Stream') const Stream = require('../objects/Stream')
class PlaybackSessionManager { class PlaybackSessionManager {
constructor(db) { constructor() {
this.db = db
this.StreamsPath = Path.join(global.MetadataPath, 'streams') this.StreamsPath = Path.join(global.MetadataPath, 'streams')
this.sessions = [] this.sessions = []
@ -33,19 +33,32 @@ class PlaybackSessionManager {
return session?.stream || null return session?.stream || null
} }
getDeviceInfo(req) { async getDeviceInfo(req) {
const ua = uaParserJs(req.headers['user-agent']) const ua = uaParserJs(req.headers['user-agent'])
const ip = requestIp.getClientIp(req) const ip = requestIp.getClientIp(req)
const clientDeviceInfo = req.body?.deviceInfo || null const clientDeviceInfo = req.body?.deviceInfo || null
const deviceInfo = new DeviceInfo() const deviceInfo = new DeviceInfo()
deviceInfo.setData(ip, ua, clientDeviceInfo, serverVersion) deviceInfo.setData(ip, ua, clientDeviceInfo, serverVersion, req.user.id)
if (clientDeviceInfo?.deviceId) {
const existingDevice = await Database.getDeviceByDeviceId(clientDeviceInfo.deviceId)
if (existingDevice) {
if (existingDevice.update(deviceInfo)) {
await Database.updateDevice(existingDevice)
}
return existingDevice
}
}
await Database.createDevice(deviceInfo)
return deviceInfo return deviceInfo
} }
async startSessionRequest(req, res, episodeId) { async startSessionRequest(req, res, episodeId) {
const deviceInfo = this.getDeviceInfo(req) const deviceInfo = await this.getDeviceInfo(req)
Logger.debug(`[PlaybackSessionManager] startSessionRequest for device ${deviceInfo.deviceDescription}`) Logger.debug(`[PlaybackSessionManager] startSessionRequest for device ${deviceInfo.deviceDescription}`)
const { user, libraryItem, body: options } = req const { user, libraryItem, body: options } = req
const session = await this.startSession(user, deviceInfo, libraryItem, episodeId, options) const session = await this.startSession(user, deviceInfo, libraryItem, episodeId, options)
@ -77,7 +90,7 @@ class PlaybackSessionManager {
} }
async syncLocalSession(user, sessionJson) { async syncLocalSession(user, sessionJson) {
const libraryItem = this.db.getLibraryItem(sessionJson.libraryItemId) const libraryItem = Database.getLibraryItem(sessionJson.libraryItemId)
const episode = (sessionJson.episodeId && libraryItem && libraryItem.isPodcast) ? libraryItem.media.getEpisode(sessionJson.episodeId) : null const episode = (sessionJson.episodeId && libraryItem && libraryItem.isPodcast) ? libraryItem.media.getEpisode(sessionJson.episodeId) : null
if (!libraryItem || (libraryItem.isPodcast && !episode)) { if (!libraryItem || (libraryItem.isPodcast && !episode)) {
Logger.error(`[PlaybackSessionManager] syncLocalSession: Media item not found for session "${sessionJson.displayTitle}" (${sessionJson.id})`) Logger.error(`[PlaybackSessionManager] syncLocalSession: Media item not found for session "${sessionJson.displayTitle}" (${sessionJson.id})`)
@ -88,12 +101,12 @@ class PlaybackSessionManager {
} }
} }
let session = await this.db.getPlaybackSession(sessionJson.id) let session = await Database.getPlaybackSession(sessionJson.id)
if (!session) { if (!session) {
// New session from local // New session from local
session = new PlaybackSession(sessionJson) session = new PlaybackSession(sessionJson)
Logger.debug(`[PlaybackSessionManager] Inserting new session for "${session.displayTitle}" (${session.id})`) Logger.debug(`[PlaybackSessionManager] Inserting new session for "${session.displayTitle}" (${session.id})`)
await this.db.insertEntity('session', session) await Database.createPlaybackSession(session)
} else { } else {
session.currentTime = sessionJson.currentTime session.currentTime = sessionJson.currentTime
session.timeListening = sessionJson.timeListening session.timeListening = sessionJson.timeListening
@ -102,7 +115,7 @@ class PlaybackSessionManager {
session.dayOfWeek = date.format(new Date(), 'dddd') session.dayOfWeek = date.format(new Date(), 'dddd')
Logger.debug(`[PlaybackSessionManager] Updated session for "${session.displayTitle}" (${session.id})`) Logger.debug(`[PlaybackSessionManager] Updated session for "${session.displayTitle}" (${session.id})`)
await this.db.updateEntity('session', session) await Database.updatePlaybackSession(session)
} }
const result = { const result = {
@ -126,8 +139,8 @@ class PlaybackSessionManager {
// Update user and emit socket event // Update user and emit socket event
if (result.progressSynced) { if (result.progressSynced) {
await this.db.updateEntity('user', user)
const itemProgress = user.getMediaProgress(session.libraryItemId, session.episodeId) const itemProgress = user.getMediaProgress(session.libraryItemId, session.episodeId)
if (itemProgress) await Database.upsertMediaProgress(itemProgress)
SocketAuthority.clientEmitter(user.id, 'user_item_progress_updated', { SocketAuthority.clientEmitter(user.id, 'user_item_progress_updated', {
id: itemProgress.id, id: itemProgress.id,
sessionId: session.id, sessionId: session.id,
@ -155,7 +168,7 @@ class PlaybackSessionManager {
async startSession(user, deviceInfo, libraryItem, episodeId, options) { async startSession(user, deviceInfo, libraryItem, episodeId, options) {
// Close any sessions already open for user and device // Close any sessions already open for user and device
const userSessions = this.sessions.filter(playbackSession => playbackSession.userId === user.id && playbackSession.deviceId === deviceInfo.deviceId) const userSessions = this.sessions.filter(playbackSession => playbackSession.userId === user.id && playbackSession.deviceId === deviceInfo.id)
for (const session of userSessions) { for (const session of userSessions) {
Logger.info(`[PlaybackSessionManager] startSession: Closing open session "${session.displayTitle}" for user "${user.username}" (Device: ${session.deviceDescription})`) Logger.info(`[PlaybackSessionManager] startSession: Closing open session "${session.displayTitle}" for user "${user.username}" (Device: ${session.deviceDescription})`)
await this.closeSession(user, session, null) await this.closeSession(user, session, null)
@ -209,17 +222,14 @@ class PlaybackSessionManager {
newPlaybackSession.audioTracks = audioTracks newPlaybackSession.audioTracks = audioTracks
} }
// Will save on the first sync
user.currentSessionId = newPlaybackSession.id
this.sessions.push(newPlaybackSession) this.sessions.push(newPlaybackSession)
SocketAuthority.adminEmitter('user_stream_update', user.toJSONForPublic(this.sessions, this.db.libraryItems)) SocketAuthority.adminEmitter('user_stream_update', user.toJSONForPublic(this.sessions, Database.libraryItems))
return newPlaybackSession return newPlaybackSession
} }
async syncSession(user, session, syncData) { async syncSession(user, session, syncData) {
const libraryItem = this.db.libraryItems.find(li => li.id === session.libraryItemId) const libraryItem = Database.libraryItems.find(li => li.id === session.libraryItemId)
if (!libraryItem) { if (!libraryItem) {
Logger.error(`[PlaybackSessionManager] syncSession Library Item not found "${session.libraryItemId}"`) Logger.error(`[PlaybackSessionManager] syncSession Library Item not found "${session.libraryItemId}"`)
return null return null
@ -236,9 +246,8 @@ class PlaybackSessionManager {
} }
const wasUpdated = user.createUpdateMediaProgress(libraryItem, itemProgressUpdate, session.episodeId) const wasUpdated = user.createUpdateMediaProgress(libraryItem, itemProgressUpdate, session.episodeId)
if (wasUpdated) { if (wasUpdated) {
await this.db.updateEntity('user', user)
const itemProgress = user.getMediaProgress(session.libraryItemId, session.episodeId) const itemProgress = user.getMediaProgress(session.libraryItemId, session.episodeId)
if (itemProgress) await Database.upsertMediaProgress(itemProgress)
SocketAuthority.clientEmitter(user.id, 'user_item_progress_updated', { SocketAuthority.clientEmitter(user.id, 'user_item_progress_updated', {
id: itemProgress.id, id: itemProgress.id,
sessionId: session.id, sessionId: session.id,
@ -259,7 +268,7 @@ class PlaybackSessionManager {
await this.saveSession(session) await this.saveSession(session)
} }
Logger.debug(`[PlaybackSessionManager] closeSession "${session.id}"`) Logger.debug(`[PlaybackSessionManager] closeSession "${session.id}"`)
SocketAuthority.adminEmitter('user_stream_update', user.toJSONForPublic(this.sessions, this.db.libraryItems)) SocketAuthority.adminEmitter('user_stream_update', user.toJSONForPublic(this.sessions, Database.libraryItems))
SocketAuthority.clientEmitter(session.userId, 'user_session_closed', session.id) SocketAuthority.clientEmitter(session.userId, 'user_session_closed', session.id)
return this.removeSession(session.id) return this.removeSession(session.id)
} }
@ -268,10 +277,10 @@ class PlaybackSessionManager {
if (!session.timeListening) return // Do not save a session with no listening time if (!session.timeListening) return // Do not save a session with no listening time
if (session.lastSave) { if (session.lastSave) {
return this.db.updateEntity('session', session) return Database.updatePlaybackSession(session)
} else { } else {
session.lastSave = Date.now() session.lastSave = Date.now()
return this.db.insertEntity('session', session) return Database.createPlaybackSession(session)
} }
} }
@ -305,16 +314,5 @@ class PlaybackSessionManager {
Logger.error(`[PlaybackSessionManager] cleanOrphanStreams failed`, error) Logger.error(`[PlaybackSessionManager] cleanOrphanStreams failed`, error)
} }
} }
// Android app v0.9.54 and below had a bug where listening time was sending unix timestamp
// See https://github.com/advplyr/audiobookshelf/issues/868
// Remove playback sessions with listening time too high
async removeInvalidSessions() {
const selectFunc = (session) => isNaN(session.timeListening) || Number(session.timeListening) > 36000000
const numSessionsRemoved = await this.db.removeEntities('session', selectFunc, true)
if (numSessionsRemoved) {
Logger.info(`[PlaybackSessionManager] Removed ${numSessionsRemoved} invalid playback sessions`)
}
}
} }
module.exports = PlaybackSessionManager module.exports = PlaybackSessionManager

View File

@ -1,5 +1,6 @@
const Logger = require('../Logger') const Logger = require('../Logger')
const SocketAuthority = require('../SocketAuthority') const SocketAuthority = require('../SocketAuthority')
const Database = require('../Database')
const fs = require('../libs/fsExtra') const fs = require('../libs/fsExtra')
@ -19,8 +20,7 @@ const AudioFile = require('../objects/files/AudioFile')
const Task = require("../objects/Task") const Task = require("../objects/Task")
class PodcastManager { class PodcastManager {
constructor(db, watcher, notificationManager, taskManager) { constructor(watcher, notificationManager, taskManager) {
this.db = db
this.watcher = watcher this.watcher = watcher
this.notificationManager = notificationManager this.notificationManager = notificationManager
this.taskManager = taskManager this.taskManager = taskManager
@ -32,10 +32,6 @@ class PodcastManager {
this.MaxFailedEpisodeChecks = 24 this.MaxFailedEpisodeChecks = 24
} }
get serverSettings() {
return this.db.serverSettings || {}
}
getEpisodeDownloadsInQueue(libraryItemId) { getEpisodeDownloadsInQueue(libraryItemId) {
return this.downloadQueue.filter(d => d.libraryItemId === libraryItemId) return this.downloadQueue.filter(d => d.libraryItemId === libraryItemId)
} }
@ -59,6 +55,7 @@ class PodcastManager {
const newPe = new PodcastEpisode() const newPe = new PodcastEpisode()
newPe.setData(ep, index++) newPe.setData(ep, index++)
newPe.libraryItemId = libraryItem.id newPe.libraryItemId = libraryItem.id
newPe.podcastId = libraryItem.media.id
const newPeDl = new PodcastEpisodeDownload() const newPeDl = new PodcastEpisodeDownload()
newPeDl.setData(newPe, libraryItem, isAutoDownload, libraryItem.libraryId) newPeDl.setData(newPe, libraryItem, isAutoDownload, libraryItem.libraryId)
this.startPodcastEpisodeDownload(newPeDl) this.startPodcastEpisodeDownload(newPeDl)
@ -153,7 +150,7 @@ class PodcastManager {
return false return false
} }
const libraryItem = this.db.libraryItems.find(li => li.id === this.currentDownload.libraryItem.id) const libraryItem = Database.libraryItems.find(li => li.id === this.currentDownload.libraryItem.id)
if (!libraryItem) { if (!libraryItem) {
Logger.error(`[PodcastManager] Podcast Episode finished but library item was not found ${this.currentDownload.libraryItem.id}`) Logger.error(`[PodcastManager] Podcast Episode finished but library item was not found ${this.currentDownload.libraryItem.id}`)
return false return false
@ -182,7 +179,7 @@ class PodcastManager {
} }
libraryItem.updatedAt = Date.now() libraryItem.updatedAt = Date.now()
await this.db.updateLibraryItem(libraryItem) await Database.updateLibraryItem(libraryItem)
SocketAuthority.emitter('item_updated', libraryItem.toJSONExpanded()) SocketAuthority.emitter('item_updated', libraryItem.toJSONExpanded())
const podcastEpisodeExpanded = podcastEpisode.toJSONExpanded() const podcastEpisodeExpanded = podcastEpisode.toJSONExpanded()
podcastEpisodeExpanded.libraryItem = libraryItem.toJSONExpanded() podcastEpisodeExpanded.libraryItem = libraryItem.toJSONExpanded()
@ -235,6 +232,7 @@ class PodcastManager {
} }
const newAudioFile = new AudioFile() const newAudioFile = new AudioFile()
newAudioFile.setDataFromProbe(libraryFile, mediaProbeData) newAudioFile.setDataFromProbe(libraryFile, mediaProbeData)
newAudioFile.index = 1
return newAudioFile return newAudioFile
} }
@ -274,7 +272,7 @@ class PodcastManager {
libraryItem.media.lastEpisodeCheck = Date.now() libraryItem.media.lastEpisodeCheck = Date.now()
libraryItem.updatedAt = Date.now() libraryItem.updatedAt = Date.now()
await this.db.updateLibraryItem(libraryItem) await Database.updateLibraryItem(libraryItem)
SocketAuthority.emitter('item_updated', libraryItem.toJSONExpanded()) SocketAuthority.emitter('item_updated', libraryItem.toJSONExpanded())
return libraryItem.media.autoDownloadEpisodes return libraryItem.media.autoDownloadEpisodes
} }
@ -313,7 +311,7 @@ class PodcastManager {
libraryItem.media.lastEpisodeCheck = Date.now() libraryItem.media.lastEpisodeCheck = Date.now()
libraryItem.updatedAt = Date.now() libraryItem.updatedAt = Date.now()
await this.db.updateLibraryItem(libraryItem) await Database.updateLibraryItem(libraryItem)
SocketAuthority.emitter('item_updated', libraryItem.toJSONExpanded()) SocketAuthority.emitter('item_updated', libraryItem.toJSONExpanded())
return newEpisodes return newEpisodes

View File

@ -2,35 +2,28 @@ const Path = require('path')
const Logger = require('../Logger') const Logger = require('../Logger')
const SocketAuthority = require('../SocketAuthority') const SocketAuthority = require('../SocketAuthority')
const Database = require('../Database')
const fs = require('../libs/fsExtra') const fs = require('../libs/fsExtra')
const Feed = require('../objects/Feed') const Feed = require('../objects/Feed')
class RssFeedManager { class RssFeedManager {
constructor(db) { constructor() { }
this.db = db
this.feeds = {}
}
get feedsArray() {
return Object.values(this.feeds)
}
validateFeedEntity(feedObj) { validateFeedEntity(feedObj) {
if (feedObj.entityType === 'collection') { if (feedObj.entityType === 'collection') {
if (!this.db.collections.some(li => li.id === feedObj.entityId)) { if (!Database.collections.some(li => li.id === feedObj.entityId)) {
Logger.error(`[RssFeedManager] Removing feed "${feedObj.id}". Collection "${feedObj.entityId}" not found`) Logger.error(`[RssFeedManager] Removing feed "${feedObj.id}". Collection "${feedObj.entityId}" not found`)
return false return false
} }
} else if (feedObj.entityType === 'libraryItem') { } else if (feedObj.entityType === 'libraryItem') {
if (!this.db.libraryItems.some(li => li.id === feedObj.entityId)) { if (!Database.libraryItems.some(li => li.id === feedObj.entityId)) {
Logger.error(`[RssFeedManager] Removing feed "${feedObj.id}". Library item "${feedObj.entityId}" not found`) Logger.error(`[RssFeedManager] Removing feed "${feedObj.id}". Library item "${feedObj.entityId}" not found`)
return false return false
} }
} else if (feedObj.entityType === 'series') { } else if (feedObj.entityType === 'series') {
const series = this.db.series.find(s => s.id === feedObj.entityId) const series = Database.series.find(s => s.id === feedObj.entityId)
const hasSeriesBook = this.db.libraryItems.some(li => li.mediaType === 'book' && li.media.metadata.hasSeries(series.id) && li.media.tracks.length) const hasSeriesBook = series ? Database.libraryItems.some(li => li.mediaType === 'book' && li.media.metadata.hasSeries(series.id) && li.media.tracks.length) : false
if (!hasSeriesBook) { if (!hasSeriesBook) {
Logger.error(`[RssFeedManager] Removing feed "${feedObj.id}". Series "${feedObj.entityId}" not found or has no audio tracks`) Logger.error(`[RssFeedManager] Removing feed "${feedObj.id}". Series "${feedObj.entityId}" not found or has no audio tracks`)
return false return false
@ -43,46 +36,37 @@ class RssFeedManager {
} }
async init() { async init() {
const feedObjects = await this.db.getAllEntities('feed') for (const feed of Database.feeds) {
if (!feedObjects || !feedObjects.length) return
for (const feedObj of feedObjects) {
// Migration: In v2.2.12 entityType "item" was updated to "libraryItem"
if (feedObj.entityType === 'item') {
feedObj.entityType = 'libraryItem'
await this.db.updateEntity('feed', feedObj)
}
// Remove invalid feeds // Remove invalid feeds
if (!this.validateFeedEntity(feedObj)) { if (!this.validateFeedEntity(feed)) {
await this.db.removeEntity('feed', feedObj.id) await Database.removeFeed(feed.id)
} }
const feed = new Feed(feedObj)
this.feeds[feed.id] = feed
Logger.info(`[RssFeedManager] Opened rss feed ${feed.feedUrl}`)
} }
} }
findFeedForEntityId(entityId) { findFeedForEntityId(entityId) {
return Object.values(this.feeds).find(feed => feed.entityId === entityId) return Database.feeds.find(feed => feed.entityId === entityId)
} }
findFeed(feedId) { findFeedBySlug(slug) {
return this.feeds[feedId] || null return Database.feeds.find(feed => feed.slug === slug)
}
findFeed(id) {
return Database.feeds.find(feed => feed.id === id)
} }
async getFeed(req, res) { async getFeed(req, res) {
const feed = this.feeds[req.params.id] const feed = this.findFeedBySlug(req.params.slug)
if (!feed) { if (!feed) {
Logger.debug(`[RssFeedManager] Feed not found ${req.params.id}`) Logger.warn(`[RssFeedManager] Feed not found ${req.params.slug}`)
res.sendStatus(404) res.sendStatus(404)
return return
} }
// Check if feed needs to be updated // Check if feed needs to be updated
if (feed.entityType === 'libraryItem') { if (feed.entityType === 'libraryItem') {
const libraryItem = this.db.getLibraryItem(feed.entityId) const libraryItem = Database.getLibraryItem(feed.entityId)
let mostRecentlyUpdatedAt = libraryItem.updatedAt let mostRecentlyUpdatedAt = libraryItem.updatedAt
if (libraryItem.isPodcast) { if (libraryItem.isPodcast) {
@ -94,12 +78,12 @@ class RssFeedManager {
if (libraryItem && (!feed.entityUpdatedAt || mostRecentlyUpdatedAt > feed.entityUpdatedAt)) { if (libraryItem && (!feed.entityUpdatedAt || mostRecentlyUpdatedAt > feed.entityUpdatedAt)) {
Logger.debug(`[RssFeedManager] Updating RSS feed for item ${libraryItem.id} "${libraryItem.media.metadata.title}"`) Logger.debug(`[RssFeedManager] Updating RSS feed for item ${libraryItem.id} "${libraryItem.media.metadata.title}"`)
feed.updateFromItem(libraryItem) feed.updateFromItem(libraryItem)
await this.db.updateEntity('feed', feed) await Database.updateFeed(feed)
} }
} else if (feed.entityType === 'collection') { } else if (feed.entityType === 'collection') {
const collection = this.db.collections.find(c => c.id === feed.entityId) const collection = Database.collections.find(c => c.id === feed.entityId)
if (collection) { if (collection) {
const collectionExpanded = collection.toJSONExpanded(this.db.libraryItems) const collectionExpanded = collection.toJSONExpanded(Database.libraryItems)
// Find most recently updated item in collection // Find most recently updated item in collection
let mostRecentlyUpdatedAt = collectionExpanded.lastUpdate let mostRecentlyUpdatedAt = collectionExpanded.lastUpdate
@ -113,15 +97,15 @@ class RssFeedManager {
Logger.debug(`[RssFeedManager] Updating RSS feed for collection "${collection.name}"`) Logger.debug(`[RssFeedManager] Updating RSS feed for collection "${collection.name}"`)
feed.updateFromCollection(collectionExpanded) feed.updateFromCollection(collectionExpanded)
await this.db.updateEntity('feed', feed) await Database.updateFeed(feed)
} }
} }
} else if (feed.entityType === 'series') { } else if (feed.entityType === 'series') {
const series = this.db.series.find(s => s.id === feed.entityId) const series = Database.series.find(s => s.id === feed.entityId)
if (series) { if (series) {
const seriesJson = series.toJSON() const seriesJson = series.toJSON()
// Get books in series that have audio tracks // Get books in series that have audio tracks
seriesJson.books = this.db.libraryItems.filter(li => li.mediaType === 'book' && li.media.metadata.hasSeries(series.id) && li.media.tracks.length) seriesJson.books = Database.libraryItems.filter(li => li.mediaType === 'book' && li.media.metadata.hasSeries(series.id) && li.media.tracks.length)
// Find most recently updated item in series // Find most recently updated item in series
let mostRecentlyUpdatedAt = seriesJson.updatedAt let mostRecentlyUpdatedAt = seriesJson.updatedAt
@ -140,7 +124,7 @@ class RssFeedManager {
Logger.debug(`[RssFeedManager] Updating RSS feed for series "${seriesJson.name}"`) Logger.debug(`[RssFeedManager] Updating RSS feed for series "${seriesJson.name}"`)
feed.updateFromSeries(seriesJson) feed.updateFromSeries(seriesJson)
await this.db.updateEntity('feed', feed) await Database.updateFeed(feed)
} }
} }
} }
@ -151,9 +135,9 @@ class RssFeedManager {
} }
getFeedItem(req, res) { getFeedItem(req, res) {
const feed = this.feeds[req.params.id] const feed = this.findFeedBySlug(req.params.slug)
if (!feed) { if (!feed) {
Logger.debug(`[RssFeedManager] Feed not found ${req.params.id}`) Logger.debug(`[RssFeedManager] Feed not found ${req.params.slug}`)
res.sendStatus(404) res.sendStatus(404)
return return
} }
@ -167,9 +151,9 @@ class RssFeedManager {
} }
getFeedCover(req, res) { getFeedCover(req, res) {
const feed = this.feeds[req.params.id] const feed = this.findFeedBySlug(req.params.slug)
if (!feed) { if (!feed) {
Logger.debug(`[RssFeedManager] Feed not found ${req.params.id}`) Logger.debug(`[RssFeedManager] Feed not found ${req.params.slug}`)
res.sendStatus(404) res.sendStatus(404)
return return
} }
@ -194,10 +178,9 @@ class RssFeedManager {
const feed = new Feed() const feed = new Feed()
feed.setFromItem(user.id, slug, libraryItem, serverAddress, preventIndexing, ownerName, ownerEmail) feed.setFromItem(user.id, slug, libraryItem, serverAddress, preventIndexing, ownerName, ownerEmail)
this.feeds[feed.id] = feed
Logger.debug(`[RssFeedManager] Opened RSS feed "${feed.feedUrl}"`) Logger.info(`[RssFeedManager] Opened RSS feed "${feed.feedUrl}"`)
await this.db.insertEntity('feed', feed) await Database.createFeed(feed)
SocketAuthority.emitter('rss_feed_open', feed.toJSONMinified()) SocketAuthority.emitter('rss_feed_open', feed.toJSONMinified())
return feed return feed
} }
@ -211,10 +194,9 @@ class RssFeedManager {
const feed = new Feed() const feed = new Feed()
feed.setFromCollection(user.id, slug, collectionExpanded, serverAddress, preventIndexing, ownerName, ownerEmail) feed.setFromCollection(user.id, slug, collectionExpanded, serverAddress, preventIndexing, ownerName, ownerEmail)
this.feeds[feed.id] = feed
Logger.debug(`[RssFeedManager] Opened RSS feed "${feed.feedUrl}"`) Logger.info(`[RssFeedManager] Opened RSS feed "${feed.feedUrl}"`)
await this.db.insertEntity('feed', feed) await Database.createFeed(feed)
SocketAuthority.emitter('rss_feed_open', feed.toJSONMinified()) SocketAuthority.emitter('rss_feed_open', feed.toJSONMinified())
return feed return feed
} }
@ -228,25 +210,28 @@ class RssFeedManager {
const feed = new Feed() const feed = new Feed()
feed.setFromSeries(user.id, slug, seriesExpanded, serverAddress, preventIndexing, ownerName, ownerEmail) feed.setFromSeries(user.id, slug, seriesExpanded, serverAddress, preventIndexing, ownerName, ownerEmail)
this.feeds[feed.id] = feed
Logger.debug(`[RssFeedManager] Opened RSS feed "${feed.feedUrl}"`) Logger.info(`[RssFeedManager] Opened RSS feed "${feed.feedUrl}"`)
await this.db.insertEntity('feed', feed) await Database.createFeed(feed)
SocketAuthority.emitter('rss_feed_open', feed.toJSONMinified()) SocketAuthority.emitter('rss_feed_open', feed.toJSONMinified())
return feed return feed
} }
async handleCloseFeed(feed) { async handleCloseFeed(feed) {
if (!feed) return if (!feed) return
await this.db.removeEntity('feed', feed.id) await Database.removeFeed(feed.id)
SocketAuthority.emitter('rss_feed_closed', feed.toJSONMinified()) SocketAuthority.emitter('rss_feed_closed', feed.toJSONMinified())
delete this.feeds[feed.id]
Logger.info(`[RssFeedManager] Closed RSS feed "${feed.feedUrl}"`) Logger.info(`[RssFeedManager] Closed RSS feed "${feed.feedUrl}"`)
} }
closeRssFeed(id) { async closeRssFeed(req, res) {
if (!this.feeds[id]) return const feed = this.findFeed(req.params.id)
return this.handleCloseFeed(this.feeds[id]) if (!feed) {
Logger.error(`[RssFeedManager] RSS feed not found with id "${req.params.id}"`)
return res.sendStatus(404)
}
await this.handleCloseFeed(feed)
res.sendStatus(200)
} }
closeFeedForEntityId(entityId) { closeFeedForEntityId(entityId) {

86
server/models/Author.js Normal file
View File

@ -0,0 +1,86 @@
const { DataTypes, Model } = require('sequelize')
const oldAuthor = require('../objects/entities/Author')
module.exports = (sequelize) => {
  // Sequelize model for the `authors` table, bridging rows to the legacy
  // in-memory Author object used by the rest of the server.
  class Author extends Model {
    // Load every author row and convert each to the legacy Author shape.
    static async getOldAuthors() {
      const authors = await this.findAll()
      return authors.map(au => au.getOldAuthor())
    }

    // Map this row to the legacy Author object.
    // Timestamps are converted from Date to epoch milliseconds (valueOf).
    getOldAuthor() {
      return new oldAuthor({
        id: this.id,
        asin: this.asin,
        name: this.name,
        description: this.description,
        imagePath: this.imagePath,
        libraryId: this.libraryId,
        addedAt: this.createdAt.valueOf(),
        updatedAt: this.updatedAt.valueOf()
      })
    }

    // Persist changes from a legacy Author object to its existing row.
    // Returns the sequelize update result promise ([affectedCount]).
    static updateFromOld(oldAuthor) {
      const author = this.getFromOld(oldAuthor)
      return this.update(author, {
        where: {
          id: author.id
        }
      })
    }

    // Insert a new row from a legacy Author object.
    static createFromOld(oldAuthor) {
      const author = this.getFromOld(oldAuthor)
      return this.create(author)
    }

    // Bulk-insert rows from an array of legacy Author objects.
    static createBulkFromOld(oldAuthors) {
      const authors = oldAuthors.map(this.getFromOld)
      return this.bulkCreate(authors)
    }

    // Translate a legacy Author object into a plain row object for this model.
    static getFromOld(oldAuthor) {
      return {
        id: oldAuthor.id,
        name: oldAuthor.name,
        asin: oldAuthor.asin,
        description: oldAuthor.description,
        imagePath: oldAuthor.imagePath,
        libraryId: oldAuthor.libraryId
      }
    }

    // Delete the row with the given id.
    static removeById(authorId) {
      return this.destroy({
        where: {
          id: authorId
        }
      })
    }
  }

  Author.init({
    id: {
      type: DataTypes.UUID,
      defaultValue: DataTypes.UUIDV4,
      primaryKey: true
    },
    name: DataTypes.STRING,
    asin: DataTypes.STRING,
    description: DataTypes.TEXT,
    imagePath: DataTypes.STRING
  }, {
    sequelize,
    modelName: 'author'
  })

  // An author belongs to exactly one library; deleting the library
  // cascades and removes its authors.
  const { library } = sequelize.models
  library.hasMany(Author, {
    onDelete: 'CASCADE'
  })
  Author.belongsTo(library)

  return Author
}

121
server/models/Book.js Normal file
View File

@ -0,0 +1,121 @@
const { DataTypes, Model } = require('sequelize')
const Logger = require('../Logger')
module.exports = (sequelize) => {
  // Sequelize model for the `books` table. Converts between rows and the
  // legacy book media object shape.
  class Book extends Model {
    /**
     * Build the legacy book media object from an expanded library item
     * (a libraryItem row with its media, authors and series joined in).
     * @param {object} libraryItemExpanded
     * @returns {object} legacy book media object (plain object, not a class)
     */
    static getOldBook(libraryItemExpanded) {
      const bookExpanded = libraryItemExpanded.media
      // Flatten joined author rows to { id, name }
      const authors = bookExpanded.authors.map(au => {
        return {
          id: au.id,
          name: au.name
        }
      })
      // Flatten joined series rows; the sequence lives on the bookSeries join row
      const series = bookExpanded.series.map(se => {
        return {
          id: se.id,
          name: se.name,
          sequence: se.bookSeries.sequence
        }
      })
      return {
        id: bookExpanded.id,
        libraryItemId: libraryItemExpanded.id,
        coverPath: bookExpanded.coverPath,
        tags: bookExpanded.tags,
        audioFiles: bookExpanded.audioFiles,
        chapters: bookExpanded.chapters,
        ebookFile: bookExpanded.ebookFile,
        metadata: {
          title: bookExpanded.title,
          subtitle: bookExpanded.subtitle,
          authors: authors,
          narrators: bookExpanded.narrators,
          series: series,
          genres: bookExpanded.genres,
          publishedYear: bookExpanded.publishedYear,
          publishedDate: bookExpanded.publishedDate,
          publisher: bookExpanded.publisher,
          description: bookExpanded.description,
          isbn: bookExpanded.isbn,
          asin: bookExpanded.asin,
          language: bookExpanded.language,
          explicit: bookExpanded.explicit,
          abridged: bookExpanded.abridged
        }
      }
    }

    /**
     * Update the row for a legacy book object.
     * Errors are logged and reported as a `false` result rather than a
     * rejection, so callers get a plain boolean.
     * @param {object} oldBook
     * @returns {Promise<boolean>} true if a row was updated
     */
    static saveFromOld(oldBook) {
      const book = this.getFromOld(oldBook)
      return this.update(book, {
        where: {
          id: book.id
        }
      }).then(result => result[0] > 0).catch((error) => {
        Logger.error(`[Book] Failed to save book ${book.id}`, error)
        return false
      })
    }

    // Translate a legacy book object into a plain row object for this model.
    // JSON columns (audioFiles, ebookFile) are serialized via toJSON().
    static getFromOld(oldBook) {
      return {
        id: oldBook.id,
        title: oldBook.metadata.title,
        subtitle: oldBook.metadata.subtitle,
        publishedYear: oldBook.metadata.publishedYear,
        publishedDate: oldBook.metadata.publishedDate,
        publisher: oldBook.metadata.publisher,
        description: oldBook.metadata.description,
        isbn: oldBook.metadata.isbn,
        asin: oldBook.metadata.asin,
        language: oldBook.metadata.language,
        explicit: !!oldBook.metadata.explicit,
        abridged: !!oldBook.metadata.abridged,
        narrators: oldBook.metadata.narrators,
        ebookFile: oldBook.ebookFile?.toJSON() || null,
        coverPath: oldBook.coverPath,
        audioFiles: oldBook.audioFiles?.map(af => af.toJSON()) || [],
        chapters: oldBook.chapters,
        tags: oldBook.tags,
        genres: oldBook.metadata.genres
      }
    }
  }

  Book.init({
    id: {
      type: DataTypes.UUID,
      defaultValue: DataTypes.UUIDV4,
      primaryKey: true
    },
    title: DataTypes.STRING,
    subtitle: DataTypes.STRING,
    publishedYear: DataTypes.STRING,
    publishedDate: DataTypes.STRING,
    publisher: DataTypes.STRING,
    description: DataTypes.TEXT,
    isbn: DataTypes.STRING,
    asin: DataTypes.STRING,
    language: DataTypes.STRING,
    explicit: DataTypes.BOOLEAN,
    abridged: DataTypes.BOOLEAN,
    coverPath: DataTypes.STRING,
    narrators: DataTypes.JSON,
    audioFiles: DataTypes.JSON,
    ebookFile: DataTypes.JSON,
    chapters: DataTypes.JSON,
    tags: DataTypes.JSON,
    genres: DataTypes.JSON
  }, {
    sequelize,
    modelName: 'book'
  })

  return Book
}

View File

@ -0,0 +1,40 @@
const { DataTypes, Model } = require('sequelize')
module.exports = (sequelize) => {
class BookAuthor extends Model {
static removeByIds(authorId = null, bookId = null) {
const where = {}
if (authorId) where.authorId = authorId
if (bookId) where.bookId = bookId
return this.destroy({
where
})
}
}
BookAuthor.init({
id: {
type: DataTypes.UUID,
defaultValue: DataTypes.UUIDV4,
primaryKey: true
}
}, {
sequelize,
modelName: 'bookAuthor',
timestamps: false
})
// Super Many-to-Many
// ref: https://sequelize.org/docs/v6/advanced-association-concepts/advanced-many-to-many/#the-best-of-both-worlds-the-super-many-to-many-relationship
const { book, author } = sequelize.models
book.belongsToMany(author, { through: BookAuthor })
author.belongsToMany(book, { through: BookAuthor })
book.hasMany(BookAuthor)
BookAuthor.belongsTo(book)
author.hasMany(BookAuthor)
BookAuthor.belongsTo(author)
return BookAuthor
}

View File

@ -0,0 +1,41 @@
const { DataTypes, Model } = require('sequelize')
module.exports = (sequelize) => {
  // Join model for the book <-> series many-to-many relationship.
  // Carries the book's sequence (position) within the series.
  class BookSeries extends Model {
    // Delete join rows matching the given ids; either id may be omitted
    // (null) to match on the other alone.
    static removeByIds(seriesId = null, bookId = null) {
      const where = {}
      if (seriesId) where.seriesId = seriesId
      if (bookId) where.bookId = bookId
      return this.destroy({
        where
      })
    }
  }

  BookSeries.init({
    id: {
      type: DataTypes.UUID,
      defaultValue: DataTypes.UUIDV4,
      primaryKey: true
    },
    // Position of the book in the series (string to allow values like "1.5")
    sequence: DataTypes.STRING
  }, {
    sequelize,
    modelName: 'bookSeries',
    timestamps: false
  })

  // Super Many-to-Many
  // ref: https://sequelize.org/docs/v6/advanced-association-concepts/advanced-many-to-many/#the-best-of-both-worlds-the-super-many-to-many-relationship
  const { book, series } = sequelize.models
  book.belongsToMany(series, { through: BookSeries })
  series.belongsToMany(book, { through: BookSeries })
  book.hasMany(BookSeries)
  BookSeries.belongsTo(book)
  series.hasMany(BookSeries)
  BookSeries.belongsTo(series)

  return BookSeries
}

116
server/models/Collection.js Normal file
View File

@ -0,0 +1,116 @@
const { DataTypes, Model } = require('sequelize')
const oldCollection = require('../objects/Collection')
const { areEquivalent } = require('../utils/index')
module.exports = (sequelize) => {
class Collection extends Model {
static async getOldCollections() {
const collections = await this.findAll({
include: {
model: sequelize.models.book,
include: sequelize.models.libraryItem
},
order: [[sequelize.models.book, sequelize.models.collectionBook, 'order', 'ASC']]
})
return collections.map(c => this.getOldCollection(c))
}
static getOldCollection(collectionExpanded) {
const libraryItemIds = collectionExpanded.books?.map(b => b.libraryItem?.id || null).filter(lid => lid) || []
return new oldCollection({
id: collectionExpanded.id,
libraryId: collectionExpanded.libraryId,
name: collectionExpanded.name,
description: collectionExpanded.description,
books: libraryItemIds,
lastUpdate: collectionExpanded.updatedAt.valueOf(),
createdAt: collectionExpanded.createdAt.valueOf()
})
}
static createFromOld(oldCollection) {
const collection = this.getFromOld(oldCollection)
return this.create(collection)
}
static async fullUpdateFromOld(oldCollection, collectionBooks) {
const existingCollection = await this.findByPk(oldCollection.id, {
include: sequelize.models.collectionBook
})
if (!existingCollection) return false
let hasUpdates = false
const collection = this.getFromOld(oldCollection)
for (const cb of collectionBooks) {
const existingCb = existingCollection.collectionBooks.find(i => i.bookId === cb.bookId)
if (!existingCb) {
await sequelize.models.collectionBook.create(cb)
hasUpdates = true
} else if (existingCb.order != cb.order) {
await existingCb.update({ order: cb.order })
hasUpdates = true
}
}
for (const cb of existingCollection.collectionBooks) {
// collectionBook was removed
if (!collectionBooks.some(i => i.bookId === cb.bookId)) {
await cb.destroy()
hasUpdates = true
}
}
let hasCollectionUpdates = false
for (const key in collection) {
let existingValue = existingCollection[key]
if (existingValue instanceof Date) existingValue = existingValue.valueOf()
if (!areEquivalent(collection[key], existingValue)) {
hasCollectionUpdates = true
}
}
if (hasCollectionUpdates) {
existingCollection.update(collection)
hasUpdates = true
}
return hasUpdates
}
static getFromOld(oldCollection) {
return {
id: oldCollection.id,
name: oldCollection.name,
description: oldCollection.description,
libraryId: oldCollection.libraryId
}
}
static removeById(collectionId) {
return this.destroy({
where: {
id: collectionId
}
})
}
}
Collection.init({
id: {
type: DataTypes.UUID,
defaultValue: DataTypes.UUIDV4,
primaryKey: true
},
name: DataTypes.STRING,
description: DataTypes.TEXT
}, {
sequelize,
modelName: 'collection'
})
const { library } = sequelize.models
library.hasMany(Collection)
Collection.belongsTo(library)
return Collection
}

View File

@ -0,0 +1,46 @@
const { DataTypes, Model } = require('sequelize')
module.exports = (sequelize) => {
class CollectionBook extends Model {
static removeByIds(collectionId, bookId) {
return this.destroy({
where: {
bookId,
collectionId
}
})
}
}
CollectionBook.init({
id: {
type: DataTypes.UUID,
defaultValue: DataTypes.UUIDV4,
primaryKey: true
},
order: DataTypes.INTEGER
}, {
sequelize,
timestamps: true,
updatedAt: false,
modelName: 'collectionBook'
})
// Super Many-to-Many
// ref: https://sequelize.org/docs/v6/advanced-association-concepts/advanced-many-to-many/#the-best-of-both-worlds-the-super-many-to-many-relationship
const { book, collection } = sequelize.models
book.belongsToMany(collection, { through: CollectionBook })
collection.belongsToMany(book, { through: CollectionBook })
book.hasMany(CollectionBook, {
onDelete: 'CASCADE'
})
CollectionBook.belongsTo(book)
collection.hasMany(CollectionBook, {
onDelete: 'CASCADE'
})
CollectionBook.belongsTo(collection)
return CollectionBook
}

116
server/models/Device.js Normal file
View File

@ -0,0 +1,116 @@
const { DataTypes, Model } = require('sequelize')
const oldDevice = require('../objects/DeviceInfo')
module.exports = (sequelize) => {
  // Sequelize model for the `devices` table, bridging rows to the legacy
  // DeviceInfo object. Rarely-used attributes are folded into the
  // `extraData` JSON column.
  class Device extends Model {
    // Map this row to the legacy DeviceInfo object.
    // `deviceVersion` holds the Android SDK version for the Android client
    // and the browser version otherwise.
    // NOTE(review): assumes extraData is always an object (getFromOld always
    // writes one); a row with null extraData would throw here — confirm
    getOldDevice() {
      let browserVersion = null
      let sdkVersion = null
      if (this.clientName === 'Abs Android') {
        sdkVersion = this.deviceVersion || null
      } else {
        browserVersion = this.deviceVersion || null
      }

      return new oldDevice({
        id: this.id,
        deviceId: this.deviceId,
        userId: this.userId,
        ipAddress: this.ipAddress,
        browserName: this.extraData.browserName || null,
        browserVersion,
        osName: this.extraData.osName || null,
        osVersion: this.extraData.osVersion || null,
        clientVersion: this.clientVersion || null,
        manufacturer: this.extraData.manufacturer || null,
        model: this.extraData.model || null,
        sdkVersion,
        deviceName: this.deviceName,
        clientName: this.clientName
      })
    }

    // Find a device row by its client-generated deviceId and return it as a
    // legacy DeviceInfo object, or null when not found.
    static async getOldDeviceByDeviceId(deviceId) {
      const device = await this.findOne({
        where: {
          deviceId
        }
      })
      if (!device) return null
      return device.getOldDevice()
    }

    // Insert a new row from a legacy DeviceInfo object.
    static createFromOld(oldDevice) {
      const device = this.getFromOld(oldDevice)
      return this.create(device)
    }

    // Update the existing row for a legacy DeviceInfo object (matched by id).
    static updateFromOld(oldDevice) {
      const device = this.getFromOld(oldDevice)
      return this.update(device, {
        where: {
          id: device.id
        }
      })
    }

    // Translate a legacy DeviceInfo object into a plain row object.
    // Optional attributes are collected into the extraData JSON column;
    // only keys that are present are stored.
    static getFromOld(oldDeviceInfo) {
      let extraData = {}

      if (oldDeviceInfo.manufacturer) {
        extraData.manufacturer = oldDeviceInfo.manufacturer
      }
      if (oldDeviceInfo.model) {
        extraData.model = oldDeviceInfo.model
      }
      if (oldDeviceInfo.osName) {
        extraData.osName = oldDeviceInfo.osName
      }
      if (oldDeviceInfo.osVersion) {
        extraData.osVersion = oldDeviceInfo.osVersion
      }
      if (oldDeviceInfo.browserName) {
        extraData.browserName = oldDeviceInfo.browserName
      }

      return {
        id: oldDeviceInfo.id,
        deviceId: oldDeviceInfo.deviceId,
        clientName: oldDeviceInfo.clientName || null,
        clientVersion: oldDeviceInfo.clientVersion || null,
        ipAddress: oldDeviceInfo.ipAddress,
        deviceName: oldDeviceInfo.deviceName || null,
        deviceVersion: oldDeviceInfo.sdkVersion || oldDeviceInfo.browserVersion || null,
        userId: oldDeviceInfo.userId,
        extraData
      }
    }
  }

  Device.init({
    id: {
      type: DataTypes.UUID,
      defaultValue: DataTypes.UUIDV4,
      primaryKey: true
    },
    deviceId: DataTypes.STRING,
    clientName: DataTypes.STRING, // e.g. Abs Web, Abs Android
    clientVersion: DataTypes.STRING, // e.g. Server version or mobile version
    ipAddress: DataTypes.STRING,
    deviceName: DataTypes.STRING, // e.g. Windows 10 Chrome, Google Pixel 6, Apple iPhone 10,3
    deviceVersion: DataTypes.STRING, // e.g. Browser version or Android SDK
    extraData: DataTypes.JSON
  }, {
    sequelize,
    modelName: 'device'
  })

  // A device belongs to one user; deleting the user removes its devices.
  const { user } = sequelize.models
  user.hasMany(Device, {
    onDelete: 'CASCADE'
  })
  Device.belongsTo(user)

  return Device
}

253
server/models/Feed.js Normal file
View File

@ -0,0 +1,253 @@
const { DataTypes, Model } = require('sequelize')
const oldFeed = require('../objects/Feed')
const areEquivalent = require('../utils/areEquivalent')
/*
* Polymorphic association: https://sequelize.org/docs/v6/advanced-association-concepts/polymorphic-associations/
* Feeds can be created from LibraryItem, Collection, Playlist or Series
*/
module.exports = (sequelize) => {
class Feed extends Model {
static async getOldFeeds() {
const feeds = await this.findAll({
include: {
model: sequelize.models.feedEpisode
}
})
return feeds.map(f => this.getOldFeed(f))
}
static getOldFeed(feedExpanded) {
const episodes = feedExpanded.feedEpisodes.map((feedEpisode) => feedEpisode.getOldEpisode())
return new oldFeed({
id: feedExpanded.id,
slug: feedExpanded.slug,
userId: feedExpanded.userId,
entityType: feedExpanded.entityType,
entityId: feedExpanded.entityId,
entityUpdatedAt: feedExpanded.entityUpdatedAt?.valueOf() || null,
meta: {
title: feedExpanded.title,
description: feedExpanded.description,
author: feedExpanded.author,
imageUrl: feedExpanded.imageURL,
feedUrl: feedExpanded.feedURL,
link: feedExpanded.siteURL,
explicit: feedExpanded.explicit,
type: feedExpanded.podcastType,
language: feedExpanded.language,
preventIndexing: feedExpanded.preventIndexing,
ownerName: feedExpanded.ownerName,
ownerEmail: feedExpanded.ownerEmail
},
serverAddress: feedExpanded.serverAddress,
feedUrl: feedExpanded.feedURL,
episodes,
createdAt: feedExpanded.createdAt.valueOf(),
updatedAt: feedExpanded.updatedAt.valueOf()
})
}
static removeById(feedId) {
return this.destroy({
where: {
id: feedId
}
})
}
static async fullCreateFromOld(oldFeed) {
const feedObj = this.getFromOld(oldFeed)
const newFeed = await this.create(feedObj)
if (oldFeed.episodes?.length) {
for (const oldFeedEpisode of oldFeed.episodes) {
const feedEpisode = sequelize.models.feedEpisode.getFromOld(oldFeedEpisode)
feedEpisode.feedId = newFeed.id
await sequelize.models.feedEpisode.create(feedEpisode)
}
}
}
static async fullUpdateFromOld(oldFeed) {
const oldFeedEpisodes = oldFeed.episodes || []
const feedObj = this.getFromOld(oldFeed)
const existingFeed = await this.findByPk(feedObj.id, {
include: sequelize.models.feedEpisode
})
if (!existingFeed) return false
let hasUpdates = false
for (const feedEpisode of existingFeed.feedEpisodes) {
const oldFeedEpisode = oldFeedEpisodes.find(ep => ep.id === feedEpisode.id)
// Episode removed
if (!oldFeedEpisode) {
feedEpisode.destroy()
} else {
let episodeHasUpdates = false
const oldFeedEpisodeCleaned = sequelize.models.feedEpisode.getFromOld(oldFeedEpisode)
for (const key in oldFeedEpisodeCleaned) {
if (!areEquivalent(oldFeedEpisodeCleaned[key], feedEpisode[key])) {
episodeHasUpdates = true
}
}
if (episodeHasUpdates) {
await feedEpisode.update(oldFeedEpisodeCleaned)
hasUpdates = true
}
}
}
let feedHasUpdates = false
for (const key in feedObj) {
let existingValue = existingFeed[key]
if (existingValue instanceof Date) existingValue = existingValue.valueOf()
if (!areEquivalent(existingValue, feedObj[key])) {
feedHasUpdates = true
}
}
if (feedHasUpdates) {
await existingFeed.update(feedObj)
hasUpdates = true
}
return hasUpdates
}
static getFromOld(oldFeed) {
const oldFeedMeta = oldFeed.meta || {}
return {
id: oldFeed.id,
slug: oldFeed.slug,
entityType: oldFeed.entityType,
entityId: oldFeed.entityId,
entityUpdatedAt: oldFeed.entityUpdatedAt,
serverAddress: oldFeed.serverAddress,
feedURL: oldFeed.feedUrl,
imageURL: oldFeedMeta.imageUrl,
siteURL: oldFeedMeta.link,
title: oldFeedMeta.title,
description: oldFeedMeta.description,
author: oldFeedMeta.author,
podcastType: oldFeedMeta.type || null,
language: oldFeedMeta.language || null,
ownerName: oldFeedMeta.ownerName || null,
ownerEmail: oldFeedMeta.ownerEmail || null,
explicit: !!oldFeedMeta.explicit,
preventIndexing: !!oldFeedMeta.preventIndexing,
userId: oldFeed.userId
}
}
getEntity(options) {
if (!this.entityType) return Promise.resolve(null)
const mixinMethodName = `get${sequelize.uppercaseFirst(this.entityType)}`
return this[mixinMethodName](options)
}
}
Feed.init({
id: {
type: DataTypes.UUID,
defaultValue: DataTypes.UUIDV4,
primaryKey: true
},
slug: DataTypes.STRING,
entityType: DataTypes.STRING,
entityId: DataTypes.UUIDV4,
entityUpdatedAt: DataTypes.DATE,
serverAddress: DataTypes.STRING,
feedURL: DataTypes.STRING,
imageURL: DataTypes.STRING,
siteURL: DataTypes.STRING,
title: DataTypes.STRING,
description: DataTypes.TEXT,
author: DataTypes.STRING,
podcastType: DataTypes.STRING,
language: DataTypes.STRING,
ownerName: DataTypes.STRING,
ownerEmail: DataTypes.STRING,
explicit: DataTypes.BOOLEAN,
preventIndexing: DataTypes.BOOLEAN
}, {
sequelize,
modelName: 'feed'
})
const { user, libraryItem, collection, series, playlist } = sequelize.models
user.hasMany(Feed)
Feed.belongsTo(user)
libraryItem.hasMany(Feed, {
foreignKey: 'entityId',
constraints: false,
scope: {
entityType: 'libraryItem'
}
})
Feed.belongsTo(libraryItem, { foreignKey: 'entityId', constraints: false })
collection.hasMany(Feed, {
foreignKey: 'entityId',
constraints: false,
scope: {
entityType: 'collection'
}
})
Feed.belongsTo(collection, { foreignKey: 'entityId', constraints: false })
series.hasMany(Feed, {
foreignKey: 'entityId',
constraints: false,
scope: {
entityType: 'series'
}
})
Feed.belongsTo(series, { foreignKey: 'entityId', constraints: false })
playlist.hasMany(Feed, {
foreignKey: 'entityId',
constraints: false,
scope: {
entityType: 'playlist'
}
})
Feed.belongsTo(playlist, { foreignKey: 'entityId', constraints: false })
Feed.addHook('afterFind', findResult => {
if (!findResult) return
if (!Array.isArray(findResult)) findResult = [findResult]
for (const instance of findResult) {
if (instance.entityType === 'libraryItem' && instance.libraryItem !== undefined) {
instance.entity = instance.libraryItem
instance.dataValues.entity = instance.dataValues.libraryItem
} else if (instance.entityType === 'collection' && instance.collection !== undefined) {
instance.entity = instance.collection
instance.dataValues.entity = instance.dataValues.collection
} else if (instance.entityType === 'series' && instance.series !== undefined) {
instance.entity = instance.series
instance.dataValues.entity = instance.dataValues.series
} else if (instance.entityType === 'playlist' && instance.playlist !== undefined) {
instance.entity = instance.playlist
instance.dataValues.entity = instance.dataValues.playlist
}
// To prevent mistakes:
delete instance.libraryItem
delete instance.dataValues.libraryItem
delete instance.collection
delete instance.dataValues.collection
delete instance.series
delete instance.dataValues.series
delete instance.playlist
delete instance.dataValues.playlist
}
})
return Feed
}

View File

@ -0,0 +1,82 @@
const { DataTypes, Model } = require('sequelize')
module.exports = (sequelize) => {
class FeedEpisode extends Model {
getOldEpisode() {
const enclosure = {
url: this.enclosureURL,
size: this.enclosureSize,
type: this.enclosureType
}
return {
id: this.id,
title: this.title,
description: this.description,
enclosure,
pubDate: this.pubDate,
link: this.siteURL,
author: this.author,
explicit: this.explicit,
duration: this.duration,
season: this.season,
episode: this.episode,
episodeType: this.episodeType,
fullPath: this.filePath
}
}
static getFromOld(oldFeedEpisode) {
return {
id: oldFeedEpisode.id,
title: oldFeedEpisode.title,
author: oldFeedEpisode.author,
description: oldFeedEpisode.description,
siteURL: oldFeedEpisode.link,
enclosureURL: oldFeedEpisode.enclosure?.url || null,
enclosureType: oldFeedEpisode.enclosure?.type || null,
enclosureSize: oldFeedEpisode.enclosure?.size || null,
pubDate: oldFeedEpisode.pubDate,
season: oldFeedEpisode.season || null,
episode: oldFeedEpisode.episode || null,
episodeType: oldFeedEpisode.episodeType || null,
duration: oldFeedEpisode.duration,
filePath: oldFeedEpisode.fullPath,
explicit: !!oldFeedEpisode.explicit
}
}
}
FeedEpisode.init({
id: {
type: DataTypes.UUID,
defaultValue: DataTypes.UUIDV4,
primaryKey: true
},
title: DataTypes.STRING,
author: DataTypes.STRING,
description: DataTypes.TEXT,
siteURL: DataTypes.STRING,
enclosureURL: DataTypes.STRING,
enclosureType: DataTypes.STRING,
enclosureSize: DataTypes.BIGINT,
pubDate: DataTypes.STRING,
season: DataTypes.STRING,
episode: DataTypes.STRING,
episodeType: DataTypes.STRING,
duration: DataTypes.FLOAT,
filePath: DataTypes.STRING,
explicit: DataTypes.BOOLEAN
}, {
sequelize,
modelName: 'feedEpisode'
})
const { feed } = sequelize.models
feed.hasMany(FeedEpisode, {
onDelete: 'CASCADE'
})
FeedEpisode.belongsTo(feed)
return FeedEpisode
}

137
server/models/Library.js Normal file
View File

@ -0,0 +1,137 @@
const { DataTypes, Model } = require('sequelize')
const Logger = require('../Logger')
const oldLibrary = require('../objects/Library')

module.exports = (sequelize) => {
  // Library model. Bridges the new DB rows and the legacy in-memory Library
  // objects; folders live in the separate libraryFolder model.
  class Library extends Model {
    /**
     * Load all libraries (with folders) mapped to legacy Library objects.
     * @returns {Promise<oldLibrary[]>}
     */
    static async getAllOldLibraries() {
      const libraries = await this.findAll({
        include: sequelize.models.libraryFolder
      })
      return libraries.map(lib => this.getOldLibrary(lib))
    }

    /**
     * Convert a library row (with its libraryFolders included) into a legacy
     * Library object.
     * @param {Library} libraryExpanded - row with libraryFolders eager-loaded
     * @returns {oldLibrary}
     */
    static getOldLibrary(libraryExpanded) {
      const folders = libraryExpanded.libraryFolders.map(folder => {
        return {
          id: folder.id,
          fullPath: folder.path,
          libraryId: folder.libraryId,
          addedAt: folder.createdAt.valueOf()
        }
      })
      return new oldLibrary({
        id: libraryExpanded.id,
        name: libraryExpanded.name,
        folders,
        displayOrder: libraryExpanded.displayOrder,
        icon: libraryExpanded.icon,
        mediaType: libraryExpanded.mediaType,
        provider: libraryExpanded.provider,
        settings: libraryExpanded.settings,
        createdAt: libraryExpanded.createdAt.valueOf(),
        lastUpdate: libraryExpanded.updatedAt.valueOf()
      })
    }

    /**
     * Create a library row (and its folder rows) from a legacy Library.
     * @param {object} oldLibrary
     * @returns {Library|null} null if the insert failed (error is logged)
     */
    static async createFromOld(oldLibrary) {
      const library = this.getFromOld(oldLibrary)
      // Nested create: folder rows are inserted alongside the library
      library.libraryFolders = oldLibrary.folders.map(folder => {
        return {
          id: folder.id,
          path: folder.fullPath
        }
      })
      return this.create(library, {
        include: sequelize.models.libraryFolder
      }).catch((error) => {
        Logger.error(`[Library] Failed to create library ${library.id}`, error)
        return null
      })
    }

    /**
     * Sync an existing library row (and its folders) with a legacy Library:
     * adds new folders, updates changed paths, removes missing folders, then
     * updates the library attributes themselves.
     * @param {object} oldLibrary
     * @returns {Promise<Library|null>} updated row, or null if not found
     */
    static async updateFromOld(oldLibrary) {
      const existingLibrary = await this.findByPk(oldLibrary.id, {
        include: sequelize.models.libraryFolder
      })
      if (!existingLibrary) {
        Logger.error(`[Library] Failed to update library ${oldLibrary.id} - not found`)
        return null
      }

      const library = this.getFromOld(oldLibrary)

      const libraryFolders = oldLibrary.folders.map(folder => {
        return {
          id: folder.id,
          path: folder.fullPath,
          libraryId: library.id
        }
      })
      // Upsert folders by id: create missing ones, update changed paths
      for (const libraryFolder of libraryFolders) {
        const existingLibraryFolder = existingLibrary.libraryFolders.find(lf => lf.id === libraryFolder.id)
        if (!existingLibraryFolder) {
          await sequelize.models.libraryFolder.create(libraryFolder)
        } else if (existingLibraryFolder.path !== libraryFolder.path) {
          await existingLibraryFolder.update({ path: libraryFolder.path })
        }
      }

      // Remove folders no longer present on the legacy library
      const libraryFoldersRemoved = existingLibrary.libraryFolders.filter(lf => !libraryFolders.some(_lf => _lf.id === lf.id))
      for (const existingLibraryFolder of libraryFoldersRemoved) {
        await existingLibraryFolder.destroy()
      }

      return existingLibrary.update(library)
    }

    /**
     * Map a legacy Library object onto this model's columns.
     * @param {object} oldLibrary
     * @returns {object} attribute bag suitable for create/update
     */
    static getFromOld(oldLibrary) {
      return {
        id: oldLibrary.id,
        name: oldLibrary.name,
        displayOrder: oldLibrary.displayOrder,
        icon: oldLibrary.icon || null,
        mediaType: oldLibrary.mediaType || null,
        provider: oldLibrary.provider,
        settings: oldLibrary.settings?.toJSON() || {},
        createdAt: oldLibrary.createdAt,
        updatedAt: oldLibrary.lastUpdate
      }
    }

    /**
     * Delete a library row by id.
     * @param {string} libraryId
     * @returns {Promise<number>} number of destroyed rows
     */
    static removeById(libraryId) {
      return this.destroy({
        where: {
          id: libraryId
        }
      })
    }
  }

  Library.init({
    id: {
      type: DataTypes.UUID,
      defaultValue: DataTypes.UUIDV4,
      primaryKey: true
    },
    name: DataTypes.STRING,
    displayOrder: DataTypes.INTEGER,
    icon: DataTypes.STRING,
    mediaType: DataTypes.STRING,
    provider: DataTypes.STRING,
    lastScan: DataTypes.DATE,
    lastScanVersion: DataTypes.STRING,
    settings: DataTypes.JSON
  }, {
    sequelize,
    modelName: 'library'
  })

  return Library
}

View File

@ -0,0 +1,25 @@
const { DataTypes, Model } = require('sequelize')
module.exports = (sequelize) => {
class LibraryFolder extends Model { }
LibraryFolder.init({
id: {
type: DataTypes.UUID,
defaultValue: DataTypes.UUIDV4,
primaryKey: true
},
path: DataTypes.STRING
}, {
sequelize,
modelName: 'libraryFolder'
})
const { library } = sequelize.models
library.hasMany(LibraryFolder, {
onDelete: 'CASCADE'
})
LibraryFolder.belongsTo(library)
return LibraryFolder
}

View File

@ -0,0 +1,380 @@
const { DataTypes, Model } = require('sequelize')
const Logger = require('../Logger')
const oldLibraryItem = require('../objects/LibraryItem')
const { areEquivalent } = require('../utils/index')

module.exports = (sequelize) => {
  // LibraryItem model. Media is polymorphic: (mediaId, mediaType) points at
  // either a book or a podcast row. Bridges the new schema and the legacy
  // in-memory LibraryItem objects.
  class LibraryItem extends Model {
    /**
     * Load every library item with its media fully expanded (book + authors +
     * series, or podcast + episodes) and map each to a legacy LibraryItem.
     * @returns {Promise<oldLibraryItem[]>}
     */
    static async getAllOldLibraryItems() {
      let libraryItems = await this.findAll({
        include: [
          {
            model: sequelize.models.book,
            include: [
              {
                model: sequelize.models.author,
                through: {
                  attributes: []
                }
              },
              {
                model: sequelize.models.series,
                through: {
                  attributes: ['sequence']
                }
              }
            ]
          },
          {
            model: sequelize.models.podcast,
            include: [
              {
                model: sequelize.models.podcastEpisode
              }
            ]
          }
        ]
      })
      return libraryItems.map(ti => this.getOldLibraryItem(ti))
    }

    /**
     * Convert an expanded library item row into a legacy LibraryItem object.
     * @param {LibraryItem} libraryItemExpanded - row with media eager-loaded
     * @returns {oldLibraryItem}
     */
    static getOldLibraryItem(libraryItemExpanded) {
      let media = null
      if (libraryItemExpanded.mediaType === 'book') {
        media = sequelize.models.book.getOldBook(libraryItemExpanded)
      } else if (libraryItemExpanded.mediaType === 'podcast') {
        media = sequelize.models.podcast.getOldPodcast(libraryItemExpanded)
      }

      return new oldLibraryItem({
        id: libraryItemExpanded.id,
        ino: libraryItemExpanded.ino,
        libraryId: libraryItemExpanded.libraryId,
        folderId: libraryItemExpanded.libraryFolderId,
        path: libraryItemExpanded.path,
        relPath: libraryItemExpanded.relPath,
        isFile: libraryItemExpanded.isFile,
        mtimeMs: libraryItemExpanded.mtime?.valueOf(),
        ctimeMs: libraryItemExpanded.ctime?.valueOf(),
        birthtimeMs: libraryItemExpanded.birthtime?.valueOf(),
        addedAt: libraryItemExpanded.createdAt.valueOf(),
        updatedAt: libraryItemExpanded.updatedAt.valueOf(),
        lastScan: libraryItemExpanded.lastScan?.valueOf(),
        scanVersion: libraryItemExpanded.lastScanVersion,
        isMissing: !!libraryItemExpanded.isMissing,
        isInvalid: !!libraryItemExpanded.isInvalid,
        mediaType: libraryItemExpanded.mediaType,
        media,
        libraryFiles: libraryItemExpanded.libraryFiles
      })
    }

    /**
     * Create a library item row plus its media row (book or podcast) and all
     * join/child rows (bookAuthor, bookSeries, podcastEpisode) from a legacy
     * LibraryItem.
     * @param {object} oldLibraryItem
     * @returns {Promise<LibraryItem>} the newly created library item row
     */
    static async fullCreateFromOld(oldLibraryItem) {
      const newLibraryItem = await this.create(this.getFromOld(oldLibraryItem))

      if (oldLibraryItem.mediaType === 'book') {
        const bookObj = sequelize.models.book.getFromOld(oldLibraryItem.media)
        bookObj.libraryItemId = newLibraryItem.id
        const newBook = await sequelize.models.book.create(bookObj)

        const oldBookAuthors = oldLibraryItem.media.metadata.authors || []
        const oldBookSeriesAll = oldLibraryItem.media.metadata.series || []

        for (const oldBookAuthor of oldBookAuthors) {
          await sequelize.models.bookAuthor.create({ authorId: oldBookAuthor.id, bookId: newBook.id })
        }
        for (const oldSeries of oldBookSeriesAll) {
          await sequelize.models.bookSeries.create({ seriesId: oldSeries.id, bookId: newBook.id, sequence: oldSeries.sequence })
        }
      } else if (oldLibraryItem.mediaType === 'podcast') {
        const podcastObj = sequelize.models.podcast.getFromOld(oldLibraryItem.media)
        podcastObj.libraryItemId = newLibraryItem.id
        const newPodcast = await sequelize.models.podcast.create(podcastObj)

        const oldEpisodes = oldLibraryItem.media.episodes || []
        for (const oldEpisode of oldEpisodes) {
          const episodeObj = sequelize.models.podcastEpisode.getFromOld(oldEpisode)
          episodeObj.libraryItemId = newLibraryItem.id
          episodeObj.podcastId = newPodcast.id
          await sequelize.models.podcastEpisode.create(episodeObj)
        }
      }

      return newLibraryItem
    }

    /**
     * Diff a legacy LibraryItem against the stored rows and persist every
     * difference: episodes added/removed/changed, authors and series
     * added/removed, series sequence changes, media attribute changes and
     * library item attribute changes.
     * @param {object} oldLibraryItem
     * @returns {Promise<boolean>} true if anything was written
     */
    static async fullUpdateFromOld(oldLibraryItem) {
      const libraryItemExpanded = await this.findByPk(oldLibraryItem.id, {
        include: [
          {
            model: sequelize.models.book,
            include: [
              {
                model: sequelize.models.author,
                through: {
                  attributes: []
                }
              },
              {
                model: sequelize.models.series,
                through: {
                  attributes: ['sequence']
                }
              }
            ]
          },
          {
            model: sequelize.models.podcast,
            include: [
              {
                model: sequelize.models.podcastEpisode
              }
            ]
          }
        ]
      })
      if (!libraryItemExpanded) return false

      let hasUpdates = false

      // Check update Book/Podcast
      if (libraryItemExpanded.media) {
        let updatedMedia = null

        if (libraryItemExpanded.mediaType === 'podcast') {
          updatedMedia = sequelize.models.podcast.getFromOld(oldLibraryItem.media)

          const existingPodcastEpisodes = libraryItemExpanded.media.podcastEpisodes || []
          const updatedPodcastEpisodes = oldLibraryItem.media.episodes || []

          for (const existingPodcastEpisode of existingPodcastEpisodes) {
            // Episode was removed
            if (!updatedPodcastEpisodes.some(ep => ep.id === existingPodcastEpisode.id)) {
              Logger.dev(`[LibraryItem] "${libraryItemExpanded.media.title}" episode "${existingPodcastEpisode.title}" was removed`)
              await existingPodcastEpisode.destroy()
              hasUpdates = true
            }
          }

          for (const updatedPodcastEpisode of updatedPodcastEpisodes) {
            const existingEpisodeMatch = existingPodcastEpisodes.find(ep => ep.id === updatedPodcastEpisode.id)
            if (!existingEpisodeMatch) {
              Logger.dev(`[LibraryItem] "${libraryItemExpanded.media.title}" episode "${updatedPodcastEpisode.title}" was added`)
              await sequelize.models.podcastEpisode.createFromOld(updatedPodcastEpisode)
              hasUpdates = true
            } else {
              // Compare column-by-column; Date values are compared by epoch ms
              const updatedEpisodeCleaned = sequelize.models.podcastEpisode.getFromOld(updatedPodcastEpisode)
              let episodeHasUpdates = false
              for (const key in updatedEpisodeCleaned) {
                let existingValue = existingEpisodeMatch[key]
                if (existingValue instanceof Date) existingValue = existingValue.valueOf()

                if (!areEquivalent(updatedEpisodeCleaned[key], existingValue, true)) {
                  Logger.dev(`[LibraryItem] "${libraryItemExpanded.media.title}" episode "${existingEpisodeMatch.title}" ${key} was updated from "${existingValue}" to "${updatedEpisodeCleaned[key]}"`)
                  episodeHasUpdates = true
                }
              }
              if (episodeHasUpdates) {
                await existingEpisodeMatch.update(updatedEpisodeCleaned)
                hasUpdates = true
              }
            }
          }
        } else if (libraryItemExpanded.mediaType === 'book') {
          updatedMedia = sequelize.models.book.getFromOld(oldLibraryItem.media)

          const existingAuthors = libraryItemExpanded.media.authors || []
          const existingSeriesAll = libraryItemExpanded.media.series || []
          const updatedAuthors = oldLibraryItem.media.metadata.authors || []
          const updatedSeriesAll = oldLibraryItem.media.metadata.series || []

          for (const existingAuthor of existingAuthors) {
            // Author was removed from Book
            if (!updatedAuthors.some(au => au.id === existingAuthor.id)) {
              Logger.dev(`[LibraryItem] "${libraryItemExpanded.media.title}" author "${existingAuthor.name}" was removed`)
              await sequelize.models.bookAuthor.removeByIds(existingAuthor.id, libraryItemExpanded.media.id)
              hasUpdates = true
            }
          }
          for (const updatedAuthor of updatedAuthors) {
            // Author was added
            if (!existingAuthors.some(au => au.id === updatedAuthor.id)) {
              Logger.dev(`[LibraryItem] "${libraryItemExpanded.media.title}" author "${updatedAuthor.name}" was added`)
              await sequelize.models.bookAuthor.create({ authorId: updatedAuthor.id, bookId: libraryItemExpanded.media.id })
              hasUpdates = true
            }
          }
          for (const existingSeries of existingSeriesAll) {
            // Series was removed
            if (!updatedSeriesAll.some(se => se.id === existingSeries.id)) {
              Logger.dev(`[LibraryItem] "${libraryItemExpanded.media.title}" series "${existingSeries.name}" was removed`)
              await sequelize.models.bookSeries.removeByIds(existingSeries.id, libraryItemExpanded.media.id)
              hasUpdates = true
            }
          }
          for (const updatedSeries of updatedSeriesAll) {
            // Series was added/updated
            const existingSeriesMatch = existingSeriesAll.find(se => se.id === updatedSeries.id)
            if (!existingSeriesMatch) {
              Logger.dev(`[LibraryItem] "${libraryItemExpanded.media.title}" series "${updatedSeries.name}" was added`)
              await sequelize.models.bookSeries.create({ seriesId: updatedSeries.id, bookId: libraryItemExpanded.media.id, sequence: updatedSeries.sequence })
              hasUpdates = true
            } else if (existingSeriesMatch.bookSeries.sequence !== updatedSeries.sequence) {
              Logger.dev(`[LibraryItem] "${libraryItemExpanded.media.title}" series "${updatedSeries.name}" sequence was updated from "${existingSeriesMatch.bookSeries.sequence}" to "${updatedSeries.sequence}"`)
              await existingSeriesMatch.bookSeries.update({ sequence: updatedSeries.sequence })
              hasUpdates = true
            }
          }
        }

        // Persist media attribute changes detected column-by-column
        let hasMediaUpdates = false
        for (const key in updatedMedia) {
          let existingValue = libraryItemExpanded.media[key]
          if (existingValue instanceof Date) existingValue = existingValue.valueOf()

          if (!areEquivalent(updatedMedia[key], existingValue, true)) {
            Logger.dev(`[LibraryItem] "${libraryItemExpanded.media.title}" ${libraryItemExpanded.mediaType}.${key} updated from ${existingValue} to ${updatedMedia[key]}`)
            hasMediaUpdates = true
          }
        }
        if (hasMediaUpdates && updatedMedia) {
          await libraryItemExpanded.media.update(updatedMedia)
          hasUpdates = true
        }
      }

      // Persist library item attribute changes detected column-by-column
      const updatedLibraryItem = this.getFromOld(oldLibraryItem)
      let hasLibraryItemUpdates = false
      for (const key in updatedLibraryItem) {
        let existingValue = libraryItemExpanded[key]
        if (existingValue instanceof Date) existingValue = existingValue.valueOf()

        if (!areEquivalent(updatedLibraryItem[key], existingValue, true)) {
          Logger.dev(`[LibraryItem] "${libraryItemExpanded.media.title}" ${key} updated from ${existingValue} to ${updatedLibraryItem[key]}`)
          hasLibraryItemUpdates = true
        }
      }
      if (hasLibraryItemUpdates) {
        await libraryItemExpanded.update(updatedLibraryItem)
        Logger.info(`[LibraryItem] Library item "${libraryItemExpanded.id}" updated`)
        hasUpdates = true
      }
      return hasUpdates
    }

    /**
     * Map a legacy LibraryItem onto this model's columns.
     * @param {object} oldLibraryItem
     * @returns {object} attribute bag suitable for create/update
     */
    static getFromOld(oldLibraryItem) {
      return {
        id: oldLibraryItem.id,
        ino: oldLibraryItem.ino,
        path: oldLibraryItem.path,
        relPath: oldLibraryItem.relPath,
        mediaId: oldLibraryItem.media.id,
        mediaType: oldLibraryItem.mediaType,
        isFile: !!oldLibraryItem.isFile,
        isMissing: !!oldLibraryItem.isMissing,
        isInvalid: !!oldLibraryItem.isInvalid,
        mtime: oldLibraryItem.mtimeMs,
        ctime: oldLibraryItem.ctimeMs,
        birthtime: oldLibraryItem.birthtimeMs,
        lastScan: oldLibraryItem.lastScan,
        lastScanVersion: oldLibraryItem.scanVersion,
        libraryId: oldLibraryItem.libraryId,
        libraryFolderId: oldLibraryItem.folderId,
        libraryFiles: oldLibraryItem.libraryFiles?.map(lf => lf.toJSON()) || []
      }
    }

    /**
     * Delete a library item row by id. individualHooks ensures the
     * afterDestroy hook below runs so the media row is removed too.
     * @param {string} libraryItemId
     * @returns {Promise<number>} number of destroyed rows
     */
    static removeById(libraryItemId) {
      return this.destroy({
        where: {
          id: libraryItemId
        },
        individualHooks: true
      })
    }

    /**
     * Fetch the polymorphic media row (book or podcast) via the lazy-loading
     * mixin Sequelize generates for each belongsTo association.
     * @param {object} options - passed through to the association getter
     * @returns {Promise<Model|null>}
     */
    getMedia(options) {
      if (!this.mediaType) return Promise.resolve(null)
      const mixinMethodName = `get${sequelize.uppercaseFirst(this.mediaType)}`
      return this[mixinMethodName](options)
    }
  }

  LibraryItem.init({
    id: {
      type: DataTypes.UUID,
      defaultValue: DataTypes.UUIDV4,
      primaryKey: true
    },
    ino: DataTypes.STRING,
    path: DataTypes.STRING,
    relPath: DataTypes.STRING,
    mediaId: DataTypes.UUIDV4,
    mediaType: DataTypes.STRING,
    isFile: DataTypes.BOOLEAN,
    isMissing: DataTypes.BOOLEAN,
    isInvalid: DataTypes.BOOLEAN,
    mtime: DataTypes.DATE(6),
    ctime: DataTypes.DATE(6),
    birthtime: DataTypes.DATE(6),
    lastScan: DataTypes.DATE,
    lastScanVersion: DataTypes.STRING,
    libraryFiles: DataTypes.JSON
  }, {
    sequelize,
    modelName: 'libraryItem'
  })

  const { library, libraryFolder, book, podcast } = sequelize.models
  library.hasMany(LibraryItem)
  LibraryItem.belongsTo(library)

  libraryFolder.hasMany(LibraryItem)
  LibraryItem.belongsTo(libraryFolder)

  // Polymorphic media association (constraints disabled because mediaId can
  // reference either table)
  book.hasOne(LibraryItem, {
    foreignKey: 'mediaId',
    constraints: false,
    scope: {
      mediaType: 'book'
    }
  })
  LibraryItem.belongsTo(book, { foreignKey: 'mediaId', constraints: false })

  podcast.hasOne(LibraryItem, {
    foreignKey: 'mediaId',
    constraints: false,
    scope: {
      mediaType: 'podcast'
    }
  })
  LibraryItem.belongsTo(podcast, { foreignKey: 'mediaId', constraints: false })

  // Normalize eager-loaded results: expose the matching association as
  // `media` and strip the raw book/podcast keys to prevent misuse.
  LibraryItem.addHook('afterFind', findResult => {
    if (!findResult) return

    if (!Array.isArray(findResult)) findResult = [findResult]

    for (const instance of findResult) {
      if (instance.mediaType === 'book' && instance.book !== undefined) {
        instance.media = instance.book
        instance.dataValues.media = instance.dataValues.book
      } else if (instance.mediaType === 'podcast' && instance.podcast !== undefined) {
        instance.media = instance.podcast
        instance.dataValues.media = instance.dataValues.podcast
      }
      // To prevent mistakes:
      delete instance.book
      delete instance.dataValues.book
      delete instance.podcast
      delete instance.dataValues.podcast
    }
  })

  // Cascade: destroying a library item also destroys its media row.
  // NOTE(review): media.destroy() is not awaited, so the hook may resolve
  // before the media row is actually deleted — confirm this is intentional.
  LibraryItem.addHook('afterDestroy', async instance => {
    if (!instance) return
    const media = await instance.getMedia()
    if (media) {
      media.destroy()
    }
  })

  return LibraryItem
}

View File

@ -0,0 +1,143 @@
const { DataTypes, Model } = require('sequelize')
/*
* Polymorphic association: https://sequelize.org/docs/v6/advanced-association-concepts/polymorphic-associations/
* Book has many MediaProgress. PodcastEpisode has many MediaProgress.
*/
module.exports = (sequelize) => {
class MediaProgress extends Model {
getOldMediaProgress() {
const isPodcastEpisode = this.mediaItemType === 'podcastEpisode'
return {
id: this.id,
userId: this.userId,
libraryItemId: this.extraData?.libraryItemId || null,
episodeId: isPodcastEpisode ? this.mediaItemId : null,
mediaItemId: this.mediaItemId,
mediaItemType: this.mediaItemType,
duration: this.duration,
progress: this.extraData?.progress || null,
currentTime: this.currentTime,
isFinished: !!this.isFinished,
hideFromContinueListening: !!this.hideFromContinueListening,
ebookLocation: this.ebookLocation,
ebookProgress: this.ebookProgress,
lastUpdate: this.updatedAt.valueOf(),
startedAt: this.createdAt.valueOf(),
finishedAt: this.finishedAt?.valueOf() || null
}
}
static upsertFromOld(oldMediaProgress) {
const mediaProgress = this.getFromOld(oldMediaProgress)
return this.upsert(mediaProgress)
}
static getFromOld(oldMediaProgress) {
return {
id: oldMediaProgress.id,
userId: oldMediaProgress.userId,
mediaItemId: oldMediaProgress.mediaItemId,
mediaItemType: oldMediaProgress.mediaItemType,
duration: oldMediaProgress.duration,
currentTime: oldMediaProgress.currentTime,
ebookLocation: oldMediaProgress.ebookLocation || null,
ebookProgress: oldMediaProgress.ebookProgress || null,
isFinished: !!oldMediaProgress.isFinished,
hideFromContinueListening: !!oldMediaProgress.hideFromContinueListening,
finishedAt: oldMediaProgress.finishedAt,
createdAt: oldMediaProgress.startedAt || oldMediaProgress.lastUpdate,
updatedAt: oldMediaProgress.lastUpdate,
extraData: {
libraryItemId: oldMediaProgress.libraryItemId,
progress: oldMediaProgress.progress
}
}
}
static removeById(mediaProgressId) {
return this.destroy({
where: {
id: mediaProgressId
}
})
}
getMediaItem(options) {
if (!this.mediaItemType) return Promise.resolve(null)
const mixinMethodName = `get${sequelize.uppercaseFirst(this.mediaItemType)}`
return this[mixinMethodName](options)
}
}
MediaProgress.init({
id: {
type: DataTypes.UUID,
defaultValue: DataTypes.UUIDV4,
primaryKey: true
},
mediaItemId: DataTypes.UUIDV4,
mediaItemType: DataTypes.STRING,
duration: DataTypes.FLOAT,
currentTime: DataTypes.FLOAT,
isFinished: DataTypes.BOOLEAN,
hideFromContinueListening: DataTypes.BOOLEAN,
ebookLocation: DataTypes.STRING,
ebookProgress: DataTypes.FLOAT,
finishedAt: DataTypes.DATE,
extraData: DataTypes.JSON
}, {
sequelize,
modelName: 'mediaProgress'
})
const { book, podcastEpisode, user } = sequelize.models
book.hasMany(MediaProgress, {
foreignKey: 'mediaItemId',
constraints: false,
scope: {
mediaItemType: 'book'
}
})
MediaProgress.belongsTo(book, { foreignKey: 'mediaItemId', constraints: false })
podcastEpisode.hasMany(MediaProgress, {
foreignKey: 'mediaItemId',
constraints: false,
scope: {
mediaItemType: 'podcastEpisode'
}
})
MediaProgress.belongsTo(podcastEpisode, { foreignKey: 'mediaItemId', constraints: false })
MediaProgress.addHook('afterFind', findResult => {
if (!findResult) return
if (!Array.isArray(findResult)) findResult = [findResult]
for (const instance of findResult) {
if (instance.mediaItemType === 'book' && instance.book !== undefined) {
instance.mediaItem = instance.book
instance.dataValues.mediaItem = instance.dataValues.book
} else if (instance.mediaItemType === 'podcastEpisode' && instance.podcastEpisode !== undefined) {
instance.mediaItem = instance.podcastEpisode
instance.dataValues.mediaItem = instance.dataValues.podcastEpisode
}
// To prevent mistakes:
delete instance.book
delete instance.dataValues.book
delete instance.podcastEpisode
delete instance.dataValues.podcastEpisode
}
})
user.hasMany(MediaProgress, {
onDelete: 'CASCADE'
})
MediaProgress.belongsTo(user)
return MediaProgress
}

View File

@ -0,0 +1,198 @@
const { DataTypes, Model } = require('sequelize')
const oldPlaybackSession = require('../objects/PlaybackSession')

module.exports = (sequelize) => {
  // PlaybackSession model. Media item is polymorphic: (mediaItemId,
  // mediaItemType) points at either a book or a podcastEpisode row.
  class PlaybackSession extends Model {
    /**
     * Load playback sessions (with device) mapped to legacy objects.
     * @param {object|null} where - optional sequelize where clause
     * @returns {Promise<oldPlaybackSession[]>}
     */
    static async getOldPlaybackSessions(where = null) {
      const playbackSessions = await this.findAll({
        where,
        include: [
          {
            model: sequelize.models.device
          }
        ]
      })
      return playbackSessions.map(session => this.getOldPlaybackSession(session))
    }

    /**
     * Load one playback session by id as a legacy object.
     * @param {string} sessionId
     * @returns {Promise<oldPlaybackSession|null>} null if not found
     */
    static async getById(sessionId) {
      const playbackSession = await this.findByPk(sessionId, {
        include: [
          {
            model: sequelize.models.device
          }
        ]
      })
      if (!playbackSession) return null
      return this.getOldPlaybackSession(playbackSession)
    }

    /**
     * Convert a session row (with device included) into a legacy
     * PlaybackSession object. Chapters are not stored in the new schema and
     * are always null here.
     * @param {PlaybackSession} playbackSessionExpanded
     * @returns {oldPlaybackSession}
     */
    static getOldPlaybackSession(playbackSessionExpanded) {
      const isPodcastEpisode = playbackSessionExpanded.mediaItemType === 'podcastEpisode'

      return new oldPlaybackSession({
        id: playbackSessionExpanded.id,
        userId: playbackSessionExpanded.userId,
        libraryId: playbackSessionExpanded.libraryId,
        libraryItemId: playbackSessionExpanded.extraData?.libraryItemId || null,
        bookId: isPodcastEpisode ? null : playbackSessionExpanded.mediaItemId,
        episodeId: isPodcastEpisode ? playbackSessionExpanded.mediaItemId : null,
        mediaType: isPodcastEpisode ? 'podcast' : 'book',
        mediaMetadata: playbackSessionExpanded.mediaMetadata,
        chapters: null,
        displayTitle: playbackSessionExpanded.displayTitle,
        displayAuthor: playbackSessionExpanded.displayAuthor,
        coverPath: playbackSessionExpanded.coverPath,
        duration: playbackSessionExpanded.duration,
        playMethod: playbackSessionExpanded.playMethod,
        mediaPlayer: playbackSessionExpanded.mediaPlayer,
        deviceInfo: playbackSessionExpanded.device?.getOldDevice() || null,
        serverVersion: playbackSessionExpanded.serverVersion,
        date: playbackSessionExpanded.date,
        dayOfWeek: playbackSessionExpanded.dayOfWeek,
        timeListening: playbackSessionExpanded.timeListening,
        startTime: playbackSessionExpanded.startTime,
        currentTime: playbackSessionExpanded.currentTime,
        startedAt: playbackSessionExpanded.createdAt.valueOf(),
        updatedAt: playbackSessionExpanded.updatedAt.valueOf()
      })
    }

    /**
     * Delete a playback session row by id.
     * @param {string} sessionId
     * @returns {Promise<number>} number of destroyed rows
     */
    static removeById(sessionId) {
      return this.destroy({
        where: {
          id: sessionId
        }
      })
    }

    /**
     * Insert a playback session row from a legacy object.
     * @param {object} oldPlaybackSession
     * @returns {Promise<PlaybackSession>}
     */
    static createFromOld(oldPlaybackSession) {
      const playbackSession = this.getFromOld(oldPlaybackSession)
      return this.create(playbackSession)
    }

    /**
     * Update an existing playback session row from a legacy object.
     * @param {object} oldPlaybackSession
     * @returns {Promise} sequelize update result
     */
    static updateFromOld(oldPlaybackSession) {
      const playbackSession = this.getFromOld(oldPlaybackSession)
      return this.update(playbackSession, {
        where: {
          id: playbackSession.id
        }
      })
    }

    /**
     * Map a legacy PlaybackSession onto this model's columns. episodeId takes
     * precedence over bookId when determining the polymorphic media item.
     * @param {object} oldPlaybackSession
     * @returns {object} attribute bag suitable for create/update
     */
    static getFromOld(oldPlaybackSession) {
      return {
        id: oldPlaybackSession.id,
        mediaItemId: oldPlaybackSession.episodeId || oldPlaybackSession.bookId,
        mediaItemType: oldPlaybackSession.episodeId ? 'podcastEpisode' : 'book',
        libraryId: oldPlaybackSession.libraryId,
        displayTitle: oldPlaybackSession.displayTitle,
        displayAuthor: oldPlaybackSession.displayAuthor,
        duration: oldPlaybackSession.duration,
        playMethod: oldPlaybackSession.playMethod,
        mediaPlayer: oldPlaybackSession.mediaPlayer,
        startTime: oldPlaybackSession.startTime,
        currentTime: oldPlaybackSession.currentTime,
        serverVersion: oldPlaybackSession.serverVersion || null,
        createdAt: oldPlaybackSession.startedAt,
        updatedAt: oldPlaybackSession.updatedAt,
        userId: oldPlaybackSession.userId,
        deviceId: oldPlaybackSession.deviceInfo?.id || null,
        timeListening: oldPlaybackSession.timeListening,
        coverPath: oldPlaybackSession.coverPath,
        mediaMetadata: oldPlaybackSession.mediaMetadata,
        date: oldPlaybackSession.date,
        dayOfWeek: oldPlaybackSession.dayOfWeek,
        extraData: {
          libraryItemId: oldPlaybackSession.libraryItemId
        }
      }
    }

    /**
     * Fetch the polymorphic media item (book or podcastEpisode) via the
     * association getter mixin.
     * @param {object} options - passed through to the association getter
     * @returns {Promise<Model|null>}
     */
    getMediaItem(options) {
      if (!this.mediaItemType) return Promise.resolve(null)
      const mixinMethodName = `get${sequelize.uppercaseFirst(this.mediaItemType)}`
      return this[mixinMethodName](options)
    }
  }

  PlaybackSession.init({
    id: {
      type: DataTypes.UUID,
      defaultValue: DataTypes.UUIDV4,
      primaryKey: true
    },
    mediaItemId: DataTypes.UUIDV4,
    mediaItemType: DataTypes.STRING,
    displayTitle: DataTypes.STRING,
    displayAuthor: DataTypes.STRING,
    duration: DataTypes.FLOAT,
    playMethod: DataTypes.INTEGER,
    mediaPlayer: DataTypes.STRING,
    startTime: DataTypes.FLOAT,
    currentTime: DataTypes.FLOAT,
    serverVersion: DataTypes.STRING,
    coverPath: DataTypes.STRING,
    timeListening: DataTypes.INTEGER,
    mediaMetadata: DataTypes.JSON,
    date: DataTypes.STRING,
    dayOfWeek: DataTypes.STRING,
    extraData: DataTypes.JSON
  }, {
    sequelize,
    modelName: 'playbackSession'
  })

  const { book, podcastEpisode, user, device, library } = sequelize.models
  user.hasMany(PlaybackSession)
  PlaybackSession.belongsTo(user)

  device.hasMany(PlaybackSession)
  PlaybackSession.belongsTo(device)

  library.hasMany(PlaybackSession)
  PlaybackSession.belongsTo(library)

  // Polymorphic media item association (constraints disabled because
  // mediaItemId can reference either table)
  book.hasMany(PlaybackSession, {
    foreignKey: 'mediaItemId',
    constraints: false,
    scope: {
      mediaItemType: 'book'
    }
  })
  PlaybackSession.belongsTo(book, { foreignKey: 'mediaItemId', constraints: false })

  // NOTE(review): hasOne here vs book.hasMany above — an episode can
  // presumably have many playback sessions; confirm hasOne is intentional
  podcastEpisode.hasOne(PlaybackSession, {
    foreignKey: 'mediaItemId',
    constraints: false,
    scope: {
      mediaItemType: 'podcastEpisode'
    }
  })
  PlaybackSession.belongsTo(podcastEpisode, { foreignKey: 'mediaItemId', constraints: false })

  // Normalize eager-loaded results: expose the matching association as
  // `mediaItem` and strip the raw book/podcastEpisode keys.
  PlaybackSession.addHook('afterFind', findResult => {
    if (!findResult) return

    if (!Array.isArray(findResult)) findResult = [findResult]

    for (const instance of findResult) {
      if (instance.mediaItemType === 'book' && instance.book !== undefined) {
        instance.mediaItem = instance.book
        instance.dataValues.mediaItem = instance.dataValues.book
      } else if (instance.mediaItemType === 'podcastEpisode' && instance.podcastEpisode !== undefined) {
        instance.mediaItem = instance.podcastEpisode
        instance.dataValues.mediaItem = instance.dataValues.podcastEpisode
      }
      // To prevent mistakes:
      delete instance.book
      delete instance.dataValues.book
      delete instance.podcastEpisode
      delete instance.dataValues.podcastEpisode
    }
  })

  return PlaybackSession
}

172
server/models/Playlist.js Normal file
View File

@ -0,0 +1,172 @@
const { DataTypes, Model } = require('sequelize')
const Logger = require('../Logger')
const oldPlaylist = require('../objects/Playlist')
const { areEquivalent } = require('../utils/index')
module.exports = (sequelize) => {
class Playlist extends Model {
static async getOldPlaylists() {
const playlists = await this.findAll({
include: {
model: sequelize.models.playlistMediaItem,
include: [
{
model: sequelize.models.book,
include: sequelize.models.libraryItem
},
{
model: sequelize.models.podcastEpisode,
include: {
model: sequelize.models.podcast,
include: sequelize.models.libraryItem
}
}
]
},
order: [['playlistMediaItems', 'order', 'ASC']]
})
return playlists.map(p => this.getOldPlaylist(p))
}
static getOldPlaylist(playlistExpanded) {
const items = playlistExpanded.playlistMediaItems.map(pmi => {
const libraryItemId = pmi.mediaItem?.podcast?.libraryItem?.id || pmi.mediaItem?.libraryItem?.id || null
if (!libraryItemId) {
Logger.error(`[Playlist] Invalid playlist media item - No library item id found`, JSON.stringify(pmi, null, 2))
return null
}
return {
episodeId: pmi.mediaItemType === 'podcastEpisode' ? pmi.mediaItemId : '',
libraryItemId
}
}).filter(pmi => pmi)
return new oldPlaylist({
id: playlistExpanded.id,
libraryId: playlistExpanded.libraryId,
userId: playlistExpanded.userId,
name: playlistExpanded.name,
description: playlistExpanded.description,
items,
lastUpdate: playlistExpanded.updatedAt.valueOf(),
createdAt: playlistExpanded.createdAt.valueOf()
})
}
static createFromOld(oldPlaylist) {
const playlist = this.getFromOld(oldPlaylist)
return this.create(playlist)
}
static async fullUpdateFromOld(oldPlaylist, playlistMediaItems) {
const existingPlaylist = await this.findByPk(oldPlaylist.id, {
include: sequelize.models.playlistMediaItem
})
if (!existingPlaylist) return false
let hasUpdates = false
const playlist = this.getFromOld(oldPlaylist)
for (const pmi of playlistMediaItems) {
const existingPmi = existingPlaylist.playlistMediaItems.find(i => i.mediaItemId === pmi.mediaItemId)
if (!existingPmi) {
await sequelize.models.playlistMediaItem.create(pmi)
hasUpdates = true
} else if (existingPmi.order != pmi.order) {
await existingPmi.update({ order: pmi.order })
hasUpdates = true
}
}
for (const pmi of existingPlaylist.playlistMediaItems) {
// Pmi was removed
if (!playlistMediaItems.some(i => i.mediaItemId === pmi.mediaItemId)) {
await pmi.destroy()
hasUpdates = true
}
}
let hasPlaylistUpdates = false
for (const key in playlist) {
let existingValue = existingPlaylist[key]
if (existingValue instanceof Date) existingValue = existingValue.valueOf()
if (!areEquivalent(playlist[key], existingValue)) {
hasPlaylistUpdates = true
}
}
if (hasPlaylistUpdates) {
existingPlaylist.update(playlist)
hasUpdates = true
}
return hasUpdates
}
static getFromOld(oldPlaylist) {
return {
id: oldPlaylist.id,
name: oldPlaylist.name,
description: oldPlaylist.description,
userId: oldPlaylist.userId,
libraryId: oldPlaylist.libraryId
}
}
static removeById(playlistId) {
return this.destroy({
where: {
id: playlistId
}
})
}
}
Playlist.init({
id: {
type: DataTypes.UUID,
defaultValue: DataTypes.UUIDV4,
primaryKey: true
},
name: DataTypes.STRING,
description: DataTypes.TEXT
}, {
sequelize,
modelName: 'playlist'
})
const { library, user } = sequelize.models
library.hasMany(Playlist)
Playlist.belongsTo(library)
user.hasMany(Playlist)
Playlist.belongsTo(user)
Playlist.addHook('afterFind', findResult => {
if (!findResult) return
if (!Array.isArray(findResult)) findResult = [findResult]
for (const instance of findResult) {
if (instance.playlistMediaItems?.length) {
instance.playlistMediaItems = instance.playlistMediaItems.map(pmi => {
if (pmi.mediaItemType === 'book' && pmi.book !== undefined) {
pmi.mediaItem = pmi.book
pmi.dataValues.mediaItem = pmi.dataValues.book
} else if (pmi.mediaItemType === 'podcastEpisode' && pmi.podcastEpisode !== undefined) {
pmi.mediaItem = pmi.podcastEpisode
pmi.dataValues.mediaItem = pmi.dataValues.podcastEpisode
}
// To prevent mistakes:
delete pmi.book
delete pmi.dataValues.book
delete pmi.podcastEpisode
delete pmi.dataValues.podcastEpisode
return pmi
})
}
}
})
return Playlist
}

View File

@ -0,0 +1,84 @@
const { DataTypes, Model } = require('sequelize')
// Join model linking a playlist to its media items (books or podcast
// episodes). `mediaItemId` is a polymorphic foreign key disambiguated by
// `mediaItemType`, so the book/podcastEpisode associations are declared with
// `constraints: false` (no real FK in the database).
module.exports = (sequelize) => {
class PlaylistMediaItem extends Model {
// Remove the join row for one media item within one playlist.
static removeByIds(playlistId, mediaItemId) {
return this.destroy({
where: {
playlistId,
mediaItemId
}
})
}
// Resolve the associated media item via the type-specific association
// mixin (`getBook` / `getPodcastEpisode`). Resolves to null when no
// mediaItemType is set.
// NOTE(review): relies on a custom `sequelize.uppercaseFirst` helper
// attached elsewhere in the project — confirm it exists on the instance.
getMediaItem(options) {
if (!this.mediaItemType) return Promise.resolve(null)
const mixinMethodName = `get${sequelize.uppercaseFirst(this.mediaItemType)}`
return this[mixinMethodName](options)
}
}
// `timestamps: true` with `updatedAt: false` keeps only createdAt.
// NOTE(review): `mediaItemId: DataTypes.UUIDV4` uses a default-value type
// as a column type; DataTypes.UUID is the conventional choice — verify the
// generated column type is as intended.
PlaylistMediaItem.init({
id: {
type: DataTypes.UUID,
defaultValue: DataTypes.UUIDV4,
primaryKey: true
},
mediaItemId: DataTypes.UUIDV4,
mediaItemType: DataTypes.STRING,
order: DataTypes.INTEGER
}, {
sequelize,
timestamps: true,
updatedAt: false,
modelName: 'playlistMediaItem'
})
const { book, podcastEpisode, playlist } = sequelize.models
// Scoped polymorphic associations: the scope pins mediaItemType when
// querying through the association.
book.hasMany(PlaylistMediaItem, {
foreignKey: 'mediaItemId',
constraints: false,
scope: {
mediaItemType: 'book'
}
})
PlaylistMediaItem.belongsTo(book, { foreignKey: 'mediaItemId', constraints: false })
// NOTE(review): books use hasMany but episodes use hasOne — an episode
// placed in multiple playlists may not load all join rows through this
// association; confirm the asymmetry is intended.
podcastEpisode.hasOne(PlaylistMediaItem, {
foreignKey: 'mediaItemId',
constraints: false,
scope: {
mediaItemType: 'podcastEpisode'
}
})
PlaylistMediaItem.belongsTo(podcastEpisode, { foreignKey: 'mediaItemId', constraints: false })
// After any fetch, collapse the type-specific include into a single
// `mediaItem` property on both the instance and its dataValues.
PlaylistMediaItem.addHook('afterFind', findResult => {
if (!findResult) return
// findOne returns a single instance; normalize to an array.
if (!Array.isArray(findResult)) findResult = [findResult]
for (const instance of findResult) {
if (instance.mediaItemType === 'book' && instance.book !== undefined) {
instance.mediaItem = instance.book
instance.dataValues.mediaItem = instance.dataValues.book
} else if (instance.mediaItemType === 'podcastEpisode' && instance.podcastEpisode !== undefined) {
instance.mediaItem = instance.podcastEpisode
instance.dataValues.mediaItem = instance.dataValues.podcastEpisode
}
// To prevent mistakes:
delete instance.book
delete instance.dataValues.book
delete instance.podcastEpisode
delete instance.dataValues.podcastEpisode
}
})
// Deleting a playlist cascades to its join rows.
playlist.hasMany(PlaylistMediaItem, {
onDelete: 'CASCADE'
})
PlaylistMediaItem.belongsTo(playlist)
return PlaylistMediaItem
}

98
server/models/Podcast.js Normal file
View File

@ -0,0 +1,98 @@
const { DataTypes, Model } = require('sequelize')
module.exports = (sequelize) => {
class Podcast extends Model {
static getOldPodcast(libraryItemExpanded) {
const podcastExpanded = libraryItemExpanded.media
const podcastEpisodes = podcastExpanded.podcastEpisodes.map(ep => ep.getOldPodcastEpisode(libraryItemExpanded.id)).sort((a, b) => a.index - b.index)
return {
id: podcastExpanded.id,
libraryItemId: libraryItemExpanded.id,
metadata: {
title: podcastExpanded.title,
author: podcastExpanded.author,
description: podcastExpanded.description,
releaseDate: podcastExpanded.releaseDate,
genres: podcastExpanded.genres,
feedUrl: podcastExpanded.feedURL,
imageUrl: podcastExpanded.imageURL,
itunesPageUrl: podcastExpanded.itunesPageURL,
itunesId: podcastExpanded.itunesId,
itunesArtistId: podcastExpanded.itunesArtistId,
explicit: podcastExpanded.explicit,
language: podcastExpanded.language,
type: podcastExpanded.podcastType
},
coverPath: podcastExpanded.coverPath,
tags: podcastExpanded.tags,
episodes: podcastEpisodes,
autoDownloadEpisodes: podcastExpanded.autoDownloadEpisodes,
autoDownloadSchedule: podcastExpanded.autoDownloadSchedule,
lastEpisodeCheck: podcastExpanded.lastEpisodeCheck?.valueOf() || null,
maxEpisodesToKeep: podcastExpanded.maxEpisodesToKeep,
maxNewEpisodesToDownload: podcastExpanded.maxNewEpisodesToDownload
}
}
static getFromOld(oldPodcast) {
const oldPodcastMetadata = oldPodcast.metadata
return {
id: oldPodcast.id,
title: oldPodcastMetadata.title,
author: oldPodcastMetadata.author,
releaseDate: oldPodcastMetadata.releaseDate,
feedURL: oldPodcastMetadata.feedUrl,
imageURL: oldPodcastMetadata.imageUrl,
description: oldPodcastMetadata.description,
itunesPageURL: oldPodcastMetadata.itunesPageUrl,
itunesId: oldPodcastMetadata.itunesId,
itunesArtistId: oldPodcastMetadata.itunesArtistId,
language: oldPodcastMetadata.language,
podcastType: oldPodcastMetadata.type,
explicit: !!oldPodcastMetadata.explicit,
autoDownloadEpisodes: !!oldPodcast.autoDownloadEpisodes,
autoDownloadSchedule: oldPodcast.autoDownloadSchedule,
lastEpisodeCheck: oldPodcast.lastEpisodeCheck,
maxEpisodesToKeep: oldPodcast.maxEpisodesToKeep,
maxNewEpisodesToDownload: oldPodcast.maxNewEpisodesToDownload,
coverPath: oldPodcast.coverPath,
tags: oldPodcast.tags,
genres: oldPodcastMetadata.genres
}
}
}
Podcast.init({
id: {
type: DataTypes.UUID,
defaultValue: DataTypes.UUIDV4,
primaryKey: true
},
title: DataTypes.STRING,
author: DataTypes.STRING,
releaseDate: DataTypes.STRING,
feedURL: DataTypes.STRING,
imageURL: DataTypes.STRING,
description: DataTypes.TEXT,
itunesPageURL: DataTypes.STRING,
itunesId: DataTypes.STRING,
itunesArtistId: DataTypes.STRING,
language: DataTypes.STRING,
podcastType: DataTypes.STRING,
explicit: DataTypes.BOOLEAN,
autoDownloadEpisodes: DataTypes.BOOLEAN,
autoDownloadSchedule: DataTypes.STRING,
lastEpisodeCheck: DataTypes.DATE,
maxEpisodesToKeep: DataTypes.INTEGER,
maxNewEpisodesToDownload: DataTypes.INTEGER,
coverPath: DataTypes.STRING,
tags: DataTypes.JSON,
genres: DataTypes.JSON
}, {
sequelize,
modelName: 'podcast'
})
return Podcast
}

View File

@ -0,0 +1,95 @@
const { DataTypes, Model } = require('sequelize')
module.exports = (sequelize) => {
class PodcastEpisode extends Model {
getOldPodcastEpisode(libraryItemId = null) {
let enclosure = null
if (this.enclosureURL) {
enclosure = {
url: this.enclosureURL,
type: this.enclosureType,
length: this.enclosureSize !== null ? String(this.enclosureSize) : null
}
}
return {
libraryItemId: libraryItemId || null,
podcastId: this.podcastId,
id: this.id,
index: this.index,
season: this.season,
episode: this.episode,
episodeType: this.episodeType,
title: this.title,
subtitle: this.subtitle,
description: this.description,
enclosure,
pubDate: this.pubDate,
chapters: this.chapters,
audioFile: this.audioFile,
publishedAt: this.publishedAt?.valueOf() || null,
addedAt: this.createdAt.valueOf(),
updatedAt: this.updatedAt.valueOf()
}
}
static createFromOld(oldEpisode) {
const podcastEpisode = this.getFromOld(oldEpisode)
return this.create(podcastEpisode)
}
static getFromOld(oldEpisode) {
return {
id: oldEpisode.id,
index: oldEpisode.index,
season: oldEpisode.season,
episode: oldEpisode.episode,
episodeType: oldEpisode.episodeType,
title: oldEpisode.title,
subtitle: oldEpisode.subtitle,
description: oldEpisode.description,
pubDate: oldEpisode.pubDate,
enclosureURL: oldEpisode.enclosure?.url || null,
enclosureSize: oldEpisode.enclosure?.length || null,
enclosureType: oldEpisode.enclosure?.type || null,
publishedAt: oldEpisode.publishedAt,
podcastId: oldEpisode.podcastId,
audioFile: oldEpisode.audioFile?.toJSON() || null,
chapters: oldEpisode.chapters
}
}
}
PodcastEpisode.init({
id: {
type: DataTypes.UUID,
defaultValue: DataTypes.UUIDV4,
primaryKey: true
},
index: DataTypes.INTEGER,
season: DataTypes.STRING,
episode: DataTypes.STRING,
episodeType: DataTypes.STRING,
title: DataTypes.STRING,
subtitle: DataTypes.STRING(1000),
description: DataTypes.TEXT,
pubDate: DataTypes.STRING,
enclosureURL: DataTypes.STRING,
enclosureSize: DataTypes.BIGINT,
enclosureType: DataTypes.STRING,
publishedAt: DataTypes.DATE,
audioFile: DataTypes.JSON,
chapters: DataTypes.JSON
}, {
sequelize,
modelName: 'podcastEpisode'
})
const { podcast } = sequelize.models
podcast.hasMany(PodcastEpisode, {
onDelete: 'CASCADE'
})
PodcastEpisode.belongsTo(podcast)
return PodcastEpisode
}

80
server/models/Series.js Normal file
View File

@ -0,0 +1,80 @@
const { DataTypes, Model } = require('sequelize')
const oldSeries = require('../objects/entities/Series')
module.exports = (sequelize) => {
class Series extends Model {
static async getAllOldSeries() {
const series = await this.findAll()
return series.map(se => se.getOldSeries())
}
getOldSeries() {
return new oldSeries({
id: this.id,
name: this.name,
description: this.description,
libraryId: this.libraryId,
addedAt: this.createdAt.valueOf(),
updatedAt: this.updatedAt.valueOf()
})
}
static updateFromOld(oldSeries) {
const series = this.getFromOld(oldSeries)
return this.update(series, {
where: {
id: series.id
}
})
}
static createFromOld(oldSeries) {
const series = this.getFromOld(oldSeries)
return this.create(series)
}
static createBulkFromOld(oldSeriesObjs) {
const series = oldSeriesObjs.map(this.getFromOld)
return this.bulkCreate(series)
}
static getFromOld(oldSeries) {
return {
id: oldSeries.id,
name: oldSeries.name,
description: oldSeries.description,
libraryId: oldSeries.libraryId
}
}
static removeById(seriesId) {
return this.destroy({
where: {
id: seriesId
}
})
}
}
Series.init({
id: {
type: DataTypes.UUID,
defaultValue: DataTypes.UUIDV4,
primaryKey: true
},
name: DataTypes.STRING,
description: DataTypes.TEXT
}, {
sequelize,
modelName: 'series'
})
const { library } = sequelize.models
library.hasMany(Series, {
onDelete: 'CASCADE'
})
Series.belongsTo(library)
return Series
}

45
server/models/Setting.js Normal file
View File

@ -0,0 +1,45 @@
const { DataTypes, Model } = require('sequelize')
const oldEmailSettings = require('../objects/settings/EmailSettings')
const oldServerSettings = require('../objects/settings/ServerSettings')
const oldNotificationSettings = require('../objects/settings/NotificationSettings')
module.exports = (sequelize) => {
class Setting extends Model {
static async getOldSettings() {
const settings = (await this.findAll()).map(se => se.value)
const emailSettingsJson = settings.find(se => se.id === 'email-settings')
const serverSettingsJson = settings.find(se => se.id === 'server-settings')
const notificationSettingsJson = settings.find(se => se.id === 'notification-settings')
return {
settings,
emailSettings: new oldEmailSettings(emailSettingsJson),
serverSettings: new oldServerSettings(serverSettingsJson),
notificationSettings: new oldNotificationSettings(notificationSettingsJson)
}
}
static updateSettingObj(setting) {
return this.upsert({
key: setting.id,
value: setting
})
}
}
Setting.init({
key: {
type: DataTypes.STRING,
primaryKey: true
},
value: DataTypes.JSON
}, {
sequelize,
modelName: 'setting'
})
return Setting
}

Some files were not shown because too many files have changed in this diff Show More