From d5ce7b4939ca31a8c8d0456c230c1f3d10525362 Mon Sep 17 00:00:00 2001 From: advplyr Date: Sun, 5 Jan 2025 12:05:01 -0600 Subject: [PATCH] Migrate to new library item in scanner --- server/Database.js | 17 -- server/controllers/AuthorController.js | 10 + server/controllers/LibraryItemController.js | 135 ++++----- server/controllers/PodcastController.js | 6 +- server/controllers/SearchController.js | 2 +- server/finders/BookFinder.js | 2 +- server/managers/CoverManager.js | 55 ---- server/models/Author.js | 16 ++ server/models/Book.js | 144 +++++++--- server/models/LibraryItem.js | 252 +--------------- server/models/MediaItemShare.js | 11 +- server/models/Series.js | 18 +- server/objects/LibraryItem.js | 120 -------- server/objects/mediaTypes/Book.js | 16 -- server/objects/mediaTypes/Podcast.js | 17 -- server/objects/metadata/BookMetadata.js | 25 -- server/objects/metadata/PodcastMetadata.js | 4 - server/routers/ApiRouter.js | 105 ------- server/scanner/LibraryScanner.js | 18 +- server/scanner/Scanner.js | 301 +++++++++++++------- server/utils/podcastUtils.js | 6 + 21 files changed, 435 insertions(+), 845 deletions(-) diff --git a/server/Database.js b/server/Database.js index 61385981..82a8fbd1 100644 --- a/server/Database.js +++ b/server/Database.js @@ -401,23 +401,6 @@ class Database { return this.models.setting.updateSettingObj(settings.toJSON()) } - /** - * Save metadata file and update library item - * - * @param {import('./objects/LibraryItem')} oldLibraryItem - * @returns {Promise} - */ - async updateLibraryItem(oldLibraryItem) { - if (!this.sequelize) return false - await oldLibraryItem.saveMetadata() - const updated = await this.models.libraryItem.fullUpdateFromOld(oldLibraryItem) - // Clear library filter data cache - if (updated) { - delete this.libraryFilterData[oldLibraryItem.libraryId] - } - return updated - } - getPlaybackSessions(where = null) { if (!this.sequelize) return false return this.models.playbackSession.getOldPlaybackSessions(where) diff --git a/server/controllers/AuthorController.js b/server/controllers/AuthorController.js index 31c94307..47150883 100644 --- a/server/controllers/AuthorController.js +++ b/server/controllers/AuthorController.js @@ -242,8 +242,18 @@ class AuthorController { await CacheManager.purgeImageCache(req.author.id) // Purge cache } + // Load library items so that metadata file can be updated + const allItemsWithAuthor = await Database.authorModel.getAllLibraryItemsForAuthor(req.author.id) + allItemsWithAuthor.forEach((libraryItem) => { + libraryItem.media.authors = libraryItem.media.authors.filter((au) => au.id !== req.author.id) + }) + await req.author.destroy() + for (const libraryItem of allItemsWithAuthor) { + await libraryItem.saveMetadataFile() + } + SocketAuthority.emitter('author_removed', req.author.toOldJSON()) // Update filter data diff --git a/server/controllers/LibraryItemController.js b/server/controllers/LibraryItemController.js index 14a85f6e..3a4fb159 100644 --- a/server/controllers/LibraryItemController.js +++ b/server/controllers/LibraryItemController.js @@ -81,31 +81,6 @@ class LibraryItemController { res.json(req.libraryItem.toOldJSON()) } - /** - * PATCH: /api/items/:id - * - * @deprecated - * Use the updateMedia /api/items/:id/media endpoint instead or updateCover /api/items/:id/cover - * - * @param {LibraryItemControllerRequest} req - * @param {Response} res - */ - async update(req, res) { - // Item has cover and update is removing cover so purge it from cache - if (req.libraryItem.media.coverPath && req.body.media && 
(req.body.media.coverPath === '' || req.body.media.coverPath === null)) { - await CacheManager.purgeCoverCache(req.libraryItem.id) - } - - const oldLibraryItem = Database.libraryItemModel.getOldLibraryItem(req.libraryItem) - const hasUpdates = oldLibraryItem.update(req.body) - if (hasUpdates) { - Logger.debug(`[LibraryItemController] Updated now saving`) - await Database.updateLibraryItem(oldLibraryItem) - SocketAuthority.emitter('item_updated', oldLibraryItem.toJSONExpanded()) - } - res.json(oldLibraryItem.toJSON()) - } - /** * DELETE: /api/items/:id * Delete library item. Will delete from database and file system if hard delete is requested. @@ -219,11 +194,6 @@ class LibraryItemController { if (res.writableEnded || res.headersSent) return } - // Book specific - if (req.libraryItem.isBook) { - await this.createAuthorsAndSeriesForItemUpdate(mediaPayload, req.libraryItem.libraryId) - } - // Podcast specific let isPodcastAutoDownloadUpdated = false if (req.libraryItem.isPodcast) { @@ -234,41 +204,56 @@ class LibraryItemController { } } - // Book specific - Get all series being removed from this item - let seriesRemoved = [] - if (req.libraryItem.isBook && mediaPayload.metadata?.series) { - const seriesIdsInUpdate = mediaPayload.metadata.series?.map((se) => se.id) || [] - seriesRemoved = req.libraryItem.media.series.filter((se) => !seriesIdsInUpdate.includes(se.id)) + let hasUpdates = (await req.libraryItem.media.updateFromRequest(mediaPayload)) || mediaPayload.url + + if (req.libraryItem.isBook && Array.isArray(mediaPayload.metadata?.series)) { + const seriesUpdateData = await req.libraryItem.media.updateSeriesFromRequest(mediaPayload.metadata.series, req.libraryItem.libraryId) + if (seriesUpdateData?.seriesRemoved.length) { + // Check remove empty series + Logger.debug(`[LibraryItemController] Series were removed from book. Check if series are now empty.`) + await this.checkRemoveEmptySeries(seriesUpdateData.seriesRemoved.map((se) => se.id)) + } + if (seriesUpdateData?.seriesAdded.length) { + // Add series to filter data + seriesUpdateData.seriesAdded.forEach((se) => { + Database.addSeriesToFilterData(req.libraryItem.libraryId, se.name, se.id) + }) + } + if (seriesUpdateData?.hasUpdates) { + hasUpdates = true + } } - let authorsRemoved = [] - if (req.libraryItem.isBook && mediaPayload.metadata?.authors) { - const authorIdsInUpdate = mediaPayload.metadata.authors.map((au) => au.id) - authorsRemoved = req.libraryItem.media.authors.filter((au) => !authorIdsInUpdate.includes(au.id)) + if (req.libraryItem.isBook && Array.isArray(mediaPayload.metadata?.authors)) { + const authorNames = mediaPayload.metadata.authors.map((au) => (typeof au.name === 'string' ? au.name.trim() : null)).filter((au) => au) + const authorUpdateData = await req.libraryItem.media.updateAuthorsFromRequest(authorNames, req.libraryItem.libraryId) + if (authorUpdateData?.authorsRemoved.length) { + // Check remove empty authors + Logger.debug(`[LibraryItemController] Authors were removed from book. 
Check if authors are now empty.`) + await this.checkRemoveAuthorsWithNoBooks(authorUpdateData.authorsRemoved.map((au) => au.id)) + hasUpdates = true + } + if (authorUpdateData?.authorsAdded.length) { + // Add authors to filter data + authorUpdateData.authorsAdded.forEach((au) => { + Database.addAuthorToFilterData(req.libraryItem.libraryId, au.name, au.id) + }) + hasUpdates = true + } } - const hasUpdates = (await req.libraryItem.media.updateFromRequest(mediaPayload)) || mediaPayload.url if (hasUpdates) { req.libraryItem.changed('updatedAt', true) await req.libraryItem.save() + await req.libraryItem.saveMetadataFile() + if (isPodcastAutoDownloadUpdated) { this.cronManager.checkUpdatePodcastCron(req.libraryItem) } Logger.debug(`[LibraryItemController] Updated library item media ${req.libraryItem.media.title}`) SocketAuthority.emitter('item_updated', req.libraryItem.toOldJSONExpanded()) - - if (authorsRemoved.length) { - // Check remove empty authors - Logger.debug(`[LibraryItemController] Authors were removed from book. Check if authors are now empty.`) - await this.checkRemoveAuthorsWithNoBooks(authorsRemoved.map((au) => au.id)) - } - if (seriesRemoved.length) { - // Check remove empty series - Logger.debug(`[LibraryItemController] Series were removed from book. Check if series are now empty.`) - await this.checkRemoveEmptySeries(seriesRemoved.map((se) => se.id)) - } } res.json({ updated: hasUpdates, @@ -527,8 +512,7 @@ class LibraryItemController { options.overrideDetails = !!reqBody.overrideDetails } - const oldLibraryItem = Database.libraryItemModel.getOldLibraryItem(req.libraryItem) - var matchResult = await Scanner.quickMatchLibraryItem(this, oldLibraryItem, options) + const matchResult = await Scanner.quickMatchLibraryItem(this, req.libraryItem, options) res.json(matchResult) } @@ -640,26 +624,44 @@ class LibraryItemController { const mediaPayload = updatePayload.mediaPayload const libraryItem = libraryItems.find((li) => li.id === updatePayload.id) - await this.createAuthorsAndSeriesForItemUpdate(mediaPayload, libraryItem.libraryId) + let hasUpdates = await libraryItem.media.updateFromRequest(mediaPayload) - if (libraryItem.isBook) { - if (Array.isArray(mediaPayload.metadata?.series)) { - const seriesIdsInUpdate = mediaPayload.metadata.series.map((se) => se.id) - const seriesRemoved = libraryItem.media.series.filter((se) => !seriesIdsInUpdate.includes(se.id)) - seriesIdsRemoved.push(...seriesRemoved.map((se) => se.id)) + if (libraryItem.isBook && Array.isArray(mediaPayload.metadata?.series)) { + const seriesUpdateData = await libraryItem.media.updateSeriesFromRequest(mediaPayload.metadata.series, libraryItem.libraryId) + if (seriesUpdateData?.seriesRemoved.length) { + seriesIdsRemoved.push(...seriesUpdateData.seriesRemoved.map((se) => se.id)) } - if (Array.isArray(mediaPayload.metadata?.authors)) { - const authorIdsInUpdate = mediaPayload.metadata.authors.map((au) => au.id) - const authorsRemoved = libraryItem.media.authors.filter((au) => !authorIdsInUpdate.includes(au.id)) - authorIdsRemoved.push(...authorsRemoved.map((au) => au.id)) + if (seriesUpdateData?.seriesAdded.length) { + seriesUpdateData.seriesAdded.forEach((se) => { + Database.addSeriesToFilterData(libraryItem.libraryId, se.name, se.id) + }) + } + if (seriesUpdateData?.hasUpdates) { + hasUpdates = true + } + } + + if (libraryItem.isBook && Array.isArray(mediaPayload.metadata?.authors)) { + const authorNames = mediaPayload.metadata.authors.map((au) => (typeof au.name === 'string' ? 
au.name.trim() : null)).filter((au) => au) + const authorUpdateData = await libraryItem.media.updateAuthorsFromRequest(authorNames, libraryItem.libraryId) + if (authorUpdateData?.authorsRemoved.length) { + authorIdsRemoved.push(...authorUpdateData.authorsRemoved.map((au) => au.id)) + hasUpdates = true + } + if (authorUpdateData?.authorsAdded.length) { + authorUpdateData.authorsAdded.forEach((au) => { + Database.addAuthorToFilterData(libraryItem.libraryId, au.name, au.id) + }) + hasUpdates = true } } - const hasUpdates = await libraryItem.media.updateFromRequest(mediaPayload) if (hasUpdates) { libraryItem.changed('updatedAt', true) await libraryItem.save() + await libraryItem.saveMetadataFile() + Logger.debug(`[LibraryItemController] Updated library item media "${libraryItem.media.title}"`) SocketAuthority.emitter('item_updated', libraryItem.toOldJSONExpanded()) itemsUpdated++ @@ -739,8 +741,7 @@ class LibraryItemController { } for (const libraryItem of libraryItems) { - const oldLibraryItem = Database.libraryItemModel.getOldLibraryItem(libraryItem) - const matchResult = await Scanner.quickMatchLibraryItem(this, oldLibraryItem, options) + const matchResult = await Scanner.quickMatchLibraryItem(this, libraryItem, options) if (matchResult.updated) { itemsUpdated++ } else if (matchResult.warning) { @@ -891,6 +892,8 @@ class LibraryItemController { req.libraryItem.media.changed('chapters', true) await req.libraryItem.media.save() + await req.libraryItem.saveMetadataFile() + SocketAuthority.emitter('item_updated', req.libraryItem.toOldJSONExpanded()) } diff --git a/server/controllers/PodcastController.js b/server/controllers/PodcastController.js index 3d8ff240..1d1c106d 100644 --- a/server/controllers/PodcastController.js +++ b/server/controllers/PodcastController.js @@ -375,11 +375,9 @@ class PodcastController { } const overrideDetails = req.query.override === '1' - const oldLibraryItem = Database.libraryItemModel.getOldLibraryItem(req.libraryItem) - const episodesUpdated = await Scanner.quickMatchPodcastEpisodes(oldLibraryItem, { overrideDetails }) + const episodesUpdated = await Scanner.quickMatchPodcastEpisodes(req.libraryItem, { overrideDetails }) if (episodesUpdated) { - await Database.updateLibraryItem(oldLibraryItem) - SocketAuthority.emitter('item_updated', oldLibraryItem.toJSONExpanded()) + SocketAuthority.emitter('item_updated', req.libraryItem.toOldJSONExpanded()) } res.json({ diff --git a/server/controllers/SearchController.js b/server/controllers/SearchController.js index a19ff876..51aaa910 100644 --- a/server/controllers/SearchController.js +++ b/server/controllers/SearchController.js @@ -24,7 +24,7 @@ class SearchController { */ async findBooks(req, res) { const id = req.query.id - const libraryItem = await Database.libraryItemModel.getOldById(id) + const libraryItem = await Database.libraryItemModel.getExpandedById(id) const provider = req.query.provider || 'google' const title = req.query.title || '' const author = req.query.author || '' diff --git a/server/finders/BookFinder.js b/server/finders/BookFinder.js index 47d1118c..f4323094 100644 --- a/server/finders/BookFinder.js +++ b/server/finders/BookFinder.js @@ -361,7 +361,7 @@ class BookFinder { /** * Search for books including fuzzy searches * - * @param {Object} libraryItem + * @param {import('../models/LibraryItem')} libraryItem * @param {string} provider * @param {string} title * @param {string} author diff --git a/server/managers/CoverManager.js b/server/managers/CoverManager.js index 945c69ab..c8f88910 100644 --- 
a/server/managers/CoverManager.js +++ b/server/managers/CoverManager.js @@ -123,61 +123,6 @@ class CoverManager { } } - /** - * - * @param {Object} libraryItem - old library item - * @param {string} url - * @param {boolean} [forceLibraryItemFolder=false] - * @returns {Promise<{error:string}|{cover:string}>} - */ - async downloadCoverFromUrl(libraryItem, url, forceLibraryItemFolder = false) { - try { - // Force save cover with library item is used for adding new podcasts - var coverDirPath = forceLibraryItemFolder ? libraryItem.path : this.getCoverDirectory(libraryItem) - await fs.ensureDir(coverDirPath) - - var temppath = Path.posix.join(coverDirPath, 'cover') - - let errorMsg = '' - let success = await downloadImageFile(url, temppath) - .then(() => true) - .catch((err) => { - errorMsg = err.message || 'Unknown error' - Logger.error(`[CoverManager] Download image file failed for "${url}"`, errorMsg) - return false - }) - if (!success) { - return { - error: 'Failed to download image from url: ' + errorMsg - } - } - - var imgtype = await this.checkFileIsValidImage(temppath, true) - - if (imgtype.error) { - return imgtype - } - - var coverFilename = `cover.${imgtype.ext}` - var coverFullPath = Path.posix.join(coverDirPath, coverFilename) - await fs.rename(temppath, coverFullPath) - - await this.removeOldCovers(coverDirPath, '.' + imgtype.ext) - await CacheManager.purgeCoverCache(libraryItem.id) - - Logger.info(`[CoverManager] Downloaded libraryItem cover "${coverFullPath}" from url "${url}" for "${libraryItem.media.metadata.title}"`) - libraryItem.updateMediaCover(coverFullPath) - return { - cover: coverFullPath - } - } catch (error) { - Logger.error(`[CoverManager] Fetch cover image from url "${url}" failed`, error) - return { - error: 'Failed to fetch image from url' - } - } - } - /** * * @param {string} coverPath diff --git a/server/models/Author.js b/server/models/Author.js index f3bbba57..287b6697 100644 --- a/server/models/Author.js +++ b/server/models/Author.js @@ -107,6 +107,22 @@ class Author extends Model { return libraryItems } + /** + * + * @param {string} name + * @param {string} libraryId + * @returns {Promise} + */ + static async findOrCreateByNameAndLibrary(name, libraryId) { + const author = await this.getByNameAndLibrary(name, libraryId) + if (author) return author + return this.create({ + name, + lastFirst: this.getLastFirst(name), + libraryId + }) + } + /** * Initialize model * @param {import('../Database').sequelize} sequelize diff --git a/server/models/Book.js b/server/models/Book.js index 4c2006a1..dff79da2 100644 --- a/server/models/Book.js +++ b/server/models/Book.js @@ -542,49 +542,113 @@ class Book extends Model { await this.save() } - if (Array.isArray(payload.metadata?.authors)) { - const authorsRemoved = this.authors.filter((au) => !payload.metadata.authors.some((a) => a.id === au.id)) - const newAuthors = payload.metadata.authors.filter((a) => !this.authors.some((au) => au.id === a.id)) - - for (const author of authorsRemoved) { - await this.sequelize.models.bookAuthor.removeByIds(author.id, this.id) - Logger.debug(`[Book] "${this.title}" Removed author ${author.id}`) - hasUpdates = true - } - for (const author of newAuthors) { - await this.sequelize.models.bookAuthor.create({ bookId: this.id, authorId: author.id }) - Logger.debug(`[Book] "${this.title}" Added author ${author.id}`) - hasUpdates = true - } - } - - if (Array.isArray(payload.metadata?.series)) { - const seriesRemoved = this.series.filter((se) => !payload.metadata.series.some((s) => s.id === se.id)) 
- const newSeries = payload.metadata.series.filter((s) => !this.series.some((se) => se.id === s.id)) - - for (const series of seriesRemoved) { - await this.sequelize.models.bookSeries.removeByIds(series.id, this.id) - Logger.debug(`[Book] "${this.title}" Removed series ${series.id}`) - hasUpdates = true - } - for (const series of newSeries) { - await this.sequelize.models.bookSeries.create({ bookId: this.id, seriesId: series.id, sequence: series.sequence }) - Logger.debug(`[Book] "${this.title}" Added series ${series.id}`) - hasUpdates = true - } - for (const series of payload.metadata.series) { - const existingSeries = this.series.find((se) => se.id === series.id) - if (existingSeries && existingSeries.bookSeries.sequence !== series.sequence) { - await existingSeries.bookSeries.update({ sequence: series.sequence }) - Logger.debug(`[Book] "${this.title}" Updated series ${series.id} sequence ${series.sequence}`) - hasUpdates = true - } - } - } - return hasUpdates } + /** + * Creates or removes authors from the book using the author names from the request + * + * @param {string[]} authors + * @param {string} libraryId + * @returns {Promise<{authorsRemoved: import('./Author')[], authorsAdded: import('./Author')[]}>} + */ + async updateAuthorsFromRequest(authors, libraryId) { + if (!Array.isArray(authors)) return null + + if (!this.authors) { + throw new Error(`[Book] Cannot update authors because authors are not loaded for book ${this.id}`) + } + + /** @type {typeof import('./Author')} */ + const authorModel = this.sequelize.models.author + + /** @type {typeof import('./BookAuthor')} */ + const bookAuthorModel = this.sequelize.models.bookAuthor + + const authorsCleaned = authors.map((a) => a.toLowerCase()).filter((a) => a) + const authorsRemoved = this.authors.filter((au) => !authorsCleaned.includes(au.name.toLowerCase())) + const newAuthorNames = authors.filter((a) => !this.authors.some((au) => au.name.toLowerCase() === a.toLowerCase())) + + for (const author of authorsRemoved) { + await bookAuthorModel.removeByIds(author.id, this.id) + Logger.debug(`[Book] "${this.title}" Removed author "${author.name}"`) + this.authors = this.authors.filter((au) => au.id !== author.id) + } + const authorsAdded = [] + for (const authorName of newAuthorNames) { + const author = await authorModel.findOrCreateByNameAndLibrary(authorName, libraryId) + await bookAuthorModel.create({ bookId: this.id, authorId: author.id }) + Logger.debug(`[Book] "${this.title}" Added author "${author.name}"`) + this.authors.push(author) + authorsAdded.push(author) + } + + return { + authorsRemoved, + authorsAdded + } + } + + /** + * Creates or removes series from the book using the series names from the request. + * Updates series sequence if it has changed. 
+ * + * @param {{ name: string, sequence: string }[]} seriesObjects + * @param {string} libraryId + * @returns {Promise<{seriesRemoved: import('./Series')[], seriesAdded: import('./Series')[], hasUpdates: boolean}>} + */ + async updateSeriesFromRequest(seriesObjects, libraryId) { + if (!Array.isArray(seriesObjects) || seriesObjects.some((se) => !se.name || typeof se.name !== 'string')) return null + + if (!this.series) { + throw new Error(`[Book] Cannot update series because series are not loaded for book ${this.id}`) + } + + /** @type {typeof import('./Series')} */ + const seriesModel = this.sequelize.models.series + + /** @type {typeof import('./BookSeries')} */ + const bookSeriesModel = this.sequelize.models.bookSeries + + const seriesNamesCleaned = seriesObjects.map((se) => se.name.toLowerCase()) + const seriesRemoved = this.series.filter((se) => !seriesNamesCleaned.includes(se.name.toLowerCase())) + const seriesAdded = [] + let hasUpdates = false + for (const seriesObj of seriesObjects) { + const seriesObjSequence = typeof seriesObj.sequence === 'string' ? seriesObj.sequence : null + + const existingSeries = this.series.find((se) => se.name.toLowerCase() === seriesObj.name.toLowerCase()) + if (existingSeries) { + if (existingSeries.bookSeries.sequence !== seriesObjSequence) { + existingSeries.bookSeries.sequence = seriesObjSequence + await existingSeries.bookSeries.save() + hasUpdates = true + Logger.debug(`[Book] "${this.title}" Updated series "${existingSeries.name}" sequence ${seriesObjSequence}`) + } + } else { + const series = await seriesModel.findOrCreateByNameAndLibrary(seriesObj.name, libraryId) + series.bookSeries = await bookSeriesModel.create({ bookId: this.id, seriesId: series.id, sequence: seriesObjSequence }) + this.series.push(series) + seriesAdded.push(series) + hasUpdates = true + Logger.debug(`[Book] "${this.title}" Added series "${series.name}"`) + } + } + + for (const series of seriesRemoved) { + await bookSeriesModel.removeByIds(series.id, this.id) + this.series = this.series.filter((se) => se.id !== series.id) + Logger.debug(`[Book] "${this.title}" Removed series ${series.id}`) + hasUpdates = true + } + + return { + seriesRemoved, + seriesAdded, + hasUpdates + } + } + /** * Old model kept metadata in a separate object */ diff --git a/server/models/LibraryItem.js b/server/models/LibraryItem.js index 31a6a0b4..d19816a3 100644 --- a/server/models/LibraryItem.js +++ b/server/models/LibraryItem.js @@ -160,206 +160,6 @@ class LibraryItem extends Model { }) } - /** - * Updates libraryItem, book, authors and series from old library item - * - * @param {oldLibraryItem} oldLibraryItem - * @returns {Promise} true if updates were made - */ - static async fullUpdateFromOld(oldLibraryItem) { - const libraryItemExpanded = await this.getExpandedById(oldLibraryItem.id) - if (!libraryItemExpanded) return false - - let hasUpdates = false - - // Check update Book/Podcast - if (libraryItemExpanded.media) { - let updatedMedia = null - if (libraryItemExpanded.mediaType === 'podcast') { - updatedMedia = this.sequelize.models.podcast.getFromOld(oldLibraryItem.media) - - const existingPodcastEpisodes = libraryItemExpanded.media.podcastEpisodes || [] - const updatedPodcastEpisodes = oldLibraryItem.media.episodes || [] - - for (const existingPodcastEpisode of existingPodcastEpisodes) { - // Episode was removed - if (!updatedPodcastEpisodes.some((ep) => ep.id === existingPodcastEpisode.id)) { - Logger.debug(`[LibraryItem] "${libraryItemExpanded.media.title}" episode 
"${existingPodcastEpisode.title}" was removed`) - await existingPodcastEpisode.destroy() - hasUpdates = true - } - } - for (const updatedPodcastEpisode of updatedPodcastEpisodes) { - const existingEpisodeMatch = existingPodcastEpisodes.find((ep) => ep.id === updatedPodcastEpisode.id) - if (!existingEpisodeMatch) { - Logger.debug(`[LibraryItem] "${libraryItemExpanded.media.title}" episode "${updatedPodcastEpisode.title}" was added`) - await this.sequelize.models.podcastEpisode.createFromOld(updatedPodcastEpisode) - hasUpdates = true - } else { - const updatedEpisodeCleaned = this.sequelize.models.podcastEpisode.getFromOld(updatedPodcastEpisode) - let episodeHasUpdates = false - for (const key in updatedEpisodeCleaned) { - let existingValue = existingEpisodeMatch[key] - if (existingValue instanceof Date) existingValue = existingValue.valueOf() - - if (!areEquivalent(updatedEpisodeCleaned[key], existingValue, true)) { - Logger.debug(util.format(`[LibraryItem] "${libraryItemExpanded.media.title}" episode "${existingEpisodeMatch.title}" ${key} was updated from %j to %j`, existingValue, updatedEpisodeCleaned[key])) - episodeHasUpdates = true - } - } - if (episodeHasUpdates) { - await existingEpisodeMatch.update(updatedEpisodeCleaned) - hasUpdates = true - } - } - } - } else if (libraryItemExpanded.mediaType === 'book') { - updatedMedia = this.sequelize.models.book.getFromOld(oldLibraryItem.media) - - const existingAuthors = libraryItemExpanded.media.authors || [] - const existingSeriesAll = libraryItemExpanded.media.series || [] - const updatedAuthors = oldLibraryItem.media.metadata.authors || [] - const uniqueUpdatedAuthors = updatedAuthors.filter((au, idx) => updatedAuthors.findIndex((a) => a.id === au.id) === idx) - const updatedSeriesAll = oldLibraryItem.media.metadata.series || [] - - for (const existingAuthor of existingAuthors) { - // Author was removed from Book - if (!uniqueUpdatedAuthors.some((au) => au.id === existingAuthor.id)) { - Logger.debug(`[LibraryItem] "${libraryItemExpanded.media.title}" author "${existingAuthor.name}" was removed`) - await this.sequelize.models.bookAuthor.removeByIds(existingAuthor.id, libraryItemExpanded.media.id) - hasUpdates = true - } - } - for (const updatedAuthor of uniqueUpdatedAuthors) { - // Author was added - if (!existingAuthors.some((au) => au.id === updatedAuthor.id)) { - Logger.debug(`[LibraryItem] "${libraryItemExpanded.media.title}" author "${updatedAuthor.name}" was added`) - await this.sequelize.models.bookAuthor.create({ authorId: updatedAuthor.id, bookId: libraryItemExpanded.media.id }) - hasUpdates = true - } - } - for (const existingSeries of existingSeriesAll) { - // Series was removed - if (!updatedSeriesAll.some((se) => se.id === existingSeries.id)) { - Logger.debug(`[LibraryItem] "${libraryItemExpanded.media.title}" series "${existingSeries.name}" was removed`) - await this.sequelize.models.bookSeries.removeByIds(existingSeries.id, libraryItemExpanded.media.id) - hasUpdates = true - } - } - for (const updatedSeries of updatedSeriesAll) { - // Series was added/updated - const existingSeriesMatch = existingSeriesAll.find((se) => se.id === updatedSeries.id) - if (!existingSeriesMatch) { - Logger.debug(`[LibraryItem] "${libraryItemExpanded.media.title}" series "${updatedSeries.name}" was added`) - await this.sequelize.models.bookSeries.create({ seriesId: updatedSeries.id, bookId: libraryItemExpanded.media.id, sequence: updatedSeries.sequence }) - hasUpdates = true - } else if (existingSeriesMatch.bookSeries.sequence !== 
updatedSeries.sequence) { - Logger.debug(`[LibraryItem] "${libraryItemExpanded.media.title}" series "${updatedSeries.name}" sequence was updated from "${existingSeriesMatch.bookSeries.sequence}" to "${updatedSeries.sequence}"`) - await existingSeriesMatch.bookSeries.update({ id: updatedSeries.id, sequence: updatedSeries.sequence }) - hasUpdates = true - } - } - } - - let hasMediaUpdates = false - for (const key in updatedMedia) { - let existingValue = libraryItemExpanded.media[key] - if (existingValue instanceof Date) existingValue = existingValue.valueOf() - - if (!areEquivalent(updatedMedia[key], existingValue, true)) { - if (key === 'chapters') { - // Handle logging of chapters separately because the object is large - const chaptersRemoved = libraryItemExpanded.media.chapters.filter((ch) => !updatedMedia.chapters.some((uch) => uch.id === ch.id)) - if (chaptersRemoved.length) { - Logger.debug(`[LibraryItem] "${libraryItemExpanded.media.title}" chapters removed: ${chaptersRemoved.map((ch) => ch.title).join(', ')}`) - } - const chaptersAdded = updatedMedia.chapters.filter((uch) => !libraryItemExpanded.media.chapters.some((ch) => ch.id === uch.id)) - if (chaptersAdded.length) { - Logger.debug(`[LibraryItem] "${libraryItemExpanded.media.title}" chapters added: ${chaptersAdded.map((ch) => ch.title).join(', ')}`) - } - if (!chaptersRemoved.length && !chaptersAdded.length) { - Logger.debug(`[LibraryItem] "${libraryItemExpanded.media.title}" chapters updated`) - } - } else { - Logger.debug(util.format(`[LibraryItem] "${libraryItemExpanded.media.title}" ${libraryItemExpanded.mediaType}.${key} updated from %j to %j`, existingValue, updatedMedia[key])) - } - - hasMediaUpdates = true - } - } - if (hasMediaUpdates && updatedMedia) { - await libraryItemExpanded.media.update(updatedMedia) - hasUpdates = true - } - } - - const updatedLibraryItem = this.getFromOld(oldLibraryItem) - let hasLibraryItemUpdates = false - for (const key in updatedLibraryItem) { - let existingValue = libraryItemExpanded[key] - if (existingValue instanceof Date) existingValue = existingValue.valueOf() - - if (!areEquivalent(updatedLibraryItem[key], existingValue, true)) { - if (key === 'libraryFiles') { - // Handle logging of libraryFiles separately because the object is large (should be addressed when migrating off the old library item model) - const libraryFilesRemoved = libraryItemExpanded.libraryFiles.filter((lf) => !updatedLibraryItem.libraryFiles.some((ulf) => ulf.ino === lf.ino)) - if (libraryFilesRemoved.length) { - Logger.debug(`[LibraryItem] "${libraryItemExpanded.media.title}" library files removed: ${libraryFilesRemoved.map((lf) => lf.metadata.path).join(', ')}`) - } - const libraryFilesAdded = updatedLibraryItem.libraryFiles.filter((ulf) => !libraryItemExpanded.libraryFiles.some((lf) => lf.ino === ulf.ino)) - if (libraryFilesAdded.length) { - Logger.debug(`[LibraryItem] "${libraryItemExpanded.media.title}" library files added: ${libraryFilesAdded.map((lf) => lf.metadata.path).join(', ')}`) - } - if (!libraryFilesRemoved.length && !libraryFilesAdded.length) { - Logger.debug(`[LibraryItem] "${libraryItemExpanded.media.title}" library files updated`) - } - } else { - Logger.debug(util.format(`[LibraryItem] "${libraryItemExpanded.media.title}" ${key} updated from %j to %j`, existingValue, updatedLibraryItem[key])) - } - - hasLibraryItemUpdates = true - if (key === 'updatedAt') { - libraryItemExpanded.changed('updatedAt', true) - } - } - } - if (hasLibraryItemUpdates) { - await 
libraryItemExpanded.update(updatedLibraryItem) - Logger.info(`[LibraryItem] Library item "${libraryItemExpanded.id}" updated`) - hasUpdates = true - } - return hasUpdates - } - - static getFromOld(oldLibraryItem) { - const extraData = {} - if (oldLibraryItem.oldLibraryItemId) { - extraData.oldLibraryItemId = oldLibraryItem.oldLibraryItemId - } - return { - id: oldLibraryItem.id, - ino: oldLibraryItem.ino, - path: oldLibraryItem.path, - relPath: oldLibraryItem.relPath, - mediaId: oldLibraryItem.media.id, - mediaType: oldLibraryItem.mediaType, - isFile: !!oldLibraryItem.isFile, - isMissing: !!oldLibraryItem.isMissing, - isInvalid: !!oldLibraryItem.isInvalid, - mtime: oldLibraryItem.mtimeMs, - ctime: oldLibraryItem.ctimeMs, - updatedAt: oldLibraryItem.updatedAt, - birthtime: oldLibraryItem.birthtimeMs, - size: oldLibraryItem.size, - lastScan: oldLibraryItem.lastScan, - lastScanVersion: oldLibraryItem.scanVersion, - libraryId: oldLibraryItem.libraryId, - libraryFolderId: oldLibraryItem.folderId, - libraryFiles: oldLibraryItem.libraryFiles?.map((lf) => lf.toJSON()) || [], - extraData - } - } - /** * Remove library item by id * @@ -468,12 +268,14 @@ class LibraryItem extends Model { /** * * @param {import('sequelize').WhereOptions} where + * @param {import('sequelize').BindOrReplacements} [replacements] * @param {import('sequelize').IncludeOptions} [include] * @returns {Promise} */ - static async findOneExpanded(where, include = null) { + static async findOneExpanded(where, replacements = null, include = null) { const libraryItem = await this.findOne({ where, + replacements, include }) if (!libraryItem) { @@ -801,52 +603,6 @@ class LibraryItem extends Model { return (await this.count({ where: { id: libraryItemId } })) > 0 } - /** - * - * @param {import('sequelize').WhereOptions} where - * @param {import('sequelize').BindOrReplacements} replacements - * @returns {Object} oldLibraryItem - */ - static async findOneOld(where, replacements = {}) { - const libraryItem = await this.findOne({ - where, - replacements, - include: [ - { - model: this.sequelize.models.book, - include: [ - { - model: this.sequelize.models.author, - through: { - attributes: [] - } - }, - { - model: this.sequelize.models.series, - through: { - attributes: ['sequence'] - } - } - ] - }, - { - model: this.sequelize.models.podcast, - include: [ - { - model: this.sequelize.models.podcastEpisode - } - ] - } - ], - order: [ - [this.sequelize.models.book, this.sequelize.models.author, this.sequelize.models.bookAuthor, 'createdAt', 'ASC'], - [this.sequelize.models.book, this.sequelize.models.series, 'bookSeries', 'createdAt', 'ASC'] - ] - }) - if (!libraryItem) return null - return this.getOldLibraryItem(libraryItem) - } - /** * * @param {string} libraryItemId @@ -970,7 +726,7 @@ class LibraryItem extends Model { } } - Logger.debug(`Success saving abmetadata to "${metadataFilePath}"`) + Logger.debug(`[LibraryItem] Saved metadata for "${this.media.title}" file to "${metadataFilePath}"`) return metadataLibraryFile }) diff --git a/server/models/MediaItemShare.js b/server/models/MediaItemShare.js index 2d5be8f6..6bff17b8 100644 --- a/server/models/MediaItemShare.js +++ b/server/models/MediaItemShare.js @@ -87,13 +87,10 @@ class MediaItemShare extends Model { const libraryItemModel = this.sequelize.models.libraryItem if (mediaItemType === 'book') { - const libraryItem = await libraryItemModel.findOneExpanded( - { mediaId: mediaItemId }, - { - model: this.sequelize.models.library, - attributes: ['settings'] - } - ) + const libraryItem = 
await libraryItemModel.findOneExpanded({ mediaId: mediaItemId }, null, { + model: this.sequelize.models.library, + attributes: ['settings'] + }) return libraryItem } diff --git a/server/models/Series.js b/server/models/Series.js index c4bc1594..6ca28846 100644 --- a/server/models/Series.js +++ b/server/models/Series.js @@ -1,6 +1,6 @@ const { DataTypes, Model, where, fn, col, literal } = require('sequelize') -const { getTitlePrefixAtEnd } = require('../utils/index') +const { getTitlePrefixAtEnd, getTitleIgnorePrefix } = require('../utils/index') class Series extends Model { constructor(values, options) { @@ -66,6 +66,22 @@ class Series extends Model { return series } + /** + * + * @param {string} seriesName + * @param {string} libraryId + * @returns {Promise} + */ + static async findOrCreateByNameAndLibrary(seriesName, libraryId) { + const series = await this.getByNameAndLibrary(seriesName, libraryId) + if (series) return series + return this.create({ + name: seriesName, + nameIgnorePrefix: getTitleIgnorePrefix(seriesName), + libraryId + }) + } + /** * Initialize model * @param {import('../Database').sequelize} sequelize diff --git a/server/objects/LibraryItem.js b/server/objects/LibraryItem.js index d955356e..3cf89b10 100644 --- a/server/objects/LibraryItem.js +++ b/server/objects/LibraryItem.js @@ -135,33 +135,6 @@ class LibraryItem { } } - // Adds additional helpful fields like media duration, tracks, etc. - toJSONExpanded() { - return { - id: this.id, - ino: this.ino, - oldLibraryItemId: this.oldLibraryItemId, - libraryId: this.libraryId, - folderId: this.folderId, - path: this.path, - relPath: this.relPath, - isFile: this.isFile, - mtimeMs: this.mtimeMs, - ctimeMs: this.ctimeMs, - birthtimeMs: this.birthtimeMs, - addedAt: this.addedAt, - updatedAt: this.updatedAt, - lastScan: this.lastScan, - scanVersion: this.scanVersion, - isMissing: !!this.isMissing, - isInvalid: !!this.isInvalid, - mediaType: this.mediaType, - media: this.media.toJSONExpanded(), - libraryFiles: this.libraryFiles.map((f) => f.toJSON()), - size: this.size - } - } - get isPodcast() { return this.mediaType === 'podcast' } @@ -176,98 +149,5 @@ class LibraryItem { get hasAudioFiles() { return this.libraryFiles.some((lf) => lf.fileType === 'audio') } - - update(payload) { - const json = this.toJSON() - let hasUpdates = false - for (const key in json) { - if (payload[key] !== undefined) { - if (key === 'media') { - if (this.media.update(payload[key])) { - hasUpdates = true - } - } else if (!areEquivalent(payload[key], json[key])) { - this[key] = copyValue(payload[key]) - hasUpdates = true - } - } - } - if (hasUpdates) { - this.updatedAt = Date.now() - } - return hasUpdates - } - - updateMediaCover(coverPath) { - this.media.updateCover(coverPath) - this.updatedAt = Date.now() - return true - } - - setMissing() { - this.isMissing = true - this.updatedAt = Date.now() - } - - /** - * Save metadata.json file - * TODO: Move to new LibraryItem model - * @returns {Promise} null if not saved - */ - async saveMetadata() { - if (this.isSavingMetadata || !global.MetadataPath) return null - - this.isSavingMetadata = true - - let metadataPath = Path.join(global.MetadataPath, 'items', this.id) - let storeMetadataWithItem = global.ServerSettings.storeMetadataWithItem - if (storeMetadataWithItem && !this.isFile) { - metadataPath = this.path - } else { - // Make sure metadata book dir exists - storeMetadataWithItem = false - await fs.ensureDir(metadataPath) - } - - const metadataFilePath = Path.join(metadataPath, 
`metadata.${global.ServerSettings.metadataFileFormat}`) - - return fs - .writeFile(metadataFilePath, JSON.stringify(this.media.toJSONForMetadataFile(), null, 2)) - .then(async () => { - // Add metadata.json to libraryFiles array if it is new - let metadataLibraryFile = this.libraryFiles.find((lf) => lf.metadata.path === filePathToPOSIX(metadataFilePath)) - if (storeMetadataWithItem) { - if (!metadataLibraryFile) { - metadataLibraryFile = new LibraryFile() - await metadataLibraryFile.setDataFromPath(metadataFilePath, `metadata.json`) - this.libraryFiles.push(metadataLibraryFile) - } else { - const fileTimestamps = await getFileTimestampsWithIno(metadataFilePath) - if (fileTimestamps) { - metadataLibraryFile.metadata.mtimeMs = fileTimestamps.mtimeMs - metadataLibraryFile.metadata.ctimeMs = fileTimestamps.ctimeMs - metadataLibraryFile.metadata.size = fileTimestamps.size - metadataLibraryFile.ino = fileTimestamps.ino - } - } - const libraryItemDirTimestamps = await getFileTimestampsWithIno(this.path) - if (libraryItemDirTimestamps) { - this.mtimeMs = libraryItemDirTimestamps.mtimeMs - this.ctimeMs = libraryItemDirTimestamps.ctimeMs - } - } - - Logger.debug(`[LibraryItem] Success saving abmetadata to "${metadataFilePath}"`) - - return metadataLibraryFile - }) - .catch((error) => { - Logger.error(`[LibraryItem] Failed to save json file at "${metadataFilePath}"`, error) - return null - }) - .finally(() => { - this.isSavingMetadata = false - }) - } } module.exports = LibraryItem diff --git a/server/objects/mediaTypes/Book.js b/server/objects/mediaTypes/Book.js index 488c3aac..b270e0e7 100644 --- a/server/objects/mediaTypes/Book.js +++ b/server/objects/mediaTypes/Book.js @@ -68,22 +68,6 @@ class Book { } } - toJSONExpanded() { - return { - id: this.id, - libraryItemId: this.libraryItemId, - metadata: this.metadata.toJSONExpanded(), - coverPath: this.coverPath, - tags: [...this.tags], - audioFiles: this.audioFiles.map((f) => f.toJSON()), - chapters: this.chapters.map((c) => ({ ...c })), - duration: this.duration, - size: this.size, - tracks: this.tracks.map((t) => t.toJSON()), - ebookFile: this.ebookFile?.toJSON() || null - } - } - toJSONForMetadataFile() { return { tags: [...this.tags], diff --git a/server/objects/mediaTypes/Podcast.js b/server/objects/mediaTypes/Podcast.js index f27f3fa2..2ec4a873 100644 --- a/server/objects/mediaTypes/Podcast.js +++ b/server/objects/mediaTypes/Podcast.js @@ -83,23 +83,6 @@ class Podcast { } } - toJSONExpanded() { - return { - id: this.id, - libraryItemId: this.libraryItemId, - metadata: this.metadata.toJSONExpanded(), - coverPath: this.coverPath, - tags: [...this.tags], - episodes: this.episodes.map((e) => e.toJSONExpanded()), - autoDownloadEpisodes: this.autoDownloadEpisodes, - autoDownloadSchedule: this.autoDownloadSchedule, - lastEpisodeCheck: this.lastEpisodeCheck, - maxEpisodesToKeep: this.maxEpisodesToKeep, - maxNewEpisodesToDownload: this.maxNewEpisodesToDownload, - size: this.size - } - } - toJSONForMetadataFile() { return { tags: [...this.tags], diff --git a/server/objects/metadata/BookMetadata.js b/server/objects/metadata/BookMetadata.js index 0dfe1dbf..5116f2f4 100644 --- a/server/objects/metadata/BookMetadata.js +++ b/server/objects/metadata/BookMetadata.js @@ -89,31 +89,6 @@ class BookMetadata { } } - toJSONExpanded() { - return { - title: this.title, - titleIgnorePrefix: this.titlePrefixAtEnd, - subtitle: this.subtitle, - authors: this.authors.map((a) => ({ ...a })), // Author JSONMinimal with name and id - narrators: [...this.narrators], - series: 
this.series.map((s) => ({ ...s })), - genres: [...this.genres], - publishedYear: this.publishedYear, - publishedDate: this.publishedDate, - publisher: this.publisher, - description: this.description, - isbn: this.isbn, - asin: this.asin, - language: this.language, - explicit: this.explicit, - authorName: this.authorName, - authorNameLF: this.authorNameLF, - narratorName: this.narratorName, - seriesName: this.seriesName, - abridged: this.abridged - } - } - toJSONForMetadataFile() { const json = this.toJSON() json.authors = json.authors.map((au) => au.name) diff --git a/server/objects/metadata/PodcastMetadata.js b/server/objects/metadata/PodcastMetadata.js index 0df40df0..ccc94ce0 100644 --- a/server/objects/metadata/PodcastMetadata.js +++ b/server/objects/metadata/PodcastMetadata.js @@ -75,10 +75,6 @@ class PodcastMetadata { } } - toJSONExpanded() { - return this.toJSONMinified() - } - clone() { return new PodcastMetadata(this.toJSON()) } diff --git a/server/routers/ApiRouter.js b/server/routers/ApiRouter.js index 56f43dbf..db9e66c5 100644 --- a/server/routers/ApiRouter.js +++ b/server/routers/ApiRouter.js @@ -105,7 +105,6 @@ class ApiRouter { this.router.post('/items/batch/scan', LibraryItemController.batchScan.bind(this)) this.router.get('/items/:id', LibraryItemController.middleware.bind(this), LibraryItemController.findOne.bind(this)) - this.router.patch('/items/:id', LibraryItemController.middleware.bind(this), LibraryItemController.update.bind(this)) this.router.delete('/items/:id', LibraryItemController.middleware.bind(this), LibraryItemController.delete.bind(this)) this.router.get('/items/:id/download', LibraryItemController.middleware.bind(this), LibraryItemController.download.bind(this)) this.router.patch('/items/:id/media', LibraryItemController.middleware.bind(this), LibraryItemController.updateMedia.bind(this)) @@ -531,109 +530,5 @@ class ApiRouter { }) return listeningStats } - - async createAuthorsAndSeriesForItemUpdate(mediaPayload, libraryId) { - if (mediaPayload.metadata) { - const mediaMetadata = mediaPayload.metadata - - // Create new authors if in payload - if (mediaMetadata.authors?.length) { - const newAuthors = [] - for (let i = 0; i < mediaMetadata.authors.length; i++) { - const authorName = (mediaMetadata.authors[i].name || '').trim() - if (!authorName) { - Logger.error(`[ApiRouter] Invalid author object, no name`, mediaMetadata.authors[i]) - mediaMetadata.authors[i].id = null - continue - } - - if (mediaMetadata.authors[i].id?.startsWith('new')) { - mediaMetadata.authors[i].id = null - } - - // Ensure the ID for the author exists - if (mediaMetadata.authors[i].id && !(await Database.checkAuthorExists(libraryId, mediaMetadata.authors[i].id))) { - Logger.warn(`[ApiRouter] Author id "${mediaMetadata.authors[i].id}" does not exist`) - mediaMetadata.authors[i].id = null - } - - if (!mediaMetadata.authors[i].id) { - let author = await Database.authorModel.getByNameAndLibrary(authorName, libraryId) - if (!author) { - author = await Database.authorModel.create({ - name: authorName, - lastFirst: Database.authorModel.getLastFirst(authorName), - libraryId - }) - Logger.debug(`[ApiRouter] Creating new author "${author.name}"`) - newAuthors.push(author) - // Update filter data - Database.addAuthorToFilterData(libraryId, author.name, author.id) - } - - // Update ID in original payload - mediaMetadata.authors[i].id = author.id - } - } - // Remove authors without an id - mediaMetadata.authors = mediaMetadata.authors.filter((au) => !!au.id) - if (newAuthors.length) { - 
SocketAuthority.emitter( - 'authors_added', - newAuthors.map((au) => au.toOldJSON()) - ) - } - } - - // Create new series if in payload - if (mediaMetadata.series && mediaMetadata.series.length) { - const newSeries = [] - for (let i = 0; i < mediaMetadata.series.length; i++) { - const seriesName = (mediaMetadata.series[i].name || '').trim() - if (!seriesName) { - Logger.error(`[ApiRouter] Invalid series object, no name`, mediaMetadata.series[i]) - mediaMetadata.series[i].id = null - continue - } - - if (mediaMetadata.series[i].id?.startsWith('new')) { - mediaMetadata.series[i].id = null - } - - // Ensure the ID for the series exists - if (mediaMetadata.series[i].id && !(await Database.checkSeriesExists(libraryId, mediaMetadata.series[i].id))) { - Logger.warn(`[ApiRouter] Series id "${mediaMetadata.series[i].id}" does not exist`) - mediaMetadata.series[i].id = null - } - - if (!mediaMetadata.series[i].id) { - let seriesItem = await Database.seriesModel.getByNameAndLibrary(seriesName, libraryId) - if (!seriesItem) { - seriesItem = await Database.seriesModel.create({ - name: seriesName, - nameIgnorePrefix: getTitleIgnorePrefix(seriesName), - libraryId - }) - Logger.debug(`[ApiRouter] Creating new series "${seriesItem.name}"`) - newSeries.push(seriesItem) - // Update filter data - Database.addSeriesToFilterData(libraryId, seriesItem.name, seriesItem.id) - } - - // Update ID in original payload - mediaMetadata.series[i].id = seriesItem.id - } - } - // Remove series without an id - mediaMetadata.series = mediaMetadata.series.filter((se) => se.id) - if (newSeries.length) { - SocketAuthority.emitter( - 'multiple_series_added', - newSeries.map((se) => se.toOldJSON()) - ) - } - } - } - } } module.exports = ApiRouter diff --git a/server/scanner/LibraryScanner.js b/server/scanner/LibraryScanner.js index c4f6410d..1e92efde 100644 --- a/server/scanner/LibraryScanner.js +++ b/server/scanner/LibraryScanner.js @@ -582,7 +582,7 @@ class LibraryScanner { } // Check if book dir group is already an item - let existingLibraryItem = await Database.libraryItemModel.findOneOld({ + let existingLibraryItem = await Database.libraryItemModel.findOneExpanded({ libraryId: library.id, path: potentialChildDirs }) @@ -606,17 +606,17 @@ class LibraryScanner { if (existingLibraryItem.path === fullPath) { const exists = await fs.pathExists(fullPath) if (!exists) { - Logger.info(`[LibraryScanner] Scanning file update group and library item was deleted "${existingLibraryItem.media.metadata.title}" - marking as missing`) - existingLibraryItem.setMissing() - await Database.updateLibraryItem(existingLibraryItem) - SocketAuthority.emitter('item_updated', existingLibraryItem.toJSONExpanded()) + Logger.info(`[LibraryScanner] Scanning file update group and library item was deleted "${existingLibraryItem.media.title}" - marking as missing`) + existingLibraryItem.isMissing = true + await existingLibraryItem.save() + SocketAuthority.emitter('item_updated', existingLibraryItem.toOldJSONExpanded()) itemGroupingResults[itemDir] = ScanResult.REMOVED continue } } // Scan library item for updates - Logger.debug(`[LibraryScanner] Folder update for relative path "${itemDir}" is in library item "${existingLibraryItem.media.metadata.title}" with id "${existingLibraryItem.id}" - scan for updates`) + Logger.debug(`[LibraryScanner] Folder update for relative path "${itemDir}" is in library item "${existingLibraryItem.media.title}" with id "${existingLibraryItem.id}" - scan for updates`) itemGroupingResults[itemDir] = await 
LibraryItemScanner.scanLibraryItem(existingLibraryItem.id, updatedLibraryItemDetails) continue } else if (library.settings.audiobooksOnly && !hasAudioFiles(fileUpdateGroup, itemDir)) { @@ -672,7 +672,7 @@ function isSingleMediaFile(fileUpdateGroup, itemDir) { async function findLibraryItemByItemToItemInoMatch(libraryId, fullPath) { const ino = await fileUtils.getIno(fullPath) if (!ino) return null - const existingLibraryItem = await Database.libraryItemModel.findOneOld({ + const existingLibraryItem = await Database.libraryItemModel.findOneExpanded({ libraryId: libraryId, ino: ino }) @@ -685,7 +685,7 @@ async function findLibraryItemByItemToFileInoMatch(libraryId, fullPath, isSingle // check if it was moved from another folder by comparing the ino to the library files const ino = await fileUtils.getIno(fullPath) if (!ino) return null - const existingLibraryItem = await Database.libraryItemModel.findOneOld( + const existingLibraryItem = await Database.libraryItemModel.findOneExpanded( [ { libraryId: libraryId @@ -711,7 +711,7 @@ async function findLibraryItemByFileToItemInoMatch(libraryId, fullPath, isSingle if (ino) itemFileInos.push(ino) } if (!itemFileInos.length) return null - const existingLibraryItem = await Database.libraryItemModel.findOneOld({ + const existingLibraryItem = await Database.libraryItemModel.findOneExpanded({ libraryId: libraryId, ino: { [sequelize.Op.in]: itemFileInos diff --git a/server/scanner/Scanner.js b/server/scanner/Scanner.js index 942c4d02..5d4e1cc5 100644 --- a/server/scanner/Scanner.js +++ b/server/scanner/Scanner.js @@ -30,14 +30,14 @@ class Scanner { /** * * @param {import('../routers/ApiRouter')} apiRouterCtx - * @param {import('../objects/LibraryItem')} libraryItem + * @param {import('../models/LibraryItem')} libraryItem * @param {QuickMatchOptions} options * @returns {Promise<{updated: boolean, libraryItem: import('../objects/LibraryItem')}>} */ async quickMatchLibraryItem(apiRouterCtx, libraryItem, options = {}) { const provider = options.provider || 'google' - const searchTitle = options.title || libraryItem.media.metadata.title - const searchAuthor = options.author || libraryItem.media.metadata.authorName + const searchTitle = options.title || libraryItem.media.title + const searchAuthor = options.author || libraryItem.media.authorName // If overrideCover and overrideDetails is not sent in options than use the server setting to determine if we should override if (options.overrideCover === undefined && options.overrideDetails === undefined && Database.serverSettings.scannerPreferMatchedMetadata) { @@ -52,11 +52,11 @@ class Scanner { let existingSeries = [] if (libraryItem.isBook) { - existingAuthors = libraryItem.media.metadata.authors.map((a) => a.id) - existingSeries = libraryItem.media.metadata.series.map((s) => s.id) + existingAuthors = libraryItem.media.authors.map((a) => a.id) + existingSeries = libraryItem.media.series.map((s) => s.id) - const searchISBN = options.isbn || libraryItem.media.metadata.isbn - const searchASIN = options.asin || libraryItem.media.metadata.asin + const searchISBN = options.isbn || libraryItem.media.isbn + const searchASIN = options.asin || libraryItem.media.asin const results = await BookFinder.search(libraryItem, provider, searchTitle, searchAuthor, searchISBN, searchASIN, { maxFuzzySearches: 2 }) if (!results.length) { @@ -69,15 +69,21 @@ class Scanner { // Update cover if not set OR overrideCover flag if (matchData.cover && (!libraryItem.media.coverPath || options.overrideCover)) { Logger.debug(`[Scanner] Updating 
cover "${matchData.cover}"`) - var coverResult = await CoverManager.downloadCoverFromUrl(libraryItem, matchData.cover) - if (!coverResult || coverResult.error || !coverResult.cover) { - Logger.warn(`[Scanner] Match cover "${matchData.cover}" failed to use: ${coverResult ? coverResult.error : 'Unknown Error'}`) + const coverResult = await CoverManager.downloadCoverFromUrlNew(matchData.cover, libraryItem.id, libraryItem.isFile ? null : libraryItem.path) + if (coverResult.error) { + Logger.warn(`[Scanner] Match cover "${matchData.cover}" failed to use: ${coverResult.error}`) } else { + libraryItem.media.coverPath = coverResult.cover + libraryItem.media.changed('coverPath', true) // Cover path may be the same but this forces the update hasUpdated = true } } - updatePayload = await this.quickMatchBookBuildUpdatePayload(libraryItem, matchData, options) + const bookBuildUpdateData = await this.quickMatchBookBuildUpdatePayload(apiRouterCtx, libraryItem, matchData, options) + updatePayload = bookBuildUpdateData.updatePayload + if (bookBuildUpdateData.hasSeriesUpdates || bookBuildUpdateData.hasAuthorUpdates) { + hasUpdated = true + } } else if (libraryItem.isPodcast) { // Podcast quick match const results = await PodcastFinder.search(searchTitle) @@ -91,10 +97,12 @@ class Scanner { // Update cover if not set OR overrideCover flag if (matchData.cover && (!libraryItem.media.coverPath || options.overrideCover)) { Logger.debug(`[Scanner] Updating cover "${matchData.cover}"`) - var coverResult = await CoverManager.downloadCoverFromUrl(libraryItem, matchData.cover) - if (!coverResult || coverResult.error || !coverResult.cover) { - Logger.warn(`[Scanner] Match cover "${matchData.cover}" failed to use: ${coverResult ? coverResult.error : 'Unknown Error'}`) + const coverResult = await CoverManager.downloadCoverFromUrlNew(matchData.cover, libraryItem.id, libraryItem.path) + if (coverResult.error) { + Logger.warn(`[Scanner] Match cover "${matchData.cover}" failed to use: ${coverResult.error}`) } else { + libraryItem.media.coverPath = coverResult.cover + libraryItem.media.changed('coverPath', true) // Cover path may be the same but this forces the update hasUpdated = true } } @@ -103,44 +111,45 @@ class Scanner { } if (Object.keys(updatePayload).length) { - Logger.debug('[Scanner] Updating details', updatePayload) - if (libraryItem.media.update(updatePayload)) { + Logger.debug('[Scanner] Updating details with payload', updatePayload) + libraryItem.media.set(updatePayload) + if (libraryItem.media.changed()) { + Logger.debug(`[Scanner] Updating library item "${libraryItem.media.title}" keys`, libraryItem.media.changed()) hasUpdated = true } } if (hasUpdated) { - if (libraryItem.isPodcast && libraryItem.media.metadata.feedUrl) { + if (libraryItem.isPodcast && libraryItem.media.feedURL) { // Quick match all unmatched podcast episodes await this.quickMatchPodcastEpisodes(libraryItem, options) } - await Database.updateLibraryItem(libraryItem) - SocketAuthority.emitter('item_updated', libraryItem.toJSONExpanded()) + await libraryItem.media.save() - // Check if any authors or series are now empty and should be removed - if (libraryItem.isBook) { - const authorsRemoved = existingAuthors.filter((aid) => !libraryItem.media.metadata.authors.find((au) => au.id === aid)) - const seriesRemoved = existingSeries.filter((sid) => !libraryItem.media.metadata.series.find((se) => se.id === sid)) + libraryItem.changed('updatedAt', true) + await libraryItem.save() - if (authorsRemoved.length) { - await 
apiRouterCtx.checkRemoveAuthorsWithNoBooks(authorsRemoved) - } - if (seriesRemoved.length) { - await apiRouterCtx.checkRemoveEmptySeries(seriesRemoved) - } - } + await libraryItem.saveMetadataFile() + + SocketAuthority.emitter('item_updated', libraryItem.toOldJSONExpanded()) } return { updated: hasUpdated, - libraryItem: libraryItem.toJSONExpanded() + libraryItem: libraryItem.toOldJSONExpanded() } } + /** + * + * @param {import('../models/LibraryItem')} libraryItem + * @param {*} matchData + * @param {QuickMatchOptions} options + * @returns {Map} - Update payload + */ quickMatchPodcastBuildUpdatePayload(libraryItem, matchData, options) { const updatePayload = {} - updatePayload.metadata = {} const matchDataTransformed = { title: matchData.title || null, @@ -158,7 +167,7 @@ class Scanner { for (const key in matchDataTransformed) { if (matchDataTransformed[key]) { if (key === 'genres') { - if (!libraryItem.media.metadata.genres.length || options.overrideDetails) { + if (!libraryItem.media.genres.length || options.overrideDetails) { var genresArray = [] if (Array.isArray(matchDataTransformed[key])) genresArray = [...matchDataTransformed[key]] else { @@ -169,46 +178,42 @@ class Scanner { .map((v) => v.trim()) .filter((v) => !!v) } - updatePayload.metadata[key] = genresArray + updatePayload[key] = genresArray } - } else if (libraryItem.media.metadata[key] !== matchDataTransformed[key] && (!libraryItem.media.metadata[key] || options.overrideDetails)) { - updatePayload.metadata[key] = matchDataTransformed[key] + } else if (libraryItem.media[key] !== matchDataTransformed[key] && (!libraryItem.media[key] || options.overrideDetails)) { + updatePayload[key] = matchDataTransformed[key] } } } - if (!Object.keys(updatePayload.metadata).length) { - delete updatePayload.metadata - } - return updatePayload } /** * - * @param {import('../objects/LibraryItem')} libraryItem + * @param {import('../routers/ApiRouter')} apiRouterCtx + * @param {import('../models/LibraryItem')} libraryItem * @param {*} matchData * @param {QuickMatchOptions} options - * @returns + * @returns {Promise<{updatePayload: Map, seriesIdsRemoved: string[], hasSeriesUpdates: boolean, authorIdsRemoved: string[], hasAuthorUpdates: boolean}>} */ - async quickMatchBookBuildUpdatePayload(libraryItem, matchData, options) { + async quickMatchBookBuildUpdatePayload(apiRouterCtx, libraryItem, matchData, options) { // Update media metadata if not set OR overrideDetails flag const detailKeysToUpdate = ['title', 'subtitle', 'description', 'narrator', 'publisher', 'publishedYear', 'genres', 'tags', 'language', 'explicit', 'abridged', 'asin', 'isbn'] const updatePayload = {} - updatePayload.metadata = {} for (const key in matchData) { if (matchData[key] && detailKeysToUpdate.includes(key)) { if (key === 'narrator') { - if (!libraryItem.media.metadata.narratorName || options.overrideDetails) { - updatePayload.metadata.narrators = matchData[key] + if (!libraryItem.media.narrators?.length || options.overrideDetails) { + updatePayload.narrators = matchData[key] .split(',') .map((v) => v.trim()) .filter((v) => !!v) } } else if (key === 'genres') { - if (!libraryItem.media.metadata.genres.length || options.overrideDetails) { - var genresArray = [] + if (!libraryItem.media.genres.length || options.overrideDetails) { + let genresArray = [] if (Array.isArray(matchData[key])) genresArray = [...matchData[key]] else { // Genres should always be passed in as an array but just incase handle a string @@ -218,11 +223,11 @@ class Scanner { .map((v) => v.trim()) 
.filter((v) => !!v) } - updatePayload.metadata[key] = genresArray + updatePayload[key] = genresArray } } else if (key === 'tags') { if (!libraryItem.media.tags.length || options.overrideDetails) { - var tagsArray = [] + let tagsArray = [] if (Array.isArray(matchData[key])) tagsArray = [...matchData[key]] else tagsArray = matchData[key] @@ -231,94 +236,174 @@ class Scanner { .filter((v) => !!v) updatePayload[key] = tagsArray } - } else if (!libraryItem.media.metadata[key] || options.overrideDetails) { - updatePayload.metadata[key] = matchData[key] + } else if (!libraryItem.media[key] || options.overrideDetails) { + updatePayload[key] = matchData[key] } } } // Add or set author if not set - if (matchData.author && (!libraryItem.media.metadata.authorName || options.overrideDetails)) { + let hasAuthorUpdates = false + if (matchData.author && (!libraryItem.media.authorName || options.overrideDetails)) { if (!Array.isArray(matchData.author)) { matchData.author = matchData.author .split(',') .map((au) => au.trim()) .filter((au) => !!au) } - const authorPayload = [] + const authorIdsRemoved = [] for (const authorName of matchData.author) { - let author = await Database.authorModel.getByNameAndLibrary(authorName, libraryItem.libraryId) - if (!author) { - author = await Database.authorModel.create({ - name: authorName, - lastFirst: Database.authorModel.getLastFirst(authorName), - libraryId: libraryItem.libraryId - }) - SocketAuthority.emitter('author_added', author.toOldJSON()) - // Update filter data - Database.addAuthorToFilterData(libraryItem.libraryId, author.name, author.id) + const existingAuthor = libraryItem.media.authors.find((a) => a.name.toLowerCase() === authorName.toLowerCase()) + if (!existingAuthor) { + let author = await Database.authorModel.getByNameAndLibrary(authorName, libraryItem.libraryId) + if (!author) { + author = await Database.authorModel.create({ + name: authorName, + lastFirst: Database.authorModel.getLastFirst(authorName), + libraryId: libraryItem.libraryId + }) + SocketAuthority.emitter('author_added', author.toOldJSON()) + // Update filter data + Database.addAuthorToFilterData(libraryItem.libraryId, author.name, author.id) + + await Database.bookAuthorModel + .create({ + authorId: author.id, + bookId: libraryItem.media.id + }) + .then(() => { + Logger.info(`[Scanner] quickMatchBookBuildUpdatePayload: Added author "${author.name}" to "${libraryItem.media.title}"`) + libraryItem.media.authors.push(author) + hasAuthorUpdates = true + }) + } + } + const authorsRemoved = libraryItem.media.authors.filter((a) => !matchData.author.find((ma) => ma.toLowerCase() === a.name.toLowerCase())) + if (authorsRemoved.length) { + for (const author of authorsRemoved) { + await Database.bookAuthorModel.destroy({ where: { authorId: author.id, bookId: libraryItem.media.id } }) + libraryItem.media.authors = libraryItem.media.authors.filter((a) => a.id !== author.id) + authorIdsRemoved.push(author.id) + Logger.info(`[Scanner] quickMatchBookBuildUpdatePayload: Removed author "${author.name}" from "${libraryItem.media.title}"`) + } + hasAuthorUpdates = true } - authorPayload.push(author.toJSONMinimal()) } - updatePayload.metadata.authors = authorPayload + + // For all authors removed from book, check if they are empty now and should be removed + if (authorIdsRemoved.length) { + await apiRouterCtx.checkRemoveAuthorsWithNoBooks(authorIdsRemoved) + } } // Add or set series if not set - if (matchData.series && (!libraryItem.media.metadata.seriesName || options.overrideDetails)) { + let 
hasSeriesUpdates = false + if (matchData.series && (!libraryItem.media.seriesName || options.overrideDetails)) { if (!Array.isArray(matchData.series)) matchData.series = [{ series: matchData.series, sequence: matchData.sequence }] - const seriesPayload = [] + const seriesIdsRemoved = [] for (const seriesMatchItem of matchData.series) { - let seriesItem = await Database.seriesModel.getByNameAndLibrary(seriesMatchItem.series, libraryItem.libraryId) - if (!seriesItem) { - seriesItem = await Database.seriesModel.create({ - name: seriesMatchItem.series, - nameIgnorePrefix: getTitleIgnorePrefix(seriesMatchItem.series), - libraryId: libraryItem.libraryId + const existingSeries = libraryItem.media.series.find((s) => s.name.toLowerCase() === seriesMatchItem.series.toLowerCase()) + if (existingSeries) { + if (existingSeries.bookSeries.sequence !== seriesMatchItem.sequence) { + existingSeries.bookSeries.sequence = seriesMatchItem.sequence + await existingSeries.bookSeries.save() + Logger.info(`[Scanner] quickMatchBookBuildUpdatePayload: Updated series sequence for "${existingSeries.name}" to ${seriesMatchItem.sequence} in "${libraryItem.media.title}"`) + hasSeriesUpdates = true + } + } else { + let seriesItem = await Database.seriesModel.getByNameAndLibrary(seriesMatchItem.series, libraryItem.libraryId) + if (!seriesItem) { + seriesItem = await Database.seriesModel.create({ + name: seriesMatchItem.series, + nameIgnorePrefix: getTitleIgnorePrefix(seriesMatchItem.series), + libraryId: libraryItem.libraryId + }) + // Update filter data + Database.addSeriesToFilterData(libraryItem.libraryId, seriesItem.name, seriesItem.id) + SocketAuthority.emitter('series_added', seriesItem.toOldJSON()) + } + const bookSeries = await Database.bookSeriesModel.create({ + seriesId: seriesItem.id, + bookId: libraryItem.media.id, + sequence: seriesMatchItem.sequence }) - // Update filter data - Database.addSeriesToFilterData(libraryItem.libraryId, seriesItem.name, seriesItem.id) - SocketAuthority.emitter('series_added', seriesItem.toOldJSON()) + seriesItem.bookSeries = bookSeries + libraryItem.media.series.push(seriesItem) + Logger.info(`[Scanner] quickMatchBookBuildUpdatePayload: Added series "${seriesItem.name}" to "${libraryItem.media.title}"`) + hasSeriesUpdates = true + } + const seriesRemoved = libraryItem.media.series.filter((s) => !matchData.series.find((ms) => ms.series.toLowerCase() === s.name.toLowerCase())) + if (seriesRemoved.length) { + for (const series of seriesRemoved) { + await series.bookSeries.destroy() + libraryItem.media.series = libraryItem.media.series.filter((s) => s.id !== series.id) + seriesIdsRemoved.push(series.id) + Logger.info(`[Scanner] quickMatchBookBuildUpdatePayload: Removed series "${series.name}" from "${libraryItem.media.title}"`) + } + hasSeriesUpdates = true } - seriesPayload.push(seriesItem.toJSONMinimal(seriesMatchItem.sequence)) } - updatePayload.metadata.series = seriesPayload + + // For all series removed from book, check if it is empty now and should be removed + if (seriesIdsRemoved.length) { + await apiRouterCtx.checkRemoveEmptySeries(seriesIdsRemoved) + } } - if (!Object.keys(updatePayload.metadata).length) { - delete updatePayload.metadata + return { + updatePayload, + hasSeriesUpdates, + hasAuthorUpdates } - - return updatePayload } + /** + * + * @param {import('../models/LibraryItem')} libraryItem + * @param {QuickMatchOptions} options + * @returns {Promise} - Number of episodes updated + */ async quickMatchPodcastEpisodes(libraryItem, options = {}) { - const 
episodesToQuickMatch = libraryItem.media.episodes.filter((ep) => !ep.enclosureUrl) // Only quick match episodes without enclosure - if (!episodesToQuickMatch.length) return false + /** @type {import('../models/PodcastEpisode')[]} */ + const episodesToQuickMatch = libraryItem.media.podcastEpisodes.filter((ep) => !ep.enclosureURL) // Only quick match episodes that are not already matched + if (!episodesToQuickMatch.length) return 0 - const feed = await getPodcastFeed(libraryItem.media.metadata.feedUrl) + const feed = await getPodcastFeed(libraryItem.media.feedURL) if (!feed) { - Logger.error(`[Scanner] quickMatchPodcastEpisodes: Unable to quick match episodes feed not found for "${libraryItem.media.metadata.feedUrl}"`) - return false + Logger.error(`[Scanner] quickMatchPodcastEpisodes: Unable to quick match episodes feed not found for "${libraryItem.media.feedURL}"`) + return 0 } let numEpisodesUpdated = 0 for (const episode of episodesToQuickMatch) { const episodeMatches = findMatchingEpisodesInFeed(feed, episode.title) - if (episodeMatches && episodeMatches.length) { - const wasUpdated = this.updateEpisodeWithMatch(libraryItem, episode, episodeMatches[0].episode, options) + if (episodeMatches?.length) { + const wasUpdated = await this.updateEpisodeWithMatch(episode, episodeMatches[0].episode, options) if (wasUpdated) numEpisodesUpdated++ } } + if (numEpisodesUpdated) { + Logger.info(`[Scanner] quickMatchPodcastEpisodes: Updated ${numEpisodesUpdated} episodes for "${libraryItem.media.title}"`) + } return numEpisodesUpdated } - updateEpisodeWithMatch(libraryItem, episode, episodeToMatch, options = {}) { + /** + * + * @param {import('../models/PodcastEpisode')} episode + * @param {import('../utils/podcastUtils').RssPodcastEpisode} episodeToMatch + * @param {QuickMatchOptions} options + * @returns {Promise} - true if episode was updated + */ + async updateEpisodeWithMatch(episode, episodeToMatch, options = {}) { Logger.debug(`[Scanner] quickMatchPodcastEpisodes: Found episode match for "${episode.title}" => ${episodeToMatch.title}`) const matchDataTransformed = { title: episodeToMatch.title || '', subtitle: episodeToMatch.subtitle || '', description: episodeToMatch.description || '', - enclosure: episodeToMatch.enclosure || null, + enclosureURL: episodeToMatch.enclosure?.url || null, + enclosureSize: episodeToMatch.enclosure?.length || null, + enclosureType: episodeToMatch.enclosure?.type || null, episode: episodeToMatch.episode || '', episodeType: episodeToMatch.episodeType || 'full', season: episodeToMatch.season || '', @@ -328,20 +413,19 @@ class Scanner { const updatePayload = {} for (const key in matchDataTransformed) { if (matchDataTransformed[key]) { - if (key === 'enclosure') { - if (!episode.enclosure || JSON.stringify(episode.enclosure) !== JSON.stringify(matchDataTransformed.enclosure)) { - updatePayload[key] = { - ...matchDataTransformed.enclosure - } - } - } else if (episode[key] !== matchDataTransformed[key] && (!episode[key] || options.overrideDetails)) { + if (episode[key] !== matchDataTransformed[key] && (!episode[key] || options.overrideDetails)) { updatePayload[key] = matchDataTransformed[key] } } } if (Object.keys(updatePayload).length) { - return libraryItem.media.updateEpisode(episode.id, updatePayload) + episode.set(updatePayload) + if (episode.changed()) { + Logger.debug(`[Scanner] quickMatchPodcastEpisodes: Updating episode "${episode.title}" keys`, episode.changed()) + await episode.save() + return true + } } return false } @@ -351,7 +435,7 @@ class Scanner { * * 
@param {import('../routers/ApiRouter')} apiRouterCtx * @param {import('../models/Library')} library - * @param {import('../objects/LibraryItem')[]} libraryItems + * @param {import('../models/LibraryItem')[]} libraryItems * @param {LibraryScan} libraryScan * @returns {Promise} false if scan canceled */ @@ -359,20 +443,20 @@ class Scanner { for (let i = 0; i < libraryItems.length; i++) { const libraryItem = libraryItems[i] - if (libraryItem.media.metadata.asin && library.settings.skipMatchingMediaWithAsin) { - Logger.debug(`[Scanner] matchLibraryItems: Skipping "${libraryItem.media.metadata.title}" because it already has an ASIN (${i + 1} of ${libraryItems.length})`) + if (libraryItem.media.asin && library.settings.skipMatchingMediaWithAsin) { + Logger.debug(`[Scanner] matchLibraryItems: Skipping "${libraryItem.media.title}" because it already has an ASIN (${i + 1} of ${libraryItems.length})`) continue } - if (libraryItem.media.metadata.isbn && library.settings.skipMatchingMediaWithIsbn) { - Logger.debug(`[Scanner] matchLibraryItems: Skipping "${libraryItem.media.metadata.title}" because it already has an ISBN (${i + 1} of ${libraryItems.length})`) + if (libraryItem.media.isbn && library.settings.skipMatchingMediaWithIsbn) { + Logger.debug(`[Scanner] matchLibraryItems: Skipping "${libraryItem.media.title}" because it already has an ISBN (${i + 1} of ${libraryItems.length})`) continue } - Logger.debug(`[Scanner] matchLibraryItems: Quick matching "${libraryItem.media.metadata.title}" (${i + 1} of ${libraryItems.length})`) + Logger.debug(`[Scanner] matchLibraryItems: Quick matching "${libraryItem.media.title}" (${i + 1} of ${libraryItems.length})`) const result = await this.quickMatchLibraryItem(apiRouterCtx, libraryItem, { provider: library.provider }) if (result.warning) { - Logger.warn(`[Scanner] matchLibraryItems: Match warning ${result.warning} for library item "${libraryItem.media.metadata.title}"`) + Logger.warn(`[Scanner] matchLibraryItems: Match warning ${result.warning} for library item "${libraryItem.media.title}"`) } else if (result.updated) { libraryScan.resultsUpdated++ } @@ -430,9 +514,8 @@ class Scanner { offset += limit hasMoreChunks = libraryItems.length === limit - let oldLibraryItems = libraryItems.map((li) => Database.libraryItemModel.getOldLibraryItem(li)) - const shouldContinue = await this.matchLibraryItemsChunk(apiRouterCtx, library, oldLibraryItems, libraryScan) + const shouldContinue = await this.matchLibraryItemsChunk(apiRouterCtx, library, libraryItems, libraryScan) if (!shouldContinue) { isCanceled = true break diff --git a/server/utils/podcastUtils.js b/server/utils/podcastUtils.js index d28c3b9d..bc9892b2 100644 --- a/server/utils/podcastUtils.js +++ b/server/utils/podcastUtils.js @@ -330,6 +330,12 @@ module.exports.findMatchingEpisodes = async (feedUrl, searchTitle) => { return this.findMatchingEpisodesInFeed(feed, searchTitle) } +/** + * + * @param {RssPodcast} feed + * @param {string} searchTitle + * @returns {Array<{ episode: RssPodcastEpisode, levenshtein: number }>} + */ module.exports.findMatchingEpisodesInFeed = (feed, searchTitle) => { searchTitle = searchTitle.toLowerCase().trim() if (!feed?.episodes) {