Merge pull request #3800 from advplyr/migrate-library-item-in-scanner

Migrate to new library item in scanner
advplyr 2025-01-05 14:31:42 -06:00 committed by GitHub
commit 57d742b862
35 changed files with 568 additions and 2187 deletions

View File

@@ -401,23 +401,6 @@ class Database {
return this.models.setting.updateSettingObj(settings.toJSON())
}
/**
* Save metadata file and update library item
*
* @param {import('./objects/LibraryItem')} oldLibraryItem
* @returns {Promise<boolean>}
*/
async updateLibraryItem(oldLibraryItem) {
if (!this.sequelize) return false
await oldLibraryItem.saveMetadata()
const updated = await this.models.libraryItem.fullUpdateFromOld(oldLibraryItem)
// Clear library filter data cache
if (updated) {
delete this.libraryFilterData[oldLibraryItem.libraryId]
}
return updated
}
getPlaybackSessions(where = null) {
if (!this.sequelize) return false
return this.models.playbackSession.getOldPlaybackSessions(where)
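Note: the removed Database.updateLibraryItem helper bundled metadata persistence and filter-cache invalidation around the old library item object. Callers migrated in this PR persist directly on the Sequelize model instead. A minimal sketch of that replacement pattern, using only calls that appear elsewhere in this diff (the helper name itself is illustrative, not part of the PR):

// Illustrative helper mirroring what the controllers below now do inline.
async function persistLibraryItemUpdate(libraryItem) {
  libraryItem.changed('updatedAt', true) // force updatedAt even when only associations changed
  await libraryItem.save()
  await libraryItem.saveMetadataFile() // rewrite the item's metadata file on disk
  SocketAuthority.emitter('item_updated', libraryItem.toOldJSONExpanded())
}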

View File

@@ -242,8 +242,18 @@ class AuthorController {
await CacheManager.purgeImageCache(req.author.id) // Purge cache
}

+// Load library items so that metadata file can be updated
+const allItemsWithAuthor = await Database.authorModel.getAllLibraryItemsForAuthor(req.author.id)
+allItemsWithAuthor.forEach((libraryItem) => {
+libraryItem.media.authors = libraryItem.media.authors.filter((au) => au.id !== req.author.id)
+})

await req.author.destroy()

+for (const libraryItem of allItemsWithAuthor) {
+await libraryItem.saveMetadataFile()
+}

SocketAuthority.emitter('author_removed', req.author.toOldJSON())

// Update filter data

View File

@@ -81,31 +81,6 @@ class LibraryItemController {
res.json(req.libraryItem.toOldJSON())
}
/**
* PATCH: /api/items/:id
*
* @deprecated
* Use the updateMedia /api/items/:id/media endpoint instead or updateCover /api/items/:id/cover
*
* @param {LibraryItemControllerRequest} req
* @param {Response} res
*/
async update(req, res) {
// Item has cover and update is removing cover so purge it from cache
if (req.libraryItem.media.coverPath && req.body.media && (req.body.media.coverPath === '' || req.body.media.coverPath === null)) {
await CacheManager.purgeCoverCache(req.libraryItem.id)
}
const oldLibraryItem = Database.libraryItemModel.getOldLibraryItem(req.libraryItem)
const hasUpdates = oldLibraryItem.update(req.body)
if (hasUpdates) {
Logger.debug(`[LibraryItemController] Updated now saving`)
await Database.updateLibraryItem(oldLibraryItem)
SocketAuthority.emitter('item_updated', oldLibraryItem.toJSONExpanded())
}
res.json(oldLibraryItem.toJSON())
}
/**
* DELETE: /api/items/:id
* Delete library item. Will delete from database and file system if hard delete is requested.
@@ -219,11 +194,6 @@ class LibraryItemController {
if (res.writableEnded || res.headersSent) return
}
// Book specific
if (req.libraryItem.isBook) {
await this.createAuthorsAndSeriesForItemUpdate(mediaPayload, req.libraryItem.libraryId)
}
// Podcast specific
let isPodcastAutoDownloadUpdated = false
if (req.libraryItem.isPodcast) {
@@ -234,41 +204,56 @@ class LibraryItemController {
}
}
-// Book specific - Get all series being removed from this item
-let seriesRemoved = []
-if (req.libraryItem.isBook && mediaPayload.metadata?.series) {
-const seriesIdsInUpdate = mediaPayload.metadata.series?.map((se) => se.id) || []
-seriesRemoved = req.libraryItem.media.series.filter((se) => !seriesIdsInUpdate.includes(se.id))
+let hasUpdates = (await req.libraryItem.media.updateFromRequest(mediaPayload)) || mediaPayload.url
+if (req.libraryItem.isBook && Array.isArray(mediaPayload.metadata?.series)) {
+const seriesUpdateData = await req.libraryItem.media.updateSeriesFromRequest(mediaPayload.metadata.series, req.libraryItem.libraryId)
+if (seriesUpdateData?.seriesRemoved.length) {
+// Check remove empty series
+Logger.debug(`[LibraryItemController] Series were removed from book. Check if series are now empty.`)
+await this.checkRemoveEmptySeries(seriesUpdateData.seriesRemoved.map((se) => se.id))
+}
+if (seriesUpdateData?.seriesAdded.length) {
+// Add series to filter data
+seriesUpdateData.seriesAdded.forEach((se) => {
+Database.addSeriesToFilterData(req.libraryItem.libraryId, se.name, se.id)
+})
+}
+if (seriesUpdateData?.hasUpdates) {
+hasUpdates = true
+}
}
-let authorsRemoved = []
-if (req.libraryItem.isBook && mediaPayload.metadata?.authors) {
-const authorIdsInUpdate = mediaPayload.metadata.authors.map((au) => au.id)
-authorsRemoved = req.libraryItem.media.authors.filter((au) => !authorIdsInUpdate.includes(au.id))
+if (req.libraryItem.isBook && Array.isArray(mediaPayload.metadata?.authors)) {
+const authorNames = mediaPayload.metadata.authors.map((au) => (typeof au.name === 'string' ? au.name.trim() : null)).filter((au) => au)
+const authorUpdateData = await req.libraryItem.media.updateAuthorsFromRequest(authorNames, req.libraryItem.libraryId)
+if (authorUpdateData?.authorsRemoved.length) {
+// Check remove empty authors
+Logger.debug(`[LibraryItemController] Authors were removed from book. Check if authors are now empty.`)
+await this.checkRemoveAuthorsWithNoBooks(authorUpdateData.authorsRemoved.map((au) => au.id))
+hasUpdates = true
+}
+if (authorUpdateData?.authorsAdded.length) {
+// Add authors to filter data
+authorUpdateData.authorsAdded.forEach((au) => {
+Database.addAuthorToFilterData(req.libraryItem.libraryId, au.name, au.id)
+})
+hasUpdates = true
+}
}
-const hasUpdates = (await req.libraryItem.media.updateFromRequest(mediaPayload)) || mediaPayload.url
if (hasUpdates) {
req.libraryItem.changed('updatedAt', true)
await req.libraryItem.save()
+await req.libraryItem.saveMetadataFile()
if (isPodcastAutoDownloadUpdated) {
this.cronManager.checkUpdatePodcastCron(req.libraryItem)
}
Logger.debug(`[LibraryItemController] Updated library item media ${req.libraryItem.media.title}`)
SocketAuthority.emitter('item_updated', req.libraryItem.toOldJSONExpanded())
-if (authorsRemoved.length) {
-// Check remove empty authors
-Logger.debug(`[LibraryItemController] Authors were removed from book. Check if authors are now empty.`)
-await this.checkRemoveAuthorsWithNoBooks(authorsRemoved.map((au) => au.id))
-}
-if (seriesRemoved.length) {
-// Check remove empty series
-Logger.debug(`[LibraryItemController] Series were removed from book. Check if series are now empty.`)
-await this.checkRemoveEmptySeries(seriesRemoved.map((se) => se.id))
-}
}
res.json({
updated: hasUpdates,
@@ -527,8 +512,7 @@ class LibraryItemController {
options.overrideDetails = !!reqBody.overrideDetails
}
-const oldLibraryItem = Database.libraryItemModel.getOldLibraryItem(req.libraryItem)
-var matchResult = await Scanner.quickMatchLibraryItem(this, oldLibraryItem, options)
+const matchResult = await Scanner.quickMatchLibraryItem(this, req.libraryItem, options)
res.json(matchResult)
}
@@ -640,26 +624,44 @@ class LibraryItemController {
const mediaPayload = updatePayload.mediaPayload
const libraryItem = libraryItems.find((li) => li.id === updatePayload.id)
-await this.createAuthorsAndSeriesForItemUpdate(mediaPayload, libraryItem.libraryId)
-if (libraryItem.isBook) {
-if (Array.isArray(mediaPayload.metadata?.series)) {
-const seriesIdsInUpdate = mediaPayload.metadata.series.map((se) => se.id)
-const seriesRemoved = libraryItem.media.series.filter((se) => !seriesIdsInUpdate.includes(se.id))
-seriesIdsRemoved.push(...seriesRemoved.map((se) => se.id))
+let hasUpdates = await libraryItem.media.updateFromRequest(mediaPayload)
+if (libraryItem.isBook && Array.isArray(mediaPayload.metadata?.series)) {
+const seriesUpdateData = await libraryItem.media.updateSeriesFromRequest(mediaPayload.metadata.series, libraryItem.libraryId)
+if (seriesUpdateData?.seriesRemoved.length) {
+seriesIdsRemoved.push(...seriesUpdateData.seriesRemoved.map((se) => se.id))
}
-if (Array.isArray(mediaPayload.metadata?.authors)) {
-const authorIdsInUpdate = mediaPayload.metadata.authors.map((au) => au.id)
-const authorsRemoved = libraryItem.media.authors.filter((au) => !authorIdsInUpdate.includes(au.id))
-authorIdsRemoved.push(...authorsRemoved.map((au) => au.id))
+if (seriesUpdateData?.seriesAdded.length) {
+seriesUpdateData.seriesAdded.forEach((se) => {
+Database.addSeriesToFilterData(libraryItem.libraryId, se.name, se.id)
+})
+}
+if (seriesUpdateData?.hasUpdates) {
+hasUpdates = true
+}
+}
+if (libraryItem.isBook && Array.isArray(mediaPayload.metadata?.authors)) {
+const authorNames = mediaPayload.metadata.authors.map((au) => (typeof au.name === 'string' ? au.name.trim() : null)).filter((au) => au)
+const authorUpdateData = await libraryItem.media.updateAuthorsFromRequest(authorNames, libraryItem.libraryId)
+if (authorUpdateData?.authorsRemoved.length) {
+authorIdsRemoved.push(...authorUpdateData.authorsRemoved.map((au) => au.id))
+hasUpdates = true
+}
+if (authorUpdateData?.authorsAdded.length) {
+authorUpdateData.authorsAdded.forEach((au) => {
+Database.addAuthorToFilterData(libraryItem.libraryId, au.name, au.id)
+})
+hasUpdates = true
}
}
-const hasUpdates = await libraryItem.media.updateFromRequest(mediaPayload)
if (hasUpdates) {
libraryItem.changed('updatedAt', true)
await libraryItem.save()
+await libraryItem.saveMetadataFile()
Logger.debug(`[LibraryItemController] Updated library item media "${libraryItem.media.title}"`)
SocketAuthority.emitter('item_updated', libraryItem.toOldJSONExpanded())
itemsUpdated++
@@ -739,8 +741,7 @@ class LibraryItemController {
}
for (const libraryItem of libraryItems) {
-const oldLibraryItem = Database.libraryItemModel.getOldLibraryItem(libraryItem)
-const matchResult = await Scanner.quickMatchLibraryItem(this, oldLibraryItem, options)
+const matchResult = await Scanner.quickMatchLibraryItem(this, libraryItem, options)
if (matchResult.updated) {
itemsUpdated++
} else if (matchResult.warning) {
@@ -891,6 +892,8 @@ class LibraryItemController {
req.libraryItem.media.changed('chapters', true)
await req.libraryItem.media.save()
+await req.libraryItem.saveMetadataFile()
SocketAuthority.emitter('item_updated', req.libraryItem.toOldJSONExpanded())
}
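Taken together, these hunks change what a book media update keys on: authors and series in the payload are now matched by name (and created in the library when missing) instead of by id. A hedged sketch of the payload shape the new code path expects; the values are illustrative:

// Illustrative body for the media update endpoints handled above.
const mediaPayload = {
  metadata: {
    authors: [{ name: 'Jane Doe' }, { name: 'John Smith' }], // resolved or created by name
    series: [{ name: 'Example Series', sequence: '2' }] // sequence must be a string, otherwise it is stored as null
  }
}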

View File

@@ -375,11 +375,9 @@ class PodcastController {
}
const overrideDetails = req.query.override === '1'
-const oldLibraryItem = Database.libraryItemModel.getOldLibraryItem(req.libraryItem)
-const episodesUpdated = await Scanner.quickMatchPodcastEpisodes(oldLibraryItem, { overrideDetails })
+const episodesUpdated = await Scanner.quickMatchPodcastEpisodes(req.libraryItem, { overrideDetails })
if (episodesUpdated) {
-await Database.updateLibraryItem(oldLibraryItem)
-SocketAuthority.emitter('item_updated', oldLibraryItem.toJSONExpanded())
+SocketAuthority.emitter('item_updated', req.libraryItem.toOldJSONExpanded())
}
res.json({

View File

@@ -24,7 +24,7 @@ class SearchController {
*/
async findBooks(req, res) {
const id = req.query.id
-const libraryItem = await Database.libraryItemModel.getOldById(id)
+const libraryItem = await Database.libraryItemModel.getExpandedById(id)
const provider = req.query.provider || 'google'
const title = req.query.title || ''
const author = req.query.author || ''

View File

@@ -7,6 +7,11 @@ const Database = require('../Database')
* @property {import('../models/User')} user
*
* @typedef {Request & RequestUserObject} RequestWithUser
+*
+* @typedef RequestEntityObject
+* @property {import('../models/LibraryItem')} libraryItem
+*
+* @typedef {RequestWithUser & RequestEntityObject} RequestWithLibraryItem
*/

class ToolsController {
@@ -18,7 +23,7 @@ class ToolsController {
*
* @this import('../routers/ApiRouter')
*
-* @param {RequestWithUser} req
+* @param {RequestWithLibraryItem} req
* @param {Response} res
*/
async encodeM4b(req, res) {
@@ -27,12 +32,12 @@ class ToolsController {
return res.status(404).send('Audiobook not found')
}
-if (req.libraryItem.mediaType !== 'book') {
+if (!req.libraryItem.isBook) {
Logger.error(`[MiscController] encodeM4b: Invalid library item ${req.params.id}: not a book`)
return res.status(400).send('Invalid library item: not a book')
}
-if (req.libraryItem.media.tracks.length <= 0) {
+if (!req.libraryItem.hasAudioTracks) {
Logger.error(`[MiscController] encodeM4b: Invalid audiobook ${req.params.id}: no audio tracks`)
return res.status(400).send('Invalid audiobook: no audio tracks')
}
@@ -72,11 +77,11 @@ class ToolsController {
*
* @this import('../routers/ApiRouter')
*
-* @param {RequestWithUser} req
+* @param {RequestWithLibraryItem} req
* @param {Response} res
*/
async embedAudioFileMetadata(req, res) {
-if (req.libraryItem.isMissing || !req.libraryItem.hasAudioFiles || !req.libraryItem.isBook) {
+if (req.libraryItem.isMissing || !req.libraryItem.hasAudioTracks || !req.libraryItem.isBook) {
Logger.error(`[ToolsController] Invalid library item`)
return res.sendStatus(400)
}
@@ -111,7 +116,7 @@ class ToolsController {
const libraryItems = []
for (const libraryItemId of libraryItemIds) {
-const libraryItem = await Database.libraryItemModel.getOldById(libraryItemId)
+const libraryItem = await Database.libraryItemModel.getExpandedById(libraryItemId)
if (!libraryItem) {
Logger.error(`[ToolsController] Batch embed metadata library item (${libraryItemId}) not found`)
return res.sendStatus(404)
@@ -123,7 +128,7 @@ class ToolsController {
return res.sendStatus(403)
}
-if (libraryItem.isMissing || !libraryItem.hasAudioFiles || !libraryItem.isBook) {
+if (libraryItem.isMissing || !libraryItem.hasAudioTracks || !libraryItem.isBook) {
Logger.error(`[ToolsController] Batch embed invalid library item (${libraryItemId})`)
return res.sendStatus(400)
}
@@ -157,7 +162,7 @@ class ToolsController {
}
if (req.params.id) {
-const item = await Database.libraryItemModel.getOldById(req.params.id)
+const item = await Database.libraryItemModel.getExpandedById(req.params.id)
if (!item?.media) return res.sendStatus(404)
// Check user can access this library item

View File

@@ -361,7 +361,7 @@ class BookFinder {
/**
* Search for books including fuzzy searches
*
-* @param {Object} libraryItem
+* @param {import('../models/LibraryItem')} libraryItem
* @param {string} provider
* @param {string} title
* @param {string} author

View File

@@ -51,7 +51,7 @@ class AbMergeManager {
/**
*
* @param {string} userId
-* @param {import('../objects/LibraryItem')} libraryItem
+* @param {import('../models/LibraryItem')} libraryItem
* @param {AbMergeEncodeOptions} [options={}]
*/
async startAudiobookMerge(userId, libraryItem, options = {}) {
@@ -67,7 +67,7 @@ class AbMergeManager {
libraryItemId: libraryItem.id,
libraryItemDir,
userId,
-originalTrackPaths: libraryItem.media.tracks.map((t) => t.metadata.path),
+originalTrackPaths: libraryItem.media.includedAudioFiles.map((t) => t.metadata.path),
inos: libraryItem.media.includedAudioFiles.map((f) => f.ino),
tempFilepath,
targetFilename,
@@ -86,9 +86,9 @@ class AbMergeManager {
key: 'MessageTaskEncodingM4b'
}
const taskDescriptionString = {
-text: `Encoding audiobook "${libraryItem.media.metadata.title}" into a single m4b file.`,
+text: `Encoding audiobook "${libraryItem.media.title}" into a single m4b file.`,
key: 'MessageTaskEncodingM4bDescription',
-subs: [libraryItem.media.metadata.title]
+subs: [libraryItem.media.title]
}
task.setData('encode-m4b', taskTitleString, taskDescriptionString, false, taskData)
TaskManager.addTask(task)
@@ -103,7 +103,7 @@ class AbMergeManager {
/**
*
-* @param {import('../objects/LibraryItem')} libraryItem
+* @param {import('../models/LibraryItem')} libraryItem
* @param {Task} task
* @param {AbMergeEncodeOptions} encodingOptions
*/
@@ -141,7 +141,7 @@ class AbMergeManager {
const embedFraction = 1 - encodeFraction
try {
const trackProgressMonitor = new TrackProgressMonitor(
-libraryItem.media.tracks.map((t) => t.duration),
+libraryItem.media.includedAudioFiles.map((t) => t.duration),
(trackIndex) => SocketAuthority.adminEmitter('track_started', { libraryItemId: libraryItem.id, ino: task.data.inos[trackIndex] }),
(trackIndex, progressInTrack, taskProgress) => {
SocketAuthority.adminEmitter('track_progress', { libraryItemId: libraryItem.id, ino: task.data.inos[trackIndex], progress: progressInTrack })
@@ -150,7 +150,7 @@ class AbMergeManager {
(trackIndex) => SocketAuthority.adminEmitter('track_finished', { libraryItemId: libraryItem.id, ino: task.data.inos[trackIndex] })
)
task.data.ffmpeg = new Ffmpeg()
-await ffmpegHelpers.mergeAudioFiles(libraryItem.media.tracks, task.data.duration, task.data.itemCachePath, task.data.tempFilepath, encodingOptions, (progress) => trackProgressMonitor.update(progress), task.data.ffmpeg)
+await ffmpegHelpers.mergeAudioFiles(libraryItem.media.includedAudioFiles, task.data.duration, task.data.itemCachePath, task.data.tempFilepath, encodingOptions, (progress) => trackProgressMonitor.update(progress), task.data.ffmpeg)
delete task.data.ffmpeg
trackProgressMonitor.finish()
} catch (error) {

View File

@@ -40,14 +40,14 @@ class AudioMetadataMangaer {
* @returns
*/
getMetadataObjectForApi(libraryItem) {
-return ffmpegHelpers.getFFMetadataObject(libraryItem.toOldJSONExpanded(), libraryItem.media.includedAudioFiles.length)
+return ffmpegHelpers.getFFMetadataObject(libraryItem, libraryItem.media.includedAudioFiles.length)
}
/**
*
* @param {string} userId
-* @param {*} libraryItems
-* @param {*} options
+* @param {import('../models/LibraryItem')[]} libraryItems
+* @param {UpdateMetadataOptions} options
*/
handleBatchEmbed(userId, libraryItems, options = {}) {
libraryItems.forEach((li) => {
@@ -58,7 +58,7 @@ class AudioMetadataMangaer {
/**
*
* @param {string} userId
-* @param {import('../objects/LibraryItem')} libraryItem
+* @param {import('../models/LibraryItem')} libraryItem
* @param {UpdateMetadataOptions} [options={}]
*/
async updateMetadataForItem(userId, libraryItem, options = {}) {
@@ -108,14 +108,14 @@ class AudioMetadataMangaer {
key: 'MessageTaskEmbeddingMetadata'
}
const taskDescriptionString = {
-text: `Embedding metadata in audiobook "${libraryItem.media.metadata.title}".`,
+text: `Embedding metadata in audiobook "${libraryItem.media.title}".`,
key: 'MessageTaskEmbeddingMetadataDescription',
-subs: [libraryItem.media.metadata.title]
+subs: [libraryItem.media.title]
}
task.setData('embed-metadata', taskTitleString, taskDescriptionString, false, taskData)
if (this.tasksRunning.length >= this.MAX_CONCURRENT_TASKS) {
-Logger.info(`[AudioMetadataManager] Queueing embed metadata for audiobook "${libraryItem.media.metadata.title}"`)
+Logger.info(`[AudioMetadataManager] Queueing embed metadata for audiobook "${libraryItem.media.title}"`)
SocketAuthority.adminEmitter('metadata_embed_queue_update', {
libraryItemId: libraryItem.id,
queued: true

View File

@@ -123,61 +123,6 @@ class CoverManager {
}
}
/**
*
* @param {Object} libraryItem - old library item
* @param {string} url
* @param {boolean} [forceLibraryItemFolder=false]
* @returns {Promise<{error:string}|{cover:string}>}
*/
async downloadCoverFromUrl(libraryItem, url, forceLibraryItemFolder = false) {
try {
// Force save cover with library item is used for adding new podcasts
var coverDirPath = forceLibraryItemFolder ? libraryItem.path : this.getCoverDirectory(libraryItem)
await fs.ensureDir(coverDirPath)
var temppath = Path.posix.join(coverDirPath, 'cover')
let errorMsg = ''
let success = await downloadImageFile(url, temppath)
.then(() => true)
.catch((err) => {
errorMsg = err.message || 'Unknown error'
Logger.error(`[CoverManager] Download image file failed for "${url}"`, errorMsg)
return false
})
if (!success) {
return {
error: 'Failed to download image from url: ' + errorMsg
}
}
var imgtype = await this.checkFileIsValidImage(temppath, true)
if (imgtype.error) {
return imgtype
}
var coverFilename = `cover.${imgtype.ext}`
var coverFullPath = Path.posix.join(coverDirPath, coverFilename)
await fs.rename(temppath, coverFullPath)
await this.removeOldCovers(coverDirPath, '.' + imgtype.ext)
await CacheManager.purgeCoverCache(libraryItem.id)
Logger.info(`[CoverManager] Downloaded libraryItem cover "${coverFullPath}" from url "${url}" for "${libraryItem.media.metadata.title}"`)
libraryItem.updateMediaCover(coverFullPath)
return {
cover: coverFullPath
}
} catch (error) {
Logger.error(`[CoverManager] Fetch cover image from url "${url}" failed`, error)
return {
error: 'Failed to fetch image from url'
}
}
}
/**
*
* @param {string} coverPath

View File

@@ -107,6 +107,22 @@ class Author extends Model {
return libraryItems
}
/**
*
* @param {string} name
* @param {string} libraryId
* @returns {Promise<Author>}
*/
static async findOrCreateByNameAndLibrary(name, libraryId) {
const author = await this.getByNameAndLibrary(name, libraryId)
if (author) return author
return this.create({
name,
lastFirst: this.getLastFirst(name),
libraryId
})
}
/**
* Initialize model
* @param {import('../Database').sequelize} sequelize
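The new Author.findOrCreateByNameAndLibrary is what Book.updateAuthorsFromRequest (later in this diff) uses to resolve author names. A minimal usage sketch; the library id is a placeholder:

// Returns the existing author for this library, or creates one with the lastFirst form filled in.
const author = await Database.authorModel.findOrCreateByNameAndLibrary('Jane Doe', libraryId)
Logger.debug(`Resolved author ${author.id} (${author.lastFirst})`) // e.g. "Doe, Jane"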

View File

@@ -130,130 +130,6 @@ class Book extends Model {
this.series
}
static getOldBook(libraryItemExpanded) {
const bookExpanded = libraryItemExpanded.media
let authors = []
if (bookExpanded.authors?.length) {
authors = bookExpanded.authors.map((au) => {
return {
id: au.id,
name: au.name
}
})
} else if (bookExpanded.bookAuthors?.length) {
authors = bookExpanded.bookAuthors
.map((ba) => {
if (ba.author) {
return {
id: ba.author.id,
name: ba.author.name
}
} else {
Logger.error(`[Book] Invalid bookExpanded bookAuthors: no author`, ba)
return null
}
})
.filter((a) => a)
}
let series = []
if (bookExpanded.series?.length) {
series = bookExpanded.series.map((se) => {
return {
id: se.id,
name: se.name,
sequence: se.bookSeries.sequence
}
})
} else if (bookExpanded.bookSeries?.length) {
series = bookExpanded.bookSeries
.map((bs) => {
if (bs.series) {
return {
id: bs.series.id,
name: bs.series.name,
sequence: bs.sequence
}
} else {
Logger.error(`[Book] Invalid bookExpanded bookSeries: no series`, bs)
return null
}
})
.filter((s) => s)
}
return {
id: bookExpanded.id,
libraryItemId: libraryItemExpanded.id,
coverPath: bookExpanded.coverPath,
tags: bookExpanded.tags,
audioFiles: bookExpanded.audioFiles,
chapters: bookExpanded.chapters,
ebookFile: bookExpanded.ebookFile,
metadata: {
title: bookExpanded.title,
subtitle: bookExpanded.subtitle,
authors: authors,
narrators: bookExpanded.narrators,
series: series,
genres: bookExpanded.genres,
publishedYear: bookExpanded.publishedYear,
publishedDate: bookExpanded.publishedDate,
publisher: bookExpanded.publisher,
description: bookExpanded.description,
isbn: bookExpanded.isbn,
asin: bookExpanded.asin,
language: bookExpanded.language,
explicit: bookExpanded.explicit,
abridged: bookExpanded.abridged
}
}
}
/**
* @param {object} oldBook
* @returns {boolean} true if updated
*/
static saveFromOld(oldBook) {
const book = this.getFromOld(oldBook)
return this.update(book, {
where: {
id: book.id
}
})
.then((result) => result[0] > 0)
.catch((error) => {
Logger.error(`[Book] Failed to save book ${book.id}`, error)
return false
})
}
static getFromOld(oldBook) {
return {
id: oldBook.id,
title: oldBook.metadata.title,
titleIgnorePrefix: oldBook.metadata.titleIgnorePrefix,
subtitle: oldBook.metadata.subtitle,
publishedYear: oldBook.metadata.publishedYear,
publishedDate: oldBook.metadata.publishedDate,
publisher: oldBook.metadata.publisher,
description: oldBook.metadata.description,
isbn: oldBook.metadata.isbn,
asin: oldBook.metadata.asin,
language: oldBook.metadata.language,
explicit: !!oldBook.metadata.explicit,
abridged: !!oldBook.metadata.abridged,
narrators: oldBook.metadata.narrators,
ebookFile: oldBook.ebookFile?.toJSON() || null,
coverPath: oldBook.coverPath,
duration: oldBook.duration,
audioFiles: oldBook.audioFiles?.map((af) => af.toJSON()) || [],
chapters: oldBook.chapters,
tags: oldBook.tags,
genres: oldBook.metadata.genres
}
}
/**
* Initialize model
* @param {import('../Database').sequelize} sequelize
@@ -542,47 +418,111 @@ class Book extends Model {
await this.save()
}
-if (Array.isArray(payload.metadata?.authors)) {
-const authorsRemoved = this.authors.filter((au) => !payload.metadata.authors.some((a) => a.id === au.id))
-const newAuthors = payload.metadata.authors.filter((a) => !this.authors.some((au) => au.id === a.id))
+return hasUpdates
+}

+/**
+* Creates or removes authors from the book using the author names from the request
+*
+* @param {string[]} authors
+* @param {string} libraryId
+* @returns {Promise<{authorsRemoved: import('./Author')[], authorsAdded: import('./Author')[]}>}
+*/
+async updateAuthorsFromRequest(authors, libraryId) {
+if (!Array.isArray(authors)) return null
+if (!this.authors) {
+throw new Error(`[Book] Cannot update authors because authors are not loaded for book ${this.id}`)
+}
+/** @type {typeof import('./Author')} */
+const authorModel = this.sequelize.models.author
+/** @type {typeof import('./BookAuthor')} */
+const bookAuthorModel = this.sequelize.models.bookAuthor
+const authorsCleaned = authors.map((a) => a.toLowerCase()).filter((a) => a)
+const authorsRemoved = this.authors.filter((au) => !authorsCleaned.includes(au.name.toLowerCase()))
+const newAuthorNames = authors.filter((a) => !this.authors.some((au) => au.name.toLowerCase() === a.toLowerCase()))
for (const author of authorsRemoved) {
-await this.sequelize.models.bookAuthor.removeByIds(author.id, this.id)
-Logger.debug(`[Book] "${this.title}" Removed author ${author.id}`)
-hasUpdates = true
+await bookAuthorModel.removeByIds(author.id, this.id)
+Logger.debug(`[Book] "${this.title}" Removed author "${author.name}"`)
+this.authors = this.authors.filter((au) => au.id !== author.id)
}
-for (const author of newAuthors) {
-await this.sequelize.models.bookAuthor.create({ bookId: this.id, authorId: author.id })
-Logger.debug(`[Book] "${this.title}" Added author ${author.id}`)
-hasUpdates = true
+const authorsAdded = []
+for (const authorName of newAuthorNames) {
+const author = await authorModel.findOrCreateByNameAndLibrary(authorName, libraryId)
+await bookAuthorModel.create({ bookId: this.id, authorId: author.id })
+Logger.debug(`[Book] "${this.title}" Added author "${author.name}"`)
+this.authors.push(author)
+authorsAdded.push(author)
+}
+return {
+authorsRemoved,
+authorsAdded
}
}

-if (Array.isArray(payload.metadata?.series)) {
-const seriesRemoved = this.series.filter((se) => !payload.metadata.series.some((s) => s.id === se.id))
-const newSeries = payload.metadata.series.filter((s) => !this.series.some((se) => se.id === s.id))
+/**
+* Creates or removes series from the book using the series names from the request.
+* Updates series sequence if it has changed.
+*
+* @param {{ name: string, sequence: string }[]} seriesObjects
+* @param {string} libraryId
+* @returns {Promise<{seriesRemoved: import('./Series')[], seriesAdded: import('./Series')[], hasUpdates: boolean}>}
+*/
+async updateSeriesFromRequest(seriesObjects, libraryId) {
+if (!Array.isArray(seriesObjects) || seriesObjects.some((se) => !se.name || typeof se.name !== 'string')) return null
+if (!this.series) {
+throw new Error(`[Book] Cannot update series because series are not loaded for book ${this.id}`)
+}
+/** @type {typeof import('./Series')} */
+const seriesModel = this.sequelize.models.series
+/** @type {typeof import('./BookSeries')} */
+const bookSeriesModel = this.sequelize.models.bookSeries
+const seriesNamesCleaned = seriesObjects.map((se) => se.name.toLowerCase())
+const seriesRemoved = this.series.filter((se) => !seriesNamesCleaned.includes(se.name.toLowerCase()))
+const seriesAdded = []
+let hasUpdates = false
+for (const seriesObj of seriesObjects) {
+const seriesObjSequence = typeof seriesObj.sequence === 'string' ? seriesObj.sequence : null
+const existingSeries = this.series.find((se) => se.name.toLowerCase() === seriesObj.name.toLowerCase())
+if (existingSeries) {
+if (existingSeries.bookSeries.sequence !== seriesObjSequence) {
+existingSeries.bookSeries.sequence = seriesObjSequence
+await existingSeries.bookSeries.save()
+hasUpdates = true
+Logger.debug(`[Book] "${this.title}" Updated series "${existingSeries.name}" sequence ${seriesObjSequence}`)
+}
+} else {
+const series = await seriesModel.findOrCreateByNameAndLibrary(seriesObj.name, libraryId)
+series.bookSeries = await bookSeriesModel.create({ bookId: this.id, seriesId: series.id, sequence: seriesObjSequence })
+this.series.push(series)
+seriesAdded.push(series)
+hasUpdates = true
+Logger.debug(`[Book] "${this.title}" Added series "${series.name}"`)
+}
+}
for (const series of seriesRemoved) {
-await this.sequelize.models.bookSeries.removeByIds(series.id, this.id)
+await bookSeriesModel.removeByIds(series.id, this.id)
+this.series = this.series.filter((se) => se.id !== series.id)
Logger.debug(`[Book] "${this.title}" Removed series ${series.id}`)
hasUpdates = true
}
-for (const series of newSeries) {
-await this.sequelize.models.bookSeries.create({ bookId: this.id, seriesId: series.id, sequence: series.sequence })
-Logger.debug(`[Book] "${this.title}" Added series ${series.id}`)
-hasUpdates = true
-}
-for (const series of payload.metadata.series) {
-const existingSeries = this.series.find((se) => se.id === series.id)
-if (existingSeries && existingSeries.bookSeries.sequence !== series.sequence) {
-await existingSeries.bookSeries.update({ sequence: series.sequence })
-Logger.debug(`[Book] "${this.title}" Updated series ${series.id} sequence ${series.sequence}`)
-hasUpdates = true
-}
-}
-}

-return hasUpdates
+return {
+seriesRemoved,
+seriesAdded,
+hasUpdates
+}
}

/**

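For orientation, a minimal sketch of how the controllers in this PR call the two new Book methods above; it assumes the book was loaded with its author and series associations (e.g. via getExpandedById), otherwise the methods throw. Values are illustrative:

// Both methods return null for a malformed payload.
const authorUpdateData = await libraryItem.media.updateAuthorsFromRequest(['Jane Doe'], libraryItem.libraryId)
// -> { authorsRemoved: Author[], authorsAdded: Author[] }
const seriesUpdateData = await libraryItem.media.updateSeriesFromRequest([{ name: 'Example Series', sequence: '2' }], libraryItem.libraryId)
// -> { seriesRemoved: Series[], seriesAdded: Series[], hasUpdates: boolean }
if (seriesUpdateData?.hasUpdates || authorUpdateData?.authorsRemoved.length || authorUpdateData?.authorsAdded.length) {
  libraryItem.changed('updatedAt', true)
  await libraryItem.save()
}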
View File

@@ -1,11 +1,8 @@
-const util = require('util')
const Path = require('path')
const { DataTypes, Model } = require('sequelize')
const fsExtra = require('../libs/fsExtra')
const Logger = require('../Logger')
-const oldLibraryItem = require('../objects/LibraryItem')
const libraryFilters = require('../utils/queries/libraryFilters')
-const { areEquivalent } = require('../utils/index')
const { filePathToPOSIX, getFileTimestampsWithIno } = require('../utils/fileUtils')
const LibraryFile = require('../objects/files/LibraryFile')
const Book = require('./Book')
@@ -122,244 +119,6 @@ class LibraryItem extends Model {
})
}
/**
* Convert an expanded LibraryItem into an old library item
*
* @param {Model<LibraryItem>} libraryItemExpanded
* @returns {oldLibraryItem}
*/
static getOldLibraryItem(libraryItemExpanded) {
let media = null
if (libraryItemExpanded.mediaType === 'book') {
media = this.sequelize.models.book.getOldBook(libraryItemExpanded)
} else if (libraryItemExpanded.mediaType === 'podcast') {
media = this.sequelize.models.podcast.getOldPodcast(libraryItemExpanded)
}
return new oldLibraryItem({
id: libraryItemExpanded.id,
ino: libraryItemExpanded.ino,
oldLibraryItemId: libraryItemExpanded.extraData?.oldLibraryItemId || null,
libraryId: libraryItemExpanded.libraryId,
folderId: libraryItemExpanded.libraryFolderId,
path: libraryItemExpanded.path,
relPath: libraryItemExpanded.relPath,
isFile: libraryItemExpanded.isFile,
mtimeMs: libraryItemExpanded.mtime?.valueOf(),
ctimeMs: libraryItemExpanded.ctime?.valueOf(),
birthtimeMs: libraryItemExpanded.birthtime?.valueOf(),
addedAt: libraryItemExpanded.createdAt.valueOf(),
updatedAt: libraryItemExpanded.updatedAt.valueOf(),
lastScan: libraryItemExpanded.lastScan?.valueOf(),
scanVersion: libraryItemExpanded.lastScanVersion,
isMissing: !!libraryItemExpanded.isMissing,
isInvalid: !!libraryItemExpanded.isInvalid,
mediaType: libraryItemExpanded.mediaType,
media,
libraryFiles: libraryItemExpanded.libraryFiles
})
}
/**
* Updates libraryItem, book, authors and series from old library item
*
* @param {oldLibraryItem} oldLibraryItem
* @returns {Promise<boolean>} true if updates were made
*/
static async fullUpdateFromOld(oldLibraryItem) {
const libraryItemExpanded = await this.getExpandedById(oldLibraryItem.id)
if (!libraryItemExpanded) return false
let hasUpdates = false
// Check update Book/Podcast
if (libraryItemExpanded.media) {
let updatedMedia = null
if (libraryItemExpanded.mediaType === 'podcast') {
updatedMedia = this.sequelize.models.podcast.getFromOld(oldLibraryItem.media)
const existingPodcastEpisodes = libraryItemExpanded.media.podcastEpisodes || []
const updatedPodcastEpisodes = oldLibraryItem.media.episodes || []
for (const existingPodcastEpisode of existingPodcastEpisodes) {
// Episode was removed
if (!updatedPodcastEpisodes.some((ep) => ep.id === existingPodcastEpisode.id)) {
Logger.debug(`[LibraryItem] "${libraryItemExpanded.media.title}" episode "${existingPodcastEpisode.title}" was removed`)
await existingPodcastEpisode.destroy()
hasUpdates = true
}
}
for (const updatedPodcastEpisode of updatedPodcastEpisodes) {
const existingEpisodeMatch = existingPodcastEpisodes.find((ep) => ep.id === updatedPodcastEpisode.id)
if (!existingEpisodeMatch) {
Logger.debug(`[LibraryItem] "${libraryItemExpanded.media.title}" episode "${updatedPodcastEpisode.title}" was added`)
await this.sequelize.models.podcastEpisode.createFromOld(updatedPodcastEpisode)
hasUpdates = true
} else {
const updatedEpisodeCleaned = this.sequelize.models.podcastEpisode.getFromOld(updatedPodcastEpisode)
let episodeHasUpdates = false
for (const key in updatedEpisodeCleaned) {
let existingValue = existingEpisodeMatch[key]
if (existingValue instanceof Date) existingValue = existingValue.valueOf()
if (!areEquivalent(updatedEpisodeCleaned[key], existingValue, true)) {
Logger.debug(util.format(`[LibraryItem] "${libraryItemExpanded.media.title}" episode "${existingEpisodeMatch.title}" ${key} was updated from %j to %j`, existingValue, updatedEpisodeCleaned[key]))
episodeHasUpdates = true
}
}
if (episodeHasUpdates) {
await existingEpisodeMatch.update(updatedEpisodeCleaned)
hasUpdates = true
}
}
}
} else if (libraryItemExpanded.mediaType === 'book') {
updatedMedia = this.sequelize.models.book.getFromOld(oldLibraryItem.media)
const existingAuthors = libraryItemExpanded.media.authors || []
const existingSeriesAll = libraryItemExpanded.media.series || []
const updatedAuthors = oldLibraryItem.media.metadata.authors || []
const uniqueUpdatedAuthors = updatedAuthors.filter((au, idx) => updatedAuthors.findIndex((a) => a.id === au.id) === idx)
const updatedSeriesAll = oldLibraryItem.media.metadata.series || []
for (const existingAuthor of existingAuthors) {
// Author was removed from Book
if (!uniqueUpdatedAuthors.some((au) => au.id === existingAuthor.id)) {
Logger.debug(`[LibraryItem] "${libraryItemExpanded.media.title}" author "${existingAuthor.name}" was removed`)
await this.sequelize.models.bookAuthor.removeByIds(existingAuthor.id, libraryItemExpanded.media.id)
hasUpdates = true
}
}
for (const updatedAuthor of uniqueUpdatedAuthors) {
// Author was added
if (!existingAuthors.some((au) => au.id === updatedAuthor.id)) {
Logger.debug(`[LibraryItem] "${libraryItemExpanded.media.title}" author "${updatedAuthor.name}" was added`)
await this.sequelize.models.bookAuthor.create({ authorId: updatedAuthor.id, bookId: libraryItemExpanded.media.id })
hasUpdates = true
}
}
for (const existingSeries of existingSeriesAll) {
// Series was removed
if (!updatedSeriesAll.some((se) => se.id === existingSeries.id)) {
Logger.debug(`[LibraryItem] "${libraryItemExpanded.media.title}" series "${existingSeries.name}" was removed`)
await this.sequelize.models.bookSeries.removeByIds(existingSeries.id, libraryItemExpanded.media.id)
hasUpdates = true
}
}
for (const updatedSeries of updatedSeriesAll) {
// Series was added/updated
const existingSeriesMatch = existingSeriesAll.find((se) => se.id === updatedSeries.id)
if (!existingSeriesMatch) {
Logger.debug(`[LibraryItem] "${libraryItemExpanded.media.title}" series "${updatedSeries.name}" was added`)
await this.sequelize.models.bookSeries.create({ seriesId: updatedSeries.id, bookId: libraryItemExpanded.media.id, sequence: updatedSeries.sequence })
hasUpdates = true
} else if (existingSeriesMatch.bookSeries.sequence !== updatedSeries.sequence) {
Logger.debug(`[LibraryItem] "${libraryItemExpanded.media.title}" series "${updatedSeries.name}" sequence was updated from "${existingSeriesMatch.bookSeries.sequence}" to "${updatedSeries.sequence}"`)
await existingSeriesMatch.bookSeries.update({ id: updatedSeries.id, sequence: updatedSeries.sequence })
hasUpdates = true
}
}
}
let hasMediaUpdates = false
for (const key in updatedMedia) {
let existingValue = libraryItemExpanded.media[key]
if (existingValue instanceof Date) existingValue = existingValue.valueOf()
if (!areEquivalent(updatedMedia[key], existingValue, true)) {
if (key === 'chapters') {
// Handle logging of chapters separately because the object is large
const chaptersRemoved = libraryItemExpanded.media.chapters.filter((ch) => !updatedMedia.chapters.some((uch) => uch.id === ch.id))
if (chaptersRemoved.length) {
Logger.debug(`[LibraryItem] "${libraryItemExpanded.media.title}" chapters removed: ${chaptersRemoved.map((ch) => ch.title).join(', ')}`)
}
const chaptersAdded = updatedMedia.chapters.filter((uch) => !libraryItemExpanded.media.chapters.some((ch) => ch.id === uch.id))
if (chaptersAdded.length) {
Logger.debug(`[LibraryItem] "${libraryItemExpanded.media.title}" chapters added: ${chaptersAdded.map((ch) => ch.title).join(', ')}`)
}
if (!chaptersRemoved.length && !chaptersAdded.length) {
Logger.debug(`[LibraryItem] "${libraryItemExpanded.media.title}" chapters updated`)
}
} else {
Logger.debug(util.format(`[LibraryItem] "${libraryItemExpanded.media.title}" ${libraryItemExpanded.mediaType}.${key} updated from %j to %j`, existingValue, updatedMedia[key]))
}
hasMediaUpdates = true
}
}
if (hasMediaUpdates && updatedMedia) {
await libraryItemExpanded.media.update(updatedMedia)
hasUpdates = true
}
}
const updatedLibraryItem = this.getFromOld(oldLibraryItem)
let hasLibraryItemUpdates = false
for (const key in updatedLibraryItem) {
let existingValue = libraryItemExpanded[key]
if (existingValue instanceof Date) existingValue = existingValue.valueOf()
if (!areEquivalent(updatedLibraryItem[key], existingValue, true)) {
if (key === 'libraryFiles') {
// Handle logging of libraryFiles separately because the object is large (should be addressed when migrating off the old library item model)
const libraryFilesRemoved = libraryItemExpanded.libraryFiles.filter((lf) => !updatedLibraryItem.libraryFiles.some((ulf) => ulf.ino === lf.ino))
if (libraryFilesRemoved.length) {
Logger.debug(`[LibraryItem] "${libraryItemExpanded.media.title}" library files removed: ${libraryFilesRemoved.map((lf) => lf.metadata.path).join(', ')}`)
}
const libraryFilesAdded = updatedLibraryItem.libraryFiles.filter((ulf) => !libraryItemExpanded.libraryFiles.some((lf) => lf.ino === ulf.ino))
if (libraryFilesAdded.length) {
Logger.debug(`[LibraryItem] "${libraryItemExpanded.media.title}" library files added: ${libraryFilesAdded.map((lf) => lf.metadata.path).join(', ')}`)
}
if (!libraryFilesRemoved.length && !libraryFilesAdded.length) {
Logger.debug(`[LibraryItem] "${libraryItemExpanded.media.title}" library files updated`)
}
} else {
Logger.debug(util.format(`[LibraryItem] "${libraryItemExpanded.media.title}" ${key} updated from %j to %j`, existingValue, updatedLibraryItem[key]))
}
hasLibraryItemUpdates = true
if (key === 'updatedAt') {
libraryItemExpanded.changed('updatedAt', true)
}
}
}
if (hasLibraryItemUpdates) {
await libraryItemExpanded.update(updatedLibraryItem)
Logger.info(`[LibraryItem] Library item "${libraryItemExpanded.id}" updated`)
hasUpdates = true
}
return hasUpdates
}
static getFromOld(oldLibraryItem) {
const extraData = {}
if (oldLibraryItem.oldLibraryItemId) {
extraData.oldLibraryItemId = oldLibraryItem.oldLibraryItemId
}
return {
id: oldLibraryItem.id,
ino: oldLibraryItem.ino,
path: oldLibraryItem.path,
relPath: oldLibraryItem.relPath,
mediaId: oldLibraryItem.media.id,
mediaType: oldLibraryItem.mediaType,
isFile: !!oldLibraryItem.isFile,
isMissing: !!oldLibraryItem.isMissing,
isInvalid: !!oldLibraryItem.isInvalid,
mtime: oldLibraryItem.mtimeMs,
ctime: oldLibraryItem.ctimeMs,
updatedAt: oldLibraryItem.updatedAt,
birthtime: oldLibraryItem.birthtimeMs,
size: oldLibraryItem.size,
lastScan: oldLibraryItem.lastScan,
lastScanVersion: oldLibraryItem.scanVersion,
libraryId: oldLibraryItem.libraryId,
libraryFolderId: oldLibraryItem.folderId,
libraryFiles: oldLibraryItem.libraryFiles?.map((lf) => lf.toJSON()) || [],
extraData
}
}
/**
* Remove library item by id
*
@@ -468,12 +227,14 @@ class LibraryItem extends Model {
/**
*
* @param {import('sequelize').WhereOptions} where
+* @param {import('sequelize').BindOrReplacements} [replacements]
* @param {import('sequelize').IncludeOptions} [include]
* @returns {Promise<LibraryItemExpanded>}
*/
-static async findOneExpanded(where, include = null) {
+static async findOneExpanded(where, replacements = null, include = null) {
const libraryItem = await this.findOne({
where,
+replacements,
include
})
if (!libraryItem) {
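findOneExpanded now takes Sequelize replacements between where and include, mirroring the removed findOneOld(where, replacements) helper further down this diff. A hedged sketch of both call styles; identifiers are illustrative:

const { Sequelize } = require('sequelize')

// Plain object where, no replacements (as the MediaItemShare change below does):
const byMediaId = await Database.libraryItemModel.findOneExpanded({ mediaId }, null)

// Literal where with named bind parameters supplied via replacements:
const byLiteral = await Database.libraryItemModel.findOneExpanded(Sequelize.literal('mediaId = :mediaId'), { mediaId })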
@@ -516,61 +277,12 @@ class LibraryItem extends Model {
return libraryItem
}
/**
* Get old library item by id
* @param {string} libraryItemId
* @returns {oldLibraryItem}
*/
static async getOldById(libraryItemId) {
if (!libraryItemId) return null
const libraryItem = await this.findByPk(libraryItemId)
if (!libraryItem) {
Logger.error(`[LibraryItem] Library item not found with id "${libraryItemId}"`)
return null
}
if (libraryItem.mediaType === 'podcast') {
libraryItem.media = await libraryItem.getMedia({
include: [
{
model: this.sequelize.models.podcastEpisode
}
]
})
} else {
libraryItem.media = await libraryItem.getMedia({
include: [
{
model: this.sequelize.models.author,
through: {
attributes: []
}
},
{
model: this.sequelize.models.series,
through: {
attributes: ['sequence']
}
}
],
order: [
[this.sequelize.models.author, this.sequelize.models.bookAuthor, 'createdAt', 'ASC'],
[this.sequelize.models.series, 'bookSeries', 'createdAt', 'ASC']
]
})
}
if (!libraryItem.media) return null
return this.getOldLibraryItem(libraryItem)
}
/**
* Get library items using filter and sort
* @param {import('./Library')} library
* @param {import('./User')} user
* @param {object} options
-* @returns {{ libraryItems:oldLibraryItem[], count:number }}
+* @returns {{ libraryItems:Object[], count:number }}
*/
static async getByFilterAndSort(library, user, options) {
let start = Date.now()
@@ -624,17 +336,19 @@ class LibraryItem extends Model {
// "Continue Listening" shelf
const itemsInProgressPayload = await libraryFilters.getMediaItemsInProgress(library, user, include, limit, false)
if (itemsInProgressPayload.items.length) {
-const ebookOnlyItemsInProgress = itemsInProgressPayload.items.filter((li) => li.media.isEBookOnly)
-const audioOnlyItemsInProgress = itemsInProgressPayload.items.filter((li) => !li.media.isEBookOnly)
+const ebookOnlyItemsInProgress = itemsInProgressPayload.items.filter((li) => li.media.ebookFormat && !li.media.numTracks)
+const audioItemsInProgress = itemsInProgressPayload.items.filter((li) => li.media.numTracks)
+if (audioItemsInProgress.length) {
shelves.push({
id: 'continue-listening',
label: 'Continue Listening',
labelStringKey: 'LabelContinueListening',
type: library.isPodcast ? 'episode' : 'book',
-entities: audioOnlyItemsInProgress,
+entities: audioItemsInProgress,
total: itemsInProgressPayload.count
})
+}
if (ebookOnlyItemsInProgress.length) {
// "Continue Reading" shelf
@@ -733,17 +447,19 @@ class LibraryItem extends Model {
// "Listen Again" shelf
const mediaFinishedPayload = await libraryFilters.getMediaFinished(library, user, include, limit)
if (mediaFinishedPayload.items.length) {
-const ebookOnlyItemsInProgress = mediaFinishedPayload.items.filter((li) => li.media.isEBookOnly)
-const audioOnlyItemsInProgress = mediaFinishedPayload.items.filter((li) => !li.media.isEBookOnly)
+const ebookOnlyItemsInProgress = mediaFinishedPayload.items.filter((li) => li.media.ebookFormat && !li.media.numTracks)
+const audioItemsInProgress = mediaFinishedPayload.items.filter((li) => li.media.numTracks)
+if (audioItemsInProgress.length) {
shelves.push({
id: 'listen-again',
label: 'Listen Again',
labelStringKey: 'LabelListenAgain',
type: library.isPodcast ? 'episode' : 'book',
-entities: audioOnlyItemsInProgress,
+entities: audioItemsInProgress,
total: mediaFinishedPayload.count
})
+}
// "Read Again" shelf
if (ebookOnlyItemsInProgress.length) {
@@ -801,52 +517,6 @@ class LibraryItem extends Model {
return (await this.count({ where: { id: libraryItemId } })) > 0
}
/**
*
* @param {import('sequelize').WhereOptions} where
* @param {import('sequelize').BindOrReplacements} replacements
* @returns {Object} oldLibraryItem
*/
static async findOneOld(where, replacements = {}) {
const libraryItem = await this.findOne({
where,
replacements,
include: [
{
model: this.sequelize.models.book,
include: [
{
model: this.sequelize.models.author,
through: {
attributes: []
}
},
{
model: this.sequelize.models.series,
through: {
attributes: ['sequence']
}
}
]
},
{
model: this.sequelize.models.podcast,
include: [
{
model: this.sequelize.models.podcastEpisode
}
]
}
],
order: [
[this.sequelize.models.book, this.sequelize.models.author, this.sequelize.models.bookAuthor, 'createdAt', 'ASC'],
[this.sequelize.models.book, this.sequelize.models.series, 'bookSeries', 'createdAt', 'ASC']
]
})
if (!libraryItem) return null
return this.getOldLibraryItem(libraryItem)
}
/**
*
* @param {string} libraryItemId
@@ -970,7 +640,7 @@ class LibraryItem extends Model {
}
}
-Logger.debug(`Success saving abmetadata to "${metadataFilePath}"`)
+Logger.debug(`[LibraryItem] Saved metadata for "${this.media.title}" file to "${metadataFilePath}"`)
return metadataLibraryFile
})

View File

@@ -87,13 +87,10 @@ class MediaItemShare extends Model {
const libraryItemModel = this.sequelize.models.libraryItem
if (mediaItemType === 'book') {
-const libraryItem = await libraryItemModel.findOneExpanded(
-{ mediaId: mediaItemId },
-{
+const libraryItem = await libraryItemModel.findOneExpanded({ mediaId: mediaItemId }, null, {
model: this.sequelize.models.library,
attributes: ['settings']
-}
-)
+})
return libraryItem
}

View File

@@ -36,33 +36,6 @@ class MediaProgress extends Model {
this.createdAt
}
static upsertFromOld(oldMediaProgress) {
const mediaProgress = this.getFromOld(oldMediaProgress)
return this.upsert(mediaProgress)
}
static getFromOld(oldMediaProgress) {
return {
id: oldMediaProgress.id,
userId: oldMediaProgress.userId,
mediaItemId: oldMediaProgress.mediaItemId,
mediaItemType: oldMediaProgress.mediaItemType,
duration: oldMediaProgress.duration,
currentTime: oldMediaProgress.currentTime,
ebookLocation: oldMediaProgress.ebookLocation || null,
ebookProgress: oldMediaProgress.ebookProgress || null,
isFinished: !!oldMediaProgress.isFinished,
hideFromContinueListening: !!oldMediaProgress.hideFromContinueListening,
finishedAt: oldMediaProgress.finishedAt,
createdAt: oldMediaProgress.startedAt || oldMediaProgress.lastUpdate,
updatedAt: oldMediaProgress.lastUpdate,
extraData: {
libraryItemId: oldMediaProgress.libraryItemId,
progress: oldMediaProgress.progress
}
}
}
static removeById(mediaProgressId) {
return this.destroy({
where: {
@@ -71,12 +44,6 @@ class MediaProgress extends Model {
})
}
getMediaItem(options) {
if (!this.mediaItemType) return Promise.resolve(null)
const mixinMethodName = `get${this.sequelize.uppercaseFirst(this.mediaItemType)}`
return this[mixinMethodName](options)
}
/**
* Initialize model
*
@@ -162,6 +129,12 @@ class MediaProgress extends Model {
MediaProgress.belongsTo(user)
}
getMediaItem(options) {
if (!this.mediaItemType) return Promise.resolve(null)
const mixinMethodName = `get${this.sequelize.uppercaseFirst(this.mediaItemType)}`
return this[mixinMethodName](options)
}
getOldMediaProgress() {
const isPodcastEpisode = this.mediaItemType === 'podcastEpisode'
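getMediaItem itself is unchanged, just moved next to the other instance methods; it dispatches to the Sequelize association mixin named after mediaItemType. A small illustrative sketch of what the dynamic call resolves to (model access shown here via Database.models, as used elsewhere in this diff):

// mediaItemType is 'book' or 'podcastEpisode', so the mixin called is
// getBook() or getPodcastEpisode() respectively.
const progress = await Database.models.mediaProgress.findByPk(mediaProgressId)
const mediaItem = await progress.getMediaItem({ attributes: ['id', 'title'] })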

View File

@@ -66,66 +66,6 @@ class Podcast extends Model {
this.podcastEpisodes
}
static getOldPodcast(libraryItemExpanded) {
const podcastExpanded = libraryItemExpanded.media
const podcastEpisodes = podcastExpanded.podcastEpisodes?.map((ep) => ep.getOldPodcastEpisode(libraryItemExpanded.id).toJSON()).sort((a, b) => a.index - b.index)
return {
id: podcastExpanded.id,
libraryItemId: libraryItemExpanded.id,
metadata: {
title: podcastExpanded.title,
author: podcastExpanded.author,
description: podcastExpanded.description,
releaseDate: podcastExpanded.releaseDate,
genres: podcastExpanded.genres,
feedUrl: podcastExpanded.feedURL,
imageUrl: podcastExpanded.imageURL,
itunesPageUrl: podcastExpanded.itunesPageURL,
itunesId: podcastExpanded.itunesId,
itunesArtistId: podcastExpanded.itunesArtistId,
explicit: podcastExpanded.explicit,
language: podcastExpanded.language,
type: podcastExpanded.podcastType
},
coverPath: podcastExpanded.coverPath,
tags: podcastExpanded.tags,
episodes: podcastEpisodes || [],
autoDownloadEpisodes: podcastExpanded.autoDownloadEpisodes,
autoDownloadSchedule: podcastExpanded.autoDownloadSchedule,
lastEpisodeCheck: podcastExpanded.lastEpisodeCheck?.valueOf() || null,
maxEpisodesToKeep: podcastExpanded.maxEpisodesToKeep,
maxNewEpisodesToDownload: podcastExpanded.maxNewEpisodesToDownload
}
}
static getFromOld(oldPodcast) {
const oldPodcastMetadata = oldPodcast.metadata
return {
id: oldPodcast.id,
title: oldPodcastMetadata.title,
titleIgnorePrefix: oldPodcastMetadata.titleIgnorePrefix,
author: oldPodcastMetadata.author,
releaseDate: oldPodcastMetadata.releaseDate,
feedURL: oldPodcastMetadata.feedUrl,
imageURL: oldPodcastMetadata.imageUrl,
description: oldPodcastMetadata.description,
itunesPageURL: oldPodcastMetadata.itunesPageUrl,
itunesId: oldPodcastMetadata.itunesId,
itunesArtistId: oldPodcastMetadata.itunesArtistId,
language: oldPodcastMetadata.language,
podcastType: oldPodcastMetadata.type,
explicit: !!oldPodcastMetadata.explicit,
autoDownloadEpisodes: !!oldPodcast.autoDownloadEpisodes,
autoDownloadSchedule: oldPodcast.autoDownloadSchedule,
lastEpisodeCheck: oldPodcast.lastEpisodeCheck,
maxEpisodesToKeep: oldPodcast.maxEpisodesToKeep,
maxNewEpisodesToDownload: oldPodcast.maxNewEpisodesToDownload,
coverPath: oldPodcast.coverPath,
tags: oldPodcast.tags,
genres: oldPodcastMetadata.genres
}
}
/** /**
* Payload from the /api/podcasts POST endpoint * Payload from the /api/podcasts POST endpoint
* *

View File

@ -1,5 +1,4 @@
const { DataTypes, Model } = require('sequelize') const { DataTypes, Model } = require('sequelize')
const oldPodcastEpisode = require('../objects/entities/PodcastEpisode')
/** /**
* @typedef ChapterObject * @typedef ChapterObject
@ -53,40 +52,6 @@ class PodcastEpisode extends Model {
this.updatedAt this.updatedAt
} }
static createFromOld(oldEpisode) {
const podcastEpisode = this.getFromOld(oldEpisode)
return this.create(podcastEpisode)
}
static getFromOld(oldEpisode) {
const extraData = {}
if (oldEpisode.oldEpisodeId) {
extraData.oldEpisodeId = oldEpisode.oldEpisodeId
}
if (oldEpisode.guid) {
extraData.guid = oldEpisode.guid
}
return {
id: oldEpisode.id,
index: oldEpisode.index,
season: oldEpisode.season,
episode: oldEpisode.episode,
episodeType: oldEpisode.episodeType,
title: oldEpisode.title,
subtitle: oldEpisode.subtitle,
description: oldEpisode.description,
pubDate: oldEpisode.pubDate,
enclosureURL: oldEpisode.enclosure?.url || null,
enclosureSize: oldEpisode.enclosure?.length || null,
enclosureType: oldEpisode.enclosure?.type || null,
publishedAt: oldEpisode.publishedAt,
podcastId: oldEpisode.podcastId,
audioFile: oldEpisode.audioFile?.toJSON() || null,
chapters: oldEpisode.chapters,
extraData
}
}
/** /**
* *
* @param {import('../utils/podcastUtils').RssPodcastEpisode} rssPodcastEpisode * @param {import('../utils/podcastUtils').RssPodcastEpisode} rssPodcastEpisode
@ -208,42 +173,6 @@ class PodcastEpisode extends Model {
return track return track
} }
/**
* @param {string} libraryItemId
* @returns {oldPodcastEpisode}
*/
getOldPodcastEpisode(libraryItemId = null) {
let enclosure = null
if (this.enclosureURL) {
enclosure = {
url: this.enclosureURL,
type: this.enclosureType,
length: this.enclosureSize !== null ? String(this.enclosureSize) : null
}
}
return new oldPodcastEpisode({
libraryItemId: libraryItemId || null,
podcastId: this.podcastId,
id: this.id,
oldEpisodeId: this.extraData?.oldEpisodeId || null,
index: this.index,
season: this.season,
episode: this.episode,
episodeType: this.episodeType,
title: this.title,
subtitle: this.subtitle,
description: this.description,
enclosure,
guid: this.extraData?.guid || null,
pubDate: this.pubDate,
chapters: this.chapters,
audioFile: this.audioFile,
publishedAt: this.publishedAt?.valueOf() || null,
addedAt: this.createdAt.valueOf(),
updatedAt: this.updatedAt.valueOf()
})
}
toOldJSON(libraryItemId) { toOldJSON(libraryItemId) {
if (!libraryItemId) { if (!libraryItemId) {
throw new Error(`[PodcastEpisode] Cannot convert to old JSON because libraryItemId is not provided`) throw new Error(`[PodcastEpisode] Cannot convert to old JSON because libraryItemId is not provided`)

View File

@ -1,6 +1,6 @@
const { DataTypes, Model, where, fn, col, literal } = require('sequelize') const { DataTypes, Model, where, fn, col, literal } = require('sequelize')
const { getTitlePrefixAtEnd } = require('../utils/index') const { getTitlePrefixAtEnd, getTitleIgnorePrefix } = require('../utils/index')
class Series extends Model { class Series extends Model {
constructor(values, options) { constructor(values, options) {
@ -66,6 +66,22 @@ class Series extends Model {
return series return series
} }
/**
*
* @param {string} seriesName
* @param {string} libraryId
* @returns {Promise<Series>}
*/
static async findOrCreateByNameAndLibrary(seriesName, libraryId) {
const series = await this.getByNameAndLibrary(seriesName, libraryId)
if (series) return series
return this.create({
name: seriesName,
nameIgnorePrefix: getTitleIgnorePrefix(seriesName),
libraryId
})
}
/** /**
* Initialize model * Initialize model
* @param {import('../Database').sequelize} sequelize * @param {import('../Database').sequelize} sequelize
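The new findOrCreateByNameAndLibrary helper added above gives callers a single get-or-create call for series. A minimal usage sketch, assuming seriesName and libraryId are already in scope; the prefix handling comes from getTitleIgnorePrefix as shown above:

// Illustrative sketch only, not part of the diff
// Returns the existing series when one matches the name in this library,
// otherwise creates it with a prefix-stripped name used for sorting.
const series = await Series.findOrCreateByNameAndLibrary(seriesName, libraryId)
console.log(series.id, series.nameIgnorePrefix)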

View File

@ -563,9 +563,8 @@ class User extends Model {
/** /**
* Check user can access library item * Check user can access library item
* TODO: Currently supports both old and new library item models
* *
* @param {import('../objects/LibraryItem')|import('./LibraryItem')} libraryItem * @param {import('./LibraryItem')} libraryItem
* @returns {boolean} * @returns {boolean}
*/ */
checkCanAccessLibraryItem(libraryItem) { checkCanAccessLibraryItem(libraryItem) {

View File

@ -1,273 +0,0 @@
const fs = require('../libs/fsExtra')
const Path = require('path')
const Logger = require('../Logger')
const LibraryFile = require('./files/LibraryFile')
const Book = require('./mediaTypes/Book')
const Podcast = require('./mediaTypes/Podcast')
const { areEquivalent, copyValue } = require('../utils/index')
const { filePathToPOSIX, getFileTimestampsWithIno } = require('../utils/fileUtils')
class LibraryItem {
constructor(libraryItem = null) {
this.id = null
this.ino = null // Inode
this.oldLibraryItemId = null
this.libraryId = null
this.folderId = null
this.path = null
this.relPath = null
this.isFile = false
this.mtimeMs = null
this.ctimeMs = null
this.birthtimeMs = null
this.addedAt = null
this.updatedAt = null
this.lastScan = null
this.scanVersion = null
// Was scanned and no longer exists
this.isMissing = false
// Was scanned and no longer has media files
this.isInvalid = false
this.mediaType = null
this.media = null
/** @type {LibraryFile[]} */
this.libraryFiles = []
if (libraryItem) {
this.construct(libraryItem)
}
// Temporary attributes
this.isSavingMetadata = false
}
construct(libraryItem) {
this.id = libraryItem.id
this.ino = libraryItem.ino || null
this.oldLibraryItemId = libraryItem.oldLibraryItemId
this.libraryId = libraryItem.libraryId
this.folderId = libraryItem.folderId
this.path = libraryItem.path
this.relPath = libraryItem.relPath
this.isFile = !!libraryItem.isFile
this.mtimeMs = libraryItem.mtimeMs || 0
this.ctimeMs = libraryItem.ctimeMs || 0
this.birthtimeMs = libraryItem.birthtimeMs || 0
this.addedAt = libraryItem.addedAt
this.updatedAt = libraryItem.updatedAt || this.addedAt
this.lastScan = libraryItem.lastScan || null
this.scanVersion = libraryItem.scanVersion || null
this.isMissing = !!libraryItem.isMissing
this.isInvalid = !!libraryItem.isInvalid
this.mediaType = libraryItem.mediaType
if (this.mediaType === 'book') {
this.media = new Book(libraryItem.media)
} else if (this.mediaType === 'podcast') {
this.media = new Podcast(libraryItem.media)
}
this.media.libraryItemId = this.id
this.libraryFiles = libraryItem.libraryFiles.map((f) => new LibraryFile(f))
// Migration for v2.2.23 to set ebook library files as supplementary
if (this.isBook && this.media.ebookFile) {
for (const libraryFile of this.libraryFiles) {
if (libraryFile.isEBookFile && libraryFile.isSupplementary === null) {
libraryFile.isSupplementary = this.media.ebookFile.ino !== libraryFile.ino
}
}
}
}
toJSON() {
return {
id: this.id,
ino: this.ino,
oldLibraryItemId: this.oldLibraryItemId,
libraryId: this.libraryId,
folderId: this.folderId,
path: this.path,
relPath: this.relPath,
isFile: this.isFile,
mtimeMs: this.mtimeMs,
ctimeMs: this.ctimeMs,
birthtimeMs: this.birthtimeMs,
addedAt: this.addedAt,
updatedAt: this.updatedAt,
lastScan: this.lastScan,
scanVersion: this.scanVersion,
isMissing: !!this.isMissing,
isInvalid: !!this.isInvalid,
mediaType: this.mediaType,
media: this.media.toJSON(),
libraryFiles: this.libraryFiles.map((f) => f.toJSON())
}
}
toJSONMinified() {
return {
id: this.id,
ino: this.ino,
oldLibraryItemId: this.oldLibraryItemId,
libraryId: this.libraryId,
folderId: this.folderId,
path: this.path,
relPath: this.relPath,
isFile: this.isFile,
mtimeMs: this.mtimeMs,
ctimeMs: this.ctimeMs,
birthtimeMs: this.birthtimeMs,
addedAt: this.addedAt,
updatedAt: this.updatedAt,
isMissing: !!this.isMissing,
isInvalid: !!this.isInvalid,
mediaType: this.mediaType,
media: this.media.toJSONMinified(),
numFiles: this.libraryFiles.length,
size: this.size
}
}
// Adds additional helpful fields like media duration, tracks, etc.
toJSONExpanded() {
return {
id: this.id,
ino: this.ino,
oldLibraryItemId: this.oldLibraryItemId,
libraryId: this.libraryId,
folderId: this.folderId,
path: this.path,
relPath: this.relPath,
isFile: this.isFile,
mtimeMs: this.mtimeMs,
ctimeMs: this.ctimeMs,
birthtimeMs: this.birthtimeMs,
addedAt: this.addedAt,
updatedAt: this.updatedAt,
lastScan: this.lastScan,
scanVersion: this.scanVersion,
isMissing: !!this.isMissing,
isInvalid: !!this.isInvalid,
mediaType: this.mediaType,
media: this.media.toJSONExpanded(),
libraryFiles: this.libraryFiles.map((f) => f.toJSON()),
size: this.size
}
}
get isPodcast() {
return this.mediaType === 'podcast'
}
get isBook() {
return this.mediaType === 'book'
}
get size() {
let total = 0
this.libraryFiles.forEach((lf) => (total += lf.metadata.size))
return total
}
get hasAudioFiles() {
return this.libraryFiles.some((lf) => lf.fileType === 'audio')
}
update(payload) {
const json = this.toJSON()
let hasUpdates = false
for (const key in json) {
if (payload[key] !== undefined) {
if (key === 'media') {
if (this.media.update(payload[key])) {
hasUpdates = true
}
} else if (!areEquivalent(payload[key], json[key])) {
this[key] = copyValue(payload[key])
hasUpdates = true
}
}
}
if (hasUpdates) {
this.updatedAt = Date.now()
}
return hasUpdates
}
updateMediaCover(coverPath) {
this.media.updateCover(coverPath)
this.updatedAt = Date.now()
return true
}
setMissing() {
this.isMissing = true
this.updatedAt = Date.now()
}
/**
* Save metadata.json file
* TODO: Move to new LibraryItem model
* @returns {Promise<LibraryFile>} null if not saved
*/
async saveMetadata() {
if (this.isSavingMetadata || !global.MetadataPath) return null
this.isSavingMetadata = true
let metadataPath = Path.join(global.MetadataPath, 'items', this.id)
let storeMetadataWithItem = global.ServerSettings.storeMetadataWithItem
if (storeMetadataWithItem && !this.isFile) {
metadataPath = this.path
} else {
// Make sure metadata book dir exists
storeMetadataWithItem = false
await fs.ensureDir(metadataPath)
}
const metadataFilePath = Path.join(metadataPath, `metadata.${global.ServerSettings.metadataFileFormat}`)
return fs
.writeFile(metadataFilePath, JSON.stringify(this.media.toJSONForMetadataFile(), null, 2))
.then(async () => {
// Add metadata.json to libraryFiles array if it is new
let metadataLibraryFile = this.libraryFiles.find((lf) => lf.metadata.path === filePathToPOSIX(metadataFilePath))
if (storeMetadataWithItem) {
if (!metadataLibraryFile) {
metadataLibraryFile = new LibraryFile()
await metadataLibraryFile.setDataFromPath(metadataFilePath, `metadata.json`)
this.libraryFiles.push(metadataLibraryFile)
} else {
const fileTimestamps = await getFileTimestampsWithIno(metadataFilePath)
if (fileTimestamps) {
metadataLibraryFile.metadata.mtimeMs = fileTimestamps.mtimeMs
metadataLibraryFile.metadata.ctimeMs = fileTimestamps.ctimeMs
metadataLibraryFile.metadata.size = fileTimestamps.size
metadataLibraryFile.ino = fileTimestamps.ino
}
}
const libraryItemDirTimestamps = await getFileTimestampsWithIno(this.path)
if (libraryItemDirTimestamps) {
this.mtimeMs = libraryItemDirTimestamps.mtimeMs
this.ctimeMs = libraryItemDirTimestamps.ctimeMs
}
}
Logger.debug(`[LibraryItem] Success saving abmetadata to "${metadataFilePath}"`)
return metadataLibraryFile
})
.catch((error) => {
Logger.error(`[LibraryItem] Failed to save json file at "${metadataFilePath}"`, error)
return null
})
.finally(() => {
this.isSavingMetadata = false
})
}
}
module.exports = LibraryItem

View File

@ -1,149 +0,0 @@
const { areEquivalent, copyValue } = require('../../utils/index')
const AudioFile = require('../files/AudioFile')
const AudioTrack = require('../files/AudioTrack')
class PodcastEpisode {
constructor(episode) {
this.libraryItemId = null
this.podcastId = null
this.id = null
this.oldEpisodeId = null
this.index = null
this.season = null
this.episode = null
this.episodeType = null
this.title = null
this.subtitle = null
this.description = null
this.enclosure = null
this.guid = null
this.pubDate = null
this.chapters = []
this.audioFile = null
this.publishedAt = null
this.addedAt = null
this.updatedAt = null
if (episode) {
this.construct(episode)
}
}
construct(episode) {
this.libraryItemId = episode.libraryItemId
this.podcastId = episode.podcastId
this.id = episode.id
this.oldEpisodeId = episode.oldEpisodeId
this.index = episode.index
this.season = episode.season
this.episode = episode.episode
this.episodeType = episode.episodeType
this.title = episode.title
this.subtitle = episode.subtitle
this.description = episode.description
this.enclosure = episode.enclosure ? { ...episode.enclosure } : null
this.guid = episode.guid || null
this.pubDate = episode.pubDate
this.chapters = episode.chapters?.map((ch) => ({ ...ch })) || []
this.audioFile = episode.audioFile ? new AudioFile(episode.audioFile) : null
this.publishedAt = episode.publishedAt
this.addedAt = episode.addedAt
this.updatedAt = episode.updatedAt
if (this.audioFile) {
this.audioFile.index = 1 // Only 1 audio file per episode
}
}
toJSON() {
return {
libraryItemId: this.libraryItemId,
podcastId: this.podcastId,
id: this.id,
oldEpisodeId: this.oldEpisodeId,
index: this.index,
season: this.season,
episode: this.episode,
episodeType: this.episodeType,
title: this.title,
subtitle: this.subtitle,
description: this.description,
enclosure: this.enclosure ? { ...this.enclosure } : null,
guid: this.guid,
pubDate: this.pubDate,
chapters: this.chapters.map((ch) => ({ ...ch })),
audioFile: this.audioFile?.toJSON() || null,
publishedAt: this.publishedAt,
addedAt: this.addedAt,
updatedAt: this.updatedAt
}
}
toJSONExpanded() {
return {
libraryItemId: this.libraryItemId,
podcastId: this.podcastId,
id: this.id,
oldEpisodeId: this.oldEpisodeId,
index: this.index,
season: this.season,
episode: this.episode,
episodeType: this.episodeType,
title: this.title,
subtitle: this.subtitle,
description: this.description,
enclosure: this.enclosure ? { ...this.enclosure } : null,
guid: this.guid,
pubDate: this.pubDate,
chapters: this.chapters.map((ch) => ({ ...ch })),
audioFile: this.audioFile?.toJSON() || null,
audioTrack: this.audioTrack?.toJSON() || null,
publishedAt: this.publishedAt,
addedAt: this.addedAt,
updatedAt: this.updatedAt,
duration: this.duration,
size: this.size
}
}
get audioTrack() {
if (!this.audioFile) return null
const audioTrack = new AudioTrack()
audioTrack.setData(this.libraryItemId, this.audioFile, 0)
return audioTrack
}
get tracks() {
return [this.audioTrack]
}
get duration() {
return this.audioFile?.duration || 0
}
get size() {
return this.audioFile?.metadata.size || 0
}
get enclosureUrl() {
return this.enclosure?.url || null
}
update(payload) {
let hasUpdates = false
for (const key in this.toJSON()) {
let newValue = payload[key]
if (newValue === '') newValue = null
let existingValue = this[key]
if (existingValue === '') existingValue = null
if (newValue != undefined && !areEquivalent(newValue, existingValue)) {
this[key] = copyValue(newValue)
hasUpdates = true
}
}
if (hasUpdates) {
this.updatedAt = Date.now()
}
return hasUpdates
}
}
module.exports = PodcastEpisode

View File

@ -1,154 +0,0 @@
const Logger = require('../../Logger')
const BookMetadata = require('../metadata/BookMetadata')
const { areEquivalent, copyValue } = require('../../utils/index')
const { filePathToPOSIX } = require('../../utils/fileUtils')
const AudioFile = require('../files/AudioFile')
const AudioTrack = require('../files/AudioTrack')
const EBookFile = require('../files/EBookFile')
class Book {
constructor(book) {
this.id = null
this.libraryItemId = null
this.metadata = null
this.coverPath = null
this.tags = []
this.audioFiles = []
this.chapters = []
this.ebookFile = null
this.lastCoverSearch = null
this.lastCoverSearchQuery = null
if (book) {
this.construct(book)
}
}
construct(book) {
this.id = book.id
this.libraryItemId = book.libraryItemId
this.metadata = new BookMetadata(book.metadata)
this.coverPath = book.coverPath
this.tags = [...book.tags]
this.audioFiles = book.audioFiles.map((f) => new AudioFile(f))
this.chapters = book.chapters.map((c) => ({ ...c }))
this.ebookFile = book.ebookFile ? new EBookFile(book.ebookFile) : null
this.lastCoverSearch = book.lastCoverSearch || null
this.lastCoverSearchQuery = book.lastCoverSearchQuery || null
}
toJSON() {
return {
id: this.id,
libraryItemId: this.libraryItemId,
metadata: this.metadata.toJSON(),
coverPath: this.coverPath,
tags: [...this.tags],
audioFiles: this.audioFiles.map((f) => f.toJSON()),
chapters: this.chapters.map((c) => ({ ...c })),
ebookFile: this.ebookFile ? this.ebookFile.toJSON() : null
}
}
toJSONMinified() {
return {
id: this.id,
metadata: this.metadata.toJSONMinified(),
coverPath: this.coverPath,
tags: [...this.tags],
numTracks: this.tracks.length,
numAudioFiles: this.audioFiles.length,
numChapters: this.chapters.length,
duration: this.duration,
size: this.size,
ebookFormat: this.ebookFile?.ebookFormat
}
}
toJSONExpanded() {
return {
id: this.id,
libraryItemId: this.libraryItemId,
metadata: this.metadata.toJSONExpanded(),
coverPath: this.coverPath,
tags: [...this.tags],
audioFiles: this.audioFiles.map((f) => f.toJSON()),
chapters: this.chapters.map((c) => ({ ...c })),
duration: this.duration,
size: this.size,
tracks: this.tracks.map((t) => t.toJSON()),
ebookFile: this.ebookFile?.toJSON() || null
}
}
toJSONForMetadataFile() {
return {
tags: [...this.tags],
chapters: this.chapters.map((c) => ({ ...c })),
...this.metadata.toJSONForMetadataFile()
}
}
get size() {
var total = 0
this.audioFiles.forEach((af) => (total += af.metadata.size))
if (this.ebookFile) {
total += this.ebookFile.metadata.size
}
return total
}
get includedAudioFiles() {
return this.audioFiles.filter((af) => !af.exclude)
}
get tracks() {
let startOffset = 0
return this.includedAudioFiles.map((af) => {
const audioTrack = new AudioTrack()
audioTrack.setData(this.libraryItemId, af, startOffset)
startOffset += audioTrack.duration
return audioTrack
})
}
get duration() {
let total = 0
this.tracks.forEach((track) => (total += track.duration))
return total
}
get numTracks() {
return this.tracks.length
}
get isEBookOnly() {
return this.ebookFile && !this.numTracks
}
update(payload) {
const json = this.toJSON()
let hasUpdates = false
for (const key in json) {
if (payload[key] !== undefined) {
if (key === 'metadata') {
if (this.metadata.update(payload.metadata)) {
hasUpdates = true
}
} else if (!areEquivalent(payload[key], json[key])) {
this[key] = copyValue(payload[key])
Logger.debug('[Book] Key updated', key, this[key])
hasUpdates = true
}
}
}
return hasUpdates
}
updateCover(coverPath) {
coverPath = filePathToPOSIX(coverPath)
if (this.coverPath === coverPath) return false
this.coverPath = coverPath
return true
}
}
module.exports = Book

View File

@ -1,178 +0,0 @@
const Logger = require('../../Logger')
const PodcastEpisode = require('../entities/PodcastEpisode')
const PodcastMetadata = require('../metadata/PodcastMetadata')
const { areEquivalent, copyValue } = require('../../utils/index')
const { filePathToPOSIX } = require('../../utils/fileUtils')
class Podcast {
constructor(podcast) {
this.id = null
this.libraryItemId = null
this.metadata = null
this.coverPath = null
this.tags = []
this.episodes = []
this.autoDownloadEpisodes = false
this.autoDownloadSchedule = null
this.lastEpisodeCheck = 0
this.maxEpisodesToKeep = 0
this.maxNewEpisodesToDownload = 3
this.lastCoverSearch = null
this.lastCoverSearchQuery = null
if (podcast) {
this.construct(podcast)
}
}
construct(podcast) {
this.id = podcast.id
this.libraryItemId = podcast.libraryItemId
this.metadata = new PodcastMetadata(podcast.metadata)
this.coverPath = podcast.coverPath
this.tags = [...podcast.tags]
this.episodes = podcast.episodes.map((e) => {
var podcastEpisode = new PodcastEpisode(e)
podcastEpisode.libraryItemId = this.libraryItemId
return podcastEpisode
})
this.autoDownloadEpisodes = !!podcast.autoDownloadEpisodes
this.autoDownloadSchedule = podcast.autoDownloadSchedule || '0 * * * *' // Added in 2.1.3 so default to hourly
this.lastEpisodeCheck = podcast.lastEpisodeCheck || 0
this.maxEpisodesToKeep = podcast.maxEpisodesToKeep || 0
// Default is 3 but 0 is allowed
if (typeof podcast.maxNewEpisodesToDownload !== 'number') {
this.maxNewEpisodesToDownload = 3
} else {
this.maxNewEpisodesToDownload = podcast.maxNewEpisodesToDownload
}
}
toJSON() {
return {
id: this.id,
libraryItemId: this.libraryItemId,
metadata: this.metadata.toJSON(),
coverPath: this.coverPath,
tags: [...this.tags],
episodes: this.episodes.map((e) => e.toJSON()),
autoDownloadEpisodes: this.autoDownloadEpisodes,
autoDownloadSchedule: this.autoDownloadSchedule,
lastEpisodeCheck: this.lastEpisodeCheck,
maxEpisodesToKeep: this.maxEpisodesToKeep,
maxNewEpisodesToDownload: this.maxNewEpisodesToDownload
}
}
toJSONMinified() {
return {
id: this.id,
metadata: this.metadata.toJSONMinified(),
coverPath: this.coverPath,
tags: [...this.tags],
numEpisodes: this.episodes.length,
autoDownloadEpisodes: this.autoDownloadEpisodes,
autoDownloadSchedule: this.autoDownloadSchedule,
lastEpisodeCheck: this.lastEpisodeCheck,
maxEpisodesToKeep: this.maxEpisodesToKeep,
maxNewEpisodesToDownload: this.maxNewEpisodesToDownload,
size: this.size
}
}
toJSONExpanded() {
return {
id: this.id,
libraryItemId: this.libraryItemId,
metadata: this.metadata.toJSONExpanded(),
coverPath: this.coverPath,
tags: [...this.tags],
episodes: this.episodes.map((e) => e.toJSONExpanded()),
autoDownloadEpisodes: this.autoDownloadEpisodes,
autoDownloadSchedule: this.autoDownloadSchedule,
lastEpisodeCheck: this.lastEpisodeCheck,
maxEpisodesToKeep: this.maxEpisodesToKeep,
maxNewEpisodesToDownload: this.maxNewEpisodesToDownload,
size: this.size
}
}
toJSONForMetadataFile() {
return {
tags: [...this.tags],
title: this.metadata.title,
author: this.metadata.author,
description: this.metadata.description,
releaseDate: this.metadata.releaseDate,
genres: [...this.metadata.genres],
feedURL: this.metadata.feedUrl,
imageURL: this.metadata.imageUrl,
itunesPageURL: this.metadata.itunesPageUrl,
itunesId: this.metadata.itunesId,
itunesArtistId: this.metadata.itunesArtistId,
explicit: this.metadata.explicit,
language: this.metadata.language,
podcastType: this.metadata.type
}
}
get size() {
var total = 0
this.episodes.forEach((ep) => (total += ep.size))
return total
}
get duration() {
let total = 0
this.episodes.forEach((ep) => (total += ep.duration))
return total
}
get numTracks() {
return this.episodes.length
}
update(payload) {
var json = this.toJSON()
delete json.episodes // do not update media entities here
var hasUpdates = false
for (const key in json) {
if (payload[key] !== undefined) {
if (key === 'metadata') {
if (this.metadata.update(payload.metadata)) {
hasUpdates = true
}
} else if (!areEquivalent(payload[key], json[key])) {
this[key] = copyValue(payload[key])
Logger.debug('[Podcast] Key updated', key, this[key])
hasUpdates = true
}
}
}
return hasUpdates
}
updateEpisode(id, payload) {
var episode = this.episodes.find((ep) => ep.id == id)
if (!episode) return false
return episode.update(payload)
}
updateCover(coverPath) {
coverPath = filePathToPOSIX(coverPath)
if (this.coverPath === coverPath) return false
this.coverPath = coverPath
return true
}
getEpisode(episodeId) {
if (!episodeId) return null
// Support old episode ids for mobile downloads
if (episodeId.startsWith('ep_')) return this.episodes.find((ep) => ep.oldEpisodeId == episodeId)
return this.episodes.find((ep) => ep.id == episodeId)
}
}
module.exports = Podcast

View File

@ -1,179 +0,0 @@
const Logger = require('../../Logger')
const { areEquivalent, copyValue, getTitleIgnorePrefix, getTitlePrefixAtEnd } = require('../../utils/index')
const parseNameString = require('../../utils/parsers/parseNameString')
class BookMetadata {
constructor(metadata) {
this.title = null
this.subtitle = null
this.authors = []
this.narrators = [] // Array of strings
this.series = []
this.genres = [] // Array of strings
this.publishedYear = null
this.publishedDate = null
this.publisher = null
this.description = null
this.isbn = null
this.asin = null
this.language = null
this.explicit = false
this.abridged = false
if (metadata) {
this.construct(metadata)
}
}
construct(metadata) {
this.title = metadata.title
this.subtitle = metadata.subtitle
this.authors = metadata.authors?.map ? metadata.authors.map((a) => ({ ...a })) : []
this.narrators = metadata.narrators ? [...metadata.narrators].filter((n) => n) : []
this.series = metadata.series?.map
? metadata.series.map((s) => ({
...s,
name: s.name || 'No Title'
}))
: []
this.genres = metadata.genres ? [...metadata.genres] : []
this.publishedYear = metadata.publishedYear || null
this.publishedDate = metadata.publishedDate || null
this.publisher = metadata.publisher
this.description = metadata.description
this.isbn = metadata.isbn
this.asin = metadata.asin
this.language = metadata.language
this.explicit = !!metadata.explicit
this.abridged = !!metadata.abridged
}
toJSON() {
return {
title: this.title,
subtitle: this.subtitle,
authors: this.authors.map((a) => ({ ...a })), // Author JSONMinimal with name and id
narrators: [...this.narrators],
series: this.series.map((s) => ({ ...s })), // Series JSONMinimal with name, id and sequence
genres: [...this.genres],
publishedYear: this.publishedYear,
publishedDate: this.publishedDate,
publisher: this.publisher,
description: this.description,
isbn: this.isbn,
asin: this.asin,
language: this.language,
explicit: this.explicit,
abridged: this.abridged
}
}
toJSONMinified() {
return {
title: this.title,
titleIgnorePrefix: this.titlePrefixAtEnd,
subtitle: this.subtitle,
authorName: this.authorName,
authorNameLF: this.authorNameLF,
narratorName: this.narratorName,
seriesName: this.seriesName,
genres: [...this.genres],
publishedYear: this.publishedYear,
publishedDate: this.publishedDate,
publisher: this.publisher,
description: this.description,
isbn: this.isbn,
asin: this.asin,
language: this.language,
explicit: this.explicit,
abridged: this.abridged
}
}
toJSONExpanded() {
return {
title: this.title,
titleIgnorePrefix: this.titlePrefixAtEnd,
subtitle: this.subtitle,
authors: this.authors.map((a) => ({ ...a })), // Author JSONMinimal with name and id
narrators: [...this.narrators],
series: this.series.map((s) => ({ ...s })),
genres: [...this.genres],
publishedYear: this.publishedYear,
publishedDate: this.publishedDate,
publisher: this.publisher,
description: this.description,
isbn: this.isbn,
asin: this.asin,
language: this.language,
explicit: this.explicit,
authorName: this.authorName,
authorNameLF: this.authorNameLF,
narratorName: this.narratorName,
seriesName: this.seriesName,
abridged: this.abridged
}
}
toJSONForMetadataFile() {
const json = this.toJSON()
json.authors = json.authors.map((au) => au.name)
json.series = json.series.map((se) => {
if (!se.sequence) return se.name
return `${se.name} #${se.sequence}`
})
return json
}
clone() {
return new BookMetadata(this.toJSON())
}
get titleIgnorePrefix() {
return getTitleIgnorePrefix(this.title)
}
get titlePrefixAtEnd() {
return getTitlePrefixAtEnd(this.title)
}
get authorName() {
if (!this.authors.length) return ''
return this.authors.map((au) => au.name).join(', ')
}
get authorNameLF() {
// Last, First
if (!this.authors.length) return ''
return this.authors.map((au) => parseNameString.nameToLastFirst(au.name)).join(', ')
}
get seriesName() {
if (!this.series.length) return ''
return this.series
.map((se) => {
if (!se.sequence) return se.name
return `${se.name} #${se.sequence}`
})
.join(', ')
}
get narratorName() {
return this.narrators.join(', ')
}
getSeries(seriesId) {
return this.series.find((se) => se.id == seriesId)
}
update(payload) {
const json = this.toJSON()
let hasUpdates = false
for (const key in json) {
if (payload[key] !== undefined) {
if (!areEquivalent(payload[key], json[key])) {
this[key] = copyValue(payload[key])
Logger.debug('[BookMetadata] Key updated', key, this[key])
hasUpdates = true
}
}
}
return hasUpdates
}
}
module.exports = BookMetadata
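Together with the tags and chapters added by the removed Book class earlier, the removed toJSONForMetadataFile() here defined the shape written to an item's metadata file: authors flattened to plain name strings and series flattened to "Name #sequence" strings. A sketch of the resulting payload with placeholder values (the chapter fields are an assumption for illustration):

// Illustrative sketch only, not part of the diff
const metadataFilePayload = {
  tags: ['Favorites'],
  chapters: [{ id: 0, start: 0, end: 1520.5, title: 'Chapter 1' }],
  title: 'Example Title',
  subtitle: null,
  authors: ['Jane Example'],
  narrators: ['John Example'],
  series: ['Example Series #1'],
  genres: ['Fantasy'],
  publishedYear: '2020',
  publishedDate: null,
  publisher: null,
  description: null,
  isbn: null,
  asin: null,
  language: null,
  explicit: false,
  abridged: false
}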

View File

@ -1,109 +0,0 @@
const Logger = require('../../Logger')
const { areEquivalent, copyValue, getTitleIgnorePrefix, getTitlePrefixAtEnd } = require('../../utils/index')
class PodcastMetadata {
constructor(metadata) {
this.title = null
this.author = null
this.description = null
this.releaseDate = null
this.genres = []
this.feedUrl = null
this.imageUrl = null
this.itunesPageUrl = null
this.itunesId = null
this.itunesArtistId = null
this.explicit = false
this.language = null
this.type = null
if (metadata) {
this.construct(metadata)
}
}
construct(metadata) {
this.title = metadata.title
this.author = metadata.author
this.description = metadata.description
this.releaseDate = metadata.releaseDate
this.genres = [...metadata.genres]
this.feedUrl = metadata.feedUrl
this.imageUrl = metadata.imageUrl
this.itunesPageUrl = metadata.itunesPageUrl
this.itunesId = metadata.itunesId
this.itunesArtistId = metadata.itunesArtistId
this.explicit = metadata.explicit
this.language = metadata.language || null
this.type = metadata.type || 'episodic'
}
toJSON() {
return {
title: this.title,
author: this.author,
description: this.description,
releaseDate: this.releaseDate,
genres: [...this.genres],
feedUrl: this.feedUrl,
imageUrl: this.imageUrl,
itunesPageUrl: this.itunesPageUrl,
itunesId: this.itunesId,
itunesArtistId: this.itunesArtistId,
explicit: this.explicit,
language: this.language,
type: this.type
}
}
toJSONMinified() {
return {
title: this.title,
titleIgnorePrefix: this.titlePrefixAtEnd,
author: this.author,
description: this.description,
releaseDate: this.releaseDate,
genres: [...this.genres],
feedUrl: this.feedUrl,
imageUrl: this.imageUrl,
itunesPageUrl: this.itunesPageUrl,
itunesId: this.itunesId,
itunesArtistId: this.itunesArtistId,
explicit: this.explicit,
language: this.language,
type: this.type
}
}
toJSONExpanded() {
return this.toJSONMinified()
}
clone() {
return new PodcastMetadata(this.toJSON())
}
get titleIgnorePrefix() {
return getTitleIgnorePrefix(this.title)
}
get titlePrefixAtEnd() {
return getTitlePrefixAtEnd(this.title)
}
update(payload) {
const json = this.toJSON()
let hasUpdates = false
for (const key in json) {
if (payload[key] !== undefined) {
if (!areEquivalent(payload[key], json[key])) {
this[key] = copyValue(payload[key])
Logger.debug('[PodcastMetadata] Key updated', key, this[key])
hasUpdates = true
}
}
}
return hasUpdates
}
}
module.exports = PodcastMetadata

View File

@ -105,7 +105,6 @@ class ApiRouter {
this.router.post('/items/batch/scan', LibraryItemController.batchScan.bind(this)) this.router.post('/items/batch/scan', LibraryItemController.batchScan.bind(this))
this.router.get('/items/:id', LibraryItemController.middleware.bind(this), LibraryItemController.findOne.bind(this)) this.router.get('/items/:id', LibraryItemController.middleware.bind(this), LibraryItemController.findOne.bind(this))
this.router.patch('/items/:id', LibraryItemController.middleware.bind(this), LibraryItemController.update.bind(this))
this.router.delete('/items/:id', LibraryItemController.middleware.bind(this), LibraryItemController.delete.bind(this)) this.router.delete('/items/:id', LibraryItemController.middleware.bind(this), LibraryItemController.delete.bind(this))
this.router.get('/items/:id/download', LibraryItemController.middleware.bind(this), LibraryItemController.download.bind(this)) this.router.get('/items/:id/download', LibraryItemController.middleware.bind(this), LibraryItemController.download.bind(this))
this.router.patch('/items/:id/media', LibraryItemController.middleware.bind(this), LibraryItemController.updateMedia.bind(this)) this.router.patch('/items/:id/media', LibraryItemController.middleware.bind(this), LibraryItemController.updateMedia.bind(this))
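With the generic PATCH /api/items/:id route removed above, item detail updates go through the media route that remains registered here. A hedged client-side sketch; the payload shape (detail fields nested under metadata) and the bearer-token auth header are assumptions for illustration:

// Illustrative sketch only, not part of the diff
await fetch(`/api/items/${libraryItemId}/media`, {
  method: 'PATCH',
  headers: { 'Content-Type': 'application/json', Authorization: `Bearer ${apiToken}` },
  body: JSON.stringify({ metadata: { title: 'Corrected Title' }, tags: ['Favorites'] })
})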
@ -531,109 +530,5 @@ class ApiRouter {
}) })
return listeningStats return listeningStats
} }
async createAuthorsAndSeriesForItemUpdate(mediaPayload, libraryId) {
if (mediaPayload.metadata) {
const mediaMetadata = mediaPayload.metadata
// Create new authors if in payload
if (mediaMetadata.authors?.length) {
const newAuthors = []
for (let i = 0; i < mediaMetadata.authors.length; i++) {
const authorName = (mediaMetadata.authors[i].name || '').trim()
if (!authorName) {
Logger.error(`[ApiRouter] Invalid author object, no name`, mediaMetadata.authors[i])
mediaMetadata.authors[i].id = null
continue
}
if (mediaMetadata.authors[i].id?.startsWith('new')) {
mediaMetadata.authors[i].id = null
}
// Ensure the ID for the author exists
if (mediaMetadata.authors[i].id && !(await Database.checkAuthorExists(libraryId, mediaMetadata.authors[i].id))) {
Logger.warn(`[ApiRouter] Author id "${mediaMetadata.authors[i].id}" does not exist`)
mediaMetadata.authors[i].id = null
}
if (!mediaMetadata.authors[i].id) {
let author = await Database.authorModel.getByNameAndLibrary(authorName, libraryId)
if (!author) {
author = await Database.authorModel.create({
name: authorName,
lastFirst: Database.authorModel.getLastFirst(authorName),
libraryId
})
Logger.debug(`[ApiRouter] Creating new author "${author.name}"`)
newAuthors.push(author)
// Update filter data
Database.addAuthorToFilterData(libraryId, author.name, author.id)
}
// Update ID in original payload
mediaMetadata.authors[i].id = author.id
}
}
// Remove authors without an id
mediaMetadata.authors = mediaMetadata.authors.filter((au) => !!au.id)
if (newAuthors.length) {
SocketAuthority.emitter(
'authors_added',
newAuthors.map((au) => au.toOldJSON())
)
}
}
// Create new series if in payload
if (mediaMetadata.series && mediaMetadata.series.length) {
const newSeries = []
for (let i = 0; i < mediaMetadata.series.length; i++) {
const seriesName = (mediaMetadata.series[i].name || '').trim()
if (!seriesName) {
Logger.error(`[ApiRouter] Invalid series object, no name`, mediaMetadata.series[i])
mediaMetadata.series[i].id = null
continue
}
if (mediaMetadata.series[i].id?.startsWith('new')) {
mediaMetadata.series[i].id = null
}
// Ensure the ID for the series exists
if (mediaMetadata.series[i].id && !(await Database.checkSeriesExists(libraryId, mediaMetadata.series[i].id))) {
Logger.warn(`[ApiRouter] Series id "${mediaMetadata.series[i].id}" does not exist`)
mediaMetadata.series[i].id = null
}
if (!mediaMetadata.series[i].id) {
let seriesItem = await Database.seriesModel.getByNameAndLibrary(seriesName, libraryId)
if (!seriesItem) {
seriesItem = await Database.seriesModel.create({
name: seriesName,
nameIgnorePrefix: getTitleIgnorePrefix(seriesName),
libraryId
})
Logger.debug(`[ApiRouter] Creating new series "${seriesItem.name}"`)
newSeries.push(seriesItem)
// Update filter data
Database.addSeriesToFilterData(libraryId, seriesItem.name, seriesItem.id)
}
// Update ID in original payload
mediaMetadata.series[i].id = seriesItem.id
}
}
// Remove series without an id
mediaMetadata.series = mediaMetadata.series.filter((se) => se.id)
if (newSeries.length) {
SocketAuthority.emitter(
'multiple_series_added',
newSeries.map((se) => se.toOldJSON())
)
}
}
}
}
} }
module.exports = ApiRouter module.exports = ApiRouter

View File

@ -582,7 +582,7 @@ class LibraryScanner {
} }
// Check if book dir group is already an item // Check if book dir group is already an item
let existingLibraryItem = await Database.libraryItemModel.findOneOld({ let existingLibraryItem = await Database.libraryItemModel.findOneExpanded({
libraryId: library.id, libraryId: library.id,
path: potentialChildDirs path: potentialChildDirs
}) })
@ -606,17 +606,17 @@ class LibraryScanner {
if (existingLibraryItem.path === fullPath) { if (existingLibraryItem.path === fullPath) {
const exists = await fs.pathExists(fullPath) const exists = await fs.pathExists(fullPath)
if (!exists) { if (!exists) {
Logger.info(`[LibraryScanner] Scanning file update group and library item was deleted "${existingLibraryItem.media.metadata.title}" - marking as missing`) Logger.info(`[LibraryScanner] Scanning file update group and library item was deleted "${existingLibraryItem.media.title}" - marking as missing`)
existingLibraryItem.setMissing() existingLibraryItem.isMissing = true
await Database.updateLibraryItem(existingLibraryItem) await existingLibraryItem.save()
SocketAuthority.emitter('item_updated', existingLibraryItem.toJSONExpanded()) SocketAuthority.emitter('item_updated', existingLibraryItem.toOldJSONExpanded())
itemGroupingResults[itemDir] = ScanResult.REMOVED itemGroupingResults[itemDir] = ScanResult.REMOVED
continue continue
} }
} }
// Scan library item for updates // Scan library item for updates
Logger.debug(`[LibraryScanner] Folder update for relative path "${itemDir}" is in library item "${existingLibraryItem.media.metadata.title}" with id "${existingLibraryItem.id}" - scan for updates`) Logger.debug(`[LibraryScanner] Folder update for relative path "${itemDir}" is in library item "${existingLibraryItem.media.title}" with id "${existingLibraryItem.id}" - scan for updates`)
itemGroupingResults[itemDir] = await LibraryItemScanner.scanLibraryItem(existingLibraryItem.id, updatedLibraryItemDetails) itemGroupingResults[itemDir] = await LibraryItemScanner.scanLibraryItem(existingLibraryItem.id, updatedLibraryItemDetails)
continue continue
} else if (library.settings.audiobooksOnly && !hasAudioFiles(fileUpdateGroup, itemDir)) { } else if (library.settings.audiobooksOnly && !hasAudioFiles(fileUpdateGroup, itemDir)) {
@ -672,7 +672,7 @@ function isSingleMediaFile(fileUpdateGroup, itemDir) {
async function findLibraryItemByItemToItemInoMatch(libraryId, fullPath) { async function findLibraryItemByItemToItemInoMatch(libraryId, fullPath) {
const ino = await fileUtils.getIno(fullPath) const ino = await fileUtils.getIno(fullPath)
if (!ino) return null if (!ino) return null
const existingLibraryItem = await Database.libraryItemModel.findOneOld({ const existingLibraryItem = await Database.libraryItemModel.findOneExpanded({
libraryId: libraryId, libraryId: libraryId,
ino: ino ino: ino
}) })
@ -685,7 +685,7 @@ async function findLibraryItemByItemToFileInoMatch(libraryId, fullPath, isSingle
// check if it was moved from another folder by comparing the ino to the library files // check if it was moved from another folder by comparing the ino to the library files
const ino = await fileUtils.getIno(fullPath) const ino = await fileUtils.getIno(fullPath)
if (!ino) return null if (!ino) return null
const existingLibraryItem = await Database.libraryItemModel.findOneOld( const existingLibraryItem = await Database.libraryItemModel.findOneExpanded(
[ [
{ {
libraryId: libraryId libraryId: libraryId
@ -711,7 +711,7 @@ async function findLibraryItemByFileToItemInoMatch(libraryId, fullPath, isSingle
if (ino) itemFileInos.push(ino) if (ino) itemFileInos.push(ino)
} }
if (!itemFileInos.length) return null if (!itemFileInos.length) return null
const existingLibraryItem = await Database.libraryItemModel.findOneOld({ const existingLibraryItem = await Database.libraryItemModel.findOneExpanded({
libraryId: libraryId, libraryId: libraryId,
ino: { ino: {
[sequelize.Op.in]: itemFileInos [sequelize.Op.in]: itemFileInos
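The LibraryScanner changes above swap findOneOld for findOneExpanded and mutate the Sequelize model directly when an item disappears from disk. A condensed sketch of that flow, assuming library, fullPath, and the fsExtra wrapper (fs) are in scope as they are in the surrounding code:

// Illustrative sketch only, not part of the diff
const existingLibraryItem = await Database.libraryItemModel.findOneExpanded({ libraryId: library.id, path: fullPath })
if (existingLibraryItem && !(await fs.pathExists(existingLibraryItem.path))) {
  existingLibraryItem.isMissing = true
  await existingLibraryItem.save()
  SocketAuthority.emitter('item_updated', existingLibraryItem.toOldJSONExpanded())
}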

View File

@ -30,14 +30,14 @@ class Scanner {
/** /**
* *
* @param {import('../routers/ApiRouter')} apiRouterCtx * @param {import('../routers/ApiRouter')} apiRouterCtx
* @param {import('../objects/LibraryItem')} libraryItem * @param {import('../models/LibraryItem')} libraryItem
* @param {QuickMatchOptions} options * @param {QuickMatchOptions} options
* @returns {Promise<{updated: boolean, libraryItem: import('../objects/LibraryItem')}>} * @returns {Promise<{updated: boolean, libraryItem: Object}>}
*/ */
async quickMatchLibraryItem(apiRouterCtx, libraryItem, options = {}) { async quickMatchLibraryItem(apiRouterCtx, libraryItem, options = {}) {
const provider = options.provider || 'google' const provider = options.provider || 'google'
const searchTitle = options.title || libraryItem.media.metadata.title const searchTitle = options.title || libraryItem.media.title
const searchAuthor = options.author || libraryItem.media.metadata.authorName const searchAuthor = options.author || libraryItem.media.authorName
// If overrideCover and overrideDetails are not sent in options then use the server setting to determine if we should override // If overrideCover and overrideDetails are not sent in options then use the server setting to determine if we should override
if (options.overrideCover === undefined && options.overrideDetails === undefined && Database.serverSettings.scannerPreferMatchedMetadata) { if (options.overrideCover === undefined && options.overrideDetails === undefined && Database.serverSettings.scannerPreferMatchedMetadata) {
@ -52,11 +52,11 @@ class Scanner {
let existingSeries = [] let existingSeries = []
if (libraryItem.isBook) { if (libraryItem.isBook) {
existingAuthors = libraryItem.media.metadata.authors.map((a) => a.id) existingAuthors = libraryItem.media.authors.map((a) => a.id)
existingSeries = libraryItem.media.metadata.series.map((s) => s.id) existingSeries = libraryItem.media.series.map((s) => s.id)
const searchISBN = options.isbn || libraryItem.media.metadata.isbn const searchISBN = options.isbn || libraryItem.media.isbn
const searchASIN = options.asin || libraryItem.media.metadata.asin const searchASIN = options.asin || libraryItem.media.asin
const results = await BookFinder.search(libraryItem, provider, searchTitle, searchAuthor, searchISBN, searchASIN, { maxFuzzySearches: 2 }) const results = await BookFinder.search(libraryItem, provider, searchTitle, searchAuthor, searchISBN, searchASIN, { maxFuzzySearches: 2 })
if (!results.length) { if (!results.length) {
@ -69,15 +69,21 @@ class Scanner {
// Update cover if not set OR overrideCover flag // Update cover if not set OR overrideCover flag
if (matchData.cover && (!libraryItem.media.coverPath || options.overrideCover)) { if (matchData.cover && (!libraryItem.media.coverPath || options.overrideCover)) {
Logger.debug(`[Scanner] Updating cover "${matchData.cover}"`) Logger.debug(`[Scanner] Updating cover "${matchData.cover}"`)
var coverResult = await CoverManager.downloadCoverFromUrl(libraryItem, matchData.cover) const coverResult = await CoverManager.downloadCoverFromUrlNew(matchData.cover, libraryItem.id, libraryItem.isFile ? null : libraryItem.path)
if (!coverResult || coverResult.error || !coverResult.cover) { if (coverResult.error) {
Logger.warn(`[Scanner] Match cover "${matchData.cover}" failed to use: ${coverResult ? coverResult.error : 'Unknown Error'}`) Logger.warn(`[Scanner] Match cover "${matchData.cover}" failed to use: ${coverResult.error}`)
} else { } else {
libraryItem.media.coverPath = coverResult.cover
libraryItem.media.changed('coverPath', true) // Cover path may be the same but this forces the update
hasUpdated = true hasUpdated = true
} }
} }
updatePayload = await this.quickMatchBookBuildUpdatePayload(libraryItem, matchData, options) const bookBuildUpdateData = await this.quickMatchBookBuildUpdatePayload(apiRouterCtx, libraryItem, matchData, options)
updatePayload = bookBuildUpdateData.updatePayload
if (bookBuildUpdateData.hasSeriesUpdates || bookBuildUpdateData.hasAuthorUpdates) {
hasUpdated = true
}
} else if (libraryItem.isPodcast) { } else if (libraryItem.isPodcast) {
// Podcast quick match // Podcast quick match
const results = await PodcastFinder.search(searchTitle) const results = await PodcastFinder.search(searchTitle)
@ -91,10 +97,12 @@ class Scanner {
// Update cover if not set OR overrideCover flag // Update cover if not set OR overrideCover flag
if (matchData.cover && (!libraryItem.media.coverPath || options.overrideCover)) { if (matchData.cover && (!libraryItem.media.coverPath || options.overrideCover)) {
Logger.debug(`[Scanner] Updating cover "${matchData.cover}"`) Logger.debug(`[Scanner] Updating cover "${matchData.cover}"`)
var coverResult = await CoverManager.downloadCoverFromUrl(libraryItem, matchData.cover) const coverResult = await CoverManager.downloadCoverFromUrlNew(matchData.cover, libraryItem.id, libraryItem.path)
if (!coverResult || coverResult.error || !coverResult.cover) { if (coverResult.error) {
Logger.warn(`[Scanner] Match cover "${matchData.cover}" failed to use: ${coverResult ? coverResult.error : 'Unknown Error'}`) Logger.warn(`[Scanner] Match cover "${matchData.cover}" failed to use: ${coverResult.error}`)
} else { } else {
libraryItem.media.coverPath = coverResult.cover
libraryItem.media.changed('coverPath', true) // Cover path may be the same but this forces the update
hasUpdated = true hasUpdated = true
} }
} }
@ -103,44 +111,45 @@ class Scanner {
} }
if (Object.keys(updatePayload).length) { if (Object.keys(updatePayload).length) {
Logger.debug('[Scanner] Updating details', updatePayload) Logger.debug('[Scanner] Updating details with payload', updatePayload)
if (libraryItem.media.update(updatePayload)) { libraryItem.media.set(updatePayload)
if (libraryItem.media.changed()) {
Logger.debug(`[Scanner] Updating library item "${libraryItem.media.title}" keys`, libraryItem.media.changed())
hasUpdated = true hasUpdated = true
} }
} }
if (hasUpdated) { if (hasUpdated) {
if (libraryItem.isPodcast && libraryItem.media.metadata.feedUrl) { if (libraryItem.isPodcast && libraryItem.media.feedURL) {
// Quick match all unmatched podcast episodes // Quick match all unmatched podcast episodes
await this.quickMatchPodcastEpisodes(libraryItem, options) await this.quickMatchPodcastEpisodes(libraryItem, options)
} }
await Database.updateLibraryItem(libraryItem) await libraryItem.media.save()
SocketAuthority.emitter('item_updated', libraryItem.toJSONExpanded())
// Check if any authors or series are now empty and should be removed libraryItem.changed('updatedAt', true)
if (libraryItem.isBook) { await libraryItem.save()
const authorsRemoved = existingAuthors.filter((aid) => !libraryItem.media.metadata.authors.find((au) => au.id === aid))
const seriesRemoved = existingSeries.filter((sid) => !libraryItem.media.metadata.series.find((se) => se.id === sid))
if (authorsRemoved.length) { await libraryItem.saveMetadataFile()
await apiRouterCtx.checkRemoveAuthorsWithNoBooks(authorsRemoved)
} SocketAuthority.emitter('item_updated', libraryItem.toOldJSONExpanded())
if (seriesRemoved.length) {
await apiRouterCtx.checkRemoveEmptySeries(seriesRemoved)
}
}
} }
return { return {
updated: hasUpdated, updated: hasUpdated,
libraryItem: libraryItem.toJSONExpanded() libraryItem: libraryItem.toOldJSONExpanded()
} }
} }
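The quick-match flow above no longer funnels changes through the old media.update(); it stages fields on the Sequelize instance and relies on dirty checking. A condensed sketch of that pattern as used in quickMatchLibraryItem above:

// Illustrative sketch only, not part of the diff
libraryItem.media.set(updatePayload) // stage the matched fields
if (libraryItem.media.changed()) { // falsy when nothing actually differs, otherwise lists the changed keys
  await libraryItem.media.save()
  libraryItem.changed('updatedAt', true) // force the item row to register an update
  await libraryItem.save()
  await libraryItem.saveMetadataFile()
  SocketAuthority.emitter('item_updated', libraryItem.toOldJSONExpanded())
}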
/**
*
* @param {import('../models/LibraryItem')} libraryItem
* @param {*} matchData
* @param {QuickMatchOptions} options
* @returns {Map<string, any>} - Update payload
*/
quickMatchPodcastBuildUpdatePayload(libraryItem, matchData, options) { quickMatchPodcastBuildUpdatePayload(libraryItem, matchData, options) {
const updatePayload = {} const updatePayload = {}
updatePayload.metadata = {}
const matchDataTransformed = { const matchDataTransformed = {
title: matchData.title || null, title: matchData.title || null,
@ -158,7 +167,7 @@ class Scanner {
for (const key in matchDataTransformed) { for (const key in matchDataTransformed) {
if (matchDataTransformed[key]) { if (matchDataTransformed[key]) {
if (key === 'genres') { if (key === 'genres') {
if (!libraryItem.media.metadata.genres.length || options.overrideDetails) { if (!libraryItem.media.genres.length || options.overrideDetails) {
var genresArray = [] var genresArray = []
if (Array.isArray(matchDataTransformed[key])) genresArray = [...matchDataTransformed[key]] if (Array.isArray(matchDataTransformed[key])) genresArray = [...matchDataTransformed[key]]
else { else {
@ -169,46 +178,42 @@ class Scanner {
.map((v) => v.trim()) .map((v) => v.trim())
.filter((v) => !!v) .filter((v) => !!v)
} }
updatePayload.metadata[key] = genresArray updatePayload[key] = genresArray
} }
} else if (libraryItem.media.metadata[key] !== matchDataTransformed[key] && (!libraryItem.media.metadata[key] || options.overrideDetails)) { } else if (libraryItem.media[key] !== matchDataTransformed[key] && (!libraryItem.media[key] || options.overrideDetails)) {
updatePayload.metadata[key] = matchDataTransformed[key] updatePayload[key] = matchDataTransformed[key]
} }
} }
} }
if (!Object.keys(updatePayload.metadata).length) {
delete updatePayload.metadata
}
return updatePayload return updatePayload
} }
/** /**
* *
* @param {import('../objects/LibraryItem')} libraryItem * @param {import('../routers/ApiRouter')} apiRouterCtx
* @param {import('../models/LibraryItem')} libraryItem
* @param {*} matchData * @param {*} matchData
* @param {QuickMatchOptions} options * @param {QuickMatchOptions} options
* @returns * @returns {Promise<{updatePayload: Map<string, any>, seriesIdsRemoved: string[], hasSeriesUpdates: boolean, authorIdsRemoved: string[], hasAuthorUpdates: boolean}>}
*/ */
async quickMatchBookBuildUpdatePayload(libraryItem, matchData, options) { async quickMatchBookBuildUpdatePayload(apiRouterCtx, libraryItem, matchData, options) {
// Update media metadata if not set OR overrideDetails flag // Update media metadata if not set OR overrideDetails flag
const detailKeysToUpdate = ['title', 'subtitle', 'description', 'narrator', 'publisher', 'publishedYear', 'genres', 'tags', 'language', 'explicit', 'abridged', 'asin', 'isbn'] const detailKeysToUpdate = ['title', 'subtitle', 'description', 'narrator', 'publisher', 'publishedYear', 'genres', 'tags', 'language', 'explicit', 'abridged', 'asin', 'isbn']
const updatePayload = {} const updatePayload = {}
updatePayload.metadata = {}
for (const key in matchData) { for (const key in matchData) {
if (matchData[key] && detailKeysToUpdate.includes(key)) { if (matchData[key] && detailKeysToUpdate.includes(key)) {
if (key === 'narrator') { if (key === 'narrator') {
if (!libraryItem.media.metadata.narratorName || options.overrideDetails) { if (!libraryItem.media.narrators?.length || options.overrideDetails) {
updatePayload.metadata.narrators = matchData[key] updatePayload.narrators = matchData[key]
.split(',') .split(',')
.map((v) => v.trim()) .map((v) => v.trim())
.filter((v) => !!v) .filter((v) => !!v)
} }
} else if (key === 'genres') { } else if (key === 'genres') {
if (!libraryItem.media.metadata.genres.length || options.overrideDetails) { if (!libraryItem.media.genres.length || options.overrideDetails) {
var genresArray = [] let genresArray = []
if (Array.isArray(matchData[key])) genresArray = [...matchData[key]] if (Array.isArray(matchData[key])) genresArray = [...matchData[key]]
else { else {
// Genres should always be passed in as an array but just in case handle a string // Genres should always be passed in as an array but just in case handle a string
@ -218,11 +223,11 @@ class Scanner {
.map((v) => v.trim()) .map((v) => v.trim())
.filter((v) => !!v) .filter((v) => !!v)
} }
updatePayload.metadata[key] = genresArray updatePayload[key] = genresArray
} }
} else if (key === 'tags') { } else if (key === 'tags') {
if (!libraryItem.media.tags.length || options.overrideDetails) { if (!libraryItem.media.tags.length || options.overrideDetails) {
var tagsArray = [] let tagsArray = []
if (Array.isArray(matchData[key])) tagsArray = [...matchData[key]] if (Array.isArray(matchData[key])) tagsArray = [...matchData[key]]
else else
tagsArray = matchData[key] tagsArray = matchData[key]
@ -231,22 +236,25 @@ class Scanner {
.filter((v) => !!v) .filter((v) => !!v)
updatePayload[key] = tagsArray updatePayload[key] = tagsArray
} }
} else if (!libraryItem.media.metadata[key] || options.overrideDetails) { } else if (!libraryItem.media[key] || options.overrideDetails) {
updatePayload.metadata[key] = matchData[key] updatePayload[key] = matchData[key]
} }
} }
} }
// Add or set author if not set // Add or set author if not set
if (matchData.author && (!libraryItem.media.metadata.authorName || options.overrideDetails)) { let hasAuthorUpdates = false
if (matchData.author && (!libraryItem.media.authorName || options.overrideDetails)) {
if (!Array.isArray(matchData.author)) { if (!Array.isArray(matchData.author)) {
matchData.author = matchData.author matchData.author = matchData.author
.split(',') .split(',')
.map((au) => au.trim()) .map((au) => au.trim())
.filter((au) => !!au) .filter((au) => !!au)
} }
const authorPayload = [] const authorIdsRemoved = []
for (const authorName of matchData.author) { for (const authorName of matchData.author) {
const existingAuthor = libraryItem.media.authors.find((a) => a.name.toLowerCase() === authorName.toLowerCase())
if (!existingAuthor) {
let author = await Database.authorModel.getByNameAndLibrary(authorName, libraryItem.libraryId) let author = await Database.authorModel.getByNameAndLibrary(authorName, libraryItem.libraryId)
if (!author) { if (!author) {
author = await Database.authorModel.create({ author = await Database.authorModel.create({
@ -257,17 +265,52 @@ class Scanner {
            SocketAuthority.emitter('author_added', author.toOldJSON())
            // Update filter data
            Database.addAuthorToFilterData(libraryItem.libraryId, author.name, author.id)
          }
+         await Database.bookAuthorModel
+           .create({
+             authorId: author.id,
+             bookId: libraryItem.media.id
+           })
+           .then(() => {
+             Logger.info(`[Scanner] quickMatchBookBuildUpdatePayload: Added author "${author.name}" to "${libraryItem.media.title}"`)
+             libraryItem.media.authors.push(author)
+             hasAuthorUpdates = true
+           })
-         authorPayload.push(author.toJSONMinimal())
        }
      }
-     updatePayload.metadata.authors = authorPayload
+     const authorsRemoved = libraryItem.media.authors.filter((a) => !matchData.author.find((ma) => ma.toLowerCase() === a.name.toLowerCase()))
+     if (authorsRemoved.length) {
+       for (const author of authorsRemoved) {
+         await Database.bookAuthorModel.destroy({ where: { authorId: author.id, bookId: libraryItem.media.id } })
+         libraryItem.media.authors = libraryItem.media.authors.filter((a) => a.id !== author.id)
+         authorIdsRemoved.push(author.id)
+         Logger.info(`[Scanner] quickMatchBookBuildUpdatePayload: Removed author "${author.name}" from "${libraryItem.media.title}"`)
+       }
+       hasAuthorUpdates = true
+     }
+
+     // For all authors removed from book, check if they are empty now and should be removed
+     if (authorIdsRemoved.length) {
+       await apiRouterCtx.checkRemoveAuthorsWithNoBooks(authorIdsRemoved)
+     }
    }

    // Add or set series if not set
+   let hasSeriesUpdates = false
-   if (matchData.series && (!libraryItem.media.metadata.seriesName || options.overrideDetails)) {
+   if (matchData.series && (!libraryItem.media.seriesName || options.overrideDetails)) {
      if (!Array.isArray(matchData.series)) matchData.series = [{ series: matchData.series, sequence: matchData.sequence }]
-     const seriesPayload = []
+     const seriesIdsRemoved = []
      for (const seriesMatchItem of matchData.series) {
+       const existingSeries = libraryItem.media.series.find((s) => s.name.toLowerCase() === seriesMatchItem.series.toLowerCase())
+       if (existingSeries) {
+         if (existingSeries.bookSeries.sequence !== seriesMatchItem.sequence) {
+           existingSeries.bookSeries.sequence = seriesMatchItem.sequence
+           await existingSeries.bookSeries.save()
+           Logger.info(`[Scanner] quickMatchBookBuildUpdatePayload: Updated series sequence for "${existingSeries.name}" to ${seriesMatchItem.sequence} in "${libraryItem.media.title}"`)
+           hasSeriesUpdates = true
+         }
+       } else {
          let seriesItem = await Database.seriesModel.getByNameAndLibrary(seriesMatchItem.series, libraryItem.libraryId)
          if (!seriesItem) {
            seriesItem = await Database.seriesModel.create({
@ -279,46 +322,88 @@ class Scanner {
            Database.addSeriesToFilterData(libraryItem.libraryId, seriesItem.name, seriesItem.id)
            SocketAuthority.emitter('series_added', seriesItem.toOldJSON())
          }
-         seriesPayload.push(seriesItem.toJSONMinimal(seriesMatchItem.sequence))
+         const bookSeries = await Database.bookSeriesModel.create({
+           seriesId: seriesItem.id,
+           bookId: libraryItem.media.id,
+           sequence: seriesMatchItem.sequence
+         })
+         seriesItem.bookSeries = bookSeries
+         libraryItem.media.series.push(seriesItem)
+         Logger.info(`[Scanner] quickMatchBookBuildUpdatePayload: Added series "${seriesItem.name}" to "${libraryItem.media.title}"`)
+         hasSeriesUpdates = true
+       }
      }
+
+     const seriesRemoved = libraryItem.media.series.filter((s) => !matchData.series.find((ms) => ms.series.toLowerCase() === s.name.toLowerCase()))
+     if (seriesRemoved.length) {
+       for (const series of seriesRemoved) {
+         await series.bookSeries.destroy()
+         libraryItem.media.series = libraryItem.media.series.filter((s) => s.id !== series.id)
+         seriesIdsRemoved.push(series.id)
+         Logger.info(`[Scanner] quickMatchBookBuildUpdatePayload: Removed series "${series.name}" from "${libraryItem.media.title}"`)
+       }
+       hasSeriesUpdates = true
+     }
-     updatePayload.metadata.series = seriesPayload
-   }
-   if (!Object.keys(updatePayload.metadata).length) {
-     delete updatePayload.metadata
+
+     // For all series removed from book, check if it is empty now and should be removed
+     if (seriesIdsRemoved.length) {
+       await apiRouterCtx.checkRemoveEmptySeries(seriesIdsRemoved)
+     }
    }

-   return updatePayload
+   return {
+     updatePayload,
+     hasSeriesUpdates,
+     hasAuthorUpdates
+   }
  }
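The author and series handling above shares one pattern: diff the matched names against the authors and series already joined to the book, case-insensitively, then create or destroy the corresponding join rows (bookAuthorModel / bookSeriesModel). A minimal standalone sketch of that diffing step on plain data; the names and ids here are hypothetical and not part of this change:

const matchedAuthorNames = ['J.R.R. Tolkien', 'Christopher Tolkien']
const currentAuthors = [
  { id: 'au1', name: 'j.r.r. tolkien' },
  { id: 'au2', name: 'Humphrey Carpenter' }
]

// Names with no existing author of the same (case-insensitive) name would get a join row created
const toAdd = matchedAuthorNames.filter((name) => !currentAuthors.some((au) => au.name.toLowerCase() === name.toLowerCase()))
// Existing authors missing from the match result would have their join row destroyed
const toRemove = currentAuthors.filter((au) => !matchedAuthorNames.some((name) => name.toLowerCase() === au.name.toLowerCase()))

console.log(toAdd) // => ['Christopher Tolkien']
console.log(toRemove.map((au) => au.name)) // => ['Humphrey Carpenter']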
+ /**
+  *
+  * @param {import('../models/LibraryItem')} libraryItem
+  * @param {QuickMatchOptions} options
+  * @returns {Promise<number>} - Number of episodes updated
+  */
  async quickMatchPodcastEpisodes(libraryItem, options = {}) {
-   const episodesToQuickMatch = libraryItem.media.episodes.filter((ep) => !ep.enclosureUrl) // Only quick match episodes without enclosure
-   if (!episodesToQuickMatch.length) return false
+   /** @type {import('../models/PodcastEpisode')[]} */
+   const episodesToQuickMatch = libraryItem.media.podcastEpisodes.filter((ep) => !ep.enclosureURL) // Only quick match episodes that are not already matched
+   if (!episodesToQuickMatch.length) return 0

-   const feed = await getPodcastFeed(libraryItem.media.metadata.feedUrl)
+   const feed = await getPodcastFeed(libraryItem.media.feedURL)
    if (!feed) {
-     Logger.error(`[Scanner] quickMatchPodcastEpisodes: Unable to quick match episodes feed not found for "${libraryItem.media.metadata.feedUrl}"`)
-     return false
+     Logger.error(`[Scanner] quickMatchPodcastEpisodes: Unable to quick match episodes feed not found for "${libraryItem.media.feedURL}"`)
+     return 0
    }

    let numEpisodesUpdated = 0
    for (const episode of episodesToQuickMatch) {
      const episodeMatches = findMatchingEpisodesInFeed(feed, episode.title)
-     if (episodeMatches && episodeMatches.length) {
-       const wasUpdated = this.updateEpisodeWithMatch(libraryItem, episode, episodeMatches[0].episode, options)
+     if (episodeMatches?.length) {
+       const wasUpdated = await this.updateEpisodeWithMatch(episode, episodeMatches[0].episode, options)
        if (wasUpdated) numEpisodesUpdated++
      }
    }
+   if (numEpisodesUpdated) {
+     Logger.info(`[Scanner] quickMatchPodcastEpisodes: Updated ${numEpisodesUpdated} episodes for "${libraryItem.media.title}"`)
+   }
    return numEpisodesUpdated
  }
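One detail worth noting: updateEpisodeWithMatch is now async, so its result has to be awaited before it is counted, otherwise the returned Promise would always be truthy. A small sketch of the counting loop in isolation; the update callback is a hypothetical stand-in, not the real method:

async function countUpdatedEpisodes(episodes, update) {
  let numUpdated = 0
  for (const episode of episodes) {
    // Without the await, `if (update(episode))` would pass for every episode
    if (await update(episode)) numUpdated++
  }
  return numUpdated
}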
- updateEpisodeWithMatch(libraryItem, episode, episodeToMatch, options = {}) {
+ /**
+  *
+  * @param {import('../models/PodcastEpisode')} episode
+  * @param {import('../utils/podcastUtils').RssPodcastEpisode} episodeToMatch
+  * @param {QuickMatchOptions} options
+  * @returns {Promise<boolean>} - true if episode was updated
+  */
+ async updateEpisodeWithMatch(episode, episodeToMatch, options = {}) {
    Logger.debug(`[Scanner] quickMatchPodcastEpisodes: Found episode match for "${episode.title}" => ${episodeToMatch.title}`)
    const matchDataTransformed = {
      title: episodeToMatch.title || '',
      subtitle: episodeToMatch.subtitle || '',
      description: episodeToMatch.description || '',
-     enclosure: episodeToMatch.enclosure || null,
+     enclosureURL: episodeToMatch.enclosure?.url || null,
+     enclosureSize: episodeToMatch.enclosure?.length || null,
+     enclosureType: episodeToMatch.enclosure?.type || null,
      episode: episodeToMatch.episode || '',
      episodeType: episodeToMatch.episodeType || 'full',
      season: episodeToMatch.season || '',
@ -328,20 +413,19 @@ class Scanner {
    const updatePayload = {}
    for (const key in matchDataTransformed) {
      if (matchDataTransformed[key]) {
-       if (key === 'enclosure') {
-         if (!episode.enclosure || JSON.stringify(episode.enclosure) !== JSON.stringify(matchDataTransformed.enclosure)) {
-           updatePayload[key] = {
-             ...matchDataTransformed.enclosure
-           }
-         }
-       } else if (episode[key] !== matchDataTransformed[key] && (!episode[key] || options.overrideDetails)) {
+       if (episode[key] !== matchDataTransformed[key] && (!episode[key] || options.overrideDetails)) {
          updatePayload[key] = matchDataTransformed[key]
        }
      }
    }

    if (Object.keys(updatePayload).length) {
-     return libraryItem.media.updateEpisode(episode.id, updatePayload)
+     episode.set(updatePayload)
+     if (episode.changed()) {
+       Logger.debug(`[Scanner] quickMatchPodcastEpisodes: Updating episode "${episode.title}" keys`, episode.changed())
+       await episode.save()
+       return true
+     }
    }
    return false
  }
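The save is now gated on Sequelize dirty-checking: set() stages the values, changed() reports which attributes actually differ (or false if none do), and save() writes only when something changed. A rough sketch of that pattern against a generic in-memory model; the Episode model and its fields here are placeholders, not the real PodcastEpisode definition, and a sqlite driver is assumed to be installed:

const { Sequelize, DataTypes, Model } = require('sequelize')

const sequelize = new Sequelize('sqlite::memory:')
class Episode extends Model {}
Episode.init({ title: DataTypes.STRING, season: DataTypes.STRING }, { sequelize, modelName: 'episode' })

async function run() {
  await sequelize.sync()
  const episode = await Episode.create({ title: 'Old title', season: '1' })

  episode.set({ title: 'Old title', season: '2' }) // title is unchanged, season differs
  if (episode.changed()) {
    console.log(episode.changed()) // => ['season']
    await episode.save() // only the changed column is written
  }
}

run()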
@ -351,7 +435,7 @@ class Scanner {
   *
   * @param {import('../routers/ApiRouter')} apiRouterCtx
   * @param {import('../models/Library')} library
-  * @param {import('../objects/LibraryItem')[]} libraryItems
+  * @param {import('../models/LibraryItem')[]} libraryItems
   * @param {LibraryScan} libraryScan
   * @returns {Promise<boolean>} false if scan canceled
   */
@ -359,20 +443,20 @@ class Scanner {
    for (let i = 0; i < libraryItems.length; i++) {
      const libraryItem = libraryItems[i]

-     if (libraryItem.media.metadata.asin && library.settings.skipMatchingMediaWithAsin) {
-       Logger.debug(`[Scanner] matchLibraryItems: Skipping "${libraryItem.media.metadata.title}" because it already has an ASIN (${i + 1} of ${libraryItems.length})`)
+     if (libraryItem.media.asin && library.settings.skipMatchingMediaWithAsin) {
+       Logger.debug(`[Scanner] matchLibraryItems: Skipping "${libraryItem.media.title}" because it already has an ASIN (${i + 1} of ${libraryItems.length})`)
        continue
      }

-     if (libraryItem.media.metadata.isbn && library.settings.skipMatchingMediaWithIsbn) {
-       Logger.debug(`[Scanner] matchLibraryItems: Skipping "${libraryItem.media.metadata.title}" because it already has an ISBN (${i + 1} of ${libraryItems.length})`)
+     if (libraryItem.media.isbn && library.settings.skipMatchingMediaWithIsbn) {
+       Logger.debug(`[Scanner] matchLibraryItems: Skipping "${libraryItem.media.title}" because it already has an ISBN (${i + 1} of ${libraryItems.length})`)
        continue
      }

-     Logger.debug(`[Scanner] matchLibraryItems: Quick matching "${libraryItem.media.metadata.title}" (${i + 1} of ${libraryItems.length})`)
+     Logger.debug(`[Scanner] matchLibraryItems: Quick matching "${libraryItem.media.title}" (${i + 1} of ${libraryItems.length})`)
      const result = await this.quickMatchLibraryItem(apiRouterCtx, libraryItem, { provider: library.provider })
      if (result.warning) {
-       Logger.warn(`[Scanner] matchLibraryItems: Match warning ${result.warning} for library item "${libraryItem.media.metadata.title}"`)
+       Logger.warn(`[Scanner] matchLibraryItems: Match warning ${result.warning} for library item "${libraryItem.media.title}"`)
      } else if (result.updated) {
        libraryScan.resultsUpdated++
      }
@ -430,9 +514,8 @@ class Scanner {
      offset += limit
      hasMoreChunks = libraryItems.length === limit

-     let oldLibraryItems = libraryItems.map((li) => Database.libraryItemModel.getOldLibraryItem(li))
-     const shouldContinue = await this.matchLibraryItemsChunk(apiRouterCtx, library, oldLibraryItems, libraryScan)
+     const shouldContinue = await this.matchLibraryItemsChunk(apiRouterCtx, library, libraryItems, libraryScan)
      if (!shouldContinue) {
        isCanceled = true
        break
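The loop feeding matchLibraryItemsChunk is plain limit/offset pagination: fetch up to limit rows, advance the offset, and keep going while a full page came back. A condensed sketch of that loop shape; fetchPage and handleChunk are stand-ins, not the actual queries:

async function processInChunks(fetchPage, handleChunk, limit = 100) {
  let offset = 0
  let hasMoreChunks = true
  while (hasMoreChunks) {
    const items = await fetchPage(limit, offset)
    offset += limit
    // A short page means the last chunk was just read
    hasMoreChunks = items.length === limit
    const shouldContinue = await handleChunk(items)
    if (!shouldContinue) break // e.g. the scan was canceled
  }
}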

View File

@ -5,7 +5,6 @@ const fs = require('../libs/fsExtra')
const Path = require('path')
const Logger = require('../Logger')
const { filePathToPOSIX, copyToExisting } = require('./fileUtils')
- const LibraryItem = require('../objects/LibraryItem')

function escapeSingleQuotes(path) {
  // A ' within a quoted string is escaped with '\'' in ffmpeg (see https://www.ffmpeg.org/ffmpeg-utils.html#Quoting-and-escaping)
@ -365,28 +364,26 @@ function escapeFFMetadataValue(value) {
/**
 * Retrieves the FFmpeg metadata object for a given library item.
 *
- * @param {LibraryItem} libraryItem - The library item containing the media metadata.
+ * @param {import('../models/LibraryItem')} libraryItem - The library item containing the media metadata.
 * @param {number} audioFilesLength - The length of the audio files.
 * @returns {Object} - The FFmpeg metadata object.
 */
function getFFMetadataObject(libraryItem, audioFilesLength) {
- const metadata = libraryItem.media.metadata
  const ffmetadata = {
-   title: metadata.title,
-   artist: metadata.authorName,
-   album_artist: metadata.authorName,
-   album: (metadata.title || '') + (metadata.subtitle ? `: ${metadata.subtitle}` : ''),
-   TIT3: metadata.subtitle, // mp3 only
-   genre: metadata.genres?.join('; '),
-   date: metadata.publishedYear,
-   comment: metadata.description,
-   description: metadata.description,
-   composer: metadata.narratorName,
-   copyright: metadata.publisher,
-   publisher: metadata.publisher, // mp3 only
+   title: libraryItem.media.title,
+   artist: libraryItem.media.authorName,
+   album_artist: libraryItem.media.authorName,
+   album: (libraryItem.media.title || '') + (libraryItem.media.subtitle ? `: ${libraryItem.media.subtitle}` : ''),
+   TIT3: libraryItem.media.subtitle, // mp3 only
+   genre: libraryItem.media.genres?.join('; '),
+   date: libraryItem.media.publishedYear,
+   comment: libraryItem.media.description,
+   description: libraryItem.media.description,
+   composer: (libraryItem.media.narrators || []).join(', '),
+   copyright: libraryItem.media.publisher,
+   publisher: libraryItem.media.publisher, // mp3 only
    TRACKTOTAL: `${audioFilesLength}`, // mp3 only
-   grouping: metadata.series?.map((s) => s.name + (s.sequence ? ` #${s.sequence}` : '')).join('; ')
+   grouping: libraryItem.media.series?.map((s) => s.name + (s.bookSeries.sequence ? ` #${s.bookSeries.sequence}` : '')).join('; ')
  }

  Object.keys(ffmetadata).forEach((key) => {
    if (!ffmetadata[key]) {
@ -402,7 +399,7 @@ module.exports.getFFMetadataObject = getFFMetadataObject
/**
 * Merges audio files into a single output file using FFmpeg.
 *
- * @param {Array} audioTracks - The audio tracks to merge.
+ * @param {import('../models/Book').AudioFileObject} audioTracks - The audio tracks to merge.
 * @param {number} duration - The total duration of the audio tracks.
 * @param {string} itemCachePath - The path to the item cache.
 * @param {string} outputFilePath - The path to the output file.
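For a sense of what getFFMetadataObject above now produces directly from the model fields, here is a hand-written example of the returned object for a hypothetical two-file book; all values are illustrative only and not taken from this change:

// Example output shape only; field values are made up
const exampleFFMetadata = {
  title: 'The Fellowship of the Ring',
  artist: 'J.R.R. Tolkien',
  album_artist: 'J.R.R. Tolkien',
  album: 'The Fellowship of the Ring: Being the First Part of The Lord of the Rings',
  TIT3: 'Being the First Part of The Lord of the Rings', // mp3 only
  genre: 'Fantasy; Classics',
  date: '1954',
  comment: 'The first volume of the trilogy.',
  description: 'The first volume of the trilogy.',
  composer: 'Rob Inglis',
  copyright: 'HarperCollins',
  publisher: 'HarperCollins', // mp3 only
  TRACKTOTAL: '2',
  grouping: 'The Lord of the Rings #1'
}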

View File

@ -6,35 +6,41 @@ const naturalSort = createNewSortInstance({
})

module.exports = {
- getSeriesFromBooks(books, filterSeries, hideSingleBookSeries) {
+ /**
+  *
+  * @param {import('../models/LibraryItem')[]} libraryItems
+  * @param {*} filterSeries
+  * @param {*} hideSingleBookSeries
+  * @returns
+  */
+ getSeriesFromBooks(libraryItems, filterSeries, hideSingleBookSeries) {
    const _series = {}
    const seriesToFilterOut = {}
-   books.forEach((libraryItem) => {
+   libraryItems.forEach((libraryItem) => {
      // get all book series for item that is not already filtered out
-     const bookSeries = (libraryItem.media.metadata.series || []).filter((se) => !seriesToFilterOut[se.id])
-     if (!bookSeries.length) return
-     bookSeries.forEach((bookSeriesObj) => {
-       // const series = allSeries.find(se => se.id === bookSeriesObj.id)
-       const abJson = libraryItem.toJSONMinified()
-       abJson.sequence = bookSeriesObj.sequence
+     const allBookSeries = (libraryItem.media.series || []).filter((se) => !seriesToFilterOut[se.id])
+     if (!allBookSeries.length) return
+     allBookSeries.forEach((bookSeries) => {
+       const abJson = libraryItem.toOldJSONMinified()
+       abJson.sequence = bookSeries.bookSeries.sequence
        if (filterSeries) {
-         abJson.filterSeriesSequence = libraryItem.media.metadata.getSeries(filterSeries).sequence
+         const series = libraryItem.media.series.find((se) => se.id === filterSeries)
+         abJson.filterSeriesSequence = series.bookSeries.sequence
        }
-       if (!_series[bookSeriesObj.id]) {
-         _series[bookSeriesObj.id] = {
-           id: bookSeriesObj.id,
-           name: bookSeriesObj.name,
-           nameIgnorePrefix: getTitlePrefixAtEnd(bookSeriesObj.name),
-           nameIgnorePrefixSort: getTitleIgnorePrefix(bookSeriesObj.name),
+       if (!_series[bookSeries.id]) {
+         _series[bookSeries.id] = {
+           id: bookSeries.id,
+           name: bookSeries.name,
+           nameIgnorePrefix: getTitlePrefixAtEnd(bookSeries.name),
+           nameIgnorePrefixSort: getTitleIgnorePrefix(bookSeries.name),
            type: 'series',
            books: [abJson],
            totalDuration: isNullOrNaN(abJson.media.duration) ? 0 : Number(abJson.media.duration)
          }
        } else {
-         _series[bookSeriesObj.id].books.push(abJson)
-         _series[bookSeriesObj.id].totalDuration += isNullOrNaN(abJson.media.duration) ? 0 : Number(abJson.media.duration)
+         _series[bookSeries.id].books.push(abJson)
+         _series[bookSeries.id].totalDuration += isNullOrNaN(abJson.media.duration) ? 0 : Number(abJson.media.duration)
        }
      })
    })
@ -52,6 +58,13 @@ module.exports = {
    })
  },
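getSeriesFromBooks is essentially a group-by: each library item contributes its minified JSON to every series it belongs to, keyed on the series id, while a running duration total is kept per series. A stripped-down sketch of that accumulation using hypothetical inputs:

const items = [
  { id: 'li1', seriesIds: ['s1'], duration: 3600 },
  { id: 'li2', seriesIds: ['s1', 's2'], duration: 7200 }
]

const seriesMap = {}
for (const item of items) {
  for (const seriesId of item.seriesIds) {
    if (!seriesMap[seriesId]) {
      seriesMap[seriesId] = { id: seriesId, books: [item], totalDuration: item.duration }
    } else {
      seriesMap[seriesId].books.push(item)
      seriesMap[seriesId].totalDuration += item.duration
    }
  }
}

console.log(seriesMap.s1.totalDuration) // => 10800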
+ /**
+  *
+  * @param {import('../models/LibraryItem')[]} libraryItems
+  * @param {string} filterSeries - series id
+  * @param {boolean} hideSingleBookSeries
+  * @returns
+  */
  collapseBookSeries(libraryItems, filterSeries, hideSingleBookSeries) {
    // Get series from the library items. If this list is being collapsed after filtering for a series,
    // don't collapse that series, only books that are in other series.
@ -123,8 +136,9 @@ module.exports = {
    let libraryItems = books
      .map((book) => {
        const libraryItem = book.libraryItem
+       delete book.libraryItem
        libraryItem.media = book
-       return Database.libraryItemModel.getOldLibraryItem(libraryItem)
+       return libraryItem
      })
      .filter((li) => {
        return user.checkCanAccessLibraryItem(li)
@ -143,15 +157,18 @@ module.exports = {
      if (!payload.sortBy || payload.sortBy === 'sequence') {
        sortArray = [
          {
-           [direction]: (li) => li.media.metadata.getSeries(seriesId).sequence
+           [direction]: (li) => {
+             const series = li.media.series.find((se) => se.id === seriesId)
+             return series.bookSeries.sequence
+           }
          },
          {
            // If no series sequence then fallback to sorting by title (or collapsed series name for sub-series)
            [direction]: (li) => {
              if (sortingIgnorePrefix) {
-               return li.collapsedSeries?.nameIgnorePrefix || li.media.metadata.titleIgnorePrefix
+               return li.collapsedSeries?.nameIgnorePrefix || li.media.titleIgnorePrefix
              } else {
-               return li.collapsedSeries?.name || li.media.metadata.title
+               return li.collapsedSeries?.name || li.media.title
              }
            }
          }
@ -174,9 +191,9 @@ module.exports = {
          [direction]: (li) => {
            if (payload.sortBy === 'media.metadata.title') {
              if (sortingIgnorePrefix) {
-               return li.collapsedSeries?.nameIgnorePrefix || li.media.metadata.titleIgnorePrefix
+               return li.collapsedSeries?.nameIgnorePrefix || li.media.titleIgnorePrefix
              } else {
-               return li.collapsedSeries?.name || li.media.metadata.title
+               return li.collapsedSeries?.name || li.media.title
              }
            } else {
              return payload.sortBy.split('.').reduce((a, b) => a[b], li)
@ -194,12 +211,12 @@ module.exports = {
    return Promise.all(
      libraryItems.map(async (li) => {
-       const filteredSeries = li.media.metadata.getSeries(seriesId)
-       const json = li.toJSONMinified()
+       const filteredSeries = li.media.series.find((se) => se.id === seriesId)
+       const json = li.toOldJSONMinified()
        json.media.metadata.series = {
          id: filteredSeries.id,
          name: filteredSeries.name,
-         sequence: filteredSeries.sequence
+         sequence: filteredSeries.bookSeries.sequence
        }
        if (li.collapsedSeries) {

View File

@ -1200,7 +1200,7 @@ async function migrationPatchNewColumns(queryInterface) {
 */
async function handleOldLibraryItems(ctx) {
  const oldLibraryItems = await oldDbFiles.loadOldData('libraryItems')
- const libraryItems = (await ctx.models.libraryItem.findAllExpandedWhere()).map((li) => ctx.models.libraryItem.getOldLibraryItem(li))
+ const libraryItems = await ctx.models.libraryItem.findAllExpandedWhere()

  const bulkUpdateItems = []
  const bulkUpdateEpisodes = []
@ -1218,8 +1218,8 @@ async function handleOldLibraryItems(ctx) {
      }
    })

-   if (libraryItem.media.episodes?.length && matchingOldLibraryItem.media.episodes?.length) {
-     for (const podcastEpisode of libraryItem.media.episodes) {
+   if (libraryItem.media.podcastEpisodes?.length && matchingOldLibraryItem.media.episodes?.length) {
+     for (const podcastEpisode of libraryItem.media.podcastEpisodes) {
        // Find matching old episode by audio file ino
        const matchingOldPodcastEpisode = matchingOldLibraryItem.media.episodes.find((oep) => oep.audioFile?.ino && oep.audioFile.ino === podcastEpisode.audioFile?.ino)
        if (matchingOldPodcastEpisode) {

View File

@ -330,6 +330,12 @@ module.exports.findMatchingEpisodes = async (feedUrl, searchTitle) => {
  return this.findMatchingEpisodesInFeed(feed, searchTitle)
}

+/**
+ *
+ * @param {RssPodcast} feed
+ * @param {string} searchTitle
+ * @returns {Array<{ episode: RssPodcastEpisode, levenshtein: number }>}
+ */
module.exports.findMatchingEpisodesInFeed = (feed, searchTitle) => {
  searchTitle = searchTitle.toLowerCase().trim()
  if (!feed?.episodes) {
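findMatchingEpisodesInFeed returns candidate episodes paired with a Levenshtein distance against the search title, and callers such as quickMatchPodcastEpisodes take the first entry as the best match, which relies on the list being ordered by closeness. A hedged usage sketch, assuming the function is imported from podcastUtils and using made-up feed data:

// Hypothetical feed data; only the fields used here are shown
const feed = { episodes: [{ title: 'Episode 12: The Return' }, { title: 'Episode 13: The Departure' }] }

const matches = findMatchingEpisodesInFeed(feed, 'episode 12 the return')
if (matches?.length) {
  // The first entry is treated as the closest title match
  const bestMatch = matches[0].episode
  console.log(bestMatch.title, matches[0].levenshtein)
}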

View File

@ -415,7 +415,7 @@ module.exports = {
   * @param {import('../../models/User')} user
   * @param {number} limit
   * @param {number} offset
-  * @returns {Promise<{ libraryItems:import('../../objects/LibraryItem')[], count:number }>}
+  * @returns {Promise<{ libraryItems:import('../../models/LibraryItem')[], count:number }>}
   */
  async getLibraryItemsForAuthor(author, user, limit, offset) {
    const { libraryItems, count } = await libraryItemsBookFilters.getFilteredLibraryItems(author.libraryId, user, 'authors', author.id, 'addedAt', true, false, [], limit, offset)

View File

@ -297,7 +297,7 @@ module.exports = {
      delete podcast.libraryItem
      libraryItem.media = podcast
-     libraryItem.recentEpisode = ep.getOldPodcastEpisode(libraryItem.id).toJSON()
+     libraryItem.recentEpisode = ep.toOldJSON(libraryItem.id)
      return libraryItem
    })
@ -460,13 +460,14 @@ module.exports = {
    })

    const episodeResults = episodes.map((ep) => {
-     const libraryItem = ep.podcast.libraryItem
-     libraryItem.media = ep.podcast
-     const oldPodcast = Database.podcastModel.getOldPodcast(libraryItem)
-     const oldPodcastEpisode = ep.getOldPodcastEpisode(libraryItem.id).toJSONExpanded()
-     oldPodcastEpisode.podcast = oldPodcast
-     oldPodcastEpisode.libraryId = libraryItem.libraryId
-     return oldPodcastEpisode
+     ep.podcast.podcastEpisodes = [] // Not needed
+     const oldPodcastJson = ep.podcast.toOldJSON(ep.podcast.libraryItem.id)
+     const oldPodcastEpisodeJson = ep.toOldJSONExpanded(ep.podcast.libraryItem.id)
+     oldPodcastEpisodeJson.podcast = oldPodcastJson
+     oldPodcastEpisodeJson.libraryId = ep.podcast.libraryItem.libraryId
+     return oldPodcastEpisodeJson
    })

    return episodeResults

View File

@ -157,7 +157,7 @@ describe('LibraryItemController', () => {
    it('should remove authors and series with no books on library item update media', async () => {
      const libraryItem = await Database.libraryItemModel.getExpandedById(libraryItem1Id)
+     libraryItem.saveMetadataFile = sinon.stub()

      // Update library item 1 remove all authors and series
      const fakeReq = {
        query: {},