mirror of
https://github.com/advplyr/audiobookshelf.git
Update db migration for duration, size, lastFirst, and ignore prefix columns

commit 4dbe8d29d9
parent 0ca4ff4fca
@@ -150,7 +150,8 @@ class Database {
     // Version specific migrations
     if (this.serverSettings.version === '2.3.0' && this.compareVersions(packageJson.version, '2.3.0') > 1) {
       await dbMigration.migrationPatch(this)
-    } else if (this.serverSettings.version === '2.3.3' && this.compareVersions(packageJson.version, '2.3.3') >= 0) { // TODO: Update to > 1 after 2.3.4
+    }
+    if (this.serverSettings.version === '2.3.3' && this.compareVersions(packageJson.version, '2.3.3') >= 0) { // TODO: Update to > 1 after 2.3.4
       await dbMigration.migrationPatch2(this)
     }

@@ -189,6 +189,31 @@ class LibraryController {
     return res.json(libraryJson)
   }

+  async getLibraryItemsNew(req, res) {
+    const include = (req.query.include || '').split(',').map(v => v.trim().toLowerCase()).filter(v => !!v)
+
+    const payload = {
+      results: [],
+      total: undefined,
+      limit: req.query.limit && !isNaN(req.query.limit) ? Number(req.query.limit) : 0,
+      page: req.query.page && !isNaN(req.query.page) ? Number(req.query.page) : 0,
+      sortBy: req.query.sort,
+      sortDesc: req.query.desc === '1',
+      filterBy: req.query.filter,
+      mediaType: req.library.mediaType,
+      minified: req.query.minified === '1',
+      collapseseries: req.query.collapseseries === '1',
+      include: include.join(',')
+    }
+    payload.offset = payload.page * payload.limit
+
+    const { libraryItems, count } = await Database.models.libraryItem.getByFilterAndSort(req.library.id, req.user.id, payload)
+    payload.results = libraryItems.map(li => li.toJSONMinified())
+    payload.total = count
+
+    res.json(payload)
+  }
+
   // api/libraries/:id/items
   // TODO: Optimize this method, items are iterated through several times but can be combined
   async getLibraryItems(req, res) {
@@ -45,6 +45,7 @@ module.exports = (sequelize) => {
       return {
         id: oldAuthor.id,
         name: oldAuthor.name,
+        lastFirst: oldAuthor.lastFirst,
         asin: oldAuthor.asin,
         description: oldAuthor.description,
         imagePath: oldAuthor.imagePath,
@@ -68,6 +69,7 @@ module.exports = (sequelize) => {
       primaryKey: true
     },
     name: DataTypes.STRING,
+    lastFirst: DataTypes.STRING,
     asin: DataTypes.STRING,
     description: DataTypes.TEXT,
     imagePath: DataTypes.STRING
@@ -5,19 +5,52 @@ module.exports = (sequelize) => {
   class Book extends Model {
     static getOldBook(libraryItemExpanded) {
       const bookExpanded = libraryItemExpanded.media
-      const authors = bookExpanded.authors.map(au => {
-        return {
-          id: au.id,
-          name: au.name
-        }
-      })
-      const series = bookExpanded.series.map(se => {
-        return {
-          id: se.id,
-          name: se.name,
-          sequence: se.bookSeries.sequence
-        }
-      })
+      let authors = []
+      if (bookExpanded.authors?.length) {
+        authors = bookExpanded.authors.map(au => {
+          return {
+            id: au.id,
+            name: au.name
+          }
+        })
+      } else if (bookExpanded.bookAuthors?.length) {
+        authors = bookExpanded.bookAuthors.map(ba => {
+          if (ba.author) {
+            return {
+              id: ba.author.id,
+              name: ba.author.name
+            }
+          } else {
+            Logger.error(`[Book] Invalid bookExpanded bookAuthors: no author`, ba)
+            return null
+          }
+        }).filter(a => a)
+      }
+
+      let series = []
+      if (bookExpanded.series?.length) {
+        series = bookExpanded.series.map(se => {
+          return {
+            id: se.id,
+            name: se.name,
+            sequence: se.bookSeries.sequence
+          }
+        })
+      } else if (bookExpanded.bookSeries?.length) {
+        series = bookExpanded.bookSeries.map(bs => {
+          if (bs.series) {
+            return {
+              id: bs.series.id,
+              name: bs.series.name,
+              sequence: bs.sequence
+            }
+          } else {
+            Logger.error(`[Book] Invalid bookExpanded bookSeries: no series`, bs)
+            return null
+          }
+        }).filter(s => s)
+      }
+
       return {
         id: bookExpanded.id,
         libraryItemId: libraryItemExpanded.id,
@@ -66,6 +99,7 @@ module.exports = (sequelize) => {
       return {
         id: oldBook.id,
         title: oldBook.metadata.title,
+        titleIgnorePrefix: oldBook.metadata.titleIgnorePrefix,
         subtitle: oldBook.metadata.subtitle,
         publishedYear: oldBook.metadata.publishedYear,
         publishedDate: oldBook.metadata.publishedDate,
@@ -79,6 +113,7 @@ module.exports = (sequelize) => {
         narrators: oldBook.metadata.narrators,
         ebookFile: oldBook.ebookFile?.toJSON() || null,
         coverPath: oldBook.coverPath,
+        duration: oldBook.duration,
         audioFiles: oldBook.audioFiles?.map(af => af.toJSON()) || [],
         chapters: oldBook.chapters,
         tags: oldBook.tags,
@@ -94,6 +129,7 @@ module.exports = (sequelize) => {
       primaryKey: true
     },
     title: DataTypes.STRING,
+    titleIgnorePrefix: DataTypes.STRING,
     subtitle: DataTypes.STRING,
     publishedYear: DataTypes.STRING,
     publishedDate: DataTypes.STRING,
@@ -105,6 +141,7 @@ module.exports = (sequelize) => {
     explicit: DataTypes.BOOLEAN,
     abridged: DataTypes.BOOLEAN,
     coverPath: DataTypes.STRING,
+    duration: DataTypes.FLOAT,

     narrators: DataTypes.JSON,
     audioFiles: DataTypes.JSON,
@@ -1,6 +1,7 @@
 const { DataTypes, Model } = require('sequelize')
 const Logger = require('../Logger')
 const oldLibraryItem = require('../objects/LibraryItem')
+const libraryFilters = require('../utils/queries/libraryFilters')
 const { areEquivalent } = require('../utils/index')

 module.exports = (sequelize) => {
@@ -374,6 +375,7 @@ module.exports = (sequelize) => {
       mtime: oldLibraryItem.mtimeMs,
       ctime: oldLibraryItem.ctimeMs,
       birthtime: oldLibraryItem.birthtimeMs,
+      size: oldLibraryItem.size,
       lastScan: oldLibraryItem.lastScan,
       lastScanVersion: oldLibraryItem.scanVersion,
       libraryId: oldLibraryItem.libraryId,
@@ -392,6 +394,14 @@ module.exports = (sequelize) => {
       })
     }

+    static async getByFilterAndSort(libraryId, userId, { filterBy, sortBy, sortDesc, limit, offset }) {
+      const { libraryItems, count } = await libraryFilters.getFilteredLibraryItems(libraryId, filterBy, sortBy, sortDesc, limit, offset, userId)
+      return {
+        libraryItems: libraryItems.map(ti => this.getOldLibraryItem(ti)),
+        count
+      }
+    }
+
     getMedia(options) {
       if (!this.mediaType) return Promise.resolve(null)
       const mixinMethodName = `get${sequelize.uppercaseFirst(this.mediaType)}`
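Note (editorial sketch, not part of the commit): getByFilterAndSort is the model-level entry point the new LibraryController.getLibraryItemsNew endpoint delegates to. A direct call would look roughly like this; the filterBy value follows the "<group>.<base64 value>" convention that libraryFilters.decode() expects, and libraryId/userId are placeholders.

  const { libraryItems, count } = await Database.models.libraryItem.getByFilterAndSort(libraryId, userId, {
    filterBy: 'progress.' + Buffer.from('finished').toString('base64'),
    sortBy: 'media.metadata.title',
    sortDesc: false,
    limit: 50,
    offset: 0
  })
  // libraryItems come back as old-style LibraryItem objects (via getOldLibraryItem); count is the unpaged total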
@@ -416,6 +426,7 @@ module.exports = (sequelize) => {
     mtime: DataTypes.DATE(6),
     ctime: DataTypes.DATE(6),
     birthtime: DataTypes.DATE(6),
+    size: DataTypes.BIGINT,
     lastScan: DataTypes.DATE,
     lastScanVersion: DataTypes.STRING,
     libraryFiles: DataTypes.JSON,
@@ -17,7 +17,7 @@ module.exports = (sequelize) => {
       mediaItemId: this.mediaItemId,
       mediaItemType: this.mediaItemType,
       duration: this.duration,
-      progress: this.extraData?.progress || null,
+      progress: this.extraData?.progress || 0,
       currentTime: this.currentTime,
       isFinished: !!this.isFinished,
       hideFromContinueListening: !!this.hideFromContinueListening,
@@ -39,6 +39,7 @@ module.exports = (sequelize) => {
     return {
       id: oldPodcast.id,
       title: oldPodcastMetadata.title,
+      titleIgnorePrefix: oldPodcastMetadata.titleIgnorePrefix,
       author: oldPodcastMetadata.author,
       releaseDate: oldPodcastMetadata.releaseDate,
       feedURL: oldPodcastMetadata.feedUrl,
@@ -69,6 +70,7 @@ module.exports = (sequelize) => {
       primaryKey: true
     },
     title: DataTypes.STRING,
+    titleIgnorePrefix: DataTypes.STRING,
     author: DataTypes.STRING,
     releaseDate: DataTypes.STRING,
     feedURL: DataTypes.STRING,
@@ -43,6 +43,7 @@ module.exports = (sequelize) => {
     return {
       id: oldSeries.id,
       name: oldSeries.name,
+      nameIgnorePrefix: oldSeries.nameIgnorePrefix,
       description: oldSeries.description,
       libraryId: oldSeries.libraryId
     }
@@ -64,6 +65,7 @@ module.exports = (sequelize) => {
       primaryKey: true
     },
     name: DataTypes.STRING,
+    nameIgnorePrefix: DataTypes.STRING,
     description: DataTypes.TEXT
   }, {
     sequelize,
@@ -1,6 +1,6 @@
 const Logger = require('../../Logger')
 const uuidv4 = require("uuid").v4
-const { checkNamesAreEqual } = require('../../utils/parsers/parseNameString')
+const { checkNamesAreEqual, nameToLastFirst } = require('../../utils/parsers/parseNameString')

 class Author {
   constructor(author) {
@@ -29,6 +29,11 @@ class Author {
     this.libraryId = author.libraryId
   }

+  get lastFirst() {
+    if (!this.name) return ''
+    return nameToLastFirst(this.name)
+  }
+
   toJSON() {
     return {
       id: this.id,
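Note (editorial illustration, not part of the commit): the getter defers to nameToLastFirst from parseNameString, which is not shown in this diff. The example below assumes it converts a "First Last" display name into "Last, First" form, which is what the new lastFirst column and the media.metadata.authorNameLF sort key suggest.

  const { nameToLastFirst } = require('../../utils/parsers/parseNameString')

  nameToLastFirst('Agatha Christie') // assumed result: 'Christie, Agatha' -- the value author.lastFirst would report
  // author.lastFirst short-circuits to '' when this.name is empty, without calling the helper at all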
@@ -1,4 +1,5 @@
 const uuidv4 = require("uuid").v4
+const { getTitleIgnorePrefix } = require('../../utils/index')

 class Series {
   constructor(series) {
@@ -23,6 +24,11 @@ class Series {
     this.libraryId = series.libraryId
   }

+  get nameIgnorePrefix() {
+    if (!this.name) return ''
+    return getTitleIgnorePrefix(this.name)
+  }
+
   toJSON() {
     return {
       id: this.id,
@@ -72,6 +72,10 @@ class MediaProgress {
     return !this.isFinished && (this.progress > 0 || (this.ebookLocation != null && this.ebookProgress > 0))
   }

+  get notStarted() {
+    return !this.isFinished && this.progress == 0
+  }
+
   setData(libraryItem, progress, episodeId, userId) {
     this.id = uuidv4()
     this.userId = userId
@@ -74,6 +74,7 @@ class ApiRouter {
     this.router.patch('/libraries/:id', LibraryController.middleware.bind(this), LibraryController.update.bind(this))
     this.router.delete('/libraries/:id', LibraryController.middleware.bind(this), LibraryController.delete.bind(this))

+    this.router.get('/libraries/:id/items2', LibraryController.middleware.bind(this), LibraryController.getLibraryItemsNew.bind(this))
     this.router.get('/libraries/:id/items', LibraryController.middleware.bind(this), LibraryController.getLibraryItems.bind(this))
     this.router.delete('/libraries/:id/issues', LibraryController.middleware.bind(this), LibraryController.removeLibraryItemsWithIssues.bind(this))
     this.router.get('/libraries/:id/episode-downloads', LibraryController.middleware.bind(this), LibraryController.getEpisodeDownloadQueue.bind(this))
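Note (editorial sketch, not part of the commit): a rough client-side call against the new /items2 route. Host, port, library id and token are placeholders, and the Authorization header assumes the Bearer-token scheme used by the existing /api routes; the query parameters mirror what getLibraryItemsNew reads from req.query.

  async function fetchItemsPage(token) {
    // filter values carry a "<group>." prefix followed by the base64-encoded value (see libraryFilters.decode)
    const filter = 'progress.' + Buffer.from('in-progress').toString('base64')
    const url = `http://localhost:3333/api/libraries/lib_abc123/items2` +
      `?limit=25&page=0&sort=media.metadata.title&desc=0&filter=${encodeURIComponent(filter)}`
    const res = await fetch(url, { headers: { Authorization: `Bearer ${token}` } })
    return res.json() // { results, total, limit, page, ... } as assembled by getLibraryItemsNew
  }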
@@ -137,8 +137,7 @@ module.exports.cleanStringForSearch = (str) => {

 const getTitleParts = (title) => {
   if (!title) return ['', null]
-  var prefixesToIgnore = global.ServerSettings.sortingPrefixes || []
-  prefixes = []
+  const prefixesToIgnore = global.ServerSettings.sortingPrefixes || []
   for (const prefix of prefixesToIgnore) {
     // e.g. for prefix "the". If title is "The Book" return "Book, The"
     if (title.toLowerCase().startsWith(`${prefix} `)) {
@@ -34,7 +34,7 @@ module.exports = {
     filtered = filtered.filter(li => {
       const itemProgress = user.getMediaProgress(li.id)
       if (filter === 'finished' && (itemProgress && itemProgress.isFinished)) return true
-      if (filter === 'not-started' && !itemProgress) return true
+      if (filter === 'not-started' && (!itemProgress || itemProgress.notStarted)) return true
       if (filter === 'not-finished' && (!itemProgress || !itemProgress.isFinished)) return true
       if (filter === 'in-progress' && (itemProgress && itemProgress.inProgress)) return true
       return false
@@ -4,6 +4,7 @@ const uuidv4 = require("uuid").v4
 const Logger = require('../../Logger')
 const fs = require('../../libs/fsExtra')
 const oldDbFiles = require('./oldDbFiles')
+const parseNameString = require('../parsers/parseNameString')

 const oldDbIdMap = {
   users: {},
@@ -19,6 +20,18 @@ const oldDbIdMap = {
   devices: {} // key is a json stringify of the old DeviceInfo data OR deviceId if it exists
 }

+let prefixesToIgnore = ['the']
+function getTitleIgnorePrefix(title) {
+  if (!title?.trim()) return title
+  for (const prefix of prefixesToIgnore) {
+    // e.g. for prefix "the". If title is "The Book" return "Book"
+    if (title.toLowerCase().startsWith(`${prefix} `)) {
+      return title.substring(prefix.length).trim()
+    }
+  }
+  return title
+}
+
 function getDeviceInfoString(deviceInfo, UserId) {
   if (!deviceInfo) return null
   if (deviceInfo.deviceId) return deviceInfo.deviceId
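Behavior sketch of the helper added above, using illustrative titles (editorial, not part of the commit):

  getTitleIgnorePrefix('The Book')    // => 'Book' ("the " is in prefixesToIgnore and is followed by a space)
  getTitleIgnorePrefix('Theoretical') // => 'Theoretical' (the prefix must be followed by a space, so nothing is stripped)
  getTitleIgnorePrefix('')            // => '' (returned unchanged by the !title?.trim() guard)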
@@ -54,12 +67,21 @@ function migrateBook(oldLibraryItem, LibraryItem) {
     bookAuthor: []
   }

+  const tracks = (oldBook.audioFiles || []).filter(af => !af.exclude && !af.invalid)
+  let duration = 0
+  for (const track of tracks) {
+    if (track.duration !== null && !isNaN(track.duration)) {
+      duration += track.duration
+    }
+  }
+
   //
   // Migrate Book
   //
   const Book = {
     id: uuidv4(),
     title: oldBook.metadata.title,
+    titleIgnorePrefix: getTitleIgnorePrefix(oldBook.metadata.title),
     subtitle: oldBook.metadata.subtitle,
     publishedYear: oldBook.metadata.publishedYear,
     publishedDate: oldBook.metadata.publishedDate,
@@ -77,6 +99,7 @@ function migrateBook(oldLibraryItem, LibraryItem) {
     narrators: oldBook.metadata.narrators,
     ebookFile: oldBook.ebookFile,
     coverPath: oldBook.coverPath,
+    duration,
     audioFiles: oldBook.audioFiles,
     chapters: oldBook.chapters,
     tags: oldBook.tags,
@@ -152,6 +175,7 @@ function migratePodcast(oldLibraryItem, LibraryItem) {
   const Podcast = {
     id: uuidv4(),
     title: oldPodcastMetadata.title,
+    titleIgnorePrefix: getTitleIgnorePrefix(oldPodcastMetadata.title),
     author: oldPodcastMetadata.author,
     releaseDate: oldPodcastMetadata.releaseDate,
     feedURL: oldPodcastMetadata.feedUrl,
@@ -243,6 +267,13 @@ function migrateLibraryItems(oldLibraryItems) {
       continue
     }

+    let size = 0
+    for (const libraryFile of oldLibraryItem.libraryFiles) {
+      if (libraryFile.metadata?.size && !isNaN(libraryFile.metadata?.size)) {
+        size += libraryFile.metadata.size
+      }
+    }
+
     //
     // Migrate LibraryItem
     //
@@ -260,6 +291,7 @@ function migrateLibraryItems(oldLibraryItems) {
       mtime: oldLibraryItem.mtimeMs,
       ctime: oldLibraryItem.ctimeMs,
       birthtime: oldLibraryItem.birthtimeMs,
+      size,
       lastScan: oldLibraryItem.lastScan,
       lastScanVersion: oldLibraryItem.scanVersion,
       createdAt: oldLibraryItem.addedAt,
@@ -371,9 +403,11 @@ function migrateAuthors(oldAuthors, oldLibraryItems) {
     }

     for (const libraryId of librariesWithThisAuthor) {
+      const lastFirst = oldAuthor.name ? parseNameString.nameToLastFirst(oldAuthor.name) : ''
       const Author = {
         id: uuidv4(),
         name: oldAuthor.name,
+        lastFirst,
         asin: oldAuthor.asin || null,
         description: oldAuthor.description,
         imagePath: oldAuthor.imagePath,
@@ -415,6 +449,7 @@ function migrateSeries(oldSerieses, oldLibraryItems) {
     const Series = {
       id: uuidv4(),
       name: oldSeries.name,
+      nameIgnorePrefix: getTitleIgnorePrefix(oldSeries.name),
       description: oldSeries.description || null,
       createdAt: oldSeries.addedAt || Date.now(),
       updatedAt: oldSeries.updatedAt || Date.now(),
@@ -886,6 +921,11 @@ function migrateSettings(oldSettings) {
     key: 'server-settings',
     value: serverSettings
   })
+
+  if (serverSettings.sortingPrefixes?.length) {
+    // Used for migrating titles/names
+    prefixesToIgnore = serverSettings.sortingPrefixes
+  }
 }

 if (notificationSettings) {
@@ -1311,6 +1351,198 @@ module.exports.migrationPatch = async (ctx) => {
   Logger.info(`[dbMigration] Migration patch 2.3.0+ finished. Elapsed ${(elapsed / 1000).toFixed(2)}s`)
 }

+/**
+ * Migration from 2.3.3 to 2.3.4
+ * Populating the size column on libraryItem
+ * @param {/src/Database} ctx
+ * @param {number} offset
+ */
+async function migrationPatch2LibraryItems(ctx, offset = 0) {
+  const libraryItems = await ctx.models.libraryItem.findAll({
+    limit: 500,
+    offset
+  })
+  if (!libraryItems.length) return
+
+  const bulkUpdateItems = []
+  for (const libraryItem of libraryItems) {
+    if (libraryItem.libraryFiles?.length) {
+      let size = 0
+      libraryItem.libraryFiles.forEach(lf => {
+        if (!isNaN(lf.metadata?.size)) {
+          size += Number(lf.metadata.size)
+        }
+      })
+      bulkUpdateItems.push({
+        id: libraryItem.id,
+        size
+      })
+    }
+  }
+
+  if (bulkUpdateItems.length) {
+    Logger.info(`[dbMigration] Migration patch 2.3.3+ - patching ${bulkUpdateItems.length} library items`)
+    await ctx.models.libraryItem.bulkCreate(bulkUpdateItems, {
+      updateOnDuplicate: ['size']
+    })
+  }
+
+  if (libraryItems.length < 500) {
+    return
+  }
+  return migrationPatch2LibraryItems(ctx, offset + libraryItems.length)
+}
+
+/**
+ * Migration from 2.3.3 to 2.3.4
+ * Populating the duration & titleIgnorePrefix column on book
+ * @param {/src/Database} ctx
+ * @param {number} offset
+ */
+async function migrationPatch2Books(ctx, offset = 0) {
+  const books = await ctx.models.book.findAll({
+    limit: 500,
+    offset
+  })
+  if (!books.length) return
+
+  const bulkUpdateItems = []
+  for (const book of books) {
+    let duration = 0
+
+    if (book.audioFiles?.length) {
+      const tracks = book.audioFiles.filter(af => !af.exclude && !af.invalid)
+      for (const track of tracks) {
+        if (track.duration !== null && !isNaN(track.duration)) {
+          duration += track.duration
+        }
+      }
+    }
+
+    bulkUpdateItems.push({
+      id: book.id,
+      titleIgnorePrefix: getTitleIgnorePrefix(book.title),
+      duration
+    })
+  }
+
+  if (bulkUpdateItems.length) {
+    Logger.info(`[dbMigration] Migration patch 2.3.3+ - patching ${bulkUpdateItems.length} books`)
+    await ctx.models.book.bulkCreate(bulkUpdateItems, {
+      updateOnDuplicate: ['duration', 'titleIgnorePrefix']
+    })
+  }
+
+  if (books.length < 500) {
+    return
+  }
+  return migrationPatch2Books(ctx, offset + books.length)
+}
+
+/**
+ * Migration from 2.3.3 to 2.3.4
+ * Populating the titleIgnorePrefix column on podcast
+ * @param {/src/Database} ctx
+ * @param {number} offset
+ */
+async function migrationPatch2Podcasts(ctx, offset = 0) {
+  const podcasts = await ctx.models.podcast.findAll({
+    limit: 500,
+    offset
+  })
+  if (!podcasts.length) return
+
+  const bulkUpdateItems = []
+  for (const podcast of podcasts) {
+    bulkUpdateItems.push({
+      id: podcast.id,
+      titleIgnorePrefix: getTitleIgnorePrefix(podcast.title)
+    })
+  }
+
+  if (bulkUpdateItems.length) {
+    Logger.info(`[dbMigration] Migration patch 2.3.3+ - patching ${bulkUpdateItems.length} podcasts`)
+    await ctx.models.podcast.bulkCreate(bulkUpdateItems, {
+      updateOnDuplicate: ['titleIgnorePrefix']
+    })
+  }
+
+  if (podcasts.length < 500) {
+    return
+  }
+  return migrationPatch2Podcasts(ctx, offset + podcasts.length)
+}
+
+/**
+ * Migration from 2.3.3 to 2.3.4
+ * Populating the nameIgnorePrefix column on series
+ * @param {/src/Database} ctx
+ * @param {number} offset
+ */
+async function migrationPatch2Series(ctx, offset = 0) {
+  const allSeries = await ctx.models.series.findAll({
+    limit: 500,
+    offset
+  })
+  if (!allSeries.length) return
+
+  const bulkUpdateItems = []
+  for (const series of allSeries) {
+    bulkUpdateItems.push({
+      id: series.id,
+      nameIgnorePrefix: getTitleIgnorePrefix(series.name)
+    })
+  }
+
+  if (bulkUpdateItems.length) {
+    Logger.info(`[dbMigration] Migration patch 2.3.3+ - patching ${bulkUpdateItems.length} series`)
+    await ctx.models.series.bulkCreate(bulkUpdateItems, {
+      updateOnDuplicate: ['nameIgnorePrefix']
+    })
+  }
+
+  if (allSeries.length < 500) {
+    return
+  }
+  return migrationPatch2Series(ctx, offset + allSeries.length)
+}
+
+/**
+ * Migration from 2.3.3 to 2.3.4
+ * Populating the lastFirst column on author
+ * @param {/src/Database} ctx
+ * @param {number} offset
+ */
+async function migrationPatch2Authors(ctx, offset = 0) {
+  const authors = await ctx.models.author.findAll({
+    limit: 500,
+    offset
+  })
+  if (!authors.length) return
+
+  const bulkUpdateItems = []
+  for (const author of authors) {
+    if (author.name?.trim()) {
+      bulkUpdateItems.push({
+        id: author.id,
+        lastFirst: parseNameString.nameToLastFirst(author.name)
+      })
+    }
+  }
+
+  if (bulkUpdateItems.length) {
+    Logger.info(`[dbMigration] Migration patch 2.3.3+ - patching ${bulkUpdateItems.length} authors`)
+    await ctx.models.author.bulkCreate(bulkUpdateItems, {
+      updateOnDuplicate: ['lastFirst']
+    })
+  }
+
+  if (authors.length < 500) {
+    return
+  }
+  return migrationPatch2Authors(ctx, offset + authors.length)
+}
+
 /**
  * Migration from 2.3.3 to 2.3.4
  * Adding coverPath column to Feed model
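Note (editorial sketch, not part of the commit): the five patch functions above all share one shape — page through a table 500 rows at a time and upsert only the backfilled columns via bulkCreate with updateOnDuplicate, which rewrites just the listed columns for rows that already exist. A generic version of that shape, for reference:

  async function backfillInPages(model, buildRow, columns, offset = 0) {
    const rows = await model.findAll({ limit: 500, offset })
    if (!rows.length) return

    const updates = rows.map(buildRow).filter(u => u) // buildRow returns { id, ...columns } or null to skip a row
    if (updates.length) {
      await model.bulkCreate(updates, { updateOnDuplicate: columns })
    }

    if (rows.length < 500) return
    return backfillInPages(model, buildRow, columns, offset + rows.length)
  }
  // e.g. migrationPatch2Podcasts is effectively
  // backfillInPages(ctx.models.podcast, p => ({ id: p.id, titleIgnorePrefix: getTitleIgnorePrefix(p.title) }), ['titleIgnorePrefix'])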
@@ -1319,21 +1551,65 @@ module.exports.migrationPatch = async (ctx) => {
 module.exports.migrationPatch2 = async (ctx) => {
   const queryInterface = ctx.sequelize.getQueryInterface()
   const feedTableDescription = await queryInterface.describeTable('feeds')
+  const authorsTableDescription = await queryInterface.describeTable('authors')

-  if (feedTableDescription?.coverPath) {
-    Logger.info(`[dbMigration] Migration patch 2.3.3+ - coverPath column is already on model`)
-    return
+  if (feedTableDescription?.coverPath && authorsTableDescription?.lastFirst) {
+    Logger.info(`[dbMigration] Migration patch 2.3.3+ - columns already on model`)
+    return false
   }
+  Logger.info(`[dbMigration] Applying migration patch from 2.3.3+`)

   try {
     await queryInterface.sequelize.transaction(t => {
-      return Promise.all([
-        queryInterface.addColumn('feeds', 'coverPath', {
-          type: DataTypes.STRING
-        }, { transaction: t })
-      ])
+      const queries = [
+        queryInterface.addColumn('authors', 'lastFirst', {
+          type: DataTypes.STRING
+        }, { transaction: t }),
+        queryInterface.addColumn('libraryItems', 'size', {
+          type: DataTypes.BIGINT
+        }, { transaction: t }),
+        queryInterface.addColumn('books', 'duration', {
+          type: DataTypes.FLOAT
+        }, { transaction: t }),
+        queryInterface.addColumn('books', 'titleIgnorePrefix', {
+          type: DataTypes.STRING
+        }, { transaction: t }),
+        queryInterface.addColumn('podcasts', 'titleIgnorePrefix', {
+          type: DataTypes.STRING
+        }, { transaction: t }),
+        queryInterface.addColumn('series', 'nameIgnorePrefix', {
+          type: DataTypes.STRING
+        }, { transaction: t }),
+      ]
+      if (!feedTableDescription?.coverPath) {
+        queries.push(queryInterface.addColumn('feeds', 'coverPath', {
+          type: DataTypes.STRING
+        }, { transaction: t }))
+      }
+      return Promise.all(queries)
     })
+
+    if (global.ServerSettings.sortingPrefixes?.length) {
+      prefixesToIgnore = global.ServerSettings.sortingPrefixes
+    }
+
+    // Patch library items size column
+    await migrationPatch2LibraryItems(ctx, 0)
+
+    // Patch books duration & titleIgnorePrefix column
+    await migrationPatch2Books(ctx, 0)
+
+    // Patch podcasts titleIgnorePrefix column
+    await migrationPatch2Podcasts(ctx, 0)
+
+    // Patch authors lastFirst column
+    await migrationPatch2Authors(ctx, 0)
+
+    // Patch series nameIgnorePrefix column
+    await migrationPatch2Series(ctx, 0)
+
     Logger.info(`[dbMigration] Migration patch 2.3.3+ finished`)
+    return true
   } catch (error) {
     Logger.error(`[dbMigration] Migration from 2.3.3+ column creation failed`, error)
     throw new Error('Migration 2.3.3+ failed ' + error)
server/utils/queries/libraryFilters.js
Normal file
154
server/utils/queries/libraryFilters.js
Normal file
@ -0,0 +1,154 @@
|
|||||||
|
const { Op, literal, col, fn, where } = require('sequelize')
|
||||||
|
const Database = require('../../Database')
|
||||||
|
const libraryItemsSeriesFilters = require('./libraryItemsSeriesFilters')
|
||||||
|
const libraryItemsProgressFilters = require('./libraryItemsProgressFilters')
|
||||||
|
const Logger = require('../../Logger')
|
||||||
|
|
||||||
|
module.exports = {
|
||||||
|
decode(text) {
|
||||||
|
return Buffer.from(decodeURIComponent(text), 'base64').toString()
|
||||||
|
},
|
||||||
|
|
||||||
|
getMediaGroupQuery(group, value) {
|
||||||
|
let mediaWhere = {}
|
||||||
|
|
||||||
|
if (['genres', 'tags', 'narrators'].includes(group)) {
|
||||||
|
mediaWhere[group] = {
|
||||||
|
[Op.substring]: `"${value}"`
|
||||||
|
}
|
||||||
|
} else if (group === 'publishers') {
|
||||||
|
mediaWhere['publisher'] = {
|
||||||
|
[Op.substring]: `"${value}"`
|
||||||
|
}
|
||||||
|
} else if (group === 'languages') {
|
||||||
|
mediaWhere['language'] = {
|
||||||
|
[Op.substring]: `"${value}"`
|
||||||
|
}
|
||||||
|
} else if (group === 'tracks') {
|
||||||
|
if (value === 'multi') {
|
||||||
|
mediaWhere = where(fn('json_array_length', col('audioFiles')), {
|
||||||
|
[Op.gt]: 1
|
||||||
|
})
|
||||||
|
} else {
|
||||||
|
mediaWhere = where(fn('json_array_length', col('audioFiles')), 1)
|
||||||
|
}
|
||||||
|
} else if (group === 'ebooks') {
|
||||||
|
if (value === 'ebook') {
|
||||||
|
mediaWhere['ebookFile'] = {
|
||||||
|
[Op.not]: null
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
return mediaWhere
|
||||||
|
},
|
||||||
|
|
||||||
|
getOrder(sortBy, sortDesc) {
|
||||||
|
const dir = sortDesc ? 'DESC' : 'ASC'
|
||||||
|
if (sortBy === 'addedAt') {
|
||||||
|
return [['createdAt', dir]]
|
||||||
|
} else if (sortBy === 'size') {
|
||||||
|
return [['size', dir]]
|
||||||
|
} else if (sortBy === 'birthtimeMs') {
|
||||||
|
return [['birthtime', dir]]
|
||||||
|
} else if (sortBy === 'mtimeMs') {
|
||||||
|
return [['mtime', dir]]
|
||||||
|
} else if (sortBy === 'media.duration') {
|
||||||
|
return [[literal('book.duration'), dir]]
|
||||||
|
} else if (sortBy === 'media.metadata.publishedYear') {
|
||||||
|
return [[literal('book.publishedYear'), dir]]
|
||||||
|
} else if (sortBy === 'media.metadata.authorNameLF') {
|
||||||
|
return [[literal('book.authors.lastFirst'), dir]]
|
||||||
|
} else if (sortBy === 'media.metadata.authorName') {
|
||||||
|
return [[literal('book.authors.name'), dir]]
|
||||||
|
} else if (sortBy === 'media.metadata.title') {
|
||||||
|
if (global.ServerSettings.sortingIgnorePrefix) {
|
||||||
|
return [[literal('book.titleIgnorePrefix'), dir]]
|
||||||
|
} else {
|
||||||
|
return [[literal('book.title'), dir]]
|
||||||
|
}
|
||||||
|
}
|
||||||
|
return []
|
||||||
|
},
|
||||||
|
|
||||||
|
async getFilteredLibraryItems(libraryId, filterBy, sortBy, sortDesc, limit, offset, userId) {
|
||||||
|
const libraryItemModel = Database.models.libraryItem
|
||||||
|
|
||||||
|
let mediaWhereQuery = null
|
||||||
|
let mediaAttributes = null
|
||||||
|
let itemWhereQuery = {
|
||||||
|
libraryId
|
||||||
|
}
|
||||||
|
|
||||||
|
const itemIncludes = []
|
||||||
|
|
||||||
|
let authorInclude = {
|
||||||
|
model: Database.models.author,
|
||||||
|
through: {
|
||||||
|
attributes: []
|
||||||
|
}
|
||||||
|
}
|
||||||
|
let seriesInclude = {
|
||||||
|
model: Database.models.series,
|
||||||
|
through: {
|
||||||
|
attributes: ['sequence']
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
const searchGroups = ['genres', 'tags', 'series', 'authors', 'progress', 'narrators', 'publishers', 'missing', 'languages', 'tracks', 'ebooks']
|
||||||
|
const group = searchGroups.find(_group => filterBy.startsWith(_group + '.'))
|
||||||
|
if (group) {
|
||||||
|
// e.g. genre id
|
||||||
|
const value = this.decode(filterBy.replace(`${group}.`, ''))
|
||||||
|
|
||||||
|
if (group === 'series' && value === 'no-series') {
|
||||||
|
return libraryItemsSeriesFilters.getLibraryItemsWithNoSeries(libraryId, sortBy, sortDesc, limit, offset)
|
||||||
|
} else if (group === 'progress') {
|
||||||
|
return libraryItemsProgressFilters.getLibraryItemsWithProgressFilter(value, libraryId, userId, sortBy, sortDesc, limit, offset)
|
||||||
|
}
|
||||||
|
|
||||||
|
if (group === 'authors') {
|
||||||
|
authorInclude.where = {
|
||||||
|
id: value
|
||||||
|
}
|
||||||
|
authorInclude.required = true
|
||||||
|
} else if (group === 'series') {
|
||||||
|
seriesInclude.where = {
|
||||||
|
id: value
|
||||||
|
}
|
||||||
|
seriesInclude.required = true
|
||||||
|
} else {
|
||||||
|
mediaWhereQuery = this.getMediaGroupQuery(group, value)
|
||||||
|
}
|
||||||
|
} else if (filterBy === 'abridged') {
|
||||||
|
mediaWhereQuery = {
|
||||||
|
abridged: true
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
const { rows: libraryItems, count } = await libraryItemModel.findAndCountAll({
|
||||||
|
where: itemWhereQuery,
|
||||||
|
attributes: {
|
||||||
|
include: [
|
||||||
|
[fn('group_concat', col('book.author.name'), ', '), 'author_name']
|
||||||
|
]
|
||||||
|
},
|
||||||
|
distinct: true,
|
||||||
|
subQuery: false,
|
||||||
|
include: [
|
||||||
|
{
|
||||||
|
model: Database.models.book,
|
||||||
|
attributes: mediaAttributes,
|
||||||
|
where: mediaWhereQuery,
|
||||||
|
required: true,
|
||||||
|
include: [authorInclude, seriesInclude, ...itemIncludes]
|
||||||
|
}
|
||||||
|
],
|
||||||
|
order: this.getOrder(sortBy, sortDesc),
|
||||||
|
limit,
|
||||||
|
offset
|
||||||
|
})
|
||||||
|
Logger.debug('Found', libraryItems.length, 'library items', 'total=', count)
|
||||||
|
return { libraryItems, count }
|
||||||
|
}
|
||||||
|
}
|
server/utils/queries/libraryItemsProgressFilters.js (new file, 130 lines)
@@ -0,0 +1,130 @@
+const Sequelize = require('sequelize')
+const Database = require('../../Database')
+const Logger = require('../../Logger')
+
+module.exports = {
+  getOrder(sortBy, sortDesc) {
+    const dir = sortDesc ? 'DESC' : 'ASC'
+    if (sortBy === 'addedAt') {
+      return [[Sequelize.literal('libraryItem.createdAt'), dir]]
+    } else if (sortBy === 'size') {
+      return [[Sequelize.literal('libraryItem.size'), dir]]
+    } else if (sortBy === 'birthtimeMs') {
+      return [[Sequelize.literal('libraryItem.birthtime'), dir]]
+    } else if (sortBy === 'mtimeMs') {
+      return [[Sequelize.literal('libraryItem.mtime'), dir]]
+    } else if (sortBy === 'media.duration') {
+      return [['duration', dir]]
+    } else if (sortBy === 'media.metadata.publishedYear') {
+      return [['publishedYear', dir]]
+    } else if (sortBy === 'media.metadata.authorNameLF') {
+      return [] // TODO: Handle author filter
+    } else if (sortBy === 'media.metadata.authorName') {
+      return [] // TODO: Handle author filter
+    } else if (sortBy === 'media.metadata.title') {
+      if (global.ServerSettings.sortingIgnorePrefix) {
+        return [['titleIgnorePrefix', dir]]
+      } else {
+        return [['title', dir]]
+      }
+    }
+    return []
+  },
+
+  async getLibraryItemsWithProgressFilter(filterValue, libraryId, userId, sortBy, sortDesc, limit, offset) {
+
+    const bookWhere = {}
+    if (filterValue === 'not-finished') {
+      bookWhere['$mediaProgresses.isFinished$'] = {
+        [Sequelize.Op.or]: [null, false]
+      }
+    } else if (filterValue === 'not-started') {
+      bookWhere['$mediaProgresses.currentTime$'] = {
+        [Sequelize.Op.or]: [null, 0]
+      }
+    } else if (filterValue === 'finished') {
+      bookWhere['$mediaProgresses.isFinished$'] = true
+    } else { // in-progress
+      bookWhere[Sequelize.Op.and] = [
+        {
+          '$book.mediaProgresses.currentTime$': {
+            [Sequelize.Op.gt]: 0
+          }
+        },
+        {
+          '$book.mediaProgresses.isFinished$': false
+        }
+      ]
+    }
+
+
+    const { rows: books, count } = await Database.models.book.findAndCountAll({
+      where: bookWhere,
+      distinct: true,
+      include: [
+        {
+          model: Database.models.libraryItem,
+          required: true,
+          where: {
+            libraryId
+          }
+        },
+        {
+          model: Database.models.bookSeries,
+          attributes: ['seriesId', 'sequence'],
+          include: {
+            model: Database.models.series,
+            attributes: ['id', 'name']
+          },
+          separate: true
+        },
+        {
+          model: Database.models.bookAuthor,
+          attributes: ['authorId'],
+          include: {
+            model: Database.models.author,
+            attributes: ['id', 'name']
+          },
+          separate: true
+        },
+        {
+          model: Database.models.mediaProgress,
+          attributes: ['id', 'isFinished'],
+          where: {
+            userId
+          },
+          required: false
+        }
+      ],
+      order: this.getOrder(sortBy, sortDesc),
+      subQuery: false,
+      limit,
+      offset
+    })
+
+    const libraryItems = books.map((bookExpanded) => {
+      const libraryItem = bookExpanded.libraryItem.toJSON()
+      const book = bookExpanded.toJSON()
+      delete book.libraryItem
+
+      book.authors = []
+      if (book.bookAuthors?.length) {
+        book.bookAuthors.forEach((ba) => {
+          if (ba.author) {
+            book.authors.push(ba.author)
+          }
+        })
+      }
+      delete book.bookAuthors
+
+      libraryItem.media = book
+
+      return libraryItem
+    })
+    Logger.debug('Found', libraryItems.length, 'library items', 'total=', count)
+    return {
+      libraryItems,
+      count
+    }
+  }
+}
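Note (editorial, not part of the commit): the '$mediaProgresses.currentTime$' / '$mediaProgresses.isFinished$' keys above use Sequelize's "$nested.column$" syntax, which lets a top-level where clause reference a column of an included association. Reduced to its smallest shape:

  const notFinished = await Database.models.book.findAll({
    where: {
      '$mediaProgresses.isFinished$': { [Sequelize.Op.or]: [null, false] }
    },
    include: [{ model: Database.models.mediaProgress, required: false }],
    subQuery: false
  })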
server/utils/queries/libraryItemsSeriesFilters.js (new file, 96 lines)
@@ -0,0 +1,96 @@
+const Sequelize = require('sequelize')
+const Database = require('../../Database')
+const Logger = require('../../Logger')
+
+module.exports = {
+  getOrder(sortBy, sortDesc) {
+    const dir = sortDesc ? 'DESC' : 'ASC'
+    if (sortBy === 'addedAt') {
+      return [[Sequelize.literal('libraryItem.createdAt'), dir]]
+    } else if (sortBy === 'size') {
+      return [[Sequelize.literal('libraryItem.size'), dir]]
+    } else if (sortBy === 'birthtimeMs') {
+      return [[Sequelize.literal('libraryItem.birthtime'), dir]]
+    } else if (sortBy === 'mtimeMs') {
+      return [[Sequelize.literal('libraryItem.mtime'), dir]]
+    } else if (sortBy === 'media.duration') {
+      return [['duration', dir]]
+    } else if (sortBy === 'media.metadata.publishedYear') {
+      return [['publishedYear', dir]]
+    } else if (sortBy === 'media.metadata.authorNameLF') {
+      return [] // TODO: Handle author filter
+    } else if (sortBy === 'media.metadata.authorName') {
+      return [] // TODO: Handle author filter
+    } else if (sortBy === 'media.metadata.title') {
+      if (global.ServerSettings.sortingIgnorePrefix) {
+        return [['titleIgnorePrefix', dir]]
+      } else {
+        return [['title', dir]]
+      }
+    }
+    return []
+  },
+
+  async getLibraryItemsWithNoSeries(libraryId, sortBy, sortDesc, limit, offset) {
+    const { rows: books, count } = await Database.models.book.findAndCountAll({
+      where: {
+        '$series.id$': null
+      },
+      distinct: true,
+      include: [
+        {
+          model: Database.models.libraryItem,
+          required: true,
+          where: {
+            libraryId
+          }
+        },
+        {
+          model: Database.models.series,
+          attributes: ['id', 'name'],
+          through: {
+            attributes: ['sequence']
+          },
+        },
+        {
+          model: Database.models.bookAuthor,
+          attributes: ['authorId'],
+          include: {
+            model: Database.models.author,
+            attributes: ['id', 'name']
+          },
+          separate: true
+        }
+      ],
+      order: this.getOrder(sortBy, sortDesc),
+      subQuery: false,
+      limit,
+      offset
+    })
+
+    const libraryItems = books.map((bookExpanded) => {
+      const libraryItem = bookExpanded.libraryItem.toJSON()
+      const book = bookExpanded.toJSON()
+      delete book.libraryItem
+
+      book.authors = []
+      if (book.bookAuthors?.length) {
+        book.bookAuthors.forEach((ba) => {
+          if (ba.author) {
+            book.authors.push(ba.author)
+          }
+        })
+      }
+      delete book.bookAuthors
+
+      libraryItem.media = book
+
+      return libraryItem
+    })
+    Logger.debug('Found', libraryItems.length, 'library items', 'total=', count)
+    return {
+      libraryItems,
+      count
+    }
+  }
+}