Mirror of https://github.com/advplyr/audiobookshelf.git (synced 2025-01-13 17:38:40 +01:00)

Fixes for db migration & local playback sessions

Commit d99b2c25e8, parent 63e5cf2e60
@@ -128,6 +128,18 @@ class Database {

const startTime = Date.now()

const settingsData = await this.models.setting.getOldSettings()
this.settings = settingsData.settings
this.emailSettings = settingsData.emailSettings
this.serverSettings = settingsData.serverSettings
this.notificationSettings = settingsData.notificationSettings
global.ServerSettings = this.serverSettings.toJSON()

// Version specific migrations
if (this.serverSettings.version === '2.3.0' && packageJson.version !== '2.3.0') {
await dbMigration.migrationPatch(this)
}

this.libraryItems = await this.models.libraryItem.getAllOldLibraryItems()
this.users = await this.models.user.getOldUsers()
this.libraries = await this.models.library.getAllOldLibraries()
@@ -137,13 +149,6 @@ class Database {
this.series = await this.models.series.getAllOldSeries()
this.feeds = await this.models.feed.getOldFeeds()

const settingsData = await this.models.setting.getOldSettings()
this.settings = settingsData.settings
this.emailSettings = settingsData.emailSettings
this.serverSettings = settingsData.serverSettings
this.notificationSettings = settingsData.notificationSettings
global.ServerSettings = this.serverSettings.toJSON()

Logger.info(`[Database] Db data loaded in ${Date.now() - startTime}ms`)

if (packageJson.version !== this.serverSettings.version) {
@@ -357,7 +362,11 @@ class Database {
}

getLibraryItem(libraryItemId) {
if (!this.sequelize) return false
if (!this.sequelize || !libraryItemId) return false

// Temp support for old library item ids from mobile
if (libraryItemId.startsWith('li_')) return this.libraryItems.find(li => li.oldLibraryItemId === libraryItemId)

return this.libraryItems.find(li => li.id === libraryItemId)
}
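For context, a minimal usage sketch of the id fallback added above; the require path and ids are illustrative, not from the commit:

const Database = require('./Database') // assumed singleton export, path illustrative

// New UUID ids resolve directly against the in-memory libraryItems cache
const item = Database.getLibraryItem('0a1b2c3d-0000-4000-8000-1234567890ab')

// Old mobile-format ids ('li_...') now fall back to the oldLibraryItemId lookup
const legacyItem = Database.getLibraryItem('li_abc123')

// Falsy ids short-circuit to false instead of throwing on startsWith()
const nothing = Database.getLibraryItem(null)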
@@ -94,7 +94,7 @@ class SessionController {

// POST: api/session/local
syncLocal(req, res) {
this.playbackSessionManager.syncLocalSessionRequest(req.user, req.body, res)
this.playbackSessionManager.syncLocalSessionRequest(req, res)
}

// POST: api/session/local-all
@@ -1,3 +1,4 @@
const uuidv4 = require("uuid").v4
const Path = require('path')
const serverVersion = require('../../package.json').version
const Logger = require('../Logger')
@@ -19,6 +20,7 @@ class PlaybackSessionManager {
constructor() {
this.StreamsPath = Path.join(global.MetadataPath, 'streams')

this.oldPlaybackSessionMap = {} // TODO: Remove after updated mobile versions
this.sessions = []
}

@@ -74,13 +76,14 @@ class PlaybackSessionManager {
}

async syncLocalSessionsRequest(req, res) {
const deviceInfo = await this.getDeviceInfo(req)
const user = req.user
const sessions = req.body.sessions || []

const syncResults = []
for (const sessionJson of sessions) {
Logger.info(`[PlaybackSessionManager] Syncing local session "${sessionJson.displayTitle}" (${sessionJson.id})`)
const result = await this.syncLocalSession(user, sessionJson)
const result = await this.syncLocalSession(user, sessionJson, deviceInfo)
syncResults.push(result)
}

@@ -89,7 +92,7 @@ class PlaybackSessionManager {
})
}

async syncLocalSession(user, sessionJson) {
async syncLocalSession(user, sessionJson, deviceInfo) {
const libraryItem = Database.getLibraryItem(sessionJson.libraryItemId)
const episode = (sessionJson.episodeId && libraryItem && libraryItem.isPodcast) ? libraryItem.media.getEpisode(sessionJson.episodeId) : null
if (!libraryItem || (libraryItem.isPodcast && !episode)) {
@@ -101,10 +104,37 @@ class PlaybackSessionManager {
}
}

// TODO: Temp update local playback session id to uuidv4 & library item/book/episode ids
if (sessionJson.id?.startsWith('play_local_')) {
if (!this.oldPlaybackSessionMap[sessionJson.id]) {
const newSessionId = uuidv4()
this.oldPlaybackSessionMap[sessionJson.id] = newSessionId
sessionJson.id = newSessionId
} else {
sessionJson.id = this.oldPlaybackSessionMap[sessionJson.id]
}
}
if (sessionJson.libraryItemId !== libraryItem.id) {
Logger.info(`[PlaybackSessionManager] Mapped old libraryItemId "${sessionJson.libraryItemId}" to ${libraryItem.id}`)
sessionJson.libraryItemId = libraryItem.id
sessionJson.bookId = episode ? null : libraryItem.media.id
}
if (!sessionJson.bookId && !episode) {
sessionJson.bookId = libraryItem.media.id
}
if (episode && sessionJson.episodeId !== episode.id) {
Logger.info(`[PlaybackSessionManager] Mapped old episodeId "${sessionJson.episodeId}" to ${episode.id}`)
sessionJson.episodeId = episode.id
}
if (sessionJson.libraryId !== libraryItem.libraryId) {
sessionJson.libraryId = libraryItem.libraryId
}

let session = await Database.getPlaybackSession(sessionJson.id)
if (!session) {
// New session from local
session = new PlaybackSession(sessionJson)
session.deviceInfo = deviceInfo
Logger.debug(`[PlaybackSessionManager] Inserting new session for "${session.displayTitle}" (${session.id})`)
await Database.createPlaybackSession(session)
} else {
@@ -152,8 +182,11 @@ class PlaybackSessionManager {
return result
}

async syncLocalSessionRequest(user, sessionJson, res) {
const result = await this.syncLocalSession(user, sessionJson)
async syncLocalSessionRequest(req, res) {
const deviceInfo = await this.getDeviceInfo(req)
const user = req.user
const sessionJson = req.body
const result = await this.syncLocalSession(user, sessionJson, deviceInfo)
if (result.error) {
res.status(500).send(result.error)
} else {
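A sketch of the request shape the reworked syncLocalSessionRequest now handles; the host, token, and session fields below are placeholders, not part of the commit:

// POST api/session/local: user and device info are now derived from the request itself
await fetch('https://abs.example.com/api/session/local', {
  method: 'POST',
  headers: { 'Authorization': 'Bearer <token>', 'Content-Type': 'application/json' },
  body: JSON.stringify({
    id: 'play_local_123',        // old-style local id, remapped server-side to a uuidv4
    libraryItemId: 'li_abc123',  // old library item id, resolved via oldLibraryItemId
    displayTitle: 'Example Book',
    currentTime: 845.5,
    timeListening: 120
  })
})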
@@ -22,6 +22,7 @@ module.exports = (sequelize) => {
})
return new oldLibrary({
id: libraryExpanded.id,
oldLibraryId: libraryExpanded.extraData?.oldLibraryId || null,
name: libraryExpanded.name,
folders,
displayOrder: libraryExpanded.displayOrder,
@@ -92,6 +93,10 @@ module.exports = (sequelize) => {
}

static getFromOld(oldLibrary) {
const extraData = {}
if (oldLibrary.oldLibraryId) {
extraData.oldLibraryId = oldLibrary.oldLibraryId
}
return {
id: oldLibrary.id,
name: oldLibrary.name,
@@ -101,7 +106,8 @@ module.exports = (sequelize) => {
provider: oldLibrary.provider,
settings: oldLibrary.settings?.toJSON() || {},
createdAt: oldLibrary.createdAt,
updatedAt: oldLibrary.lastUpdate
updatedAt: oldLibrary.lastUpdate,
extraData
}
}

@@ -127,7 +133,8 @@ module.exports = (sequelize) => {
provider: DataTypes.STRING,
lastScan: DataTypes.DATE,
lastScanVersion: DataTypes.STRING,
settings: DataTypes.JSON
settings: DataTypes.JSON,
extraData: DataTypes.JSON
}, {
sequelize,
modelName: 'library'
@@ -49,6 +49,7 @@ module.exports = (sequelize) => {
return new oldLibraryItem({
id: libraryItemExpanded.id,
ino: libraryItemExpanded.ino,
oldLibraryItemId: libraryItemExpanded.extraData?.oldLibraryItemId || null,
libraryId: libraryItemExpanded.libraryId,
folderId: libraryItemExpanded.libraryFolderId,
path: libraryItemExpanded.path,
@@ -261,6 +262,10 @@ module.exports = (sequelize) => {
}

static getFromOld(oldLibraryItem) {
const extraData = {}
if (oldLibraryItem.oldLibraryItemId) {
extraData.oldLibraryItemId = oldLibraryItem.oldLibraryItemId
}
return {
id: oldLibraryItem.id,
ino: oldLibraryItem.ino,
@@ -278,7 +283,8 @@ module.exports = (sequelize) => {
lastScanVersion: oldLibraryItem.scanVersion,
libraryId: oldLibraryItem.libraryId,
libraryFolderId: oldLibraryItem.folderId,
libraryFiles: oldLibraryItem.libraryFiles?.map(lf => lf.toJSON()) || []
libraryFiles: oldLibraryItem.libraryFiles?.map(lf => lf.toJSON()) || [],
extraData
}
}

@@ -317,7 +323,8 @@ module.exports = (sequelize) => {
birthtime: DataTypes.DATE(6),
lastScan: DataTypes.DATE,
lastScanVersion: DataTypes.STRING,
libraryFiles: DataTypes.JSON
libraryFiles: DataTypes.JSON,
extraData: DataTypes.JSON
}, {
sequelize,
modelName: 'libraryItem'
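Loosely, the round trip introduced above looks like this (a sketch with made-up values, not code from the commit): the legacy id is stored in the new extraData JSON column and surfaced again as oldLibraryItemId when converting back to the old object shape.

// old object -> new row (getFromOld): extraData.oldLibraryItemId is only set when a legacy id exists
const row = { id: 'b2f0c9e4-0000-4000-8000-abcdefabcdef', extraData: { oldLibraryItemId: 'li_abc123' } }

// new row -> old object (getOldLibraryItem): read it back with optional chaining
const oldLibraryItemId = row.extraData?.oldLibraryItemId || null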
@@ -15,6 +15,7 @@ module.exports = (sequelize) => {
libraryItemId: libraryItemId || null,
podcastId: this.podcastId,
id: this.id,
oldEpisodeId: this.extraData?.oldEpisodeId || null,
index: this.index,
season: this.season,
episode: this.episode,
@@ -38,6 +39,10 @@ module.exports = (sequelize) => {
}

static getFromOld(oldEpisode) {
const extraData = {}
if (oldEpisode.oldEpisodeId) {
extraData.oldEpisodeId = oldEpisode.oldEpisodeId
}
return {
id: oldEpisode.id,
index: oldEpisode.index,
@@ -54,7 +59,8 @@ module.exports = (sequelize) => {
publishedAt: oldEpisode.publishedAt,
podcastId: oldEpisode.podcastId,
audioFile: oldEpisode.audioFile?.toJSON() || null,
chapters: oldEpisode.chapters
chapters: oldEpisode.chapters,
extraData
}
}
}
@@ -79,7 +85,8 @@ module.exports = (sequelize) => {
publishedAt: DataTypes.DATE,

audioFile: DataTypes.JSON,
chapters: DataTypes.JSON
chapters: DataTypes.JSON,
extraData: DataTypes.JSON
}, {
sequelize,
modelName: 'podcastEpisode'
@@ -6,6 +6,7 @@ const { filePathToPOSIX } = require('../utils/fileUtils')
class Library {
constructor(library = null) {
this.id = null
this.oldLibraryId = null // TODO: Temp
this.name = null
this.folders = []
this.displayOrder = 1
@@ -39,6 +40,7 @@ class Library {

construct(library) {
this.id = library.id
this.oldLibraryId = library.oldLibraryId
this.name = library.name
this.folders = (library.folders || []).map(f => new Folder(f))
this.displayOrder = library.displayOrder || 1
@@ -74,6 +76,7 @@ class Library {
toJSON() {
return {
id: this.id,
oldLibraryId: this.oldLibraryId,
name: this.name,
folders: (this.folders || []).map(f => f.toJSON()),
displayOrder: this.displayOrder,
@@ -16,6 +16,7 @@ class LibraryItem {
constructor(libraryItem = null) {
this.id = null
this.ino = null // Inode
this.oldLibraryItemId = null

this.libraryId = null
this.folderId = null
@@ -52,6 +53,7 @@ class LibraryItem {
construct(libraryItem) {
this.id = libraryItem.id
this.ino = libraryItem.ino || null
this.oldLibraryItemId = libraryItem.oldLibraryItemId
this.libraryId = libraryItem.libraryId
this.folderId = libraryItem.folderId
this.path = libraryItem.path
@@ -97,6 +99,7 @@ class LibraryItem {
return {
id: this.id,
ino: this.ino,
oldLibraryItemId: this.oldLibraryItemId,
libraryId: this.libraryId,
folderId: this.folderId,
path: this.path,
@@ -121,6 +124,7 @@ class LibraryItem {
return {
id: this.id,
ino: this.ino,
oldLibraryItemId: this.oldLibraryItemId,
libraryId: this.libraryId,
folderId: this.folderId,
path: this.path,
@@ -145,6 +149,7 @@ class LibraryItem {
return {
id: this.id,
ino: this.ino,
oldLibraryItemId: this.oldLibraryItemId,
libraryId: this.libraryId,
folderId: this.folderId,
path: this.path,
@@ -115,13 +115,24 @@ class PlaybackSession {
this.userId = session.userId
this.libraryId = session.libraryId || null
this.libraryItemId = session.libraryItemId
this.bookId = session.bookId
this.bookId = session.bookId || null
this.episodeId = session.episodeId
this.mediaType = session.mediaType
this.duration = session.duration
this.playMethod = session.playMethod
this.mediaPlayer = session.mediaPlayer || null

// Temp do not store old IDs
if (this.libraryId?.startsWith('lib_')) {
this.libraryId = null
}
if (this.libraryItemId?.startsWith('li_') || this.libraryItemId?.startsWith('local_')) {
this.libraryItemId = null
}
if (this.episodeId?.startsWith('ep_') || this.episodeId?.startsWith('local_')) {
this.episodeId = null
}

if (session.deviceInfo instanceof DeviceInfo) {
this.deviceInfo = new DeviceInfo(session.deviceInfo.toJSON())
} else {
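A sketch of the effect of the new guard clauses when a session is built from an old-format payload; the require path and values are illustrative:

const PlaybackSession = require('./PlaybackSession') // assumed path

const session = new PlaybackSession({
  libraryId: 'lib_123',     // old-format id, cleared to null so it is not stored
  libraryItemId: 'li_456',  // old-format id, cleared to null
  episodeId: 'ep_789',      // old-format id, cleared to null
  bookId: undefined         // now defaults to null instead of staying undefined
})
// session.libraryId, session.libraryItemId and session.episodeId all end up null here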
@@ -10,6 +10,7 @@ class PodcastEpisode {
this.libraryItemId = null
this.podcastId = null
this.id = null
this.oldEpisodeId = null
this.index = null

this.season = null
@@ -36,6 +37,7 @@ class PodcastEpisode {
this.libraryItemId = episode.libraryItemId
this.podcastId = episode.podcastId
this.id = episode.id
this.oldEpisodeId = episode.oldEpisodeId
this.index = episode.index
this.season = episode.season
this.episode = episode.episode
@@ -59,6 +61,7 @@ class PodcastEpisode {
libraryItemId: this.libraryItemId,
podcastId: this.podcastId,
id: this.id,
oldEpisodeId: this.oldEpisodeId,
index: this.index,
season: this.season,
episode: this.episode,
@@ -81,6 +84,7 @@ class PodcastEpisode {
libraryItemId: this.libraryItemId,
podcastId: this.podcastId,
id: this.id,
oldEpisodeId: this.oldEpisodeId,
index: this.index,
season: this.season,
episode: this.episode,
@@ -335,6 +335,11 @@ class Podcast {
}

getEpisode(episodeId) {
if (!episodeId) return null

// Support old episode ids for mobile downloads
if (episodeId.startsWith('ep_')) return this.episodes.find(ep => ep.oldEpisodeId == episodeId)

return this.episodes.find(ep => ep.id == episodeId)
}
@@ -1,3 +1,4 @@
const { DataTypes, QueryInterface } = require('sequelize')
const Path = require('path')
const uuidv4 = require("uuid").v4
const Logger = require('../../Logger')
@@ -17,29 +18,6 @@ const oldDbIdMap = {
podcasts: {}, // key is library item id
devices: {} // key is a json stringify of the old DeviceInfo data OR deviceId if it exists
}
const newRecords = {
user: [],
library: [],
libraryFolder: [],
author: [],
book: [],
podcast: [],
libraryItem: [],
bookAuthor: [],
series: [],
bookSeries: [],
podcastEpisode: [],
mediaProgress: [],
device: [],
playbackSession: [],
collection: [],
collectionBook: [],
playlist: [],
playlistMediaItem: [],
feed: [],
feedEpisode: [],
setting: []
}

function getDeviceInfoString(deviceInfo, UserId) {
if (!deviceInfo) return null
@@ -60,9 +38,22 @@ function getDeviceInfoString(deviceInfo, UserId) {
return 'temp-' + Buffer.from(keys.join('-'), 'utf-8').toString('base64')
}

/**
* Migrate oldLibraryItem.media to Book model
* Migrate BookSeries and BookAuthor
* @param {objects.LibraryItem} oldLibraryItem
* @param {object} LibraryItem models.LibraryItem object
* @returns {object} { book: object, bookSeries: [], bookAuthor: [] }
*/
function migrateBook(oldLibraryItem, LibraryItem) {
const oldBook = oldLibraryItem.media

const _newRecords = {
book: null,
bookSeries: [],
bookAuthor: []
}

//
// Migrate Book
//
@@ -91,17 +82,23 @@ function migrateBook(oldLibraryItem, LibraryItem) {
tags: oldBook.tags,
genres: oldBook.metadata.genres
}
newRecords.book.push(Book)
_newRecords.book = Book
oldDbIdMap.books[oldLibraryItem.id] = Book.id

//
// Migrate BookAuthors
//
const bookAuthorsInserted = []
for (const oldBookAuthor of oldBook.metadata.authors) {
if (oldDbIdMap.authors[LibraryItem.libraryId][oldBookAuthor.id]) {
newRecords.bookAuthor.push({
const authorId = oldDbIdMap.authors[LibraryItem.libraryId][oldBookAuthor.id]

if (bookAuthorsInserted.includes(authorId)) continue // Duplicate prevention
bookAuthorsInserted.push(authorId)

_newRecords.bookAuthor.push({
id: uuidv4(),
authorId: oldDbIdMap.authors[LibraryItem.libraryId][oldBookAuthor.id],
authorId,
bookId: Book.id
})
} else {
@@ -112,22 +109,40 @@ function migrateBook(oldLibraryItem, LibraryItem) {
//
// Migrate BookSeries
//
const bookSeriesInserted = []
for (const oldBookSeries of oldBook.metadata.series) {
if (oldDbIdMap.series[LibraryItem.libraryId][oldBookSeries.id]) {
const BookSeries = {
const seriesId = oldDbIdMap.series[LibraryItem.libraryId][oldBookSeries.id]

if (bookSeriesInserted.includes(seriesId)) continue // Duplicate prevention
bookSeriesInserted.push(seriesId)

_newRecords.bookSeries.push({
id: uuidv4(),
sequence: oldBookSeries.sequence,
seriesId: oldDbIdMap.series[LibraryItem.libraryId][oldBookSeries.id],
bookId: Book.id
}
newRecords.bookSeries.push(BookSeries)
})
} else {
Logger.warn(`[dbMigration] migrateBook: Series not found "${oldBookSeries.name}"`)
}
}
return _newRecords
}
/**
* Migrate oldLibraryItem.media to Podcast model
* Migrate PodcastEpisode
* @param {objects.LibraryItem} oldLibraryItem
* @param {object} LibraryItem models.LibraryItem object
* @returns {object} { podcast: object, podcastEpisode: [] }
*/
function migratePodcast(oldLibraryItem, LibraryItem) {
const _newRecords = {
podcast: null,
podcastEpisode: []
}

const oldPodcast = oldLibraryItem.media
const oldPodcastMetadata = oldPodcast.metadata

@@ -161,7 +176,7 @@ function migratePodcast(oldLibraryItem, LibraryItem) {
tags: oldPodcast.tags,
genres: oldPodcastMetadata.genres
}
newRecords.podcast.push(Podcast)
_newRecords.podcast = Podcast
oldDbIdMap.podcasts[oldLibraryItem.id] = Podcast.id

//
@@ -173,6 +188,7 @@ function migratePodcast(oldLibraryItem, LibraryItem) {

const PodcastEpisode = {
id: uuidv4(),
oldEpisodeId: oldEpisode.id,
index: oldEpisode.index,
season: oldEpisode.season || null,
episode: oldEpisode.episode || null,
@@ -191,12 +207,26 @@ function migratePodcast(oldLibraryItem, LibraryItem) {
audioFile: oldEpisode.audioFile,
chapters: oldEpisode.chapters || []
}
newRecords.podcastEpisode.push(PodcastEpisode)
_newRecords.podcastEpisode.push(PodcastEpisode)
oldDbIdMap.podcastEpisodes[oldEpisode.id] = PodcastEpisode.id
}
return _newRecords
}

/**
* Migrate libraryItems to LibraryItem, Book, Podcast models
* @param {Array<objects.LibraryItem>} oldLibraryItems
* @returns {object} { libraryItem: [], book: [], podcast: [], podcastEpisode: [], bookSeries: [], bookAuthor: [] }
*/
function migrateLibraryItems(oldLibraryItems) {
const _newRecords = {
book: [],
podcast: [],
podcastEpisode: [],
bookSeries: [],
bookAuthor: [],
libraryItem: []
}
for (const oldLibraryItem of oldLibraryItems) {
const libraryFolderId = oldDbIdMap.libraryFolders[oldLibraryItem.folderId]
if (!libraryFolderId) {
@@ -218,6 +248,7 @@ function migrateLibraryItems(oldLibraryItems) {
//
const LibraryItem = {
id: uuidv4(),
oldLibraryItemId: oldLibraryItem.id,
ino: oldLibraryItem.ino,
path: oldLibraryItem.path,
relPath: oldLibraryItem.relPath,
@@ -241,22 +272,39 @@ function migrateLibraryItems(oldLibraryItems) {
})
}
oldDbIdMap.libraryItems[oldLibraryItem.id] = LibraryItem.id
newRecords.libraryItem.push(LibraryItem)
_newRecords.libraryItem.push(LibraryItem)

//
// Migrate Book/Podcast
//
if (oldLibraryItem.mediaType === 'book') {
migrateBook(oldLibraryItem, LibraryItem)
const bookRecords = migrateBook(oldLibraryItem, LibraryItem)
_newRecords.book.push(bookRecords.book)
_newRecords.bookAuthor.push(...bookRecords.bookAuthor)
_newRecords.bookSeries.push(...bookRecords.bookSeries)

LibraryItem.mediaId = oldDbIdMap.books[oldLibraryItem.id]
} else if (oldLibraryItem.mediaType === 'podcast') {
migratePodcast(oldLibraryItem, LibraryItem)
const podcastRecords = migratePodcast(oldLibraryItem, LibraryItem)
_newRecords.podcast.push(podcastRecords.podcast)
_newRecords.podcastEpisode.push(...podcastRecords.podcastEpisode)

LibraryItem.mediaId = oldDbIdMap.podcasts[oldLibraryItem.id]
}
}
return _newRecords
}

/**
* Migrate Library and LibraryFolder
* @param {Array<objects.Library>} oldLibraries
* @returns {object} { library: [], libraryFolder: [] }
*/
function migrateLibraries(oldLibraries) {
const _newRecords = {
library: [],
libraryFolder: []
}
for (const oldLibrary of oldLibraries) {
if (!['book', 'podcast'].includes(oldLibrary.mediaType)) {
Logger.error(`[dbMigration] migrateLibraries: Not migrating library with mediaType=${oldLibrary.mediaType}`)
@@ -268,6 +316,7 @@ function migrateLibraries(oldLibraries) {
//
const Library = {
id: uuidv4(),
oldLibraryId: oldLibrary.id,
name: oldLibrary.name,
displayOrder: oldLibrary.displayOrder,
icon: oldLibrary.icon || null,
@@ -278,7 +327,7 @@ function migrateLibraries(oldLibraries) {
updatedAt: oldLibrary.lastUpdate
}
oldDbIdMap.libraries[oldLibrary.id] = Library.id
newRecords.library.push(Library)
_newRecords.library.push(Library)

//
// Migrate LibraryFolders
@@ -292,12 +341,21 @@ function migrateLibraries(oldLibraries) {
libraryId: Library.id
}
oldDbIdMap.libraryFolders[oldFolder.id] = LibraryFolder.id
newRecords.libraryFolder.push(LibraryFolder)
_newRecords.libraryFolder.push(LibraryFolder)
}
}
return _newRecords
}

/**
* Migrate Author
* Previously Authors were shared between libraries, this will ensure every author has one library
* @param {Array<objects.entities.Author>} oldAuthors
* @param {Array<objects.LibraryItem>} oldLibraryItems
* @returns {Array<object>} Array of Author model objs
*/
function migrateAuthors(oldAuthors, oldLibraryItems) {
const _newRecords = []
for (const oldAuthor of oldAuthors) {
// Get an array of NEW library ids that have this author
const librariesWithThisAuthor = [...new Set(oldLibraryItems.map(li => {
@@ -325,12 +383,21 @@ function migrateAuthors(oldAuthors, oldLibraryItems) {
}
if (!oldDbIdMap.authors[libraryId]) oldDbIdMap.authors[libraryId] = {}
oldDbIdMap.authors[libraryId][oldAuthor.id] = Author.id
newRecords.author.push(Author)
_newRecords.push(Author)
}
}
return _newRecords
}

/**
* Migrate Series
* Previously Series were shared between libraries, this will ensure every series has one library
* @param {Array<objects.entities.Series>} oldSerieses
* @param {Array<objects.LibraryItem>} oldLibraryItems
* @returns {Array<object>} Array of Series model objs
*/
function migrateSeries(oldSerieses, oldLibraryItems) {
const _newRecords = []
// Originaly series were shared between libraries if they had the same name
// Series will be separate between libraries
for (const oldSeries of oldSerieses) {
@@ -355,16 +422,47 @@ function migrateSeries(oldSerieses, oldLibraryItems) {
}
if (!oldDbIdMap.series[libraryId]) oldDbIdMap.series[libraryId] = {}
oldDbIdMap.series[libraryId][oldSeries.id] = Series.id
newRecords.series.push(Series)
_newRecords.push(Series)
}
}
return _newRecords
}

/**
* Migrate users to User and MediaProgress models
* @param {Array<objects.User>} oldUsers
* @returns {object} { user: [], mediaProgress: [] }
*/
function migrateUsers(oldUsers) {
const _newRecords = {
user: [],
mediaProgress: []
}
for (const oldUser of oldUsers) {
//
// Migrate User
//
// Convert old library ids to new ids
const librariesAccessible = (oldUser.librariesAccessible || []).map((lid) => oldDbIdMap.libraries[lid]).filter(li => li)

// Convert old library item ids to new ids
const bookmarks = (oldUser.bookmarks || []).map(bm => {
bm.libraryItemId = oldDbIdMap.libraryItems[bm.libraryItemId]
return bm
}).filter(bm => bm.libraryItemId)

// Convert old series ids to new
const seriesHideFromContinueListening = (oldUser.seriesHideFromContinueListening || []).map(oldSeriesId => {
// Series were split to be per library
// This will use the first series it finds
for (const libraryId in oldDbIdMap.series) {
if (oldDbIdMap.series[libraryId][oldSeriesId]) {
return oldDbIdMap.series[libraryId][oldSeriesId]
}
}
return null
}).filter(se => se)

const User = {
id: uuidv4(),
username: oldUser.username,
@@ -374,19 +472,19 @@ function migrateUsers(oldUsers) {
isActive: !!oldUser.isActive,
lastSeen: oldUser.lastSeen || null,
extraData: {
seriesHideFromContinueListening: oldUser.seriesHideFromContinueListening || [],
seriesHideFromContinueListening,
oldUserId: oldUser.id // Used to keep old tokens
},
createdAt: oldUser.createdAt || Date.now(),
permissions: {
...oldUser.permissions,
librariesAccessible: oldUser.librariesAccessible || [],
librariesAccessible,
itemTagsSelected: oldUser.itemTagsSelected || []
},
bookmarks: oldUser.bookmarks
bookmarks
}
oldDbIdMap.users[oldUser.id] = User.id
newRecords.user.push(User)
_newRecords.user.push(User)

//
// Migrate MediaProgress
@@ -425,12 +523,23 @@ function migrateUsers(oldUsers) {
progress: oldMediaProgress.progress
}
}
newRecords.mediaProgress.push(MediaProgress)
_newRecords.mediaProgress.push(MediaProgress)
}
}
return _newRecords
}
/**
* Migrate playbackSessions to PlaybackSession and Device models
* @param {Array<objects.PlaybackSession>} oldSessions
* @returns {object} { playbackSession: [], device: [] }
*/
function migrateSessions(oldSessions) {
const _newRecords = {
device: [],
playbackSession: []
}

for (const oldSession of oldSessions) {
const userId = oldDbIdMap.users[oldSession.userId]
if (!userId) {
@@ -495,12 +604,12 @@ function migrateSessions(oldSessions) {
userId,
extraData
}
newRecords.device.push(Device)
deviceId = Device.id
_newRecords.device.push(Device)
oldDbIdMap.devices[deviceDeviceId] = Device.id
}
}

//
// Migrate PlaybackSession
//
@@ -528,7 +637,7 @@ function migrateSessions(oldSessions) {
serverVersion: oldSession.deviceInfo?.serverVersion || null,
createdAt: oldSession.startedAt,
updatedAt: oldSession.updatedAt,
userId, // Can be null
userId,
deviceId,
timeListening: oldSession.timeListening,
coverPath: oldSession.coverPath,
@@ -539,11 +648,21 @@ function migrateSessions(oldSessions) {
libraryItemId: oldDbIdMap.libraryItems[oldSession.libraryItemId]
}
}
newRecords.playbackSession.push(PlaybackSession)
_newRecords.playbackSession.push(PlaybackSession)
}
return _newRecords
}

/**
* Migrate collections to Collection & CollectionBook
* @param {Array<objects.Collection>} oldCollections
* @returns {object} { collection: [], collectionBook: [] }
*/
function migrateCollections(oldCollections) {
const _newRecords = {
collection: [],
collectionBook: []
}
for (const oldCollection of oldCollections) {
const libraryId = oldDbIdMap.libraries[oldCollection.libraryId]
if (!libraryId) {
@@ -566,7 +685,7 @@ function migrateCollections(oldCollections) {
libraryId
}
oldDbIdMap.collections[oldCollection.id] = Collection.id
newRecords.collection.push(Collection)
_newRecords.collection.push(Collection)

let order = 1
BookIds.forEach((bookId) => {
@@ -577,12 +696,22 @@ function migrateCollections(oldCollections) {
collectionId: Collection.id,
order: order++
}
newRecords.collectionBook.push(CollectionBook)
_newRecords.collectionBook.push(CollectionBook)
})
}
return _newRecords
}

/**
* Migrate playlists to Playlist and PlaylistMediaItem
* @param {Array<objects.Playlist>} oldPlaylists
* @returns {object} { playlist: [], playlistMediaItem: [] }
*/
function migratePlaylists(oldPlaylists) {
const _newRecords = {
playlist: [],
playlistMediaItem: []
}
for (const oldPlaylist of oldPlaylists) {
const libraryId = oldDbIdMap.libraries[oldPlaylist.libraryId]
if (!libraryId) {
@@ -622,7 +751,7 @@ function migratePlaylists(oldPlaylists) {
userId,
libraryId
}
newRecords.playlist.push(Playlist)
_newRecords.playlist.push(Playlist)

let order = 1
MediaItemIds.forEach((mediaItemId) => {
@@ -634,12 +763,22 @@ function migratePlaylists(oldPlaylists) {
playlistId: Playlist.id,
order: order++
}
newRecords.playlistMediaItem.push(PlaylistMediaItem)
_newRecords.playlistMediaItem.push(PlaylistMediaItem)
})
}
return _newRecords
}

/**
* Migrate feeds to Feed and FeedEpisode models
* @param {Array<objects.Feed>} oldFeeds
* @returns {object} { feed: [], feedEpisode: [] }
*/
function migrateFeeds(oldFeeds) {
const _newRecords = {
feed: [],
feedEpisode: []
}
for (const oldFeed of oldFeeds) {
if (!oldFeed.episodes?.length) {
continue
@@ -698,7 +837,7 @@ function migrateFeeds(oldFeeds) {
updatedAt: oldFeed.updatedAt,
userId
}
newRecords.feed.push(Feed)
_newRecords.feed.push(Feed)

//
// Migrate FeedEpisodes
@@ -724,65 +863,227 @@ function migrateFeeds(oldFeeds) {
updatedAt: oldFeed.updatedAt,
feedId: Feed.id
}
newRecords.feedEpisode.push(FeedEpisode)
_newRecords.feedEpisode.push(FeedEpisode)
}
}
return _newRecords
}

/**
* Migrate ServerSettings, NotificationSettings and EmailSettings to Setting model
* @param {Array<objects.settings.*>} oldSettings
* @returns {Array<object>} Array of Setting model objs
*/
function migrateSettings(oldSettings) {
const _newRecords = []
const serverSettings = oldSettings.find(s => s.id === 'server-settings')
const notificationSettings = oldSettings.find(s => s.id === 'notification-settings')
const emailSettings = oldSettings.find(s => s.id === 'email-settings')

if (serverSettings) {
newRecords.setting.push({
_newRecords.push({
key: 'server-settings',
value: serverSettings
})
}

if (notificationSettings) {
newRecords.setting.push({
_newRecords.push({
key: 'notification-settings',
value: notificationSettings
})
}

if (emailSettings) {
newRecords.setting.push({
_newRecords.push({
key: 'email-settings',
value: emailSettings
})
}
return _newRecords
}

/**
* Load old libraries and bulkCreate new Library and LibraryFolder rows
* @param {Map<string,Model>} DatabaseModels
*/
async function handleMigrateLibraries(DatabaseModels) {
const oldLibraries = await oldDbFiles.loadOldData('libraries')
const newLibraryRecords = migrateLibraries(oldLibraries)
for (const model in newLibraryRecords) {
Logger.info(`[dbMigration] Inserting ${newLibraryRecords[model].length} ${model} rows`)
await DatabaseModels[model].bulkCreate(newLibraryRecords[model])
}
}

/**
* Load old EmailSettings, NotificationSettings and ServerSettings and bulkCreate new Setting rows
* @param {Map<string,Model>} DatabaseModels
*/
async function handleMigrateSettings(DatabaseModels) {
const oldSettings = await oldDbFiles.loadOldData('settings')
const newSettings = migrateSettings(oldSettings)
Logger.info(`[dbMigration] Inserting ${newSettings.length} setting rows`)
await DatabaseModels.setting.bulkCreate(newSettings)
}

/**
* Load old authors and bulkCreate new Author rows
* @param {Map<string,Model>} DatabaseModels
* @param {Array<objects.LibraryItem>} oldLibraryItems
*/
async function handleMigrateAuthors(DatabaseModels, oldLibraryItems) {
const oldAuthors = await oldDbFiles.loadOldData('authors')
const newAuthors = migrateAuthors(oldAuthors, oldLibraryItems)
Logger.info(`[dbMigration] Inserting ${newAuthors.length} author rows`)
await DatabaseModels.author.bulkCreate(newAuthors)
}

/**
* Load old series and bulkCreate new Series rows
* @param {Map<string,Model>} DatabaseModels
* @param {Array<objects.LibraryItem>} oldLibraryItems
*/
async function handleMigrateSeries(DatabaseModels, oldLibraryItems) {
const oldSeries = await oldDbFiles.loadOldData('series')
const newSeries = migrateSeries(oldSeries, oldLibraryItems)
Logger.info(`[dbMigration] Inserting ${newSeries.length} series rows`)
await DatabaseModels.series.bulkCreate(newSeries)
}

/**
* bulkCreate new LibraryItem, Book and Podcast rows
* @param {Map<string,Model>} DatabaseModels
* @param {Array<objects.LibraryItem>} oldLibraryItems
*/
async function handleMigrateLibraryItems(DatabaseModels, oldLibraryItems) {
const newItemsBooksPodcasts = migrateLibraryItems(oldLibraryItems)
for (const model in newItemsBooksPodcasts) {
Logger.info(`[dbMigration] Inserting ${newItemsBooksPodcasts[model].length} ${model} rows`)
await DatabaseModels[model].bulkCreate(newItemsBooksPodcasts[model])
}
}

/**
* Migrate authors, series then library items in chunks
* Authors and series require old library items loaded first
* @param {Map<string,Model>} DatabaseModels
*/
async function handleMigrateAuthorsSeriesAndLibraryItems(DatabaseModels) {
const oldLibraryItems = await oldDbFiles.loadOldData('libraryItems')
await handleMigrateAuthors(DatabaseModels, oldLibraryItems)

await handleMigrateSeries(DatabaseModels, oldLibraryItems)

// Migrate library items in chunks of 1000
const numChunks = Math.ceil(oldLibraryItems.length / 1000)
for (let i = 0; i < numChunks; i++) {
let start = i * 1000
await handleMigrateLibraryItems(DatabaseModels, oldLibraryItems.slice(start, start + 1000))
}
}

/**
* Load old users and bulkCreate new User rows
* @param {Map<string,Model>} DatabaseModels
*/
async function handleMigrateUsers(DatabaseModels) {
const oldUsers = await oldDbFiles.loadOldData('users')
const newUserRecords = migrateUsers(oldUsers)
for (const model in newUserRecords) {
Logger.info(`[dbMigration] Inserting ${newUserRecords[model].length} ${model} rows`)
await DatabaseModels[model].bulkCreate(newUserRecords[model])
}
}

/**
* Load old sessions and bulkCreate new PlaybackSession & Device rows
* @param {Map<string,Model>} DatabaseModels
*/
async function handleMigrateSessions(DatabaseModels) {
const oldSessions = await oldDbFiles.loadOldData('sessions')

let chunkSize = 1000
let numChunks = Math.ceil(oldSessions.length / chunkSize)

for (let i = 0; i < numChunks; i++) {
let start = i * chunkSize
const newSessionRecords = migrateSessions(oldSessions.slice(start, start + chunkSize))
for (const model in newSessionRecords) {
Logger.info(`[dbMigration] Inserting ${newSessionRecords[model].length} ${model} rows`)
await DatabaseModels[model].bulkCreate(newSessionRecords[model])
}
}

}

/**
* Load old collections and bulkCreate new Collection, CollectionBook models
* @param {Map<string,Model>} DatabaseModels
*/
async function handleMigrateCollections(DatabaseModels) {
const oldCollections = await oldDbFiles.loadOldData('collections')
const newCollectionRecords = migrateCollections(oldCollections)
for (const model in newCollectionRecords) {
Logger.info(`[dbMigration] Inserting ${newCollectionRecords[model].length} ${model} rows`)
await DatabaseModels[model].bulkCreate(newCollectionRecords[model])
}
}

/**
* Load old playlists and bulkCreate new Playlist, PlaylistMediaItem models
* @param {Map<string,Model>} DatabaseModels
*/
async function handleMigratePlaylists(DatabaseModels) {
const oldPlaylists = await oldDbFiles.loadOldData('playlists')
const newPlaylistRecords = migratePlaylists(oldPlaylists)
for (const model in newPlaylistRecords) {
Logger.info(`[dbMigration] Inserting ${newPlaylistRecords[model].length} ${model} rows`)
await DatabaseModels[model].bulkCreate(newPlaylistRecords[model])
}
}

/**
* Load old feeds and bulkCreate new Feed, FeedEpisode models
* @param {Map<string,Model>} DatabaseModels
*/
async function handleMigrateFeeds(DatabaseModels) {
const oldFeeds = await oldDbFiles.loadOldData('feeds')
const newFeedRecords = migrateFeeds(oldFeeds)
for (const model in newFeedRecords) {
Logger.info(`[dbMigration] Inserting ${newFeedRecords[model].length} ${model} rows`)
await DatabaseModels[model].bulkCreate(newFeedRecords[model])
}
}
module.exports.migrate = async (DatabaseModels) => {
Logger.info(`[dbMigration] Starting migration`)

const data = await oldDbFiles.init()

const start = Date.now()
migrateSettings(data.settings)
migrateLibraries(data.libraries)
migrateAuthors(data.authors, data.libraryItems)
migrateSeries(data.series, data.libraryItems)
migrateLibraryItems(data.libraryItems)
migrateUsers(data.users)
migrateSessions(data.sessions)
migrateCollections(data.collections)
migratePlaylists(data.playlists)
migrateFeeds(data.feeds)

let totalRecords = 0
for (const model in newRecords) {
Logger.info(`[dbMigration] Inserting ${newRecords[model].length} ${model} rows`)
if (newRecords[model].length) {
await DatabaseModels[model].bulkCreate(newRecords[model])
totalRecords += newRecords[model].length
}
}
// Migrate to Library and LibraryFolder models
await handleMigrateLibraries(DatabaseModels)

const elapsed = Date.now() - start
// Migrate EmailSettings, NotificationSettings and ServerSettings to Setting model
await handleMigrateSettings(DatabaseModels)

// Migrate Series, Author, LibraryItem, Book, Podcast
await handleMigrateAuthorsSeriesAndLibraryItems(DatabaseModels)

// Migrate User, MediaProgress
await handleMigrateUsers(DatabaseModels)

// Migrate PlaybackSession, Device
await handleMigrateSessions(DatabaseModels)

// Migrate Collection, CollectionBook
await handleMigrateCollections(DatabaseModels)

// Migrate Playlist, PlaylistMediaItem
await handleMigratePlaylists(DatabaseModels)

// Migrate Feed, FeedEpisode
await handleMigrateFeeds(DatabaseModels)

// Purge author images and cover images from cache
try {
@@ -796,7 +1097,8 @@ module.exports.migrate = async (DatabaseModels) => {
// Put all old db folders into a zipfile oldDb.zip
await oldDbFiles.zipWrapOldDb()

Logger.info(`[dbMigration] Migration complete. ${totalRecords} rows. Elapsed ${(elapsed / 1000).toFixed(2)}s`)
const elapsed = Date.now() - start
Logger.info(`[dbMigration] Migration complete. Elapsed ${(elapsed / 1000).toFixed(2)}s`)
}

/**
@@ -805,4 +1107,205 @@ module.exports.migrate = async (DatabaseModels) => {
module.exports.checkShouldMigrate = async () => {
if (await oldDbFiles.checkHasOldDb()) return true
return oldDbFiles.checkHasOldDbZip()
}

/**
* Migration from 2.3.0 to 2.3.1 - create extraData columns in LibraryItem and PodcastEpisode
* @param {QueryInterface} queryInterface
*/
async function migrationPatchNewColumns(queryInterface) {
try {
return queryInterface.sequelize.transaction(t => {
return Promise.all([
queryInterface.addColumn('libraryItems', 'extraData', {
type: DataTypes.JSON
}, { transaction: t }),
queryInterface.addColumn('podcastEpisodes', 'extraData', {
type: DataTypes.JSON
}, { transaction: t }),
queryInterface.addColumn('libraries', 'extraData', {
type: DataTypes.JSON
}, { transaction: t })
])
})
} catch (error) {
Logger.error(`[dbMigration] Migration from 2.3.0+ column creation failed`, error)
return false
}
}

/**
* Migration from 2.3.0 to 2.3.1 - old library item ids
* @param {/src/Database} ctx
*/
async function handleOldLibraryItems(ctx) {
const oldLibraryItems = await oldDbFiles.loadOldData('libraryItems')
const libraryItems = await ctx.models.libraryItem.getAllOldLibraryItems()

const bulkUpdateItems = []
const bulkUpdateEpisodes = []

for (const libraryItem of libraryItems) {
// Find matching old library item by ino
const matchingOldLibraryItem = oldLibraryItems.find(oli => oli.ino === libraryItem.ino)
if (matchingOldLibraryItem) {
oldDbIdMap.libraryItems[matchingOldLibraryItem.id] = libraryItem.id

bulkUpdateItems.push({
id: libraryItem.id,
extraData: {
oldLibraryItemId: matchingOldLibraryItem.id
}
})

if (libraryItem.media.episodes?.length && matchingOldLibraryItem.media.episodes?.length) {
for (const podcastEpisode of libraryItem.media.episodes) {
// Find matching old episode by audio file ino
const matchingOldPodcastEpisode = matchingOldLibraryItem.media.episodes.find(oep => oep.audioFile?.ino && oep.audioFile.ino === podcastEpisode.audioFile?.ino)
if (matchingOldPodcastEpisode) {
oldDbIdMap.podcastEpisodes[matchingOldPodcastEpisode.id] = podcastEpisode.id

bulkUpdateEpisodes.push({
id: podcastEpisode.id,
extraData: {
oldEpisodeId: matchingOldPodcastEpisode.id
}
})
}
}
}
}
}

if (bulkUpdateEpisodes.length) {
await ctx.models.podcastEpisode.bulkCreate(bulkUpdateEpisodes, {
updateOnDuplicate: ['extraData']
})
}

if (bulkUpdateItems.length) {
await ctx.models.libraryItem.bulkCreate(bulkUpdateItems, {
updateOnDuplicate: ['extraData']
})
}

Logger.info(`[dbMigration] Migration 2.3.0+: Updated ${bulkUpdateItems.length} library items & ${bulkUpdateEpisodes.length} episodes`)
}

/**
* Migration from 2.3.0 to 2.3.1 - updating oldLibraryId
* @param {/src/Database} ctx
*/
async function handleOldLibraries(ctx) {
const oldLibraries = await oldDbFiles.loadOldData('libraries')
const libraries = await ctx.models.library.getAllOldLibraries()

let librariesUpdated = 0
for (const library of libraries) {
// Find matching old library using exact match on folder paths, exact match on library name
const matchingOldLibrary = oldLibraries.find(ol => {
if (ol.name !== library.name) {
return false
}
const folderPaths = ol.folders?.map(f => f.fullPath) || []
return folderPaths.join(',') === library.folderPaths.join(',')
})

if (matchingOldLibrary) {
library.oldLibraryId = matchingOldLibrary.id
oldDbIdMap.libraries[library.oldLibraryId] = library.id
await ctx.models.library.updateFromOld(library)
librariesUpdated++
}
}
Logger.info(`[dbMigration] Migration 2.3.0+: Updated ${librariesUpdated} libraries`)
}

/**
* Migration from 2.3.0 to 2.3.1 - fixing librariesAccessible and bookmarks
* @param {/src/Database} ctx
*/
async function handleOldUsers(ctx) {
const users = await ctx.models.user.getOldUsers()

let usersUpdated = 0
for (const user of users) {
let hasUpdates = false
if (user.bookmarks?.length) {
user.bookmarks = user.bookmarks.map(bm => {
// Only update if this is not the old id format
if (!bm.libraryItemId.startsWith('li_')) return bm

bm.libraryItemId = oldDbIdMap.libraryItems[bm.libraryItemId]
hasUpdates = true
return bm
}).filter(bm => bm.libraryItemId)
}

// Convert old library ids to new library ids
if (user.librariesAccessible?.length) {
user.librariesAccessible = user.librariesAccessible.map(lid => {
if (!lid.startsWith('lib_')) return lid // Already not an old library id so dont change
hasUpdates = true
return oldDbIdMap.libraries[lid]
}).filter(lid => lid)
}

if (user.seriesHideFromContinueListening?.length) {
user.seriesHideFromContinueListening = user.seriesHideFromContinueListening.map((seriesId) => {
if (seriesId.startsWith('se_')) {
hasUpdates = true
return null // Filter out old series ids
}
return seriesId
}).filter(se => se)
}

if (hasUpdates) {
await ctx.models.user.updateFromOld(user)
usersUpdated++
}
}
Logger.info(`[dbMigration] Migration 2.3.0+: Updated ${usersUpdated} users`)
}

/**
* Migration from 2.3.0 to 2.3.1
* @param {/src/Database} ctx
*/
module.exports.migrationPatch = async (ctx) => {
const queryInterface = ctx.sequelize.getQueryInterface()
const librariesTableDescription = await queryInterface.describeTable('libraries')

if (librariesTableDescription?.extraData) {
Logger.info(`[dbMigration] Migration patch 2.3.0+ - extraData columns already on model`)
} else {
const migrationResult = await migrationPatchNewColumns(queryInterface)
if (migrationResult === false) {
return
}
}

const oldDbPath = Path.join(global.ConfigPath, 'oldDb.zip')
if (!await fs.pathExists(oldDbPath)) {
Logger.info(`[dbMigration] Migration patch 2.3.0+ unnecessary - no oldDb.zip found`)
return
}

const migrationStart = Date.now()
Logger.info(`[dbMigration] Applying migration patch from 2.3.0+`)

// Extract from oldDb.zip
if (!await oldDbFiles.checkExtractItemsUsersAndLibraries()) {
return
}

await handleOldLibraryItems(ctx)
await handleOldLibraries(ctx)
await handleOldUsers(ctx)

await oldDbFiles.removeOldItemsUsersAndLibrariesFolders()

const elapsed = Date.now() - migrationStart
Logger.info(`[dbMigration] Migration patch 2.3.0+ finished. Elapsed ${(elapsed / 1000).toFixed(2)}s`)
}
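Taken together, the migrate() rework above replaces the single global newRecords map with per-table handle* helpers that each load, transform, and bulkCreate their own rows; roughly (a sketch, not code from the commit, with hypothetical names):

// simplified shape of each handler: load old data, map it, insert it
async function handleMigrateExample(DatabaseModels) {
  const oldRows = await oldDbFiles.loadOldData('example') // 'example' is a placeholder table name
  const newRows = oldRows.map(mapOldRowToNewRow)          // mapOldRowToNewRow is a hypothetical transform
  await DatabaseModels.example.bulkCreate(newRows)
}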
@@ -71,27 +71,11 @@ async function loadDbData(dbpath) {
}
}

module.exports.init = async () => {
const dbs = {
libraryItems: Path.join(global.ConfigPath, 'libraryItems', 'data'),
users: Path.join(global.ConfigPath, 'users', 'data'),
sessions: Path.join(global.ConfigPath, 'sessions', 'data'),
libraries: Path.join(global.ConfigPath, 'libraries', 'data'),
settings: Path.join(global.ConfigPath, 'settings', 'data'),
collections: Path.join(global.ConfigPath, 'collections', 'data'),
playlists: Path.join(global.ConfigPath, 'playlists', 'data'),
authors: Path.join(global.ConfigPath, 'authors', 'data'),
series: Path.join(global.ConfigPath, 'series', 'data'),
feeds: Path.join(global.ConfigPath, 'feeds', 'data')
}

const data = {}
for (const key in dbs) {
data[key] = await loadDbData(dbs[key])
Logger.info(`[oldDbFiles] ${data[key].length} ${key} loaded`)
}

return data
module.exports.loadOldData = async (dbName) => {
const dbPath = Path.join(global.ConfigPath, dbName, 'data')
const dbData = await loadDbData(dbPath) || []
Logger.info(`[oldDbFiles] ${dbData.length} ${dbName} loaded`)
return dbData
}

module.exports.zipWrapOldDb = async () => {
@@ -184,6 +168,59 @@ module.exports.checkHasOldDbZip = async () => {
// Extract oldDb.zip
const zip = new StreamZip.async({ file: oldDbPath })
await zip.extract(null, global.ConfigPath)
await zip.close()

return this.checkHasOldDb()
}

/**
* Used for migration from 2.3.0 -> 2.3.1
* @returns {boolean} true if extracted
*/
module.exports.checkExtractItemsUsersAndLibraries = async () => {
const oldDbPath = Path.join(global.ConfigPath, 'oldDb.zip')

const zip = new StreamZip.async({ file: oldDbPath })
const libraryItemsPath = Path.join(global.ConfigPath, 'libraryItems')
await zip.extract('libraryItems/', libraryItemsPath)

if (!await fs.pathExists(libraryItemsPath)) {
Logger.error(`[oldDbFiles] Failed to extract old libraryItems from oldDb.zip`)
return false
}

const usersPath = Path.join(global.ConfigPath, 'users')
await zip.extract('users/', usersPath)

if (!await fs.pathExists(usersPath)) {
Logger.error(`[oldDbFiles] Failed to extract old users from oldDb.zip`)
await fs.remove(libraryItemsPath) // Remove old library items folder
return false
}

const librariesPath = Path.join(global.ConfigPath, 'libraries')
await zip.extract('libraries/', librariesPath)

if (!await fs.pathExists(librariesPath)) {
Logger.error(`[oldDbFiles] Failed to extract old libraries from oldDb.zip`)
await fs.remove(usersPath) // Remove old users folder
await fs.remove(libraryItemsPath) // Remove old library items folder
return false
}

await zip.close()

return true
}

/**
* Used for migration from 2.3.0 -> 2.3.1
*/
module.exports.removeOldItemsUsersAndLibrariesFolders = async () => {
const libraryItemsPath = Path.join(global.ConfigPath, 'libraryItems')
const usersPath = Path.join(global.ConfigPath, 'users')
const librariesPath = Path.join(global.ConfigPath, 'libraries')
await fs.remove(libraryItemsPath)
await fs.remove(usersPath)
await fs.remove(librariesPath)
}
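With init() replaced by loadOldData above, callers pull one table at a time on demand; a minimal sketch (the require path is illustrative):

const oldDbFiles = require('./oldDbFiles') // assumed path

// Inside an async function: loads <ConfigPath>/users/data, logs the row count, returns [] when nothing is found
const oldUsers = await oldDbFiles.loadOldData('users')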