Mirror of https://github.com/advplyr/audiobookshelf.git (synced 2024-12-25 16:18:54 +01:00)

Migrate backups manager

Commit 254ba1f089 (parent 0a179e4eed)
@@ -95,8 +95,9 @@ export default {
         })
         .catch((error) => {
           this.isBackingUp = false
-          console.error('Failed', error)
-          this.$toast.error(this.$strings.ToastBackupRestoreFailed)
+          console.error('Failed to apply backup', error)
+          const errorMsg = error.response.data || this.$strings.ToastBackupRestoreFailed
+          this.$toast.error(errorMsg)
         })
     },
     deleteBackupClick(backup) {
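Note: the reworked catch handler surfaces the server's error body instead of a fixed toast string. A minimal standalone sketch of the same pattern, assuming an axios-style client; the endpoint path and the showToast helper are illustrative, not taken from this commit:

    const axios = require('axios')

    // Apply a backup and prefer the server-provided error message when present.
    // '/api/backups/<id>/apply' and showToast() are placeholder names for this sketch.
    async function applyBackup(backupId, showToast) {
      try {
        await axios.get(`/api/backups/${backupId}/apply`)
        showToast('Backup applied')
      } catch (error) {
        console.error('Failed to apply backup', error)
        // The controller now responds with res.status(500).send('<reason>'),
        // so error.response.data is a readable string when the server rejected the backup.
        const errorMsg = (error.response && error.response.data) || 'Failed to apply backup'
        showToast(errorMsg)
      }
    }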
@@ -11,7 +11,7 @@ class Database {
   constructor() {
     this.sequelize = null
     this.dbPath = null
-    this.isNew = false // New database.sqlite created
+    this.isNew = false // New absdatabase.sqlite created

     // Temporarily using format of old DB
     // below data should be loaded from the DB as needed
@@ -40,14 +40,14 @@ class Database {

   async checkHasDb() {
     if (!await fs.pathExists(this.dbPath)) {
-      Logger.info(`[Database] database.sqlite not found at ${this.dbPath}`)
+      Logger.info(`[Database] absdatabase.sqlite not found at ${this.dbPath}`)
       return false
     }
     return true
   }

   async init(force = false) {
-    this.dbPath = Path.join(global.ConfigPath, 'database.sqlite')
+    this.dbPath = Path.join(global.ConfigPath, 'absdatabase.sqlite')

     // First check if this is a new database
     this.isNew = !(await this.checkHasDb()) || force
@@ -59,7 +59,7 @@ class Database {
     await this.buildModels(force)
     Logger.info(`[Database] Db initialized`, Object.keys(this.sequelize.models))

-    await this.loadData(force)
+    await this.loadData()
   }

   async connect() {
@@ -83,6 +83,17 @@ class Database {
     }
   }

+  async disconnect() {
+    Logger.info(`[Database] Disconnecting sqlite db`)
+    await this.sequelize.close()
+    this.sequelize = null
+  }
+
+  async reconnect() {
+    Logger.info(`[Database] Reconnecting sqlite db`)
+    await this.init()
+  }
+
   buildModels(force = false) {
     require('./models/User')(this.sequelize)
     require('./models/Library')(this.sequelize)
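Note: disconnect() and reconnect() exist so the backup-apply path can close the sequelize handle, replace the sqlite file on disk, and open it again. A minimal sketch of that call order, assuming the Database singleton defined above; the copyFile step stands in for whatever writes the replacement file:

    const fs = require('fs/promises')
    const Database = require('./Database') // path is illustrative

    // Swap in a replacement sqlite file while no connection is open.
    async function replaceDatabaseFile(replacementPath) {
      await Database.disconnect() // closes sequelize and drops the handle
      await fs.copyFile(replacementPath, Database.dbPath) // dbPath was set by init()
      await Database.reconnect() // runs init() again against the new file
    }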
@@ -109,8 +120,8 @@ class Database {
     return this.sequelize.sync({ force, alter: false })
   }

-  async loadData(force = false) {
-    if (this.isNew && await dbMigration.checkShouldMigrate(force)) {
+  async loadData() {
+    if (this.isNew && await dbMigration.checkShouldMigrate()) {
       Logger.info(`[Database] New database was created and old database was detected - migrating old to new`)
       await dbMigration.migrate(this.models)
     }
@@ -143,6 +154,7 @@ class Database {
   }

   async createRootUser(username, pash, token) {
+    if (!this.sequelize) return false
     const newUser = await this.models.user.createRootUser(username, pash, token)
     if (newUser) {
       this.users.push(newUser)
@@ -152,60 +164,73 @@ class Database {
   }

   updateServerSettings() {
+    if (!this.sequelize) return false
     global.ServerSettings = this.serverSettings.toJSON()
     return this.updateSetting(this.serverSettings)
   }

   updateSetting(settings) {
+    if (!this.sequelize) return false
     return this.models.setting.updateSettingObj(settings.toJSON())
   }

   async createUser(oldUser) {
+    if (!this.sequelize) return false
     await this.models.user.createFromOld(oldUser)
     this.users.push(oldUser)
     return true
   }

   updateUser(oldUser) {
+    if (!this.sequelize) return false
     return this.models.user.updateFromOld(oldUser)
   }

   updateBulkUsers(oldUsers) {
+    if (!this.sequelize) return false
     return Promise.all(oldUsers.map(u => this.updateUser(u)))
   }

   async removeUser(userId) {
+    if (!this.sequelize) return false
     await this.models.user.removeById(userId)
     this.users = this.users.filter(u => u.id !== userId)
   }

   upsertMediaProgress(oldMediaProgress) {
+    if (!this.sequelize) return false
     return this.models.mediaProgress.upsertFromOld(oldMediaProgress)
   }

   removeMediaProgress(mediaProgressId) {
+    if (!this.sequelize) return false
     return this.models.mediaProgress.removeById(mediaProgressId)
   }

   updateBulkBooks(oldBooks) {
+    if (!this.sequelize) return false
     return Promise.all(oldBooks.map(oldBook => this.models.book.saveFromOld(oldBook)))
   }

   async createLibrary(oldLibrary) {
+    if (!this.sequelize) return false
     await this.models.library.createFromOld(oldLibrary)
     this.libraries.push(oldLibrary)
   }

   updateLibrary(oldLibrary) {
+    if (!this.sequelize) return false
     return this.models.library.updateFromOld(oldLibrary)
   }

   async removeLibrary(libraryId) {
+    if (!this.sequelize) return false
     await this.models.library.removeById(libraryId)
     this.libraries = this.libraries.filter(lib => lib.id !== libraryId)
   }

   async createCollection(oldCollection) {
+    if (!this.sequelize) return false
     const newCollection = await this.models.collection.createFromOld(oldCollection)
     // Create CollectionBooks
     if (newCollection) {
@@ -227,6 +252,7 @@ class Database {
   }

   updateCollection(oldCollection) {
+    if (!this.sequelize) return false
     const collectionBooks = []
     let order = 1
     oldCollection.books.forEach((libraryItemId) => {
@@ -242,23 +268,28 @@ class Database {
   }

   async removeCollection(collectionId) {
+    if (!this.sequelize) return false
     await this.models.collection.removeById(collectionId)
     this.collections = this.collections.filter(c => c.id !== collectionId)
   }

   createCollectionBook(collectionBook) {
+    if (!this.sequelize) return false
     return this.models.collectionBook.create(collectionBook)
   }

   createBulkCollectionBooks(collectionBooks) {
+    if (!this.sequelize) return false
     return this.models.collectionBook.bulkCreate(collectionBooks)
   }

   removeCollectionBook(collectionId, bookId) {
+    if (!this.sequelize) return false
     return this.models.collectionBook.removeByIds(collectionId, bookId)
   }

   async createPlaylist(oldPlaylist) {
+    if (!this.sequelize) return false
     const newPlaylist = await this.models.playlist.createFromOld(oldPlaylist)
     if (newPlaylist) {
       const playlistMediaItems = []
@@ -288,6 +319,7 @@ class Database {
   }

   updatePlaylist(oldPlaylist) {
+    if (!this.sequelize) return false
     const playlistMediaItems = []
     let order = 1
     oldPlaylist.items.forEach((item) => {
@@ -304,36 +336,44 @@ class Database {
   }

   async removePlaylist(playlistId) {
+    if (!this.sequelize) return false
     await this.models.playlist.removeById(playlistId)
     this.playlists = this.playlists.filter(p => p.id !== playlistId)
   }

   createPlaylistMediaItem(playlistMediaItem) {
+    if (!this.sequelize) return false
     return this.models.playlistMediaItem.create(playlistMediaItem)
   }

   createBulkPlaylistMediaItems(playlistMediaItems) {
+    if (!this.sequelize) return false
     return this.models.playlistMediaItem.bulkCreate(playlistMediaItems)
   }

   removePlaylistMediaItem(playlistId, mediaItemId) {
+    if (!this.sequelize) return false
     return this.models.playlistMediaItem.removeByIds(playlistId, mediaItemId)
   }

   getLibraryItem(libraryItemId) {
+    if (!this.sequelize) return false
     return this.libraryItems.find(li => li.id === libraryItemId)
   }

   async createLibraryItem(oldLibraryItem) {
+    if (!this.sequelize) return false
     await this.models.libraryItem.fullCreateFromOld(oldLibraryItem)
     this.libraryItems.push(oldLibraryItem)
   }

   updateLibraryItem(oldLibraryItem) {
+    if (!this.sequelize) return false
     return this.models.libraryItem.fullUpdateFromOld(oldLibraryItem)
   }

   async updateBulkLibraryItems(oldLibraryItems) {
+    if (!this.sequelize) return false
     let updatesMade = 0
     for (const oldLibraryItem of oldLibraryItems) {
       const hasUpdates = await this.models.libraryItem.fullUpdateFromOld(oldLibraryItem)
@@ -343,6 +383,7 @@ class Database {
   }

   async createBulkLibraryItems(oldLibraryItems) {
+    if (!this.sequelize) return false
     for (const oldLibraryItem of oldLibraryItems) {
       await this.models.libraryItem.fullCreateFromOld(oldLibraryItem)
       this.libraryItems.push(oldLibraryItem)
@@ -350,68 +391,82 @@ class Database {
   }

   async removeLibraryItem(libraryItemId) {
+    if (!this.sequelize) return false
     await this.models.libraryItem.removeById(libraryItemId)
     this.libraryItems = this.libraryItems.filter(li => li.id !== libraryItemId)
   }

   async createFeed(oldFeed) {
+    if (!this.sequelize) return false
     await this.models.feed.fullCreateFromOld(oldFeed)
     this.feeds.push(oldFeed)
   }

   updateFeed(oldFeed) {
+    if (!this.sequelize) return false
     return this.models.feed.fullUpdateFromOld(oldFeed)
   }

   async removeFeed(feedId) {
+    if (!this.sequelize) return false
     await this.models.feed.removeById(feedId)
     this.feeds = this.feeds.filter(f => f.id !== feedId)
   }

   updateSeries(oldSeries) {
+    if (!this.sequelize) return false
     return this.models.series.updateFromOld(oldSeries)
   }

   async createSeries(oldSeries) {
+    if (!this.sequelize) return false
     await this.models.series.createFromOld(oldSeries)
     this.series.push(oldSeries)
   }

   async createBulkSeries(oldSeriesObjs) {
+    if (!this.sequelize) return false
     await this.models.series.createBulkFromOld(oldSeriesObjs)
     this.series.push(...oldSeriesObjs)
   }

   async removeSeries(seriesId) {
+    if (!this.sequelize) return false
     await this.models.series.removeById(seriesId)
     this.series = this.series.filter(se => se.id !== seriesId)
   }

   async createAuthor(oldAuthor) {
+    if (!this.sequelize) return false
     await this.models.createFromOld(oldAuthor)
     this.authors.push(oldAuthor)
   }

   async createBulkAuthors(oldAuthors) {
+    if (!this.sequelize) return false
     await this.models.author.createBulkFromOld(oldAuthors)
     this.authors.push(...oldAuthors)
   }

   updateAuthor(oldAuthor) {
+    if (!this.sequelize) return false
     return this.models.author.updateFromOld(oldAuthor)
   }

   async removeAuthor(authorId) {
+    if (!this.sequelize) return false
     await this.models.author.removeById(authorId)
     this.authors = this.authors.filter(au => au.id !== authorId)
   }

   async createBulkBookAuthors(bookAuthors) {
+    if (!this.sequelize) return false
     await this.models.bookAuthor.bulkCreate(bookAuthors)
     this.authors.push(...bookAuthors)
   }

   async removeBulkBookAuthors(authorId = null, bookId = null) {
+    if (!this.sequelize) return false
     if (!authorId && !bookId) return
     await this.models.bookAuthor.removeByIds(authorId, bookId)
     this.authors = this.authors.filter(au => {
@@ -422,34 +477,42 @@ class Database {
   }

   getPlaybackSessions(where = null) {
+    if (!this.sequelize) return false
     return this.models.playbackSession.getOldPlaybackSessions(where)
   }

   getPlaybackSession(sessionId) {
+    if (!this.sequelize) return false
     return this.models.playbackSession.getById(sessionId)
   }

   createPlaybackSession(oldSession) {
+    if (!this.sequelize) return false
     return this.models.playbackSession.createFromOld(oldSession)
   }

   updatePlaybackSession(oldSession) {
+    if (!this.sequelize) return false
     return this.models.playbackSession.updateFromOld(oldSession)
   }

   removePlaybackSession(sessionId) {
+    if (!this.sequelize) return false
     return this.models.playbackSession.removeById(sessionId)
   }

   getDeviceByDeviceId(deviceId) {
+    if (!this.sequelize) return false
     return this.models.device.getOldDeviceByDeviceId(deviceId)
   }

   updateDevice(oldDevice) {
+    if (!this.sequelize) return false
     return this.models.device.updateFromOld(oldDevice)
   }

   createDevice(oldDevice) {
+    if (!this.sequelize) return false
     return this.models.device.createFromOld(oldDevice)
   }
 }
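Note: every data method above now opens with the same "if (!this.sequelize) return false" guard, so calls made before init() or while the connection is closed for a backup apply fail soft instead of throwing on a null handle. A caller-side sketch, assuming Database is exported as a singleton instance (the require('../Database') calls elsewhere in this commit suggest it is):

    const Database = require('./Database') // path is illustrative

    async function safeUpdateUser(oldUser) {
      const result = await Database.updateUser(oldUser)
      if (result === false) {
        // Database was not connected (pre-init, or mid backup apply)
        console.warn('[Example] User update skipped, database not connected')
      }
      return result
    }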
@@ -29,7 +29,7 @@ const CoverManager = require('./managers/CoverManager')
 const AbMergeManager = require('./managers/AbMergeManager')
 const CacheManager = require('./managers/CacheManager')
 const LogManager = require('./managers/LogManager')
-// const BackupManager = require('./managers/BackupManager') // TODO
+const BackupManager = require('./managers/BackupManager')
 const PlaybackSessionManager = require('./managers/PlaybackSessionManager')
 const PodcastManager = require('./managers/PodcastManager')
 const AudioMetadataMangaer = require('./managers/AudioMetadataManager')
@@ -59,7 +59,6 @@ class Server {
       filePerms.setDefaultDirSync(global.MetadataPath, false)
     }

-    // this.db = new Db()
     this.watcher = new Watcher()
     this.auth = new Auth()

@@ -67,7 +66,7 @@ class Server {
     this.taskManager = new TaskManager()
     this.notificationManager = new NotificationManager()
     this.emailManager = new EmailManager()
-    // this.backupManager = new BackupManager(this.db)
+    this.backupManager = new BackupManager()
     this.logManager = new LogManager()
     this.cacheManager = new CacheManager()
     this.abMergeManager = new AbMergeManager(this.taskManager)
@@ -109,7 +108,7 @@ class Server {
     await this.purgeMetadata() // Remove metadata folders without library item
     await this.cacheManager.ensureCachePaths()

-    // await this.backupManager.init() // TODO: Implement backups
+    await this.backupManager.init()
     await this.logManager.init()
     await this.apiRouter.checkRemoveEmptySeries(Database.series) // Remove empty series
     await this.rssFeedManager.init()
@@ -43,9 +43,8 @@ class BackupController {
     res.sendFile(req.backup.fullPath)
   }

-  async apply(req, res) {
-    await this.backupManager.requestApplyBackup(req.backup)
-    res.sendStatus(200)
+  apply(req, res) {
+    this.backupManager.requestApplyBackup(req.backup, res)
   }

   middleware(req, res, next) {
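Note: apply() is no longer async and no longer sends its own 200; the manager owns the response so it can reply with a 500 and a reason when the archive lacks absdatabase.sqlite. A sketch of how such a route could be wired with Express; the path, verb, and middleware order are assumptions for illustration, not taken from this commit:

    const express = require('express')

    // Hypothetical wiring: middleware resolves req.backup from :id, then apply()
    // hands the response object to BackupManager.requestApplyBackup(req.backup, res).
    function mountBackupRoutes(backupController) {
      const router = express.Router()
      router.get('/backups/:id/apply',
        (req, res, next) => backupController.middleware(req, res, next),
        (req, res) => backupController.apply(req, res))
      return router
    }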
@@ -1,6 +1,8 @@
+const sqlite3 = require('sqlite3')
 const Path = require('path')
 const Logger = require('../Logger')
 const SocketAuthority = require('../SocketAuthority')
+const Database = require('../Database')

 const cron = require('../libs/nodeCron')
 const fs = require('../libs/fsExtra')
@@ -14,27 +16,32 @@ const filePerms = require('../utils/filePerms')
 const Backup = require('../objects/Backup')

 class BackupManager {
-  constructor(db) {
+  constructor() {
     this.BackupPath = Path.join(global.MetadataPath, 'backups')
     this.ItemsMetadataPath = Path.join(global.MetadataPath, 'items')
     this.AuthorsMetadataPath = Path.join(global.MetadataPath, 'authors')

-    this.db = db
-
     this.scheduleTask = null

     this.backups = []
   }

-  get serverSettings() {
-    return this.db.serverSettings || {}
+  get backupSchedule() {
+    return global.ServerSettings.backupSchedule
+  }
+
+  get backupsToKeep() {
+    return global.ServerSettings.backupsToKeep || 2
+  }
+
+  get maxBackupSize() {
+    return global.ServerSettings.maxBackupSize || 1
   }

   async init() {
     const backupsDirExists = await fs.pathExists(this.BackupPath)
     if (!backupsDirExists) {
       await fs.ensureDir(this.BackupPath)
       await filePerms.setDefault(this.BackupPath)
     }

     await this.loadBackups()
@@ -42,42 +49,42 @@ class BackupManager {
   }

   scheduleCron() {
-    if (!this.serverSettings.backupSchedule) {
+    if (!this.backupSchedule) {
       Logger.info(`[BackupManager] Auto Backups are disabled`)
       return
     }
     try {
-      var cronSchedule = this.serverSettings.backupSchedule
+      var cronSchedule = this.backupSchedule
       this.scheduleTask = cron.schedule(cronSchedule, this.runBackup.bind(this))
     } catch (error) {
-      Logger.error(`[BackupManager] Failed to schedule backup cron ${this.serverSettings.backupSchedule}`, error)
+      Logger.error(`[BackupManager] Failed to schedule backup cron ${this.backupSchedule}`, error)
     }
   }

   updateCronSchedule() {
-    if (this.scheduleTask && !this.serverSettings.backupSchedule) {
+    if (this.scheduleTask && !this.backupSchedule) {
       Logger.info(`[BackupManager] Disabling backup schedule`)
       if (this.scheduleTask.stop) this.scheduleTask.stop()
       this.scheduleTask = null
-    } else if (!this.scheduleTask && this.serverSettings.backupSchedule) {
-      Logger.info(`[BackupManager] Starting backup schedule ${this.serverSettings.backupSchedule}`)
+    } else if (!this.scheduleTask && this.backupSchedule) {
+      Logger.info(`[BackupManager] Starting backup schedule ${this.backupSchedule}`)
       this.scheduleCron()
-    } else if (this.serverSettings.backupSchedule) {
-      Logger.info(`[BackupManager] Restarting backup schedule ${this.serverSettings.backupSchedule}`)
+    } else if (this.backupSchedule) {
+      Logger.info(`[BackupManager] Restarting backup schedule ${this.backupSchedule}`)
       if (this.scheduleTask.stop) this.scheduleTask.stop()
       this.scheduleCron()
     }
   }

   async uploadBackup(req, res) {
-    var backupFile = req.files.file
+    const backupFile = req.files.file
     if (Path.extname(backupFile.name) !== '.audiobookshelf') {
       Logger.error(`[BackupManager] Invalid backup file uploaded "${backupFile.name}"`)
       return res.status(500).send('Invalid backup file')
     }

-    var tempPath = Path.join(this.BackupPath, backupFile.name)
-    var success = await backupFile.mv(tempPath).then(() => true).catch((error) => {
+    const tempPath = Path.join(this.BackupPath, backupFile.name)
+    const success = await backupFile.mv(tempPath).then(() => true).catch((error) => {
       Logger.error('[BackupManager] Failed to move backup file', path, error)
       return false
     })
@@ -86,10 +93,17 @@ class BackupManager {
     }

     const zip = new StreamZip.async({ file: tempPath })
-    const data = await zip.entryData('details')
-    var details = data.toString('utf8').split('\n')
-
-    var backup = new Backup({ details, fullPath: tempPath })
+
+    const entries = await zip.entries()
+    if (!Object.keys(entries).includes('absdatabase.sqlite')) {
+      Logger.error(`[BackupManager] Invalid backup with no absdatabase.sqlite file - might be a backup created on an old Audiobookshelf server.`)
+      return res.status(500).send('Invalid backup with no absdatabase.sqlite file - might be a backup created on an old Audiobookshelf server.')
+    }
+
+    const data = await zip.entryData('details')
+    const details = data.toString('utf8').split('\n')
+
+    const backup = new Backup({ details, fullPath: tempPath })

     if (!backup.serverVersion) {
       Logger.error(`[BackupManager] Invalid backup with no server version - might be a backup created before version 2.0.0`)
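Note: uploads are now validated against the zip's entry table before the details file is read, so archives produced by older servers are rejected up front. The same check pulled out into a standalone helper using node-stream-zip (the file path is just an example):

    const StreamZip = require('node-stream-zip')

    // Returns true when the archive contains the new-format sqlite database.
    async function isNewFormatBackup(backupFilePath) {
      const zip = new StreamZip.async({ file: backupFilePath })
      try {
        const entries = await zip.entries()
        return Object.keys(entries).includes('absdatabase.sqlite')
      } finally {
        await zip.close()
      }
    }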
@@ -98,7 +112,7 @@ class BackupManager {

     backup.fileSize = await getFileSize(backup.fullPath)

-    var existingBackupIndex = this.backups.findIndex(b => b.id === backup.id)
+    const existingBackupIndex = this.backups.findIndex(b => b.id === backup.id)
     if (existingBackupIndex >= 0) {
       Logger.warn(`[BackupManager] Backup already exists with id ${backup.id} - overwriting`)
       this.backups.splice(existingBackupIndex, 1, backup)
@@ -122,14 +136,23 @@ class BackupManager {
     }
   }

-  async requestApplyBackup(backup) {
+  async requestApplyBackup(backup, res) {
     const zip = new StreamZip.async({ file: backup.fullPath })
-    await zip.extract('config/', global.ConfigPath)
-    if (backup.backupMetadataCovers) {
-      await zip.extract('metadata-items/', this.ItemsMetadataPath)
-      await zip.extract('metadata-authors/', this.AuthorsMetadataPath)
+
+    const entries = await zip.entries()
+    if (!Object.keys(entries).includes('absdatabase.sqlite')) {
+      Logger.error(`[BackupManager] Cannot apply old backup ${backup.fullPath}`)
+      return res.status(500).send('Invalid backup file. Does not include absdatabase.sqlite. This might be from an older Audiobookshelf server.')
     }
-    await this.db.reinit()
+
+    await Database.disconnect()
+
+    await zip.extract('absdatabase.sqlite', global.ConfigPath)
+    await zip.extract('metadata-items/', this.ItemsMetadataPath)
+    await zip.extract('metadata-authors/', this.AuthorsMetadataPath)
+
+    await Database.reconnect()
+
     SocketAuthority.emitter('backup_applied')
   }

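Note: on success the manager emits backup_applied through SocketAuthority so connected clients know the database under them was just replaced. A client-side sketch; the event name comes from this diff, but the socket.io-client usage and server address are assumptions for illustration:

    const { io } = require('socket.io-client')

    const socket = io('http://localhost:3333') // example address

    socket.on('backup_applied', () => {
      // The server database was swapped out, so refetch everything
      // (a browser client might simply reload the page here).
      console.log('Backup applied on the server, reloading state...')
    })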
@@ -182,44 +205,52 @@ class BackupManager {
   async runBackup() {
     // Check if Metadata Path is inside Config Path (otherwise there will be an infinite loop as the archiver tries to zip itself)
     Logger.info(`[BackupManager] Running Backup`)
-    var newBackup = new Backup()
+    const newBackup = new Backup()
+    newBackup.setData(this.BackupPath)

-    const newBackData = {
-      backupMetadataCovers: this.serverSettings.backupMetadataCovers,
-      backupDirPath: this.BackupPath
+    await fs.ensureDir(this.AuthorsMetadataPath)
+
+    // Create backup sqlite file
+    const sqliteBackupPath = await this.backupSqliteDb(newBackup).catch((error) => {
+      Logger.error(`[BackupManager] Failed to backup sqlite db`, error)
+      return false
+    })
+
+    if (!sqliteBackupPath) {
+      return false
     }
-    newBackup.setData(newBackData)

-    var metadataAuthorsPath = this.AuthorsMetadataPath
-    if (!await fs.pathExists(metadataAuthorsPath)) metadataAuthorsPath = null
-
-    var zipResult = await this.zipBackup(metadataAuthorsPath, newBackup).then(() => true).catch((error) => {
+    // Zip sqlite file, /metadata/items, and /metadata/authors folders
+    const zipResult = await this.zipBackup(sqliteBackupPath, newBackup).catch((error) => {
       Logger.error(`[BackupManager] Backup Failed ${error}`)
       return false
     })
-    if (zipResult) {
-      Logger.info(`[BackupManager] Backup successful ${newBackup.id}`)
-      await filePerms.setDefault(newBackup.fullPath)
-      newBackup.fileSize = await getFileSize(newBackup.fullPath)
-      var existingIndex = this.backups.findIndex(b => b.id === newBackup.id)
-      if (existingIndex >= 0) {
-        this.backups.splice(existingIndex, 1, newBackup)
-      } else {
-        this.backups.push(newBackup)
-      }
-
-      // Check remove oldest backup
-      if (this.backups.length > this.serverSettings.backupsToKeep) {
-        this.backups.sort((a, b) => a.createdAt - b.createdAt)
+
+    // Remove sqlite backup
+    await fs.remove(sqliteBackupPath)

-        var oldBackup = this.backups.shift()
-        Logger.debug(`[BackupManager] Removing old backup ${oldBackup.id}`)
-        this.removeBackup(oldBackup)
-      }
-      return true
+    if (!zipResult) return false
+
+    Logger.info(`[BackupManager] Backup successful ${newBackup.id}`)
+
+    newBackup.fileSize = await getFileSize(newBackup.fullPath)
+
+    const existingIndex = this.backups.findIndex(b => b.id === newBackup.id)
+    if (existingIndex >= 0) {
+      this.backups.splice(existingIndex, 1, newBackup)
     } else {
-      return false
+      this.backups.push(newBackup)
     }
+
+    // Check remove oldest backup
+    if (this.backups.length > this.backupsToKeep) {
+      this.backups.sort((a, b) => a.createdAt - b.createdAt)
+
+      const oldBackup = this.backups.shift()
+      Logger.debug(`[BackupManager] Removing old backup ${oldBackup.id}`)
+      this.removeBackup(oldBackup)
+    }
+    return true
   }

   async removeBackup(backup) {
@@ -233,7 +264,35 @@ class BackupManager {
     }
   }

-  zipBackup(metadataAuthorsPath, backup) {
+  /**
+   * @see https://github.com/TryGhost/node-sqlite3/pull/1116
+   * @param {Backup} backup
+   * @promise
+   */
+  backupSqliteDb(backup) {
+    const db = new sqlite3.Database(Database.dbPath)
+    const dbFilePath = Path.join(global.ConfigPath, `absdatabase.${backup.id}.sqlite`)
+    return new Promise(async (resolve, reject) => {
+      const backup = db.backup(dbFilePath)
+      backup.step(-1)
+      backup.finish()
+
+      // Max time ~2 mins
+      for (let i = 0; i < 240; i++) {
+        if (backup.completed) {
+          return resolve(dbFilePath)
+        } else if (backup.failed) {
+          return reject(backup.message || 'Unknown failure reason')
+        }
+        await new Promise((r) => setTimeout(r, 500))
+      }
+
+      Logger.error(`[BackupManager] Backup sqlite timed out`)
+      reject('Backup timed out')
+    })
+  }
+
+  zipBackup(sqliteBackupPath, backup) {
     return new Promise((resolve, reject) => {
       // create a file to stream archive data to
       const output = fs.createWriteStream(backup.fullPath)
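Note: backupSqliteDb() copies the live database with node-sqlite3's online backup API (see the linked PR) and polls the completed/failed flags rather than passing callbacks to step()/finish(). A possible follow-up, not part of this commit, is to open the snapshot read-only and run an integrity check before zipping it:

    const sqlite3 = require('sqlite3')

    // Optional sanity check on the snapshot produced by backupSqliteDb().
    function verifySqliteBackup(dbFilePath) {
      return new Promise((resolve, reject) => {
        const db = new sqlite3.Database(dbFilePath, sqlite3.OPEN_READONLY, (openErr) => {
          if (openErr) return reject(openErr)
          db.get('PRAGMA integrity_check', (err, row) => {
            db.close()
            if (err) return reject(err)
            resolve(Boolean(row) && row.integrity_check === 'ok')
          })
        })
      })
    }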
@@ -245,7 +304,7 @@ class BackupManager {
       // 'close' event is fired only when a file descriptor is involved
       output.on('close', () => {
         Logger.info('[BackupManager]', archive.pointer() + ' total bytes')
-        resolve()
+        resolve(true)
       })

       // This event is fired when the data source is drained no matter what was the data source.
@@ -281,7 +340,7 @@ class BackupManager {
         reject(err)
       })
       archive.on('progress', ({ fs: fsobj }) => {
-        const maxBackupSizeInBytes = this.serverSettings.maxBackupSize * 1000 * 1000 * 1000
+        const maxBackupSizeInBytes = this.maxBackupSize * 1000 * 1000 * 1000
         if (fsobj.processedBytes > maxBackupSizeInBytes) {
           Logger.error(`[BackupManager] Archiver is too large - aborting to prevent endless loop, Bytes Processed: ${fsobj.processedBytes}`)
           archive.abort()
@@ -295,26 +354,9 @@ class BackupManager {
       // pipe archive data to the file
       archive.pipe(output)

-      archive.directory(Path.join(this.db.LibraryItemsPath, 'data'), 'config/libraryItems/data')
-      archive.directory(Path.join(this.db.UsersPath, 'data'), 'config/users/data')
-      archive.directory(Path.join(this.db.SessionsPath, 'data'), 'config/sessions/data')
-      archive.directory(Path.join(this.db.LibrariesPath, 'data'), 'config/libraries/data')
-      archive.directory(Path.join(this.db.SettingsPath, 'data'), 'config/settings/data')
-      archive.directory(Path.join(this.db.CollectionsPath, 'data'), 'config/collections/data')
-      archive.directory(Path.join(this.db.AuthorsPath, 'data'), 'config/authors/data')
-      archive.directory(Path.join(this.db.SeriesPath, 'data'), 'config/series/data')
-      archive.directory(Path.join(this.db.PlaylistsPath, 'data'), 'config/playlists/data')
-      archive.directory(Path.join(this.db.FeedsPath, 'data'), 'config/feeds/data')
-
-      if (this.serverSettings.backupMetadataCovers) {
-        Logger.debug(`[BackupManager] Backing up Metadata Items "${this.ItemsMetadataPath}"`)
-        archive.directory(this.ItemsMetadataPath, 'metadata-items')
-
-        if (metadataAuthorsPath) {
-          Logger.debug(`[BackupManager] Backing up Metadata Authors "${metadataAuthorsPath}"`)
-          archive.directory(metadataAuthorsPath, 'metadata-authors')
-        }
-      }
+      archive.file(sqliteBackupPath, { name: 'absdatabase.sqlite' })
+      archive.directory(this.ItemsMetadataPath, 'metadata-items')
+      archive.directory(this.AuthorsMetadataPath, 'metadata-authors')

       archive.append(backup.detailsString, { name: 'details' })

@@ -6,7 +6,6 @@ class Backup {
   constructor(data = null) {
     this.id = null
     this.datePretty = null
-    this.backupMetadataCovers = null

    this.backupDirPath = null
    this.filename = null
@@ -23,9 +22,9 @@ class Backup {
   }

   get detailsString() {
-    var details = []
+    const details = []
     details.push(this.id)
-    details.push(this.backupMetadataCovers ? '1' : '0')
+    details.push('1') // Unused old boolean spot
     details.push(this.createdAt)
     details.push(this.serverVersion)
     return details.join('\n')
@@ -33,7 +32,6 @@ class Backup {

   construct(data) {
     this.id = data.details[0]
-    this.backupMetadataCovers = data.details[1] === '1'
     this.createdAt = Number(data.details[2])
     this.serverVersion = data.details[3] || null

@@ -48,7 +46,6 @@ class Backup {
   toJSON() {
     return {
       id: this.id,
-      backupMetadataCovers: this.backupMetadataCovers,
       backupDirPath: this.backupDirPath,
       datePretty: this.datePretty,
       fullPath: this.fullPath,
@@ -60,13 +57,11 @@ class Backup {
     }
   }

-  setData(data) {
+  setData(backupDirPath) {
     this.id = date.format(new Date(), 'YYYY-MM-DD[T]HHmm')
     this.datePretty = date.format(new Date(), 'ddd, MMM D YYYY HH:mm')

-    this.backupMetadataCovers = data.backupMetadataCovers
-
-    this.backupDirPath = data.backupDirPath
+    this.backupDirPath = backupDirPath

     this.filename = this.id + '.audiobookshelf'
     this.path = Path.join('backups', this.filename)
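Note: the details entry written into each archive is a newline-joined list: id, a now-unused '1' where the metadata-covers flag used to be, createdAt, and serverVersion. A small round-trip sketch matching detailsString and construct() above; the helper names are illustrative:

    // Serialize the same fields Backup#detailsString writes.
    function serializeDetails({ id, createdAt, serverVersion }) {
      return [id, '1' /* unused old boolean spot */, createdAt, serverVersion].join('\n')
    }

    // Parse them back the way Backup#construct reads data.details.
    function parseDetails(detailsFileContents) {
      const details = detailsFileContents.split('\n')
      return {
        id: details[0],
        createdAt: Number(details[2]),
        serverVersion: details[3] || null
      }
    }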
@@ -29,7 +29,6 @@ class ServerSettings {
     this.backupSchedule = false // If false then auto-backups are disabled
     this.backupsToKeep = 2
     this.maxBackupSize = 1
-    this.backupMetadataCovers = true

     // Logger
     this.loggerDailyLogsToKeep = 7
@@ -82,7 +81,6 @@ class ServerSettings {
     this.backupSchedule = settings.backupSchedule || false
     this.backupsToKeep = settings.backupsToKeep || 2
     this.maxBackupSize = settings.maxBackupSize || 1
-    this.backupMetadataCovers = settings.backupMetadataCovers !== false

     this.loggerDailyLogsToKeep = settings.loggerDailyLogsToKeep || 7
     this.loggerScannerLogsToKeep = settings.loggerScannerLogsToKeep || 2
@@ -145,7 +143,6 @@ class ServerSettings {
       backupSchedule: this.backupSchedule,
       backupsToKeep: this.backupsToKeep,
       maxBackupSize: this.maxBackupSize,
-      backupMetadataCovers: this.backupMetadataCovers,
       loggerDailyLogsToKeep: this.loggerDailyLogsToKeep,
       loggerScannerLogsToKeep: this.loggerScannerLogsToKeep,
       homeBookshelfView: this.homeBookshelfView,
@@ -798,8 +798,7 @@ module.exports.migrate = async (DatabaseModels) => {
 /**
  * @returns {boolean} true if old database exists
  */
-module.exports.checkShouldMigrate = async (force = false) => {
+module.exports.checkShouldMigrate = async () => {
   if (await oldDbFiles.checkHasOldDb()) return true
-  if (!force) return false
   return oldDbFiles.checkHasOldDbZip()
 }