Merge pull request #3378 from mikiher/migration-manager

Add db migration management infrastructure
advplyr 2024-09-10 16:50:39 -05:00 committed by GitHub
commit fac5de582d
No known key found for this signature in database
GPG Key ID: B5690EEEBB952194
24 changed files with 1745 additions and 159 deletions

201
package-lock.json generated

@@ -21,6 +21,7 @@
"p-throttle": "^4.1.1",
"passport": "^0.6.0",
"passport-jwt": "^4.0.1",
"semver": "^7.6.3",
"sequelize": "^6.35.2",
"socket.io": "^4.5.4",
"sqlite3": "^5.1.6",
@@ -173,6 +174,15 @@
"integrity": "sha512-sGkPx+VjMtmA6MX27oA4FBFELFCZZ4S4XqeGOXCv68tT+jb3vk/RyaKWP0PTKyWtmLSM0b+adUTEvbs1PEaH2w==",
"dev": true
},
"node_modules/@babel/core/node_modules/semver": {
"version": "6.3.1",
"resolved": "https://registry.npmjs.org/semver/-/semver-6.3.1.tgz",
"integrity": "sha512-BR7VvDCVHO+q2xBEWskxS6DJE1qRnb7DxzUrogb71CWoSficBxYsiAGd+Kl0mmq/MprG9yArRkyrQxTO6XjMzA==",
"dev": true,
"bin": {
"semver": "bin/semver.js"
}
},
"node_modules/@babel/generator": {
"version": "7.23.3",
"resolved": "https://registry.npmjs.org/@babel/generator/-/generator-7.23.3.tgz",
@@ -213,6 +223,15 @@
"yallist": "^3.0.2"
}
},
"node_modules/@babel/helper-compilation-targets/node_modules/semver": {
"version": "6.3.1",
"resolved": "https://registry.npmjs.org/semver/-/semver-6.3.1.tgz",
"integrity": "sha512-BR7VvDCVHO+q2xBEWskxS6DJE1qRnb7DxzUrogb71CWoSficBxYsiAGd+Kl0mmq/MprG9yArRkyrQxTO6XjMzA==",
"dev": true,
"bin": {
"semver": "bin/semver.js"
}
},
"node_modules/@babel/helper-compilation-targets/node_modules/yallist": {
"version": "3.1.1",
"resolved": "https://registry.npmjs.org/yallist/-/yallist-3.1.1.tgz",
@@ -586,17 +605,6 @@
"node-pre-gyp": "bin/node-pre-gyp"
}
},
"node_modules/@mapbox/node-pre-gyp/node_modules/lru-cache": {
"version": "6.0.0",
"resolved": "https://registry.npmjs.org/lru-cache/-/lru-cache-6.0.0.tgz",
"integrity": "sha512-Jo6dJ04CmSjuznwJSS3pUeWmd/H0ffTlkXXgwZi+eq1UCmqQwCh+eLsYOYCwY991i2Fah4h1BEMCx4qThGbsiA==",
"dependencies": {
"yallist": "^4.0.0"
},
"engines": {
"node": ">=10"
}
},
"node_modules/@mapbox/node-pre-gyp/node_modules/nopt": {
"version": "5.0.0",
"resolved": "https://registry.npmjs.org/nopt/-/nopt-5.0.0.tgz",
@@ -611,20 +619,6 @@
"node": ">=6"
}
},
"node_modules/@mapbox/node-pre-gyp/node_modules/semver": {
"version": "7.5.3",
"resolved": "https://registry.npmjs.org/semver/-/semver-7.5.3.tgz",
"integrity": "sha512-QBlUtyVk/5EeHbi7X0fw6liDZc7BBmEaSYn01fMU1OUYbf6GPsbTtd8WmnqbI20SeycoHSeiybkE/q1Q+qlThQ==",
"dependencies": {
"lru-cache": "^6.0.0"
},
"bin": {
"semver": "bin/semver.js"
},
"engines": {
"node": ">=10"
}
},
"node_modules/@npmcli/fs": {
"version": "1.1.1",
"resolved": "https://registry.npmjs.org/@npmcli/fs/-/fs-1.1.1.tgz",
@@ -635,33 +629,6 @@
"semver": "^7.3.5"
}
},
"node_modules/@npmcli/fs/node_modules/lru-cache": {
"version": "6.0.0",
"resolved": "https://registry.npmjs.org/lru-cache/-/lru-cache-6.0.0.tgz",
"integrity": "sha512-Jo6dJ04CmSjuznwJSS3pUeWmd/H0ffTlkXXgwZi+eq1UCmqQwCh+eLsYOYCwY991i2Fah4h1BEMCx4qThGbsiA==",
"optional": true,
"dependencies": {
"yallist": "^4.0.0"
},
"engines": {
"node": ">=10"
}
},
"node_modules/@npmcli/fs/node_modules/semver": {
"version": "7.5.3",
"resolved": "https://registry.npmjs.org/semver/-/semver-7.5.3.tgz",
"integrity": "sha512-QBlUtyVk/5EeHbi7X0fw6liDZc7BBmEaSYn01fMU1OUYbf6GPsbTtd8WmnqbI20SeycoHSeiybkE/q1Q+qlThQ==",
"optional": true,
"dependencies": {
"lru-cache": "^6.0.0"
},
"bin": {
"semver": "bin/semver.js"
},
"engines": {
"node": ">=10"
}
},
"node_modules/@npmcli/move-file": {
"version": "1.1.2",
"resolved": "https://registry.npmjs.org/@npmcli/move-file/-/move-file-1.1.2.tgz",
@@ -2576,6 +2543,15 @@
"node": ">=8"
}
},
"node_modules/istanbul-lib-instrument/node_modules/semver": {
"version": "6.3.1",
"resolved": "https://registry.npmjs.org/semver/-/semver-6.3.1.tgz",
"integrity": "sha512-BR7VvDCVHO+q2xBEWskxS6DJE1qRnb7DxzUrogb71CWoSficBxYsiAGd+Kl0mmq/MprG9yArRkyrQxTO6XjMzA==",
"dev": true,
"bin": {
"semver": "bin/semver.js"
}
},
"node_modules/istanbul-lib-processinfo": {
"version": "2.0.3",
"resolved": "https://registry.npmjs.org/istanbul-lib-processinfo/-/istanbul-lib-processinfo-2.0.3.tgz",
@@ -2628,18 +2604,6 @@
"node": ">=8"
}
},
"node_modules/istanbul-lib-report/node_modules/lru-cache": {
"version": "6.0.0",
"resolved": "https://registry.npmjs.org/lru-cache/-/lru-cache-6.0.0.tgz",
"integrity": "sha512-Jo6dJ04CmSjuznwJSS3pUeWmd/H0ffTlkXXgwZi+eq1UCmqQwCh+eLsYOYCwY991i2Fah4h1BEMCx4qThGbsiA==",
"dev": true,
"dependencies": {
"yallist": "^4.0.0"
},
"engines": {
"node": ">=10"
}
},
"node_modules/istanbul-lib-report/node_modules/make-dir": {
"version": "4.0.0",
"resolved": "https://registry.npmjs.org/make-dir/-/make-dir-4.0.0.tgz",
@@ -2655,21 +2619,6 @@
"url": "https://github.com/sponsors/sindresorhus"
}
},
"node_modules/istanbul-lib-report/node_modules/semver": {
"version": "7.5.4",
"resolved": "https://registry.npmjs.org/semver/-/semver-7.5.4.tgz",
"integrity": "sha512-1bCSESV6Pv+i21Hvpxp3Dx+pSD8lIPt8uVjRrxAUt/nbswYc+tK6Y2btiULjd4+fnq15PX+nqQDC7Oft7WkwcA==",
"dev": true,
"dependencies": {
"lru-cache": "^6.0.0"
},
"bin": {
"semver": "bin/semver.js"
},
"engines": {
"node": ">=10"
}
},
"node_modules/istanbul-lib-report/node_modules/supports-color": {
"version": "7.2.0",
"resolved": "https://registry.npmjs.org/supports-color/-/supports-color-7.2.0.tgz",
@@ -2804,36 +2753,11 @@
"npm": ">=6"
}
},
"node_modules/jsonwebtoken/node_modules/lru-cache": {
"version": "6.0.0",
"resolved": "https://registry.npmjs.org/lru-cache/-/lru-cache-6.0.0.tgz",
"integrity": "sha512-Jo6dJ04CmSjuznwJSS3pUeWmd/H0ffTlkXXgwZi+eq1UCmqQwCh+eLsYOYCwY991i2Fah4h1BEMCx4qThGbsiA==",
"dependencies": {
"yallist": "^4.0.0"
},
"engines": {
"node": ">=10"
}
},
"node_modules/jsonwebtoken/node_modules/ms": {
"version": "2.1.3",
"resolved": "https://registry.npmjs.org/ms/-/ms-2.1.3.tgz",
"integrity": "sha512-6FlzubTLZG3J2a/NVCAleEhjzq5oxgHyaCU9yYXvcLsvoVaHJq/s5xXI6/XXP6tz7R9xAOtHnSO/tXtF3WRTlA=="
},
"node_modules/jsonwebtoken/node_modules/semver": {
"version": "7.5.4",
"resolved": "https://registry.npmjs.org/semver/-/semver-7.5.4.tgz",
"integrity": "sha512-1bCSESV6Pv+i21Hvpxp3Dx+pSD8lIPt8uVjRrxAUt/nbswYc+tK6Y2btiULjd4+fnq15PX+nqQDC7Oft7WkwcA==",
"dependencies": {
"lru-cache": "^6.0.0"
},
"bin": {
"semver": "bin/semver.js"
},
"engines": {
"node": ">=10"
}
},
"node_modules/just-extend": {
"version": "4.2.1",
"resolved": "https://registry.npmjs.org/just-extend/-/just-extend-4.2.1.tgz",
@@ -2970,6 +2894,14 @@
"url": "https://github.com/sponsors/sindresorhus"
}
},
"node_modules/make-dir/node_modules/semver": {
"version": "6.3.1",
"resolved": "https://registry.npmjs.org/semver/-/semver-6.3.1.tgz",
"integrity": "sha512-BR7VvDCVHO+q2xBEWskxS6DJE1qRnb7DxzUrogb71CWoSficBxYsiAGd+Kl0mmq/MprG9yArRkyrQxTO6XjMzA==",
"bin": {
"semver": "bin/semver.js"
}
},
"node_modules/make-fetch-happen": {
"version": "9.1.0",
"resolved": "https://registry.npmjs.org/make-fetch-happen/-/make-fetch-happen-9.1.0.tgz",
@@ -3585,18 +3517,6 @@
"node": "^12.13.0 || ^14.15.0 || >=16.0.0"
}
},
"node_modules/node-gyp/node_modules/lru-cache": {
"version": "6.0.0",
"resolved": "https://registry.npmjs.org/lru-cache/-/lru-cache-6.0.0.tgz",
"integrity": "sha512-Jo6dJ04CmSjuznwJSS3pUeWmd/H0ffTlkXXgwZi+eq1UCmqQwCh+eLsYOYCwY991i2Fah4h1BEMCx4qThGbsiA==",
"optional": true,
"dependencies": {
"yallist": "^4.0.0"
},
"engines": {
"node": ">=10"
}
},
"node_modules/node-gyp/node_modules/nopt": {
"version": "5.0.0",
"resolved": "https://registry.npmjs.org/nopt/-/nopt-5.0.0.tgz",
@@ -3627,21 +3547,6 @@
"node": "^12.13.0 || ^14.15.0 || >=16.0.0"
}
},
"node_modules/node-gyp/node_modules/semver": {
"version": "7.5.3",
"resolved": "https://registry.npmjs.org/semver/-/semver-7.5.3.tgz",
"integrity": "sha512-QBlUtyVk/5EeHbi7X0fw6liDZc7BBmEaSYn01fMU1OUYbf6GPsbTtd8WmnqbI20SeycoHSeiybkE/q1Q+qlThQ==",
"optional": true,
"dependencies": {
"lru-cache": "^6.0.0"
},
"bin": {
"semver": "bin/semver.js"
},
"engines": {
"node": ">=10"
}
},
"node_modules/node-preload": {
"version": "0.2.1",
"resolved": "https://registry.npmjs.org/node-preload/-/node-preload-0.2.1.tgz",
@@ -4336,11 +4241,14 @@
"integrity": "sha512-NqVDv9TpANUjFm0N8uM5GxL36UgKi9/atZw+x7YFnQ8ckwFGKrl4xX4yWtrey3UJm5nP1kUbnYgLopqWNSRhWw=="
},
"node_modules/semver": {
"version": "6.3.1",
"resolved": "https://registry.npmjs.org/semver/-/semver-6.3.1.tgz",
"integrity": "sha512-BR7VvDCVHO+q2xBEWskxS6DJE1qRnb7DxzUrogb71CWoSficBxYsiAGd+Kl0mmq/MprG9yArRkyrQxTO6XjMzA==",
"version": "7.6.3",
"resolved": "https://registry.npmjs.org/semver/-/semver-7.6.3.tgz",
"integrity": "sha512-oVekP1cKtI+CTDvHWYFUcMtsK/00wmAEfyqKfNdARm8u1wNVhSgaX7A8d4UuIlUI5e84iEwOhs7ZPYRmzU9U6A==",
"bin": {
"semver": "bin/semver.js"
},
"engines": {
"node": ">=10"
}
},
"node_modules/send": {
@@ -4456,36 +4364,11 @@
}
}
},
"node_modules/sequelize/node_modules/lru-cache": {
"version": "6.0.0",
"resolved": "https://registry.npmjs.org/lru-cache/-/lru-cache-6.0.0.tgz",
"integrity": "sha512-Jo6dJ04CmSjuznwJSS3pUeWmd/H0ffTlkXXgwZi+eq1UCmqQwCh+eLsYOYCwY991i2Fah4h1BEMCx4qThGbsiA==",
"dependencies": {
"yallist": "^4.0.0"
},
"engines": {
"node": ">=10"
}
},
"node_modules/sequelize/node_modules/ms": {
"version": "2.1.2",
"resolved": "https://registry.npmjs.org/ms/-/ms-2.1.2.tgz",
"integrity": "sha512-sGkPx+VjMtmA6MX27oA4FBFELFCZZ4S4XqeGOXCv68tT+jb3vk/RyaKWP0PTKyWtmLSM0b+adUTEvbs1PEaH2w=="
},
"node_modules/sequelize/node_modules/semver": {
"version": "7.5.4",
"resolved": "https://registry.npmjs.org/semver/-/semver-7.5.4.tgz",
"integrity": "sha512-1bCSESV6Pv+i21Hvpxp3Dx+pSD8lIPt8uVjRrxAUt/nbswYc+tK6Y2btiULjd4+fnq15PX+nqQDC7Oft7WkwcA==",
"dependencies": {
"lru-cache": "^6.0.0"
},
"bin": {
"semver": "bin/semver.js"
},
"engines": {
"node": ">=10"
}
},
"node_modules/serialize-javascript": {
"version": "6.0.0",
"resolved": "https://registry.npmjs.org/serialize-javascript/-/serialize-javascript-6.0.0.tgz",


@@ -47,6 +47,7 @@
"p-throttle": "^4.1.1",
"passport": "^0.6.0",
"passport-jwt": "^4.0.1",
"semver": "^7.6.3",
"sequelize": "^6.35.2",
"socket.io": "^4.5.4",
"sqlite3": "^5.1.6",


@@ -8,6 +8,8 @@ const Logger = require('./Logger')
const dbMigration = require('./utils/migrations/dbMigration')
const Auth = require('./Auth')
const MigrationManager = require('./managers/MigrationManager')
class Database {
constructor() {
this.sequelize = null
@@ -168,6 +170,15 @@ class Database {
throw new Error('Database connection failed')
}
try {
const migrationManager = new MigrationManager(this.sequelize, global.ConfigPath)
await migrationManager.init(packageJson.version)
if (!this.isNew) await migrationManager.runMigrations()
} catch (error) {
Logger.error(`[Database] Failed to run migrations`, error)
throw new Error('Database migration failed')
}
await this.buildModels(force)
Logger.info(`[Database] Db initialized with models:`, Object.keys(this.sequelize.models).join(', '))

21
server/libs/umzug/LICENSE Normal file

@@ -0,0 +1,21 @@
The MIT License (MIT)
Copyright (c) 2014-2017 Sequelize contributors
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.


@@ -0,0 +1,31 @@
'use strict'
var __createBinding =
(this && this.__createBinding) ||
(Object.create
? function (o, m, k, k2) {
if (k2 === undefined) k2 = k
var desc = Object.getOwnPropertyDescriptor(m, k)
if (!desc || ('get' in desc ? !m.__esModule : desc.writable || desc.configurable)) {
desc = {
enumerable: true,
get: function () {
return m[k]
}
}
}
Object.defineProperty(o, k2, desc)
}
: function (o, m, k, k2) {
if (k2 === undefined) k2 = k
o[k2] = m[k]
})
var __exportStar =
(this && this.__exportStar) ||
function (m, exports) {
for (var p in m) if (p !== 'default' && !Object.prototype.hasOwnProperty.call(exports, p)) __createBinding(exports, m, p)
}
Object.defineProperty(exports, '__esModule', { value: true })
__exportStar(require('./umzug'), exports)
__exportStar(require('./storage'), exports)
__exportStar(require('./types'), exports)
//# sourceMappingURL=index.js.map


@@ -0,0 +1,18 @@
"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
exports.verifyUmzugStorage = exports.isUmzugStorage = void 0;
function isUmzugStorage(arg) {
return (arg &&
typeof arg.logMigration === 'function' &&
typeof arg.unlogMigration === 'function' &&
typeof arg.executed === 'function');
}
exports.isUmzugStorage = isUmzugStorage;
const verifyUmzugStorage = (arg) => {
if (!isUmzugStorage(arg)) {
throw new Error(`Invalid umzug storage`);
}
return arg;
};
exports.verifyUmzugStorage = verifyUmzugStorage;
//# sourceMappingURL=contract.js.map


@@ -0,0 +1,24 @@
"use strict";
var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
if (k2 === undefined) k2 = k;
var desc = Object.getOwnPropertyDescriptor(m, k);
if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) {
desc = { enumerable: true, get: function() { return m[k]; } };
}
Object.defineProperty(o, k2, desc);
}) : (function(o, m, k, k2) {
if (k2 === undefined) k2 = k;
o[k2] = m[k];
}));
var __exportStar = (this && this.__exportStar) || function(m, exports) {
for (var p in m) if (p !== "default" && !Object.prototype.hasOwnProperty.call(exports, p)) __createBinding(exports, m, p);
};
Object.defineProperty(exports, "__esModule", { value: true });
// codegen:start {preset: barrel}
__exportStar(require("./contract"), exports);
__exportStar(require("./json"), exports);
__exportStar(require("./memory"), exports);
__exportStar(require("./mongodb"), exports);
__exportStar(require("./sequelize"), exports);
// codegen:end
//# sourceMappingURL=index.js.map


@@ -0,0 +1,61 @@
"use strict";
var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
if (k2 === undefined) k2 = k;
var desc = Object.getOwnPropertyDescriptor(m, k);
if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) {
desc = { enumerable: true, get: function() { return m[k]; } };
}
Object.defineProperty(o, k2, desc);
}) : (function(o, m, k, k2) {
if (k2 === undefined) k2 = k;
o[k2] = m[k];
}));
var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {
Object.defineProperty(o, "default", { enumerable: true, value: v });
}) : function(o, v) {
o["default"] = v;
});
var __importStar = (this && this.__importStar) || function (mod) {
if (mod && mod.__esModule) return mod;
var result = {};
if (mod != null) for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k);
__setModuleDefault(result, mod);
return result;
};
Object.defineProperty(exports, "__esModule", { value: true });
exports.JSONStorage = void 0;
const fs_1 = require("fs");
const path = __importStar(require("path"));
const filesystem = {
/** reads a file as a string or returns null if file doesn't exist */
async readAsync(filepath) {
return fs_1.promises.readFile(filepath).then(c => c.toString(), () => null);
},
/** writes a string as file contents, creating its parent directory if necessary */
async writeAsync(filepath, content) {
await fs_1.promises.mkdir(path.dirname(filepath), { recursive: true });
await fs_1.promises.writeFile(filepath, content);
},
};
class JSONStorage {
constructor(options) {
var _a;
this.path = (_a = options === null || options === void 0 ? void 0 : options.path) !== null && _a !== void 0 ? _a : path.join(process.cwd(), 'umzug.json');
}
async logMigration({ name: migrationName }) {
const loggedMigrations = await this.executed();
loggedMigrations.push(migrationName);
await filesystem.writeAsync(this.path, JSON.stringify(loggedMigrations, null, 2));
}
async unlogMigration({ name: migrationName }) {
const loggedMigrations = await this.executed();
const updatedMigrations = loggedMigrations.filter(name => name !== migrationName);
await filesystem.writeAsync(this.path, JSON.stringify(updatedMigrations, null, 2));
}
async executed() {
const content = await filesystem.readAsync(this.path);
return content ? JSON.parse(content) : [];
}
}
exports.JSONStorage = JSONStorage;
//# sourceMappingURL=json.js.map


@@ -0,0 +1,17 @@
"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
exports.memoryStorage = void 0;
const memoryStorage = () => {
let executed = [];
return {
async logMigration({ name }) {
executed.push(name);
},
async unlogMigration({ name }) {
executed = executed.filter(n => n !== name);
},
executed: async () => [...executed],
};
};
exports.memoryStorage = memoryStorage;
//# sourceMappingURL=memory.js.map


@@ -0,0 +1,31 @@
"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
exports.MongoDBStorage = void 0;
function isMongoDBCollectionOptions(arg) {
return Boolean(arg.collection);
}
class MongoDBStorage {
constructor(options) {
var _a, _b;
if (!options || (!options.collection && !options.connection)) {
throw new Error('MongoDB Connection or Collection required');
}
this.collection = isMongoDBCollectionOptions(options)
? options.collection
: options.connection.collection((_a = options.collectionName) !== null && _a !== void 0 ? _a : 'migrations');
this.connection = options.connection; // TODO remove this
this.collectionName = (_b = options.collectionName) !== null && _b !== void 0 ? _b : 'migrations'; // TODO remove this
}
async logMigration({ name: migrationName }) {
await this.collection.insertOne({ migrationName });
}
async unlogMigration({ name: migrationName }) {
await this.collection.deleteOne({ migrationName });
}
async executed() {
const records = await this.collection.find({}).sort({ migrationName: 1 }).toArray();
return records.map(r => r.migrationName);
}
}
exports.MongoDBStorage = MongoDBStorage;
//# sourceMappingURL=mongodb.js.map


@@ -0,0 +1,85 @@
"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
exports.SequelizeStorage = void 0;
const DIALECTS_WITH_CHARSET_AND_COLLATE = new Set(['mysql', 'mariadb']);
class SequelizeStorage {
/**
Constructs Sequelize based storage. Migrations will be stored in a SequelizeMeta table using the given instance of Sequelize.
If a model is given, it will be used directly as the model for the SequelizeMeta table. Otherwise, it will be created automatically according to the given options.
If the table does not exist it will be created automatically upon the logging of the first migration.
*/
constructor(options) {
var _a, _b, _c, _d, _e, _f;
if (!options || (!options.model && !options.sequelize)) {
throw new Error('One of "sequelize" or "model" storage option is required');
}
this.sequelize = (_a = options.sequelize) !== null && _a !== void 0 ? _a : options.model.sequelize;
this.columnType = (_b = options.columnType) !== null && _b !== void 0 ? _b : this.sequelize.constructor.DataTypes.STRING;
this.columnName = (_c = options.columnName) !== null && _c !== void 0 ? _c : 'name';
this.timestamps = (_d = options.timestamps) !== null && _d !== void 0 ? _d : false;
this.modelName = (_e = options.modelName) !== null && _e !== void 0 ? _e : 'SequelizeMeta';
this.tableName = options.tableName;
this.schema = options.schema;
this.model = (_f = options.model) !== null && _f !== void 0 ? _f : this.getModel();
}
getModel() {
var _a;
if (this.sequelize.isDefined(this.modelName)) {
// eslint-disable-next-line @typescript-eslint/no-unsafe-return
return this.sequelize.model(this.modelName);
}
const dialectName = (_a = this.sequelize.dialect) === null || _a === void 0 ? void 0 : _a.name;
const hasCharsetAndCollate = dialectName && DIALECTS_WITH_CHARSET_AND_COLLATE.has(dialectName);
return this.sequelize.define(this.modelName, {
[this.columnName]: {
type: this.columnType,
allowNull: false,
unique: true,
primaryKey: true,
autoIncrement: false,
},
}, {
tableName: this.tableName,
schema: this.schema,
timestamps: this.timestamps,
charset: hasCharsetAndCollate ? 'utf8' : undefined,
collate: hasCharsetAndCollate ? 'utf8_unicode_ci' : undefined,
});
}
async syncModel() {
await this.model.sync();
}
async logMigration({ name: migrationName }) {
await this.syncModel();
await this.model.create({
[this.columnName]: migrationName,
});
}
async unlogMigration({ name: migrationName }) {
await this.syncModel();
await this.model.destroy({
where: {
[this.columnName]: migrationName,
},
});
}
async executed() {
await this.syncModel();
const migrations = await this.model.findAll({ order: [[this.columnName, 'ASC']] });
return migrations.map(migration => {
const name = migration[this.columnName];
if (typeof name !== 'string') {
throw new TypeError(`Unexpected migration name type: expected string, got ${typeof name}`);
}
return name;
});
}
// TODO remove this
_model() {
return this.model;
}
}
exports.SequelizeStorage = SequelizeStorage;
//# sourceMappingURL=sequelize.js.map


@@ -0,0 +1,32 @@
'use strict'
/* eslint-disable unicorn/template-indent */
// templates for migration file creation
Object.defineProperty(exports, '__esModule', { value: true })
exports.sqlDown = exports.sqlUp = exports.mjs = exports.ts = exports.js = void 0
exports.js = `
/** @type {import('umzug').MigrationFn<any>} */
exports.up = async params => {};
/** @type {import('umzug').MigrationFn<any>} */
exports.down = async params => {};
`.trimStart()
exports.ts = `
import type { MigrationFn } from 'umzug';
export const up: MigrationFn = async params => {};
export const down: MigrationFn = async params => {};
`.trimStart()
exports.mjs = `
/** @type {import('umzug').MigrationFn<any>} */
export const up = async params => {};
/** @type {import('umzug').MigrationFn<any>} */
export const down = async params => {};
`.trimStart()
exports.sqlUp = `
-- up migration
`.trimStart()
exports.sqlDown = `
-- down migration
`.trimStart()
//# sourceMappingURL=templates.js.map


@@ -0,0 +1,12 @@
'use strict'
Object.defineProperty(exports, '__esModule', { value: true })
exports.RerunBehavior = void 0
exports.RerunBehavior = {
/** Hard error if an up migration that has already been run, or a down migration that hasn't, is encountered */
THROW: 'THROW',
/** Silently skip up migrations that have already been run, or down migrations that haven't */
SKIP: 'SKIP',
/** Re-run up migrations that have already been run, or down migrations that haven't */
ALLOW: 'ALLOW'
}
//# sourceMappingURL=types.js.map

386
server/libs/umzug/umzug.js Normal file

@@ -0,0 +1,386 @@
'use strict'
var __createBinding =
(this && this.__createBinding) ||
(Object.create
? function (o, m, k, k2) {
if (k2 === undefined) k2 = k
var desc = Object.getOwnPropertyDescriptor(m, k)
if (!desc || ('get' in desc ? !m.__esModule : desc.writable || desc.configurable)) {
desc = {
enumerable: true,
get: function () {
return m[k]
}
}
}
Object.defineProperty(o, k2, desc)
}
: function (o, m, k, k2) {
if (k2 === undefined) k2 = k
o[k2] = m[k]
})
var __setModuleDefault =
(this && this.__setModuleDefault) ||
(Object.create
? function (o, v) {
Object.defineProperty(o, 'default', { enumerable: true, value: v })
}
: function (o, v) {
o['default'] = v
})
var __importStar =
(this && this.__importStar) ||
function (mod) {
if (mod && mod.__esModule) return mod
var result = {}
if (mod != null) for (var k in mod) if (k !== 'default' && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k)
__setModuleDefault(result, mod)
return result
}
var __importDefault =
(this && this.__importDefault) ||
function (mod) {
return mod && mod.__esModule ? mod : { default: mod }
}
var _a
Object.defineProperty(exports, '__esModule', { value: true })
exports.Umzug = exports.MigrationError = void 0
const fs = __importStar(require('fs'))
const path = __importStar(require('path'))
const storage_1 = require('./storage')
const templates = __importStar(require('./templates'))
const types_1 = require('./types')
class MigrationError extends Error {
// TODO [>=4.0.0] Take a `{ cause: ... }` options bag like the default `Error`, it looks like this because of verror backwards-compatibility.
constructor(migration, original) {
super(`Migration ${migration.name} (${migration.direction}) failed: ${MigrationError.errorString(original)}`, {
cause: original
})
this.name = 'MigrationError'
this.migration = migration
}
// TODO [>=4.0.0] Remove this backwards-compatibility alias
get info() {
return this.migration
}
static errorString(cause) {
return cause instanceof Error ? `Original error: ${cause.message}` : `Non-error value thrown. See info for full props: ${cause}`
}
}
exports.MigrationError = MigrationError
class Umzug {
/** creates a new Umzug instance */
constructor(options) {
var _b
this.options = options
this.storage = (0, storage_1.verifyUmzugStorage)((_b = options.storage) !== null && _b !== void 0 ? _b : new storage_1.JSONStorage())
this.migrations = this.getMigrationsResolver(this.options.migrations)
}
logging(message) {
var _b
;(_b = this.options.logger) === null || _b === void 0 ? void 0 : _b.info(message)
}
/** Get the list of migrations which have already been applied */
async executed() {
return this.runCommand('executed', async ({ context }) => {
const list = await this._executed(context)
// We do the following to not expose the `up` and `down` functions to the user
return list.map((m) => ({ name: m.name, path: m.path }))
})
}
/** Get the list of migrations which have already been applied */
async _executed(context) {
const [migrations, executedNames] = await Promise.all([this.migrations(context), this.storage.executed({ context })])
const executedSet = new Set(executedNames)
return migrations.filter((m) => executedSet.has(m.name))
}
/** Get the list of migrations which are yet to be applied */
async pending() {
return this.runCommand('pending', async ({ context }) => {
const list = await this._pending(context)
// We do the following to not expose the `up` and `down` functions to the user
return list.map((m) => ({ name: m.name, path: m.path }))
})
}
async _pending(context) {
const [migrations, executedNames] = await Promise.all([this.migrations(context), this.storage.executed({ context })])
const executedSet = new Set(executedNames)
return migrations.filter((m) => !executedSet.has(m.name))
}
async runCommand(command, cb) {
const context = await this.getContext()
return await cb({ context })
}
/**
* Apply migrations. By default, runs all pending migrations.
* @see MigrateUpOptions for other use cases using `to`, `migrations` and `rerun`.
*/
async up(options = {}) {
const eligibleMigrations = async (context) => {
var _b
if (options.migrations && options.rerun === types_1.RerunBehavior.ALLOW) {
// Allow rerun means the specified migrations should be run even if they've run before - so get all migrations, not just pending
const list = await this.migrations(context)
return this.findMigrations(list, options.migrations)
}
if (options.migrations && options.rerun === types_1.RerunBehavior.SKIP) {
const executedNames = new Set((await this._executed(context)).map((m) => m.name))
const filteredMigrations = options.migrations.filter((m) => !executedNames.has(m))
return this.findMigrations(await this.migrations(context), filteredMigrations)
}
if (options.migrations) {
return this.findMigrations(await this._pending(context), options.migrations)
}
const allPending = await this._pending(context)
let sliceIndex = (_b = options.step) !== null && _b !== void 0 ? _b : allPending.length
if (options.to) {
sliceIndex = this.findNameIndex(allPending, options.to) + 1
}
return allPending.slice(0, sliceIndex)
}
return this.runCommand('up', async ({ context }) => {
const toBeApplied = await eligibleMigrations(context)
for (const m of toBeApplied) {
const start = Date.now()
const params = { name: m.name, path: m.path, context }
this.logging({ event: 'migrating', name: m.name })
try {
await m.up(params)
} catch (e) {
throw new MigrationError({ direction: 'up', ...params }, e)
}
await this.storage.logMigration(params)
const duration = (Date.now() - start) / 1000
this.logging({ event: 'migrated', name: m.name, durationSeconds: duration })
}
return toBeApplied.map((m) => ({ name: m.name, path: m.path }))
})
}
/**
* Revert migrations. By default, the last executed migration is reverted.
* @see MigrateDownOptions for other use cases using `to`, `migrations` and `rerun`.
*/
async down(options = {}) {
const eligibleMigrations = async (context) => {
var _b
if (options.migrations && options.rerun === types_1.RerunBehavior.ALLOW) {
const list = await this.migrations(context)
return this.findMigrations(list, options.migrations)
}
if (options.migrations && options.rerun === types_1.RerunBehavior.SKIP) {
const pendingNames = new Set((await this._pending(context)).map((m) => m.name))
const filteredMigrations = options.migrations.filter((m) => !pendingNames.has(m))
return this.findMigrations(await this.migrations(context), filteredMigrations)
}
if (options.migrations) {
return this.findMigrations(await this._executed(context), options.migrations)
}
const executedReversed = (await this._executed(context)).slice().reverse()
let sliceIndex = (_b = options.step) !== null && _b !== void 0 ? _b : 1
if (options.to === 0 || options.migrations) {
sliceIndex = executedReversed.length
} else if (options.to) {
sliceIndex = this.findNameIndex(executedReversed, options.to) + 1
}
return executedReversed.slice(0, sliceIndex)
}
return this.runCommand('down', async ({ context }) => {
var _b
const toBeReverted = await eligibleMigrations(context)
for (const m of toBeReverted) {
const start = Date.now()
const params = { name: m.name, path: m.path, context }
this.logging({ event: 'reverting', name: m.name })
try {
await ((_b = m.down) === null || _b === void 0 ? void 0 : _b.call(m, params))
} catch (e) {
throw new MigrationError({ direction: 'down', ...params }, e)
}
await this.storage.unlogMigration(params)
const duration = Number.parseFloat(((Date.now() - start) / 1000).toFixed(3))
this.logging({ event: 'reverted', name: m.name, durationSeconds: duration })
}
return toBeReverted.map((m) => ({ name: m.name, path: m.path }))
})
}
async create(options) {
await this.runCommand('create', async ({ context }) => {
var _b, _c, _d, _e
const isoDate = new Date().toISOString()
const prefixes = {
TIMESTAMP: isoDate.replace(/\.\d{3}Z$/, '').replace(/\W/g, '.'),
DATE: isoDate.split('T')[0].replace(/\W/g, '.'),
NONE: ''
}
const prefixType = (_b = options.prefix) !== null && _b !== void 0 ? _b : 'TIMESTAMP'
const fileBasename = [prefixes[prefixType], options.name].filter(Boolean).join('.')
const allowedExtensions = options.allowExtension ? [options.allowExtension] : ['.js', '.cjs', '.mjs', '.ts', '.cts', '.mts', '.sql']
const existing = await this.migrations(context)
const last = existing.slice(-1)[0]
const folder = options.folder || ((_c = this.options.create) === null || _c === void 0 ? void 0 : _c.folder) || ((last === null || last === void 0 ? void 0 : last.path) && path.dirname(last.path))
if (!folder) {
throw new Error(`Couldn't infer a directory to generate migration file in. Pass folder explicitly`)
}
const filepath = path.join(folder, fileBasename)
if (!options.allowConfusingOrdering) {
const confusinglyOrdered = existing.find((e) => e.path && e.path >= filepath)
if (confusinglyOrdered) {
throw new Error(`Can't create ${fileBasename}, since it's unclear if it should run before or after existing migration ${confusinglyOrdered.name}. Use allowConfusingOrdering to bypass this error.`)
}
}
const template =
typeof options.content === 'string'
? async () => [[filepath, options.content]]
: // eslint-disable-next-line @typescript-eslint/unbound-method
(_e = (_d = this.options.create) === null || _d === void 0 ? void 0 : _d.template) !== null && _e !== void 0
? _e
: Umzug.defaultCreationTemplate
const toWrite = await template(filepath)
if (toWrite.length === 0) {
toWrite.push([filepath, ''])
}
toWrite.forEach((pair) => {
if (!Array.isArray(pair) || pair.length !== 2) {
throw new Error(`Expected [filepath, content] pair. Check that the file template function returns an array of pairs.`)
}
const ext = path.extname(pair[0])
if (!allowedExtensions.includes(ext)) {
const allowStr = allowedExtensions.join(', ')
const message = `Extension ${ext} not allowed. Allowed extensions are ${allowStr}. See help for allowExtension to avoid this error.`
throw new Error(message)
}
fs.mkdirSync(path.dirname(pair[0]), { recursive: true })
fs.writeFileSync(pair[0], pair[1])
this.logging({ event: 'created', path: pair[0] })
})
if (!options.skipVerify) {
const [firstFilePath] = toWrite[0]
const pending = await this._pending(context)
if (!pending.some((p) => p.path && path.resolve(p.path) === path.resolve(firstFilePath))) {
const paths = pending.map((p) => p.path).join(', ')
throw new Error(`Expected ${firstFilePath} to be a pending migration but it wasn't! Pending migration paths: ${paths}. You should investigate this. Use skipVerify to bypass this error.`)
}
}
})
}
static defaultCreationTemplate(filepath) {
const ext = path.extname(filepath)
if ((ext === '.js' && typeof require.main === 'object') || ext === '.cjs') {
return [[filepath, templates.js]]
}
if (ext === '.ts' || ext === '.mts' || ext === '.cts') {
return [[filepath, templates.ts]]
}
if ((ext === '.js' && require.main === undefined) || ext === '.mjs') {
return [[filepath, templates.mjs]]
}
if (ext === '.sql') {
const downFilepath = path.join(path.dirname(filepath), 'down', path.basename(filepath))
return [
[filepath, templates.sqlUp],
[downFilepath, templates.sqlDown]
]
}
return []
}
findNameIndex(migrations, name) {
const index = migrations.findIndex((m) => m.name === name)
if (index === -1) {
throw new Error(`Couldn't find migration to apply with name ${JSON.stringify(name)}`)
}
return index
}
findMigrations(migrations, names) {
const map = new Map(migrations.map((m) => [m.name, m]))
return names.map((name) => {
const migration = map.get(name)
if (!migration) {
throw new Error(`Couldn't find migration to apply with name ${JSON.stringify(name)}`)
}
return migration
})
}
async getContext() {
const { context = {} } = this.options
// eslint-disable-next-line @typescript-eslint/no-unsafe-return
return typeof context === 'function' ? context() : context
}
/** helper for parsing input migrations into a callback returning a list of ready-to-run migrations */
getMigrationsResolver(inputMigrations) {
var _b
if (Array.isArray(inputMigrations)) {
return async () => inputMigrations
}
if (typeof inputMigrations === 'function') {
// Lazy migrations definition, recurse.
return async (ctx) => {
const resolved = await inputMigrations(ctx)
return this.getMigrationsResolver(resolved)(ctx)
}
}
const paths = inputMigrations.files
const resolver = (_b = inputMigrations.resolve) !== null && _b !== void 0 ? _b : Umzug.defaultResolver
return async (context) => {
paths.sort()
return paths.map((unresolvedPath) => {
const filepath = path.resolve(unresolvedPath)
const name = path.basename(filepath)
return {
path: filepath,
...resolver({ name, path: filepath, context })
}
})
}
}
}
exports.Umzug = Umzug
_a = Umzug
Umzug.defaultResolver = ({ name, path: filepath }) => {
if (!filepath) {
throw new Error(`Can't use default resolver for non-filesystem migrations`)
}
const ext = path.extname(filepath)
const languageSpecificHelp = {
'.ts': "TypeScript files can be required by adding `ts-node` as a dependency and calling `require('ts-node/register')` at the program entrypoint before running migrations.",
'.sql': 'Try writing a resolver which reads file content and executes it as a sql query.'
}
languageSpecificHelp['.cts'] = languageSpecificHelp['.ts']
languageSpecificHelp['.mts'] = languageSpecificHelp['.ts']
let loadModule
const jsExt = ext.replace(/\.([cm]?)ts$/, '.$1js')
const getModule = async () => {
try {
return await loadModule()
} catch (e) {
if ((e instanceof SyntaxError || e instanceof MissingResolverError) && ext in languageSpecificHelp) {
e.message += '\n\n' + languageSpecificHelp[ext]
}
throw e
}
}
if ((jsExt === '.js' && typeof require.main === 'object') || jsExt === '.cjs') {
// eslint-disable-next-line @typescript-eslint/no-var-requires
loadModule = async () => require(filepath)
} else if (jsExt === '.js' || jsExt === '.mjs') {
loadModule = async () => import(filepath)
} else {
loadModule = async () => {
throw new MissingResolverError(filepath)
}
}
return {
name,
path: filepath,
up: async ({ context }) => (await getModule()).up({ path: filepath, name, context }),
down: async ({ context }) => {
var _b, _c
return (_c = (_b = await getModule()).down) === null || _c === void 0 ? void 0 : _c.call(_b, { path: filepath, name, context })
}
}
}
class MissingResolverError extends Error {
constructor(filepath) {
super(`No resolver specified for file ${filepath}. See docs for guidance on how to write a custom resolver.`)
}
}
//# sourceMappingURL=umzug.js.map


@@ -0,0 +1,275 @@
const { Umzug, SequelizeStorage } = require('../libs/umzug')
const { Sequelize, DataTypes } = require('sequelize')
const semver = require('semver')
const path = require('path')
const Module = require('module')
const fs = require('../libs/fsExtra')
const Logger = require('../Logger')
class MigrationManager {
static MIGRATIONS_META_TABLE = 'migrationsMeta'
/**
* @param {import('../Database').sequelize} sequelize
* @param {string} [configPath]
*/
constructor(sequelize, configPath = global.configPath) {
if (!sequelize || !(sequelize instanceof Sequelize)) throw new Error('Sequelize instance is required for MigrationManager.')
this.sequelize = sequelize
if (!configPath) throw new Error('Config path is required for MigrationManager.')
this.configPath = configPath
this.migrationsSourceDir = path.join(__dirname, '..', 'migrations')
this.initialized = false
this.migrationsDir = null
this.maxVersion = null
this.databaseVersion = null
this.serverVersion = null
this.umzug = null
}
/**
* Init version vars and copy migration files to config dir if necessary
*
* @param {string} serverVersion
*/
async init(serverVersion) {
if (!(await fs.pathExists(this.configPath))) throw new Error(`Config path does not exist: ${this.configPath}`)
this.migrationsDir = path.join(this.configPath, 'migrations')
this.serverVersion = this.extractVersionFromTag(serverVersion)
if (!this.serverVersion) throw new Error(`Invalid server version: ${serverVersion}. Expected a version tag like v1.2.3.`)
await this.fetchVersionsFromDatabase()
if (!this.maxVersion || !this.databaseVersion) throw new Error('Failed to fetch versions from the database.')
if (semver.gt(this.serverVersion, this.maxVersion)) {
try {
await this.copyMigrationsToConfigDir()
} catch (error) {
throw new Error('Failed to copy migrations to the config directory.', { cause: error })
}
try {
await this.updateMaxVersion()
} catch (error) {
throw new Error('Failed to update max version in the database.', { cause: error })
}
}
this.initialized = true
}
async runMigrations() {
if (!this.initialized) throw new Error('MigrationManager is not initialized. Call init() first.')
const versionCompare = semver.compare(this.serverVersion, this.databaseVersion)
if (versionCompare == 0) {
Logger.info('[MigrationManager] Database is already up to date.')
return
}
await this.initUmzug()
const migrations = await this.umzug.migrations()
const executedMigrations = (await this.umzug.executed()).map((m) => m.name)
const migrationDirection = versionCompare == 1 ? 'up' : 'down'
let migrationsToRun = []
migrationsToRun = this.findMigrationsToRun(migrations, executedMigrations, migrationDirection)
// Only proceed with migration if there are migrations to run
if (migrationsToRun.length > 0) {
const originalDbPath = path.join(this.configPath, 'absdatabase.sqlite')
const backupDbPath = path.join(this.configPath, 'absdatabase.backup.sqlite')
try {
Logger.info(`[MigrationManager] Migrating database ${migrationDirection} to version ${this.serverVersion}`)
Logger.info(`[MigrationManager] Migrations to run: ${migrationsToRun.join(', ')}`)
// Create a backup copy of the SQLite database before starting migrations
await fs.copy(originalDbPath, backupDbPath)
Logger.info('Created a backup of the original database.')
// Run migrations
await this.umzug[migrationDirection]({ migrations: migrationsToRun, rerun: 'ALLOW' })
// Clean up the backup
await fs.remove(backupDbPath)
Logger.info('[MigrationManager] Migrations successfully applied to the original database.')
} catch (error) {
Logger.error('[MigrationManager] Migration failed:', error)
await this.sequelize.close()
// Step 3: If migration fails, save the failed original and restore the backup
const failedDbPath = path.join(this.configPath, 'absdatabase.failed.sqlite')
await fs.move(originalDbPath, failedDbPath, { overwrite: true })
Logger.info('[MigrationManager] Saved the failed database as absdatabase.failed.sqlite.')
await fs.move(backupDbPath, originalDbPath, { overwrite: true })
Logger.info('[MigrationManager] Restored the original database from the backup.')
Logger.info('[MigrationManager] Migration failed. Exiting Audiobookshelf with code 1.')
process.exit(1)
}
} else {
Logger.info('[MigrationManager] No migrations to run.')
}
await this.updateDatabaseVersion()
}
async initUmzug(umzugStorage = new SequelizeStorage({ sequelize: this.sequelize })) {
// This check is for dependency injection in tests
const files = (await fs.readdir(this.migrationsDir)).map((file) => path.join(this.migrationsDir, file))
const parent = new Umzug({
migrations: {
files,
resolve: (params) => {
// make script think it's in migrationsSourceDir
const migrationPath = params.path
const migrationName = params.name
const contents = fs.readFileSync(migrationPath, 'utf8')
const fakePath = path.join(this.migrationsSourceDir, path.basename(migrationPath))
const module = new Module(fakePath)
module.filename = fakePath
module.paths = Module._nodeModulePaths(this.migrationsSourceDir)
module._compile(contents, fakePath)
const script = module.exports
return {
name: migrationName,
path: migrationPath,
up: script.up,
down: script.down
}
}
},
context: { queryInterface: this.sequelize.getQueryInterface(), logger: Logger },
storage: umzugStorage,
logger: Logger
})
// Sort migrations by version
this.umzug = new Umzug({
...parent.options,
migrations: async () =>
(await parent.migrations()).sort((a, b) => {
const versionA = this.extractVersionFromTag(a.name)
const versionB = this.extractVersionFromTag(b.name)
return semver.compare(versionA, versionB)
})
})
}
async fetchVersionsFromDatabase() {
await this.checkOrCreateMigrationsMetaTable()
const [{ version }] = await this.sequelize.query("SELECT value as version FROM :migrationsMeta WHERE key = 'version'", {
replacements: { migrationsMeta: MigrationManager.MIGRATIONS_META_TABLE },
type: Sequelize.QueryTypes.SELECT
})
this.databaseVersion = version
const [{ maxVersion }] = await this.sequelize.query("SELECT value as maxVersion FROM :migrationsMeta WHERE key = 'maxVersion'", {
replacements: { migrationsMeta: MigrationManager.MIGRATIONS_META_TABLE },
type: Sequelize.QueryTypes.SELECT
})
this.maxVersion = maxVersion
}
async checkOrCreateMigrationsMetaTable() {
const queryInterface = this.sequelize.getQueryInterface()
if (!(await queryInterface.tableExists(MigrationManager.MIGRATIONS_META_TABLE))) {
await queryInterface.createTable(MigrationManager.MIGRATIONS_META_TABLE, {
key: {
type: DataTypes.STRING,
allowNull: false
},
value: {
type: DataTypes.STRING,
allowNull: false
}
})
await this.sequelize.query("INSERT INTO :migrationsMeta (key, value) VALUES ('version', :version), ('maxVersion', '0.0.0')", {
replacements: { version: this.serverVersion, migrationsMeta: MigrationManager.MIGRATIONS_META_TABLE },
type: Sequelize.QueryTypes.INSERT
})
}
}
extractVersionFromTag(tag) {
if (!tag) return null
const versionMatch = tag.match(/^v?(\d+\.\d+\.\d+)/)
return versionMatch ? versionMatch[1] : null
}
async copyMigrationsToConfigDir() {
await fs.ensureDir(this.migrationsDir) // Ensure the target directory exists
if (!(await fs.pathExists(this.migrationsSourceDir))) return
const files = await fs.readdir(this.migrationsSourceDir)
await Promise.all(
files
.filter((file) => path.extname(file) === '.js')
.map(async (file) => {
const sourceFile = path.join(this.migrationsSourceDir, file)
const targetFile = path.join(this.migrationsDir, file)
await fs.copy(sourceFile, targetFile) // Asynchronously copy the files
})
)
}
/**
*
* @param {{ name: string }[]} migrations
* @param {string[]} executedMigrations - names of executed migrations
* @param {string} direction - 'up' or 'down'
* @returns {string[]} - names of migrations to run
*/
findMigrationsToRun(migrations, executedMigrations, direction) {
const migrationsToRun = migrations
.filter((migration) => {
const migrationVersion = this.extractVersionFromTag(migration.name)
if (direction === 'up') {
return semver.gt(migrationVersion, this.databaseVersion) && semver.lte(migrationVersion, this.serverVersion) && !executedMigrations.includes(migration.name)
} else {
// A down migration should be run even if the associated up migration wasn't executed before
return semver.lte(migrationVersion, this.databaseVersion) && semver.gt(migrationVersion, this.serverVersion)
}
})
.map((migration) => migration.name)
if (direction === 'down') {
return migrationsToRun.reverse()
} else {
return migrationsToRun
}
}
async updateMaxVersion() {
try {
await this.sequelize.query("UPDATE :migrationsMeta SET value = :maxVersion WHERE key = 'maxVersion'", {
replacements: { maxVersion: this.serverVersion, migrationsMeta: MigrationManager.MIGRATIONS_META_TABLE },
type: Sequelize.QueryTypes.UPDATE
})
} catch (error) {
throw new Error('Failed to update maxVersion in the migrationsMeta table.', { cause: error })
}
this.maxVersion = this.serverVersion
}
async updateDatabaseVersion() {
try {
await this.sequelize.query("UPDATE :migrationsMeta SET value = :version WHERE key = 'version'", {
replacements: { version: this.serverVersion, migrationsMeta: MigrationManager.MIGRATIONS_META_TABLE },
type: Sequelize.QueryTypes.UPDATE
})
} catch (error) {
throw new Error('Failed to update version in the migrationsMeta table.', { cause: error })
}
this.databaseVersion = this.serverVersion
}
}
module.exports = MigrationManager


@@ -0,0 +1,7 @@
# Migrations Changelog
Please add a record of every database migration that you create to this file. This will help us keep track of changes to the database schema over time.
| Server Version | Migration Script Name | Description |
| -------------- | --------------------- | ----------- |
| | | |


@@ -0,0 +1,49 @@
# Database Migrations
This directory contains all the database migration scripts for the server.
## What is a migration?
A migration is a script that changes the structure of the database. This can include creating tables, adding columns, or modifying existing columns. A migration script consists of two parts: an "up" script that applies the changes to the database, and a "down" script that undoes the changes.
## Guidelines for writing migrations
When writing a migration, keep the following guidelines in mind:
- You **_must_** name your migration script according to the following convention: `<server_version>-<migration_name>.js`. For example, `v2.14.0-create-users-table.js`.
- `server_version` should be the version of the server that the migration was created for (this should usually be the next server release).
- `migration_name` should be a short description of the changes that the migration makes.
- The script should export two async functions: `up` and `down`. The `up` function should contain the script that applies the changes to the database, and the `down` function should contain the script that undoes the changes. The `up` and `down` functions should accept a single object parameter with a `context` property that contains a reference to a Sequelize [`QueryInterface`](https://sequelize.org/docs/v6/other-topics/query-interface/) object, and a [Logger](https://github.com/advplyr/audiobookshelf/blob/423a2129d10c6d8aaac9e8c75941fa6283889602/server/Logger.js#L4) object for logging. A typical migration script might look like this:
```javascript
async function up({ context: { queryInterface, logger } }) {
// Upwards migration script
logger.info('migrating ...');
...
}
async function down({ context: { queryInterface, logger } }) {
// Downward migration script
logger.info('reverting ...');
...
}
module.exports = {up, down}
```
- Always implement both the `up` and `down` functions.
- The `up` and `down` functions should be idempotent (i.e., they should be safe to run multiple times); the sketch after this list shows one way to do this.
- Prefer using only the `queryInterface` and `logger` parameters, the `sequelize` module, and Node.js built-in modules in your migration scripts. You can require other modules, but be aware that they might not be available or might differ from the ones you tested with.
- It's your responsibility to make sure that the down migration reverts the changes made by the up migration.
- Log detailed information on every step of the migration. Use `logger.info()` and `logger.error()`.
- Test your migrations thoroughly before committing them.
- Write unit tests for your migrations (see `test/server/migrations` for an example).
- You can force a server version change by modifying the `version` field in `package.json` in your dev environment (but don't forget to revert it before committing).
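
For illustration, here is a minimal sketch of what such a migration could look like. The file name (`v2.14.0-add-example-column.js`), table, and column used here are hypothetical and not part of this repository; the sketch only demonstrates the exported `up`/`down` shape, an idempotence check via `queryInterface.describeTable`, and logging through the provided `logger`:

```javascript
// Hypothetical migration: v2.14.0-add-example-column.js (names are illustrative only)
const { DataTypes } = require('sequelize')

async function up({ context: { queryInterface, logger } }) {
  logger.info('[v2.14.0 migration] Adding books.exampleColumn')
  // Idempotence: only add the column if it does not already exist
  const table = await queryInterface.describeTable('books')
  if (!table.exampleColumn) {
    await queryInterface.addColumn('books', 'exampleColumn', { type: DataTypes.STRING, allowNull: true })
  }
  logger.info('[v2.14.0 migration] Done adding books.exampleColumn')
}

async function down({ context: { queryInterface, logger } }) {
  logger.info('[v2.14.0 migration] Removing books.exampleColumn')
  // Idempotence: only remove the column if it is present
  const table = await queryInterface.describeTable('books')
  if (table.exampleColumn) {
    await queryInterface.removeColumn('books', 'exampleColumn')
  }
  logger.info('[v2.14.0 migration] Done removing books.exampleColumn')
}

module.exports = { up, down }
```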
## How migrations are run
Migrations run automatically on server startup whenever the server detects that the server version has changed. Migrations are always run in server version order: up migrations from oldest to newest when the server version increased, and down migrations from newest to oldest when it decreased. Only the migrations relevant to the old and new server versions are run.
This means that you can switch between server releases without having to worry about running migrations manually. The server will automatically apply the necessary migrations when it starts.
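
As a rough sketch (not the exact implementation), selecting the relevant migrations boils down to a `semver` comparison of each script's version prefix against the database and server versions, mirroring `findMigrationsToRun` in `MigrationManager` with the executed-migrations bookkeeping omitted:

```javascript
// Minimal sketch of how relevant migrations are selected, based on
// MigrationManager.findMigrationsToRun in this PR (inputs simplified).
// Assumes migration names follow the v<semver>-<name>.js convention and are already sorted by version.
const semver = require('semver')

function migrationsToRun(migrationNames, databaseVersion, serverVersion) {
  const version = (name) => name.match(/^v?(\d+\.\d+\.\d+)/)[1]
  if (semver.gt(serverVersion, databaseVersion)) {
    // Upgrade: run up migrations newer than the database version, up to and including the server version
    return migrationNames.filter((name) => semver.gt(version(name), databaseVersion) && semver.lte(version(name), serverVersion))
  }
  // Downgrade: revert migrations above the server version, newest first
  return migrationNames.filter((name) => semver.lte(version(name), databaseVersion) && semver.gt(version(name), serverVersion)).reverse()
}

// Example: upgrading from 1.1.0 to 1.2.0 runs only the v1.1.1 and v1.2.0 scripts
// migrationsToRun(['v1.1.0-a.js', 'v1.1.1-b.js', 'v1.2.0-c.js'], '1.1.0', '1.2.0')
```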


@@ -0,0 +1,503 @@
const { expect } = require('chai')
const sinon = require('sinon')
const { Sequelize } = require('sequelize')
const fs = require('../../../server/libs/fsExtra')
const Logger = require('../../../server/Logger')
const MigrationManager = require('../../../server/managers/MigrationManager')
const path = require('path')
const { Umzug, memoryStorage } = require('../../../server/libs/umzug')
describe('MigrationManager', () => {
let sequelizeStub
let umzugStub
let migrationManager
let loggerInfoStub
let loggerErrorStub
let fsCopyStub
let fsMoveStub
let fsRemoveStub
let fsEnsureDirStub
let processExitStub
let configPath = '/path/to/config'
const serverVersion = '1.2.0'
beforeEach(() => {
sequelizeStub = sinon.createStubInstance(Sequelize)
umzugStub = {
migrations: sinon.stub(),
executed: sinon.stub(),
up: sinon.stub(),
down: sinon.stub()
}
sequelizeStub.getQueryInterface.returns({})
migrationManager = new MigrationManager(sequelizeStub, configPath)
migrationManager.fetchVersionsFromDatabase = sinon.stub().resolves()
migrationManager.copyMigrationsToConfigDir = sinon.stub().resolves()
migrationManager.updateMaxVersion = sinon.stub().resolves()
migrationManager.initUmzug = sinon.stub()
migrationManager.umzug = umzugStub
loggerInfoStub = sinon.stub(Logger, 'info')
loggerErrorStub = sinon.stub(Logger, 'error')
fsCopyStub = sinon.stub(fs, 'copy').resolves()
fsMoveStub = sinon.stub(fs, 'move').resolves()
fsRemoveStub = sinon.stub(fs, 'remove').resolves()
fsEnsureDirStub = sinon.stub(fs, 'ensureDir').resolves()
fsPathExistsStub = sinon.stub(fs, 'pathExists').resolves(true)
processExitStub = sinon.stub(process, 'exit')
})
afterEach(() => {
sinon.restore()
})
describe('init', () => {
it('should initialize the MigrationManager', async () => {
// arrange
migrationManager.databaseVersion = '1.1.0'
migrationManager.maxVersion = '1.1.0'
migrationManager.umzug = null
migrationManager.configPath = __dirname
// Act
await migrationManager.init(serverVersion)
// Assert
expect(migrationManager.serverVersion).to.equal(serverVersion)
expect(migrationManager.sequelize).to.equal(sequelizeStub)
expect(migrationManager.migrationsDir).to.equal(path.join(__dirname, 'migrations'))
expect(migrationManager.copyMigrationsToConfigDir.calledOnce).to.be.true
expect(migrationManager.updateMaxVersion.calledOnce).to.be.true
expect(migrationManager.initialized).to.be.true
})
it('should throw error if serverVersion is not provided', async () => {
// Act
try {
const result = await migrationManager.init()
expect.fail('Expected init to throw an error, but it did not.')
} catch (error) {
expect(error.message).to.equal('Invalid server version: undefined. Expected a version tag like v1.2.3.')
}
})
})
describe('runMigrations', () => {
it('should run up migrations successfully', async () => {
// Arrange
migrationManager.databaseVersion = '1.1.0'
migrationManager.maxVersion = '1.1.0'
migrationManager.serverVersion = '1.2.0'
migrationManager.initialized = true
umzugStub.migrations.resolves([{ name: 'v1.1.0-migration.js' }, { name: 'v1.1.1-migration.js' }, { name: 'v1.2.0-migration.js' }])
umzugStub.executed.resolves([{ name: 'v1.1.0-migration.js' }])
// Act
await migrationManager.runMigrations()
// Assert
expect(migrationManager.initUmzug.calledOnce).to.be.true
expect(umzugStub.up.calledOnce).to.be.true
expect(umzugStub.up.calledWith({ migrations: ['v1.1.1-migration.js', 'v1.2.0-migration.js'], rerun: 'ALLOW' })).to.be.true
expect(fsCopyStub.calledOnce).to.be.true
expect(fsCopyStub.calledWith(path.join(configPath, 'absdatabase.sqlite'), path.join(configPath, 'absdatabase.backup.sqlite'))).to.be.true
expect(fsRemoveStub.calledOnce).to.be.true
expect(fsRemoveStub.calledWith(path.join(configPath, 'absdatabase.backup.sqlite'))).to.be.true
expect(loggerInfoStub.calledWith(sinon.match('Migrations successfully applied'))).to.be.true
})
it('should run down migrations successfully', async () => {
// Arrange
migrationManager.databaseVersion = '1.2.0'
migrationManager.maxVersion = '1.2.0'
migrationManager.serverVersion = '1.1.0'
migrationManager.initialized = true
umzugStub.migrations.resolves([{ name: 'v1.1.0-migration.js' }, { name: 'v1.1.1-migration.js' }, { name: 'v1.2.0-migration.js' }])
umzugStub.executed.resolves([{ name: 'v1.1.0-migration.js' }, { name: 'v1.1.1-migration.js' }, { name: 'v1.2.0-migration.js' }])
// Act
await migrationManager.runMigrations()
// Assert
expect(migrationManager.initUmzug.calledOnce).to.be.true
expect(umzugStub.down.calledOnce).to.be.true
expect(umzugStub.down.calledWith({ migrations: ['v1.2.0-migration.js', 'v1.1.1-migration.js'], rerun: 'ALLOW' })).to.be.true
expect(fsCopyStub.calledOnce).to.be.true
expect(fsCopyStub.calledWith(path.join(configPath, 'absdatabase.sqlite'), path.join(configPath, 'absdatabase.backup.sqlite'))).to.be.true
expect(fsRemoveStub.calledOnce).to.be.true
expect(fsRemoveStub.calledWith(path.join(configPath, 'absdatabase.backup.sqlite'))).to.be.true
expect(loggerInfoStub.calledWith(sinon.match('Migrations successfully applied'))).to.be.true
})
it('should log that no migrations are needed if serverVersion equals databaseVersion', async () => {
// Arrange
migrationManager.serverVersion = '1.2.0'
migrationManager.databaseVersion = '1.2.0'
migrationManager.maxVersion = '1.2.0'
migrationManager.initialized = true
// Act
await migrationManager.runMigrations()
// Assert
expect(umzugStub.up.called).to.be.false
expect(loggerInfoStub.calledWith(sinon.match('Database is already up to date.'))).to.be.true
})
it('should handle migration failure and restore the original database', async () => {
// Arrange
migrationManager.serverVersion = '1.2.0'
migrationManager.databaseVersion = '1.1.0'
migrationManager.maxVersion = '1.1.0'
migrationManager.initialized = true
umzugStub.migrations.resolves([{ name: 'v1.2.0-migration.js' }])
umzugStub.executed.resolves([{ name: 'v1.1.0-migration.js' }])
umzugStub.up.rejects(new Error('Migration failed'))
const originalDbPath = path.join(configPath, 'absdatabase.sqlite')
const backupDbPath = path.join(configPath, 'absdatabase.backup.sqlite')
// Act
await migrationManager.runMigrations()
// Assert
expect(migrationManager.initUmzug.calledOnce).to.be.true
expect(umzugStub.up.calledOnce).to.be.true
expect(loggerErrorStub.calledWith(sinon.match('Migration failed'))).to.be.true
expect(fsMoveStub.calledWith(originalDbPath, sinon.match('absdatabase.failed.sqlite'), { overwrite: true })).to.be.true
expect(fsMoveStub.calledWith(backupDbPath, originalDbPath, { overwrite: true })).to.be.true
expect(loggerInfoStub.calledWith(sinon.match('Restored the original database'))).to.be.true
expect(processExitStub.calledOnce).to.be.true
})
})
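// Editorial sketch (not code from this PR): the back-up / restore flow the runMigrations tests
// above exercise, in simplified form. The helper name and parameter list are assumptions.
async function runMigrationsFlowSketch(umzug, fs, path, configPath, migrationsToRun, logger) {
  const originalDbPath = path.join(configPath, 'absdatabase.sqlite')
  const backupDbPath = path.join(configPath, 'absdatabase.backup.sqlite')
  // Snapshot the database before applying anything
  await fs.copy(originalDbPath, backupDbPath)
  try {
    await umzug.up({ migrations: migrationsToRun, rerun: 'ALLOW' })
    // Success: the snapshot is no longer needed
    await fs.remove(backupDbPath)
    logger.info('Migrations successfully applied')
  } catch (error) {
    logger.error('Migration failed', error)
    // Keep the broken database for inspection, then restore the snapshot and bail out
    await fs.move(originalDbPath, path.join(configPath, 'absdatabase.failed.sqlite'), { overwrite: true })
    await fs.move(backupDbPath, originalDbPath, { overwrite: true })
    logger.info('Restored the original database')
    process.exit(1)
  }
}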
describe('fetchVersionsFromDatabase', () => {
it('should fetch versions from the migrationsMeta table', async () => {
// Arrange
const sequelize = new Sequelize({ dialect: 'sqlite', storage: ':memory:', logging: false })
// Create a migrationsMeta table and populate it with version and maxVersion
await sequelize.query('CREATE TABLE migrationsMeta (key VARCHAR(255), value VARCHAR(255))')
await sequelize.query("INSERT INTO migrationsMeta (key, value) VALUES ('version', '1.1.0'), ('maxVersion', '1.1.0')")
const migrationManager = new MigrationManager(sequelize, configPath)
migrationManager.checkOrCreateMigrationsMetaTable = sinon.stub().resolves()
// Act
await migrationManager.fetchVersionsFromDatabase()
// Assert
expect(migrationManager.maxVersion).to.equal('1.1.0')
expect(migrationManager.databaseVersion).to.equal('1.1.0')
})
it('should create the migrationsMeta table if it does not exist and fetch versions from it', async () => {
// Arrange
const sequelize = new Sequelize({ dialect: 'sqlite', storage: ':memory:', logging: false })
const migrationManager = new MigrationManager(sequelize, configPath)
migrationManager.serverVersion = serverVersion
// Act
await migrationManager.fetchVersionsFromDatabase()
// Assert
const tableDescription = await sequelize.getQueryInterface().describeTable('migrationsMeta')
expect(tableDescription).to.deep.equal({
key: { type: 'VARCHAR(255)', allowNull: false, defaultValue: undefined, primaryKey: false, unique: false },
value: { type: 'VARCHAR(255)', allowNull: false, defaultValue: undefined, primaryKey: false, unique: false }
})
expect(migrationManager.maxVersion).to.equal('0.0.0')
expect(migrationManager.databaseVersion).to.equal(serverVersion)
})
it('should throw an error if the database query fails', async () => {
// Arrange
const sequelizeStub = sinon.createStubInstance(Sequelize)
sequelizeStub.query.rejects(new Error('Database query failed'))
const migrationManager = new MigrationManager(sequelizeStub, configPath)
migrationManager.checkOrCreateMigrationsMetaTable = sinon.stub().resolves()
// Act
try {
await migrationManager.fetchVersionsFromDatabase()
expect.fail('Expected fetchVersionsFromDatabase to throw an error, but it did not.')
} catch (error) {
// Assert
expect(error.message).to.equal('Database query failed')
}
})
})
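// Editorial sketch (not code from this PR): reading the two version rows back out of
// migrationsMeta with a raw query, roughly what the assertions above rely on.
async function fetchVersionsSketch(sequelize) {
  const { QueryTypes } = require('sequelize')
  const rows = await sequelize.query("SELECT key, value FROM migrationsMeta WHERE key IN ('version', 'maxVersion')", { type: QueryTypes.SELECT })
  const meta = Object.fromEntries(rows.map((row) => [row.key, row.value]))
  return { databaseVersion: meta.version, maxVersion: meta.maxVersion }
}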
describe('updateMaxVersion', () => {
it('should update the maxVersion in the database', async () => {
// Arrange
const sequelize = new Sequelize({ dialect: 'sqlite', storage: ':memory:', logging: false })
// Create a migrationsMeta table and populate it with version and maxVersion
await sequelize.query('CREATE TABLE migrationsMeta (key VARCHAR(255), value VARCHAR(255))')
await sequelize.query("INSERT INTO migrationsMeta (key, value) VALUES ('version', '1.1.0'), ('maxVersion', '1.1.0')")
const migrationManager = new MigrationManager(sequelize, configPath)
migrationManager.serverVersion = '1.2.0'
// Act
await migrationManager.updateMaxVersion()
// Assert
const [{ maxVersion }] = await sequelize.query("SELECT value AS maxVersion FROM migrationsMeta WHERE key = 'maxVersion'", {
type: Sequelize.QueryTypes.SELECT
})
expect(maxVersion).to.equal('1.2.0')
})
})
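// Editorial sketch (not code from this PR): the kind of UPDATE the updateMaxVersion test
// verifies; the SQL actually issued by MigrationManager may differ.
async function updateMaxVersionSketch(sequelize, serverVersion) {
  await sequelize.query("UPDATE migrationsMeta SET value = :version WHERE key = 'maxVersion'", {
    replacements: { version: serverVersion }
  })
}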
describe('extractVersionFromTag', () => {
it('should return null if tag is not provided', () => {
// Arrange
const migrationManager = new MigrationManager(sequelizeStub, configPath)
// Act
const result = migrationManager.extractVersionFromTag()
// Assert
expect(result).to.be.null
})
it('should return null if tag does not match the version format', () => {
// Arrange
const migrationManager = new MigrationManager(sequelizeStub, configPath)
const tag = 'invalid-tag'
// Act
const result = migrationManager.extractVersionFromTag(tag)
// Assert
expect(result).to.be.null
})
it('should extract the version from the tag', () => {
// Arrange
const migrationManager = new MigrationManager(sequelizeStub, configPath)
const tag = 'v1.2.3'
// Act
const result = migrationManager.extractVersionFromTag(tag)
// Assert
expect(result).to.equal('1.2.3')
})
})
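// Editorial sketch (not code from this PR): a tag parser consistent with the three cases above
// (no tag -> null, non-matching tag -> null, 'v1.2.3' -> '1.2.3').
function extractVersionFromTagSketch(tag) {
  if (!tag) return null
  const match = tag.match(/^v(\d+\.\d+\.\d+)/)
  return match ? match[1] : null
}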
describe('copyMigrationsToConfigDir', () => {
it('should copy migrations to the config directory', async () => {
// Arrange
const migrationManager = new MigrationManager(sequelizeStub, configPath)
migrationManager.migrationsDir = path.join(configPath, 'migrations')
const migrationsSourceDir = path.join(__dirname, '..', '..', '..', 'server', 'migrations')
const targetDir = migrationManager.migrationsDir
const files = ['migration1.js', 'migration2.js', 'readme.md']
const readdirStub = sinon.stub(fs, 'readdir').resolves(files)
// Act
await migrationManager.copyMigrationsToConfigDir()
// Assert
expect(fsEnsureDirStub.calledOnce).to.be.true
expect(fsEnsureDirStub.calledWith(targetDir)).to.be.true
expect(readdirStub.calledOnce).to.be.true
expect(readdirStub.calledWith(migrationsSourceDir)).to.be.true
expect(fsCopyStub.calledTwice).to.be.true
expect(fsCopyStub.calledWith(path.join(migrationsSourceDir, 'migration1.js'), path.join(targetDir, 'migration1.js'))).to.be.true
expect(fsCopyStub.calledWith(path.join(migrationsSourceDir, 'migration2.js'), path.join(targetDir, 'migration2.js'))).to.be.true
})
it('should throw an error if copying the migrations fails', async () => {
// Arrange
const migrationManager = new MigrationManager(sequelizeStub, configPath)
migrationManager.migrationsDir = path.join(configPath, 'migrations')
const migrationsSourceDir = path.join(__dirname, '..', '..', '..', 'server', 'migrations')
const targetDir = migrationManager.migrationsDir
const files = ['migration1.js', 'migration2.js', 'readme.md']
const readdirStub = sinon.stub(fs, 'readdir').resolves(files)
fsCopyStub.restore()
fsCopyStub = sinon.stub(fs, 'copy').rejects()
// Act
let thrownError = null
try {
await migrationManager.copyMigrationsToConfigDir()
} catch (error) {
thrownError = error
}
// Assert
expect(thrownError).to.be.an('error')
expect(fsEnsureDirStub.calledOnce).to.be.true
expect(fsEnsureDirStub.calledWith(targetDir)).to.be.true
expect(readdirStub.calledOnce).to.be.true
expect(readdirStub.calledWith(migrationsSourceDir)).to.be.true
expect(fsCopyStub.calledTwice).to.be.true
expect(fsCopyStub.calledWith(path.join(migrationsSourceDir, 'migration1.js'), path.join(targetDir, 'migration1.js'))).to.be.true
expect(fsCopyStub.calledWith(path.join(migrationsSourceDir, 'migration2.js'), path.join(targetDir, 'migration2.js'))).to.be.true
})
})
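// Editorial sketch (not code from this PR): the copy step covered above. Only *.js files from
// the bundled migrations directory end up in the config directory ('readme.md' is skipped).
async function copyMigrationsSketch(fs, path, sourceDir, targetDir) {
  await fs.ensureDir(targetDir)
  const files = await fs.readdir(sourceDir)
  const jsFiles = files.filter((file) => path.extname(file) === '.js')
  await Promise.all(jsFiles.map((file) => fs.copy(path.join(sourceDir, file), path.join(targetDir, file))))
}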
describe('findMigrationsToRun', () => {
it('should return migrations to run when direction is "up"', () => {
// Arrange
const migrations = [{ name: 'v1.0.0-migration.js' }, { name: 'v1.1.0-migration.js' }, { name: 'v1.2.0-migration.js' }, { name: 'v1.3.0-migration.js' }]
const executedMigrations = ['v1.0.0-migration.js']
migrationManager.databaseVersion = '1.0.0'
migrationManager.serverVersion = '1.2.0'
const direction = 'up'
// Act
const result = migrationManager.findMigrationsToRun(migrations, executedMigrations, direction)
// Assert
expect(result).to.deep.equal(['v1.1.0-migration.js', 'v1.2.0-migration.js'])
})
it('should return migrations to run when direction is "down"', () => {
// Arrange
const migrations = [{ name: 'v1.0.0-migration.js' }, { name: 'v1.1.0-migration.js' }, { name: 'v1.2.0-migration.js' }, { name: 'v1.3.0-migration.js' }]
const executedMigrations = ['v1.2.0-migration.js', 'v1.3.0-migration.js']
migrationManager.databaseVersion = '1.3.0'
migrationManager.serverVersion = '1.2.0'
const direction = 'down'
// Act
const result = migrationManager.findMigrationsToRun(migrations, executedMigrations, direction)
// Assert
expect(result).to.deep.equal(['v1.3.0-migration.js'])
})
it('should return empty array when no migrations to run up', () => {
// Arrange
const migrations = [{ name: 'v1.0.0-migration.js' }, { name: 'v1.1.0-migration.js' }, { name: 'v1.2.0-migration.js' }, { name: 'v1.3.0-migration.js' }]
const executedMigrations = ['v1.0.0-migration.js', 'v1.1.0-migration.js', 'v1.2.0-migration.js', 'v1.3.0-migration.js']
migrationManager.databaseVersion = '1.3.0'
migrationManager.serverVersion = '1.4.0'
const direction = 'up'
// Act
const result = migrationManager.findMigrationsToRun(migrations, executedMigrations, direction)
// Assert
expect(result).to.deep.equal([])
})
it('should return empty array when no migrations to run down', () => {
// Arrange
const migrations = [{ name: 'v1.0.0-migration.js' }, { name: 'v1.1.0-migration.js' }, { name: 'v1.2.0-migration.js' }, { name: 'v1.3.0-migration.js' }]
const executedMigrations = []
migrationManager.databaseVersion = '1.4.0'
migrationManager.serverVersion = '1.3.0'
const direction = 'down'
// Act
const result = migrationManager.findMigrationsToRun(migrations, executedMigrations, direction)
// Assert
expect(result).to.deep.equal([])
})
it('should return down migrations to run when direction is "down" and up migration was not executed', () => {
// Arrange
const migrations = [{ name: 'v1.0.0-migration.js' }, { name: 'v1.1.0-migration.js' }, { name: 'v1.2.0-migration.js' }, { name: 'v1.3.0-migration.js' }]
const executedMigrations = []
migrationManager.databaseVersion = '1.3.0'
migrationManager.serverVersion = '1.0.0'
const direction = 'down'
// Act
const result = migrationManager.findMigrationsToRun(migrations, executedMigrations, direction)
// Assert
expect(result).to.deep.equal(['v1.3.0-migration.js', 'v1.2.0-migration.js', 'v1.1.0-migration.js'])
})
it('should return empty array when direction is "down" and server version is higher than database version', () => {
// Arrange
const migrations = [{ name: 'v1.0.0-migration.js' }, { name: 'v1.1.0-migration.js' }, { name: 'v1.2.0-migration.js' }, { name: 'v1.3.0-migration.js' }]
const executedMigrations = ['v1.0.0-migration.js', 'v1.1.0-migration.js', 'v1.2.0-migration.js', 'v1.3.0-migration.js']
migrationManager.databaseVersion = '1.0.0'
migrationManager.serverVersion = '1.3.0'
const direction = 'down'
// Act
const result = migrationManager.findMigrationsToRun(migrations, executedMigrations, direction)
// Assert
expect(result).to.deep.equal([])
})
it('should return empty array when direction is "up" and server version is lower than database version', () => {
// Arrange
const migrations = [{ name: 'v1.0.0-migration.js' }, { name: 'v1.1.0-migration.js' }, { name: 'v1.2.0-migration.js' }, { name: 'v1.3.0-migration.js' }]
const executedMigrations = ['v1.0.0-migration.js', 'v1.1.0-migration.js', 'v1.2.0-migration.js', 'v1.3.0-migration.js']
migrationManager.databaseVersion = '1.3.0'
migrationManager.serverVersion = '1.0.0'
const direction = 'up'
// Act
const result = migrationManager.findMigrationsToRun(migrations, executedMigrations, direction)
// Assert
expect(result).to.deep.equal([])
})
it('should return up migrations to run when server version is between migrations', () => {
// Arrange
const migrations = [{ name: 'v1.0.0-migration.js' }, { name: 'v1.1.0-migration.js' }, { name: 'v1.2.0-migration.js' }, { name: 'v1.3.0-migration.js' }]
const executedMigrations = ['v1.0.0-migration.js', 'v1.1.0-migration.js']
migrationManager.databaseVersion = '1.1.0'
migrationManager.serverVersion = '1.2.3'
const direction = 'up'
// Act
const result = migrationManager.findMigrationsToRun(migrations, executedMigrations, direction)
// Assert
expect(result).to.deep.equal(['v1.2.0-migration.js'])
})
it('should return down migrations to run when server version is between migrations', () => {
// Arrange
const migrations = [{ name: 'v1.0.0-migration.js' }, { name: 'v1.1.0-migration.js' }, { name: 'v1.2.0-migration.js' }, { name: 'v1.3.0-migration.js' }]
const executedMigrations = ['v1.0.0-migration.js', 'v1.1.0-migration.js', 'v1.2.0-migration.js']
migrationManager.databaseVersion = '1.2.0'
migrationManager.serverVersion = '1.1.3'
const direction = 'down'
// Act
const result = migrationManager.findMigrationsToRun(migrations, executedMigrations, direction)
// Assert
expect(result).to.deep.equal(['v1.2.0-migration.js'])
})
})
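// Editorial sketch (not code from this PR): selection logic consistent with the cases above,
// using the semver package this PR adds. Executed migrations are ignored here because the
// manager runs umzug with rerun: 'ALLOW'; the real implementation may differ.
function findMigrationsToRunSketch(migrations, databaseVersion, serverVersion, direction) {
  const semver = require('semver')
  const versionOf = (name) => name.replace(/^v/, '').replace(/-migration\.js$/, '')
  const names = migrations.map((m) => m.name)
  if (direction === 'up') {
    // Everything newer than the database but not newer than the server, oldest first
    return names.filter((name) => semver.gt(versionOf(name), databaseVersion) && semver.lte(versionOf(name), serverVersion))
  }
  // 'down': everything newer than the server but not newer than the database, newest first
  return names.filter((name) => semver.gt(versionOf(name), serverVersion) && semver.lte(versionOf(name), databaseVersion)).reverse()
}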
describe('initUmzug', () => {
it('should initialize the umzug instance with migrations in the proper order', async () => {
// Arrange
const readdirStub = sinon.stub(fs, 'readdir').resolves(['v1.0.0-migration.js', 'v1.10.0-migration.js', 'v1.2.0-migration.js', 'v1.1.0-migration.js'])
const readFileSyncStub = sinon.stub(fs, 'readFileSync').returns('module.exports = { up: () => {}, down: () => {} }')
const umzugStorage = memoryStorage()
migrationManager = new MigrationManager(sequelizeStub, configPath)
migrationManager.migrationsDir = path.join(configPath, 'migrations')
const resolvedMigrationNames = ['v1.0.0-migration.js', 'v1.1.0-migration.js', 'v1.2.0-migration.js', 'v1.10.0-migration.js']
const resolvedMigrationPaths = resolvedMigrationNames.map((name) => path.resolve(path.join(migrationManager.migrationsDir, name)))
// Act
await migrationManager.initUmzug(umzugStorage)
// Assert
expect(readdirStub.calledOnce).to.be.true
expect(migrationManager.umzug).to.be.an.instanceOf(Umzug)
const migrations = await migrationManager.umzug.migrations()
expect(migrations.map((m) => m.name)).to.deep.equal(resolvedMigrationNames)
expect(migrations.map((m) => m.path)).to.deep.equal(resolvedMigrationPaths)
})
})
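// Editorial sketch (not code from this PR): the semver-aware ordering the test above expects,
// where 'v1.10.0' must sort after 'v1.2.0' (plain lexicographic sorting gets this wrong).
function sortMigrationFilesSketch(fileNames) {
  const semver = require('semver')
  const versionOf = (name) => name.replace(/^v/, '').replace(/-migration\.js$/, '')
  return [...fileNames].sort((a, b) => semver.compare(versionOf(a), versionOf(b)))
}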
})

View File

@ -0,0 +1,9 @@
async function up() {
console.log('v1.0.0 up')
}
async function down() {
console.log('v1.0.0 down')
}
module.exports = { up, down }

View File

@ -0,0 +1,9 @@
async function up() {
console.log('v1.1.0 up')
}
async function down() {
console.log('v1.1.0 down')
}
module.exports = { up, down }

View File

@ -0,0 +1,9 @@
async function up() {
console.log('v1.10.0 up')
}
async function down() {
console.log('v1.10.0 down')
}
module.exports = { up, down }

View File

@ -0,0 +1,9 @@
async function up() {
console.log('v1.2.0 up')
}
async function down() {
console.log('v1.2.0 down')
}
module.exports = { up, down }

View File

@ -0,0 +1,50 @@
const { DataTypes } = require('sequelize')
/**
* @typedef MigrationContext
* @property {import('sequelize').QueryInterface} queryInterface - a Sequelize QueryInterface object.
* @property {import('../Logger')} logger - a Logger object.
*
* @typedef MigrationOptions
* @property {MigrationContext} context - an object containing the migration context.
*/
/**
* This is an example of an upward migration script.
*
* @param {MigrationOptions} options - an object containing the migration context.
* @returns {Promise<void>} - A promise that resolves when the migration is complete.
*/
async function up({ context: { queryInterface, logger } }) {
logger.info('Running migration_example up...')
logger.info('Creating example_table...')
await queryInterface.createTable('example_table', {
id: {
type: DataTypes.INTEGER,
primaryKey: true,
autoIncrement: true
},
name: {
type: DataTypes.STRING,
allowNull: false
}
})
logger.info('example_table created.')
logger.info('migration_example up complete.')
}
/**
* This is an example of a downward migration script.
*
* @param {MigrationOptions} options - an object containing the migration context.
* @returns {Promise<void>} - A promise that resolves when the migration is complete.
*/
async function down({ context: { queryInterface, logger } }) {
logger.info('Running migration_example down...')
logger.info('Dropping example_table...')
await queryInterface.dropTable('example_table')
logger.info('example_table dropped.')
logger.info('migration_example down complete.')
}
module.exports = { up, down }
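// Editorial sketch (not code from this PR): one way a script like the one above is typically
// wired into Umzug. The storage and logger choices here are assumptions, not necessarily what
// MigrationManager does.
async function runExampleMigrationSketch(sequelize, logger) {
  const { Umzug, SequelizeStorage } = require('umzug')
  const umzug = new Umzug({
    migrations: { glob: 'migrations/*.js' },
    context: { queryInterface: sequelize.getQueryInterface(), logger },
    storage: new SequelizeStorage({ sequelize }),
    logger: console
  })
  await umzug.up() // each migration's up()/down() receives { context } as used above
}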

View File

@ -0,0 +1,53 @@
const { expect } = require('chai')
const sinon = require('sinon')
const { up, down } = require('./v0.0.1-migration_example')
const { Sequelize } = require('sequelize')
const Logger = require('../../../server/Logger')
describe('migration_example', () => {
let sequelize
let queryInterface
let loggerInfoStub
beforeEach(() => {
sequelize = new Sequelize({ dialect: 'sqlite', storage: ':memory:', logging: false })
queryInterface = sequelize.getQueryInterface()
loggerInfoStub = sinon.stub(Logger, 'info')
})
afterEach(() => {
sinon.restore()
})
describe('up', () => {
it('should create example_table', async () => {
await up({ context: { queryInterface, logger: Logger } })
expect(loggerInfoStub.callCount).to.equal(4)
expect(loggerInfoStub.getCall(0).calledWith(sinon.match('Running migration_example up...'))).to.be.true
expect(loggerInfoStub.getCall(1).calledWith(sinon.match('Creating example_table...'))).to.be.true
expect(loggerInfoStub.getCall(2).calledWith(sinon.match('example_table created.'))).to.be.true
expect(loggerInfoStub.getCall(3).calledWith(sinon.match('migration_example up complete.'))).to.be.true
expect(await queryInterface.showAllTables()).to.include('example_table')
const tableDescription = await queryInterface.describeTable('example_table')
expect(tableDescription).to.deep.equal({
id: { type: 'INTEGER', allowNull: true, defaultValue: undefined, primaryKey: true, unique: false },
name: { type: 'VARCHAR(255)', allowNull: false, defaultValue: undefined, primaryKey: false, unique: false }
})
})
})
describe('down', () => {
it('should drop example_table', async () => {
await up({ context: { queryInterface, logger: Logger } })
await down({ context: { queryInterface, logger: Logger } })
expect(loggerInfoStub.callCount).to.equal(8)
expect(loggerInfoStub.getCall(4).calledWith(sinon.match('Running migration_example down...'))).to.be.true
expect(loggerInfoStub.getCall(5).calledWith(sinon.match('Dropping example_table...'))).to.be.true
expect(loggerInfoStub.getCall(6).calledWith(sinon.match('example_table dropped.'))).to.be.true
expect(loggerInfoStub.getCall(7).calledWith(sinon.match('migration_example down complete.'))).to.be.true
expect(await queryInterface.showAllTables()).not.to.include('example_table')
})
})
})