Mirror of https://github.com/advplyr/audiobookshelf.git (synced 2025-08-04 02:05:06 +02:00)

Commit 6fb1202c1c (parent 8a28029809): Put umzug in server/libs and remove unneeded dependencies from it

15 changed files with 808 additions and 526 deletions
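
The practical effect for server code is that the migration runner is now required from the vendored copy instead of the npm package, as the MigrationManager hunks at the end of this diff show. A minimal sketch of the new import, assuming a caller one directory below server/ (the relative path is illustrative):

  // load the vendored copy in server/libs instead of the 'umzug' npm package
  const { Umzug, SequelizeStorage, JSONStorage, memoryStorage } = require('../libs/umzug')

The vendored index.js re-exports everything from umzug.js, storage/ and types.js, so existing call sites only need their require path updated.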
server/libs/umzug/LICENSE (new file, 21 lines)
@@ -0,0 +1,21 @@
The MIT License (MIT)

Copyright (c) 2014-2017 Sequelize contributors

Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:

The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.

THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.
server/libs/umzug/index.js (new file, 31 lines)
@@ -0,0 +1,31 @@
'use strict'
var __createBinding =
  (this && this.__createBinding) ||
  (Object.create
    ? function (o, m, k, k2) {
        if (k2 === undefined) k2 = k
        var desc = Object.getOwnPropertyDescriptor(m, k)
        if (!desc || ('get' in desc ? !m.__esModule : desc.writable || desc.configurable)) {
          desc = {
            enumerable: true,
            get: function () {
              return m[k]
            }
          }
        }
        Object.defineProperty(o, k2, desc)
      }
    : function (o, m, k, k2) {
        if (k2 === undefined) k2 = k
        o[k2] = m[k]
      })
var __exportStar =
  (this && this.__exportStar) ||
  function (m, exports) {
    for (var p in m) if (p !== 'default' && !Object.prototype.hasOwnProperty.call(exports, p)) __createBinding(exports, m, p)
  }
Object.defineProperty(exports, '__esModule', { value: true })
__exportStar(require('./umzug'), exports)
__exportStar(require('./storage'), exports)
__exportStar(require('./types'), exports)
//# sourceMappingURL=index.js.map
server/libs/umzug/storage/contract.js (new file, 18 lines)
@@ -0,0 +1,18 @@
"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
exports.verifyUmzugStorage = exports.isUmzugStorage = void 0;
function isUmzugStorage(arg) {
    return (arg &&
        typeof arg.logMigration === 'function' &&
        typeof arg.unlogMigration === 'function' &&
        typeof arg.executed === 'function');
}
exports.isUmzugStorage = isUmzugStorage;
const verifyUmzugStorage = (arg) => {
    if (!isUmzugStorage(arg)) {
        throw new Error(`Invalid umzug storage`);
    }
    return arg;
};
exports.verifyUmzugStorage = verifyUmzugStorage;
//# sourceMappingURL=contract.js.map
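
isUmzugStorage only duck-types the three methods a storage backend must expose, so any object with async logMigration, unlogMigration and executed functions passes verifyUmzugStorage. A purely illustrative sketch (the console-backed storage below is hypothetical, and the require path depends on the caller's location):

  const { verifyUmzugStorage } = require('../libs/umzug')

  const customStorage = verifyUmzugStorage({
    async logMigration({ name }) { console.log('ran', name) },        // record an applied migration
    async unlogMigration({ name }) { console.log('reverted', name) }, // forget a reverted migration
    async executed() { return [] }                                    // nothing is persisted in this toy example
  })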
server/libs/umzug/storage/index.js (new file, 24 lines)
@@ -0,0 +1,24 @@
"use strict";
var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
    if (k2 === undefined) k2 = k;
    var desc = Object.getOwnPropertyDescriptor(m, k);
    if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) {
      desc = { enumerable: true, get: function() { return m[k]; } };
    }
    Object.defineProperty(o, k2, desc);
}) : (function(o, m, k, k2) {
    if (k2 === undefined) k2 = k;
    o[k2] = m[k];
}));
var __exportStar = (this && this.__exportStar) || function(m, exports) {
    for (var p in m) if (p !== "default" && !Object.prototype.hasOwnProperty.call(exports, p)) __createBinding(exports, m, p);
};
Object.defineProperty(exports, "__esModule", { value: true });
// codegen:start {preset: barrel}
__exportStar(require("./contract"), exports);
__exportStar(require("./json"), exports);
__exportStar(require("./memory"), exports);
__exportStar(require("./mongodb"), exports);
__exportStar(require("./sequelize"), exports);
// codegen:end
//# sourceMappingURL=index.js.map
server/libs/umzug/storage/json.js (new file, 61 lines)
@@ -0,0 +1,61 @@
"use strict";
var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
    if (k2 === undefined) k2 = k;
    var desc = Object.getOwnPropertyDescriptor(m, k);
    if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) {
      desc = { enumerable: true, get: function() { return m[k]; } };
    }
    Object.defineProperty(o, k2, desc);
}) : (function(o, m, k, k2) {
    if (k2 === undefined) k2 = k;
    o[k2] = m[k];
}));
var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {
    Object.defineProperty(o, "default", { enumerable: true, value: v });
}) : function(o, v) {
    o["default"] = v;
});
var __importStar = (this && this.__importStar) || function (mod) {
    if (mod && mod.__esModule) return mod;
    var result = {};
    if (mod != null) for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k);
    __setModuleDefault(result, mod);
    return result;
};
Object.defineProperty(exports, "__esModule", { value: true });
exports.JSONStorage = void 0;
const fs_1 = require("fs");
const path = __importStar(require("path"));
const filesystem = {
    /** reads a file as a string or returns null if file doesn't exist */
    async readAsync(filepath) {
        return fs_1.promises.readFile(filepath).then(c => c.toString(), () => null);
    },
    /** writes a string as file contents, creating its parent directory if necessary */
    async writeAsync(filepath, content) {
        await fs_1.promises.mkdir(path.dirname(filepath), { recursive: true });
        await fs_1.promises.writeFile(filepath, content);
    },
};
class JSONStorage {
    constructor(options) {
        var _a;
        this.path = (_a = options === null || options === void 0 ? void 0 : options.path) !== null && _a !== void 0 ? _a : path.join(process.cwd(), 'umzug.json');
    }
    async logMigration({ name: migrationName }) {
        const loggedMigrations = await this.executed();
        loggedMigrations.push(migrationName);
        await filesystem.writeAsync(this.path, JSON.stringify(loggedMigrations, null, 2));
    }
    async unlogMigration({ name: migrationName }) {
        const loggedMigrations = await this.executed();
        const updatedMigrations = loggedMigrations.filter(name => name !== migrationName);
        await filesystem.writeAsync(this.path, JSON.stringify(updatedMigrations, null, 2));
    }
    async executed() {
        const content = await filesystem.readAsync(this.path);
        return content ? JSON.parse(content) : [];
    }
}
exports.JSONStorage = JSONStorage;
//# sourceMappingURL=json.js.map
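
JSONStorage is the default backend when an Umzug instance is constructed without a storage option: it keeps the executed migration names as a JSON array on disk, defaulting to umzug.json in the working directory. A small usage sketch (the file path and migration name are assumptions for illustration, and the awaits belong inside an async function):

  const { JSONStorage } = require('../libs/umzug')

  const storage = new JSONStorage({ path: '/tmp/example-migrations.json' })
  await storage.logMigration({ name: '001-init.js' })    // appends to the JSON array
  await storage.executed()                               // ['001-init.js']
  await storage.unlogMigration({ name: '001-init.js' })  // filters it back out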
server/libs/umzug/storage/memory.js (new file, 17 lines)
@@ -0,0 +1,17 @@
"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
exports.memoryStorage = void 0;
const memoryStorage = () => {
    let executed = [];
    return {
        async logMigration({ name }) {
            executed.push(name);
        },
        async unlogMigration({ name }) {
            executed = executed.filter(n => n !== name);
        },
        executed: async () => [...executed],
    };
};
exports.memoryStorage = memoryStorage;
//# sourceMappingURL=memory.js.map
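
memoryStorage keeps the executed list in a closure, which makes it convenient for tests where nothing should touch disk or a database. A sketch (the migration name is illustrative, and the awaits belong inside an async test):

  const { memoryStorage } = require('../libs/umzug')

  const storage = memoryStorage()
  await storage.logMigration({ name: '001-example.js' })  // recorded only in memory
  await storage.executed()                                // ['001-example.js']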
server/libs/umzug/storage/mongodb.js (new file, 31 lines)
@@ -0,0 +1,31 @@
"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
exports.MongoDBStorage = void 0;
function isMongoDBCollectionOptions(arg) {
    return Boolean(arg.collection);
}
class MongoDBStorage {
    constructor(options) {
        var _a, _b;
        if (!options || (!options.collection && !options.connection)) {
            throw new Error('MongoDB Connection or Collection required');
        }
        this.collection = isMongoDBCollectionOptions(options)
            ? options.collection
            : options.connection.collection((_a = options.collectionName) !== null && _a !== void 0 ? _a : 'migrations');
        this.connection = options.connection; // TODO remove this
        this.collectionName = (_b = options.collectionName) !== null && _b !== void 0 ? _b : 'migrations'; // TODO remove this
    }
    async logMigration({ name: migrationName }) {
        await this.collection.insertOne({ migrationName });
    }
    async unlogMigration({ name: migrationName }) {
        await this.collection.deleteOne({ migrationName });
    }
    async executed() {
        const records = await this.collection.find({}).sort({ migrationName: 1 }).toArray();
        return records.map(r => r.migrationName);
    }
}
exports.MongoDBStorage = MongoDBStorage;
//# sourceMappingURL=mongodb.js.map
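
MongoDBStorage accepts either a ready-made collection or a connection plus an optional collectionName (defaulting to 'migrations'). Audiobookshelf itself runs on SQLite, so this backend is unused here; the sketch below assumes a connected mongodb driver client named client purely for illustration:

  const { MongoDBStorage } = require('../libs/umzug')

  // either hand over a collection directly...
  const storageA = new MongoDBStorage({ collection: client.db('app').collection('migrations') })
  // ...or pass a connection (a Db object) and let the storage look the collection up by name
  const storageB = new MongoDBStorage({ connection: client.db('app'), collectionName: 'migrations' })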
server/libs/umzug/storage/sequelize.js (new file, 85 lines)
@@ -0,0 +1,85 @@
"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
exports.SequelizeStorage = void 0;
const DIALECTS_WITH_CHARSET_AND_COLLATE = new Set(['mysql', 'mariadb']);
class SequelizeStorage {
    /**
    Constructs Sequelize based storage. Migrations will be stored in a SequelizeMeta table using the given instance of Sequelize.

    If a model is given, it will be used directly as the model for the SequelizeMeta table. Otherwise, it will be created automatically according to the given options.

    If the table does not exist it will be created automatically upon the logging of the first migration.
    */
    constructor(options) {
        var _a, _b, _c, _d, _e, _f;
        if (!options || (!options.model && !options.sequelize)) {
            throw new Error('One of "sequelize" or "model" storage option is required');
        }
        this.sequelize = (_a = options.sequelize) !== null && _a !== void 0 ? _a : options.model.sequelize;
        this.columnType = (_b = options.columnType) !== null && _b !== void 0 ? _b : this.sequelize.constructor.DataTypes.STRING;
        this.columnName = (_c = options.columnName) !== null && _c !== void 0 ? _c : 'name';
        this.timestamps = (_d = options.timestamps) !== null && _d !== void 0 ? _d : false;
        this.modelName = (_e = options.modelName) !== null && _e !== void 0 ? _e : 'SequelizeMeta';
        this.tableName = options.tableName;
        this.schema = options.schema;
        this.model = (_f = options.model) !== null && _f !== void 0 ? _f : this.getModel();
    }
    getModel() {
        var _a;
        if (this.sequelize.isDefined(this.modelName)) {
            // eslint-disable-next-line @typescript-eslint/no-unsafe-return
            return this.sequelize.model(this.modelName);
        }
        const dialectName = (_a = this.sequelize.dialect) === null || _a === void 0 ? void 0 : _a.name;
        const hasCharsetAndCollate = dialectName && DIALECTS_WITH_CHARSET_AND_COLLATE.has(dialectName);
        return this.sequelize.define(this.modelName, {
            [this.columnName]: {
                type: this.columnType,
                allowNull: false,
                unique: true,
                primaryKey: true,
                autoIncrement: false,
            },
        }, {
            tableName: this.tableName,
            schema: this.schema,
            timestamps: this.timestamps,
            charset: hasCharsetAndCollate ? 'utf8' : undefined,
            collate: hasCharsetAndCollate ? 'utf8_unicode_ci' : undefined,
        });
    }
    async syncModel() {
        await this.model.sync();
    }
    async logMigration({ name: migrationName }) {
        await this.syncModel();
        await this.model.create({
            [this.columnName]: migrationName,
        });
    }
    async unlogMigration({ name: migrationName }) {
        await this.syncModel();
        await this.model.destroy({
            where: {
                [this.columnName]: migrationName,
            },
        });
    }
    async executed() {
        await this.syncModel();
        const migrations = await this.model.findAll({ order: [[this.columnName, 'ASC']] });
        return migrations.map(migration => {
            const name = migration[this.columnName];
            if (typeof name !== 'string') {
                throw new TypeError(`Unexpected migration name type: expected string, got ${typeof name}`);
            }
            return name;
        });
    }
    // TODO remove this
    _model() {
        return this.model;
    }
}
exports.SequelizeStorage = SequelizeStorage;
//# sourceMappingURL=sequelize.js.map
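
SequelizeStorage is the backend the server actually uses: each executed migration becomes a row in a SequelizeMeta table, keyed by the name column. A minimal sketch, assuming an in-memory SQLite instance and an illustrative migration name (the awaits belong inside an async function):

  const { Sequelize } = require('sequelize')
  const { SequelizeStorage } = require('../libs/umzug')

  const sequelize = new Sequelize({ dialect: 'sqlite', storage: ':memory:' })
  const storage = new SequelizeStorage({ sequelize })   // defines or reuses the SequelizeMeta model
  await storage.logMigration({ name: '001-init.js' })   // sync() creates the table on first use
  await storage.executed()                              // ['001-init.js']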
server/libs/umzug/templates.js (new file, 32 lines)
@@ -0,0 +1,32 @@
'use strict'
/* eslint-disable unicorn/template-indent */
// templates for migration file creation
Object.defineProperty(exports, '__esModule', { value: true })
exports.sqlDown = exports.sqlUp = exports.mjs = exports.ts = exports.js = void 0
exports.js = `
/** @type {import('umzug').MigrationFn<any>} */
exports.up = async params => {};

/** @type {import('umzug').MigrationFn<any>} */
exports.down = async params => {};
`.trimStart()
exports.ts = `
import type { MigrationFn } from 'umzug';

export const up: MigrationFn = async params => {};
export const down: MigrationFn = async params => {};
`.trimStart()
exports.mjs = `
/** @type {import('umzug').MigrationFn<any>} */
export const up = async params => {};

/** @type {import('umzug').MigrationFn<any>} */
export const down = async params => {};
`.trimStart()
exports.sqlUp = `
-- up migration
`.trimStart()
exports.sqlDown = `
-- down migration
`.trimStart()
//# sourceMappingURL=templates.js.map
server/libs/umzug/types.js (new file, 12 lines)
@@ -0,0 +1,12 @@
'use strict'
Object.defineProperty(exports, '__esModule', { value: true })
exports.RerunBehavior = void 0
exports.RerunBehavior = {
  /** Hard error if an up migration that has already been run, or a down migration that hasn't, is encountered */
  THROW: 'THROW',
  /** Silently skip up migrations that have already been run, or down migrations that haven't */
  SKIP: 'SKIP',
  /** Re-run up migrations that have already been run, or down migrations that haven't */
  ALLOW: 'ALLOW'
}
//# sourceMappingURL=types.js.map
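
RerunBehavior feeds the rerun option of up() and down(): SKIP silently drops named migrations that are already in the target state, ALLOW forces them to run again, and omitting rerun effectively behaves like THROW because a named migration that is not pending (or not executed, for down) cannot be resolved. A sketch of how it would be passed (the migration name and the umzug instance are illustrative):

  const { RerunBehavior } = require('../libs/umzug')

  // re-apply a specific migration even though it is already logged as executed
  await umzug.up({ migrations: ['v2.14.0-example.js'], rerun: RerunBehavior.ALLOW })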
server/libs/umzug/umzug.js (new file, 386 lines)
@@ -0,0 +1,386 @@
'use strict'
var __createBinding =
  (this && this.__createBinding) ||
  (Object.create
    ? function (o, m, k, k2) {
        if (k2 === undefined) k2 = k
        var desc = Object.getOwnPropertyDescriptor(m, k)
        if (!desc || ('get' in desc ? !m.__esModule : desc.writable || desc.configurable)) {
          desc = {
            enumerable: true,
            get: function () {
              return m[k]
            }
          }
        }
        Object.defineProperty(o, k2, desc)
      }
    : function (o, m, k, k2) {
        if (k2 === undefined) k2 = k
        o[k2] = m[k]
      })
var __setModuleDefault =
  (this && this.__setModuleDefault) ||
  (Object.create
    ? function (o, v) {
        Object.defineProperty(o, 'default', { enumerable: true, value: v })
      }
    : function (o, v) {
        o['default'] = v
      })
var __importStar =
  (this && this.__importStar) ||
  function (mod) {
    if (mod && mod.__esModule) return mod
    var result = {}
    if (mod != null) for (var k in mod) if (k !== 'default' && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k)
    __setModuleDefault(result, mod)
    return result
  }
var __importDefault =
  (this && this.__importDefault) ||
  function (mod) {
    return mod && mod.__esModule ? mod : { default: mod }
  }
var _a
Object.defineProperty(exports, '__esModule', { value: true })
exports.Umzug = exports.MigrationError = void 0
const fs = __importStar(require('fs'))
const path = __importStar(require('path'))
const storage_1 = require('./storage')
const templates = __importStar(require('./templates'))
const types_1 = require('./types')
class MigrationError extends Error {
  // TODO [>=4.0.0] Take a `{ cause: ... }` options bag like the default `Error`, it looks like this because of verror backwards-compatibility.
  constructor(migration, original) {
    super(`Migration ${migration.name} (${migration.direction}) failed: ${MigrationError.errorString(original)}`, {
      cause: original
    })
    this.name = 'MigrationError'
    this.migration = migration
  }
  // TODO [>=4.0.0] Remove this backwards-compatibility alias
  get info() {
    return this.migration
  }
  static errorString(cause) {
    return cause instanceof Error ? `Original error: ${cause.message}` : `Non-error value thrown. See info for full props: ${cause}`
  }
}
exports.MigrationError = MigrationError
class Umzug {
  /** creates a new Umzug instance */
  constructor(options) {
    var _b
    this.options = options
    this.storage = (0, storage_1.verifyUmzugStorage)((_b = options.storage) !== null && _b !== void 0 ? _b : new storage_1.JSONStorage())
    this.migrations = this.getMigrationsResolver(this.options.migrations)
  }
  logging(message) {
    var _b
    ;(_b = this.options.logger) === null || _b === void 0 ? void 0 : _b.info(message)
  }
  /** Get the list of migrations which have already been applied */
  async executed() {
    return this.runCommand('executed', async ({ context }) => {
      const list = await this._executed(context)
      // We do the following to not expose the `up` and `down` functions to the user
      return list.map((m) => ({ name: m.name, path: m.path }))
    })
  }
  /** Get the list of migrations which have already been applied */
  async _executed(context) {
    const [migrations, executedNames] = await Promise.all([this.migrations(context), this.storage.executed({ context })])
    const executedSet = new Set(executedNames)
    return migrations.filter((m) => executedSet.has(m.name))
  }
  /** Get the list of migrations which are yet to be applied */
  async pending() {
    return this.runCommand('pending', async ({ context }) => {
      const list = await this._pending(context)
      // We do the following to not expose the `up` and `down` functions to the user
      return list.map((m) => ({ name: m.name, path: m.path }))
    })
  }
  async _pending(context) {
    const [migrations, executedNames] = await Promise.all([this.migrations(context), this.storage.executed({ context })])
    const executedSet = new Set(executedNames)
    return migrations.filter((m) => !executedSet.has(m.name))
  }
  async runCommand(command, cb) {
    const context = await this.getContext()
    return await cb({ context })
  }
  /**
   * Apply migrations. By default, runs all pending migrations.
   * @see MigrateUpOptions for other use cases using `to`, `migrations` and `rerun`.
   */
  async up(options = {}) {
    const eligibleMigrations = async (context) => {
      var _b
      if (options.migrations && options.rerun === types_1.RerunBehavior.ALLOW) {
        // Allow rerun means the specified migrations should be run even if they've run before - so get all migrations, not just pending
        const list = await this.migrations(context)
        return this.findMigrations(list, options.migrations)
      }
      if (options.migrations && options.rerun === types_1.RerunBehavior.SKIP) {
        const executedNames = new Set((await this._executed(context)).map((m) => m.name))
        const filteredMigrations = options.migrations.filter((m) => !executedNames.has(m))
        return this.findMigrations(await this.migrations(context), filteredMigrations)
      }
      if (options.migrations) {
        return this.findMigrations(await this._pending(context), options.migrations)
      }
      const allPending = await this._pending(context)
      let sliceIndex = (_b = options.step) !== null && _b !== void 0 ? _b : allPending.length
      if (options.to) {
        sliceIndex = this.findNameIndex(allPending, options.to) + 1
      }
      return allPending.slice(0, sliceIndex)
    }
    return this.runCommand('up', async ({ context }) => {
      const toBeApplied = await eligibleMigrations(context)
      for (const m of toBeApplied) {
        const start = Date.now()
        const params = { name: m.name, path: m.path, context }
        this.logging({ event: 'migrating', name: m.name })
        try {
          await m.up(params)
        } catch (e) {
          throw new MigrationError({ direction: 'up', ...params }, e)
        }
        await this.storage.logMigration(params)
        const duration = (Date.now() - start) / 1000
        this.logging({ event: 'migrated', name: m.name, durationSeconds: duration })
      }
      return toBeApplied.map((m) => ({ name: m.name, path: m.path }))
    })
  }
  /**
   * Revert migrations. By default, the last executed migration is reverted.
   * @see MigrateDownOptions for other use cases using `to`, `migrations` and `rerun`.
   */
  async down(options = {}) {
    const eligibleMigrations = async (context) => {
      var _b
      if (options.migrations && options.rerun === types_1.RerunBehavior.ALLOW) {
        const list = await this.migrations(context)
        return this.findMigrations(list, options.migrations)
      }
      if (options.migrations && options.rerun === types_1.RerunBehavior.SKIP) {
        const pendingNames = new Set((await this._pending(context)).map((m) => m.name))
        const filteredMigrations = options.migrations.filter((m) => !pendingNames.has(m))
        return this.findMigrations(await this.migrations(context), filteredMigrations)
      }
      if (options.migrations) {
        return this.findMigrations(await this._executed(context), options.migrations)
      }
      const executedReversed = (await this._executed(context)).slice().reverse()
      let sliceIndex = (_b = options.step) !== null && _b !== void 0 ? _b : 1
      if (options.to === 0 || options.migrations) {
        sliceIndex = executedReversed.length
      } else if (options.to) {
        sliceIndex = this.findNameIndex(executedReversed, options.to) + 1
      }
      return executedReversed.slice(0, sliceIndex)
    }
    return this.runCommand('down', async ({ context }) => {
      var _b
      const toBeReverted = await eligibleMigrations(context)
      for (const m of toBeReverted) {
        const start = Date.now()
        const params = { name: m.name, path: m.path, context }
        this.logging({ event: 'reverting', name: m.name })
        try {
          await ((_b = m.down) === null || _b === void 0 ? void 0 : _b.call(m, params))
        } catch (e) {
          throw new MigrationError({ direction: 'down', ...params }, e)
        }
        await this.storage.unlogMigration(params)
        const duration = Number.parseFloat(((Date.now() - start) / 1000).toFixed(3))
        this.logging({ event: 'reverted', name: m.name, durationSeconds: duration })
      }
      return toBeReverted.map((m) => ({ name: m.name, path: m.path }))
    })
  }
  async create(options) {
    await this.runCommand('create', async ({ context }) => {
      var _b, _c, _d, _e
      const isoDate = new Date().toISOString()
      const prefixes = {
        TIMESTAMP: isoDate.replace(/\.\d{3}Z$/, '').replace(/\W/g, '.'),
        DATE: isoDate.split('T')[0].replace(/\W/g, '.'),
        NONE: ''
      }
      const prefixType = (_b = options.prefix) !== null && _b !== void 0 ? _b : 'TIMESTAMP'
      const fileBasename = [prefixes[prefixType], options.name].filter(Boolean).join('.')
      const allowedExtensions = options.allowExtension ? [options.allowExtension] : ['.js', '.cjs', '.mjs', '.ts', '.cts', '.mts', '.sql']
      const existing = await this.migrations(context)
      const last = existing.slice(-1)[0]
      const folder = options.folder || ((_c = this.options.create) === null || _c === void 0 ? void 0 : _c.folder) || ((last === null || last === void 0 ? void 0 : last.path) && path.dirname(last.path))
      if (!folder) {
        throw new Error(`Couldn't infer a directory to generate migration file in. Pass folder explicitly`)
      }
      const filepath = path.join(folder, fileBasename)
      if (!options.allowConfusingOrdering) {
        const confusinglyOrdered = existing.find((e) => e.path && e.path >= filepath)
        if (confusinglyOrdered) {
          throw new Error(`Can't create ${fileBasename}, since it's unclear if it should run before or after existing migration ${confusinglyOrdered.name}. Use allowConfusingOrdering to bypass this error.`)
        }
      }
      const template =
        typeof options.content === 'string'
          ? async () => [[filepath, options.content]]
          : // eslint-disable-next-line @typescript-eslint/unbound-method
            (_e = (_d = this.options.create) === null || _d === void 0 ? void 0 : _d.template) !== null && _e !== void 0
            ? _e
            : Umzug.defaultCreationTemplate
      const toWrite = await template(filepath)
      if (toWrite.length === 0) {
        toWrite.push([filepath, ''])
      }
      toWrite.forEach((pair) => {
        if (!Array.isArray(pair) || pair.length !== 2) {
          throw new Error(`Expected [filepath, content] pair. Check that the file template function returns an array of pairs.`)
        }
        const ext = path.extname(pair[0])
        if (!allowedExtensions.includes(ext)) {
          const allowStr = allowedExtensions.join(', ')
          const message = `Extension ${ext} not allowed. Allowed extensions are ${allowStr}. See help for allowExtension to avoid this error.`
          throw new Error(message)
        }
        fs.mkdirSync(path.dirname(pair[0]), { recursive: true })
        fs.writeFileSync(pair[0], pair[1])
        this.logging({ event: 'created', path: pair[0] })
      })
      if (!options.skipVerify) {
        const [firstFilePath] = toWrite[0]
        const pending = await this._pending(context)
        if (!pending.some((p) => p.path && path.resolve(p.path) === path.resolve(firstFilePath))) {
          const paths = pending.map((p) => p.path).join(', ')
          throw new Error(`Expected ${firstFilePath} to be a pending migration but it wasn't! Pending migration paths: ${paths}. You should investigate this. Use skipVerify to bypass this error.`)
        }
      }
    })
  }
  static defaultCreationTemplate(filepath) {
    const ext = path.extname(filepath)
    if ((ext === '.js' && typeof require.main === 'object') || ext === '.cjs') {
      return [[filepath, templates.js]]
    }
    if (ext === '.ts' || ext === '.mts' || ext === '.cts') {
      return [[filepath, templates.ts]]
    }
    if ((ext === '.js' && require.main === undefined) || ext === '.mjs') {
      return [[filepath, templates.mjs]]
    }
    if (ext === '.sql') {
      const downFilepath = path.join(path.dirname(filepath), 'down', path.basename(filepath))
      return [
        [filepath, templates.sqlUp],
        [downFilepath, templates.sqlDown]
      ]
    }
    return []
  }
  findNameIndex(migrations, name) {
    const index = migrations.findIndex((m) => m.name === name)
    if (index === -1) {
      throw new Error(`Couldn't find migration to apply with name ${JSON.stringify(name)}`)
    }
    return index
  }
  findMigrations(migrations, names) {
    const map = new Map(migrations.map((m) => [m.name, m]))
    return names.map((name) => {
      const migration = map.get(name)
      if (!migration) {
        throw new Error(`Couldn't find migration to apply with name ${JSON.stringify(name)}`)
      }
      return migration
    })
  }
  async getContext() {
    const { context = {} } = this.options
    // eslint-disable-next-line @typescript-eslint/no-unsafe-return
    return typeof context === 'function' ? context() : context
  }
  /** helper for parsing input migrations into a callback returning a list of ready-to-run migrations */
  getMigrationsResolver(inputMigrations) {
    var _b
    if (Array.isArray(inputMigrations)) {
      return async () => inputMigrations
    }
    if (typeof inputMigrations === 'function') {
      // Lazy migrations definition, recurse.
      return async (ctx) => {
        const resolved = await inputMigrations(ctx)
        return this.getMigrationsResolver(resolved)(ctx)
      }
    }
    const paths = inputMigrations.files
    const resolver = (_b = inputMigrations.resolve) !== null && _b !== void 0 ? _b : Umzug.defaultResolver
    return async (context) => {
      paths.sort()
      return paths.map((unresolvedPath) => {
        const filepath = path.resolve(unresolvedPath)
        const name = path.basename(filepath)
        return {
          path: filepath,
          ...resolver({ name, path: filepath, context })
        }
      })
    }
  }
}
exports.Umzug = Umzug
_a = Umzug
Umzug.defaultResolver = ({ name, path: filepath }) => {
  if (!filepath) {
    throw new Error(`Can't use default resolver for non-filesystem migrations`)
  }
  const ext = path.extname(filepath)
  const languageSpecificHelp = {
    '.ts': "TypeScript files can be required by adding `ts-node` as a dependency and calling `require('ts-node/register')` at the program entrypoint before running migrations.",
    '.sql': 'Try writing a resolver which reads file content and executes it as a sql query.'
  }
  languageSpecificHelp['.cts'] = languageSpecificHelp['.ts']
  languageSpecificHelp['.mts'] = languageSpecificHelp['.ts']
  let loadModule
  const jsExt = ext.replace(/\.([cm]?)ts$/, '.$1js')
  const getModule = async () => {
    try {
      return await loadModule()
    } catch (e) {
      if ((e instanceof SyntaxError || e instanceof MissingResolverError) && ext in languageSpecificHelp) {
        e.message += '\n\n' + languageSpecificHelp[ext]
      }
      throw e
    }
  }
  if ((jsExt === '.js' && typeof require.main === 'object') || jsExt === '.cjs') {
    // eslint-disable-next-line @typescript-eslint/no-var-requires
    loadModule = async () => require(filepath)
  } else if (jsExt === '.js' || jsExt === '.mjs') {
    loadModule = async () => import(filepath)
  } else {
    loadModule = async () => {
      throw new MissingResolverError(filepath)
    }
  }
  return {
    name,
    path: filepath,
    up: async ({ context }) => (await getModule()).up({ path: filepath, name, context }),
    down: async ({ context }) => {
      var _b, _c
      return (_c = (_b = await getModule()).down) === null || _c === void 0 ? void 0 : _c.call(_b, { path: filepath, name, context })
    }
  }
}
class MissingResolverError extends Error {
  constructor(filepath) {
    super(`No resolver specified for file ${filepath}. See docs for guidance on how to write a custom resolver.`)
  }
}
//# sourceMappingURL=umzug.js.map
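
Note that this vendored copy resolves migrations from an explicit files array (see getMigrationsResolver above) rather than the glob patterns the upstream npm package expands itself, which appears to be part of the commit's goal of removing unneeded dependencies. A sketch of constructing the runner against a directory of migration scripts, assuming the caller builds the file list and already has a sequelize instance (the directory path is illustrative):

  const fs = require('fs')
  const path = require('path')
  const { Umzug, SequelizeStorage } = require('../libs/umzug')

  const dir = '/path/to/migrations'                      // illustrative location
  const files = fs.readdirSync(dir).map((f) => path.join(dir, f))

  const umzug = new Umzug({
    migrations: { files },                               // default resolver require()s each .js file
    context: {},                                         // passed to every migration's up/down
    storage: new SequelizeStorage({ sequelize }),        // sequelize is assumed to exist already
    logger: console
  })
  await umzug.up()                                       // runs everything still pending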
@@ -1,4 +1,4 @@
-const { Umzug, SequelizeStorage } = require('umzug')
+const { Umzug, SequelizeStorage } = require('../libs/umzug')
 const { Sequelize, DataTypes } = require('sequelize')
 const semver = require('semver')
 const path = require('path')
@@ -60,7 +60,7 @@ class MigrationManager
       return
     }

-    this.initUmzug()
+    await this.initUmzug()
     const migrations = await this.umzug.migrations()
     const executedMigrations = (await this.umzug.executed()).map((m) => m.name)

@@ -95,11 +95,12 @@ class MigrationManager
         // Step 3: If migration fails, save the failed original and restore the backup
         const failedDbPath = path.join(this.configPath, 'absdatabase.failed.sqlite')
         await fs.move(originalDbPath, failedDbPath, { overwrite: true })
-        await fs.move(backupDbPath, originalDbPath, { overwrite: true })
-
-        Logger.info('[MigrationManager] Restored the original database from the backup.')
         Logger.info('[MigrationManager] Saved the failed database as absdatabase.failed.sqlite.')
+
+        await fs.move(backupDbPath, originalDbPath, { overwrite: true })
+        Logger.info('[MigrationManager] Restored the original database from the backup.')
+
         Logger.info('[MigrationManager] Migration failed. Exiting Audiobookshelf with code 1.')
         process.exit(1)
       }
     } else {
@@ -109,49 +110,47 @@ class MigrationManager
       await this.updateDatabaseVersion()
     }

-  initUmzug(umzugStorage = new SequelizeStorage({ sequelize: this.sequelize })) {
-    if (!this.umzug) {
-      // This check is for dependency injection in tests
-      const cwd = this.migrationsDir
-
-      const parent = new Umzug({
-        migrations: {
-          glob: ['*.js', { cwd }],
-          resolve: (params) => {
-            // make script think it's in migrationsSourceDir
-            const migrationPath = params.path
-            const migrationName = params.name
-            const contents = fs.readFileSync(migrationPath, 'utf8')
-            const fakePath = path.join(this.migrationsSourceDir, path.basename(migrationPath))
-            const module = new Module(fakePath)
-            module.filename = fakePath
-            module.paths = Module._nodeModulePaths(this.migrationsSourceDir)
-            module._compile(contents, fakePath)
-            const script = module.exports
-            return {
-              name: migrationName,
-              path: migrationPath,
-              up: script.up,
-              down: script.down
-            }
-          }
-        },
-        context: { queryInterface: this.sequelize.getQueryInterface(), logger: Logger },
-        storage: umzugStorage,
-        logger: Logger
-      })
-
-      // Sort migrations by version
-      this.umzug = new Umzug({
-        ...parent.options,
-        migrations: async () =>
-          (await parent.migrations()).sort((a, b) => {
-            const versionA = this.extractVersionFromTag(a.name)
-            const versionB = this.extractVersionFromTag(b.name)
-            return semver.compare(versionA, versionB)
-          })
-      })
-    }
-  }
+  async initUmzug(umzugStorage = new SequelizeStorage({ sequelize: this.sequelize })) {
+    // This check is for dependency injection in tests
+    const files = (await fs.readdir(this.migrationsDir)).map((file) => path.join(this.migrationsDir, file))
+
+    const parent = new Umzug({
+      migrations: {
+        files,
+        resolve: (params) => {
+          // make script think it's in migrationsSourceDir
+          const migrationPath = params.path
+          const migrationName = params.name
+          const contents = fs.readFileSync(migrationPath, 'utf8')
+          const fakePath = path.join(this.migrationsSourceDir, path.basename(migrationPath))
+          const module = new Module(fakePath)
+          module.filename = fakePath
+          module.paths = Module._nodeModulePaths(this.migrationsSourceDir)
+          module._compile(contents, fakePath)
+          const script = module.exports
+          return {
+            name: migrationName,
+            path: migrationPath,
+            up: script.up,
+            down: script.down
+          }
+        }
+      },
+      context: { queryInterface: this.sequelize.getQueryInterface(), logger: Logger },
+      storage: umzugStorage,
+      logger: Logger
+    })
+
+    // Sort migrations by version
+    this.umzug = new Umzug({
+      ...parent.options,
+      migrations: async () =>
+        (await parent.migrations()).sort((a, b) => {
+          const versionA = this.extractVersionFromTag(a.name)
+          const versionB = this.extractVersionFromTag(b.name)
+          return semver.compare(versionA, versionB)
+        })
+    })
+  }

   async fetchVersionsFromDatabase() {
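
The umzugStorage default parameter on initUmzug is there for dependency injection, so a test can swap in the in-memory storage and keep the real database untouched. A hypothetical test-side sketch (construction of the MigrationManager instance is elided):

  const { memoryStorage } = require('../libs/umzug')

  // inside an async test, with a migrationManager instance already set up
  await migrationManager.initUmzug(memoryStorage())
  const pending = await migrationManager.umzug.pending()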