Update migration to v2.14.0

This commit is contained in:
advplyr 2024-09-24 17:06:00 -05:00
parent c67b5e950e
commit 5154e31c1c
3 changed files with 65 additions and 65 deletions

View file

@@ -4,4 +4,4 @@ Please add a record of every database migration that you create to this file. Th
| Server Version | Migration Script Name | Description |
| -------------- | ---------------------------- | ------------------------------------------------- |
| v2.13.5 | v2.13.5-series-column-unique | Series must have unique names in the same library |
| v2.14.0 | v2.14.0-series-column-unique | Series must have unique names in the same library |

View file

@@ -16,13 +16,13 @@
*/
async function up({ context: { queryInterface, logger } }) {
// Upwards migration script
logger.info('[2.13.5 migration] UPGRADE BEGIN: 2.13.5-series-column-unique ')
logger.info('[2.14.0 migration] UPGRADE BEGIN: 2.14.0-series-column-unique ')
// Check if the unique index already exists
const seriesIndexes = await queryInterface.showIndex('Series')
if (seriesIndexes.some((index) => index.name === 'unique_series_name_per_library')) {
logger.info('[2.13.5 migration] Unique index on Series.name and Series.libraryId already exists')
logger.info('[2.13.5 migration] UPGRADE END: 2.13.5-series-column-unique ')
logger.info('[2.14.0 migration] Unique index on Series.name and Series.libraryId already exists')
logger.info('[2.14.0 migration] UPGRADE END: 2.14.0-series-column-unique ')
return
}
@@ -43,12 +43,12 @@ async function up({ context: { queryInterface, logger } }) {
`)
// Print out how many duplicates were found
logger.info(`[2.13.5 migration] Found ${duplicates.length} duplicate series`)
logger.info(`[2.14.0 migration] Found ${duplicates.length} duplicate series`)
// Iterate over each duplicate series
for (const duplicate of duplicates) {
// Report the series name that is being deleted
logger.info(`[2.13.5 migration] Deduplicating series "${duplicate.name}" in library ${duplicate.libraryId}`)
logger.info(`[2.14.0 migration] Deduplicating series "${duplicate.name}" in library ${duplicate.libraryId}`)
// Determine any duplicate book IDs in the `bookSeries` table for the same series
const [duplicateBookIds] = await queryInterface.sequelize.query(
@@ -73,7 +73,7 @@ async function up({ context: { queryInterface, logger } }) {
// Iterate over the duplicate book IDs if there is at least one and only keep the first row that has this bookId and seriesId
for (const { bookId } of duplicateBookIds) {
logger.info(`[2.13.5 migration] Deduplicating bookId ${bookId} in series "${duplicate.name}" of library ${duplicate.libraryId}`)
logger.info(`[2.14.0 migration] Deduplicating bookId ${bookId} in series "${duplicate.name}" of library ${duplicate.libraryId}`)
// Get all rows of `BookSeries` table that have the same `bookId` and `seriesId`. Sort by `sequence` with nulls sorted last
const [duplicateBookSeries] = await queryInterface.sequelize.query(
`
@@ -113,7 +113,7 @@ async function up({ context: { queryInterface, logger } }) {
}
)
}
logger.info(`[2.13.5 migration] Finished cleanup of bookId ${bookId} in series "${duplicate.name}" of library ${duplicate.libraryId}`)
logger.info(`[2.14.0 migration] Finished cleanup of bookId ${bookId} in series "${duplicate.name}" of library ${duplicate.libraryId}`)
}
// Get all the most recent series which matches the `name` and `libraryId`
@@ -174,16 +174,16 @@ async function up({ context: { queryInterface, logger } }) {
}
}
logger.info(`[2.13.5 migration] Deduplication complete`)
logger.info(`[2.14.0 migration] Deduplication complete`)
// Create a unique index based on the name and library ID for the `Series` table
await queryInterface.addIndex('Series', ['name', 'libraryId'], {
unique: true,
name: 'unique_series_name_per_library'
})
logger.info('[2.13.5 migration] Added unique index on Series.name and Series.libraryId')
logger.info('[2.14.0 migration] Added unique index on Series.name and Series.libraryId')
logger.info('[2.13.5 migration] UPGRADE END: 2.13.5-series-column-unique ')
logger.info('[2.14.0 migration] UPGRADE END: 2.14.0-series-column-unique ')
}
/**
@@ -194,13 +194,13 @@ async function up({ context: { queryInterface, logger } }) {
*/
/**
 * Downward migration: undoes the v2.14.0 upgrade by dropping the unique
 * composite index on Series (name, libraryId).
 *
 * Note: the pre-diff 2.13.5 log lines were diff residue left alongside their
 * 2.14.0 replacements; only the 2.14.0 messages are kept here, so each step
 * is logged exactly once with the correct migration version.
 *
 * @param {{ context: { queryInterface: import('sequelize').QueryInterface, logger: { info: Function } } }} param0 - umzug-style migration context
 * @returns {Promise<void>}
 */
async function down({ context: { queryInterface, logger } }) {
  // Downward migration script
  logger.info('[2.14.0 migration] DOWNGRADE BEGIN: 2.14.0-series-column-unique ')

  // Remove the unique index added by up()
  await queryInterface.removeIndex('Series', 'unique_series_name_per_library')
  logger.info('[2.14.0 migration] Removed unique index on Series.name and Series.libraryId')

  logger.info('[2.14.0 migration] DOWNGRADE END: 2.14.0-series-column-unique ')
}
// Export the up/down handlers so the migration runner can apply/revert this migration
module.exports = { up, down }