Mirror of https://github.com/advplyr/audiobookshelf.git (synced 2025-07-10 09:24:56 +02:00)
Update:Auth to use new user model
- Express requests include userNew to start migrating API controllers to new user model
Parent: 59370cae81
Commit: 202ceb02b5
14 changed files with 626 additions and 392 deletions
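The description above says Express requests now carry a `userNew` object so API controllers can be migrated to the new user model incrementally. As a rough illustration only (the middleware name, token handling, and `Database.userModel` accessor below are assumptions, not the commit's actual code), the idea looks roughly like this:

```js
// Hypothetical sketch: attach the new Sequelize-backed user to the request as
// req.userNew after authentication, alongside the legacy req.user object.
const jwt = require('jsonwebtoken') // assumed token library for this sketch

function attachUserNew(Database) {
  return async (req, res, next) => {
    try {
      const token = (req.headers.authorization || '').replace(/^Bearer /, '')
      const payload = jwt.verify(token, process.env.JWT_SECRET) // assumed secret source
      // findByPk is standard Sequelize; the real model name and lookup may differ
      req.userNew = await Database.userModel.findByPk(payload.userId)
      if (!req.userNew) return res.sendStatus(401)
      next()
    } catch (err) {
      res.sendStatus(401)
    }
  }
}

module.exports = attachUserNew
```

A controller could then read `req.userNew` while the legacy `req.user` keeps working during the transition, which matches the migration direction described in the commit message.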
@@ -1,6 +1,6 @@
 const { DataTypes, QueryInterface } = require('sequelize')
 const Path = require('path')
-const uuidv4 = require("uuid").v4
+const uuidv4 = require('uuid').v4
 const Logger = require('../../Logger')
 const fs = require('../../libs/fsExtra')
 const oldDbFiles = require('./oldDbFiles')
@@ -36,25 +36,14 @@ function getDeviceInfoString(deviceInfo, UserId) {
   if (!deviceInfo) return null
   if (deviceInfo.deviceId) return deviceInfo.deviceId

-  const keys = [
-    UserId,
-    deviceInfo.browserName || null,
-    deviceInfo.browserVersion || null,
-    deviceInfo.osName || null,
-    deviceInfo.osVersion || null,
-    deviceInfo.clientVersion || null,
-    deviceInfo.manufacturer || null,
-    deviceInfo.model || null,
-    deviceInfo.sdkVersion || null,
-    deviceInfo.ipAddress || null
-  ].map(k => k || '')
+  const keys = [UserId, deviceInfo.browserName || null, deviceInfo.browserVersion || null, deviceInfo.osName || null, deviceInfo.osVersion || null, deviceInfo.clientVersion || null, deviceInfo.manufacturer || null, deviceInfo.model || null, deviceInfo.sdkVersion || null, deviceInfo.ipAddress || null].map((k) => k || '')
   return 'temp-' + Buffer.from(keys.join('-'), 'utf-8').toString('base64')
 }

 /**
  * Migrate oldLibraryItem.media to Book model
  * Migrate BookSeries and BookAuthor
- * @param {objects.LibraryItem} oldLibraryItem
+ * @param {objects.LibraryItem} oldLibraryItem
  * @param {object} LibraryItem models.LibraryItem object
  * @returns {object} { book: object, bookSeries: [], bookAuthor: [] }
  */
@@ -67,7 +56,7 @@ function migrateBook(oldLibraryItem, LibraryItem) {
     bookAuthor: []
   }

-  const tracks = (oldBook.audioFiles || []).filter(af => !af.exclude && !af.invalid)
+  const tracks = (oldBook.audioFiles || []).filter((af) => !af.exclude && !af.invalid)
   let duration = 0
   for (const track of tracks) {
     if (track.duration !== null && !isNaN(track.duration)) {
@@ -156,7 +145,7 @@ function migrateBook(oldLibraryItem, LibraryItem) {
 /**
  * Migrate oldLibraryItem.media to Podcast model
  * Migrate PodcastEpisode
- * @param {objects.LibraryItem} oldLibraryItem
+ * @param {objects.LibraryItem} oldLibraryItem
  * @param {object} LibraryItem models.LibraryItem object
  * @returns {object} { podcast: object, podcastEpisode: [] }
  */
@@ -239,7 +228,7 @@ function migratePodcast(oldLibraryItem, LibraryItem) {

 /**
  * Migrate libraryItems to LibraryItem, Book, Podcast models
- * @param {Array<objects.LibraryItem>} oldLibraryItems
+ * @param {Array<objects.LibraryItem>} oldLibraryItems
  * @returns {object} { libraryItem: [], book: [], podcast: [], podcastEpisode: [], bookSeries: [], bookAuthor: [] }
  */
 function migrateLibraryItems(oldLibraryItems) {
@@ -298,7 +287,7 @@ function migrateLibraryItems(oldLibraryItems) {
       updatedAt: oldLibraryItem.updatedAt,
       libraryId,
       libraryFolderId,
-      libraryFiles: oldLibraryItem.libraryFiles.map(lf => {
+      libraryFiles: oldLibraryItem.libraryFiles.map((lf) => {
         if (lf.isSupplementary === undefined) lf.isSupplementary = null
         return lf
       })
@@ -306,7 +295,7 @@ function migrateLibraryItems(oldLibraryItems) {
     oldDbIdMap.libraryItems[oldLibraryItem.id] = LibraryItem.id
     _newRecords.libraryItem.push(LibraryItem)

-    //
+    //
     // Migrate Book/Podcast
     //
     if (oldLibraryItem.mediaType === 'book') {
@@ -329,7 +318,7 @@ function migrateLibraryItems(oldLibraryItems) {

 /**
  * Migrate Library and LibraryFolder
- * @param {Array<objects.Library>} oldLibraries
+ * @param {Array<objects.Library>} oldLibraries
  * @returns {object} { library: [], libraryFolder: [] }
  */
 function migrateLibraries(oldLibraries) {
@@ -343,7 +332,7 @@ function migrateLibraries(oldLibraries) {
       continue
     }

-    //
+    //
     // Migrate Library
     //
     const Library = {
@@ -361,7 +350,7 @@ function migrateLibraries(oldLibraries) {
     oldDbIdMap.libraries[oldLibrary.id] = Library.id
     _newRecords.library.push(Library)

-    //
+    //
     // Migrate LibraryFolders
     //
     for (const oldFolder of oldLibrary.folders) {
@@ -382,21 +371,27 @@ function migrateLibraries(oldLibraries) {
 /**
  * Migrate Author
  * Previously Authors were shared between libraries, this will ensure every author has one library
- * @param {Array<objects.entities.Author>} oldAuthors
- * @param {Array<objects.LibraryItem>} oldLibraryItems
+ * @param {Array<objects.entities.Author>} oldAuthors
+ * @param {Array<objects.LibraryItem>} oldLibraryItems
  * @returns {Array<object>} Array of Author model objs
  */
 function migrateAuthors(oldAuthors, oldLibraryItems) {
   const _newRecords = []
   for (const oldAuthor of oldAuthors) {
     // Get an array of NEW library ids that have this author
-    const librariesWithThisAuthor = [...new Set(oldLibraryItems.map(li => {
-      if (!li.media.metadata.authors?.some(au => au.id === oldAuthor.id)) return null
-      if (!oldDbIdMap.libraries[li.libraryId]) {
-        Logger.warn(`[dbMigration] Authors library id ${li.libraryId} was not migrated`)
-      }
-      return oldDbIdMap.libraries[li.libraryId]
-    }).filter(lid => lid))]
+    const librariesWithThisAuthor = [
+      ...new Set(
+        oldLibraryItems
+          .map((li) => {
+            if (!li.media.metadata.authors?.some((au) => au.id === oldAuthor.id)) return null
+            if (!oldDbIdMap.libraries[li.libraryId]) {
+              Logger.warn(`[dbMigration] Authors library id ${li.libraryId} was not migrated`)
+            }
+            return oldDbIdMap.libraries[li.libraryId]
+          })
+          .filter((lid) => lid)
+      )
+    ]

     if (!librariesWithThisAuthor.length) {
       Logger.error(`[dbMigration] Author ${oldAuthor.name} was not found in any libraries`)
@@ -426,8 +421,8 @@ function migrateAuthors(oldAuthors, oldLibraryItems) {
 /**
  * Migrate Series
  * Previously Series were shared between libraries, this will ensure every series has one library
- * @param {Array<objects.entities.Series>} oldSerieses
- * @param {Array<objects.LibraryItem>} oldLibraryItems
+ * @param {Array<objects.entities.Series>} oldSerieses
+ * @param {Array<objects.LibraryItem>} oldLibraryItems
  * @returns {Array<object>} Array of Series model objs
  */
 function migrateSeries(oldSerieses, oldLibraryItems) {
@@ -436,10 +431,16 @@ function migrateSeries(oldSerieses, oldLibraryItems) {
   // Series will be separate between libraries
   for (const oldSeries of oldSerieses) {
     // Get an array of NEW library ids that have this series
-    const librariesWithThisSeries = [...new Set(oldLibraryItems.map(li => {
-      if (!li.media.metadata.series?.some(se => se.id === oldSeries.id)) return null
-      return oldDbIdMap.libraries[li.libraryId]
-    }).filter(lid => lid))]
+    const librariesWithThisSeries = [
+      ...new Set(
+        oldLibraryItems
+          .map((li) => {
+            if (!li.media.metadata.series?.some((se) => se.id === oldSeries.id)) return null
+            return oldDbIdMap.libraries[li.libraryId]
+          })
+          .filter((lid) => lid)
+      )
+    ]

     if (!librariesWithThisSeries.length) {
       Logger.error(`[dbMigration] Series ${oldSeries.name} was not found in any libraries`)
@@ -465,7 +466,7 @@ function migrateSeries(oldSerieses, oldLibraryItems) {

 /**
  * Migrate users to User and MediaProgress models
- * @param {Array<objects.User>} oldUsers
+ * @param {Array<objects.User>} oldUsers
  * @returns {object} { user: [], mediaProgress: [] }
  */
 function migrateUsers(oldUsers) {
@@ -474,29 +475,33 @@ function migrateUsers(oldUsers) {
     mediaProgress: []
   }
   for (const oldUser of oldUsers) {
-    //
+    //
     // Migrate User
     //
     // Convert old library ids to new ids
-    const librariesAccessible = (oldUser.librariesAccessible || []).map((lid) => oldDbIdMap.libraries[lid]).filter(li => li)
+    const librariesAccessible = (oldUser.librariesAccessible || []).map((lid) => oldDbIdMap.libraries[lid]).filter((li) => li)

     // Convert old library item ids to new ids
-    const bookmarks = (oldUser.bookmarks || []).map(bm => {
-      bm.libraryItemId = oldDbIdMap.libraryItems[bm.libraryItemId]
-      return bm
-    }).filter(bm => bm.libraryItemId)
+    const bookmarks = (oldUser.bookmarks || [])
+      .map((bm) => {
+        bm.libraryItemId = oldDbIdMap.libraryItems[bm.libraryItemId]
+        return bm
+      })
+      .filter((bm) => bm.libraryItemId)

     // Convert old series ids to new
-    const seriesHideFromContinueListening = (oldUser.seriesHideFromContinueListening || []).map(oldSeriesId => {
-      // Series were split to be per library
-      // This will use the first series it finds
-      for (const libraryId in oldDbIdMap.series) {
-        if (oldDbIdMap.series[libraryId][oldSeriesId]) {
-          return oldDbIdMap.series[libraryId][oldSeriesId]
+    const seriesHideFromContinueListening = (oldUser.seriesHideFromContinueListening || [])
+      .map((oldSeriesId) => {
+        // Series were split to be per library
+        // This will use the first series it finds
+        for (const libraryId in oldDbIdMap.series) {
+          if (oldDbIdMap.series[libraryId][oldSeriesId]) {
+            return oldDbIdMap.series[libraryId][oldSeriesId]
+          }
         }
-      }
-      return null
-    }).filter(se => se)
+        return null
+      })
+      .filter((se) => se)

     const User = {
       id: uuidv4(),
@@ -521,7 +526,7 @@ function migrateUsers(oldUsers) {
     oldDbIdMap.users[oldUser.id] = User.id
     _newRecords.user.push(User)

-    //
+    //
     // Migrate MediaProgress
     //
     for (const oldMediaProgress of oldUser.mediaProgress) {
@@ -566,7 +571,7 @@ function migrateUsers(oldUsers) {

 /**
  * Migrate playbackSessions to PlaybackSession and Device models
- * @param {Array<objects.PlaybackSession>} oldSessions
+ * @param {Array<objects.PlaybackSession>} oldSessions
  * @returns {object} { playbackSession: [], device: [] }
  */
 function migrateSessions(oldSessions) {
@@ -690,7 +695,7 @@ function migrateSessions(oldSessions) {

 /**
  * Migrate collections to Collection & CollectionBook
- * @param {Array<objects.Collection>} oldCollections
+ * @param {Array<objects.Collection>} oldCollections
  * @returns {object} { collection: [], collectionBook: [] }
  */
 function migrateCollections(oldCollections) {
@@ -705,7 +710,7 @@ function migrateCollections(oldCollections) {
       continue
     }

-    const BookIds = oldCollection.books.map(lid => oldDbIdMap.books[lid]).filter(bid => bid)
+    const BookIds = oldCollection.books.map((lid) => oldDbIdMap.books[lid]).filter((bid) => bid)
     if (!BookIds.length) {
       Logger.warn(`[dbMigration] migrateCollections: Collection "${oldCollection.name}" has no books`)
       continue
@@ -739,7 +744,7 @@ function migrateCollections(oldCollections) {

 /**
  * Migrate playlists to Playlist and PlaylistMediaItem
- * @param {Array<objects.Playlist>} oldPlaylists
+ * @param {Array<objects.Playlist>} oldPlaylists
  * @returns {object} { playlist: [], playlistMediaItem: [] }
  */
 function migratePlaylists(oldPlaylists) {
@@ -806,7 +811,7 @@ function migratePlaylists(oldPlaylists) {

 /**
  * Migrate feeds to Feed and FeedEpisode models
- * @param {Array<objects.Feed>} oldFeeds
+ * @param {Array<objects.Feed>} oldFeeds
  * @returns {object} { feed: [], feedEpisode: [] }
  */
 function migrateFeeds(oldFeeds) {
@@ -907,14 +912,14 @@ function migrateFeeds(oldFeeds) {

 /**
  * Migrate ServerSettings, NotificationSettings and EmailSettings to Setting model
- * @param {Array<objects.settings.*>} oldSettings
+ * @param {Array<objects.settings.*>} oldSettings
  * @returns {Array<object>} Array of Setting model objs
  */
 function migrateSettings(oldSettings) {
   const _newRecords = []
-  const serverSettings = oldSettings.find(s => s.id === 'server-settings')
-  const notificationSettings = oldSettings.find(s => s.id === 'notification-settings')
-  const emailSettings = oldSettings.find(s => s.id === 'email-settings')
+  const serverSettings = oldSettings.find((s) => s.id === 'server-settings')
+  const notificationSettings = oldSettings.find((s) => s.id === 'notification-settings')
+  const emailSettings = oldSettings.find((s) => s.id === 'email-settings')

   if (serverSettings) {
     _newRecords.push({
@@ -946,7 +951,7 @@ function migrateSettings(oldSettings) {

 /**
  * Load old libraries and bulkCreate new Library and LibraryFolder rows
- * @param {Map<string,Model>} DatabaseModels
+ * @param {Map<string,Model>} DatabaseModels
  */
 async function handleMigrateLibraries(DatabaseModels) {
   const oldLibraries = await oldDbFiles.loadOldData('libraries')
@@ -959,7 +964,7 @@ async function handleMigrateLibraries(DatabaseModels) {

 /**
  * Load old EmailSettings, NotificationSettings and ServerSettings and bulkCreate new Setting rows
- * @param {Map<string,Model>} DatabaseModels
+ * @param {Map<string,Model>} DatabaseModels
  */
 async function handleMigrateSettings(DatabaseModels) {
   const oldSettings = await oldDbFiles.loadOldData('settings')
@@ -970,7 +975,7 @@ async function handleMigrateSettings(DatabaseModels) {

 /**
  * Load old authors and bulkCreate new Author rows
- * @param {Map<string,Model>} DatabaseModels
+ * @param {Map<string,Model>} DatabaseModels
  * @param {Array<objects.LibraryItem>} oldLibraryItems
  */
 async function handleMigrateAuthors(DatabaseModels, oldLibraryItems) {
@@ -982,7 +987,7 @@ async function handleMigrateAuthors(DatabaseModels, oldLibraryItems) {

 /**
  * Load old series and bulkCreate new Series rows
- * @param {Map<string,Model>} DatabaseModels
+ * @param {Map<string,Model>} DatabaseModels
  * @param {Array<objects.LibraryItem>} oldLibraryItems
  */
 async function handleMigrateSeries(DatabaseModels, oldLibraryItems) {
@@ -994,7 +999,7 @@ async function handleMigrateSeries(DatabaseModels, oldLibraryItems) {

 /**
  * bulkCreate new LibraryItem, Book and Podcast rows
- * @param {Map<string,Model>} DatabaseModels
+ * @param {Map<string,Model>} DatabaseModels
  * @param {Array<objects.LibraryItem>} oldLibraryItems
  */
 async function handleMigrateLibraryItems(DatabaseModels, oldLibraryItems) {
@@ -1008,7 +1013,7 @@ async function handleMigrateLibraryItems(DatabaseModels, oldLibraryItems) {
 /**
  * Migrate authors, series then library items in chunks
  * Authors and series require old library items loaded first
- * @param {Map<string,Model>} DatabaseModels
+ * @param {Map<string,Model>} DatabaseModels
  */
 async function handleMigrateAuthorsSeriesAndLibraryItems(DatabaseModels) {
   const oldLibraryItems = await oldDbFiles.loadOldData('libraryItems')
@@ -1026,7 +1031,7 @@ async function handleMigrateAuthorsSeriesAndLibraryItems(DatabaseModels) {

 /**
  * Load old users and bulkCreate new User rows
- * @param {Map<string,Model>} DatabaseModels
+ * @param {Map<string,Model>} DatabaseModels
  */
 async function handleMigrateUsers(DatabaseModels) {
   const oldUsers = await oldDbFiles.loadOldData('users')
@@ -1039,7 +1044,7 @@ async function handleMigrateUsers(DatabaseModels) {

 /**
  * Load old sessions and bulkCreate new PlaybackSession & Device rows
- * @param {Map<string,Model>} DatabaseModels
+ * @param {Map<string,Model>} DatabaseModels
  */
 async function handleMigrateSessions(DatabaseModels) {
   const oldSessions = await oldDbFiles.loadOldData('sessions')
@@ -1055,12 +1060,11 @@ async function handleMigrateSessions(DatabaseModels) {
       await DatabaseModels[model].bulkCreate(newSessionRecords[model])
     }
   }
-
 }

 /**
  * Load old collections and bulkCreate new Collection, CollectionBook models
- * @param {Map<string,Model>} DatabaseModels
+ * @param {Map<string,Model>} DatabaseModels
  */
 async function handleMigrateCollections(DatabaseModels) {
   const oldCollections = await oldDbFiles.loadOldData('collections')
@@ -1073,7 +1077,7 @@ async function handleMigrateCollections(DatabaseModels) {

 /**
  * Load old playlists and bulkCreate new Playlist, PlaylistMediaItem models
- * @param {Map<string,Model>} DatabaseModels
+ * @param {Map<string,Model>} DatabaseModels
  */
 async function handleMigratePlaylists(DatabaseModels) {
   const oldPlaylists = await oldDbFiles.loadOldData('playlists')
@@ -1086,7 +1090,7 @@ async function handleMigratePlaylists(DatabaseModels) {

 /**
  * Load old feeds and bulkCreate new Feed, FeedEpisode models
- * @param {Map<string,Model>} DatabaseModels
+ * @param {Map<string,Model>} DatabaseModels
  */
 async function handleMigrateFeeds(DatabaseModels) {
   const oldFeeds = await oldDbFiles.loadOldData('feeds')
@@ -1152,21 +1156,36 @@ module.exports.checkShouldMigrate = async () => {

 /**
  * Migration from 2.3.0 to 2.3.1 - create extraData columns in LibraryItem and PodcastEpisode
- * @param {QueryInterface} queryInterface
+ * @param {QueryInterface} queryInterface
  */
 async function migrationPatchNewColumns(queryInterface) {
   try {
-    return queryInterface.sequelize.transaction(t => {
+    return queryInterface.sequelize.transaction((t) => {
       return Promise.all([
-        queryInterface.addColumn('libraryItems', 'extraData', {
-          type: DataTypes.JSON
-        }, { transaction: t }),
-        queryInterface.addColumn('podcastEpisodes', 'extraData', {
-          type: DataTypes.JSON
-        }, { transaction: t }),
-        queryInterface.addColumn('libraries', 'extraData', {
-          type: DataTypes.JSON
-        }, { transaction: t })
+        queryInterface.addColumn(
+          'libraryItems',
+          'extraData',
+          {
+            type: DataTypes.JSON
+          },
+          { transaction: t }
+        ),
+        queryInterface.addColumn(
+          'podcastEpisodes',
+          'extraData',
+          {
+            type: DataTypes.JSON
+          },
+          { transaction: t }
+        ),
+        queryInterface.addColumn(
+          'libraries',
+          'extraData',
+          {
+            type: DataTypes.JSON
+          },
+          { transaction: t }
+        )
       ])
     })
   } catch (error) {
@@ -1177,7 +1196,7 @@ async function migrationPatchNewColumns(queryInterface) {

 /**
  * Migration from 2.3.0 to 2.3.1 - old library item ids
- * @param {/src/Database} ctx
+ * @param {/src/Database} ctx
  */
 async function handleOldLibraryItems(ctx) {
   const oldLibraryItems = await oldDbFiles.loadOldData('libraryItems')
@@ -1188,7 +1207,7 @@ async function handleOldLibraryItems(ctx) {

   for (const libraryItem of libraryItems) {
     // Find matching old library item by ino
-    const matchingOldLibraryItem = oldLibraryItems.find(oli => oli.ino === libraryItem.ino)
+    const matchingOldLibraryItem = oldLibraryItems.find((oli) => oli.ino === libraryItem.ino)
     if (matchingOldLibraryItem) {
       oldDbIdMap.libraryItems[matchingOldLibraryItem.id] = libraryItem.id

@@ -1202,7 +1221,7 @@ async function handleOldLibraryItems(ctx) {
       if (libraryItem.media.episodes?.length && matchingOldLibraryItem.media.episodes?.length) {
         for (const podcastEpisode of libraryItem.media.episodes) {
           // Find matching old episode by audio file ino
-          const matchingOldPodcastEpisode = matchingOldLibraryItem.media.episodes.find(oep => oep.audioFile?.ino && oep.audioFile.ino === podcastEpisode.audioFile?.ino)
+          const matchingOldPodcastEpisode = matchingOldLibraryItem.media.episodes.find((oep) => oep.audioFile?.ino && oep.audioFile.ino === podcastEpisode.audioFile?.ino)
           if (matchingOldPodcastEpisode) {
             oldDbIdMap.podcastEpisodes[matchingOldPodcastEpisode.id] = podcastEpisode.id

@@ -1235,7 +1254,7 @@ async function handleOldLibraryItems(ctx) {

 /**
  * Migration from 2.3.0 to 2.3.1 - updating oldLibraryId
- * @param {/src/Database} ctx
+ * @param {/src/Database} ctx
  */
 async function handleOldLibraries(ctx) {
   const oldLibraries = await oldDbFiles.loadOldData('libraries')
@@ -1244,11 +1263,11 @@ async function handleOldLibraries(ctx) {
   let librariesUpdated = 0
   for (const library of libraries) {
     // Find matching old library using exact match on folder paths, exact match on library name
-    const matchingOldLibrary = oldLibraries.find(ol => {
+    const matchingOldLibrary = oldLibraries.find((ol) => {
       if (ol.name !== library.name) {
         return false
       }
-      const folderPaths = ol.folders?.map(f => f.fullPath) || []
+      const folderPaths = ol.folders?.map((f) => f.fullPath) || []
       return folderPaths.join(',') === library.folderPaths.join(',')
     })

@@ -1264,42 +1283,51 @@ async function handleOldLibraries(ctx) {

 /**
  * Migration from 2.3.0 to 2.3.1 - fixing librariesAccessible and bookmarks
- * @param {/src/Database} ctx
+ * @param {import('../../Database')} ctx
  */
 async function handleOldUsers(ctx) {
-  const users = await ctx.models.user.getOldUsers()
+  const usersNew = await ctx.userModel.findAll({
+    include: ctx.models.mediaProgress
+  })
+  const users = usersNew.map((u) => ctx.userModel.getOldUser(u))

   let usersUpdated = 0
   for (const user of users) {
     let hasUpdates = false
     if (user.bookmarks?.length) {
-      user.bookmarks = user.bookmarks.map(bm => {
-        // Only update if this is not the old id format
-        if (!bm.libraryItemId.startsWith('li_')) return bm
+      user.bookmarks = user.bookmarks
+        .map((bm) => {
+          // Only update if this is not the old id format
+          if (!bm.libraryItemId.startsWith('li_')) return bm

-        bm.libraryItemId = oldDbIdMap.libraryItems[bm.libraryItemId]
-        hasUpdates = true
-        return bm
-      }).filter(bm => bm.libraryItemId)
+          bm.libraryItemId = oldDbIdMap.libraryItems[bm.libraryItemId]
+          hasUpdates = true
+          return bm
+        })
+        .filter((bm) => bm.libraryItemId)
     }

     // Convert old library ids to new library ids
     if (user.librariesAccessible?.length) {
-      user.librariesAccessible = user.librariesAccessible.map(lid => {
-        if (!lid.startsWith('lib_') && lid !== 'main') return lid // Already not an old library id so dont change
-        hasUpdates = true
-        return oldDbIdMap.libraries[lid]
-      }).filter(lid => lid)
+      user.librariesAccessible = user.librariesAccessible
+        .map((lid) => {
+          if (!lid.startsWith('lib_') && lid !== 'main') return lid // Already not an old library id so dont change
+          hasUpdates = true
+          return oldDbIdMap.libraries[lid]
+        })
+        .filter((lid) => lid)
     }

     if (user.seriesHideFromContinueListening?.length) {
-      user.seriesHideFromContinueListening = user.seriesHideFromContinueListening.map((seriesId) => {
-        if (seriesId.startsWith('se_')) {
-          hasUpdates = true
-          return null // Filter out old series ids
-        }
-        return seriesId
-      }).filter(se => se)
+      user.seriesHideFromContinueListening = user.seriesHideFromContinueListening
+        .map((seriesId) => {
+          if (seriesId.startsWith('se_')) {
+            hasUpdates = true
+            return null // Filter out old series ids
+          }
+          return seriesId
+        })
+        .filter((se) => se)
     }

     if (hasUpdates) {
@@ -1312,7 +1340,7 @@ async function handleOldUsers(ctx) {

 /**
  * Migration from 2.3.0 to 2.3.1
- * @param {/src/Database} ctx
+ * @param {/src/Database} ctx
  */
 module.exports.migrationPatch = async (ctx) => {
   const queryInterface = ctx.sequelize.getQueryInterface()
@@ -1328,7 +1356,7 @@ module.exports.migrationPatch = async (ctx) => {
   }

   const oldDbPath = Path.join(global.ConfigPath, 'oldDb.zip')
-  if (!await fs.pathExists(oldDbPath)) {
+  if (!(await fs.pathExists(oldDbPath))) {
     Logger.info(`[dbMigration] Migration patch 2.3.0+ unnecessary - no oldDb.zip found`)
     return
   }
@@ -1337,7 +1365,7 @@ module.exports.migrationPatch = async (ctx) => {
   Logger.info(`[dbMigration] Applying migration patch from 2.3.0+`)

   // Extract from oldDb.zip
-  if (!await oldDbFiles.checkExtractItemsUsersAndLibraries()) {
+  if (!(await oldDbFiles.checkExtractItemsUsersAndLibraries())) {
     return
   }

@@ -1354,8 +1382,8 @@ module.exports.migrationPatch = async (ctx) => {
 /**
  * Migration from 2.3.3 to 2.3.4
  * Populating the size column on libraryItem
- * @param {/src/Database} ctx
- * @param {number} offset
+ * @param {/src/Database} ctx
+ * @param {number} offset
  */
 async function migrationPatch2LibraryItems(ctx, offset = 0) {
   const libraryItems = await ctx.models.libraryItem.findAll({
@@ -1368,7 +1396,7 @@ async function migrationPatch2LibraryItems(ctx, offset = 0) {
   for (const libraryItem of libraryItems) {
     if (libraryItem.libraryFiles?.length) {
       let size = 0
-      libraryItem.libraryFiles.forEach(lf => {
+      libraryItem.libraryFiles.forEach((lf) => {
         if (!isNaN(lf.metadata?.size)) {
           size += Number(lf.metadata.size)
         }
@@ -1396,8 +1424,8 @@ async function migrationPatch2LibraryItems(ctx, offset = 0) {
 /**
  * Migration from 2.3.3 to 2.3.4
  * Populating the duration & titleIgnorePrefix column on book
- * @param {/src/Database} ctx
- * @param {number} offset
+ * @param {/src/Database} ctx
+ * @param {number} offset
  */
 async function migrationPatch2Books(ctx, offset = 0) {
   const books = await ctx.models.book.findAll({
@@ -1411,7 +1439,7 @@ async function migrationPatch2Books(ctx, offset = 0) {
     let duration = 0

     if (book.audioFiles?.length) {
-      const tracks = book.audioFiles.filter(af => !af.exclude && !af.invalid)
+      const tracks = book.audioFiles.filter((af) => !af.exclude && !af.invalid)
       for (const track of tracks) {
         if (track.duration !== null && !isNaN(track.duration)) {
           duration += track.duration
@@ -1442,8 +1470,8 @@ async function migrationPatch2Books(ctx, offset = 0) {
 /**
  * Migration from 2.3.3 to 2.3.4
  * Populating the titleIgnorePrefix column on podcast
- * @param {/src/Database} ctx
- * @param {number} offset
+ * @param {/src/Database} ctx
+ * @param {number} offset
  */
 async function migrationPatch2Podcasts(ctx, offset = 0) {
   const podcasts = await ctx.models.podcast.findAll({
@@ -1476,8 +1504,8 @@ async function migrationPatch2Podcasts(ctx, offset = 0) {
 /**
  * Migration from 2.3.3 to 2.3.4
  * Populating the nameIgnorePrefix column on series
- * @param {/src/Database} ctx
- * @param {number} offset
+ * @param {/src/Database} ctx
+ * @param {number} offset
  */
 async function migrationPatch2Series(ctx, offset = 0) {
   const allSeries = await ctx.models.series.findAll({
@@ -1510,8 +1538,8 @@ async function migrationPatch2Series(ctx, offset = 0) {
 /**
  * Migration from 2.3.3 to 2.3.4
  * Populating the lastFirst column on author
- * @param {/src/Database} ctx
- * @param {number} offset
+ * @param {/src/Database} ctx
+ * @param {number} offset
  */
 async function migrationPatch2Authors(ctx, offset = 0) {
   const authors = await ctx.models.author.findAll({
@@ -1546,8 +1574,8 @@ async function migrationPatch2Authors(ctx, offset = 0) {
 /**
  * Migration from 2.3.3 to 2.3.4
  * Populating the createdAt column on bookAuthor
- * @param {/src/Database} ctx
- * @param {number} offset
+ * @param {/src/Database} ctx
+ * @param {number} offset
  */
 async function migrationPatch2BookAuthors(ctx, offset = 0) {
   const bookAuthors = await ctx.models.bookAuthor.findAll({
@@ -1581,8 +1609,8 @@ async function migrationPatch2BookAuthors(ctx, offset = 0) {
 /**
  * Migration from 2.3.3 to 2.3.4
  * Populating the createdAt column on bookSeries
- * @param {/src/Database} ctx
- * @param {number} offset
+ * @param {/src/Database} ctx
+ * @param {number} offset
  */
 async function migrationPatch2BookSeries(ctx, offset = 0) {
   const allBookSeries = await ctx.models.bookSeries.findAll({
@@ -1616,7 +1644,7 @@ async function migrationPatch2BookSeries(ctx, offset = 0) {
 /**
  * Migration from 2.3.3 to 2.3.4
  * Adding coverPath column to Feed model
- * @param {/src/Database} ctx
+ * @param {/src/Database} ctx
  */
 module.exports.migrationPatch2 = async (ctx) => {
   const queryInterface = ctx.sequelize.getQueryInterface()
@@ -1631,44 +1659,95 @@ module.exports.migrationPatch2 = async (ctx) => {
   Logger.info(`[dbMigration] Applying migration patch from 2.3.3+`)

   try {
-    await queryInterface.sequelize.transaction(t => {
+    await queryInterface.sequelize.transaction((t) => {
       const queries = []
       if (!bookAuthorsTableDescription?.createdAt) {
-        queries.push(...[
-          queryInterface.addColumn('bookAuthors', 'createdAt', {
-            type: DataTypes.DATE
-          }, { transaction: t }),
-          queryInterface.addColumn('bookSeries', 'createdAt', {
-            type: DataTypes.DATE
-          }, { transaction: t }),
-        ])
+        queries.push(
+          ...[
+            queryInterface.addColumn(
+              'bookAuthors',
+              'createdAt',
+              {
+                type: DataTypes.DATE
+              },
+              { transaction: t }
+            ),
+            queryInterface.addColumn(
+              'bookSeries',
+              'createdAt',
+              {
+                type: DataTypes.DATE
+              },
+              { transaction: t }
+            )
+          ]
+        )
       }
       if (!authorsTableDescription?.lastFirst) {
-        queries.push(...[
-          queryInterface.addColumn('authors', 'lastFirst', {
-            type: DataTypes.STRING
-          }, { transaction: t }),
-          queryInterface.addColumn('libraryItems', 'size', {
-            type: DataTypes.BIGINT
-          }, { transaction: t }),
-          queryInterface.addColumn('books', 'duration', {
-            type: DataTypes.FLOAT
-          }, { transaction: t }),
-          queryInterface.addColumn('books', 'titleIgnorePrefix', {
-            type: DataTypes.STRING
-          }, { transaction: t }),
-          queryInterface.addColumn('podcasts', 'titleIgnorePrefix', {
-            type: DataTypes.STRING
-          }, { transaction: t }),
-          queryInterface.addColumn('series', 'nameIgnorePrefix', {
-            type: DataTypes.STRING
-          }, { transaction: t }),
-        ])
+        queries.push(
+          ...[
+            queryInterface.addColumn(
+              'authors',
+              'lastFirst',
+              {
+                type: DataTypes.STRING
+              },
+              { transaction: t }
+            ),
+            queryInterface.addColumn(
+              'libraryItems',
+              'size',
+              {
+                type: DataTypes.BIGINT
+              },
+              { transaction: t }
+            ),
+            queryInterface.addColumn(
+              'books',
+              'duration',
+              {
+                type: DataTypes.FLOAT
+              },
+              { transaction: t }
+            ),
+            queryInterface.addColumn(
+              'books',
+              'titleIgnorePrefix',
+              {
+                type: DataTypes.STRING
+              },
+              { transaction: t }
+            ),
+            queryInterface.addColumn(
+              'podcasts',
+              'titleIgnorePrefix',
+              {
+                type: DataTypes.STRING
+              },
+              { transaction: t }
+            ),
+            queryInterface.addColumn(
+              'series',
+              'nameIgnorePrefix',
+              {
+                type: DataTypes.STRING
+              },
+              { transaction: t }
+            )
+          ]
+        )
       }
       if (!feedTableDescription?.coverPath) {
-        queries.push(queryInterface.addColumn('feeds', 'coverPath', {
-          type: DataTypes.STRING
-        }, { transaction: t }))
+        queries.push(
+          queryInterface.addColumn(
+            'feeds',
+            'coverPath',
+            {
+              type: DataTypes.STRING
+            },
+            { transaction: t }
+          )
+        )
       }
       return Promise.all(queries)
     })
@@ -1708,4 +1787,4 @@ module.exports.migrationPatch2 = async (ctx) => {
     Logger.error(`[dbMigration] Migration from 2.3.3+ column creation failed`, error)
     throw new Error('Migration 2.3.3+ failed ' + error)
-  }
+  }
 }