Mirror of https://github.com/advplyr/audiobookshelf.git (synced 2025-07-22 03:44:28 +02:00)

Commit d7f0815fb3: Merge branch 'master' into feat/metadataForPlaybackSessions

335 changed files with 11282 additions and 3863 deletions
server/Auth.js

@@ -10,6 +10,7 @@ const ExtractJwt = require('passport-jwt').ExtractJwt
 const OpenIDClient = require('openid-client')
 const Database = require('./Database')
 const Logger = require('./Logger')
+const { escapeRegExp } = require('./utils')

 /**
  * @class Class for handling all the authentication related functionality.
@@ -18,7 +19,11 @@ class Auth {
   constructor() {
     // Map of openId sessions indexed by oauth2 state-variable
     this.openIdAuthSession = new Map()
-    this.ignorePatterns = [/\/api\/items\/[^/]+\/cover/, /\/api\/authors\/[^/]+\/image/]
+    const escapedRouterBasePath = escapeRegExp(global.RouterBasePath)
+    this.ignorePatterns = [
+      new RegExp(`^(${escapedRouterBasePath}/api)?/items/[^/]+/cover$`),
+      new RegExp(`^(${escapedRouterBasePath}/api)?/authors/[^/]+/image$`)
+    ]
   }

   /**
@@ -28,7 +33,7 @@ class Auth {
    * @private
    */
   authNotNeeded(req) {
-    return req.method === 'GET' && this.ignorePatterns.some((pattern) => pattern.test(req.originalUrl))
+    return req.method === 'GET' && this.ignorePatterns.some((pattern) => pattern.test(req.path))
   }

   ifAuthNeeded(middleware) {
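Example (not part of the diff): the new anchored patterns can be exercised standalone. This sketch assumes a RouterBasePath of '/audiobookshelf' and re-implements escapeRegExp inline as a stand-in for the helper imported from ./utils.

// Standalone sketch of the ignore-pattern matching introduced above.
const escapeRegExp = (str) => str.replace(/[.*+?^${}()|[\]\\]/g, '\\$&') // stand-in for the ./utils helper

const routerBasePath = '/audiobookshelf' // assumed example value for global.RouterBasePath
const escaped = escapeRegExp(routerBasePath)
const ignorePatterns = [
  new RegExp(`^(${escaped}/api)?/items/[^/]+/cover$`),
  new RegExp(`^(${escaped}/api)?/authors/[^/]+/image$`)
]

console.log(ignorePatterns[0].test('/items/li_abc123/cover')) // true (no base path)
console.log(ignorePatterns[0].test('/audiobookshelf/api/items/li_abc123/cover')) // true
console.log(ignorePatterns[0].test('/items/li_abc123/cover/extra')) // false (pattern is now anchored with ^ and $)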

server/Database.js

@@ -190,7 +190,13 @@ class Database {
     await this.buildModels(force)
     Logger.info(`[Database] Db initialized with models:`, Object.keys(this.sequelize.models).join(', '))

+    await this.addTriggers()
+
     await this.loadData()
+
+    Logger.info(`[Database] running ANALYZE`)
+    await this.sequelize.query('ANALYZE')
+    Logger.info(`[Database] ANALYZE completed`)
   }

   /**
@@ -226,6 +232,28 @@ class Database {

     try {
       await this.sequelize.authenticate()
+
+      // Set SQLite pragmas from environment variables
+      const allowedPragmas = [
+        { name: 'mmap_size', env: 'SQLITE_MMAP_SIZE' },
+        { name: 'cache_size', env: 'SQLITE_CACHE_SIZE' },
+        { name: 'temp_store', env: 'SQLITE_TEMP_STORE' }
+      ]
+
+      for (const pragma of allowedPragmas) {
+        const value = process.env[pragma.env]
+        if (value !== undefined) {
+          try {
+            Logger.info(`[Database] Running "PRAGMA ${pragma.name} = ${value}"`)
+            await this.sequelize.query(`PRAGMA ${pragma.name} = ${value}`)
+            const [result] = await this.sequelize.query(`PRAGMA ${pragma.name}`)
+            Logger.debug(`[Database] "PRAGMA ${pragma.name}" query result:`, result)
+          } catch (error) {
+            Logger.error(`[Database] Failed to set SQLite pragma ${pragma.name}`, error)
+          }
+        }
+      }
+
       if (process.env.NUSQLITE3_PATH) {
         await this.loadExtension(process.env.NUSQLITE3_PATH)
         Logger.info(`[Database] Db supports unaccent and unicode foldings`)
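Example (not part of the diff): how the new pragma overrides would be supplied at startup. The values below are illustrative, not recommendations, and must use names from the allowedPragmas allowlist above.

// Run with e.g.: SQLITE_MMAP_SIZE=268435456 SQLITE_CACHE_SIZE=-20000 SQLITE_TEMP_STORE=2 node index.js
// Minimal, runnable sketch of the same allowlist check:
const allowedPragmas = [
  { name: 'mmap_size', env: 'SQLITE_MMAP_SIZE' },
  { name: 'cache_size', env: 'SQLITE_CACHE_SIZE' },
  { name: 'temp_store', env: 'SQLITE_TEMP_STORE' }
]
for (const pragma of allowedPragmas) {
  const value = process.env[pragma.env]
  if (value === undefined) continue
  // The env value is interpolated into SQL; a numeric check (an extra safeguard,
  // not in the diff itself) keeps the generated statement well-formed.
  if (!/^-?\d+$/.test(value)) continue
  console.log(`PRAGMA ${pragma.name} = ${value}`)
}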

@@ -678,6 +706,7 @@ class Database {
       await libraryItem.destroy()
     }

+    // Remove invalid PlaylistMediaItem records
     const playlistMediaItemsWithNoMediaItem = await this.playlistMediaItemModel.findAll({
       include: [
         {
@@ -699,6 +728,19 @@ class Database {
       await playlistMediaItem.destroy()
     }

+    // Remove invalid CollectionBook records
+    const collectionBooksWithNoBook = await this.collectionBookModel.findAll({
+      include: {
+        model: this.bookModel,
+        required: false
+      },
+      where: { '$book.id$': null }
+    })
+    for (const collectionBook of collectionBooksWithNoBook) {
+      Logger.warn(`Found collectionBook with no book - removing it`)
+      await collectionBook.destroy()
+    }
+
     // Remove empty series
     const emptySeries = await this.seriesModel.findAll({
       include: {

@@ -731,6 +773,112 @@ class Database {
     return textQuery
   }

+  /**
+   * This is used to create necessary triggers for new databases.
+   * It adds triggers to update libraryItems.title[IgnorePrefix] when (books|podcasts).title[IgnorePrefix] is updated
+   */
+  async addTriggers() {
+    await this.addTriggerIfNotExists('books', 'title', 'id', 'libraryItems', 'title', 'mediaId')
+    await this.addTriggerIfNotExists('books', 'titleIgnorePrefix', 'id', 'libraryItems', 'titleIgnorePrefix', 'mediaId')
+    await this.addTriggerIfNotExists('podcasts', 'title', 'id', 'libraryItems', 'title', 'mediaId')
+    await this.addTriggerIfNotExists('podcasts', 'titleIgnorePrefix', 'id', 'libraryItems', 'titleIgnorePrefix', 'mediaId')
+    await this.addAuthorNamesTriggersIfNotExist()
+  }
+
+  async addTriggerIfNotExists(sourceTable, sourceColumn, sourceIdColumn, targetTable, targetColumn, targetIdColumn) {
+    const action = `update_${targetTable}_${targetColumn}`
+    const fromSource = sourceTable === 'books' ? '' : `_from_${sourceTable}_${sourceColumn}`
+    const triggerName = this.convertToSnakeCase(`${action}${fromSource}`)
+
+    const [[{ count }]] = await this.sequelize.query(`SELECT COUNT(*) as count FROM sqlite_master WHERE type='trigger' AND name='${triggerName}'`)
+    if (count > 0) return // Trigger already exists
+
+    Logger.info(`[Database] Adding trigger ${triggerName}`)
+
+    await this.sequelize.query(`
+      CREATE TRIGGER ${triggerName}
+      AFTER UPDATE OF ${sourceColumn} ON ${sourceTable}
+      FOR EACH ROW
+      BEGIN
+        UPDATE ${targetTable}
+          SET ${targetColumn} = NEW.${sourceColumn}
+        WHERE ${targetTable}.${targetIdColumn} = NEW.${sourceIdColumn};
+      END;
+    `)
+  }
+
+  async addAuthorNamesTriggersIfNotExist() {
+    const libraryItems = 'libraryItems'
+    const bookAuthors = 'bookAuthors'
+    const authors = 'authors'
+    const columns = [
+      { name: 'authorNamesFirstLast', source: `${authors}.name`, spec: { type: Sequelize.STRING, allowNull: true } },
+      { name: 'authorNamesLastFirst', source: `${authors}.lastFirst`, spec: { type: Sequelize.STRING, allowNull: true } }
+    ]
+    const authorsSort = `${bookAuthors}.createdAt ASC`
+    const columnNames = columns.map((column) => column.name).join(', ')
+    const columnSourcesExpression = columns.map((column) => `GROUP_CONCAT(${column.source}, ', ' ORDER BY ${authorsSort})`).join(', ')
+    const authorsJoin = `${authors} JOIN ${bookAuthors} ON ${authors}.id = ${bookAuthors}.authorId`
+
+    const addBookAuthorsTriggerIfNotExists = async (action) => {
+      const modifiedRecord = action === 'delete' ? 'OLD' : 'NEW'
+      const triggerName = this.convertToSnakeCase(`update_${libraryItems}_authorNames_on_${bookAuthors}_${action}`)
+      const authorNamesSubQuery = `
+        SELECT ${columnSourcesExpression}
+        FROM ${authorsJoin}
+        WHERE ${bookAuthors}.bookId = ${modifiedRecord}.bookId
+      `
+      const [[{ count }]] = await this.sequelize.query(`SELECT COUNT(*) as count FROM sqlite_master WHERE type='trigger' AND name='${triggerName}'`)
+      if (count > 0) return // Trigger already exists
+
+      Logger.info(`[Database] Adding trigger ${triggerName}`)
+
+      await this.sequelize.query(`
+        CREATE TRIGGER ${triggerName}
+        AFTER ${action} ON ${bookAuthors}
+        FOR EACH ROW
+        BEGIN
+          UPDATE ${libraryItems}
+            SET (${columnNames}) = (${authorNamesSubQuery})
+          WHERE mediaId = ${modifiedRecord}.bookId;
+        END;
+      `)
+    }
+
+    const addAuthorsUpdateTriggerIfNotExists = async () => {
+      const triggerName = this.convertToSnakeCase(`update_${libraryItems}_authorNames_on_authors_update`)
+      const authorNamesSubQuery = `
+        SELECT ${columnSourcesExpression}
+        FROM ${authorsJoin}
+        WHERE ${bookAuthors}.bookId = ${libraryItems}.mediaId
+      `
+
+      const [[{ count }]] = await this.sequelize.query(`SELECT COUNT(*) as count FROM sqlite_master WHERE type='trigger' AND name='${triggerName}'`)
+      if (count > 0) return // Trigger already exists
+
+      Logger.info(`[Database] Adding trigger ${triggerName}`)
+
+      await this.sequelize.query(`
+        CREATE TRIGGER ${triggerName}
+        AFTER UPDATE OF name ON ${authors}
+        FOR EACH ROW
+        BEGIN
+          UPDATE ${libraryItems}
+            SET (${columnNames}) = (${authorNamesSubQuery})
+          WHERE mediaId IN (SELECT bookId FROM ${bookAuthors} WHERE authorId = NEW.id);
+        END;
+      `)
+    }
+
+    await addBookAuthorsTriggerIfNotExists('insert')
+    await addBookAuthorsTriggerIfNotExists('delete')
+    await addAuthorsUpdateTriggerIfNotExists()
+  }
+
+  convertToSnakeCase(str) {
+    return str.replace(/([A-Z])/g, '_$1').toLowerCase()
+  }
+
   TextSearchQuery = class {
     constructor(sequelize, supportsUnaccent, query) {
       this.sequelize = sequelize
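Worked example (not part of the diff): the trigger names that addTriggerIfNotExists derives via convertToSnakeCase for the calls in addTriggers.

const convertToSnakeCase = (str) => str.replace(/([A-Z])/g, '_$1').toLowerCase()
const triggerName = (sourceTable, sourceColumn, targetTable, targetColumn) => {
  const action = `update_${targetTable}_${targetColumn}`
  const fromSource = sourceTable === 'books' ? '' : `_from_${sourceTable}_${sourceColumn}`
  return convertToSnakeCase(`${action}${fromSource}`)
}
console.log(triggerName('books', 'title', 'libraryItems', 'title'))
// update_library_items_title
console.log(triggerName('podcasts', 'titleIgnorePrefix', 'libraryItems', 'titleIgnorePrefix'))
// update_library_items_title_ignore_prefix_from_podcasts_title_ignore_prefix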

server/Logger.js

@@ -21,12 +21,7 @@ class Logger {
   }

   get levelString() {
-    for (const key in LogLevel) {
-      if (LogLevel[key] === this.logLevel) {
-        return key
-      }
-    }
-    return 'UNKNOWN'
+    return this.getLogLevelString(this.logLevel)
   }

   /**
@@ -117,7 +112,7 @@ class Logger {
     if (level < LogLevel.FATAL && level < this.logLevel) return
     const consoleMethod = Logger.ConsoleMethods[levelName]
     console[consoleMethod](`[${this.timestamp}] ${levelName}:`, ...args)
-    this.#logToFileAndListeners(level, levelName, args, source)
+    return this.#logToFileAndListeners(level, levelName, args, source)
   }

   trace(...args) {
@@ -141,7 +136,7 @@ class Logger {
   }

   fatal(...args) {
-    this.#log('FATAL', this.source, ...args)
+    return this.#log('FATAL', this.source, ...args)
   }

   note(...args) {

server/Server.js

@@ -85,6 +85,12 @@ class Server {
       }
     }

+    if (process.env.PODCAST_DOWNLOAD_TIMEOUT) {
+      global.PodcastDownloadTimeout = process.env.PODCAST_DOWNLOAD_TIMEOUT
+    } else {
+      global.PodcastDownloadTimeout = 30000
+    }
+
     if (!fs.pathExistsSync(global.ConfigPath)) {
       fs.mkdirSync(global.ConfigPath)
     }
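Usage sketch (not part of the diff; the value is an illustrative assumption): the podcast download timeout can now be overridden from the environment, with a 30000 ms default.

// e.g. PODCAST_DOWNLOAD_TIMEOUT=120000 node index.js
const timeout = process.env.PODCAST_DOWNLOAD_TIMEOUT || 30000
console.log(timeout) // '120000' when set; note env values are strings, so consumers should coerce with Number() if they need arithmetic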

server/SocketAuthority.js

@@ -84,6 +84,42 @@ class SocketAuthority {
     }
   }

+  /**
+   * Emits event with library item to all clients that can access the library item
+   * Note: Emits toOldJSONExpanded()
+   *
+   * @param {string} evt
+   * @param {import('./models/LibraryItem')} libraryItem
+   */
+  libraryItemEmitter(evt, libraryItem) {
+    for (const socketId in this.clients) {
+      if (this.clients[socketId].user?.checkCanAccessLibraryItem(libraryItem)) {
+        this.clients[socketId].socket.emit(evt, libraryItem.toOldJSONExpanded())
+      }
+    }
+  }
+
+  /**
+   * Emits event with library items to all clients that can access the library items
+   * Note: Emits toOldJSONExpanded()
+   *
+   * @param {string} evt
+   * @param {import('./models/LibraryItem')[]} libraryItems
+   */
+  libraryItemsEmitter(evt, libraryItems) {
+    for (const socketId in this.clients) {
+      if (this.clients[socketId].user) {
+        const libraryItemsAccessibleToUser = libraryItems.filter((li) => this.clients[socketId].user.checkCanAccessLibraryItem(li))
+        if (libraryItemsAccessibleToUser.length) {
+          this.clients[socketId].socket.emit(
+            evt,
+            libraryItemsAccessibleToUser.map((li) => li.toOldJSONExpanded())
+          )
+        }
+      }
+    }
+  }
+
   /**
    * Closes the Socket.IO server and disconnect all clients
    *
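Illustration (not part of the diff): the per-client filtering in libraryItemsEmitter, reduced to a runnable sketch with stubbed clients and items.

const clients = {
  a: { user: { checkCanAccessLibraryItem: (li) => li.libraryId === 'lib1' }, socket: { emit: (evt, payload) => console.log('a <-', evt, payload.length) } },
  b: { user: null, socket: { emit: () => {} } } // unauthenticated client receives nothing
}
const libraryItems = [
  { libraryId: 'lib1', toOldJSONExpanded: () => ({ id: 1 }) },
  { libraryId: 'lib2', toOldJSONExpanded: () => ({ id: 2 }) }
]
for (const socketId in clients) {
  if (!clients[socketId].user) continue
  const accessible = libraryItems.filter((li) => clients[socketId].user.checkCanAccessLibraryItem(li))
  if (accessible.length) clients[socketId].socket.emit('items_updated', accessible.map((li) => li.toOldJSONExpanded()))
}
// prints: a <- items_updated 1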

server/Watcher.js

@@ -5,7 +5,7 @@ const Logger = require('./Logger')
 const Task = require('./objects/Task')
 const TaskManager = require('./managers/TaskManager')

-const { filePathToPOSIX, isSameOrSubPath, getFileMTimeMs } = require('./utils/fileUtils')
+const { filePathToPOSIX, isSameOrSubPath, getFileMTimeMs, shouldIgnoreFile } = require('./utils/fileUtils')

 /**
  * @typedef PendingFileUpdate
@@ -286,15 +286,10 @@ class FolderWatcher extends EventEmitter {

     const relPath = path.replace(folderPath, '')

-    if (Path.extname(relPath).toLowerCase() === '.part') {
-      Logger.debug(`[Watcher] Ignoring .part file "${relPath}"`)
-      return false
-    }
-
-    // Ignore files/folders starting with "."
-    const hasDotPath = relPath.split('/').find((p) => p.startsWith('.'))
-    if (hasDotPath) {
-      Logger.debug(`[Watcher] Ignoring dot path "${relPath}" | Piece "${hasDotPath}"`)
+    // Check for ignored extensions or directories, such as dotfiles and hidden directories
+    const shouldIgnore = shouldIgnoreFile(relPath)
+    if (shouldIgnore) {
+      Logger.debug(`[Watcher] Ignoring ${shouldIgnore} - "${relPath}"`)
       return false
     }
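The shouldIgnoreFile helper itself is not shown in this diff (it lives in ./utils/fileUtils). A hypothetical stand-in consistent with the call site, which expects a truthy reason string or false, could look like:

const Path = require('path')

// Hypothetical stand-in only; the real implementation may differ.
function shouldIgnoreFile(relPath) {
  if (Path.extname(relPath).toLowerCase() === '.part') return 'partial file'
  if (relPath.split('/').some((p) => p.startsWith('.'))) return 'dot path'
  return false
}

console.log(shouldIgnoreFile('/show/.hidden/ep1.mp3')) // 'dot path'
console.log(shouldIgnoreFile('/show/ep1.mp3.part')) // 'partial file'
console.log(shouldIgnoreFile('/show/ep1.mp3')) // false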

server/controllers/AuthorController.js

@@ -152,10 +152,7 @@ class AuthorController {
       for (const libraryItem of libraryItems) {
         await libraryItem.saveMetadataFile()
       }
-      SocketAuthority.emitter(
-        'items_updated',
-        libraryItems.map((li) => li.toOldJSONExpanded())
-      )
+      SocketAuthority.libraryItemsEmitter('items_updated', libraryItems)
     }

     // Remove old author
@@ -210,10 +207,7 @@ class AuthorController {
       }

       if (libraryItems.length) {
-        SocketAuthority.emitter(
-          'items_updated',
-          libraryItems.map((li) => li.toOldJSONExpanded())
-        )
+        SocketAuthority.libraryItemsEmitter('items_updated', libraryItems)
       }
     } else {
       numBooksForAuthor = await Database.bookAuthorModel.getCountForAuthor(req.author.id)

server/controllers/CollectionController.js

@@ -251,6 +251,7 @@ class CollectionController {
   /**
    * DELETE: /api/collections/:id/book/:bookId
    * Remove a single book from a collection. Re-order books
+   * Users with update permission can remove books from collections
    * TODO: bookId is actually libraryItemId. Clients need updating to use bookId
    *
    * @param {CollectionControllerRequest} req
@@ -427,7 +428,8 @@ class CollectionController {
       req.collection = collection
     }

-    if (req.method == 'DELETE' && !req.user.canDelete) {
+    // Users with update permission can remove books from collections
+    if (req.method == 'DELETE' && !req.params.bookId && !req.user.canDelete) {
       Logger.warn(`[CollectionController] User "${req.user.username}" attempted to delete without permission`)
       return res.sendStatus(403)
     } else if ((req.method == 'PATCH' || req.method == 'POST') && !req.user.canUpdate) {

server/controllers/FileSystemController.js

@@ -4,6 +4,7 @@ const Logger = require('../Logger')
 const fs = require('../libs/fsExtra')
 const { toNumber } = require('../utils/index')
+const fileUtils = require('../utils/fileUtils')
 const Database = require('../Database')

 /**
  * @typedef RequestUserObject
@@ -87,14 +88,50 @@ class FileSystemController {
       return res.sendStatus(403)
     }

-    const filepath = req.body.filepath
-    if (!filepath?.length) {
+    const { filepath, directory, folderPath } = req.body
+
+    if (!filepath?.length || typeof filepath !== 'string') {
       return res.sendStatus(400)
     }

     const exists = await fs.pathExists(filepath)
-    res.json({
-      exists
-    })
+
+    if (exists) {
+      return res.json({
+        exists: true
+      })
+    }
+
+    // If directory and folderPath are passed in, check if a library item exists in a subdirectory
+    // See: https://github.com/advplyr/audiobookshelf/issues/4146
+    if (typeof directory === 'string' && typeof folderPath === 'string' && directory.length > 0 && folderPath.length > 0) {
+      const cleanedDirectory = directory.split('/').filter(Boolean).join('/')
+      if (cleanedDirectory.includes('/')) {
+        // Can only be 2 levels deep
+        const possiblePaths = []
+        const subdir = Path.dirname(directory)
+        possiblePaths.push(fileUtils.filePathToPOSIX(Path.join(folderPath, subdir)))
+        if (subdir.includes('/')) {
+          possiblePaths.push(fileUtils.filePathToPOSIX(Path.join(folderPath, Path.dirname(subdir))))
+        }
+
+        const libraryItem = await Database.libraryItemModel.findOne({
+          where: {
+            path: possiblePaths
+          }
+        })
+
+        if (libraryItem) {
+          return res.json({
+            exists: true,
+            libraryItemTitle: libraryItem.title
+          })
+        }
+      }
+    }
+
+    return res.json({
+      exists: false
+    })
   }
 }
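Worked example (not part of the diff): how the "2 levels deep" candidate paths are built for a nested upload directory. Values are illustrative.

const Path = require('path')
const filePathToPOSIX = (p) => p.replace(/\\/g, '/') // stand-in for fileUtils.filePathToPOSIX

const folderPath = '/audiobooks'
const directory = 'Author Name/Book Title/Disc 1'

const possiblePaths = []
const subdir = Path.dirname(directory) // 'Author Name/Book Title'
possiblePaths.push(filePathToPOSIX(Path.join(folderPath, subdir)))
if (subdir.includes('/')) {
  possiblePaths.push(filePathToPOSIX(Path.join(folderPath, Path.dirname(subdir))))
}
console.log(possiblePaths)
// [ '/audiobooks/Author Name/Book Title', '/audiobooks/Author Name' ]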

server/controllers/LibraryController.js

@@ -23,6 +23,7 @@ const RssFeedManager = require('../managers/RssFeedManager')
 const libraryFilters = require('../utils/queries/libraryFilters')
 const libraryItemsPodcastFilters = require('../utils/queries/libraryItemsPodcastFilters')
 const authorFilters = require('../utils/queries/authorFilters')
+const zipHelpers = require('../utils/zipHelpers')

 /**
  * @typedef RequestUserObject
@@ -100,6 +101,15 @@ class LibraryController {
         return res.status(400).send(`Invalid request. Settings "${key}" must be a string`)
       }
       newLibraryPayload.settings[key] = req.body.settings[key]
+    } else if (key === 'markAsFinishedPercentComplete' || key === 'markAsFinishedTimeRemaining') {
+      if (req.body.settings[key] !== null && isNaN(req.body.settings[key])) {
+        return res.status(400).send(`Invalid request. Setting "${key}" must be a number`)
+      } else if (key === 'markAsFinishedPercentComplete' && req.body.settings[key] !== null && (Number(req.body.settings[key]) < 0 || Number(req.body.settings[key]) > 100)) {
+        return res.status(400).send(`Invalid request. Setting "${key}" must be between 0 and 100`)
+      } else if (key === 'markAsFinishedTimeRemaining' && req.body.settings[key] !== null && Number(req.body.settings[key]) < 0) {
+        return res.status(400).send(`Invalid request. Setting "${key}" must be greater than or equal to 0`)
+      }
+      newLibraryPayload.settings[key] = req.body.settings[key] === null ? null : Number(req.body.settings[key])
     } else {
       if (typeof req.body.settings[key] !== typeof newLibraryPayload.settings[key]) {
         return res.status(400).send(`Invalid request. Setting "${key}" must be of type ${typeof newLibraryPayload.settings[key]}`)
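Worked examples (not part of the diff) of how the validation above treats the new settings; null disables a setting and is stored as-is.

const validate = (key, value) => {
  if (value !== null && isNaN(value)) return 'must be a number'
  if (key === 'markAsFinishedPercentComplete' && value !== null && (Number(value) < 0 || Number(value) > 100)) return 'must be between 0 and 100'
  if (key === 'markAsFinishedTimeRemaining' && value !== null && Number(value) < 0) return 'must be greater than or equal to 0'
  return value === null ? null : Number(value)
}
console.log(validate('markAsFinishedPercentComplete', '95')) // 95
console.log(validate('markAsFinishedPercentComplete', '150')) // 'must be between 0 and 100'
console.log(validate('markAsFinishedTimeRemaining', null)) // null
console.log(validate('markAsFinishedTimeRemaining', '-10')) // 'must be greater than or equal to 0'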

@@ -170,21 +180,34 @@ class LibraryController {
    * GET: /api/libraries
    * Get all libraries
    *
+   * ?include=stats to load library stats - used in android auto to filter out libraries with no audio
+   *
    * @param {RequestWithUser} req
    * @param {Response} res
    */
   async findAll(req, res) {
-    const libraries = await Database.libraryModel.getAllWithFolders()
+    let libraries = await Database.libraryModel.getAllWithFolders()

     const librariesAccessible = req.user.permissions?.librariesAccessible || []
     if (librariesAccessible.length) {
-      return res.json({
-        libraries: libraries.filter((lib) => librariesAccessible.includes(lib.id)).map((lib) => lib.toOldJSON())
-      })
+      libraries = libraries.filter((lib) => librariesAccessible.includes(lib.id))
     }

+    libraries = libraries.map((lib) => lib.toOldJSON())
+
+    const includeArray = (req.query.include || '').split(',')
+    if (includeArray.includes('stats')) {
+      for (const library of libraries) {
+        if (library.mediaType === 'book') {
+          library.stats = await libraryItemsBookFilters.getBookLibraryStats(library.id)
+        } else if (library.mediaType === 'podcast') {
+          library.stats = await libraryItemsPodcastFilters.getPodcastLibraryStats(library.id)
+        }
+      }
+    }
+
     res.json({
-      libraries: libraries.map((lib) => lib.toOldJSON())
+      libraries
     })
   }
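Client-side sketch (not part of the diff) of the new ?include=stats option; BASE_URL and TOKEN are placeholders, and fetch is built into Node 18+.

const BASE_URL = 'http://localhost:3333' // placeholder
const TOKEN = '<api token>' // placeholder

fetch(`${BASE_URL}/api/libraries?include=stats`, { headers: { Authorization: `Bearer ${TOKEN}` } })
  .then((r) => r.json())
  .then(({ libraries }) => {
    // With ?include=stats each library object additionally carries a stats payload
    console.log(libraries.map((lib) => ({ name: lib.name, stats: lib.stats })))
  })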

@@ -232,6 +255,11 @@ class LibraryController {
    * @param {Response} res
    */
   async update(req, res) {
+    if (!req.user.isAdminOrUp) {
+      Logger.error(`[LibraryController] Non-admin user "${req.user.username}" attempted to update library`)
+      return res.sendStatus(403)
+    }
+
     // Validation
     const updatePayload = {}
     const keysToCheck = ['name', 'provider', 'mediaType', 'icon']
@@ -312,7 +340,7 @@ class LibraryController {
         }
         if (req.body.settings[key] !== updatedSettings[key]) {
           hasUpdates = true
-          updatedSettings[key] = Number(req.body.settings[key])
+          updatedSettings[key] = req.body.settings[key] === null ? null : Number(req.body.settings[key])
           Logger.debug(`[LibraryController] Library "${req.library.name}" updating setting "${key}" to "${updatedSettings[key]}"`)
         }
       } else if (key === 'markAsFinishedTimeRemaining') {
@@ -325,7 +353,7 @@ class LibraryController {
         }
         if (req.body.settings[key] !== updatedSettings[key]) {
           hasUpdates = true
-          updatedSettings[key] = Number(req.body.settings[key])
+          updatedSettings[key] = req.body.settings[key] === null ? null : Number(req.body.settings[key])
           Logger.debug(`[LibraryController] Library "${req.library.name}" updating setting "${key}" to "${updatedSettings[key]}"`)
         }
       } else {
@@ -497,6 +525,11 @@ class LibraryController {
    * @param {Response} res
    */
   async delete(req, res) {
+    if (!req.user.isAdminOrUp) {
+      Logger.error(`[LibraryController] Non-admin user "${req.user.username}" attempted to delete library`)
+      return res.sendStatus(403)
+    }
+
     // Remove library watcher
     Watcher.removeLibrary(req.library)

@@ -617,6 +650,11 @@ class LibraryController {
    * @param {Response} res
    */
   async removeLibraryItemsWithIssues(req, res) {
+    if (!req.user.isAdminOrUp) {
+      Logger.error(`[LibraryController] Non-admin user "${req.user.username}" attempted to delete library items missing or invalid`)
+      return res.sendStatus(403)
+    }
+
     const libraryItemsWithIssues = await Database.libraryItemModel.findAll({
       where: {
         libraryId: req.library.id,
@@ -1150,10 +1188,7 @@ class LibraryController {
     }

     if (itemsUpdated.length) {
-      SocketAuthority.emitter(
-        'items_updated',
-        itemsUpdated.map((li) => li.toOldJSONExpanded())
-      )
+      SocketAuthority.libraryItemsEmitter('items_updated', itemsUpdated)
     }

     res.json({
@@ -1194,10 +1229,7 @@ class LibraryController {
     }

     if (itemsUpdated.length) {
-      SocketAuthority.emitter(
-        'items_updated',
-        itemsUpdated.map((li) => li.toOldJSONExpanded())
-      )
+      SocketAuthority.libraryItemsEmitter('items_updated', itemsUpdated)
     }

     res.json({

@@ -1382,6 +1414,52 @@ class LibraryController {
     })
   }

+  /**
+   * GET: /api/library/:id/download
+   * Downloads multiple library items
+   *
+   * @param {LibraryControllerRequest} req
+   * @param {Response} res
+   */
+  async downloadMultiple(req, res) {
+    if (!req.user.canDownload) {
+      Logger.warn(`User "${req.user.username}" attempted to download without permission`)
+      return res.sendStatus(403)
+    }
+
+    if (!req.query.ids || typeof req.query.ids !== 'string') {
+      res.status(400).send('Invalid request. ids must be a string')
+      return
+    }
+
+    const itemIds = req.query.ids.split(',')
+
+    const libraryItems = await Database.libraryItemModel.findAll({
+      attributes: ['id', 'libraryId', 'path', 'isFile'],
+      where: {
+        id: itemIds
+      }
+    })
+
+    Logger.info(`[LibraryController] User "${req.user.username}" requested download for items "${itemIds}"`)
+
+    const filename = `LibraryItems-${Date.now()}.zip`
+    const pathObjects = libraryItems.map((li) => ({ path: li.path, isFile: li.isFile }))
+
+    if (!pathObjects.length) {
+      Logger.warn(`[LibraryController] No library items found for ids "${itemIds}"`)
+      return res.status(404).send('Library items not found')
+    }
+
+    try {
+      await zipHelpers.zipDirectoriesPipe(pathObjects, filename, res)
+      Logger.info(`[LibraryController] Downloaded ${pathObjects.length} items "${filename}"`)
+    } catch (error) {
+      Logger.error(`[LibraryController] Download failed for items "${filename}" at ${pathObjects.map((po) => po.path).join(', ')}`, error)
+      zipHelpers.handleDownloadError(error, res)
+    }
+  }
+
   /**
    *
    * @param {RequestWithUser} req

server/controllers/LibraryItemController.js

@@ -253,7 +253,7 @@ class LibraryItemController {
       }

       Logger.debug(`[LibraryItemController] Updated library item media ${req.libraryItem.media.title}`)
-      SocketAuthority.emitter('item_updated', req.libraryItem.toOldJSONExpanded())
+      SocketAuthority.libraryItemEmitter('item_updated', req.libraryItem)
     }
     res.json({
       updated: hasUpdates,
@@ -300,7 +300,7 @@ class LibraryItemController {
     req.libraryItem.changed('updatedAt', true)
     await req.libraryItem.save()

-    SocketAuthority.emitter('item_updated', req.libraryItem.toOldJSONExpanded())
+    SocketAuthority.libraryItemEmitter('item_updated', req.libraryItem)
     res.json({
       success: true,
       cover: result.cover
@@ -332,7 +332,7 @@ class LibraryItemController {
       req.libraryItem.changed('updatedAt', true)
       await req.libraryItem.save()

-      SocketAuthority.emitter('item_updated', req.libraryItem.toOldJSONExpanded())
+      SocketAuthority.libraryItemEmitter('item_updated', req.libraryItem)
     }
     res.json({
       success: true,
@@ -358,7 +358,7 @@ class LibraryItemController {

       await CacheManager.purgeCoverCache(req.libraryItem.id)

-      SocketAuthority.emitter('item_updated', req.libraryItem.toOldJSONExpanded())
+      SocketAuthority.libraryItemEmitter('item_updated', req.libraryItem)
     }

     res.sendStatus(200)
@@ -485,7 +485,7 @@ class LibraryItemController {
     req.libraryItem.media.changed('audioFiles', true)
     await req.libraryItem.media.save()

-    SocketAuthority.emitter('item_updated', req.libraryItem.toOldJSONExpanded())
+    SocketAuthority.libraryItemEmitter('item_updated', req.libraryItem)
     res.json(req.libraryItem.toOldJSON())
   }

@@ -663,7 +663,7 @@ class LibraryItemController {
         await libraryItem.saveMetadataFile()

         Logger.debug(`[LibraryItemController] Updated library item media "${libraryItem.media.title}"`)
-        SocketAuthority.emitter('item_updated', libraryItem.toOldJSONExpanded())
+        SocketAuthority.libraryItemEmitter('item_updated', libraryItem)
         itemsUpdated++
       }
     }
@@ -894,7 +894,7 @@ class LibraryItemController {

       await req.libraryItem.saveMetadataFile()

-      SocketAuthority.emitter('item_updated', req.libraryItem.toOldJSONExpanded())
+      SocketAuthority.libraryItemEmitter('item_updated', req.libraryItem)
     }

     res.json({
@@ -1005,7 +1005,7 @@ class LibraryItemController {

     await req.libraryItem.save()

-    SocketAuthority.emitter('item_updated', req.libraryItem.toOldJSONExpanded())
+    SocketAuthority.libraryItemEmitter('item_updated', req.libraryItem)
     res.sendStatus(200)
   }

@@ -1153,7 +1153,7 @@ class LibraryItemController {

     await req.libraryItem.save()

-    SocketAuthority.emitter('item_updated', req.libraryItem.toOldJSONExpanded())
+    SocketAuthority.libraryItemEmitter('item_updated', req.libraryItem)
     res.sendStatus(200)
   }

server/controllers/MiscController.js

@@ -343,7 +343,7 @@ class MiscController {
         })
         await libraryItem.saveMetadataFile()

-        SocketAuthority.emitter('item_updated', libraryItem.toOldJSONExpanded())
+        SocketAuthority.libraryItemEmitter('item_updated', libraryItem)
         numItemsUpdated++
       }
     }
@@ -386,7 +386,7 @@ class MiscController {
         })
         await libraryItem.saveMetadataFile()

-        SocketAuthority.emitter('item_updated', libraryItem.toOldJSONExpanded())
+        SocketAuthority.libraryItemEmitter('item_updated', libraryItem)
         numItemsUpdated++
       }

@@ -481,7 +481,7 @@ class MiscController {
         })
         await libraryItem.saveMetadataFile()

-        SocketAuthority.emitter('item_updated', libraryItem.toOldJSONExpanded())
+        SocketAuthority.libraryItemEmitter('item_updated', libraryItem)
         numItemsUpdated++
       }
     }
@@ -524,7 +524,7 @@ class MiscController {
         })
         await libraryItem.saveMetadataFile()

-        SocketAuthority.emitter('item_updated', libraryItem.toOldJSONExpanded())
+        SocketAuthority.libraryItemEmitter('item_updated', libraryItem)
         numItemsUpdated++
       }

server/controllers/PodcastController.js

@@ -107,7 +107,9 @@ class PodcastController {
         libraryFiles: [],
         extraData: {},
         libraryId: library.id,
-        libraryFolderId: folder.id
+        libraryFolderId: folder.id,
+        title: podcast.title,
+        titleIgnorePrefix: podcast.titleIgnorePrefix
       },
       { transaction }
     )
@@ -159,7 +161,7 @@ class PodcastController {
       }
     }

-    SocketAuthority.emitter('item_added', newLibraryItem.toOldJSONExpanded())
+    SocketAuthority.libraryItemEmitter('item_added', newLibraryItem)

     res.json(newLibraryItem.toOldJSONExpanded())

@@ -377,7 +379,7 @@ class PodcastController {
     const overrideDetails = req.query.override === '1'
     const episodesUpdated = await Scanner.quickMatchPodcastEpisodes(req.libraryItem, { overrideDetails })
     if (episodesUpdated) {
-      SocketAuthority.emitter('item_updated', req.libraryItem.toOldJSONExpanded())
+      SocketAuthority.libraryItemEmitter('item_updated', req.libraryItem)
     }

     res.json({
@@ -416,7 +418,7 @@ class PodcastController {
       Logger.info(`[PodcastController] Updated episode "${episode.title}" keys`, episode.changed())
       await episode.save()

-      SocketAuthority.emitter('item_updated', req.libraryItem.toOldJSONExpanded())
+      SocketAuthority.libraryItemEmitter('item_updated', req.libraryItem)
     } else {
       Logger.info(`[PodcastController] No changes to episode "${episode.title}"`)
     }
@@ -461,6 +463,9 @@ class PodcastController {
       return res.sendStatus(404)
     }

+    // Remove it from the podcastEpisodes array
+    req.libraryItem.media.podcastEpisodes = req.libraryItem.media.podcastEpisodes.filter((ep) => ep.id !== episodeId)
+
     if (hardDelete) {
       const audioFile = episode.audioFile
       // TODO: this will trigger the watcher. should maybe handle this gracefully
@@ -495,7 +500,11 @@ class PodcastController {
     req.libraryItem.changed('libraryFiles', true)
     await req.libraryItem.save()

-    SocketAuthority.emitter('item_updated', req.libraryItem.toOldJSONExpanded())
+    // update number of episodes
+    req.libraryItem.media.numEpisodes = req.libraryItem.media.podcastEpisodes.length
+    await req.libraryItem.media.save()
+
+    SocketAuthority.libraryItemEmitter('item_updated', req.libraryItem)
     res.json(req.libraryItem.toOldJSON())
   }

server/controllers/StatsController.js (new file, 75 lines)

@@ -0,0 +1,75 @@
+const { Request, Response, NextFunction } = require('express')
+const Logger = require('../Logger')
+
+const adminStats = require('../utils/queries/adminStats')
+
+/**
+ * @typedef RequestUserObject
+ * @property {import('../models/User')} user
+ *
+ * @typedef {Request & RequestUserObject} RequestWithUser
+ */
+
+class StatsController {
+  constructor() {}
+
+  /**
+   * GET: /api/stats/server
+   * Currently not in use
+   *
+   * @param {RequestWithUser} req
+   * @param {Response} res
+   */
+  async getServerStats(req, res) {
+    Logger.debug('[StatsController] getServerStats')
+    const totalSize = await adminStats.getTotalSize()
+    const numAudioFiles = await adminStats.getNumAudioFiles()
+
+    res.json({
+      books: {
+        ...totalSize.books,
+        numAudioFiles: numAudioFiles.numBookAudioFiles
+      },
+      podcasts: {
+        ...totalSize.podcasts,
+        numAudioFiles: numAudioFiles.numPodcastAudioFiles
+      },
+      total: {
+        ...totalSize.total,
+        numAudioFiles: numAudioFiles.numAudioFiles
+      }
+    })
+  }
+
+  /**
+   * GET: /api/stats/year/:year
+   *
+   * @param {RequestWithUser} req
+   * @param {Response} res
+   */
+  async getAdminStatsForYear(req, res) {
+    const year = Number(req.params.year)
+    if (isNaN(year) || year < 2000 || year > 9999) {
+      Logger.error(`[StatsController] Invalid year "${year}"`)
+      return res.status(400).send('Invalid year')
+    }
+    const stats = await adminStats.getStatsForYear(year)
+    res.json(stats)
+  }
+
+  /**
+   *
+   * @param {RequestWithUser} req
+   * @param {Response} res
+   * @param {NextFunction} next
+   */
+  async middleware(req, res, next) {
+    if (!req.user.isAdminOrUp) {
+      Logger.error(`[StatsController] Non-admin user "${req.user.username}" attempted to access stats route`)
+      return res.sendStatus(403)
+    }
+
+    next()
+  }
+}
+module.exports = new StatsController()

server/finders/BookFinder.js

@@ -8,6 +8,7 @@ const AudiobookCovers = require('../providers/AudiobookCovers')
 const CustomProviderAdapter = require('../providers/CustomProviderAdapter')
 const Logger = require('../Logger')
 const { levenshteinDistance, escapeRegExp } = require('../utils/index')
+const htmlSanitizer = require('../utils/htmlSanitizer')

 class BookFinder {
   #providerResponseTimeout = 30000
@@ -463,6 +464,12 @@ class BookFinder {
     } else {
       books = await this.getGoogleBooksResults(title, author)
     }
+    books.forEach((book) => {
+      if (book.description) {
+        book.description = htmlSanitizer.sanitize(book.description)
+        book.descriptionPlain = htmlSanitizer.stripAllTags(book.description)
+      }
+    })
     return books
   }
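Illustration (not part of the diff; the HTML and outputs are made up): the intended effect of sanitizing provider descriptions. sanitize() strips disallowed markup such as script tags, and stripAllTags() then yields the plain-text descriptionPlain.

const book = { description: '<p>A <b>great</b> listen<script>alert(1)</script></p>' }
// After the forEach above, the expected shape is roughly:
const expected = {
  description: '<p>A <b>great</b> listen</p>', // sanitized HTML (illustrative)
  descriptionPlain: 'A great listen' // all tags removed (illustrative)
}
console.log(expected)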

server/libs/sanitizeHtml/index.js

@@ -7,12 +7,6 @@
  */

 const htmlparser = require('htmlparser2');
-// const escapeStringRegexp = require('escape-string-regexp');
-// const { isPlainObject } = require('is-plain-object');
-// const deepmerge = require('deepmerge');
-// const parseSrcset = require('parse-srcset');
-// const { parse: postcssParse } = require('postcss');
-// Tags that can conceivably represent stand-alone media.

 // ABS UPDATE: Packages not necessary
 // SOURCE: https://github.com/sindresorhus/escape-string-regexp/blob/main/index.js
@@ -76,17 +70,6 @@ function has(obj, key) {
   return ({}).hasOwnProperty.call(obj, key);
 }

-// Returns those elements of `a` for which `cb(a)` returns truthy
-function filter(a, cb) {
-  const n = [];
-  each(a, function (v) {
-    if (cb(v)) {
-      n.push(v);
-    }
-  });
-  return n;
-}
-
 function isEmptyObject(obj) {
   for (const key in obj) {
     if (has(obj, key)) {
@@ -96,21 +79,6 @@ function isEmptyObject(obj) {
   return true;
 }

-function stringifySrcset(parsedSrcset) {
-  return parsedSrcset.map(function (part) {
-    if (!part.url) {
-      throw new Error('URL missing');
-    }
-
-    return (
-      part.url +
-      (part.w ? ` ${part.w}w` : '') +
-      (part.h ? ` ${part.h}h` : '') +
-      (part.d ? ` ${part.d}x` : '')
-    );
-  }).join(', ');
-}
-
 module.exports = sanitizeHtml;

 // A valid attribute name.
@@ -714,86 +682,6 @@ function sanitizeHtml(html, options, _recursing) {
     return !options.allowedSchemes || options.allowedSchemes.indexOf(scheme) === -1;
   }

-  /**
-   * Filters user input css properties by allowlisted regex attributes.
-   * Modifies the abstractSyntaxTree object.
-   *
-   * @param {object} abstractSyntaxTree - Object representation of CSS attributes.
-   * @property {array[Declaration]} abstractSyntaxTree.nodes[0] - Each object cointains prop and value key, i.e { prop: 'color', value: 'red' }.
-   * @param {object} allowedStyles - Keys are properties (i.e color), value is list of permitted regex rules (i.e /green/i).
-   * @return {object} - The modified tree.
-   */
-  // function filterCss(abstractSyntaxTree, allowedStyles) {
-  //   if (!allowedStyles) {
-  //     return abstractSyntaxTree;
-  //   }
-
-  //   const astRules = abstractSyntaxTree.nodes[0];
-  //   let selectedRule;
-
-  //   // Merge global and tag-specific styles into new AST.
-  //   if (allowedStyles[astRules.selector] && allowedStyles['*']) {
-  //     selectedRule = deepmerge(
-  //       allowedStyles[astRules.selector],
-  //       allowedStyles['*']
-  //     );
-  //   } else {
-  //     selectedRule = allowedStyles[astRules.selector] || allowedStyles['*'];
-  //   }

-  //   if (selectedRule) {
-  //     abstractSyntaxTree.nodes[0].nodes = astRules.nodes.reduce(filterDeclarations(selectedRule), []);
-  //   }

-  //   return abstractSyntaxTree;
-  // }
-
-  /**
-   * Extracts the style attributes from an AbstractSyntaxTree and formats those
-   * values in the inline style attribute format.
-   *
-   * @param {AbstractSyntaxTree} filteredAST
-   * @return {string} - Example: "color:yellow;text-align:center !important;font-family:helvetica;"
-   */
-  function stringifyStyleAttributes(filteredAST) {
-    return filteredAST.nodes[0].nodes
-      .reduce(function (extractedAttributes, attrObject) {
-        extractedAttributes.push(
-          `${attrObject.prop}:${attrObject.value}${attrObject.important ? ' !important' : ''}`
-        );
-        return extractedAttributes;
-      }, [])
-      .join(';');
-  }
-
-  /**
-   * Filters the existing attributes for the given property. Discards any attributes
-   * which don't match the allowlist.
-   *
-   * @param {object} selectedRule - Example: { color: red, font-family: helvetica }
-   * @param {array} allowedDeclarationsList - List of declarations which pass the allowlist.
-   * @param {object} attributeObject - Object representing the current css property.
-   * @property {string} attributeObject.type - Typically 'declaration'.
-   * @property {string} attributeObject.prop - The CSS property, i.e 'color'.
-   * @property {string} attributeObject.value - The corresponding value to the css property, i.e 'red'.
-   * @return {function} - When used in Array.reduce, will return an array of Declaration objects
-   */
-  function filterDeclarations(selectedRule) {
-    return function (allowedDeclarationsList, attributeObject) {
-      // If this property is allowlisted...
-      if (has(selectedRule, attributeObject.prop)) {
-        const matchesRegex = selectedRule[attributeObject.prop].some(function (regularExpression) {
-          return regularExpression.test(attributeObject.value);
-        });
-
-        if (matchesRegex) {
-          allowedDeclarationsList.push(attributeObject);
-        }
-      }
-      return allowedDeclarationsList;
-    };
-  }
-
   function filterClasses(classes, allowed, allowedGlobs) {
     if (!allowed) {
       // The class attribute is allowed without filtering on this tag

server/managers/ApiCacheManager.js

@@ -42,8 +42,7 @@ class ApiCacheManager {
       Logger.debug(`[ApiCacheManager] Skipping cache for random sort`)
       return next()
     }
-    // Force URL to be lower case for matching against routes
-    req.url = req.url.toLowerCase()
+
     const key = { user: req.user.username, url: req.url }
     const stringifiedKey = JSON.stringify(key)
     Logger.debug(`[ApiCacheManager] count: ${this.cache.size} size: ${this.cache.calculatedSize}`)

server/managers/MigrationManager.js

@@ -130,7 +130,21 @@ class MigrationManager {

   async initUmzug(umzugStorage = new SequelizeStorage({ sequelize: this.sequelize })) {
     // This check is for dependency injection in tests
-    const files = (await fs.readdir(this.migrationsDir)).filter((file) => !file.startsWith('.')).map((file) => path.join(this.migrationsDir, file))
+    const files = (await fs.readdir(this.migrationsDir))
+      .filter((file) => {
+        // Only include .js files and exclude dot files
+        return !file.startsWith('.') && path.extname(file).toLowerCase() === '.js'
+      })
+      .map((file) => path.join(this.migrationsDir, file))
+
+    // Validate migration names
+    for (const file of files) {
+      const migrationName = path.basename(file, path.extname(file))
+      const migrationVersion = this.extractVersionFromTag(migrationName)
+      if (!migrationVersion) {
+        throw new Error(`Invalid migration file: "${migrationName}". Unable to extract version from filename.`)
+      }
+    }

     const parent = new Umzug({
       migrations: {
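Worked example (not part of the diff): the stricter migration-file filter now keeps only .js files without leading dots.

const path = require('path')
const files = ['v2.19.1-copy-title-to-library-items.js', '.DS_Store', 'notes.md', 'v2.19.4-improve-podcast-queries.js']
const kept = files.filter((file) => !file.startsWith('.') && path.extname(file).toLowerCase() === '.js')
console.log(kept)
// [ 'v2.19.1-copy-title-to-library-items.js', 'v2.19.4-improve-podcast-queries.js' ]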

server/managers/PodcastManager.js

@@ -72,6 +72,15 @@ class PodcastManager {
    */
   async startPodcastEpisodeDownload(podcastEpisodeDownload) {
     if (this.currentDownload) {
+      // Prevent downloading episodes from the same URL for the same library item.
+      // Allow downloading for different library items in case of the same podcast existing in multiple libraries (e.g. different folders)
+      if (this.downloadQueue.some((d) => d.url === podcastEpisodeDownload.url && d.libraryItem.id === podcastEpisodeDownload.libraryItem.id)) {
+        Logger.warn(`[PodcastManager] Episode already in queue: "${this.currentDownload.episodeTitle}"`)
+        return
+      } else if (this.currentDownload.url === podcastEpisodeDownload.url && this.currentDownload.libraryItem.id === podcastEpisodeDownload.libraryItem.id) {
+        Logger.warn(`[PodcastManager] Episode download already in progress for "${podcastEpisodeDownload.episodeTitle}"`)
+        return
+      }
       this.downloadQueue.push(podcastEpisodeDownload)
       SocketAuthority.emitter('episode_download_queued', podcastEpisodeDownload.toJSONForClient())
       return
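Illustration (not part of the diff; stub objects): how the de-duplication treats an episode URL that is already downloading, while the same URL under a different library item is still allowed.

const makeDownload = (url, libraryItemId) => ({ url, libraryItem: { id: libraryItemId } })
const currentDownload = makeDownload('https://feed.example/ep1.mp3', 'li1')
const downloadQueue = [makeDownload('https://feed.example/ep2.mp3', 'li1')]

const incoming = makeDownload('https://feed.example/ep1.mp3', 'li1')
const inQueue = downloadQueue.some((d) => d.url === incoming.url && d.libraryItem.id === incoming.libraryItem.id)
const inProgress = currentDownload.url === incoming.url && currentDownload.libraryItem.id === incoming.libraryItem.id
console.log({ inQueue, inProgress }) // { inQueue: false, inProgress: true } -> this request is skipped

const sameUrlOtherLibraryItem = makeDownload('https://feed.example/ep1.mp3', 'li2')
console.log(currentDownload.libraryItem.id === sameUrlOtherLibraryItem.libraryItem.id) // false -> allowed to queue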

@@ -99,7 +108,7 @@ class PodcastManager {
     // e.g. "/tagesschau 20 Uhr.mp3" becomes "/tagesschau 20 Uhr (ep_asdfasdf).mp3"
     // this handles podcasts where every title is the same (ref https://github.com/advplyr/audiobookshelf/issues/1802)
     if (await fs.pathExists(this.currentDownload.targetPath)) {
-      this.currentDownload.appendRandomId = true
+      this.currentDownload.setAppendRandomId(true)
     }

     // Ignores all added files to this dir
@@ -115,10 +124,24 @@ class PodcastManager {
     let success = false
     if (this.currentDownload.isMp3) {
       // Download episode and tag it
-      success = await ffmpegHelpers.downloadPodcastEpisode(this.currentDownload).catch((error) => {
+      const ffmpegDownloadResponse = await ffmpegHelpers.downloadPodcastEpisode(this.currentDownload).catch((error) => {
         Logger.error(`[PodcastManager] Podcast Episode download failed`, error)
         return false
       })
+      success = !!ffmpegDownloadResponse?.success
+
+      // If failed due to ffmpeg error, retry without tagging
+      // e.g. RSS feed may have incorrect file extension and file type
+      // See https://github.com/advplyr/audiobookshelf/issues/3837
+      if (!success && ffmpegDownloadResponse?.isFfmpegError) {
+        Logger.info(`[PodcastManager] Retrying episode download without tagging`)
+        // Download episode only
+        success = await downloadFile(this.currentDownload.url, this.currentDownload.targetPath)
+          .then(() => true)
+          .catch((error) => {
+            Logger.error(`[PodcastManager] Podcast Episode download failed`, error)
+            return false
+          })
+      }
     } else {
       // Download episode only
       success = await downloadFile(this.currentDownload.url, this.currentDownload.targetPath)
@@ -188,6 +211,14 @@ class PodcastManager {
     const podcastEpisode = await Database.podcastEpisodeModel.createFromRssPodcastEpisode(this.currentDownload.rssPodcastEpisode, libraryItem.media.id, audioFile)

     libraryItem.libraryFiles.push(libraryFile.toJSON())
+    // Re-calculating library item size because this wasnt being updated properly for podcasts in v2.20.0 and below
+    let libraryItemSize = 0
+    libraryItem.libraryFiles.forEach((lf) => {
+      if (lf.metadata.size && !isNaN(lf.metadata.size)) {
+        libraryItemSize += Number(lf.metadata.size)
+      }
+    })
+    libraryItem.size = libraryItemSize
     libraryItem.changed('libraryFiles', true)

     libraryItem.media.podcastEpisodes.push(podcastEpisode)
@@ -218,7 +249,12 @@ class PodcastManager {

     await libraryItem.save()

-    SocketAuthority.emitter('item_updated', libraryItem.toOldJSONExpanded())
+    if (libraryItem.media.numEpisodes !== libraryItem.media.podcastEpisodes.length) {
+      libraryItem.media.numEpisodes = libraryItem.media.podcastEpisodes.length
+      await libraryItem.media.save()
+    }
+
+    SocketAuthority.libraryItemEmitter('item_updated', libraryItem)
     const podcastEpisodeExpanded = podcastEpisode.toOldJSONExpanded(libraryItem.id)
     podcastEpisodeExpanded.libraryItem = libraryItem.toOldJSONExpanded()
     SocketAuthority.emitter('episode_added', podcastEpisodeExpanded)
@@ -331,7 +367,7 @@ class PodcastManager {
     libraryItem.changed('updatedAt', true)
     await libraryItem.save()

-    SocketAuthority.emitter('item_updated', libraryItem.toOldJSONExpanded())
+    SocketAuthority.libraryItemEmitter('item_updated', libraryItem)

     return libraryItem.media.autoDownloadEpisodes
   }
@@ -389,7 +425,7 @@ class PodcastManager {
     libraryItem.changed('updatedAt', true)
     await libraryItem.save()

-    SocketAuthority.emitter('item_updated', libraryItem.toOldJSONExpanded())
+    SocketAuthority.libraryItemEmitter('item_updated', libraryItem)

     return newEpisodes || []
   }
@@ -608,7 +644,9 @@ class PodcastManager {
       libraryFiles: [],
       extraData: {},
       libraryId: folder.libraryId,
-      libraryFolderId: folder.id
+      libraryFolderId: folder.id,
+      title: podcast.title,
+      titleIgnorePrefix: podcast.titleIgnorePrefix
     },
     { transaction }
   )
@@ -674,7 +712,7 @@ class PodcastManager {
     }
   }

-  SocketAuthority.emitter('item_added', newLibraryItem.toOldJSONExpanded())
+  SocketAuthority.libraryItemEmitter('item_added', newLibraryItem)

   // Turn on podcast auto download cron if not already on
   if (newLibraryItem.media.autoDownloadEpisodes) {

server/managers/RssFeedManager.js

@@ -246,6 +246,15 @@ class RssFeedManager {
     const extname = Path.extname(feed.coverPath).toLowerCase().slice(1)
     res.type(`image/${extname}`)

     const readStream = fs.createReadStream(feed.coverPath)
+
+    readStream.on('error', (error) => {
+      Logger.error(`[RssFeedManager] Error streaming cover image: ${error.message}`)
+      // Only send error if headers haven't been sent yet
+      if (!res.headersSent) {
+        res.sendStatus(404)
+      }
+    })
+
     readStream.pipe(res)
   }

server/migrations/changelog.md

@@ -13,3 +13,6 @@ Please add a record of every database migration that you create to this file. Th
 | v2.17.5 | v2.17.5-remove-host-from-feed-urls | removes the host (serverAddress) from URL columns in the feeds and feedEpisodes tables |
 | v2.17.6 | v2.17.6-share-add-isdownloadable | Adds the isDownloadable column to the mediaItemShares table |
 | v2.17.7 | v2.17.7-add-indices | Adds indices to the libraryItems and books tables to reduce query times |
+| v2.19.1 | v2.19.1-copy-title-to-library-items | Copies title and titleIgnorePrefix to the libraryItems table, creates update triggers and indices |
+| v2.19.4 | v2.19.4-improve-podcast-queries | Adds numEpisodes to podcasts, adds podcastId to mediaProgresses, copies podcast title to libraryItems |
+| v2.20.0 | v2.20.0-improve-author-sort-queries | Adds AuthorNames(FirstLast\|LastFirst) to libraryItems to improve author sort queries |
|
||||
|
|
164
server/migrations/v2.19.1-copy-title-to-library-items.js
Normal file
164
server/migrations/v2.19.1-copy-title-to-library-items.js
Normal file
|
@ -0,0 +1,164 @@
|
|||
const util = require('util')
|
||||
|
||||
/**
|
||||
* @typedef MigrationContext
|
||||
* @property {import('sequelize').QueryInterface} queryInterface - a suquelize QueryInterface object.
|
||||
* @property {import('../Logger')} logger - a Logger object.
|
||||
*
|
||||
* @typedef MigrationOptions
|
||||
* @property {MigrationContext} context - an object containing the migration context.
|
||||
*/
|
||||
|
||||
const migrationVersion = '2.19.1'
|
||||
const migrationName = `${migrationVersion}-copy-title-to-library-items`
|
||||
const loggerPrefix = `[${migrationVersion} migration]`
|
||||
|
||||
/**
|
||||
* This upward migration adds a title column to the libraryItems table, copies the title from the book to the libraryItem,
|
||||
* and creates a new index on the title column. In addition it sets a trigger on the books table to update the title column
|
||||
* in the libraryItems table when a book is updated.
|
||||
*
|
||||
* @param {MigrationOptions} options - an object containing the migration context.
|
||||
* @returns {Promise<void>} - A promise that resolves when the migration is complete.
|
||||
*/
|
||||
async function up({ context: { queryInterface, logger } }) {
|
||||
// Upwards migration script
|
||||
logger.info(`${loggerPrefix} UPGRADE BEGIN: ${migrationName}`)
|
||||
|
||||
await addColumn(queryInterface, logger, 'libraryItems', 'title', { type: queryInterface.sequelize.Sequelize.STRING, allowNull: true })
|
||||
await copyColumn(queryInterface, logger, 'books', 'title', 'id', 'libraryItems', 'title', 'mediaId')
|
||||
await addTrigger(queryInterface, logger, 'books', 'title', 'id', 'libraryItems', 'title', 'mediaId')
|
||||
await addIndex(queryInterface, logger, 'libraryItems', ['libraryId', 'mediaType', { name: 'title', collate: 'NOCASE' }])
|
||||
|
||||
await addColumn(queryInterface, logger, 'libraryItems', 'titleIgnorePrefix', { type: queryInterface.sequelize.Sequelize.STRING, allowNull: true })
|
||||
await copyColumn(queryInterface, logger, 'books', 'titleIgnorePrefix', 'id', 'libraryItems', 'titleIgnorePrefix', 'mediaId')
|
||||
await addTrigger(queryInterface, logger, 'books', 'titleIgnorePrefix', 'id', 'libraryItems', 'titleIgnorePrefix', 'mediaId')
|
||||
await addIndex(queryInterface, logger, 'libraryItems', ['libraryId', 'mediaType', { name: 'titleIgnorePrefix', collate: 'NOCASE' }])
|
||||
|
||||
await addIndex(queryInterface, logger, 'libraryItems', ['libraryId', 'mediaType', 'createdAt'])
|
||||
|
||||
logger.info(`${loggerPrefix} UPGRADE END: ${migrationName}`)
|
||||
}
|
||||
|
||||
/**
|
||||
* This downward migration script removes the title column from the libraryItems table, removes the trigger on the books table,
|
||||
* and removes the index on the title column.
|
||||
*
|
||||
* @param {MigrationOptions} options - an object containing the migration context.
|
||||
* @returns {Promise<void>} - A promise that resolves when the migration is complete.
|
||||
*/
|
||||
async function down({ context: { queryInterface, logger } }) {
|
||||
// Downward migration script
|
||||
logger.info(`${loggerPrefix} DOWNGRADE BEGIN: ${migrationName}`)
|
||||
|
||||
await removeIndex(queryInterface, logger, 'libraryItems', ['libraryId', 'mediaType', 'title'])
|
||||
await removeTrigger(queryInterface, logger, 'libraryItems', 'title')
|
||||
await removeColumn(queryInterface, logger, 'libraryItems', 'title')
|
||||
|
||||
await removeIndex(queryInterface, logger, 'libraryItems', ['libraryId', 'mediaType', 'titleIgnorePrefix'])
|
||||
await removeTrigger(queryInterface, logger, 'libraryItems', 'titleIgnorePrefix')
|
||||
await removeColumn(queryInterface, logger, 'libraryItems', 'titleIgnorePrefix')
|
||||
|
||||
await removeIndex(queryInterface, logger, 'libraryItems', ['libraryId', 'mediaType', 'createdAt'])
|
||||
|
||||
logger.info(`${loggerPrefix} DOWNGRADE END: ${migrationName}`)
|
||||
}
|
||||
|
||||
/**
|
||||
* Utility function to add an index to a table. If the index already z`exists, it logs a message and continues.
|
||||
*
|
||||
* @param {import('sequelize').QueryInterface} queryInterface
|
||||
* @param {import ('../Logger')} logger
|
||||
* @param {string} tableName
|
||||
* @param {string[]} columns
|
||||
*/
|
||||
async function addIndex(queryInterface, logger, tableName, columns) {
|
||||
const columnString = columns.map((column) => util.inspect(column)).join(', ')
|
||||
const indexName = convertToSnakeCase(`${tableName}_${columns.map((column) => (typeof column === 'string' ? column : column.name)).join('_')}`)
|
||||
try {
|
||||
logger.info(`${loggerPrefix} adding index on [${columnString}] to table ${tableName}. index name: ${indexName}"`)
|
||||
await queryInterface.addIndex(tableName, columns)
|
||||
logger.info(`${loggerPrefix} added index on [${columnString}] to table ${tableName}. index name: ${indexName}"`)
|
||||
} catch (error) {
|
||||
if (error.name === 'SequelizeDatabaseError' && error.message.includes('already exists')) {
|
||||
logger.info(`${loggerPrefix} index [${columnString}] for table "${tableName}" already exists`)
|
||||
} else {
|
||||
throw error
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Utility function to remove an index from a table.
|
||||
* Sequelize implemets it using DROP INDEX IF EXISTS, so it won't throw an error if the index doesn't exist.
|
||||
*
|
||||
* @param {import('sequelize').QueryInterface} queryInterface
|
||||
* @param {import ('../Logger')} logger
|
||||
* @param {string} tableName
|
||||
* @param {string[]} columns
|
||||
*/
|
||||
async function removeIndex(queryInterface, logger, tableName, columns) {
|
||||
logger.info(`${loggerPrefix} removing index [${columns.join(', ')}] from table "${tableName}"`)
|
||||
await queryInterface.removeIndex(tableName, columns)
|
||||
logger.info(`${loggerPrefix} removed index [${columns.join(', ')}] from table "${tableName}"`)
|
||||
}

async function addColumn(queryInterface, logger, table, column, options) {
  logger.info(`${loggerPrefix} adding column "${column}" to table "${table}"`)
  const tableDescription = await queryInterface.describeTable(table)
  if (!tableDescription[column]) {
    await queryInterface.addColumn(table, column, options)
    logger.info(`${loggerPrefix} added column "${column}" to table "${table}"`)
  } else {
    logger.info(`${loggerPrefix} column "${column}" already exists in table "${table}"`)
  }
}

async function removeColumn(queryInterface, logger, table, column) {
  logger.info(`${loggerPrefix} removing column "${column}" from table "${table}"`)
  await queryInterface.removeColumn(table, column)
  logger.info(`${loggerPrefix} removed column "${column}" from table "${table}"`)
}

async function copyColumn(queryInterface, logger, sourceTable, sourceColumn, sourceIdColumn, targetTable, targetColumn, targetIdColumn) {
  logger.info(`${loggerPrefix} copying column "${sourceColumn}" from table "${sourceTable}" to table "${targetTable}"`)
  await queryInterface.sequelize.query(`
    UPDATE ${targetTable}
    SET ${targetColumn} = ${sourceTable}.${sourceColumn}
    FROM ${sourceTable}
    WHERE ${targetTable}.${targetIdColumn} = ${sourceTable}.${sourceIdColumn}
  `)
  logger.info(`${loggerPrefix} copied column "${sourceColumn}" from table "${sourceTable}" to table "${targetTable}"`)
}

async function addTrigger(queryInterface, logger, sourceTable, sourceColumn, sourceIdColumn, targetTable, targetColumn, targetIdColumn) {
  logger.info(`${loggerPrefix} adding trigger to update ${targetTable}.${targetColumn} when ${sourceTable}.${sourceColumn} is updated`)
  const triggerName = convertToSnakeCase(`update_${targetTable}_${targetColumn}`)

  await queryInterface.sequelize.query(`DROP TRIGGER IF EXISTS ${triggerName}`)

  await queryInterface.sequelize.query(`
    CREATE TRIGGER ${triggerName}
    AFTER UPDATE OF ${sourceColumn} ON ${sourceTable}
    FOR EACH ROW
    BEGIN
      UPDATE ${targetTable}
      SET ${targetColumn} = NEW.${sourceColumn}
      WHERE ${targetTable}.${targetIdColumn} = NEW.${sourceIdColumn};
    END;
  `)
  logger.info(`${loggerPrefix} added trigger to update ${targetTable}.${targetColumn} when ${sourceTable}.${sourceColumn} is updated`)
}

async function removeTrigger(queryInterface, logger, targetTable, targetColumn) {
  logger.info(`${loggerPrefix} removing trigger to update ${targetTable}.${targetColumn}`)
  const triggerName = convertToSnakeCase(`update_${targetTable}_${targetColumn}`)
  await queryInterface.sequelize.query(`DROP TRIGGER IF EXISTS ${triggerName}`)
  logger.info(`${loggerPrefix} removed trigger to update ${targetTable}.${targetColumn}`)
}

function convertToSnakeCase(str) {
  return str.replace(/([A-Z])/g, '_$1').toLowerCase()
}

module.exports = { up, down }
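
Reviewer note: a quick sketch (plain Node, not part of the patch) of what convertToSnakeCase() produces for the trigger names; the inputs are illustrative camelCase table/column pairs:

// Mirrors the helper defined above
const convertToSnakeCase = (str) => str.replace(/([A-Z])/g, '_$1').toLowerCase()

console.log(convertToSnakeCase('update_libraryItems_title')) // update_library_items_title
console.log(convertToSnakeCase('update_libraryItems_titleIgnorePrefix')) // update_library_items_title_ignore_prefix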

219 server/migrations/v2.19.4-improve-podcast-queries.js Normal file

@@ -0,0 +1,219 @@
const util = require('util')

/**
 * @typedef MigrationContext
 * @property {import('sequelize').QueryInterface} queryInterface - a sequelize QueryInterface object.
 * @property {import('../Logger')} logger - a Logger object.
 *
 * @typedef MigrationOptions
 * @property {MigrationContext} context - an object containing the migration context.
 */

const migrationVersion = '2.19.4'
const migrationName = `${migrationVersion}-improve-podcast-queries`
const loggerPrefix = `[${migrationVersion} migration]`

/**
 * This upward migration adds a numEpisodes column to the podcasts table and populates it.
 * It also adds a podcastId column to the mediaProgresses table and populates it.
 * It also copies the title and titleIgnorePrefix columns from the podcasts table to the libraryItems table,
 * and adds triggers to update them when the corresponding columns in the podcasts table are updated.
 *
 * @param {MigrationOptions} options - an object containing the migration context.
 * @returns {Promise<void>} - A promise that resolves when the migration is complete.
 */
async function up({ context: { queryInterface, logger } }) {
  // Upwards migration script
  logger.info(`${loggerPrefix} UPGRADE BEGIN: ${migrationName}`)

  // Add numEpisodes column to podcasts table
  await addColumn(queryInterface, logger, 'podcasts', 'numEpisodes', { type: queryInterface.sequelize.Sequelize.INTEGER, allowNull: false, defaultValue: 0 })

  // Populate numEpisodes column with the number of episodes for each podcast
  await populateNumEpisodes(queryInterface, logger)

  // Add podcastId column to mediaProgresses table
  await addColumn(queryInterface, logger, 'mediaProgresses', 'podcastId', { type: queryInterface.sequelize.Sequelize.UUID, allowNull: true })

  // Populate podcastId column with the podcastId for each mediaProgress
  await populatePodcastId(queryInterface, logger)

  // Copy title and titleIgnorePrefix columns from podcasts to libraryItems
  await copyColumn(queryInterface, logger, 'podcasts', 'title', 'id', 'libraryItems', 'title', 'mediaId')
  await copyColumn(queryInterface, logger, 'podcasts', 'titleIgnorePrefix', 'id', 'libraryItems', 'titleIgnorePrefix', 'mediaId')

  // Add triggers to update title and titleIgnorePrefix in libraryItems
  await addTrigger(queryInterface, logger, 'podcasts', 'title', 'id', 'libraryItems', 'title', 'mediaId')
  await addTrigger(queryInterface, logger, 'podcasts', 'titleIgnorePrefix', 'id', 'libraryItems', 'titleIgnorePrefix', 'mediaId')

  logger.info(`${loggerPrefix} UPGRADE END: ${migrationName}`)
}

/**
 * This downward migration removes the triggers on the podcasts table,
 * the numEpisodes column from the podcasts table, and the podcastId column from the mediaProgresses table.
 *
 * @param {MigrationOptions} options - an object containing the migration context.
 * @returns {Promise<void>} - A promise that resolves when the migration is complete.
 */
async function down({ context: { queryInterface, logger } }) {
  // Downward migration script
  logger.info(`${loggerPrefix} DOWNGRADE BEGIN: ${migrationName}`)

  // Remove triggers that update libraryItems
  await removeTrigger(queryInterface, logger, 'podcasts', 'title', 'libraryItems', 'title')
  await removeTrigger(queryInterface, logger, 'podcasts', 'titleIgnorePrefix', 'libraryItems', 'titleIgnorePrefix')

  // Remove numEpisodes column from podcasts table
  await removeColumn(queryInterface, logger, 'podcasts', 'numEpisodes')

  // Remove podcastId column from mediaProgresses table
  await removeColumn(queryInterface, logger, 'mediaProgresses', 'podcastId')

  logger.info(`${loggerPrefix} DOWNGRADE END: ${migrationName}`)
}

async function populateNumEpisodes(queryInterface, logger) {
  logger.info(`${loggerPrefix} populating numEpisodes column in podcasts table`)
  await queryInterface.sequelize.query(`
    UPDATE podcasts
    SET numEpisodes = (SELECT COUNT(*) FROM podcastEpisodes WHERE podcastEpisodes.podcastId = podcasts.id)
  `)
  logger.info(`${loggerPrefix} populated numEpisodes column in podcasts table`)
}

async function populatePodcastId(queryInterface, logger) {
  logger.info(`${loggerPrefix} populating podcastId column in mediaProgresses table`)
  // bulk update podcastId to the podcastId of the podcastEpisode if the mediaItemType is podcastEpisode
  await queryInterface.sequelize.query(`
    UPDATE mediaProgresses
    SET podcastId = (SELECT podcastId FROM podcastEpisodes WHERE podcastEpisodes.id = mediaProgresses.mediaItemId)
    WHERE mediaItemType = 'podcastEpisode'
  `)
  logger.info(`${loggerPrefix} populated podcastId column in mediaProgresses table`)
}

/**
 * Utility function to add a column to a table. If the column already exists, it logs a message and continues.
 *
 * @param {import('sequelize').QueryInterface} queryInterface - a sequelize QueryInterface object.
 * @param {import('../Logger')} logger - a Logger object.
 * @param {string} table - the name of the table to add the column to.
 * @param {string} column - the name of the column to add.
 * @param {Object} options - the options for the column.
 */
async function addColumn(queryInterface, logger, table, column, options) {
  logger.info(`${loggerPrefix} adding column "${column}" to table "${table}"`)
  const tableDescription = await queryInterface.describeTable(table)
  if (!tableDescription[column]) {
    await queryInterface.addColumn(table, column, options)
    logger.info(`${loggerPrefix} added column "${column}" to table "${table}"`)
  } else {
    logger.info(`${loggerPrefix} column "${column}" already exists in table "${table}"`)
  }
}

/**
 * Utility function to remove a column from a table. If the column does not exist, it logs a message and continues.
 *
 * @param {import('sequelize').QueryInterface} queryInterface - a sequelize QueryInterface object.
 * @param {import('../Logger')} logger - a Logger object.
 * @param {string} table - the name of the table to remove the column from.
 * @param {string} column - the name of the column to remove.
 */
async function removeColumn(queryInterface, logger, table, column) {
  logger.info(`${loggerPrefix} removing column "${column}" from table "${table}"`)
  const tableDescription = await queryInterface.describeTable(table)
  if (tableDescription[column]) {
    await queryInterface.sequelize.query(`ALTER TABLE ${table} DROP COLUMN ${column}`)
    logger.info(`${loggerPrefix} removed column "${column}" from table "${table}"`)
  } else {
    logger.info(`${loggerPrefix} column "${column}" does not exist in table "${table}"`)
  }
}

/**
 * Utility function to add a trigger to update a column in a target table when a column in a source table is updated.
 * If the trigger already exists, it drops it and creates a new one.
 * sourceIdColumn and targetIdColumn are used to match the source and target rows.
 *
 * @param {import('sequelize').QueryInterface} queryInterface - a sequelize QueryInterface object.
 * @param {import('../Logger')} logger - a Logger object.
 * @param {string} sourceTable - the name of the source table.
 * @param {string} sourceColumn - the name of the column to update.
 * @param {string} sourceIdColumn - the name of the id column of the source table.
 * @param {string} targetTable - the name of the target table.
 * @param {string} targetColumn - the name of the column to update.
 * @param {string} targetIdColumn - the name of the id column of the target table.
 */
async function addTrigger(queryInterface, logger, sourceTable, sourceColumn, sourceIdColumn, targetTable, targetColumn, targetIdColumn) {
  logger.info(`${loggerPrefix} adding trigger to update ${targetTable}.${targetColumn} when ${sourceTable}.${sourceColumn} is updated`)
  const triggerName = convertToSnakeCase(`update_${targetTable}_${targetColumn}_from_${sourceTable}_${sourceColumn}`)

  await queryInterface.sequelize.query(`DROP TRIGGER IF EXISTS ${triggerName}`)

  await queryInterface.sequelize.query(`
    CREATE TRIGGER ${triggerName}
    AFTER UPDATE OF ${sourceColumn} ON ${sourceTable}
    FOR EACH ROW
    BEGIN
      UPDATE ${targetTable}
      SET ${targetColumn} = NEW.${sourceColumn}
      WHERE ${targetTable}.${targetIdColumn} = NEW.${sourceIdColumn};
    END;
  `)
  logger.info(`${loggerPrefix} added trigger to update ${targetTable}.${targetColumn} when ${sourceTable}.${sourceColumn} is updated`)
}

/**
 * Utility function to remove an update trigger from a table.
 *
 * @param {import('sequelize').QueryInterface} queryInterface - a sequelize QueryInterface object.
 * @param {import('../Logger')} logger - a Logger object.
 * @param {string} sourceTable - the name of the source table.
 * @param {string} sourceColumn - the name of the column to update.
 * @param {string} targetTable - the name of the target table.
 * @param {string} targetColumn - the name of the column to update.
 */
async function removeTrigger(queryInterface, logger, sourceTable, sourceColumn, targetTable, targetColumn) {
  logger.info(`${loggerPrefix} removing trigger to update ${targetTable}.${targetColumn}`)
  const triggerName = convertToSnakeCase(`update_${targetTable}_${targetColumn}_from_${sourceTable}_${sourceColumn}`)
  await queryInterface.sequelize.query(`DROP TRIGGER IF EXISTS ${triggerName}`)
  logger.info(`${loggerPrefix} removed trigger to update ${targetTable}.${targetColumn}`)
}

/**
 * Utility function to copy a column from a source table to a target table.
 * sourceIdColumn and targetIdColumn are used to match the source and target rows.
 *
 * @param {import('sequelize').QueryInterface} queryInterface - a sequelize QueryInterface object.
 * @param {import('../Logger')} logger - a Logger object.
 * @param {string} sourceTable - the name of the source table.
 * @param {string} sourceColumn - the name of the column to copy.
 * @param {string} sourceIdColumn - the name of the id column of the source table.
 * @param {string} targetTable - the name of the target table.
 * @param {string} targetColumn - the name of the column to copy to.
 * @param {string} targetIdColumn - the name of the id column of the target table.
 */
async function copyColumn(queryInterface, logger, sourceTable, sourceColumn, sourceIdColumn, targetTable, targetColumn, targetIdColumn) {
  logger.info(`${loggerPrefix} copying column "${sourceColumn}" from table "${sourceTable}" to table "${targetTable}"`)
  await queryInterface.sequelize.query(`
    UPDATE ${targetTable}
    SET ${targetColumn} = ${sourceTable}.${sourceColumn}
    FROM ${sourceTable}
    WHERE ${targetTable}.${targetIdColumn} = ${sourceTable}.${sourceIdColumn}
  `)
  logger.info(`${loggerPrefix} copied column "${sourceColumn}" from table "${sourceTable}" to table "${targetTable}"`)
}

/**
 * Utility function to convert a string to snake case, e.g. "titleIgnorePrefix" -> "title_ignore_prefix"
 *
 * @param {string} str - the string to convert to snake case.
 * @returns {string} - the string in snake case.
 */
function convertToSnakeCase(str) {
  return str.replace(/([A-Z])/g, '_$1').toLowerCase()
}

module.exports = { up, down }
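
Reviewer note: since the migration only touches the context object it is handed, it can be smoke-tested against stand-ins; a minimal sketch, assuming the file is run from the migrations directory and that the stubs below are acceptable substitutes for the real QueryInterface and Logger (not part of the patch):

// Hypothetical stand-ins for the real sequelize QueryInterface and server Logger
const logger = { info: (...args) => console.log(...args) }
const queryInterface = {
  describeTable: async () => ({}), // pretend the new columns do not exist yet
  addColumn: async () => {},
  sequelize: { Sequelize: require('sequelize'), query: async () => [[]] }
}

require('./v2.19.4-improve-podcast-queries')
  .up({ context: { queryInterface, logger } })
  .then(() => logger.info('up() ran against stubs'))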

272 server/migrations/v2.20.0-improve-author-sort-queries.js Normal file

@@ -0,0 +1,272 @@
const util = require('util')
const { Sequelize } = require('sequelize')

/**
 * @typedef MigrationContext
 * @property {import('sequelize').QueryInterface} queryInterface - a sequelize QueryInterface object.
 * @property {import('../Logger')} logger - a Logger object.
 *
 * @typedef MigrationOptions
 * @property {MigrationContext} context - an object containing the migration context.
 */

const migrationVersion = '2.20.0'
const migrationName = `${migrationVersion}-improve-author-sort-queries`
const loggerPrefix = `[${migrationVersion} migration]`

// Migration constants
const libraryItems = 'libraryItems'
const bookAuthors = 'bookAuthors'
const authors = 'authors'
const podcastEpisodes = 'podcastEpisodes'
const columns = [
  { name: 'authorNamesFirstLast', source: `${authors}.name`, spec: { type: Sequelize.STRING, allowNull: true } },
  { name: 'authorNamesLastFirst', source: `${authors}.lastFirst`, spec: { type: Sequelize.STRING, allowNull: true } }
]
const authorsSort = `${bookAuthors}.createdAt ASC`
const columnNames = columns.map((column) => column.name).join(', ')
const columnSourcesExpression = columns.map((column) => `GROUP_CONCAT(${column.source}, ', ' ORDER BY ${authorsSort})`).join(', ')
const authorsJoin = `${authors} JOIN ${bookAuthors} ON ${authors}.id = ${bookAuthors}.authorId`

/**
 * This upward migration adds authorNames columns to the libraryItems table and populates them.
 * It also creates triggers to update the authorNames columns when the corresponding bookAuthors and authors records are updated,
 * and creates indexes on the authorNames columns.
 *
 * It also adds an index on publishedAt to the podcastEpisodes table.
 *
 * @param {MigrationOptions} options - an object containing the migration context.
 * @returns {Promise<void>} - A promise that resolves when the migration is complete.
 */
async function up({ context: { queryInterface, logger } }) {
  const helper = new MigrationHelper(queryInterface, logger)

  // Upwards migration script
  logger.info(`${loggerPrefix} UPGRADE BEGIN: ${migrationName}`)

  // Add authorNames columns to libraryItems table
  await helper.addColumns()

  // Populate authorNames columns with the author names for each libraryItem
  await helper.populateColumnsFromSource()

  // Create triggers to update the authorNames columns when the corresponding bookAuthors and authors records are updated
  await helper.addTriggers()

  // Create indexes on the authorNames columns
  await helper.addIndexes()

  // Add index on publishedAt to the podcastEpisodes table
  await helper.addIndex(podcastEpisodes, ['publishedAt'])

  logger.info(`${loggerPrefix} UPGRADE END: ${migrationName}`)
}

/**
 * This downward migration removes the authorNames columns from the libraryItems table,
 * the triggers on the bookAuthors and authors tables, and the indexes on the authorNames columns.
 *
 * It also removes the index on publishedAt from the podcastEpisodes table.
 *
 * @param {MigrationOptions} options - an object containing the migration context.
 * @returns {Promise<void>} - A promise that resolves when the migration is complete.
 */
async function down({ context: { queryInterface, logger } }) {
  // Downward migration script
  logger.info(`${loggerPrefix} DOWNGRADE BEGIN: ${migrationName}`)

  const helper = new MigrationHelper(queryInterface, logger)

  // Remove triggers to update authorNames columns
  await helper.removeTriggers()

  // Remove index on publishedAt from the podcastEpisodes table
  await helper.removeIndex(podcastEpisodes, ['publishedAt'])

  // Remove indexes on the authorNames columns
  await helper.removeIndexes()

  // Remove authorNames columns from libraryItems table
  await helper.removeColumns()

  logger.info(`${loggerPrefix} DOWNGRADE END: ${migrationName}`)
}

class MigrationHelper {
  constructor(queryInterface, logger) {
    this.queryInterface = queryInterface
    this.logger = logger
  }

  async addColumn(table, column, options) {
    this.logger.info(`${loggerPrefix} adding column "${column}" to table "${table}"`)
    const tableDescription = await this.queryInterface.describeTable(table)
    if (!tableDescription[column]) {
      await this.queryInterface.addColumn(table, column, options)
      this.logger.info(`${loggerPrefix} added column "${column}" to table "${table}"`)
    } else {
      this.logger.info(`${loggerPrefix} column "${column}" already exists in table "${table}"`)
    }
  }

  async addColumns() {
    this.logger.info(`${loggerPrefix} adding ${columnNames} columns to ${libraryItems} table`)
    for (const column of columns) {
      await this.addColumn(libraryItems, column.name, column.spec)
    }
    this.logger.info(`${loggerPrefix} added ${columnNames} columns to ${libraryItems} table`)
  }

  async removeColumn(table, column) {
    this.logger.info(`${loggerPrefix} removing column "${column}" from table "${table}"`)
    const tableDescription = await this.queryInterface.describeTable(table)
    if (tableDescription[column]) {
      await this.queryInterface.sequelize.query(`ALTER TABLE ${table} DROP COLUMN ${column}`)
      this.logger.info(`${loggerPrefix} removed column "${column}" from table "${table}"`)
    } else {
      this.logger.info(`${loggerPrefix} column "${column}" does not exist in table "${table}"`)
    }
  }

  async removeColumns() {
    this.logger.info(`${loggerPrefix} removing ${columnNames} columns from ${libraryItems} table`)
    for (const column of columns) {
      await this.removeColumn(libraryItems, column.name)
    }
    this.logger.info(`${loggerPrefix} removed ${columnNames} columns from ${libraryItems} table`)
  }

  async populateColumnsFromSource() {
    this.logger.info(`${loggerPrefix} populating ${columnNames} columns in ${libraryItems} table`)
    const authorNamesSubQuery = `
      SELECT ${columnSourcesExpression}
      FROM ${authorsJoin}
      WHERE ${bookAuthors}.bookId = ${libraryItems}.mediaId
    `
    await this.queryInterface.sequelize.query(`
      UPDATE ${libraryItems}
      SET (${columnNames}) = (${authorNamesSubQuery})
      WHERE mediaType = 'book';
    `)
    this.logger.info(`${loggerPrefix} populated ${columnNames} columns in ${libraryItems} table`)
  }

  async addBookAuthorsTrigger(action) {
    this.logger.info(`${loggerPrefix} adding trigger to update ${libraryItems} ${columnNames} on ${bookAuthors} ${action}`)
    const modifiedRecord = action === 'delete' ? 'OLD' : 'NEW'
    const triggerName = convertToSnakeCase(`update_${libraryItems}_authorNames_on_${bookAuthors}_${action}`)
    const authorNamesSubQuery = `
      SELECT ${columnSourcesExpression}
      FROM ${authorsJoin}
      WHERE ${bookAuthors}.bookId = ${modifiedRecord}.bookId
    `
    await this.queryInterface.sequelize.query(`DROP TRIGGER IF EXISTS ${triggerName}`)

    await this.queryInterface.sequelize.query(`
      CREATE TRIGGER ${triggerName}
      AFTER ${action} ON ${bookAuthors}
      FOR EACH ROW
      BEGIN
        UPDATE ${libraryItems}
        SET (${columnNames}) = (${authorNamesSubQuery})
        WHERE mediaId = ${modifiedRecord}.bookId;
      END;
    `)
    this.logger.info(`${loggerPrefix} added trigger to update ${libraryItems} ${columnNames} on ${bookAuthors} ${action}`)
  }

  async addAuthorsUpdateTrigger() {
    this.logger.info(`${loggerPrefix} adding trigger to update ${libraryItems} ${columnNames} on ${authors} update`)
    const triggerName = convertToSnakeCase(`update_${libraryItems}_authorNames_on_authors_update`)
    const authorNamesSubQuery = `
      SELECT ${columnSourcesExpression}
      FROM ${authorsJoin}
      WHERE ${bookAuthors}.bookId = ${libraryItems}.mediaId
    `

    await this.queryInterface.sequelize.query(`DROP TRIGGER IF EXISTS ${triggerName}`)

    await this.queryInterface.sequelize.query(`
      CREATE TRIGGER ${triggerName}
      AFTER UPDATE OF name ON ${authors}
      FOR EACH ROW
      BEGIN
        UPDATE ${libraryItems}
        SET (${columnNames}) = (${authorNamesSubQuery})
        WHERE mediaId IN (SELECT bookId FROM ${bookAuthors} WHERE authorId = NEW.id);
      END;
    `)
    this.logger.info(`${loggerPrefix} added trigger to update ${libraryItems} ${columnNames} on ${authors} update`)
  }

  async addTriggers() {
    await this.addBookAuthorsTrigger('insert')
    await this.addBookAuthorsTrigger('delete')
    await this.addAuthorsUpdateTrigger()
  }

  async removeBookAuthorsTrigger(action) {
    this.logger.info(`${loggerPrefix} removing trigger to update ${libraryItems} ${columnNames} on ${bookAuthors} ${action}`)
    const triggerName = convertToSnakeCase(`update_${libraryItems}_authorNames_on_${bookAuthors}_${action}`)
    await this.queryInterface.sequelize.query(`DROP TRIGGER IF EXISTS ${triggerName}`)
    this.logger.info(`${loggerPrefix} removed trigger to update ${libraryItems} ${columnNames} on ${bookAuthors} ${action}`)
  }

  async removeAuthorsUpdateTrigger() {
    this.logger.info(`${loggerPrefix} removing trigger to update ${libraryItems} ${columnNames} on ${authors} update`)
    const triggerName = convertToSnakeCase(`update_${libraryItems}_authorNames_on_authors_update`)
    await this.queryInterface.sequelize.query(`DROP TRIGGER IF EXISTS ${triggerName}`)
    this.logger.info(`${loggerPrefix} removed trigger to update ${libraryItems} ${columnNames} on ${authors} update`)
  }

  async removeTriggers() {
    await this.removeBookAuthorsTrigger('insert')
    await this.removeBookAuthorsTrigger('delete')
    await this.removeAuthorsUpdateTrigger()
  }

  async addIndex(tableName, columns) {
    const columnString = columns.map((column) => util.inspect(column)).join(', ')
    const indexName = convertToSnakeCase(`${tableName}_${columns.map((column) => (typeof column === 'string' ? column : column.name)).join('_')}`)
    try {
      this.logger.info(`${loggerPrefix} adding index on [${columnString}] to table ${tableName}. index name: ${indexName}`)
      await this.queryInterface.addIndex(tableName, columns)
      this.logger.info(`${loggerPrefix} added index on [${columnString}] to table ${tableName}. index name: ${indexName}`)
    } catch (error) {
      if (error.name === 'SequelizeDatabaseError' && error.message.includes('already exists')) {
        this.logger.info(`${loggerPrefix} index [${columnString}] for table "${tableName}" already exists`)
      } else {
        throw error
      }
    }
  }

  async addIndexes() {
    for (const column of columns) {
      await this.addIndex(libraryItems, ['libraryId', 'mediaType', { name: column.name, collate: 'NOCASE' }])
    }
  }

  async removeIndex(tableName, columns) {
    this.logger.info(`${loggerPrefix} removing index [${columns.join(', ')}] from table "${tableName}"`)
    await this.queryInterface.removeIndex(tableName, columns)
    this.logger.info(`${loggerPrefix} removed index [${columns.join(', ')}] from table "${tableName}"`)
  }

  async removeIndexes() {
    for (const column of columns) {
      await this.removeIndex(libraryItems, ['libraryId', 'mediaType', column.name])
    }
  }
}

/**
 * Utility function to convert a string to snake case, e.g. "titleIgnorePrefix" -> "title_ignore_prefix"
 *
 * @param {string} str - the string to convert to snake case.
 * @returns {string} - the string in snake case.
 */
function convertToSnakeCase(str) {
  return str.replace(/([A-Z])/g, '_$1').toLowerCase()
}

module.exports = { up, down }
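
Reviewer note: for anyone checking the generated SQL, this is what the computed fragments above expand to (plain Node, not part of the patch):

// Same constants as the migration
const authors = 'authors'
const bookAuthors = 'bookAuthors'
const authorsSort = `${bookAuthors}.createdAt ASC`
const columns = [
  { name: 'authorNamesFirstLast', source: `${authors}.name` },
  { name: 'authorNamesLastFirst', source: `${authors}.lastFirst` }
]
console.log(columns.map((c) => `GROUP_CONCAT(${c.source}, ', ' ORDER BY ${authorsSort})`).join(', '))
// GROUP_CONCAT(authors.name, ', ' ORDER BY bookAuthors.createdAt ASC), GROUP_CONCAT(authors.lastFirst, ', ' ORDER BY bookAuthors.createdAt ASC)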

@@ -2,6 +2,8 @@ const { DataTypes, Model } = require('sequelize')
const Logger = require('../Logger')
const { getTitlePrefixAtEnd, getTitleIgnorePrefix } = require('../utils')
const parseNameString = require('../utils/parsers/parseNameString')
const htmlSanitizer = require('../utils/htmlSanitizer')
const libraryItemsBookFilters = require('../utils/queries/libraryItemsBookFilters')

/**
 * @typedef EBookFileObject

@@ -191,6 +193,14 @@ class Book extends Model {
        ]
      }
    )

    Book.addHook('afterDestroy', async (instance) => {
      libraryItemsBookFilters.clearCountCache('afterDestroy')
    })

    Book.addHook('afterCreate', async (instance) => {
      libraryItemsBookFilters.clearCountCache('afterCreate')
    })
  }

  /**

@@ -285,7 +295,7 @@ class Book extends Model {
      const track = structuredClone(af)
      track.title = af.metadata.filename
      track.startOffset = startOffset
      track.contentUrl = `${global.RouterBasePath}/api/items/${libraryItemId}/file/${track.ino}`
      track.contentUrl = `/api/items/${libraryItemId}/file/${track.ino}`
      startOffset += track.duration
      return track
    })

@@ -364,7 +374,11 @@ class Book extends Model {
    if (payload.metadata) {
      const metadataStringKeys = ['title', 'subtitle', 'publishedYear', 'publishedDate', 'publisher', 'description', 'isbn', 'asin', 'language']
      metadataStringKeys.forEach((key) => {
        if (typeof payload.metadata[key] === 'string' && this[key] !== payload.metadata[key]) {
        if (typeof payload.metadata[key] == 'number') {
          payload.metadata[key] = String(payload.metadata[key])
        }

        if ((typeof payload.metadata[key] === 'string' || payload.metadata[key] === null) && this[key] !== payload.metadata[key]) {
          this[key] = payload.metadata[key] || null

          if (key === 'title') {

@@ -579,6 +593,7 @@ class Book extends Model {
    oldMetadataJSON.authorNameLF = this.authorNameLF
    oldMetadataJSON.narratorName = (this.narrators || []).join(', ')
    oldMetadataJSON.seriesName = this.seriesName
    oldMetadataJSON.descriptionPlain = this.description ? htmlSanitizer.stripAllTags(this.description) : null
    return oldMetadataJSON
  }

@@ -561,7 +561,42 @@ class Feed extends Model {
   * @param {string} hostPrefix
   */
  buildXml(hostPrefix) {
    const blockTags = [{ 'itunes:block': 'yes' }, { 'googleplay:block': 'yes' }]
    const customElements = [
      { language: this.language || 'en' },
      { author: this.author || 'advplyr' },
      { 'itunes:author': this.author || 'advplyr' },
      { 'itunes:type': this.podcastType || 'serial' },
      {
        'itunes:image': {
          _attr: {
            href: `${hostPrefix}${this.imageURL}`
          }
        }
      },
      { 'itunes:explicit': !!this.explicit }
    ]

    if (this.description) {
      customElements.push({ 'itunes:summary': { _cdata: this.description } })
    }

    const itunesOwnersData = []
    if (this.ownerName || this.author) {
      itunesOwnersData.push({ 'itunes:name': this.ownerName || this.author })
    }
    if (this.ownerEmail) {
      itunesOwnersData.push({ 'itunes:email': this.ownerEmail })
    }
    if (itunesOwnersData.length) {
      customElements.push({
        'itunes:owner': itunesOwnersData
      })
    }

    if (this.preventIndexing) {
      customElements.push({ 'itunes:block': 'yes' }, { 'googleplay:block': 'yes' })
    }

    const rssData = {
      title: this.title,
      description: this.description || '',

@@ -571,29 +606,10 @@ class Feed extends Model {
      image_url: `${hostPrefix}${this.imageURL}`,
      custom_namespaces: {
        itunes: 'http://www.itunes.com/dtds/podcast-1.0.dtd',
        psc: 'http://podlove.org/simple-chapters',
        podcast: 'https://podcastindex.org/namespace/1.0',
        googleplay: 'http://www.google.com/schemas/play-podcasts/1.0'
      },
      custom_elements: [
        { language: this.language || 'en' },
        { author: this.author || 'advplyr' },
        { 'itunes:author': this.author || 'advplyr' },
        { 'itunes:summary': this.description || '' },
        { 'itunes:type': this.podcastType },
        {
          'itunes:image': {
            _attr: {
              href: `${hostPrefix}${this.imageURL}`
            }
          }
        },
        {
          'itunes:owner': [{ 'itunes:name': this.ownerName || this.author || '' }, { 'itunes:email': this.ownerEmail || '' }]
        },
        { 'itunes:explicit': !!this.explicit },
        ...(this.preventIndexing ? blockTags : [])
      ]
      custom_elements: customElements
    }

    const rssfeed = new RSS(rssData)

@@ -135,12 +135,14 @@ class FeedEpisode extends Model {
   * @param {string} slug
   * @param {import('./Book').AudioFileObject} audioTrack
   * @param {boolean} useChapterTitles
   * @param {number} offsetIndex
   * @param {string} [existingEpisodeId]
   */
  static getFeedEpisodeObjFromAudiobookTrack(book, pubDateStart, feed, slug, audioTrack, useChapterTitles, existingEpisodeId = null) {
  static getFeedEpisodeObjFromAudiobookTrack(book, pubDateStart, feed, slug, audioTrack, useChapterTitles, offsetIndex, existingEpisodeId = null) {
    // Example: <pubDate>Fri, 04 Feb 2015 00:00:00 GMT</pubDate>
    let timeOffset = isNaN(audioTrack.index) ? 0 : Number(audioTrack.index) * 1000 // Offset pubdate to ensure correct order
    let episodeId = existingEpisodeId || uuidv4()
    // Offset pubdate in 1 minute intervals to ensure correct order
    const timeOffset = offsetIndex * 60000
    const episodeId = existingEpisodeId || uuidv4()

    // e.g. Track 1 will have a pub date before Track 2
    const audiobookPubDate = date.format(new Date(pubDateStart.valueOf() + timeOffset), 'ddd, DD MMM YYYY HH:mm:ss [GMT]')
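
Reviewer note: a minimal sketch of the new 1-minute stagger, assuming the date-and-time package that this model imports as `date` (UTC flag added here so the GMT label is accurate; not part of the patch):

const date = require('date-and-time')
const pubDateStart = new Date('2015-02-04T00:00:00Z')
for (let offsetIndex = 0; offsetIndex < 3; offsetIndex++) {
  const timeOffset = offsetIndex * 60000 // one minute per track index
  console.log(date.format(new Date(pubDateStart.valueOf() + timeOffset), 'ddd, DD MMM YYYY HH:mm:ss [GMT]', true))
}
// Wed, 04 Feb 2015 00:00:00 GMT
// Wed, 04 Feb 2015 00:01:00 GMT
// Wed, 04 Feb 2015 00:02:00 GMT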

@@ -190,14 +192,15 @@ class FeedEpisode extends Model {

    const feedEpisodeObjs = []
    let numExisting = 0
    for (const track of trackList) {
    for (let i = 0; i < trackList.length; i++) {
      const track = trackList[i]
      // Check for existing episode by filepath
      const existingEpisode = feed.feedEpisodes?.find((episode) => {
        return episode.filePath === track.metadata.path
      })
      numExisting = existingEpisode ? numExisting + 1 : numExisting

      feedEpisodeObjs.push(this.getFeedEpisodeObjFromAudiobookTrack(libraryItemExpanded.media, libraryItemExpanded.createdAt, feed, slug, track, useChapterTitles, existingEpisode?.id))
      feedEpisodeObjs.push(this.getFeedEpisodeObjFromAudiobookTrack(libraryItemExpanded.media, libraryItemExpanded.createdAt, feed, slug, track, useChapterTitles, i, existingEpisode?.id))
    }
    Logger.info(`[FeedEpisode] Upserting ${feedEpisodeObjs.length} episodes for feed ${feed.id} (${numExisting} existing)`)
    return this.bulkCreate(feedEpisodeObjs, { transaction, updateOnDuplicate: ['title', 'author', 'description', 'siteURL', 'enclosureURL', 'enclosureType', 'enclosureSize', 'pubDate', 'season', 'episode', 'episodeType', 'duration', 'filePath', 'explicit'] })

@@ -218,8 +221,9 @@ class FeedEpisode extends Model {

    const feedEpisodeObjs = []
    let numExisting = 0
    let offsetIndex = 0
    for (const book of books) {
      const trackList = book.libraryItem.getTrackList()
      const trackList = book.getTracklist(book.libraryItem.id)
      const useChapterTitles = this.checkUseChapterTitlesForEpisodes(trackList, book)
      for (const track of trackList) {
        // Check for existing episode by filepath

@@ -228,7 +232,7 @@ class FeedEpisode extends Model {
        })
        numExisting = existingEpisode ? numExisting + 1 : numExisting

        feedEpisodeObjs.push(this.getFeedEpisodeObjFromAudiobookTrack(book, earliestLibraryItemCreatedAt, feed, slug, track, useChapterTitles, existingEpisode?.id))
        feedEpisodeObjs.push(this.getFeedEpisodeObjFromAudiobookTrack(book, earliestLibraryItemCreatedAt, feed, slug, track, useChapterTitles, offsetIndex++, existingEpisode?.id))
      }
    }
    Logger.info(`[FeedEpisode] Upserting ${feedEpisodeObjs.length} episodes for feed ${feed.id} (${numExisting} existing)`)

@@ -304,6 +308,23 @@ class FeedEpisode extends Model {
   * @param {string} hostPrefix
   */
  getRSSData(hostPrefix) {
    const customElements = [
      { 'itunes:author': this.author || null },
      { 'itunes:duration': Math.round(Number(this.duration)) },
      {
        'itunes:explicit': !!this.explicit
      },
      { 'itunes:episodeType': this.episodeType || null },
      { 'itunes:season': this.season || null },
      { 'itunes:episode': this.episode || null }
    ].filter((element) => {
      // Remove empty custom elements
      return Object.values(element)[0] !== null
    })
    if (this.description) {
      customElements.push({ 'itunes:summary': { _cdata: this.description } })
    }

    return {
      title: this.title,
      description: this.description || '',

@@ -316,17 +337,7 @@ class FeedEpisode extends Model {
        type: this.enclosureType,
        size: this.enclosureSize
      },
      custom_elements: [
        { 'itunes:author': this.author },
        { 'itunes:duration': secondsToTimestamp(this.duration) },
        { 'itunes:summary': this.description || '' },
        {
          'itunes:explicit': !!this.explicit
        },
        { 'itunes:episodeType': this.episodeType },
        { 'itunes:season': this.season },
        { 'itunes:episode': this.episode }
      ]
      custom_elements: customElements
    }
  }
}
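
Reviewer note: the filter above drops any itunes tag whose value resolved to null, so empty tags never reach the feed; a small sketch of the same pattern (plain Node, not part of the patch):

const customElements = [
  { 'itunes:author': null }, // episode has no author set
  { 'itunes:duration': 754 },
  { 'itunes:season': null }
].filter((element) => Object.values(element)[0] !== null)

console.log(customElements) // [ { 'itunes:duration': 754 } ]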

@@ -73,6 +73,14 @@ class LibraryItem extends Model {

    /** @type {Book.BookExpanded|Podcast.PodcastExpanded} - only set when expanded */
    this.media
    /** @type {string} */
    this.title // Only used for sorting
    /** @type {string} */
    this.titleIgnorePrefix // Only used for sorting
    /** @type {string} */
    this.authorNamesFirstLast // Only used for sorting
    /** @type {string} */
    this.authorNamesLastFirst // Only used for sorting
  }

  /**

@@ -99,7 +107,7 @@ class LibraryItem extends Model {
        {
          model: this.sequelize.models.series,
          through: {
            attributes: ['sequence', 'createdAt']
            attributes: ['id', 'sequence', 'createdAt']
          }
        }
      ]

@@ -155,7 +163,7 @@ class LibraryItem extends Model {
        {
          model: this.sequelize.models.series,
          through: {
            attributes: ['sequence']
            attributes: ['id', 'sequence']
          }
        }
      ]

@@ -677,7 +685,11 @@ class LibraryItem extends Model {
        lastScan: DataTypes.DATE,
        lastScanVersion: DataTypes.STRING,
        libraryFiles: DataTypes.JSON,
        extraData: DataTypes.JSON
        extraData: DataTypes.JSON,
        title: DataTypes.STRING,
        titleIgnorePrefix: DataTypes.STRING,
        authorNamesFirstLast: DataTypes.STRING,
        authorNamesLastFirst: DataTypes.STRING
      },
      {
        sequelize,

@@ -695,6 +707,21 @@ class LibraryItem extends Model {
          {
            fields: ['libraryId', 'mediaType', 'size']
          },
          {
            fields: ['libraryId', 'mediaType', 'createdAt']
          },
          {
            fields: ['libraryId', 'mediaType', { name: 'title', collate: 'NOCASE' }]
          },
          {
            fields: ['libraryId', 'mediaType', { name: 'titleIgnorePrefix', collate: 'NOCASE' }]
          },
          {
            fields: ['libraryId', 'mediaType', { name: 'authorNamesFirstLast', collate: 'NOCASE' }]
          },
          {
            fields: ['libraryId', 'mediaType', { name: 'authorNamesLastFirst', collate: 'NOCASE' }]
          },
          {
            fields: ['libraryId', 'mediaId', 'mediaType']
          },

@@ -34,6 +34,8 @@ class MediaProgress extends Model {
    this.updatedAt
    /** @type {Date} */
    this.createdAt
    /** @type {UUIDV4} */
    this.podcastId
  }

  static removeById(mediaProgressId) {

@@ -69,7 +71,8 @@ class MediaProgress extends Model {
        ebookLocation: DataTypes.STRING,
        ebookProgress: DataTypes.FLOAT,
        finishedAt: DataTypes.DATE,
        extraData: DataTypes.JSON
        extraData: DataTypes.JSON,
        podcastId: DataTypes.UUID
      },
      {
        sequelize,

@@ -123,6 +126,16 @@ class MediaProgress extends Model {
      }
    })

    // make sure to call the afterDestroy hook for each instance
    MediaProgress.addHook('beforeBulkDestroy', (options) => {
      options.individualHooks = true
    })

    // update the potentially cached user after destroying the media progress
    MediaProgress.addHook('afterDestroy', (instance) => {
      user.mediaProgressRemoved(instance)
    })

    user.hasMany(MediaProgress, {
      onDelete: 'CASCADE'
    })

@@ -174,7 +187,7 @@ class MediaProgress extends Model {
    if (!this.extraData) this.extraData = {}
    if (progressPayload.isFinished !== undefined) {
      if (progressPayload.isFinished && !this.isFinished) {
        this.finishedAt = Date.now()
        this.finishedAt = progressPayload.finishedAt || Date.now()
        this.extraData.progress = 1
        this.changed('extraData', true)
        delete progressPayload.finishedAt

@@ -1,6 +1,7 @@
const { DataTypes, Model } = require('sequelize')
const { getTitlePrefixAtEnd, getTitleIgnorePrefix } = require('../utils')
const Logger = require('../Logger')
const libraryItemsPodcastFilters = require('../utils/queries/libraryItemsPodcastFilters')

/**
 * @typedef PodcastExpandedProperties

@@ -61,6 +62,8 @@ class Podcast extends Model {
    this.createdAt
    /** @type {Date} */
    this.updatedAt
    /** @type {number} */
    this.numEpisodes

    /** @type {import('./PodcastEpisode')[]} */
    this.podcastEpisodes

@@ -138,13 +141,22 @@ class Podcast extends Model {
        maxNewEpisodesToDownload: DataTypes.INTEGER,
        coverPath: DataTypes.STRING,
        tags: DataTypes.JSON,
        genres: DataTypes.JSON
        genres: DataTypes.JSON,
        numEpisodes: DataTypes.INTEGER
      },
      {
        sequelize,
        modelName: 'podcast'
      }
    )

    Podcast.addHook('afterDestroy', async (instance) => {
      libraryItemsPodcastFilters.clearCountCache('podcast', 'afterDestroy')
    })

    Podcast.addHook('afterCreate', async (instance) => {
      libraryItemsPodcastFilters.clearCountCache('podcast', 'afterCreate')
    })
  }

  get hasMediaFiles() {

@@ -202,8 +214,9 @@ class Podcast extends Model {
      } else if (key === 'itunesPageUrl') {
        newKey = 'itunesPageURL'
      }
      if (typeof payload.metadata[key] === 'string' && payload.metadata[key] !== this[newKey]) {
        this[newKey] = payload.metadata[key]
      if ((typeof payload.metadata[key] === 'string' || payload.metadata[key] === null) && payload.metadata[key] !== this[newKey]) {
        this[newKey] = payload.metadata[key] || null

        if (key === 'title') {
          this.titleIgnorePrefix = getTitleIgnorePrefix(this.title)
        }

@@ -1,5 +1,5 @@
const { DataTypes, Model } = require('sequelize')

const libraryItemsPodcastFilters = require('../utils/queries/libraryItemsPodcastFilters')
/**
 * @typedef ChapterObject
 * @property {number} id

@@ -80,9 +80,13 @@ class PodcastEpisode extends Model {
    if (rssPodcastEpisode.guid) {
      podcastEpisode.extraData.guid = rssPodcastEpisode.guid
    }

    if (audioFile.chapters?.length) {
      podcastEpisode.chapters = audioFile.chapters.map((ch) => ({ ...ch }))
    } else if (rssPodcastEpisode.chapters?.length) {
      podcastEpisode.chapters = rssPodcastEpisode.chapters.map((ch) => ({ ...ch }))
    }

    return this.create(podcastEpisode)
  }

@@ -122,6 +126,10 @@ class PodcastEpisode extends Model {
        {
          name: 'podcastEpisode_createdAt_podcastId',
          fields: ['createdAt', 'podcastId']
        },
        {
          name: 'podcast_episodes_published_at',
          fields: ['publishedAt']
        }
      ]
    }

@@ -132,6 +140,14 @@ class PodcastEpisode extends Model {
      onDelete: 'CASCADE'
    })
    PodcastEpisode.belongsTo(podcast)

    PodcastEpisode.addHook('afterDestroy', async (instance) => {
      libraryItemsPodcastFilters.clearCountCache('podcastEpisode', 'afterDestroy')
    })

    PodcastEpisode.addHook('afterCreate', async (instance) => {
      libraryItemsPodcastFilters.clearCountCache('podcastEpisode', 'afterCreate')
    })
  }

  get size() {

@@ -169,7 +185,7 @@ class PodcastEpisode extends Model {
    const track = structuredClone(this.audioFile)
    track.startOffset = 0
    track.title = this.audioFile.metadata.filename
    track.contentUrl = `${global.RouterBasePath}/api/items/${libraryItemId}/file/${track.ino}`
    track.contentUrl = `/api/items/${libraryItemId}/file/${track.ino}`
    return track
  }

@@ -404,6 +404,14 @@ class User extends Model {
    return count > 0
  }

  static mediaProgressRemoved(mediaProgress) {
    const cachedUser = userCache.getById(mediaProgress.userId)
    if (cachedUser) {
      Logger.debug(`[User] mediaProgressRemoved: ${mediaProgress.id} from user ${cachedUser.id}`)
      cachedUser.mediaProgresses = cachedUser.mediaProgresses.filter((mp) => mp.id !== mediaProgress.id)
    }
  }

  /**
   * Initialize model
   * @param {import('../Database').sequelize} sequelize

@@ -626,6 +634,7 @@ class User extends Model {
    /** @type {import('./MediaProgress')|null} */
    let mediaProgress = null
    let mediaItemId = null
    let podcastId = null
    if (progressPayload.episodeId) {
      const podcastEpisode = await this.sequelize.models.podcastEpisode.findByPk(progressPayload.episodeId, {
        attributes: ['id', 'podcastId'],

@@ -654,6 +663,7 @@ class User extends Model {
      }
      mediaItemId = podcastEpisode.id
      mediaProgress = podcastEpisode.mediaProgresses?.[0]
      podcastId = podcastEpisode.podcastId
    } else {
      const libraryItem = await this.sequelize.models.libraryItem.findByPk(progressPayload.libraryItemId, {
        attributes: ['id', 'mediaId', 'mediaType'],

@@ -686,6 +696,7 @@ class User extends Model {
      const newMediaProgressPayload = {
        userId: this.id,
        mediaItemId,
        podcastId,
        mediaItemType: progressPayload.episodeId ? 'podcastEpisode' : 'book',
        duration: isNullOrNaN(progressPayload.duration) ? 0 : Number(progressPayload.duration),
        currentTime: isNullOrNaN(progressPayload.currentTime) ? 0 : Number(progressPayload.currentTime),

@@ -694,13 +705,14 @@ class User extends Model {
        ebookLocation: progressPayload.ebookLocation || null,
        ebookProgress: isNullOrNaN(progressPayload.ebookProgress) ? 0 : Number(progressPayload.ebookProgress),
        finishedAt: progressPayload.finishedAt || null,
        createdAt: progressPayload.createdAt || new Date(),
        extraData: {
          libraryItemId: progressPayload.libraryItemId,
          progress: isNullOrNaN(progressPayload.progress) ? 0 : Number(progressPayload.progress)
        }
      }
      if (newMediaProgressPayload.isFinished) {
        newMediaProgressPayload.finishedAt = new Date()
        newMediaProgressPayload.finishedAt = newMediaProgressPayload.finishedAt || new Date()
        newMediaProgressPayload.extraData.progress = 1
      } else {
        newMediaProgressPayload.finishedAt = null
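
Reviewer note: the net effect of the finishedAt changes above is that a client-supplied timestamp now survives creation instead of being overwritten with "now"; a minimal sketch of just that branch (payload values hypothetical, not part of the patch):

const progressPayload = { isFinished: true, finishedAt: 1735689600000 } // hypothetical client payload
const newMediaProgressPayload = { isFinished: progressPayload.isFinished, finishedAt: progressPayload.finishedAt || null }
if (newMediaProgressPayload.isFinished) {
  newMediaProgressPayload.finishedAt = newMediaProgressPayload.finishedAt || new Date()
} else {
  newMediaProgressPayload.finishedAt = null
}
console.log(newMediaProgressPayload.finishedAt) // 1735689600000 (kept, not replaced)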

@@ -20,6 +20,8 @@ class PodcastEpisodeDownload {

    this.appendRandomId = false

    this.targetFilename = null

    this.startedAt = null
    this.createdAt = null
    this.finishedAt = null

@@ -43,7 +45,8 @@ class PodcastEpisodeDownload {
      season: this.rssPodcastEpisode?.season ?? null,
      episode: this.rssPodcastEpisode?.episode ?? null,
      episodeType: this.rssPodcastEpisode?.episodeType ?? 'full',
      publishedAt: this.rssPodcastEpisode?.publishedAt ?? null
      publishedAt: this.rssPodcastEpisode?.publishedAt ?? null,
      guid: this.rssPodcastEpisode?.guid ?? null
    }
  }

@@ -73,11 +76,6 @@ class PodcastEpisodeDownload {
  get episodeTitle() {
    return this.rssPodcastEpisode.title
  }
  get targetFilename() {
    const appendage = this.appendRandomId ? ` (${uuidv4()})` : ''
    const filename = `${this.rssPodcastEpisode.title}${appendage}.${this.fileExtension}`
    return sanitizeFilename(filename)
  }
  get targetPath() {
    return filePathToPOSIX(Path.join(this.libraryItem.path, this.targetFilename))
  }

@@ -92,6 +90,23 @@ class PodcastEpisodeDownload {
    return new Date(this.rssPodcastEpisode.publishedAt).getFullYear()
  }

  /**
   * @param {string} title
   */
  getSanitizedFilename(title) {
    const appendage = this.appendRandomId ? ` (${this.id})` : ''
    const filename = `${title.trim()}${appendage}.${this.fileExtension}`
    return sanitizeFilename(filename)
  }

  /**
   * @param {boolean} appendRandomId
   */
  setAppendRandomId(appendRandomId) {
    this.appendRandomId = appendRandomId
    this.targetFilename = this.getSanitizedFilename(this.rssPodcastEpisode.title || '')
  }

  /**
   *
   * @param {import('../utils/podcastUtils').RssPodcastEpisode} rssPodcastEpisode - from rss feed

@@ -111,6 +126,8 @@ class PodcastEpisodeDownload {
      this.url = encodeURI(url)
    }

    this.targetFilename = this.getSanitizedFilename(this.rssPodcastEpisode.title || '')

    this.libraryItem = libraryItem
    this.isAutoDownload = isAutoDownload
    this.createdAt = Date.now()
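
Reviewer note: the old targetFilename getter generated a fresh uuidv4() appendage on every read, so repeated reads could disagree; the new code computes the name once from the stable download id. A sketch of the stabilized behavior (sanitizeFilename stubbed as identity, class simplified, not part of the patch):

const sanitizeFilename = (f) => f // stand-in for the server util
class Download {
  constructor() {
    this.id = 'abcd1234' // stable download id
    this.fileExtension = 'mp3'
    this.appendRandomId = true
  }
  getSanitizedFilename(title) {
    const appendage = this.appendRandomId ? ` (${this.id})` : ''
    return sanitizeFilename(`${title.trim()}${appendage}.${this.fileExtension}`)
  }
}
const dl = new Download()
console.log(dl.getSanitizedFilename('Episode 1')) // Episode 1 (abcd1234).mp3
console.log(dl.getSanitizedFilename('Episode 1')) // same name on every call, unlike the old uuidv4() getter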

@@ -29,7 +29,7 @@ class AudioTrack {
    this.duration = audioFile.duration
    this.title = audioFile.metadata.filename || ''

    this.contentUrl = `${global.RouterBasePath}/api/items/${itemId}/file/${audioFile.ino}`
    this.contentUrl = `/api/items/${itemId}/file/${audioFile.ino}`
    this.mimeType = audioFile.mimeType
    this.codec = audioFile.codec || null
    this.metadata = audioFile.metadata.clone()

@@ -44,4 +44,4 @@ class AudioTrack {
    this.mimeType = 'application/vnd.apple.mpegurl'
  }
}
module.exports = AudioTrack
module.exports = AudioTrack

@@ -1,5 +1,4 @@
const axios = require('axios').default
const htmlSanitizer = require('../utils/htmlSanitizer')
const Logger = require('../Logger')
const { isValidASIN } = require('../utils/index')

@@ -68,7 +67,7 @@ class Audible {
      narrator: narrators ? narrators.map(({ name }) => name).join(', ') : null,
      publisher: publisherName,
      publishedYear: releaseDate ? releaseDate.split('-')[0] : null,
      description: summary ? htmlSanitizer.stripAllTags(summary) : null,
      description: summary || null,
      cover: image,
      asin,
      genres: genresFiltered.length ? genresFiltered : null,

@@ -1,6 +1,7 @@
const axios = require('axios').default
const Database = require('../Database')
const Logger = require('../Logger')
const htmlSanitizer = require('../utils/htmlSanitizer')

class CustomProviderAdapter {
  #responseTimeout = 30000

@@ -40,6 +41,9 @@ class CustomProviderAdapter {
    }
    const queryString = new URLSearchParams(queryObj).toString()

    const url = `${provider.url}/search?${queryString}`
    Logger.debug(`[CustomMetadataProvider] Search url: ${url}`)

    // Setup headers
    const axiosOptions = {
      timeout

@@ -51,7 +55,7 @@ class CustomProviderAdapter {
    }

    const matches = await axios
      .get(`${provider.url}/search?${queryString}`, axiosOptions)
      .get(url, axiosOptions)
      .then((res) => {
        if (!res?.data || !Array.isArray(res.data.matches)) return null
        return res.data.matches

@@ -65,25 +69,57 @@ class CustomProviderAdapter {
      throw new Error('Custom provider returned malformed response')
    }

    const toStringOrUndefined = (value) => {
      if (typeof value === 'string' || typeof value === 'number') return String(value)
      if (Array.isArray(value) && value.every((v) => typeof v === 'string' || typeof v === 'number')) return value.join(',')
      return undefined
    }
    const validateSeriesArray = (series) => {
      if (!Array.isArray(series) || !series.length) return undefined
      return series
        .map((s) => {
          if (!s?.series || typeof s.series !== 'string') return undefined
          const _series = {
            series: s.series
          }
          if (s.sequence && (typeof s.sequence === 'string' || typeof s.sequence === 'number')) {
            _series.sequence = String(s.sequence)
          }
          return _series
        })
        .filter((s) => s !== undefined)
    }

    // re-map keys to throw out
    return matches.map(({ title, subtitle, author, narrator, publisher, publishedYear, description, cover, isbn, asin, genres, tags, series, language, duration }) => {
      return {
        title,
        subtitle,
        author,
        narrator,
        publisher,
        publishedYear,
        description,
        cover,
        isbn,
        asin,
        genres,
        tags: tags?.join(',') || null,
        series: series?.length ? series : null,
        language,
        duration
    return matches.map((match) => {
      const { title, subtitle, author, narrator, publisher, publishedYear, description, cover, isbn, asin, genres, tags, series, language, duration } = match

      const payload = {
        title: toStringOrUndefined(title),
        subtitle: toStringOrUndefined(subtitle),
        author: toStringOrUndefined(author),
        narrator: toStringOrUndefined(narrator),
        publisher: toStringOrUndefined(publisher),
        publishedYear: toStringOrUndefined(publishedYear),
        description: description && typeof description === 'string' ? htmlSanitizer.sanitize(description) : undefined,
        cover: toStringOrUndefined(cover),
        isbn: toStringOrUndefined(isbn),
        asin: toStringOrUndefined(asin),
        genres: Array.isArray(genres) && genres.every((g) => typeof g === 'string') ? genres : undefined,
        tags: toStringOrUndefined(tags),
        series: validateSeriesArray(series),
        language: toStringOrUndefined(language),
        duration: !isNaN(duration) && duration !== null ? Number(duration) : undefined
      }

      // Remove undefined values
      for (const key in payload) {
        if (payload[key] === undefined) {
          delete payload[key]
        }
      }

      return payload
    })
  }
}
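
Reviewer note: a quick sketch of how the new coercion helper normalizes untrusted provider fields, mirroring the toStringOrUndefined defined above (plain Node, not part of the patch):

const toStringOrUndefined = (value) => {
  if (typeof value === 'string' || typeof value === 'number') return String(value)
  if (Array.isArray(value) && value.every((v) => typeof v === 'string' || typeof v === 'number')) return value.join(',')
  return undefined
}

console.log(toStringOrUndefined(2015))           // '2015'
console.log(toStringOrUndefined(['sci-fi', 42])) // 'sci-fi,42'
console.log(toStringOrUndefined({ bad: true }))  // undefined (later deleted from the payload)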

@@ -66,10 +66,10 @@ class OpenLibrary {
  }

  parsePublishYear(doc, worksData) {
    if (doc.first_publish_year && !isNaN(doc.first_publish_year)) return doc.first_publish_year
    if (doc.first_publish_year && !isNaN(doc.first_publish_year)) return String(doc.first_publish_year)
    if (worksData.first_publish_date) {
      var year = worksData.first_publish_date.split('-')[0]
      if (!isNaN(year)) return year
      if (!isNaN(year)) return String(year)
    }
    return null
  }

@@ -112,7 +112,7 @@ class iTunes {
      artistId: data.artistId,
      title: data.collectionName,
      author,
      description: htmlSanitizer.stripAllTags(data.description || ''),
      description: data.description || null,
      publishedYear: data.releaseDate ? data.releaseDate.split('-')[0] : null,
      genres: data.primaryGenreName ? [data.primaryGenreName] : null,
      cover: this.getCoverArtwork(data)
@ -33,8 +33,7 @@ const RSSFeedController = require('../controllers/RSSFeedController')
|
|||
const CustomMetadataProviderController = require('../controllers/CustomMetadataProviderController')
|
||||
const MiscController = require('../controllers/MiscController')
|
||||
const ShareController = require('../controllers/ShareController')
|
||||
|
||||
const { getTitleIgnorePrefix } = require('../utils/index')
|
||||
const StatsController = require('../controllers/StatsController')
|
||||
|
||||
class ApiRouter {
|
||||
constructor(Server) {
|
||||
|
@ -65,7 +64,7 @@ class ApiRouter {
|
|||
//
|
||||
// Library Routes
|
||||
//
|
||||
this.router.get(/^\/libraries/i, this.apiCacheManager.middleware)
|
||||
this.router.get(/^\/libraries/, this.apiCacheManager.middleware)
|
||||
this.router.post('/libraries', LibraryController.create.bind(this))
|
||||
this.router.get('/libraries', LibraryController.findAll.bind(this))
|
||||
this.router.get('/libraries/:id', LibraryController.middleware.bind(this), LibraryController.findOne.bind(this))
|
||||
|
@ -94,6 +93,7 @@ class ApiRouter {
this.router.post('/libraries/order', LibraryController.reorder.bind(this))
this.router.post('/libraries/:id/remove-metadata', LibraryController.middleware.bind(this), LibraryController.removeAllMetadataFiles.bind(this))
this.router.get('/libraries/:id/podcast-titles', LibraryController.middleware.bind(this), LibraryController.getPodcastTitles.bind(this))
this.router.get('/libraries/:id/download', LibraryController.middleware.bind(this), LibraryController.downloadMultiple.bind(this))

//
// Item Routes
@ -319,6 +319,12 @@ class ApiRouter {
this.router.post('/share/mediaitem', ShareController.createMediaItemShare.bind(this))
this.router.delete('/share/mediaitem/:id', ShareController.deleteMediaItemShare.bind(this))

//
// Stats Routes
//
this.router.get('/stats/year/:year', StatsController.middleware.bind(this), StatsController.getAdminStatsForYear.bind(this))
this.router.get('/stats/server', StatsController.middleware.bind(this), StatsController.getServerStats.bind(this))

//
// Misc Routes
//
@ -337,7 +343,6 @@ class ApiRouter {
this.router.get('/auth-settings', MiscController.getAuthSettings.bind(this))
this.router.patch('/auth-settings', MiscController.updateAuthSettings.bind(this))
this.router.post('/watcher/update', MiscController.updateWatchedPath.bind(this))
this.router.get('/stats/year/:year', MiscController.getAdminStatsForYear.bind(this))
this.router.get('/logger-data', MiscController.getLoggerData.bind(this))
}

@ -392,21 +397,51 @@ class ApiRouter {
async checkRemoveEmptySeries(seriesIds) {
if (!seriesIds?.length) return

const series = await Database.seriesModel.findAll({
where: {
id: seriesIds
},
attributes: ['id', 'name', 'libraryId'],
include: {
model: Database.bookModel,
attributes: ['id']
}
})
const transaction = await Database.sequelize.transaction()
try {
const seriesToRemove = (
await Database.seriesModel.findAll({
where: [
{
id: seriesIds
},
sequelize.where(sequelize.literal('(SELECT count(*) FROM bookSeries bs WHERE bs.seriesId = series.id)'), 0)
],
attributes: ['id', 'name', 'libraryId'],
include: {
model: Database.bookModel,
attributes: ['id'],
required: false // Ensure it includes series even if no books exist
},
transaction
})
).map((s) => ({ id: s.id, name: s.name, libraryId: s.libraryId }))

for (const s of series) {
if (!s.books.length) {
await this.removeEmptySeries(s)
if (seriesToRemove.length) {
await Database.seriesModel.destroy({
where: {
id: seriesToRemove.map((s) => s.id)
},
transaction
})
}

await transaction.commit()

seriesToRemove.forEach(({ id, name, libraryId }) => {
Logger.info(`[ApiRouter] Series "${name}" is now empty. Removing series`)

// Remove series from library filter data
Database.removeSeriesFromFilterData(libraryId, id)
SocketAuthority.emitter('series_removed', { id: id, libraryId: libraryId })
})
// Close rss feeds - remove from db and emit socket event
if (seriesToRemove.length) {
await RssFeedManager.closeFeedsForEntityIds(seriesToRemove.map((s) => s.id))
}
} catch (error) {
await transaction.rollback()
Logger.error(`[ApiRouter] Error removing empty series: ${error.message}`)
}
}
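To make the empty-series filter above concrete, here is a sketch (an assumption about the generated SQL, not part of this commit) of what the sequelize.where/sequelize.literal pair builds:

// sequelize.where(expr, 0) is the two-argument equality form, so the findAll WHERE clause
// resolves to roughly:
//   WHERE series.id IN (:seriesIds)
//     AND (SELECT count(*) FROM bookSeries bs WHERE bs.seriesId = series.id) = 0
const sequelize = require('sequelize')
const emptySeriesFilter = sequelize.where(sequelize.literal('(SELECT count(*) FROM bookSeries bs WHERE bs.seriesId = series.id)'), 0)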
@ -420,61 +455,56 @@ class ApiRouter {
async checkRemoveAuthorsWithNoBooks(authorIds) {
if (!authorIds?.length) return

const bookAuthorsToRemove = (
await Database.authorModel.findAll({
where: [
{
id: authorIds,
asin: {
[sequelize.Op.or]: [null, '']
const transaction = await Database.sequelize.transaction()
try {
// Select authors with locking to prevent concurrent updates
const bookAuthorsToRemove = (
await Database.authorModel.findAll({
where: [
{
id: authorIds,
asin: {
[sequelize.Op.or]: [null, '']
},
description: {
[sequelize.Op.or]: [null, '']
},
imagePath: {
[sequelize.Op.or]: [null, '']
}
},
description: {
[sequelize.Op.or]: [null, '']
},
imagePath: {
[sequelize.Op.or]: [null, '']
}
},
sequelize.where(sequelize.literal('(SELECT count(*) FROM bookAuthors ba WHERE ba.authorId = author.id)'), 0)
],
attributes: ['id', 'name', 'libraryId'],
raw: true
})
).map((au) => ({ id: au.id, name: au.name, libraryId: au.libraryId }))
sequelize.where(sequelize.literal('(SELECT count(*) FROM bookAuthors ba WHERE ba.authorId = author.id)'), 0)
],
attributes: ['id', 'name', 'libraryId'],
raw: true,
transaction
})
).map((au) => ({ id: au.id, name: au.name, libraryId: au.libraryId }))

if (bookAuthorsToRemove.length) {
await Database.authorModel.destroy({
where: {
id: bookAuthorsToRemove.map((au) => au.id)
}
})
if (bookAuthorsToRemove.length) {
await Database.authorModel.destroy({
where: {
id: bookAuthorsToRemove.map((au) => au.id)
},
transaction
})
}

await transaction.commit()

// Remove all book authors after completing remove from database
bookAuthorsToRemove.forEach(({ id, name, libraryId }) => {
Database.removeAuthorFromFilterData(libraryId, id)
// TODO: Clients were expecting full author in payload but its unnecessary
SocketAuthority.emitter('author_removed', { id, libraryId })
Logger.info(`[ApiRouter] Removed author "${name}" with no books`)
})
} catch (error) {
await transaction.rollback()
Logger.error(`[ApiRouter] Error removing authors: ${error.message}`)
}
}

/**
* Remove an empty series & close an open RSS feed
* @param {import('../models/Series')} series
*/
async removeEmptySeries(series) {
await RssFeedManager.closeFeedForEntityId(series.id)
Logger.info(`[ApiRouter] Series "${series.name}" is now empty. Removing series`)

// Remove series from library filter data
Database.removeSeriesFromFilterData(series.libraryId, series.id)
SocketAuthority.emitter('series_removed', {
id: series.id,
libraryId: series.libraryId
})

await series.destroy()
}

async getUserListeningSessionsHelper(userId) {
const userSessions = await Database.getPlaybackSessions({ userId })
return userSessions.sort((a, b) => b.updatedAt - a.updatedAt)
@ -308,6 +308,27 @@ class AudioFileScanner {
bookMetadata.series = series
}
} else {
// Detect if multiple series are in the series & series-part tags.
// Note: This requires that every series has a sequence and that they are separated by a semicolon.
if (value.includes(';') && audioFileMetaTags.tagSeriesPart?.includes(';')) {
const seriesSplit = value
.split(';')
.map((s) => s.trim())
.filter(Boolean)
const seriesSequenceSplit = audioFileMetaTags.tagSeriesPart
.split(';')
.map((s) => s.trim())
.filter(Boolean)
if (seriesSplit.length > 1 && seriesSplit.length === seriesSequenceSplit.length) {
bookMetadata.series = seriesSplit.map((series, index) => ({
name: series,
sequence: seriesSequenceSplit[index] || null
}))
libraryScan.addLog(LogLevel.DEBUG, `Detected multiple series in series/series-part tags: ${bookMetadata.series.map((s) => `${s.name} #${s.sequence}`).join(', ')}`)
return
}
}

// Original embed used "series" and "series-part" tags
bookMetadata.series = [
{
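For illustration only, a sketch with hypothetical tag values (not from this commit) showing what the multi-series branch above produces:

// Hypothetical embedded tags: series = 'Dune; Heroes of Dune', series-part = '1; 2'
const names = 'Dune; Heroes of Dune'.split(';').map((s) => s.trim()).filter(Boolean)
const sequences = '1; 2'.split(';').map((s) => s.trim()).filter(Boolean)
const parsedSeries = names.map((name, index) => ({ name, sequence: sequences[index] || null }))
// => [{ name: 'Dune', sequence: '1' }, { name: 'Heroes of Dune', sequence: '2' }]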
@ -499,16 +520,17 @@ class AudioFileScanner {
// Filter these out and log a warning
// See https://github.com/advplyr/audiobookshelf/issues/3361
const afChaptersCleaned =
file.chapters?.filter((c) => {
file.chapters?.filter((c, i) => {
if (c.end - c.start < 0.1) {
libraryScan.addLog(LogLevel.WARN, `Chapter "${c.title}" has invalid duration of ${c.end - c.start} seconds. Skipping this chapter.`)
libraryScan.addLog(LogLevel.WARN, `Audio file "${file.metadata.filename}" Chapter "${c.title}" (index ${i}) has invalid duration of ${c.end - c.start} seconds. Skipping this chapter.`)
return false
}
return true
}) || []
const afChapters = afChaptersCleaned.map((c) => ({

const afChapters = afChaptersCleaned.map((c, i) => ({
...c,
id: c.id + currChapterId,
id: currChapterId + i,
start: c.start + currStartTime,
end: c.end + currStartTime
}))
@ -475,6 +475,8 @@ class BookScanner {
bookAuthors: [],
bookSeries: []
}

const createdAtTimestamp = new Date().getTime()
if (bookMetadata.authors.length) {
for (const authorName of bookMetadata.authors) {
const matchingAuthorId = await Database.getAuthorIdByName(libraryItemData.libraryId, authorName)
@ -485,6 +487,8 @@ class BookScanner {
} else {
// New author
bookObject.bookAuthors.push({
// Ensures authors are in a set order
createdAt: createdAtTimestamp + bookObject.bookAuthors.length,
author: {
libraryId: libraryItemData.libraryId,
name: authorName,
@ -521,6 +525,10 @@ class BookScanner {
libraryItemObj.isMissing = false
libraryItemObj.isInvalid = false
libraryItemObj.extraData = {}
libraryItemObj.title = bookMetadata.title
libraryItemObj.titleIgnorePrefix = getTitleIgnorePrefix(bookMetadata.title)
libraryItemObj.authorNamesFirstLast = bookMetadata.authors.join(', ')
libraryItemObj.authorNamesLastFirst = bookMetadata.authors.map((author) => Database.authorModel.getLastFirst(author)).join(', ')

// Set isSupplementary flag on ebook library files
for (const libraryFile of libraryItemObj.libraryFiles) {
@ -64,7 +64,7 @@ class LibraryItemScanner {

const { libraryItem: expandedLibraryItem, wasUpdated } = await this.rescanLibraryItemMedia(libraryItem, libraryItemScanData, library.settings, scanLogger)
if (libraryItemDataUpdated || wasUpdated) {
SocketAuthority.emitter('item_updated', expandedLibraryItem.toOldJSONExpanded())
SocketAuthority.libraryItemEmitter('item_updated', expandedLibraryItem)

await this.checkAuthorsAndSeriesRemovedFromBooks(library.id, scanLogger)

@ -4,7 +4,6 @@ const fs = require('../libs/fsExtra')
const date = require('../libs/dateAndTime')

const Logger = require('../Logger')
const { LogLevel } = require('../utils/constants')
const { secondsToTimestamp, elapsedPretty } = require('../utils/index')

class LibraryScan {
@ -109,20 +108,11 @@ class LibraryScan {
this.elapsed = this.finishedAt - this.startedAt
}

getLogLevelString(level) {
for (const key in LogLevel) {
if (LogLevel[key] === level) {
return key
}
}
return 'UNKNOWN'
}

addLog(level, ...args) {
const logObj = {
timestamp: this.timestamp,
message: args.join(' '),
levelName: this.getLogLevelString(level),
levelName: Logger.getLogLevelString(level),
level
}

@ -223,11 +223,7 @@ class LibraryScanner {

// Emit item updates in chunks of 10 to client
if (libraryItemsUpdated.length === 10) {
// TODO: Should only emit to clients where library item is accessible
SocketAuthority.emitter(
'items_updated',
libraryItemsUpdated.map((li) => li.toOldJSONExpanded())
)
SocketAuthority.libraryItemsEmitter('items_updated', libraryItemsUpdated)
libraryItemsUpdated = []
}

@ -235,11 +231,7 @@ class LibraryScanner {
}
// Emit item updates to client
if (libraryItemsUpdated.length) {
// TODO: Should only emit to clients where library item is accessible
SocketAuthority.emitter(
'items_updated',
libraryItemsUpdated.map((li) => li.toOldJSONExpanded())
)
SocketAuthority.libraryItemsEmitter('items_updated', libraryItemsUpdated)
}

// Authors and series that were removed from books should be removed if they are now empty
@ -277,11 +269,7 @@ class LibraryScanner {

// Emit new items in chunks of 10 to client
if (newLibraryItems.length === 10) {
// TODO: Should only emit to clients where library item is accessible
SocketAuthority.emitter(
'items_added',
newLibraryItems.map((li) => li.toOldJSONExpanded())
)
SocketAuthority.libraryItemsEmitter('items_added', newLibraryItems)
newLibraryItems = []
}

@ -289,11 +277,7 @@ class LibraryScanner {
}
// Emit new items to client
if (newLibraryItems.length) {
// TODO: Should only emit to clients where library item is accessible
SocketAuthority.emitter(
'items_added',
newLibraryItems.map((li) => li.toOldJSONExpanded())
)
SocketAuthority.libraryItemsEmitter('items_added', newLibraryItems)
}
}

@ -609,7 +593,7 @@ class LibraryScanner {
Logger.info(`[LibraryScanner] Scanning file update group and library item was deleted "${existingLibraryItem.media.title}" - marking as missing`)
existingLibraryItem.isMissing = true
await existingLibraryItem.save()
SocketAuthority.emitter('item_updated', existingLibraryItem.toOldJSONExpanded())
SocketAuthority.libraryItemEmitter('item_updated', existingLibraryItem)

itemGroupingResults[itemDir] = ScanResult.REMOVED
continue
@ -643,7 +627,7 @@ class LibraryScanner {
const isSingleMediaItem = isSingleMediaFile(fileUpdateGroup, itemDir)
const newLibraryItem = await LibraryItemScanner.scanPotentialNewLibraryItem(fullPath, library, folder, isSingleMediaItem)
if (newLibraryItem) {
SocketAuthority.emitter('item_added', newLibraryItem.toOldJSONExpanded())
SocketAuthority.libraryItemEmitter('item_added', newLibraryItem)
}
itemGroupingResults[itemDir] = newLibraryItem ? ScanResult.ADDED : ScanResult.NOTHING
}
@ -2,24 +2,26 @@ const { parseOpfMetadataXML } = require('../utils/parsers/parseOpfMetadata')
const { readTextFile } = require('../utils/fileUtils')

class OpfFileScanner {
constructor() { }
constructor() {}

/**
* Parse metadata from .opf file found in library scan and update bookMetadata
*
* @param {import('../models/LibraryItem').LibraryFileObject} opfLibraryFileObj
* @param {Object} bookMetadata
*
* @param {import('../models/LibraryItem').LibraryFileObject} opfLibraryFileObj
* @param {Object} bookMetadata
*/
async scanBookOpfFile(opfLibraryFileObj, bookMetadata) {
const xmlText = await readTextFile(opfLibraryFileObj.metadata.path)
const opfMetadata = xmlText ? await parseOpfMetadataXML(xmlText) : null
if (opfMetadata) {
for (const key in opfMetadata) {
if (key === 'tags') { // Add tags only if tags are empty
if (key === 'tags') {
// Add tags only if tags are empty
if (opfMetadata.tags.length) {
bookMetadata.tags = opfMetadata.tags
}
} else if (key === 'genres') { // Add genres only if genres are empty
} else if (key === 'genres') {
// Add genres only if genres are empty
if (opfMetadata.genres.length) {
bookMetadata.genres = opfMetadata.genres
}

@ -42,4 +44,4 @@ class OpfFileScanner {
}
}
}
module.exports = new OpfFileScanner()
module.exports = new OpfFileScanner()
@ -1,4 +1,4 @@
const uuidv4 = require("uuid").v4
const uuidv4 = require('uuid').v4
const Path = require('path')
const { LogLevel } = require('../utils/constants')
const { getTitleIgnorePrefix } = require('../utils/index')

@ -8,9 +8,9 @@ const { filePathToPOSIX, getFileTimestampsWithIno } = require('../utils/fileUtil
const AudioFile = require('../objects/files/AudioFile')
const CoverManager = require('../managers/CoverManager')
const LibraryFile = require('../objects/files/LibraryFile')
const fsExtra = require("../libs/fsExtra")
const PodcastEpisode = require("../models/PodcastEpisode")
const AbsMetadataFileScanner = require("./AbsMetadataFileScanner")
const fsExtra = require('../libs/fsExtra')
const PodcastEpisode = require('../models/PodcastEpisode')
const AbsMetadataFileScanner = require('./AbsMetadataFileScanner')

/**
* Metadata for podcasts pulled from files
@ -32,13 +32,13 @@ const AbsMetadataFileScanner = require("./AbsMetadataFileScanner")
*/

class PodcastScanner {
constructor() { }
constructor() {}

/**
* @param {import('../models/LibraryItem')} existingLibraryItem
* @param {import('./LibraryItemScanData')} libraryItemData
* @param {import('../models/LibraryItem')} existingLibraryItem
* @param {import('./LibraryItemScanData')} libraryItemData
* @param {import('../models/Library').LibrarySettingsObject} librarySettings
* @param {import('./LibraryScan')} libraryScan
* @param {import('./LibraryScan')} libraryScan
* @returns {Promise<{libraryItem:import('../models/LibraryItem'), wasUpdated:boolean}>}
*/
async rescanExistingPodcastLibraryItem(existingLibraryItem, libraryItemData, librarySettings, libraryScan) {
@ -59,28 +59,53 @@ class PodcastScanner {

if (libraryItemData.hasAudioFileChanges || libraryItemData.audioLibraryFiles.length !== existingPodcastEpisodes.length) {
// Filter out and destroy episodes that were removed
existingPodcastEpisodes = await Promise.all(existingPodcastEpisodes.filter(async ep => {
const episodesToRemove = []
existingPodcastEpisodes = existingPodcastEpisodes.filter((ep) => {
if (libraryItemData.checkAudioFileRemoved(ep.audioFile)) {
libraryScan.addLog(LogLevel.INFO, `Podcast episode "${ep.title}" audio file was removed`)
// TODO: Should clean up other data linked to this episode
await ep.destroy()
episodesToRemove.push(ep)
return false
}
return true
}))
})

if (episodesToRemove.length) {
// Remove episodes from playlists and media progress
const episodeIds = episodesToRemove.map((ep) => ep.id)
await Database.playlistModel.removeMediaItemsFromPlaylists(episodeIds)
const mediaProgressRemoved = await Database.mediaProgressModel.destroy({
where: {
mediaItemId: episodeIds
}
})
if (mediaProgressRemoved) {
libraryScan.addLog(LogLevel.INFO, `Removed ${mediaProgressRemoved} media progress for episodes`)
}

// Remove episodes
await Promise.all(
episodesToRemove.map(async (ep) => {
await ep.destroy()
libraryScan.addLog(LogLevel.INFO, `Podcast episode "${ep.title}" audio file was removed`)
})
)
}

// Update audio files that were modified
if (libraryItemData.audioLibraryFilesModified.length) {
let scannedAudioFiles = await AudioFileScanner.executeMediaFileScans(existingLibraryItem.mediaType, libraryItemData, libraryItemData.audioLibraryFilesModified.map(lf => lf.new))
let scannedAudioFiles = await AudioFileScanner.executeMediaFileScans(
existingLibraryItem.mediaType,
libraryItemData,
libraryItemData.audioLibraryFilesModified.map((lf) => lf.new)
)

for (const podcastEpisode of existingPodcastEpisodes) {
let matchedScannedAudioFile = scannedAudioFiles.find(saf => saf.metadata.path === podcastEpisode.audioFile.metadata.path)
let matchedScannedAudioFile = scannedAudioFiles.find((saf) => saf.metadata.path === podcastEpisode.audioFile.metadata.path)
if (!matchedScannedAudioFile) {
matchedScannedAudioFile = scannedAudioFiles.find(saf => saf.ino === podcastEpisode.audioFile.ino)
matchedScannedAudioFile = scannedAudioFiles.find((saf) => saf.ino === podcastEpisode.audioFile.ino)
}

if (matchedScannedAudioFile) {
scannedAudioFiles = scannedAudioFiles.filter(saf => saf !== matchedScannedAudioFile)
scannedAudioFiles = scannedAudioFiles.filter((saf) => saf !== matchedScannedAudioFile)
const audioFile = new AudioFile(podcastEpisode.audioFile)
audioFile.updateFromScan(matchedScannedAudioFile)
podcastEpisode.audioFile = audioFile.toJSON()
@ -107,6 +132,9 @@ class PodcastScanner {

// Create new podcast episodes from new found audio files
for (const newAudioFile of newAudioFiles) {
// Podcast episode audio files always have index 1
newAudioFile.index = 1

const newEpisode = {
title: newAudioFile.metaTags.tagTitle || newAudioFile.metadata.filenameNoExt,
subtitle: null,
@ -130,16 +158,20 @@ class PodcastScanner {
}

let hasMediaChanges = false
if (existingPodcastEpisodes.length !== media.numEpisodes) {
media.numEpisodes = existingPodcastEpisodes.length
hasMediaChanges = true
}

// Check if cover was removed
if (media.coverPath && libraryItemData.imageLibraryFilesRemoved.some(lf => lf.metadata.path === media.coverPath)) {
if (media.coverPath && libraryItemData.imageLibraryFilesRemoved.some((lf) => lf.metadata.path === media.coverPath)) {
media.coverPath = null
hasMediaChanges = true
}

// Update cover if it was modified
if (media.coverPath && libraryItemData.imageLibraryFilesModified.length) {
let coverMatch = libraryItemData.imageLibraryFilesModified.find(iFile => iFile.old.metadata.path === media.coverPath)
let coverMatch = libraryItemData.imageLibraryFilesModified.find((iFile) => iFile.old.metadata.path === media.coverPath)
if (coverMatch) {
const coverPath = coverMatch.new.metadata.path
if (coverPath !== media.coverPath) {
@ -154,7 +186,7 @@ class PodcastScanner {
// Check if cover is not set and image files were found
if (!media.coverPath && libraryItemData.imageLibraryFiles.length) {
// Prefer using a cover image with the name "cover" otherwise use the first image
const coverMatch = libraryItemData.imageLibraryFiles.find(iFile => /\/cover\.[^.\/]*$/.test(iFile.metadata.path))
const coverMatch = libraryItemData.imageLibraryFiles.find((iFile) => /\/cover\.[^.\/]*$/.test(iFile.metadata.path))
media.coverPath = coverMatch?.metadata.path || libraryItemData.imageLibraryFiles[0].metadata.path
hasMediaChanges = true
}
@ -167,7 +199,7 @@ class PodcastScanner {

if (key === 'genres') {
const existingGenres = media.genres || []
if (podcastMetadata.genres.some(g => !existingGenres.includes(g)) || existingGenres.some(g => !podcastMetadata.genres.includes(g))) {
if (podcastMetadata.genres.some((g) => !existingGenres.includes(g)) || existingGenres.some((g) => !podcastMetadata.genres.includes(g))) {
libraryScan.addLog(LogLevel.DEBUG, `Updating podcast genres "${existingGenres.join(',')}" => "${podcastMetadata.genres.join(',')}" for podcast "${podcastMetadata.title}"`)
media.genres = podcastMetadata.genres
media.changed('genres', true)
@ -175,7 +207,7 @@ class PodcastScanner {
}
} else if (key === 'tags') {
const existingTags = media.tags || []
if (podcastMetadata.tags.some(t => !existingTags.includes(t)) || existingTags.some(t => !podcastMetadata.tags.includes(t))) {
if (podcastMetadata.tags.some((t) => !existingTags.includes(t)) || existingTags.some((t) => !podcastMetadata.tags.includes(t))) {
libraryScan.addLog(LogLevel.DEBUG, `Updating podcast tags "${existingTags.join(',')}" => "${podcastMetadata.tags.join(',')}" for podcast "${podcastMetadata.title}"`)
media.tags = podcastMetadata.tags
media.changed('tags', true)
@ -190,7 +222,7 @@ class PodcastScanner {

// If no cover then extract cover from audio file if available
if (!media.coverPath && existingPodcastEpisodes.length) {
const audioFiles = existingPodcastEpisodes.map(ep => ep.audioFile)
const audioFiles = existingPodcastEpisodes.map((ep) => ep.audioFile)
const extractedCoverPath = await CoverManager.saveEmbeddedCoverArt(audioFiles, existingLibraryItem.id, existingLibraryItem.path)
if (extractedCoverPath) {
libraryScan.addLog(LogLevel.DEBUG, `Updating podcast "${podcastMetadata.title}" extracted embedded cover art from audio file to path "${extractedCoverPath}"`)
@ -222,10 +254,10 @@ class PodcastScanner {
}

/**
*
* @param {import('./LibraryItemScanData')} libraryItemData
*
* @param {import('./LibraryItemScanData')} libraryItemData
* @param {import('../models/Library').LibrarySettingsObject} librarySettings
* @param {import('./LibraryScan')} libraryScan
* @param {import('./LibraryScan')} libraryScan
* @returns {Promise<import('../models/LibraryItem')>}
*/
async scanNewPodcastLibraryItem(libraryItemData, librarySettings, libraryScan) {
@ -242,6 +274,9 @@ class PodcastScanner {

// Create podcast episodes from audio files
for (const audioFile of scannedAudioFiles) {
// Podcast episode audio files always have index 1
audioFile.index = 1

const newEpisode = {
title: audioFile.metaTags.tagTitle || audioFile.metadata.filenameNoExt,
subtitle: null,
@ -267,7 +302,7 @@ class PodcastScanner {
// Set cover image from library file
if (libraryItemData.imageLibraryFiles.length) {
// Prefer using a cover image with the name "cover" otherwise use the first image
const coverMatch = libraryItemData.imageLibraryFiles.find(iFile => /\/cover\.[^.\/]*$/.test(iFile.metadata.path))
const coverMatch = libraryItemData.imageLibraryFiles.find((iFile) => /\/cover\.[^.\/]*$/.test(iFile.metadata.path))
podcastMetadata.coverPath = coverMatch?.metadata.path || libraryItemData.imageLibraryFiles[0].metadata.path
}

@ -283,7 +318,8 @@ class PodcastScanner {
lastEpisodeCheck: 0,
maxEpisodesToKeep: 0,
maxNewEpisodesToDownload: 3,
podcastEpisodes: newPodcastEpisodes
podcastEpisodes: newPodcastEpisodes,
numEpisodes: newPodcastEpisodes.length
}

const libraryItemObj = libraryItemData.libraryItemObject
@ -291,6 +327,8 @@ class PodcastScanner {
libraryItemObj.isMissing = false
libraryItemObj.isInvalid = false
libraryItemObj.extraData = {}
libraryItemObj.title = podcastObject.title
libraryItemObj.titleIgnorePrefix = getTitleIgnorePrefix(podcastObject.title)

// If cover was not found in folder then check embedded covers in audio files
if (!podcastObject.coverPath && scannedAudioFiles.length) {
@ -324,10 +362,10 @@ class PodcastScanner {
}

/**
*
*
* @param {PodcastEpisode[]} podcastEpisodes Not the models for new podcasts
* @param {import('./LibraryItemScanData')} libraryItemData
* @param {import('./LibraryScan')} libraryScan
* @param {import('./LibraryItemScanData')} libraryItemData
* @param {import('./LibraryScan')} libraryScan
* @param {string} [existingLibraryItemId]
* @returns {Promise<PodcastMetadataObject>}
*/
@ -364,8 +402,8 @@ class PodcastScanner {
}

/**
*
* @param {import('../models/LibraryItem')} libraryItem
*
* @param {import('../models/LibraryItem')} libraryItem
* @param {import('./LibraryScan')} libraryScan
* @returns {Promise}
*/
@ -399,41 +437,44 @@ class PodcastScanner {
explicit: !!libraryItem.media.explicit,
podcastType: libraryItem.media.podcastType
}
return fsExtra.writeFile(metadataFilePath, JSON.stringify(jsonObject, null, 2)).then(async () => {
// Add metadata.json to libraryFiles array if it is new
let metadataLibraryFile = libraryItem.libraryFiles.find(lf => lf.metadata.path === filePathToPOSIX(metadataFilePath))
if (storeMetadataWithItem) {
if (!metadataLibraryFile) {
const newLibraryFile = new LibraryFile()
await newLibraryFile.setDataFromPath(metadataFilePath, `metadata.json`)
metadataLibraryFile = newLibraryFile.toJSON()
libraryItem.libraryFiles.push(metadataLibraryFile)
} else {
const fileTimestamps = await getFileTimestampsWithIno(metadataFilePath)
if (fileTimestamps) {
metadataLibraryFile.metadata.mtimeMs = fileTimestamps.mtimeMs
metadataLibraryFile.metadata.ctimeMs = fileTimestamps.ctimeMs
metadataLibraryFile.metadata.size = fileTimestamps.size
metadataLibraryFile.ino = fileTimestamps.ino
return fsExtra
.writeFile(metadataFilePath, JSON.stringify(jsonObject, null, 2))
.then(async () => {
// Add metadata.json to libraryFiles array if it is new
let metadataLibraryFile = libraryItem.libraryFiles.find((lf) => lf.metadata.path === filePathToPOSIX(metadataFilePath))
if (storeMetadataWithItem) {
if (!metadataLibraryFile) {
const newLibraryFile = new LibraryFile()
await newLibraryFile.setDataFromPath(metadataFilePath, `metadata.json`)
metadataLibraryFile = newLibraryFile.toJSON()
libraryItem.libraryFiles.push(metadataLibraryFile)
} else {
const fileTimestamps = await getFileTimestampsWithIno(metadataFilePath)
if (fileTimestamps) {
metadataLibraryFile.metadata.mtimeMs = fileTimestamps.mtimeMs
metadataLibraryFile.metadata.ctimeMs = fileTimestamps.ctimeMs
metadataLibraryFile.metadata.size = fileTimestamps.size
metadataLibraryFile.ino = fileTimestamps.ino
}
}
const libraryItemDirTimestamps = await getFileTimestampsWithIno(libraryItem.path)
if (libraryItemDirTimestamps) {
libraryItem.mtime = libraryItemDirTimestamps.mtimeMs
libraryItem.ctime = libraryItemDirTimestamps.ctimeMs
let size = 0
libraryItem.libraryFiles.forEach((lf) => (size += !isNaN(lf.metadata.size) ? Number(lf.metadata.size) : 0))
libraryItem.size = size
}
}
const libraryItemDirTimestamps = await getFileTimestampsWithIno(libraryItem.path)
if (libraryItemDirTimestamps) {
libraryItem.mtime = libraryItemDirTimestamps.mtimeMs
libraryItem.ctime = libraryItemDirTimestamps.ctimeMs
let size = 0
libraryItem.libraryFiles.forEach((lf) => size += (!isNaN(lf.metadata.size) ? Number(lf.metadata.size) : 0))
libraryItem.size = size
}
}

libraryScan.addLog(LogLevel.DEBUG, `Success saving abmetadata to "${metadataFilePath}"`)
libraryScan.addLog(LogLevel.DEBUG, `Success saving abmetadata to "${metadataFilePath}"`)

return metadataLibraryFile
}).catch((error) => {
libraryScan.addLog(LogLevel.ERROR, `Failed to save json file at "${metadataFilePath}"`, error)
return null
})
return metadataLibraryFile
})
.catch((error) => {
libraryScan.addLog(LogLevel.ERROR, `Failed to save json file at "${metadataFilePath}"`, error)
return null
})
}
}
module.exports = new PodcastScanner()
module.exports = new PodcastScanner()
@ -1,6 +1,5 @@
const uuidv4 = require("uuid").v4
const uuidv4 = require('uuid').v4
const Logger = require('../Logger')
const { LogLevel } = require('../utils/constants')

class ScanLogger {
constructor() {
@ -44,20 +43,11 @@ class ScanLogger {
this.elapsed = this.finishedAt - this.startedAt
}

getLogLevelString(level) {
for (const key in LogLevel) {
if (LogLevel[key] === level) {
return key
}
}
return 'UNKNOWN'
}

addLog(level, ...args) {
const logObj = {
timestamp: (new Date()).toISOString(),
timestamp: new Date().toISOString(),
message: args.join(' '),
levelName: this.getLogLevelString(level),
levelName: Logger.getLogLevelString(level),
level
}

@ -67,4 +57,4 @@ class ScanLogger {
this.logs.push(logObj)
}
}
module.exports = ScanLogger
module.exports = ScanLogger
@ -48,13 +48,7 @@ class Scanner {
let updatePayload = {}
let hasUpdated = false

let existingAuthors = [] // Used for checking if authors or series are now empty
let existingSeries = []

if (libraryItem.isBook) {
existingAuthors = libraryItem.media.authors.map((a) => a.id)
existingSeries = libraryItem.media.series.map((s) => s.id)

const searchISBN = options.isbn || libraryItem.media.isbn
const searchASIN = options.asin || libraryItem.media.asin

@ -132,7 +126,7 @@ class Scanner {

await libraryItem.saveMetadataFile()

SocketAuthority.emitter('item_updated', libraryItem.toOldJSONExpanded())
SocketAuthority.libraryItemEmitter('item_updated', libraryItem)
}

return {
@ -189,8 +189,14 @@ class CbzStreamZipComicBookExtractor extends AbstractComicBookExtractor {
}

close() {
this.archive?.close()
Logger.debug(`[CbzStreamZipComicBookExtractor] Closed comic book "${this.comicPath}"`)
this.archive
?.close()
.then(() => {
Logger.debug(`[CbzStreamZipComicBookExtractor] Closed comic book "${this.comicPath}"`)
})
.catch((error) => {
Logger.error(`[CbzStreamZipComicBookExtractor] Failed to close comic book "${this.comicPath}"`, error)
})
}
}

@ -99,7 +99,7 @@ module.exports.resizeImage = resizeImage
/**
*
* @param {import('../objects/PodcastEpisodeDownload')} podcastEpisodeDownload
* @returns
* @returns {Promise<{success: boolean, isFfmpegError?: boolean}>}
*/
module.exports.downloadPodcastEpisode = (podcastEpisodeDownload) => {
return new Promise(async (resolve) => {
@ -110,12 +110,16 @@ module.exports.downloadPodcastEpisode = (podcastEpisodeDownload) => {
headers: {
'User-Agent': 'audiobookshelf (+https://audiobookshelf.org)'
},
timeout: 30000
timeout: global.PodcastDownloadTimeout
}).catch((error) => {
Logger.error(`[ffmpegHelpers] Failed to download podcast episode with url "${podcastEpisodeDownload.url}"`, error)
return null
})
if (!response) return resolve(false)
if (!response) {
return resolve({
success: false
})
}

/** @type {import('../libs/fluentFfmpeg/index').FfmpegCommand} */
const ffmpeg = Ffmpeg(response.data)
@ -177,7 +181,10 @@ module.exports.downloadPodcastEpisode = (podcastEpisodeDownload) => {
if (stderrLines.length) {
Logger.error(`Full stderr dump for episode url "${podcastEpisodeDownload.url}": ${stderrLines.join('\n')}`)
}
resolve(false)
resolve({
success: false,
isFfmpegError: true
})
})
ffmpeg.on('progress', (progress) => {
let progressPercent = 0
@ -189,7 +196,9 @@ module.exports.downloadPodcastEpisode = (podcastEpisodeDownload) => {
})
ffmpeg.on('end', () => {
Logger.debug(`[FfmpegHelpers] downloadPodcastEpisode: Complete`)
resolve(podcastEpisodeDownload.targetPath)
resolve({
success: true
})
})
ffmpeg.run()
})
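A hypothetical caller sketch (surrounding names assumed, not from this commit) showing how the new result object lets callers distinguish ffmpeg failures from download failures:

const result = await downloadPodcastEpisode(podcastEpisodeDownload)
if (!result.success) {
// result.isFfmpegError is only set when the download itself worked but ffmpeg failed,
// so a caller could retry differently or surface a more specific error message here
}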
@ -131,6 +131,40 @@ async function readTextFile(path) {
}
module.exports.readTextFile = readTextFile

/**
* Check if file or directory should be ignored. Returns a string of the reason to ignore, or null if not ignored
*
* @param {string} path
* @returns {string}
*/
module.exports.shouldIgnoreFile = (path) => {
// Check if directory or file name starts with "."
if (Path.basename(path).startsWith('.')) {
return 'dotfile'
}
if (path.split('/').find((p) => p.startsWith('.'))) {
return 'dotpath'
}

// If these strings exist anywhere in the filename or directory name, ignore. Vendor specific hidden directories
const includeAnywhereIgnore = ['@eaDir']
const filteredInclude = includeAnywhereIgnore.filter((str) => path.includes(str))
if (filteredInclude.length) {
return `${filteredInclude[0]} directory`
}

const extensionIgnores = ['.part', '.tmp', '.crdownload', '.download', '.bak', '.old', '.temp', '.tempfile', '.tempfile~']

// Check extension
if (extensionIgnores.includes(Path.extname(path).toLowerCase())) {
// Return the extension that is ignored
return `${Path.extname(path)} file`
}

// Should not ignore this file or directory
return null
}
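A usage sketch of shouldIgnoreFile with illustrative paths (the returned string is the human-readable reason; null means keep the file):

shouldIgnoreFile('.DS_Store') // => 'dotfile'
shouldIgnoreFile('books/.stfolder/track01.mp3') // => 'dotpath'
shouldIgnoreFile('books/@eaDir/thumb.jpg') // => '@eaDir directory'
shouldIgnoreFile('books/track01.part') // => '.part file'
shouldIgnoreFile('books/track01.mp3') // => null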

/**
* @typedef FilePathItem
* @property {string} name - file name e.g. "audiofile.m4b"
@ -147,7 +181,7 @@ module.exports.readTextFile = readTextFile
* @param {string} [relPathToReplace]
* @returns {FilePathItem[]}
*/
async function recurseFiles(path, relPathToReplace = null) {
module.exports.recurseFiles = async (path, relPathToReplace = null) => {
path = filePathToPOSIX(path)
if (!path.endsWith('/')) path = path + '/'

@ -197,14 +231,10 @@ async function recurseFiles(path, relPathToReplace = null) {
return false
}

if (item.extension === '.part') {
Logger.debug(`[fileUtils] Ignoring .part file "${relpath}"`)
return false
}

// Ignore any file if a directory or the filename starts with "."
if (relpath.split('/').find((p) => p.startsWith('.'))) {
Logger.debug(`[fileUtils] Ignoring path has . "${relpath}"`)
// Check for ignored extensions or directories
const shouldIgnore = this.shouldIgnoreFile(relpath)
if (shouldIgnore) {
Logger.debug(`[fileUtils] Ignoring ${shouldIgnore} - "${relpath}"`)
return false
}

@ -235,7 +265,6 @@ async function recurseFiles(path, relPathToReplace = null) {

return list
}
module.exports.recurseFiles = recurseFiles

/**
*
@ -286,10 +315,23 @@ module.exports.downloadFile = (url, filepath, contentTypeFilter = null) => {
return reject(new Error(`Invalid content type "${response.headers?.['content-type'] || ''}"`))
}

const totalSize = parseInt(response.headers['content-length'], 10)
let downloadedSize = 0

// Write to filepath
const writer = fs.createWriteStream(filepath)
response.data.pipe(writer)

let lastProgress = 0
response.data.on('data', (chunk) => {
downloadedSize += chunk.length
const progress = totalSize ? Math.round((downloadedSize / totalSize) * 100) : 0
if (progress >= lastProgress + 5) {
Logger.debug(`[fileUtils] File "${Path.basename(filepath)}" download progress: ${progress}% (${downloadedSize}/${totalSize} bytes)`)
lastProgress = progress
}
})

writer.on('finish', resolve)
writer.on('error', reject)
})
@ -320,6 +362,9 @@ module.exports.sanitizeFilename = (filename, colonReplacement = ' - ') => {
return false
}

// Normalize the string first to ensure consistent byte calculations
filename = filename.normalize('NFC')

// Most file systems use number of bytes for max filename
// to support most filesystems we will use max of 255 bytes in utf-16
// Ref: https://doc.owncloud.com/server/next/admin_manual/troubleshooting/path_filename_length.html
@ -348,8 +393,11 @@ module.exports.sanitizeFilename = (filename, colonReplacement = ' - ') => {
const ext = Path.extname(sanitized) // separate out file extension
const basename = Path.basename(sanitized, ext)
const extByteLength = Buffer.byteLength(ext, 'utf16le')

const basenameByteLength = Buffer.byteLength(basename, 'utf16le')
if (basenameByteLength + extByteLength > MAX_FILENAME_BYTES) {
Logger.debug(`[fileUtils] Filename "${filename}" is too long (${basenameByteLength + extByteLength} bytes), trimming basename to ${MAX_FILENAME_BYTES - extByteLength} bytes.`)

const MaxBytesForBasename = MAX_FILENAME_BYTES - extByteLength
let totalBytes = 0
let trimmedBasename = ''
@ -365,6 +413,10 @@ module.exports.sanitizeFilename = (filename, colonReplacement = ' - ') => {
sanitized = trimmedBasename + ext
}

if (filename !== sanitized) {
Logger.debug(`[fileUtils] Sanitized filename "${filename}" to "${sanitized}" (${Buffer.byteLength(sanitized, 'utf16le')} bytes)`)
}

return sanitized
}
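To illustrate the byte accounting above (assuming the 255-byte UTF-16 limit referenced in the comments): every UTF-16 code unit costs 2 bytes, so even a 150-character basename is over the limit:

Buffer.byteLength('a'.repeat(150), 'utf16le') // => 300
Buffer.byteLength('.mp3', 'utf16le') // => 8
// 300 + 8 > 255, so the basename is trimmed until it fits in 255 - 8 = 247 bytes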
@ -1,11 +1,19 @@
const sanitizeHtml = require('../libs/sanitizeHtml')
const { entities } = require("./htmlEntities");
const { entities } = require('./htmlEntities')

/**
*
* @param {string} html
* @returns {string}
* @throws {Error} if input is not a string
*/
function sanitize(html) {
if (typeof html !== 'string') {
throw new Error('sanitizeHtml: input must be a string')
}

const sanitizerOptions = {
allowedTags: [
'p', 'ol', 'ul', 'li', 'a', 'strong', 'em', 'del', 'br'
],
allowedTags: ['p', 'ol', 'ul', 'li', 'a', 'strong', 'em', 'del', 'br', 'b', 'i'],
disallowedTagsMode: 'discard',
allowedAttributes: {
a: ['href', 'name', 'target']
@ -34,6 +42,6 @@ function decodeHTMLEntities(strToDecode) {
if (entity in entities) {
return entities[entity]
}
return entity;
return entity
})
}
@ -112,7 +112,7 @@ function secondsToTimestamp(seconds, includeMs = false, alwaysIncludeHours = fal
var ms = _seconds - Math.floor(seconds)
_seconds = Math.floor(_seconds)

var msString = '.' + (includeMs ? ms.toFixed(3) : '0.0').split('.')[1]
const msString = includeMs ? '.' + ms.toFixed(3).split('.')[1] : ''
if (alwaysIncludeHours) {
return `${_hours.toString().padStart(2, '0')}:${_minutes.toString().padStart(2, '0')}:${_seconds.toString().padStart(2, '0')}${msString}`
}
@ -243,3 +243,29 @@ module.exports.isValidASIN = (str) => {
if (!str || typeof str !== 'string') return false
return /^[A-Z0-9]{10}$/.test(str)
}

/**
* Convert timestamp to seconds
* @example "01:00:00" => 3600
* @example "01:00" => 60
* @example "01" => 1
*
* @param {string} timestamp
* @returns {number}
*/
module.exports.timestampToSeconds = (timestamp) => {
if (typeof timestamp !== 'string') {
return null
}
const parts = timestamp.split(':').map(Number)
if (parts.some(isNaN)) {
return null
} else if (parts.length === 1) {
return parts[0]
} else if (parts.length === 2) {
return parts[0] * 60 + parts[1]
} else if (parts.length === 3) {
return parts[0] * 3600 + parts[1] * 60 + parts[2]
}
return null
}
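A usage sketch mirroring the @example annotations above:

timestampToSeconds('01:00:00') // => 3600
timestampToSeconds('02:30') // => 150
timestampToSeconds('45') // => 45
timestampToSeconds('bad:input') // => null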
@ -43,7 +43,9 @@ async function parse(ebookFile) {
archive = createComicBookExtractor(comicPath)
await archive.open()

const filePaths = await archive.getFilePaths()
const filePaths = await archive.getFilePaths().catch((error) => {
Logger.error(`[parseComicMetadata] Failed to get file paths from comic at "${comicPath}"`, error)
})

// Sort the file paths in a natural order to get the first image
filePaths.sort((a, b) => {
@ -56,7 +56,9 @@ async function extractCoverImage(epubPath, epubImageFilepath, outputCoverPath) {
return false
})

await zip.close()
await zip.close().catch((error) => {
Logger.error(`[parseEpubMetadata] Failed to close zip`, error)
})

return success
}
@ -105,7 +107,8 @@ async function parse(ebookFile) {

// Attempt to find filepath to cover image:
// Metadata may include <meta name="cover" content="id"/> where content is the id of the cover image in the manifest
// Otherwise the first image in the manifest is used as the cover image
// Otherwise find image in the manifest with cover-image property set
// As a fallback the first image in the manifest is used as the cover image
let packageMetadata = packageJson.package?.metadata
if (Array.isArray(packageMetadata)) {
packageMetadata = packageMetadata[0]
@ -116,6 +119,9 @@ async function parse(ebookFile) {
if (metaCoverId) {
manifestFirstImage = packageJson.package?.manifest?.[0]?.item?.find((item) => item.$?.id === metaCoverId)
}
if (!manifestFirstImage) {
manifestFirstImage = packageJson.package?.manifest?.[0]?.item?.find((item) => item.$?.['properties']?.split(' ')?.includes('cover-image'))
}
if (!manifestFirstImage) {
manifestFirstImage = packageJson.package?.manifest?.[0]?.item?.find((item) => item.$?.['media-type']?.startsWith('image/'))
}
@ -35,11 +35,18 @@ module.exports.nameToLastFirst = (firstLast) => {
return `${nameObj.last_name}, ${nameObj.first_name}`
}

// Handle any name string
/**
* Parses a name string into an array of names
*
* @param {string} nameString - The name string to parse
* @returns {{ names: string[] }} Array of names
*/
module.exports.parse = (nameString) => {
if (!nameString) return null

var splitNames = []
let splitNames = []
const isCommaSeparated = nameString.includes(',')

// Example &LF: Friedman, Milton & Friedman, Rose
if (nameString.includes('&')) {
nameString.split('&').forEach((asa) => (splitNames = splitNames.concat(asa.split(','))))

@ -59,17 +66,18 @@ module.exports.parse = (nameString) => {
}
}

var names = []
let names = []

// 1 name FIRST LAST
if (splitNames.length === 1) {
names.push(parseName(nameString))
} else {
var firstChunkIsALastName = checkIsALastName(splitNames[0])
var isEvenNum = splitNames.length % 2 === 0
// Determines whether this is formatted as last, first or first last (only if using comma separator)
// Example: "Smith; James Jones" -> ["Smith", "James Jones"]
let firstChunkIsALastName = !isCommaSeparated ? false : checkIsALastName(splitNames[0])
let isEvenNum = splitNames.length % 2 === 0

if (!isEvenNum && firstChunkIsALastName) {
// console.error('Multi-name LAST,FIRST entry has a straggler (could be roman numerals or a suffix), ignore it')
splitNames = splitNames.slice(0, splitNames.length - 1)
}
@ -22,11 +22,22 @@ function parseCreators(metadata) {
Object.keys(c['$'])
.find((key) => key.startsWith('xmlns:'))
?.split(':')[1] || 'opf'
return {
const creator = {
value: c['_'],
role: c['$'][`${namespace}:role`] || null,
fileAs: c['$'][`${namespace}:file-as`] || null
}

const id = c['$']['id']
if (id && metadata.meta.refines?.some((r) => r.refines === `#${id}`)) {
const creatorMeta = metadata.meta.refines.filter((r) => r.refines === `#${id}`)
if (creatorMeta) {
creator.role = creatorMeta.find((r) => r.property === 'role')?.value || creator.role || null
creator.fileAs = creatorMeta.find((r) => r.property === 'file-as')?.value || creator.fileAs || null
}
}

return creator
})
}
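For illustration, a hypothetical parsed OPF shape (not from this commit) showing where the refines lookup above gets its data; an EPUB 3 meta such as <meta refines="#creator1" property="role">aut</meta> arrives (after xmlToJSON, per the refines handling further below) roughly as:

const metadata = {
meta: {
refines: [{ value: 'aut', refines: '#creator1', property: 'role' }]
}
}
// A creator element with id="creator1" then gets creator.role = 'aut' from the lookup above.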
@ -187,7 +198,6 @@ module.exports.parseOpfMetadataJson = (json) => {
const prefix = packageKey.split(':').shift()
let metadata = prefix ? json[packageKey][`${prefix}:metadata`] || json[packageKey].metadata : json[packageKey].metadata
if (!metadata) return null

if (Array.isArray(metadata)) {
if (!metadata.length) return null
metadata = metadata[0]
@ -198,12 +208,22 @@ module.exports.parseOpfMetadataJson = (json) => {
metadata.meta = {}
if (metadataMeta?.length) {
metadataMeta.forEach((meta) => {
if (meta && meta['$'] && meta['$'].name) {
if (meta?.['$']?.name) {
metadata.meta[meta['$'].name] = [meta['$'].content || '']
} else if (meta?.['$']?.refines) {
// https://www.w3.org/TR/epub-33/#sec-meta-elem

if (!metadata.meta.refines) {
metadata.meta.refines = []
}
metadata.meta.refines.push({
value: meta._,
refines: meta['$'].refines,
property: meta['$'].property
})
}
})
}

const creators = parseCreators(metadata)
const authors = (fetchCreators(creators, 'aut') || []).map((au) => au?.trim()).filter((au) => au)
const narrators = (fetchNarrators(creators, metadata) || []).map((nrt) => nrt?.trim()).filter((nrt) => nrt)

@ -227,5 +247,6 @@ module.exports.parseOpfMetadataJson = (json) => {
module.exports.parseOpfMetadataXML = async (xml) => {
const json = await xmlToJSON(xml)
if (!json) return null

return this.parseOpfMetadataJson(json)
}
@ -1,9 +1,17 @@
const axios = require('axios')
const ssrfFilter = require('ssrf-req-filter')
const Logger = require('../Logger')
const { xmlToJSON, levenshteinDistance } = require('./index')
const { xmlToJSON, levenshteinDistance, timestampToSeconds } = require('./index')
const htmlSanitizer = require('../utils/htmlSanitizer')

/**
* @typedef RssPodcastChapter
* @property {number} id
* @property {string} title
* @property {number} start
* @property {number} end
*/

/**
* @typedef RssPodcastEpisode
* @property {string} title

@ -22,6 +30,7 @@ const htmlSanitizer = require('../utils/htmlSanitizer')
* @property {string} guid
* @property {string} chaptersUrl
* @property {string} chaptersType
* @property {RssPodcastChapter[]} chapters
*/

/**

@ -52,6 +61,29 @@ function extractFirstArrayItem(json, key) {
return json[key][0]
}

function extractStringOrStringify(json) {
try {
if (typeof json[Object.keys(json)[0]]?.[0] === 'string') {
return json[Object.keys(json)[0]][0]
}
// Handles case where html was included without being wrapped in CDATA
return JSON.stringify(value)
} catch {
return ''
}
}

function extractFirstArrayItemString(json, key) {
const item = extractFirstArrayItem(json, key)
if (!item) return ''
if (typeof item === 'object') {
if (item?.['_'] && typeof item['_'] === 'string') return item['_']

return extractStringOrStringify(item)
}
return typeof item === 'string' ? item : ''
}

function extractImage(channel) {
if (!channel.image || !channel.image.url || !channel.image.url.length) {
if (!channel['itunes:image'] || !channel['itunes:image'].length || !channel['itunes:image'][0]['$']) {

@ -101,7 +133,7 @@ function extractPodcastMetadata(channel) {
}

if (channel['description']) {
const rawDescription = extractFirstArrayItem(channel, 'description') || ''
const rawDescription = extractFirstArrayItemString(channel, 'description')
metadata.description = htmlSanitizer.sanitize(rawDescription.trim())
metadata.descriptionPlain = htmlSanitizer.stripAllTags(rawDescription.trim())
}
@ -118,15 +150,19 @@ function extractPodcastMetadata(channel) {

function extractEpisodeData(item) {
// Episode must have url
if (!item.enclosure?.[0]?.['$']?.url) {
let enclosure

if (item.enclosure?.[0]?.['$']?.url) {
enclosure = item.enclosure[0]['$']
} else if (item['media:content']?.find((c) => c?.['$']?.url && (c?.['$']?.type ?? '').startsWith('audio'))) {
enclosure = item['media:content'].find((c) => (c['$']?.type ?? '').startsWith('audio'))['$']
} else {
Logger.error(`[podcastUtils] Invalid podcast episode data`)
return null
}

const episode = {
enclosure: {
...item.enclosure[0]['$']
}
enclosure: enclosure
}

episode.enclosure.url = episode.enclosure.url.trim()

@ -145,7 +181,8 @@ function extractEpisodeData(item) {

// Supposed to be the plaintext description but not always followed
if (item['description']) {
const rawDescription = extractFirstArrayItem(item, 'description') || ''
const rawDescription = extractFirstArrayItemString(item, 'description')

if (!episode.description) episode.description = htmlSanitizer.sanitize(rawDescription.trim())
episode.descriptionPlain = htmlSanitizer.stripAllTags(rawDescription.trim())
}
@ -175,16 +212,55 @@ function extractEpisodeData(item) {
const arrayFields = ['title', 'itunes:episodeType', 'itunes:season', 'itunes:episode', 'itunes:author', 'itunes:duration', 'itunes:explicit', 'itunes:subtitle']
arrayFields.forEach((key) => {
const cleanKey = key.split(':').pop()
let value = extractFirstArrayItem(item, key)
if (value?.['_']) value = value['_']
episode[cleanKey] = value
episode[cleanKey] = extractFirstArrayItemString(item, key)
})

// Extract psc:chapters if duration is set
let episodeDuration = !isNaN(episode.duration) ? timestampToSeconds(episode.duration) : null
if (item['psc:chapters']?.[0]?.['psc:chapter']?.length && episodeDuration) {
// Example chapter:
// {"id":0,"start":0,"end":43.004286,"title":"chapter 1"}

const cleanedChapters = item['psc:chapters'][0]['psc:chapter'].map((chapter, index) => {
if (!chapter['$']?.title || !chapter['$']?.start || typeof chapter['$']?.start !== 'string' || typeof chapter['$']?.title !== 'string') {
return null
}

const start = timestampToSeconds(chapter['$'].start)
if (start === null) {
return null
}

return {
id: index,
title: chapter['$'].title,
start
}
})

if (cleanedChapters.some((chapter) => !chapter)) {
Logger.warn(`[podcastUtils] Invalid chapter data for ${episode.enclosure.url}`)
} else {
episode.chapters = cleanedChapters.map((chapter, index) => {
const nextChapter = cleanedChapters[index + 1]
const end = nextChapter ? nextChapter.start : episodeDuration
return {
id: chapter.id,
title: chapter.title,
start: chapter.start,
end
}
})
}
}

return episode
}

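An illustration with hypothetical values of how the mapping above derives chapter end times from the next chapter's start (the last chapter ends at the episode duration):

const cleanedChapters = [
{ id: 0, title: 'Intro', start: 0 },
{ id: 1, title: 'Interview', start: 600 },
{ id: 2, title: 'Outro', start: 1200 }
]
const episodeDuration = 1800
cleanedChapters.map((c, i) => ({ ...c, end: cleanedChapters[i + 1] ? cleanedChapters[i + 1].start : episodeDuration }))
// => ends: 600, 1200, 1800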
function cleanEpisodeData(data) {
|
||||
const pubJsDate = data.pubDate ? new Date(data.pubDate) : null
|
||||
const publishedAt = pubJsDate && !isNaN(pubJsDate) ? pubJsDate.valueOf() : null
|
||||
|
||||
return {
|
||||
title: data.title,
|
||||
subtitle: data.subtitle || '',
|
||||
|
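Since psc:chapter entries carry only start timestamps, each chapter's end is derived from the next chapter's start, and the episode duration closes the last one. A worked example with invented values:

// Worked example (invented data) of the end-time derivation above.
const cleanedChapters = [
  { id: 0, title: 'Intro', start: 0 },
  { id: 1, title: 'Interview', start: 43 },
  { id: 2, title: 'Outro', start: 1800 }
]
const episodeDuration = 1900 // seconds, e.g. from itunes:duration

const chapters = cleanedChapters.map((chapter, index) => {
  const nextChapter = cleanedChapters[index + 1]
  const end = nextChapter ? nextChapter.start : episodeDuration
  return { ...chapter, end }
})
// resulting ends: 43, 1800, 1900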
@@ -201,7 +277,8 @@ function cleanEpisodeData(data)
     enclosure: data.enclosure,
     guid: data.guid || null,
     chaptersUrl: data.chaptersUrl || null,
-    chaptersType: data.chaptersType || null
+    chaptersType: data.chaptersType || null,
+    chapters: data.chapters || []
   }
 }

@@ -281,10 +358,11 @@ module.exports.getPodcastFeed = (feedUrl, excludeEpisodeMetadata = false) => {
   return axios({
     url: feedUrl,
     method: 'GET',
-    timeout: 12000,
+    timeout: global.PodcastDownloadTimeout,
     responseType: 'arraybuffer',
     headers: {
       Accept: 'application/rss+xml, application/xhtml+xml, application/xml, */*;q=0.8',
       'Accept-Encoding': 'gzip, compress, deflate',
       'User-Agent': userAgent
     },
     httpAgent: global.DisableSsrfRequestFilter?.(feedUrl) ? null : ssrfFilter(feedUrl),

@@ -316,6 +394,14 @@ module.exports.getPodcastFeed = (feedUrl, excludeEpisodeMetadata = false) => {
       return payload.podcast
     })
     .catch((error) => {
+      // Check for failures due to redirecting from http to https. If original url was http, upgrade to https and try again
+      if (error.code === 'ERR_FR_REDIRECTION_FAILURE' && error.cause.code === 'ERR_INVALID_PROTOCOL') {
+        if (feedUrl.startsWith('http://') && error.request._options.protocol === 'https:') {
+          Logger.info('Redirection from http to https detected. Upgrading Request', error.request._options.href)
+          feedUrl = feedUrl.replace('http://', 'https://')
+          return this.getPodcastFeed(feedUrl, excludeEpisodeMetadata)
+        }
+      }
       Logger.error('[podcastUtils] getPodcastFeed Error', error)
       return null
     })
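From the caller's side the retry is transparent: an http:// feed URL that redirects to https:// now resolves on the second attempt instead of failing. A usage sketch, with a hypothetical feed URL and result fields based on this file's parser:

// Usage sketch: the hypothetical http:// feed redirects to https://;
// getPodcastFeed upgrades the URL once and retries before giving up.
const podcastUtils = require('./server/utils/podcastUtils')

podcastUtils.getPodcastFeed('http://example.com/feed.xml').then((podcast) => {
  if (!podcast) return console.log('feed could not be fetched or parsed')
  console.log(podcast.metadata?.title, podcast.episodes?.length)
})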
@@ -143,6 +143,7 @@ function parseChapters(_chapters)
     .map((chap) => {
       let title = chap['TAG:title'] || chap.title || ''
+      if (!title && chap.tags?.title) title = chap.tags.title
       title = title.trim()

       const timebase = chap.time_base?.includes('/') ? Number(chap.time_base.split('/')[1]) : 1
       const start = !isNullOrNaN(chap.start_time) ? Number(chap.start_time) : !isNullOrNaN(chap.start) ? Number(chap.start) / timebase : 0
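For context, the shapes this normalization handles, with representative (invented) ffprobe chapter JSON: some muxers put the title under tags.title, others under a flat 'TAG:title' key, and the start may arrive in time_base units rather than seconds.

// Representative ffprobe chapter objects (values invented for illustration):
const chapters = [
  { id: 0, time_base: '1/1000', start: 0, start_time: '0.000000', tags: { title: 'Intro' } },
  { id: 1, time_base: '1/1000', start: 43004, 'TAG:title': 'Chapter 1' } // no start_time
]

for (const chap of chapters) {
  let title = chap['TAG:title'] || chap.title || ''
  if (!title && chap.tags?.title) title = chap.tags.title
  const timebase = chap.time_base?.includes('/') ? Number(chap.time_base.split('/')[1]) : 1
  // simplified: the real code guards with isNullOrNaN() instead
  const start = chap.start_time !== undefined ? Number(chap.start_time) : Number(chap.start) / timebase
  console.log(title.trim(), start) // "Intro" 0, then "Chapter 1" 43.004
}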
 41  server/utils/profiler.js  Normal file
@@ -0,0 +1,41 @@
+const { performance, createHistogram } = require('perf_hooks')
+const util = require('util')
+const Logger = require('../Logger')
+
+const histograms = new Map()
+
+function profile(asyncFunc, isFindQuery = true, funcName = asyncFunc.name) {
+  if (!histograms.has(funcName)) {
+    const histogram = createHistogram()
+    histogram.values = []
+    histograms.set(funcName, histogram)
+  }
+  const histogram = histograms.get(funcName)
+
+  return async (...args) => {
+    if (isFindQuery) {
+      const findOptions = args[0]
+      Logger.info(`[${funcName}] findOptions:`, util.inspect(findOptions, { depth: null }))
+      findOptions.logging = (query, time) => Logger.info(`[${funcName}] ${query} Elapsed time: ${time}ms`)
+      findOptions.benchmark = true
+    }
+    const start = performance.now()
+    try {
+      const result = await asyncFunc(...args)
+      return result
+    } catch (error) {
+      Logger.error(`[${funcName}] failed`)
+      throw error
+    } finally {
+      const end = performance.now()
+      const duration = Math.round(end - start)
+      histogram.record(duration)
+      histogram.values.push(duration)
+      Logger.info(`[${funcName}] duration: ${duration}ms`)
+      Logger.info(`[${funcName}] histogram values:`, histogram.values)
+      Logger.info(`[${funcName}] histogram:`, histogram)
+    }
+  }
+}
+
+module.exports = { profile }
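A minimal sketch of how the query helpers below opt into this profiler; the explicit function name argument is a choice made for this sketch, not required by the module:

// Sketch: conditionally profile a Sequelize model method.
// bind() keeps the model as `this` inside the wrapped call.
const { profile } = require('./server/utils/profiler')

function profiledCount(Database) {
  return process.env.QUERY_PROFILING
    ? profile(Database.bookModel.count.bind(Database.bookModel), true, 'bookModel.count')
    : Database.bookModel.count.bind(Database.bookModel)
}

// const count = await profiledCount(Database)({ where: { /* ... */ } })
// Caveat: Node's histogram.record() rejects values below 1, so a call
// that rounds to 0ms would throw inside the finally block.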
@@ -167,5 +167,51 @@ module.exports = {
     topNarrators,
     topGenres
   }
-  }
+  },
+
+  /**
+   * Get total file size and number of items for books and podcasts
+   *
+   * @typedef {Object} SizeObject
+   * @property {number} totalSize
+   * @property {number} numItems
+   *
+   * @returns {Promise<{books: SizeObject, podcasts: SizeObject, total: SizeObject}>}
+   */
+  async getTotalSize() {
+    const [mediaTypeStats] = await Database.sequelize.query(`SELECT li.mediaType, SUM(li.size) AS totalSize, COUNT(*) AS numItems FROM libraryItems li group by li.mediaType;`)
+    const bookStats = mediaTypeStats.find((m) => m.mediaType === 'book')
+    const podcastStats = mediaTypeStats.find((m) => m.mediaType === 'podcast')
+
+    return {
+      books: {
+        totalSize: bookStats?.totalSize || 0,
+        numItems: bookStats?.numItems || 0
+      },
+      podcasts: {
+        totalSize: podcastStats?.totalSize || 0,
+        numItems: podcastStats?.numItems || 0
+      },
+      total: {
+        totalSize: (bookStats?.totalSize || 0) + (podcastStats?.totalSize || 0),
+        numItems: (bookStats?.numItems || 0) + (podcastStats?.numItems || 0)
+      }
+    }
+  },
+
+  /**
+   * Get total number of audio files for books and podcasts
+   *
+   * @returns {Promise<{numBookAudioFiles: number, numPodcastAudioFiles: number, numAudioFiles: number}>}
+   */
+  async getNumAudioFiles() {
+    const [numBookAudioFilesRow] = await Database.sequelize.query(`SELECT SUM(json_array_length(b.audioFiles)) AS numAudioFiles FROM books b;`)
+    const numBookAudioFiles = numBookAudioFilesRow[0]?.numAudioFiles || 0
+    const numPodcastAudioFiles = await Database.podcastEpisodeModel.count()
+    return {
+      numBookAudioFiles,
+      numPodcastAudioFiles,
+      numAudioFiles: numBookAudioFiles + numPodcastAudioFiles
+    }
+  }
 }
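For reference, a sketch of consuming the two helpers above; the numbers are invented and the module path is assumed from context:

// Sketch: the module path and values below are assumed for illustration.
const adminStats = require('./server/utils/queries/adminStats')

async function logServerStats() {
  const size = await adminStats.getTotalSize()
  // -> { books: { totalSize: 123456789, numItems: 42 },
  //      podcasts: { totalSize: 9876543, numItems: 7 },
  //      total: { totalSize: 133333332, numItems: 49 } }
  const files = await adminStats.getNumAudioFiles()
  // -> { numBookAudioFiles: 120, numPodcastAudioFiles: 300, numAudioFiles: 420 }
  console.log(size.total.totalSize, files.numAudioFiles)
}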
@@ -4,6 +4,7 @@ const Database = require('../../Database')
 const libraryItemsBookFilters = require('./libraryItemsBookFilters')
 const libraryItemsPodcastFilters = require('./libraryItemsPodcastFilters')
 const { createNewSortInstance } = require('../../libs/fastSort')
+const { profile } = require('../../utils/profiler')
 const naturalSort = createNewSortInstance({
   comparer: new Intl.Collator(undefined, { numeric: true, sensitivity: 'base' }).compare
 })

@@ -474,7 +475,8 @@ module.exports = {
     // Check how many podcasts are in library to determine if we need to load all of the data
     // This is done to handle the edge case of podcasts having been deleted and not having
     // an updatedAt timestamp to trigger a reload of the filter data
-    const podcastCountFromDatabase = await Database.podcastModel.count({
+    const podcastModelCount = process.env.QUERY_PROFILING ? profile(Database.podcastModel.count.bind(Database.podcastModel)) : Database.podcastModel.count.bind(Database.podcastModel)
+    const podcastCountFromDatabase = await podcastModelCount({
       include: {
         model: Database.libraryItemModel,
         attributes: [],

@@ -489,7 +491,7 @@
     // data was loaded. If so, we can skip loading all of the data.
     // Because many items could change, just check the count of items instead
     // of actually loading the data twice
-    const changedPodcasts = await Database.podcastModel.count({
+    const changedPodcasts = await podcastModelCount({
       include: {
         model: Database.libraryItemModel,
         attributes: [],

@@ -520,7 +522,8 @@
     }

     // Something has changed in the podcasts table, so reload all of the filter data for library
-    const podcasts = await Database.podcastModel.findAll({
+    const findAll = process.env.QUERY_PROFILING ? profile(Database.podcastModel.findAll.bind(Database.podcastModel)) : Database.podcastModel.findAll.bind(Database.podcastModel)
+    const podcasts = await findAll({
       include: {
         model: Database.libraryItemModel,
         attributes: [],
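The QUERY_PROFILING ternary repeats for every wrapped method; a small helper could centralize the pattern (a sketch only, not part of the commit):

// Sketch: factoring out the repeated process.env.QUERY_PROFILING ternary.
const { profile } = require('../../utils/profiler')

function maybeProfile(model, methodName) {
  const bound = model[methodName].bind(model)
  return process.env.QUERY_PROFILING ? profile(bound, true, `${model.name}.${methodName}`) : bound
}

// const podcastModelCount = maybeProfile(Database.podcastModel, 'count')
// const podcastCountFromDatabase = await podcastModelCount({ include: { /* ... */ } })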
@@ -4,6 +4,9 @@ const Logger = require('../../Logger')
 const authorFilters = require('./authorFilters')

 const ShareManager = require('../../managers/ShareManager')
+const { profile } = require('../profiler')
+const stringifySequelizeQuery = require('../stringifySequelizeQuery')
+const countCache = new Map()

 module.exports = {
   /**

@@ -261,18 +264,18 @@ module.exports = {
     } else if (sortBy === 'media.metadata.publishedYear') {
       return [[Sequelize.literal(`CAST(\`book\`.\`publishedYear\` AS INTEGER)`), dir]]
     } else if (sortBy === 'media.metadata.authorNameLF') {
-      return [[Sequelize.literal('author_name COLLATE NOCASE'), dir]]
+      return [[Sequelize.literal('`libraryItem`.`authorNamesLastFirst` COLLATE NOCASE'), dir]]
     } else if (sortBy === 'media.metadata.authorName') {
-      return [[Sequelize.literal('author_name COLLATE NOCASE'), dir]]
+      return [[Sequelize.literal('`libraryItem`.`authorNamesFirstLast` COLLATE NOCASE'), dir]]
     } else if (sortBy === 'media.metadata.title') {
       if (collapseseries) {
         return [[Sequelize.literal('display_title COLLATE NOCASE'), dir]]
       }

       if (global.ServerSettings.sortingIgnorePrefix) {
-        return [[Sequelize.literal('titleIgnorePrefix COLLATE NOCASE'), dir]]
+        return [[Sequelize.literal('`libraryItem`.`titleIgnorePrefix` COLLATE NOCASE'), dir]]
       } else {
-        return [[Sequelize.literal('`book`.`title` COLLATE NOCASE'), dir]]
+        return [[Sequelize.literal('`libraryItem`.`title` COLLATE NOCASE'), dir]]
       }
     } else if (sortBy === 'sequence') {
       const nullDir = sortDesc ? 'DESC NULLS FIRST' : 'ASC NULLS LAST'

@@ -336,6 +339,35 @@ module.exports = {
     return { booksToExclude, bookSeriesToInclude }
   },

+  clearCountCache(hook) {
+    Logger.debug(`[LibraryItemsBookFilters] book.${hook}: Clearing count cache`)
+    countCache.clear()
+  },
+
+  async findAndCountAll(findOptions, limit, offset, useCountCache) {
+    const model = Database.bookModel
+    if (useCountCache) {
+      const countCacheKey = stringifySequelizeQuery(findOptions)
+      Logger.debug(`[LibraryItemsBookFilters] countCacheKey: ${countCacheKey}`)
+      if (!countCache.has(countCacheKey)) {
+        const count = await model.count(findOptions)
+        countCache.set(countCacheKey, count)
+      }
+
+      findOptions.limit = limit || null
+      findOptions.offset = offset
+
+      const rows = await model.findAll(findOptions)
+
+      return { rows, count: countCache.get(countCacheKey) }
+    }
+
+    findOptions.limit = limit || null
+    findOptions.offset = offset
+
+    return await model.findAndCountAll(findOptions)
+  },
+
   /**
    * Get library items for book media type using filter and sort
    * @param {string} libraryId
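The count cache has no TTL, so correctness depends on clearCountCache running whenever book rows change. The hook wiring is not part of this diff; a plausible sketch using Sequelize model hooks:

// Assumed wiring (not shown in this diff, paths assumed): invalidate the
// count cache from Sequelize hooks whenever books change.
const Database = require('../../Database')
const libraryItemsBookFilters = require('./libraryItemsBookFilters')

for (const hook of ['afterCreate', 'afterUpdate', 'afterDestroy', 'afterBulkCreate', 'afterBulkUpdate', 'afterBulkDestroy']) {
  Database.bookModel.addHook(hook, () => libraryItemsBookFilters.clearCountCache(hook))
}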
@@ -365,18 +397,7 @@
     const includeRSSFeed = include.includes('rssfeed')
     const includeMediaItemShare = !!user?.isAdminOrUp && include.includes('share')

-    // For sorting by author name an additional attribute must be added
-    // with author names concatenated
     let bookAttributes = null
-    if (sortBy === 'media.metadata.authorNameLF') {
-      bookAttributes = {
-        include: [[Sequelize.literal(`(SELECT group_concat(lastFirst, ", ") FROM (SELECT a.lastFirst FROM authors AS a, bookAuthors as ba WHERE ba.authorId = a.id AND ba.bookId = book.id ORDER BY ba.createdAt ASC))`), 'author_name']]
-      }
-    } else if (sortBy === 'media.metadata.authorName') {
-      bookAttributes = {
-        include: [[Sequelize.literal(`(SELECT group_concat(name, ", ") FROM (SELECT a.name FROM authors AS a, bookAuthors as ba WHERE ba.authorId = a.id AND ba.bookId = book.id ORDER BY ba.createdAt ASC))`), 'author_name']]
-      }
-    }

     const libraryItemWhere = {
       libraryId

@@ -408,18 +429,17 @@
     const libraryItemIncludes = []
     const bookIncludes = []
-    if (includeRSSFeed) {
+
+    if (filterGroup === 'feed-open' || includeRSSFeed) {
+      const rssFeedRequired = filterGroup === 'feed-open'
       libraryItemIncludes.push({
         model: Database.feedModel,
-        required: filterGroup === 'feed-open'
+        required: rssFeedRequired,
+        separate: !rssFeedRequired
       })
     }
-    if (filterGroup === 'feed-open' && !includeRSSFeed) {
-      libraryItemIncludes.push({
-        model: Database.feedModel,
-        required: true
-      })
-    } else if (filterGroup === 'share-open') {
+
+    if (filterGroup === 'share-open') {
       bookIncludes.push({
         model: Database.mediaItemShareModel,
         required: true

@@ -554,13 +574,13 @@
       // When collapsing series and sorting by title then use the series name instead of the book title
       // for this set an attribute "display_title" to use in sorting
       if (global.ServerSettings.sortingIgnorePrefix) {
-        bookAttributes.include.push([Sequelize.literal(`IFNULL((SELECT s.nameIgnorePrefix FROM bookSeries AS bs, series AS s WHERE bs.seriesId = s.id AND bs.bookId = book.id AND bs.id IN (${bookSeriesToInclude.map((v) => `"${v.id}"`).join(', ')})), titleIgnorePrefix)`), 'display_title'])
+        bookAttributes.include.push([Sequelize.literal(`IFNULL((SELECT s.nameIgnorePrefix FROM bookSeries AS bs, series AS s WHERE bs.seriesId = s.id AND bs.bookId = book.id AND bs.id IN (${bookSeriesToInclude.map((v) => `"${v.id}"`).join(', ')})), \`libraryItem\`.\`titleIgnorePrefix\`)`), 'display_title'])
       } else {
-        bookAttributes.include.push([Sequelize.literal(`IFNULL((SELECT s.name FROM bookSeries AS bs, series AS s WHERE bs.seriesId = s.id AND bs.bookId = book.id AND bs.id IN (${bookSeriesToInclude.map((v) => `"${v.id}"`).join(', ')})), \`book\`.\`title\`)`), 'display_title'])
+        bookAttributes.include.push([Sequelize.literal(`IFNULL((SELECT s.name FROM bookSeries AS bs, series AS s WHERE bs.seriesId = s.id AND bs.bookId = book.id AND bs.id IN (${bookSeriesToInclude.map((v) => `"${v.id}"`).join(', ')})), \`libraryItem\`.\`title\`)`), 'display_title'])
       }
     }

-    const { rows: books, count } = await Database.bookModel.findAndCountAll({
+    const findOptions = {
       where: bookWhere,
       distinct: true,
       attributes: bookAttributes,

@@ -577,10 +597,11 @@
         ...bookIncludes
       ],
       order: sortOrder,
-      subQuery: false,
-      limit: limit || null,
-      offset
-    })
+      subQuery: false
+    }
+
+    const findAndCountAll = process.env.QUERY_PROFILING ? profile(this.findAndCountAll) : this.findAndCountAll
+    const { rows: books, count } = await findAndCountAll(findOptions, limit, offset, !filterGroup && !userPermissionBookWhere.bookWhere.length)

     const libraryItems = books.map((bookExpanded) => {
       const libraryItem = bookExpanded.libraryItem

@@ -1008,8 +1029,8 @@
     const textSearchQuery = await Database.createTextSearchQuery(query)

-    const matchTitle = textSearchQuery.matchExpression('title')
-    const matchSubtitle = textSearchQuery.matchExpression('subtitle')
+    const matchTitle = textSearchQuery.matchExpression('book.title')
+    const matchSubtitle = textSearchQuery.matchExpression('book.subtitle')

     // Search title, subtitle, asin, isbn
     const books = await Database.bookModel.findAll({
@@ -1,6 +1,10 @@
 const Sequelize = require('sequelize')
 const Database = require('../../Database')
+const Logger = require('../../Logger')
+const { profile } = require('../../utils/profiler')
+const stringifySequelizeQuery = require('../stringifySequelizeQuery')
+const countCache = new Map()

 module.exports = {
   /**

@@ -84,9 +88,9 @@ module.exports = {
       return [[Sequelize.literal(`\`podcast\`.\`author\` COLLATE NOCASE ${nullDir}`)]]
     } else if (sortBy === 'media.metadata.title') {
       if (global.ServerSettings.sortingIgnorePrefix) {
-        return [[Sequelize.literal('titleIgnorePrefix COLLATE NOCASE'), dir]]
+        return [[Sequelize.literal('`libraryItem`.`titleIgnorePrefix` COLLATE NOCASE'), dir]]
       } else {
-        return [[Sequelize.literal('`podcast`.`title` COLLATE NOCASE'), dir]]
+        return [[Sequelize.literal('`libraryItem`.`title` COLLATE NOCASE'), dir]]
       }
     } else if (sortBy === 'media.numTracks') {
       return [['numEpisodes', dir]]

@@ -96,6 +100,34 @@ module.exports = {
     return []
   },

+  clearCountCache(model, hook) {
+    Logger.debug(`[LibraryItemsPodcastFilters] ${model}.${hook}: Clearing count cache`)
+    countCache.clear()
+  },
+
+  async findAndCountAll(findOptions, model, limit, offset, useCountCache) {
+    if (useCountCache) {
+      const countCacheKey = stringifySequelizeQuery(findOptions)
+      Logger.debug(`[LibraryItemsPodcastFilters] countCacheKey: ${countCacheKey}`)
+      if (!countCache.has(countCacheKey)) {
+        const count = await model.count(findOptions)
+        countCache.set(countCacheKey, count)
+      }
+
+      findOptions.limit = limit || null
+      findOptions.offset = offset
+
+      const rows = await model.findAll(findOptions)
+
+      return { rows, count: countCache.get(countCacheKey) }
+    }
+
+    findOptions.limit = limit || null
+    findOptions.offset = offset
+
+    return await model.findAndCountAll(findOptions)
+  },
+
   /**
    * Get library items for podcast media type using filter and sort
    * @param {string} libraryId

@@ -120,7 +152,8 @@
     if (includeRSSFeed) {
       libraryItemIncludes.push({
         model: Database.feedModel,
-        required: filterGroup === 'feed-open'
+        required: filterGroup === 'feed-open',
+        separate: true
       })
     }
     if (filterGroup === 'issues') {

@@ -139,9 +172,6 @@
     }

     const podcastIncludes = []
-    if (includeNumEpisodesIncomplete) {
-      podcastIncludes.push([Sequelize.literal(`(SELECT count(*) FROM podcastEpisodes pe LEFT OUTER JOIN mediaProgresses mp ON mp.mediaItemId = pe.id AND mp.userId = :userId WHERE pe.podcastId = podcast.id AND (mp.isFinished = 0 OR mp.isFinished IS NULL))`), 'numEpisodesIncomplete'])
-    }

     let { mediaWhere, replacements } = this.getMediaGroupQuery(filterGroup, filterValue)
     replacements.userId = user.id

@@ -153,12 +183,12 @@
     replacements = { ...replacements, ...userPermissionPodcastWhere.replacements }
     podcastWhere.push(...userPermissionPodcastWhere.podcastWhere)

-    const { rows: podcasts, count } = await Database.podcastModel.findAndCountAll({
+    const findOptions = {
       where: podcastWhere,
       replacements,
       distinct: true,
       attributes: {
-        include: [[Sequelize.literal(`(SELECT count(*) FROM podcastEpisodes pe WHERE pe.podcastId = podcast.id)`), 'numEpisodes'], ...podcastIncludes]
+        include: [...podcastIncludes]
       },
       include: [
         {

@@ -169,10 +199,12 @@
         }
       ],
       order: this.getOrder(sortBy, sortDesc),
-      subQuery: false,
-      limit: limit || null,
-      offset
-    })
+      subQuery: false
+    }
+
+    const findAndCountAll = process.env.QUERY_PROFILING ? profile(this.findAndCountAll) : this.findAndCountAll
+
+    const { rows: podcasts, count } = await findAndCountAll(findOptions, Database.podcastModel, limit, offset, !filterGroup && !userPermissionPodcastWhere.podcastWhere.length)

     const libraryItems = podcasts.map((podcastExpanded) => {
       const libraryItem = podcastExpanded.libraryItem

@@ -183,11 +215,15 @@
       if (libraryItem.feeds?.length) {
        libraryItem.rssFeed = libraryItem.feeds[0]
       }
-      if (podcast.dataValues.numEpisodesIncomplete) {
-        libraryItem.numEpisodesIncomplete = podcast.dataValues.numEpisodesIncomplete
-      }
-      if (podcast.dataValues.numEpisodes) {
-        podcast.numEpisodes = podcast.dataValues.numEpisodes
-      }
+
+      if (includeNumEpisodesIncomplete) {
+        const numEpisodesComplete = user.mediaProgresses.reduce((acc, mp) => {
+          if (mp.podcastId === podcast.id && mp.isFinished) {
+            acc += 1
+          }
+          return acc
+        }, 0)
+        libraryItem.numEpisodesIncomplete = podcast.numEpisodes - numEpisodesComplete
+      }

       libraryItem.media = podcast
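The per-podcast incomplete count moves from a correlated SQL subquery to an in-memory pass over the user's media progress records. A worked example with invented data:

// Worked example (invented data): 5 episodes and two finished progress
// records for this podcast -> numEpisodesIncomplete is 3.
const podcast = { id: 'pod-1', numEpisodes: 5 }
const mediaProgresses = [
  { podcastId: 'pod-1', isFinished: true },
  { podcastId: 'pod-1', isFinished: false },
  { podcastId: 'pod-1', isFinished: true },
  { podcastId: 'pod-2', isFinished: true } // other podcast, ignored
]

const numEpisodesComplete = mediaProgresses.reduce((acc, mp) => {
  if (mp.podcastId === podcast.id && mp.isFinished) acc += 1
  return acc
}, 0)

console.log(podcast.numEpisodes - numEpisodesComplete) // 3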
@@ -268,28 +304,31 @@
     const userPermissionPodcastWhere = this.getUserPermissionPodcastWhereQuery(user)

-    const { rows: podcastEpisodes, count } = await Database.podcastEpisodeModel.findAndCountAll({
+    const findOptions = {
       where: podcastEpisodeWhere,
       replacements: userPermissionPodcastWhere.replacements,
       include: [
         {
           model: Database.podcastModel,
           required: true,
           where: userPermissionPodcastWhere.podcastWhere,
           include: [
             {
               model: Database.libraryItemModel,
               required: true,
               where: libraryItemWhere
             }
           ]
         },
         ...podcastEpisodeIncludes
       ],
       distinct: true,
       subQuery: false,
-      order: podcastEpisodeOrder,
-      limit,
-      offset
-    })
+      order: podcastEpisodeOrder
+    }
+
+    const findAndCountAll = process.env.QUERY_PROFILING ? profile(this.findAndCountAll) : this.findAndCountAll
+
+    const { rows: podcastEpisodes, count } = await findAndCountAll(findOptions, Database.podcastEpisodeModel, limit, offset, !filterGroup)

     const libraryItems = podcastEpisodes.map((ep) => {
       const libraryItem = ep.podcast.libraryItem

@@ -321,8 +360,8 @@
     const textSearchQuery = await Database.createTextSearchQuery(query)

-    const matchTitle = textSearchQuery.matchExpression('title')
-    const matchAuthor = textSearchQuery.matchExpression('author')
+    const matchTitle = textSearchQuery.matchExpression('podcast.title')
+    const matchAuthor = textSearchQuery.matchExpression('podcast.author')

     // Search title, author, itunesId, itunesArtistId
     const podcasts = await Database.podcastModel.findAll({

@@ -426,7 +465,7 @@
   async getRecentEpisodes(user, library, limit, offset) {
     const userPermissionPodcastWhere = this.getUserPermissionPodcastWhereQuery(user)

-    const episodes = await Database.podcastEpisodeModel.findAll({
+    const findOptions = {
       where: {
         '$mediaProgresses.isFinished$': {
           [Sequelize.Op.or]: [null, false]
@@ -457,7 +496,11 @@
       subQuery: false,
       limit,
       offset
-    })
+    }
+
+    const findAll = process.env.QUERY_PROFILING ? profile(Database.podcastEpisodeModel.findAll.bind(Database.podcastEpisodeModel)) : Database.podcastEpisodeModel.findAll.bind(Database.podcastEpisodeModel)
+
+    const episodes = await findAll(findOptions)

     const episodeResults = episodes.map((ep) => {
       ep.podcast.podcastEpisodes = [] // Not needed
@@ -44,7 +44,7 @@ function groupFileItemsIntoLibraryItemDirs(mediaType, fileItems, audiobooksOnly
     return i.deep > 0 || (mediaType === 'book' && isMediaFile(mediaType, i.extension, audiobooksOnly))
   })

-  // Step 2: Seperate media files and other files
+  // Step 2: Separate media files and other files
   //   - Directories without a media file will not be included
   /** @type {import('./fileUtils').FilePathItem[]} */
   const mediaFileItems = []
 25  server/utils/stringifySequelizeQuery.js  Normal file
@@ -0,0 +1,25 @@
+function stringifySequelizeQuery(findOptions) {
+  function isClass(func) {
+    return typeof func === 'function' && /^class\s/.test(func.toString())
+  }
+
+  function replacer(key, value) {
+    if (typeof value === 'object' && value !== null) {
+      const symbols = Object.getOwnPropertySymbols(value).reduce((acc, sym) => {
+        acc[sym.toString()] = value[sym]
+        return acc
+      }, {})
+
+      return { ...value, ...symbols }
+    }
+
+    if (isClass(value)) {
+      return `${value.name}`
+    }
+
+    return value
+  }
+
+  return JSON.stringify(findOptions, replacer)
+}
+module.exports = stringifySequelizeQuery
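Why a custom replacer: Sequelize operators are symbol keys, which plain JSON.stringify drops, so different where clauses could otherwise collapse to the same count-cache key. A sketch assuming Sequelize's Op symbols:

// Symbol-keyed operators survive stringification, so distinct queries
// produce distinct cache keys.
const { Op } = require('sequelize')
const stringifySequelizeQuery = require('./server/utils/stringifySequelizeQuery')

const a = stringifySequelizeQuery({ where: { title: { [Op.like]: '%night%' } } })
const b = stringifySequelizeQuery({ where: { title: { [Op.ne]: '%night%' } } })

// Plain JSON.stringify would yield {"where":{"title":{}}} for both;
// with the replacer the keys include e.g. "Symbol(like)".
console.log(a !== b) // true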
@@ -1,3 +1,5 @@
+const Path = require('path')
+const { Response } = require('express')
 const Logger = require('../Logger')
 const archiver = require('../libs/archiver')

@@ -50,3 +52,86 @@ module.exports.zipDirectoryPipe = (path, filename, res) => {
     archive.finalize()
   })
 }
+
+/**
+ * Creates a zip archive containing multiple directories and streams it to the response.
+ *
+ * @param {{ path: string, isFile: boolean }[]} pathObjects
+ * @param {string} filename - Name of the zip file to be sent as attachment.
+ * @param {Response} res - Response object to pipe the archive data to.
+ * @returns {Promise<void>} - Promise that resolves when the zip operation completes.
+ */
+module.exports.zipDirectoriesPipe = (pathObjects, filename, res) => {
+  return new Promise((resolve, reject) => {
+    // create a file to stream archive data to
+    res.attachment(filename)
+
+    const archive = archiver('zip', {
+      zlib: { level: 0 } // Sets the compression level.
+    })
+
+    // listen for all archive data to be written
+    // 'close' event is fired only when a file descriptor is involved
+    res.on('close', () => {
+      Logger.info(archive.pointer() + ' total bytes')
+      Logger.debug('archiver has been finalized and the output file descriptor has closed.')
+      resolve()
+    })
+
+    // This event is fired when the data source is drained no matter what was the data source.
+    // It is not part of this library but rather from the NodeJS Stream API.
+    // @see: https://nodejs.org/api/stream.html#stream_event_end
+    res.on('end', () => {
+      Logger.debug('Data has been drained')
+    })
+
+    // good practice to catch warnings (ie stat failures and other non-blocking errors)
+    archive.on('warning', function (err) {
+      if (err.code === 'ENOENT') {
+        // log warning
+        Logger.warn(`[DownloadManager] Archiver warning: ${err.message}`)
+      } else {
+        // throw error
+        Logger.error(`[DownloadManager] Archiver error: ${err.message}`)
+        // throw err
+        reject(err)
+      }
+    })
+    archive.on('error', function (err) {
+      Logger.error(`[DownloadManager] Archiver error: ${err.message}`)
+      reject(err)
+    })
+
+    // pipe archive data to the file
+    archive.pipe(res)
+
+    // Add each path as a directory in the zip
+    pathObjects.forEach((pathObject) => {
+      if (!pathObject.isFile) {
+        // Add the directory to the archive with its name as the root folder
+        archive.directory(pathObject.path, Path.basename(pathObject.path))
+      } else {
+        archive.file(pathObject.path, { name: Path.basename(pathObject.path) })
+      }
+    })
+
+    archive.finalize()
+  })
+}
+
+/**
+ * Handles errors that occur during the download process.
+ *
+ * @param {*} error
+ * @param {Response} res
+ * @returns {*}
+ */
+module.exports.handleDownloadError = (error, res) => {
+  if (!res.headersSent) {
+    if (error.code === 'ENOENT') {
+      return res.status(404).send('File not found')
+    } else {
+      return res.status(500).send('Download failed')
+    }
+  }
+}
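A sketch of calling the two new helpers from an Express handler; the route, item paths, and module path are hypothetical:

// Hypothetical route using the new helpers; the require path is assumed.
const express = require('express')
const { zipDirectoriesPipe, handleDownloadError } = require('./server/utils/fileUtils')

const app = express()
app.get('/api/items/download-batch', async (req, res) => {
  const pathObjects = [
    { path: '/audiobooks/Book One', isFile: false }, // zipped as folder "Book One"
    { path: '/audiobooks/standalone.m4b', isFile: true }
  ]
  try {
    await zipDirectoriesPipe(pathObjects, 'items.zip', res)
  } catch (error) {
    handleDownloadError(error, res)
  }
})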