chore: merge and resolve

This commit is contained in:
jfrazx 2024-06-09 09:18:42 -07:00
commit e9e9a8ba75
No known key found for this signature in database
GPG key ID: 7E72C3BCC0F85A7B
260 changed files with 19677 additions and 3203 deletions

View file

@ -76,6 +76,9 @@ class Auth {
return
}
// Custom req timeout see: https://github.com/panva/node-openid-client/blob/main/docs/README.md#customizing
OpenIDClient.custom.setHttpOptionsDefaults({ timeout: 10000 })
const openIdIssuerClient = new OpenIDClient.Issuer({
issuer: global.ServerSettings.authOpenIDIssuerURL,
authorization_endpoint: global.ServerSettings.authOpenIDAuthorizationURL,
@ -86,7 +89,8 @@ class Auth {
}).Client
const openIdClient = new openIdIssuerClient({
client_id: global.ServerSettings.authOpenIDClientID,
client_secret: global.ServerSettings.authOpenIDClientSecret
client_secret: global.ServerSettings.authOpenIDClientSecret,
id_token_signed_response_alg: global.ServerSettings.authOpenIDTokenSigningAlgorithm
})
passport.use('openid-client', new OpenIDClient.Strategy({
client: openIdClient,
@ -95,71 +99,198 @@ class Auth {
scope: 'openid profile email'
}
}, async (tokenset, userinfo, done) => {
Logger.debug(`[Auth] openid callback userinfo=`, userinfo)
try {
Logger.debug(`[Auth] openid callback userinfo=`, JSON.stringify(userinfo, null, 2))
let failureMessage = 'Unauthorized'
if (!userinfo.sub) {
Logger.error(`[Auth] openid callback invalid userinfo, no sub`)
return done(null, null, failureMessage)
if (!userinfo.sub) {
throw new Error('Invalid userinfo, no sub')
}
if (!this.validateGroupClaim(userinfo)) {
throw new Error(`Group claim ${Database.serverSettings.authOpenIDGroupClaim} not found or empty in userinfo`)
}
let user = await this.findOrCreateUser(userinfo)
if (!user?.isActive) {
throw new Error('User not active or not found')
}
await this.setUserGroup(user, userinfo)
await this.updateUserPermissions(user, userinfo)
// We also have to save the id_token for later (used for logout) because we cannot set cookies here
user.openid_id_token = tokenset.id_token
return done(null, user)
} catch (error) {
Logger.error(`[Auth] openid callback error: ${error?.message}\n${error?.stack}`)
return done(null, null, 'Unauthorized')
}
}))
}
// First check for matching user by sub
let user = await Database.userModel.getUserByOpenIDSub(userinfo.sub)
if (!user) {
// Optionally match existing by email or username based on server setting "authOpenIDMatchExistingBy"
if (Database.serverSettings.authOpenIDMatchExistingBy === 'email' && userinfo.email && userinfo.email_verified) {
/**
* Finds an existing user by OpenID subject identifier, or by email/username based on server settings,
* or creates a new user if configured to do so.
*/
async findOrCreateUser(userinfo) {
let user = await Database.userModel.getUserByOpenIDSub(userinfo.sub)
// Matched by sub
if (user) {
Logger.debug(`[Auth] openid: User found by sub`)
return user
}
// Match existing user by email
if (Database.serverSettings.authOpenIDMatchExistingBy === 'email') {
if (userinfo.email) {
// Only disallow when email_verified explicitly set to false (allow both if not set or true)
if (userinfo.email_verified === false) {
Logger.warn(`[Auth] openid: User not found and email "${userinfo.email}" is not verified`)
return null
} else {
Logger.info(`[Auth] openid: User not found, checking existing with email "${userinfo.email}"`)
user = await Database.userModel.getUserByEmail(userinfo.email)
// Check that user is not already matched
if (user?.authOpenIDSub) {
Logger.warn(`[Auth] openid: User found with email "${userinfo.email}" but is already matched with sub "${user.authOpenIDSub}"`)
// TODO: Message isn't actually returned to the user yet. Need to override the passport authenticated callback
failureMessage = 'A matching user was found but is already matched with another user from your auth provider'
user = null
}
} else if (Database.serverSettings.authOpenIDMatchExistingBy === 'username' && userinfo.preferred_username) {
Logger.info(`[Auth] openid: User not found, checking existing with username "${userinfo.preferred_username}"`)
user = await Database.userModel.getUserByUsername(userinfo.preferred_username)
// Check that user is not already matched
if (user?.authOpenIDSub) {
Logger.warn(`[Auth] openid: User found with username "${userinfo.preferred_username}" but is already matched with sub "${user.authOpenIDSub}"`)
// TODO: Message isn't actually returned to the user yet. Need to override the passport authenticated callback
failureMessage = 'A matching user was found but is already matched with another user from your auth provider'
user = null
return null // User is linked to a different OpenID subject; do not proceed.
}
}
} else {
Logger.warn(`[Auth] openid: User not found and no email in userinfo`)
// We deny login, because if the admin wishes to match email, it makes sense to require it
return null
}
}
// Match existing user by username
else if (Database.serverSettings.authOpenIDMatchExistingBy === 'username') {
let username
// If existing user was matched and isActive then save sub to user
if (user?.isActive) {
Logger.info(`[Auth] openid: New user found matching existing user "${user.username}"`)
user.authOpenIDSub = userinfo.sub
await Database.userModel.updateFromOld(user)
} else if (user && !user.isActive) {
Logger.warn(`[Auth] openid: New user found matching existing user "${user.username}" but that user is deactivated`)
}
if (userinfo.preferred_username) {
Logger.info(`[Auth] openid: User not found, checking existing with userinfo.preferred_username "${userinfo.preferred_username}"`)
username = userinfo.preferred_username
} else if (userinfo.username) {
Logger.info(`[Auth] openid: User not found, checking existing with userinfo.username "${userinfo.username}"`)
username = userinfo.username
} else {
Logger.warn(`[Auth] openid: User not found and neither preferred_username nor username in userinfo`)
return null
}
// Optionally auto register the user
if (!user && Database.serverSettings.authOpenIDAutoRegister) {
Logger.info(`[Auth] openid: Auto-registering user with sub "${userinfo.sub}"`, userinfo)
user = await Database.userModel.createUserFromOpenIdUserInfo(userinfo, this)
user = await Database.userModel.getUserByUsername(username)
if (user?.authOpenIDSub) {
Logger.warn(`[Auth] openid: User found with username "${username}" but is already matched with sub "${user.authOpenIDSub}"`)
return null // User is linked to a different OpenID subject; do not proceed.
}
}
// Found existing user via email or username
if (user) {
if (!user.isActive) {
Logger.warn(`[Auth] openid: User found but is not active`)
return null
}
user.authOpenIDSub = userinfo.sub
await Database.userModel.updateFromOld(user)
Logger.debug(`[Auth] openid: User found by email/username`)
return user
}
// If no existing user was matched, auto-register if configured
if (Database.serverSettings.authOpenIDAutoRegister) {
Logger.info(`[Auth] openid: Auto-registering user with sub "${userinfo.sub}"`, userinfo)
user = await Database.userModel.createUserFromOpenIdUserInfo(userinfo, this)
return user
}
Logger.warn(`[Auth] openid: User not found and auto-register is disabled`)
return null
}
/**
* Validates the presence and content of the group claim in userinfo.
*/
validateGroupClaim(userinfo) {
const groupClaimName = Database.serverSettings.authOpenIDGroupClaim
if (!groupClaimName) // Allow no group claim when configured like this
return true
// If configured it must exist in userinfo
if (!userinfo[groupClaimName]) {
return false
}
return true
}
/**
* Sets the user group based on group claim in userinfo.
*
* @param {import('./objects/user/User')} user
* @param {Object} userinfo
*/
async setUserGroup(user, userinfo) {
const groupClaimName = Database.serverSettings.authOpenIDGroupClaim
if (!groupClaimName) // No group claim configured, don't set anything
return
if (!userinfo[groupClaimName])
throw new Error(`Group claim ${groupClaimName} not found in userinfo`)
const groupsList = userinfo[groupClaimName].map(group => group.toLowerCase())
const rolesInOrderOfPriority = ['admin', 'user', 'guest']
let userType = rolesInOrderOfPriority.find(role => groupsList.includes(role))
if (userType) {
if (user.type === 'root') {
// Check OpenID Group
if (userType !== 'admin') {
throw new Error(`Root user "${user.username}" cannot be downgraded to ${userType}. Denying login.`)
} else {
// If root user is logging in via OpenID, we will not change the type
return
}
}
if (!user?.isActive) {
if (user && !user.isActive) {
failureMessage = 'Unauthorized'
}
// deny login
done(null, null, failureMessage)
return
if (user.type !== userType) {
Logger.info(`[Auth] openid callback: Updating user "${user.username}" type to "${userType}" from "${user.type}"`)
user.type = userType
await Database.userModel.updateFromOld(user)
}
} else {
throw new Error(`No valid group found in userinfo: ${JSON.stringify(userinfo[groupClaimName], null, 2)}`)
}
}
// We also have to save the id_token for later (used for logout) because we cannot set cookies here
user.openid_id_token = tokenset.id_token
/**
* Updates user permissions based on the advanced permissions claim.
*
* @param {import('./objects/user/User')} user
* @param {Object} userinfo
*/
async updateUserPermissions(user, userinfo) {
const absPermissionsClaim = Database.serverSettings.authOpenIDAdvancedPermsClaim
if (!absPermissionsClaim) // No advanced permissions claim configured, don't set anything
return
// permit login
return done(null, user)
}))
if (user.type === 'admin' || user.type === 'root')
return
const absPermissions = userinfo[absPermissionsClaim]
if (!absPermissions)
throw new Error(`Advanced permissions claim ${absPermissionsClaim} not found in userinfo`)
if (user.updatePermissionsFromExternalJSON(absPermissions)) {
Logger.info(`[Auth] openid callback: Updating advanced perms for user "${user.username}" using "${JSON.stringify(absPermissions)}"`)
await Database.userModel.updateFromOld(user)
}
}
/**
@ -331,10 +462,19 @@ class Auth {
sso_redirect_uri: oidcStrategy._params.redirect_uri // Save the redirect_uri (for the SSO Provider) for the callback
}
var scope = 'openid profile email'
if (global.ServerSettings.authOpenIDGroupClaim) {
scope += ' ' + global.ServerSettings.authOpenIDGroupClaim
}
if (global.ServerSettings.authOpenIDAdvancedPermsClaim) {
scope += ' ' + global.ServerSettings.authOpenIDAdvancedPermsClaim
}
const authorizationUrl = client.authorizationUrl({
...oidcStrategy._params,
state: state,
response_type: 'code',
scope: scope,
code_challenge,
code_challenge_method
})
@ -343,7 +483,7 @@ class Auth {
res.redirect(authorizationUrl)
} catch (error) {
Logger.error(`[Auth] Error in /auth/openid route: ${error}`)
Logger.error(`[Auth] Error in /auth/openid route: ${error}\n${error?.stack}`)
res.status(500).send('Internal Server Error')
}
@ -399,7 +539,7 @@ class Auth {
// Redirect to the overwrite URI saved in the map
res.redirect(redirectUri)
} catch (error) {
Logger.error(`[Auth] Error in /auth/openid/mobile-redirect route: ${error}`)
Logger.error(`[Auth] Error in /auth/openid/mobile-redirect route: ${error}\n${error?.stack}`)
res.status(500).send('Internal Server Error')
}
})
@ -421,12 +561,12 @@ class Auth {
}
function handleAuthError(isMobile, errorCode, errorMessage, logMessage, response) {
Logger.error(logMessage)
Logger.error(JSON.stringify(logMessage, null, 2))
if (response) {
// Depending on the error, it can also have a body
// We also log the request header the passport plugin sends for the URL
const header = response.req?._header.replace(/Authorization: [^\r\n]*/i, 'Authorization: REDACTED')
Logger.debug(header + '\n' + response.body?.toString())
Logger.debug(header + '\n' + JSON.stringify(response.body, null, 2))
}
if (isMobile) {
@ -511,7 +651,8 @@ class Auth {
token_endpoint: data.token_endpoint,
userinfo_endpoint: data.userinfo_endpoint,
end_session_endpoint: data.end_session_endpoint,
jwks_uri: data.jwks_uri
jwks_uri: data.jwks_uri,
id_token_signing_alg_values_supported: data.id_token_signing_alg_values_supported
})
}).catch((error) => {
Logger.error(`[Auth] Failed to get openid configuration at "${configUrl}"`, error)
@ -530,42 +671,45 @@ class Auth {
res.clearCookie('auth_method')
let logoutUrl = null
if (authMethod === 'openid' || authMethod === 'openid-mobile') {
// If we are using openid, we need to redirect to the logout endpoint
// node-openid-client does not support doing it over passport
const oidcStrategy = passport._strategy('openid-client')
const client = oidcStrategy._client
let postLogoutRedirectUri = null
if (client.issuer.end_session_endpoint && client.issuer.end_session_endpoint.length > 0) {
let postLogoutRedirectUri = null
if (authMethod === 'openid') {
const protocol = (req.secure || req.get('x-forwarded-proto') === 'https') ? 'https' : 'http'
const host = req.get('host')
// TODO: ABS does currently not support subfolders for installation
// If we want to support it we need to include a config for the serverurl
postLogoutRedirectUri = `${protocol}://${host}/login`
if (authMethod === 'openid') {
const protocol = (req.secure || req.get('x-forwarded-proto') === 'https') ? 'https' : 'http'
const host = req.get('host')
// TODO: ABS does currently not support subfolders for installation
// If we want to support it we need to include a config for the serverurl
postLogoutRedirectUri = `${protocol}://${host}/login`
}
// else for openid-mobile we keep postLogoutRedirectUri on null
// nice would be to redirect to the app here, but for example Authentik does not implement
// the post_logout_redirect_uri parameter at all and for other providers
// we would also need again to implement (and even before get to know somehow for 3rd party apps)
// the correct app link like audiobookshelf://login (and maybe also provide a redirect like mobile-redirect).
// Instead because it's null (and this way the parameter will be omitted completely), the client/app can simply append something like
// &post_logout_redirect_uri=audiobookshelf://login to the received logout url by itself which is the simplest solution
// (The URL needs to be whitelisted in the config of the SSO/ID provider)
logoutUrl = client.endSessionUrl({
id_token_hint: req.cookies.openid_id_token,
post_logout_redirect_uri: postLogoutRedirectUri
})
}
// else for openid-mobile we keep postLogoutRedirectUri on null
// nice would be to redirect to the app here, but for example Authentik does not implement
// the post_logout_redirect_uri parameter at all and for other providers
// we would also need again to implement (and even before get to know somehow for 3rd party apps)
// the correct app link like audiobookshelf://login (and maybe also provide a redirect like mobile-redirect).
// Instead because it's null (and this way the parameter will be omitted completely), the client/app can simply append something like
// &post_logout_redirect_uri=audiobookshelf://login to the received logout url by itself which is the simplest solution
// (The URL needs to be whitelisted in the config of the SSO/ID provider)
const logoutUrl = client.endSessionUrl({
id_token_hint: req.cookies.openid_id_token,
post_logout_redirect_uri: postLogoutRedirectUri
})
res.clearCookie('openid_id_token')
// Tell the user agent (browser) to redirect to the authentication provider's logout URL
res.send({ redirect_url: logoutUrl })
} else {
res.sendStatus(200)
}
// Tell the user agent (browser) to redirect to the authentication provider's logout URL
// (or redirect_url: null if we don't have one)
res.send({ redirect_url: logoutUrl })
}
})
})

View file

@ -217,7 +217,6 @@ class Database {
async disconnect() {
Logger.info(`[Database] Disconnecting sqlite db`)
await this.sequelize.close()
this.sequelize = null
}
/**
@ -689,6 +688,34 @@ class Database {
return this.libraryFilterData[libraryId].series.some(se => se.id === seriesId)
}
/**
* Get author id for library by name. Uses library filter data if available
*
* @param {string} libraryId
* @param {string} authorName
* @returns {Promise<string>} author id or null if not found
*/
async getAuthorIdByName(libraryId, authorName) {
if (!this.libraryFilterData[libraryId]) {
return (await this.authorModel.getOldByNameAndLibrary(authorName, libraryId))?.id || null
}
return this.libraryFilterData[libraryId].authors.find(au => au.name === authorName)?.id || null
}
/**
* Get series id for library by name. Uses library filter data if available
*
* @param {string} libraryId
* @param {string} seriesName
* @returns {Promise<string>} series id or null if not found
*/
async getSeriesIdByName(libraryId, seriesName) {
if (!this.libraryFilterData[libraryId]) {
return (await this.seriesModel.getOldByNameAndLibrary(seriesName, libraryId))?.id || null
}
return this.libraryFilterData[libraryId].series.find(se => se.name === seriesName)?.id || null
}
/**
* Reset numIssues for library
* @param {string} libraryId

View file

@ -7,6 +7,7 @@ class Logger {
this.logManager = null
this.isDev = process.env.NODE_ENV !== 'production'
this.logLevel = !this.isDev ? LogLevel.INFO : LogLevel.TRACE
this.socketListeners = []
}
@ -49,7 +50,7 @@ class Logger {
}
addSocketListener(socket, level) {
var index = this.socketListeners.findIndex(s => s.id === socket.id)
var index = this.socketListeners.findIndex((s) => s.id === socket.id)
if (index >= 0) {
this.socketListeners.splice(index, 1, {
id: socket.id,
@ -66,18 +67,19 @@ class Logger {
}
removeSocketListener(socketId) {
this.socketListeners = this.socketListeners.filter(s => s.id !== socketId)
this.socketListeners = this.socketListeners.filter((s) => s.id !== socketId)
}
/**
*
* @param {number} level
* @param {string[]} args
*
* @param {number} level
* @param {string[]} args
* @param {string} src
*/
async handleLog(level, args) {
async handleLog(level, args, src) {
const logObj = {
timestamp: this.timestamp,
source: this.source,
source: src,
message: args.join(' '),
levelName: this.getLogLevelString(level),
level
@ -92,7 +94,7 @@ class Logger {
// Save log to file
if (level >= this.logLevel) {
await this.logManager.logToFile(logObj)
await this.logManager?.logToFile(logObj)
}
}
@ -104,47 +106,47 @@ class Logger {
trace(...args) {
if (this.logLevel > LogLevel.TRACE) return
console.trace(`[${this.timestamp}] TRACE:`, ...args)
this.handleLog(LogLevel.TRACE, args)
this.handleLog(LogLevel.TRACE, args, this.source)
}
debug(...args) {
if (this.logLevel > LogLevel.DEBUG) return
console.debug(`[${this.timestamp}] DEBUG:`, ...args, `(${this.source})`)
this.handleLog(LogLevel.DEBUG, args)
this.handleLog(LogLevel.DEBUG, args, this.source)
}
info(...args) {
if (this.logLevel > LogLevel.INFO) return
console.info(`[${this.timestamp}] INFO:`, ...args)
this.handleLog(LogLevel.INFO, args)
this.handleLog(LogLevel.INFO, args, this.source)
}
warn(...args) {
if (this.logLevel > LogLevel.WARN) return
console.warn(`[${this.timestamp}] WARN:`, ...args, `(${this.source})`)
this.handleLog(LogLevel.WARN, args)
this.handleLog(LogLevel.WARN, args, this.source)
}
error(...args) {
if (this.logLevel > LogLevel.ERROR) return
console.error(`[${this.timestamp}] ERROR:`, ...args, `(${this.source})`)
this.handleLog(LogLevel.ERROR, args)
this.handleLog(LogLevel.ERROR, args, this.source)
}
/**
* Fatal errors are ones that exit the process
* Fatal logs are saved to crash_logs.txt
*
* @param {...any} args
*
* @param {...any} args
*/
fatal(...args) {
console.error(`[${this.timestamp}] FATAL:`, ...args, `(${this.source})`)
return this.handleLog(LogLevel.FATAL, args)
return this.handleLog(LogLevel.FATAL, args, this.source)
}
note(...args) {
console.log(`[${this.timestamp}] NOTE:`, ...args)
this.handleLog(LogLevel.NOTE, args)
this.handleLog(LogLevel.NOTE, args, this.source)
}
}
module.exports = new Logger()
module.exports = new Logger()

View file

@ -5,7 +5,7 @@ const http = require('http')
const util = require('util')
const fs = require('./libs/fsExtra')
const fileUpload = require('./libs/expressFileupload')
const cookieParser = require("cookie-parser")
const cookieParser = require('cookie-parser')
const { version } = require('../package.json')
@ -41,17 +41,17 @@ const passport = require('passport')
const expressSession = require('express-session')
class Server {
constructor(SOURCE, PORT, HOST, UID, GID, CONFIG_PATH, METADATA_PATH, ROUTER_BASE_PATH) {
constructor(SOURCE, PORT, HOST, CONFIG_PATH, METADATA_PATH, ROUTER_BASE_PATH) {
this.Port = PORT
this.Host = HOST
global.Source = SOURCE
global.isWin = process.platform === 'win32'
global.Uid = isNaN(UID) ? undefined : Number(UID)
global.Gid = isNaN(GID) ? undefined : Number(GID)
global.ConfigPath = fileUtils.filePathToPOSIX(Path.normalize(CONFIG_PATH))
global.MetadataPath = fileUtils.filePathToPOSIX(Path.normalize(METADATA_PATH))
global.RouterBasePath = ROUTER_BASE_PATH
global.XAccel = process.env.USE_X_ACCEL
global.AllowCors = process.env.ALLOW_CORS === '1'
global.DisableSsrfRequestFilter = process.env.DISABLE_SSRF_REQUEST_FILTER === '1'
if (!fs.pathExistsSync(global.ConfigPath)) {
fs.mkdirSync(global.ConfigPath)
@ -182,15 +182,16 @@ class Server {
* so we have to allow cors for specific origins to the /api/items/:id/ebook endpoint
* The cover image is fetched with XMLHttpRequest in the mobile apps to load into a canvas and extract colors
* @see https://ionicframework.com/docs/troubleshooting/cors
*
* Running in development allows cors to allow testing the mobile apps in the browser
*
* Running in development allows cors to allow testing the mobile apps in the browser
* or env variable ALLOW_CORS = '1'
*/
app.use((req, res, next) => {
if (Logger.isDev || req.path.match(/\/api\/items\/([a-z0-9-]{36})\/(ebook|cover)(\/[0-9]+)?/)) {
const allowedOrigins = ['capacitor://localhost', 'http://localhost']
if (Logger.isDev || allowedOrigins.some(o => o === req.get('origin'))) {
if (global.AllowCors || Logger.isDev || allowedOrigins.some((o) => o === req.get('origin'))) {
res.header('Access-Control-Allow-Origin', req.get('origin'))
res.header("Access-Control-Allow-Methods", 'GET, POST, PATCH, PUT, DELETE, OPTIONS')
res.header('Access-Control-Allow-Methods', 'GET, POST, PATCH, PUT, DELETE, OPTIONS')
res.header('Access-Control-Allow-Headers', '*')
res.header('Access-Control-Allow-Credentials', true)
if (req.method === 'OPTIONS') {
@ -205,15 +206,17 @@ class Server {
// parse cookies in requests
app.use(cookieParser())
// enable express-session
app.use(expressSession({
secret: global.ServerSettings.tokenSecret,
resave: false,
saveUninitialized: false,
cookie: {
// also send the cookie if we are not on https (not every user has https)
secure: false
},
}))
app.use(
expressSession({
secret: global.ServerSettings.tokenSecret,
resave: false,
saveUninitialized: false,
cookie: {
// also send the cookie if we are not on https (not every user has https)
secure: false
}
})
)
// init passport.js
app.use(passport.initialize())
// register passport in express-session
@ -227,14 +230,16 @@ class Server {
this.server = http.createServer(app)
router.use(fileUpload({
defCharset: 'utf8',
defParamCharset: 'utf8',
useTempFiles: true,
tempFileDir: Path.join(global.MetadataPath, 'tmp')
}))
router.use(express.urlencoded({ extended: true, limit: "5mb" }))
router.use(express.json({ limit: "5mb" }))
router.use(
fileUpload({
defCharset: 'utf8',
defParamCharset: 'utf8',
useTempFiles: true,
tempFileDir: Path.join(global.MetadataPath, 'tmp')
})
)
router.use(express.urlencoded({ extended: true, limit: '5mb' }))
router.use(express.json({ limit: '5mb' }))
// Static path to generated nuxt
const distPath = Path.join(global.appRoot, '/client/dist')
@ -363,7 +368,7 @@ class Server {
const mediaProgressRemoved = await Database.mediaProgressModel.destroy({
where: {
id: {
[Sequelize.Op.in]: mediaProgressToRemove.map(mp => mp.id)
[Sequelize.Op.in]: mediaProgressToRemove.map((mp) => mp.id)
}
}
})
@ -377,15 +382,18 @@ class Server {
for (const _user of users) {
let hasUpdated = false
if (_user.seriesHideFromContinueListening.length) {
const seriesHiding = (await Database.seriesModel.findAll({
where: {
id: _user.seriesHideFromContinueListening
},
attributes: ['id'],
raw: true
})).map(se => se.id)
_user.seriesHideFromContinueListening = _user.seriesHideFromContinueListening.filter(seriesId => {
if (!seriesHiding.includes(seriesId)) { // Series removed
const seriesHiding = (
await Database.seriesModel.findAll({
where: {
id: _user.seriesHideFromContinueListening
},
attributes: ['id'],
raw: true
})
).map((se) => se.id)
_user.seriesHideFromContinueListening = _user.seriesHideFromContinueListening.filter((seriesId) => {
if (!seriesHiding.includes(seriesId)) {
// Series removed
hasUpdated = true
return false
}

View file

@ -103,15 +103,28 @@ class FolderWatcher extends EventEmitter {
this.buildLibraryWatcher(library)
}
/**
*
* @param {import('./objects/Library')} library
*/
updateLibrary(library) {
if (this.disabled || library.settings.disableWatcher) return
var libwatcher = this.libraryWatchers.find(lib => lib.id === library.id)
if (this.disabled) return
const libwatcher = this.libraryWatchers.find(lib => lib.id === library.id)
if (libwatcher) {
// Library watcher was disabled
if (library.settings.disableWatcher) {
Logger.info(`[Watcher] updateLibrary: Library "${library.name}" watcher disabled`)
libwatcher.watcher.close()
this.libraryWatchers = this.libraryWatchers.filter(lw => lw.id !== libwatcher.id)
return
}
libwatcher.name = library.name
// If any folder paths were added or removed then re-init watcher
var pathsToAdd = library.folderPaths.filter(path => !libwatcher.paths.includes(path))
var pathsRemoved = libwatcher.paths.filter(path => !library.folderPaths.includes(path))
const pathsToAdd = library.folderPaths.filter(path => !libwatcher.paths.includes(path))
const pathsRemoved = libwatcher.paths.filter(path => !library.folderPaths.includes(path))
if (pathsToAdd.length || pathsRemoved.length) {
Logger.info(`[Watcher] Re-Initializing watcher for "${library.name}".`)
@ -119,6 +132,10 @@ class FolderWatcher extends EventEmitter {
this.libraryWatchers = this.libraryWatchers.filter(lw => lw.id !== libwatcher.id)
this.buildLibraryWatcher(library)
}
} else if (!library.settings.disableWatcher) {
// Library watcher was enabled
Logger.info(`[Watcher] updateLibrary: Library "${library.name}" watcher enabled - initializing`)
this.buildLibraryWatcher(library)
}
}

View file

@ -15,7 +15,7 @@ const naturalSort = createNewSortInstance({
comparer: new Intl.Collator(undefined, { numeric: true, sensitivity: 'base' }).compare
})
class AuthorController {
constructor() { }
constructor() {}
async findOne(req, res) {
const include = (req.query.include || '').split(',')
@ -32,7 +32,6 @@ class AuthorController {
authorJson.libraryItems.forEach((li) => {
if (li.media.metadata.series) {
li.media.metadata.series.forEach((series) => {
const itemWithSeries = li.toJSONMinified()
itemWithSeries.media.metadata.series = series
@ -50,14 +49,14 @@ class AuthorController {
})
// Sort series items
for (const key in seriesMap) {
seriesMap[key].items = naturalSort(seriesMap[key].items).asc(li => li.media.metadata.series.sequence)
seriesMap[key].items = naturalSort(seriesMap[key].items).asc((li) => li.media.metadata.series.sequence)
}
authorJson.series = Object.values(seriesMap)
}
// Minify library items
authorJson.libraryItems = authorJson.libraryItems.map(li => li.toJSONMinified())
authorJson.libraryItems = authorJson.libraryItems.map((li) => li.toJSONMinified())
}
return res.json(authorJson)
@ -91,7 +90,8 @@ class AuthorController {
if (existingAuthor) {
const bookAuthorsToCreate = []
const itemsWithAuthor = await Database.libraryItemModel.getForAuthor(req.author)
itemsWithAuthor.forEach(libraryItem => { // Replace old author with merging author for each book
itemsWithAuthor.forEach((libraryItem) => {
// Replace old author with merging author for each book
libraryItem.media.metadata.replaceAuthor(req.author, existingAuthor)
bookAuthorsToCreate.push({
bookId: libraryItem.media.id,
@ -101,7 +101,10 @@ class AuthorController {
if (itemsWithAuthor.length) {
await Database.removeBulkBookAuthors(req.author.id) // Remove all old BookAuthor
await Database.createBulkBookAuthors(bookAuthorsToCreate) // Create all new BookAuthor
SocketAuthority.emitter('items_updated', itemsWithAuthor.map(li => li.toJSONExpanded()))
SocketAuthority.emitter(
'items_updated',
itemsWithAuthor.map((li) => li.toJSONExpanded())
)
}
// Remove old author
@ -118,7 +121,8 @@ class AuthorController {
author: existingAuthor.toJSON(),
merged: true
})
} else { // Regular author update
} else {
// Regular author update
if (req.author.update(payload)) {
hasUpdated = true
}
@ -127,12 +131,16 @@ class AuthorController {
req.author.updatedAt = Date.now()
const itemsWithAuthor = await Database.libraryItemModel.getForAuthor(req.author)
if (authorNameUpdate) { // Update author name on all books
itemsWithAuthor.forEach(libraryItem => {
if (authorNameUpdate) {
// Update author name on all books
itemsWithAuthor.forEach((libraryItem) => {
libraryItem.media.metadata.updateAuthor(req.author)
})
if (itemsWithAuthor.length) {
SocketAuthority.emitter('items_updated', itemsWithAuthor.map(li => li.toJSONExpanded()))
SocketAuthority.emitter(
'items_updated',
itemsWithAuthor.map((li) => li.toJSONExpanded())
)
}
}
@ -150,9 +158,9 @@ class AuthorController {
/**
* DELETE: /api/authors/:id
* Remove author from all books and delete
*
* @param {import('express').Request} req
* @param {import('express').Response} res
*
* @param {import('express').Request} req
* @param {import('express').Response} res
*/
async delete(req, res) {
Logger.info(`[AuthorController] Removing author "${req.author.name}"`)
@ -174,9 +182,9 @@ class AuthorController {
/**
* POST: /api/authors/:id/image
* Upload author image from web URL
*
* @param {import('express').Request} req
* @param {import('express').Response} res
*
* @param {import('express').Request} req
* @param {import('express').Response} res
*/
async uploadImage(req, res) {
if (!req.user.canUpload) {
@ -206,6 +214,7 @@ class AuthorController {
}
req.author.imagePath = result.path
req.author.updatedAt = Date.now()
await Database.authorModel.updateFromOld(req.author)
const numBooks = (await Database.libraryItemModel.getForAuthor(req.author)).length
@ -218,9 +227,9 @@ class AuthorController {
/**
* DELETE: /api/authors/:id/image
* Remove author image & delete image file
*
* @param {import('express').Request} req
* @param {import('express').Response} res
*
* @param {import('express').Request} req
* @param {import('express').Response} res
*/
async deleteImage(req, res) {
if (!req.author.imagePath) {
@ -292,10 +301,14 @@ class AuthorController {
// GET api/authors/:id/image
async getImage(req, res) {
const { query: { width, height, format, raw }, author } = req
const {
query: { width, height, format, raw },
author
} = req
if (raw) { // any value
if (!author.imagePath || !await fs.pathExists(author.imagePath)) {
if (raw) {
// any value
if (!author.imagePath || !(await fs.pathExists(author.imagePath))) {
return res.sendStatus(404)
}
@ -326,4 +339,4 @@ class AuthorController {
next()
}
}
module.exports = new AuthorController()
module.exports = new AuthorController()

View file

@ -49,8 +49,13 @@ class BackupController {
res.sendFile(req.backup.fullPath)
}
/**
*
* @param {import('express').Request} req
* @param {import('express').Response} res
*/
apply(req, res) {
this.backupManager.requestApplyBackup(req.backup, res)
this.backupManager.requestApplyBackup(this.apiCacheManager, req.backup, res)
}
middleware(req, res, next) {

View file

@ -23,7 +23,7 @@ const libraryItemsPodcastFilters = require('../utils/queries/libraryItemsPodcast
const authorFilters = require('../utils/queries/authorFilters')
class LibraryController {
constructor() { }
constructor() {}
async create(req, res) {
const newLibraryPayload = {
@ -35,7 +35,7 @@ class LibraryController {
// Validate that the custom provider exists if given any
if (newLibraryPayload.provider?.startsWith('custom-')) {
if (!await Database.customMetadataProviderModel.checkExistsBySlug(newLibraryPayload.provider)) {
if (!(await Database.customMetadataProviderModel.checkExistsBySlug(newLibraryPayload.provider))) {
Logger.error(`[LibraryController] Custom metadata provider "${newLibraryPayload.provider}" does not exist`)
return res.status(400).send('Custom metadata provider does not exist')
}
@ -43,14 +43,15 @@ class LibraryController {
// Validate folder paths exist or can be created & resolve rel paths
// returns 400 if a folder fails to access
newLibraryPayload.folders = newLibraryPayload.folders.map(f => {
newLibraryPayload.folders = newLibraryPayload.folders.map((f) => {
f.fullPath = fileUtils.filePathToPOSIX(Path.resolve(f.fullPath))
return f
})
for (const folder of newLibraryPayload.folders) {
try {
const direxists = await fs.pathExists(folder.fullPath)
if (!direxists) { // If folder does not exist try to make it and set file permissions/owner
if (!direxists) {
// If folder does not exist try to make it and set file permissions/owner
await fs.mkdir(folder.fullPath)
}
} catch (error) {
@ -85,20 +86,20 @@ class LibraryController {
const librariesAccessible = req.user.librariesAccessible || []
if (librariesAccessible.length) {
return res.json({
libraries: libraries.filter(lib => librariesAccessible.includes(lib.id)).map(lib => lib.toJSON())
libraries: libraries.filter((lib) => librariesAccessible.includes(lib.id)).map((lib) => lib.toJSON())
})
}
res.json({
libraries: libraries.map(lib => lib.toJSON())
libraries: libraries.map((lib) => lib.toJSON())
})
}
/**
* GET: /api/libraries/:id
*
* @param {import('express').Request} req
* @param {import('express').Response} res
*
* @param {import('express').Request} req
* @param {import('express').Response} res
*/
async findOne(req, res) {
const includeArray = (req.query.include || '').split(',')
@ -120,20 +121,27 @@ class LibraryController {
/**
* GET: /api/libraries/:id/episode-downloads
* Get podcast episodes in download queue
* @param {*} req
* @param {*} res
* @param {*} req
* @param {*} res
*/
async getEpisodeDownloadQueue(req, res) {
const libraryDownloadQueueDetails = this.podcastManager.getDownloadQueueDetails(req.library.id)
res.json(libraryDownloadQueueDetails)
}
/**
* PATCH: /api/libraries/:id
*
* @param {import('express').Request} req
* @param {import('express').Response} res
*/
async update(req, res) {
/** @type {import('../objects/Library')} */
const library = req.library
// Validate that the custom provider exists if given any
if (req.body.provider?.startsWith('custom-')) {
if (!await Database.customMetadataProviderModel.checkExistsBySlug(req.body.provider)) {
if (!(await Database.customMetadataProviderModel.checkExistsBySlug(req.body.provider))) {
Logger.error(`[LibraryController] Custom metadata provider "${req.body.provider}" does not exist`)
return res.status(400).send('Custom metadata provider does not exist')
}
@ -143,7 +151,7 @@ class LibraryController {
// returns 400 if a new folder fails to access
if (req.body.folders) {
const newFolderPaths = []
req.body.folders = req.body.folders.map(f => {
req.body.folders = req.body.folders.map((f) => {
if (!f.id) {
f.fullPath = fileUtils.filePathToPOSIX(Path.resolve(f.fullPath))
newFolderPaths.push(f.fullPath)
@ -154,10 +162,13 @@ class LibraryController {
const pathExists = await fs.pathExists(path)
if (!pathExists) {
// Ensure dir will recursively create directories which might be preferred over mkdir
const success = await fs.ensureDir(path).then(() => true).catch((error) => {
Logger.error(`[LibraryController] Failed to ensure folder dir "${path}"`, error)
return false
})
const success = await fs
.ensureDir(path)
.then(() => true)
.catch((error) => {
Logger.error(`[LibraryController] Failed to ensure folder dir "${path}"`, error)
return false
})
if (!success) {
return res.status(400).send(`Invalid folder directory "${path}"`)
}
@ -166,7 +177,7 @@ class LibraryController {
// Handle removing folders
for (const folder of library.folders) {
if (!req.body.folders.some(f => f.id === folder.id)) {
if (!req.body.folders.some((f) => f.id === folder.id)) {
// Remove library items in folder
const libraryItemsInFolder = await Database.libraryItemModel.findAll({
where: {
@ -188,7 +199,7 @@ class LibraryController {
for (const libraryItem of libraryItemsInFolder) {
let mediaItemIds = []
if (library.isPodcast) {
mediaItemIds = libraryItem.media.podcastEpisodes.map(pe => pe.id)
mediaItemIds = libraryItem.media.podcastEpisodes.map((pe) => pe.id)
} else {
mediaItemIds.push(libraryItem.mediaId)
}
@ -224,8 +235,8 @@ class LibraryController {
/**
* DELETE: /api/libraries/:id
* Delete a library
* @param {*} req
* @param {*} res
* @param {*} req
* @param {*} res
*/
async delete(req, res) {
const library = req.library
@ -260,7 +271,7 @@ class LibraryController {
for (const libraryItem of libraryItemsInLibrary) {
let mediaItemIds = []
if (library.isPodcast) {
mediaItemIds = libraryItem.media.podcastEpisodes.map(pe => pe.id)
mediaItemIds = libraryItem.media.podcastEpisodes.map((pe) => pe.id)
} else {
mediaItemIds.push(libraryItem.mediaId)
}
@ -286,12 +297,15 @@ class LibraryController {
/**
* GET /api/libraries/:id/items
*
* @param {import('express').Request} req
* @param {import('express').Response} res
*
* @param {import('express').Request} req
* @param {import('express').Response} res
*/
async getLibraryItems(req, res) {
const include = (req.query.include || '').split(',').map(v => v.trim().toLowerCase()).filter(v => !!v)
const include = (req.query.include || '')
.split(',')
.map((v) => v.trim().toLowerCase())
.filter((v) => !!v)
const payload = {
results: [],
@ -309,7 +323,9 @@ class LibraryController {
payload.offset = payload.page * payload.limit
// TODO: Temporary way of handling collapse sub-series. Either remove feature or handle through sql queries
if (payload.filterBy?.split('.')[0] === 'series' && payload.collapseseries) {
const filterByGroup = payload.filterBy?.split('.').shift()
const filterByValue = filterByGroup ? libraryFilters.decode(payload.filterBy.replace(`${filterByGroup}.`, '')) : null
if (filterByGroup === 'series' && filterByValue !== 'no-series' && payload.collapseseries) {
const seriesId = libraryFilters.decode(payload.filterBy.split('.')[1])
payload.results = await libraryHelpers.handleCollapseSubseries(payload, seriesId, req.user, req.library)
} else {
@ -324,8 +340,8 @@ class LibraryController {
/**
* DELETE: /libraries/:id/issues
* Remove all library items missing or invalid
* @param {import('express').Request} req
* @param {import('express').Response} res
* @param {import('express').Request} req
* @param {import('express').Response} res
*/
async removeLibraryItemsWithIssues(req, res) {
const libraryItemsWithIssues = await Database.libraryItemModel.findAll({
@ -362,7 +378,7 @@ class LibraryController {
for (const libraryItem of libraryItemsWithIssues) {
let mediaItemIds = []
if (req.library.isPodcast) {
mediaItemIds = libraryItem.media.podcastEpisodes.map(pe => pe.id)
mediaItemIds = libraryItem.media.podcastEpisodes.map((pe) => pe.id)
} else {
mediaItemIds.push(libraryItem.mediaId)
}
@ -379,14 +395,17 @@ class LibraryController {
}
/**
* GET: /api/libraries/:id/series
* Optional query string: `?include=rssfeed` that adds `rssFeed` to series if a feed is open
*
* @param {import('express').Request} req
* @param {import('express').Response} res
*/
* GET: /api/libraries/:id/series
* Optional query string: `?include=rssfeed` that adds `rssFeed` to series if a feed is open
*
* @param {import('express').Request} req
* @param {import('express').Response} res
*/
async getAllSeriesForLibrary(req, res) {
const include = (req.query.include || '').split(',').map(v => v.trim().toLowerCase()).filter(v => !!v)
const include = (req.query.include || '')
.split(',')
.map((v) => v.trim().toLowerCase())
.filter((v) => !!v)
const payload = {
results: [],
@ -414,12 +433,15 @@ class LibraryController {
* Optional includes (e.g. `?include=rssfeed,progress`)
* rssfeed: adds `rssFeed` to series object if a feed is open
* progress: adds `progress` to series object with { libraryItemIds:Array<llid>, libraryItemIdsFinished:Array<llid>, isFinished:boolean }
*
* @param {import('express').Request} req
*
* @param {import('express').Request} req
* @param {import('express').Response} res - Series
*/
async getSeriesForLibrary(req, res) {
const include = (req.query.include || '').split(',').map(v => v.trim().toLowerCase()).filter(v => !!v)
const include = (req.query.include || '')
.split(',')
.map((v) => v.trim().toLowerCase())
.filter((v) => !!v)
const series = await Database.seriesModel.findByPk(req.params.seriesId)
if (!series) return res.sendStatus(404)
@ -429,10 +451,10 @@ class LibraryController {
const seriesJson = oldSeries.toJSON()
if (include.includes('progress')) {
const libraryItemsFinished = libraryItemsInSeries.filter(li => !!req.user.getMediaProgress(li.id)?.isFinished)
const libraryItemsFinished = libraryItemsInSeries.filter((li) => !!req.user.getMediaProgress(li.id)?.isFinished)
seriesJson.progress = {
libraryItemIds: libraryItemsInSeries.map(li => li.id),
libraryItemIdsFinished: libraryItemsFinished.map(li => li.id),
libraryItemIds: libraryItemsInSeries.map((li) => li.id),
libraryItemIdsFinished: libraryItemsFinished.map((li) => li.id),
isFinished: libraryItemsFinished.length >= libraryItemsInSeries.length
}
}
@ -448,11 +470,14 @@ class LibraryController {
/**
* GET: /api/libraries/:id/collections
* Get all collections for library
* @param {*} req
* @param {*} res
* @param {*} req
* @param {*} res
*/
async getCollectionsForLibrary(req, res) {
const include = (req.query.include || '').split(',').map(v => v.trim().toLowerCase()).filter(v => !!v)
const include = (req.query.include || '')
.split(',')
.map((v) => v.trim().toLowerCase())
.filter((v) => !!v)
const payload = {
results: [],
@ -483,12 +508,11 @@ class LibraryController {
/**
* GET: /api/libraries/:id/playlists
* Get playlists for user in library
* @param {*} req
* @param {*} res
* @param {*} req
* @param {*} res
*/
async getUserPlaylistsForLibrary(req, res) {
let playlistsForUser = await Database.playlistModel.getPlaylistsForUserAndLibrary(req.user.id, req.library.id)
playlistsForUser = await Promise.all(playlistsForUser.map(async p => p.getOldJsonExpanded()))
let playlistsForUser = await Database.playlistModel.getOldPlaylistsForUserAndLibrary(req.user.id, req.library.id)
const payload = {
results: [],
@ -508,8 +532,8 @@ class LibraryController {
/**
* GET: /api/libraries/:id/filterdata
* @param {import('express').Request} req
* @param {import('express').Response} res
* @param {import('express').Request} req
* @param {import('express').Response} res
*/
async getLibraryFilterData(req, res) {
const filterData = await libraryFilters.getFilterData(req.library.mediaType, req.library.id)
@ -519,12 +543,15 @@ class LibraryController {
/**
* GET: /api/libraries/:id/personalized
* Home page shelves
* @param {import('express').Request} req
* @param {import('express').Response} res
* @param {import('express').Request} req
* @param {import('express').Response} res
*/
async getUserPersonalizedShelves(req, res) {
const limitPerShelf = req.query.limit && !isNaN(req.query.limit) ? Number(req.query.limit) || 10 : 10
const include = (req.query.include || '').split(',').map(v => v.trim().toLowerCase()).filter(v => !!v)
const include = (req.query.include || '')
.split(',')
.map((v) => v.trim().toLowerCase())
.filter((v) => !!v)
const shelves = await Database.libraryItemModel.getPersonalizedShelves(req.library, req.user, include, limitPerShelf)
res.json(shelves)
}
@ -532,8 +559,8 @@ class LibraryController {
/**
* POST: /api/libraries/order
* Change the display order of libraries
* @param {import('express').Request} req
* @param {import('express').Response} res
* @param {import('express').Request} req
* @param {import('express').Response} res
*/
async reorder(req, res) {
if (!req.user.isAdminOrUp) {
@ -545,7 +572,7 @@ class LibraryController {
const orderdata = req.body
let hasUpdates = false
for (let i = 0; i < orderdata.length; i++) {
const library = libraries.find(lib => lib.id === orderdata[i].id)
const library = libraries.find((lib) => lib.id === orderdata[i].id)
if (!library) {
Logger.error(`[LibraryController] Invalid library not found in reorder ${orderdata[i].id}`)
return res.sendStatus(500)
@ -564,7 +591,7 @@ class LibraryController {
}
res.json({
libraries: libraries.map(lib => lib.toJSON())
libraries: libraries.map((lib) => lib.toJSON())
})
}
@ -572,8 +599,8 @@ class LibraryController {
* GET: /api/libraries/:id/search
* Search library items with query
* ?q=search
* @param {import('express').Request} req
* @param {import('express').Response} res
* @param {import('express').Request} req
* @param {import('express').Response} res
*/
async search(req, res) {
if (!req.query.q || typeof req.query.q !== 'string') {
@ -589,8 +616,8 @@ class LibraryController {
/**
* GET: /api/libraries/:id/stats
* Get stats for library
* @param {import('express').Request} req
* @param {import('express').Response} res
* @param {import('express').Request} req
* @param {import('express').Response} res
*/
async stats(req, res) {
const stats = {
@ -598,12 +625,12 @@ class LibraryController {
}
if (req.library.isBook) {
const authors = await authorFilters.getAuthorsWithCount(req.library.id)
const authors = await authorFilters.getAuthorsWithCount(req.library.id, 10)
const genres = await libraryItemsBookFilters.getGenresWithCount(req.library.id)
const bookStats = await libraryItemsBookFilters.getBookLibraryStats(req.library.id)
const longestBooks = await libraryItemsBookFilters.getLongestBooks(req.library.id, 10)
stats.totalAuthors = authors.length
stats.totalAuthors = await authorFilters.getAuthorsTotalCount(req.library.id)
stats.authorsWithCount = authors
stats.totalGenres = genres.length
stats.genresWithCount = genres
@ -631,8 +658,8 @@ class LibraryController {
/**
* GET: /api/libraries/:id/authors
* Get authors for library
* @param {import('express').Request} req
* @param {import('express').Response} res
* @param {import('express').Request} req
* @param {import('express').Response} res
*/
async getAuthors(req, res) {
const { bookWhere, replacements } = libraryItemsBookFilters.getUserPermissionBookWhereQuery(req.user)
@ -650,9 +677,7 @@ class LibraryController {
attributes: []
}
},
order: [
[Sequelize.literal('name COLLATE NOCASE'), 'ASC']
]
order: [[Sequelize.literal('name COLLATE NOCASE'), 'ASC']]
})
const oldAuthors = []
@ -660,6 +685,7 @@ class LibraryController {
for (const author of authors) {
const oldAuthor = author.getOldAuthor().toJSON()
oldAuthor.numBooks = author.books.length
oldAuthor.lastFirst = author.lastFirst
oldAuthors.push(oldAuthor)
}
@ -670,8 +696,8 @@ class LibraryController {
/**
* GET: /api/libraries/:id/narrators
* @param {*} req
* @param {*} res
* @param {*} req
* @param {*} res
*/
async getNarrators(req, res) {
// Get all books with narrators
@ -691,7 +717,7 @@ class LibraryController {
const narrators = {}
for (const book of booksWithNarrators) {
book.narrators.forEach(n => {
book.narrators.forEach((n) => {
if (typeof n !== 'string') {
Logger.error(`[LibraryController] getNarrators: Invalid narrator "${n}" on book "${book.title}"`)
} else if (!narrators[n]) {
@ -707,7 +733,7 @@ class LibraryController {
}
res.json({
narrators: naturalSort(Object.values(narrators)).asc(n => n.name)
narrators: naturalSort(Object.values(narrators)).asc((n) => n.name)
})
}
@ -716,8 +742,8 @@ class LibraryController {
* Update narrator name
* :narratorId is base64 encoded name
* req.body { name }
* @param {*} req
* @param {*} res
* @param {*} req
* @param {*} res
*/
async updateNarrator(req, res) {
if (!req.user.canUpdate) {
@ -739,7 +765,7 @@ class LibraryController {
const itemsWithNarrator = await libraryItemFilters.getAllLibraryItemsWithNarrators([narratorName])
for (const libraryItem of itemsWithNarrator) {
libraryItem.media.narrators = libraryItem.media.narrators.filter(n => n !== narratorName)
libraryItem.media.narrators = libraryItem.media.narrators.filter((n) => n !== narratorName)
if (!libraryItem.media.narrators.includes(updatedName)) {
libraryItem.media.narrators.push(updatedName)
}
@ -751,7 +777,10 @@ class LibraryController {
}
if (itemsUpdated.length) {
SocketAuthority.emitter('items_updated', itemsUpdated.map(li => li.toJSONExpanded()))
SocketAuthority.emitter(
'items_updated',
itemsUpdated.map((li) => li.toJSONExpanded())
)
}
res.json({
@ -763,8 +792,8 @@ class LibraryController {
* DELETE: /api/libraries/:id/narrators/:narratorId
* Remove narrator
* :narratorId is base64 encoded name
* @param {*} req
* @param {*} res
* @param {*} req
* @param {*} res
*/
async removeNarrator(req, res) {
if (!req.user.canUpdate) {
@ -782,7 +811,7 @@ class LibraryController {
const itemsWithNarrator = await libraryItemFilters.getAllLibraryItemsWithNarrators([narratorName])
for (const libraryItem of itemsWithNarrator) {
libraryItem.media.narrators = libraryItem.media.narrators.filter(n => n !== narratorName)
libraryItem.media.narrators = libraryItem.media.narrators.filter((n) => n !== narratorName)
await libraryItem.media.update({
narrators: libraryItem.media.narrators
})
@ -791,7 +820,10 @@ class LibraryController {
}
if (itemsUpdated.length) {
SocketAuthority.emitter('items_updated', itemsUpdated.map(li => li.toJSONExpanded()))
SocketAuthority.emitter(
'items_updated',
itemsUpdated.map((li) => li.toJSONExpanded())
)
}
res.json({
@ -802,9 +834,9 @@ class LibraryController {
/**
* GET: /api/libraries/:id/matchall
* Quick match all library items. Book libraries only.
*
* @param {import('express').Request} req
* @param {import('express').Response} res
*
* @param {import('express').Request} req
* @param {import('express').Response} res
*/
async matchAll(req, res) {
if (!req.user.isAdminOrUp) {
@ -819,9 +851,9 @@ class LibraryController {
* POST: /api/libraries/:id/scan
* Optional query:
* ?force=1
*
* @param {import('express').Request} req
* @param {import('express').Response} res
*
* @param {import('express').Request} req
* @param {import('express').Response} res
*/
async scan(req, res) {
if (!req.user.isAdminOrUp) {
@ -840,8 +872,8 @@ class LibraryController {
/**
* GET: /api/libraries/:id/recent-episodes
* Used for latest page
* @param {import('express').Request} req
* @param {import('express').Response} res
* @param {import('express').Request} req
* @param {import('express').Response} res
*/
async getRecentEpisodes(req, res) {
if (!req.library.isPodcast) {
@ -851,7 +883,7 @@ class LibraryController {
const payload = {
episodes: [],
limit: req.query.limit && !isNaN(req.query.limit) ? Number(req.query.limit) : 0,
page: req.query.page && !isNaN(req.query.page) ? Number(req.query.page) : 0,
page: req.query.page && !isNaN(req.query.page) ? Number(req.query.page) : 0
}
const offset = payload.page * payload.limit
@ -862,8 +894,8 @@ class LibraryController {
/**
* GET: /api/libraries/:id/opml
* Get OPML file for a podcast library
* @param {import('express').Request} req
* @param {import('express').Response} res
* @param {import('express').Request} req
* @param {import('express').Response} res
*/
async getOPMLFile(req, res) {
const userPermissionPodcastWhere = libraryItemsPodcastFilters.getUserPermissionPodcastWhereQuery(req.user)
@ -887,9 +919,9 @@ class LibraryController {
/**
* Remove all metadata.json or metadata.abs files in library item folders
*
* @param {import('express').Request} req
* @param {import('express').Response} res
*
* @param {import('express').Request} req
* @param {import('express').Response} res
*/
async removeAllMetadataFiles(req, res) {
if (!req.user.isAdminOrUp) {
@ -921,10 +953,10 @@ class LibraryController {
let numRemoved = 0
for (const libraryItem of libraryItemsWithMetadata) {
const metadataFilepath = libraryItem.libraryFiles.find(lf => lf.metadata.filename === metadataFilename)?.metadata.path
const metadataFilepath = libraryItem.libraryFiles.find((lf) => lf.metadata.filename === metadataFilename)?.metadata.path
if (!metadataFilepath) continue
Logger.debug(`[LibraryController] Removing file "${metadataFilepath}"`)
if ((await fileUtils.removeFile(metadataFilepath))) {
if (await fileUtils.removeFile(metadataFilepath)) {
numRemoved++
}
}
@ -937,9 +969,9 @@ class LibraryController {
/**
* Middleware that is not using libraryItems from memory
* @param {import('express').Request} req
* @param {import('express').Response} res
* @param {import('express').NextFunction} next
* @param {import('express').Request} req
* @param {import('express').Response} res
* @param {import('express').NextFunction} next
*/
async middleware(req, res, next) {
if (!req.user.checkCanAccessLibrary(req.params.id)) {

View file

@ -117,13 +117,22 @@ class LibraryItemController {
zipHelpers.zipDirectoryPipe(libraryItemPath, filename, res)
}
//
// PATCH: will create new authors & series if in payload
//
/**
* PATCH: /items/:id/media
* Update media for a library item. Will create new authors & series when necessary
*
* @param {import('express').Request} req
* @param {import('express').Response} res
*/
async updateMedia(req, res) {
const libraryItem = req.libraryItem
const mediaPayload = req.body
if (mediaPayload.url) {
await LibraryItemController.prototype.uploadCover.bind(this)(req, res, false)
if (res.writableEnded || res.headersSent) return
}
// Book specific
if (libraryItem.isBook) {
await this.createAuthorsAndSeriesForItemUpdate(mediaPayload, libraryItem.libraryId)
@ -146,7 +155,7 @@ class LibraryItemController {
seriesRemoved = libraryItem.media.metadata.series.filter(se => !seriesIdsInUpdate.includes(se.id))
}
const hasUpdates = libraryItem.media.update(mediaPayload)
const hasUpdates = libraryItem.media.update(mediaPayload) || mediaPayload.url
if (hasUpdates) {
libraryItem.updatedAt = Date.now()
@ -171,7 +180,7 @@ class LibraryItemController {
}
// POST: api/items/:id/cover
async uploadCover(req, res) {
async uploadCover(req, res, updateAndReturnJson = true) {
if (!req.user.canUpload) {
Logger.warn('User attempted to upload a cover without permission', req.user)
return res.sendStatus(403)
@ -196,12 +205,14 @@ class LibraryItemController {
return res.status(500).send('Unknown error occurred')
}
await Database.updateLibraryItem(libraryItem)
SocketAuthority.emitter('item_updated', libraryItem.toJSONExpanded())
res.json({
success: true,
cover: result.cover
})
if (updateAndReturnJson) {
await Database.updateLibraryItem(libraryItem)
SocketAuthority.emitter('item_updated', libraryItem.toJSONExpanded())
res.json({
success: true,
cover: result.cover
})
}
}
// PATCH: api/items/:id/cover
@ -276,6 +287,9 @@ class LibraryItemController {
return res.sendStatus(404)
}
if (req.query.ts)
res.set('Cache-Control', 'private, max-age=86400')
if (raw) { // any value
if (global.XAccel) {
const encodedURI = encodeUriPath(global.XAccel + libraryItem.media.coverPath)

View file

@ -6,7 +6,7 @@ const { toNumber } = require('../utils/index')
const userStats = require('../utils/queries/userStats')
class MeController {
constructor() { }
constructor() {}
getCurrentUser(req, res) {
res.json(req.user.toJSONForBrowser())
@ -33,6 +33,43 @@ class MeController {
res.json(payload)
}
/**
* GET: /api/me/item/listening-sessions/:libraryItemId/:episodeId
*
* @this import('../routers/ApiRouter')
*
* @param {import('express').Request} req
* @param {import('express').Response} res
*/
async getItemListeningSessions(req, res) {
const libraryItem = await Database.libraryItemModel.findByPk(req.params.libraryItemId)
const episode = await Database.podcastEpisodeModel.findByPk(req.params.episodeId)
if (!libraryItem || (libraryItem.mediaType === 'podcast' && !episode)) {
Logger.error(`[MeController] Media item not found for library item id "${req.params.libraryItemId}"`)
return res.sendStatus(404)
}
const mediaItemId = episode?.id || libraryItem.mediaId
let listeningSessions = await this.getUserItemListeningSessionsHelper(req.user.id, mediaItemId)
const itemsPerPage = toNumber(req.query.itemsPerPage, 10) || 10
const page = toNumber(req.query.page, 0)
const start = page * itemsPerPage
const sessions = listeningSessions.slice(start, start + itemsPerPage)
const payload = {
total: listeningSessions.length,
numPages: Math.ceil(listeningSessions.length / itemsPerPage),
page,
itemsPerPage,
sessions
}
res.json(payload)
}
// GET: api/me/listening-stats
async getListeningStats(req, res) {
const listeningStats = await this.getUserListeningStatsHelpers(req.user.id)
@ -80,7 +117,7 @@ class MeController {
if (!libraryItem) {
return res.status(404).send('Item not found')
}
if (!libraryItem.media.episodes.find(ep => ep.id === episodeId)) {
if (!libraryItem.media.episodes.find((ep) => ep.id === episodeId)) {
Logger.error(`[MeController] removeEpisode episode ${episodeId} not found for item ${libraryItem.id}`)
return res.status(404).send('Episode not found')
}
@ -123,7 +160,7 @@ class MeController {
// POST: api/me/item/:id/bookmark
async createBookmark(req, res) {
if (!await Database.libraryItemModel.checkExistsById(req.params.id)) return res.sendStatus(404)
if (!(await Database.libraryItemModel.checkExistsById(req.params.id))) return res.sendStatus(404)
const { time, title } = req.body
const bookmark = req.user.createBookmark(req.params.id, time, title)
@ -134,7 +171,7 @@ class MeController {
// PATCH: api/me/item/:id/bookmark
async updateBookmark(req, res) {
if (!await Database.libraryItemModel.checkExistsById(req.params.id)) return res.sendStatus(404)
if (!(await Database.libraryItemModel.checkExistsById(req.params.id))) return res.sendStatus(404)
const { time, title } = req.body
if (!req.user.findBookmark(req.params.id, time)) {
@ -152,7 +189,7 @@ class MeController {
// DELETE: api/me/item/:id/bookmark/:time
async removeBookmark(req, res) {
if (!await Database.libraryItemModel.checkExistsById(req.params.id)) return res.sendStatus(404)
if (!(await Database.libraryItemModel.checkExistsById(req.params.id))) return res.sendStatus(404)
const time = Number(req.params.time)
if (isNaN(time)) return res.sendStatus(500)
@ -254,11 +291,10 @@ class MeController {
// TODO: More efficient to do this in a single query
for (const mediaProgress of req.user.mediaProgress) {
if (!mediaProgress.isFinished && (mediaProgress.progress > 0 || mediaProgress.ebookProgress > 0)) {
const libraryItem = await Database.libraryItemModel.getOldById(mediaProgress.libraryItemId)
if (libraryItem) {
if (mediaProgress.episodeId && libraryItem.mediaType === 'podcast') {
const episode = libraryItem.media.episodes.find(ep => ep.id === mediaProgress.episodeId)
const episode = libraryItem.media.episodes.find((ep) => ep.id === mediaProgress.episodeId)
if (episode) {
const libraryItemWithEpisode = {
...libraryItem.toJSONMinified(),
@ -277,7 +313,9 @@ class MeController {
}
}
itemsInProgress = sort(itemsInProgress).desc(li => li.progressLastUpdate).slice(0, limit)
itemsInProgress = sort(itemsInProgress)
.desc((li) => li.progressLastUpdate)
.slice(0, limit)
res.json({
libraryItems: itemsInProgress
})
@ -317,19 +355,22 @@ class MeController {
// GET: api/me/progress/:id/remove-from-continue-listening
async removeItemFromContinueListening(req, res) {
const mediaProgress = req.user.mediaProgress.find(mp => mp.id === req.params.id)
const mediaProgress = req.user.mediaProgress.find((mp) => mp.id === req.params.id)
if (!mediaProgress) {
return res.sendStatus(404)
}
const hasUpdated = req.user.removeProgressFromContinueListening(req.params.id)
if (hasUpdated) {
await Database.mediaProgressModel.update({
hideFromContinueListening: true
}, {
where: {
id: mediaProgress.id
await Database.mediaProgressModel.update(
{
hideFromContinueListening: true
},
{
where: {
id: mediaProgress.id
}
}
})
)
SocketAuthority.clientEmitter(req.user.id, 'user_updated', req.user.toJSONForBrowser())
}
res.json(req.user.toJSONForBrowser())
@ -337,9 +378,9 @@ class MeController {
/**
* GET: /api/me/stats/year/:year
*
* @param {import('express').Request} req
* @param {import('express').Response} res
*
* @param {import('express').Request} req
* @param {import('express').Response} res
*/
async getStatsForYear(req, res) {
const year = Number(req.params.year)
@ -351,4 +392,4 @@ class MeController {
res.json(data)
}
}
module.exports = new MeController()
module.exports = new MeController()

View file

@ -284,7 +284,7 @@ class MiscController {
}
res.json({
tags: tags
tags: tags.sort((a, b) => a.toLowerCase().localeCompare(b.toLowerCase()))
})
}
@ -329,6 +329,7 @@ class MiscController {
await libraryItem.media.update({
tags: libraryItem.media.tags
})
await libraryItem.saveMetadataFile()
const oldLibraryItem = Database.libraryItemModel.getOldLibraryItem(libraryItem)
SocketAuthority.emitter('item_updated', oldLibraryItem.toJSONExpanded())
numItemsUpdated++
@ -370,6 +371,7 @@ class MiscController {
await libraryItem.media.update({
tags: libraryItem.media.tags
})
await libraryItem.saveMetadataFile()
const oldLibraryItem = Database.libraryItemModel.getOldLibraryItem(libraryItem)
SocketAuthority.emitter('item_updated', oldLibraryItem.toJSONExpanded())
numItemsUpdated++
@ -462,6 +464,7 @@ class MiscController {
await libraryItem.media.update({
genres: libraryItem.media.genres
})
await libraryItem.saveMetadataFile()
const oldLibraryItem = Database.libraryItemModel.getOldLibraryItem(libraryItem)
SocketAuthority.emitter('item_updated', oldLibraryItem.toJSONExpanded())
numItemsUpdated++
@ -503,6 +506,7 @@ class MiscController {
await libraryItem.media.update({
genres: libraryItem.media.genres
})
await libraryItem.saveMetadataFile()
const oldLibraryItem = Database.libraryItemModel.getOldLibraryItem(libraryItem)
SocketAuthority.emitter('item_updated', oldLibraryItem.toJSONExpanded())
numItemsUpdated++

View file

@ -10,6 +10,8 @@ const Logger = require('../Logger')
const { levenshteinDistance, escapeRegExp } = require('../utils/index')
class BookFinder {
#providerResponseTimeout = 30000
constructor() {
this.openLibrary = new OpenLibrary()
this.googleBooks = new GoogleBooks()
@ -36,63 +38,75 @@ class BookFinder {
filterSearchResults(books, title, author, maxTitleDistance, maxAuthorDistance) {
var searchTitle = cleanTitleForCompares(title)
var searchAuthor = cleanAuthorForCompares(author)
return books.map(b => {
b.cleanedTitle = cleanTitleForCompares(b.title)
b.titleDistance = levenshteinDistance(b.cleanedTitle, title)
return books
.map((b) => {
b.cleanedTitle = cleanTitleForCompares(b.title)
b.titleDistance = levenshteinDistance(b.cleanedTitle, title)
// Total length of search (title or both title & author)
b.totalPossibleDistance = b.title.length
// Total length of search (title or both title & author)
b.totalPossibleDistance = b.title.length
if (author) {
if (!b.author) {
b.authorDistance = author.length
} else {
b.totalPossibleDistance += b.author.length
b.cleanedAuthor = cleanAuthorForCompares(b.author)
if (author) {
if (!b.author) {
b.authorDistance = author.length
} else {
b.totalPossibleDistance += b.author.length
b.cleanedAuthor = cleanAuthorForCompares(b.author)
var cleanedAuthorDistance = levenshteinDistance(b.cleanedAuthor, searchAuthor)
var authorDistance = levenshteinDistance(b.author || '', author)
var cleanedAuthorDistance = levenshteinDistance(b.cleanedAuthor, searchAuthor)
var authorDistance = levenshteinDistance(b.author || '', author)
// Use best distance
b.authorDistance = Math.min(cleanedAuthorDistance, authorDistance)
// Use best distance
b.authorDistance = Math.min(cleanedAuthorDistance, authorDistance)
// Check book author contains searchAuthor
if (searchAuthor.length > 4 && b.cleanedAuthor.includes(searchAuthor)) b.includesAuthor = searchAuthor
else if (author.length > 4 && b.author.includes(author)) b.includesAuthor = author
// Check book author contains searchAuthor
if (searchAuthor.length > 4 && b.cleanedAuthor.includes(searchAuthor)) b.includesAuthor = searchAuthor
else if (author.length > 4 && b.author.includes(author)) b.includesAuthor = author
}
}
}
b.totalDistance = b.titleDistance + (b.authorDistance || 0)
b.totalDistance = b.titleDistance + (b.authorDistance || 0)
// Check book title contains the searchTitle
if (searchTitle.length > 4 && b.cleanedTitle.includes(searchTitle)) b.includesTitle = searchTitle
else if (title.length > 4 && b.title.includes(title)) b.includesTitle = title
// Check book title contains the searchTitle
if (searchTitle.length > 4 && b.cleanedTitle.includes(searchTitle)) b.includesTitle = searchTitle
else if (title.length > 4 && b.title.includes(title)) b.includesTitle = title
return b
}).filter(b => {
if (b.includesTitle) { // If search title was found in result title then skip over leven distance check
if (this.verbose) Logger.debug(`Exact title was included in "${b.title}", Search: "${b.includesTitle}"`)
} else if (b.titleDistance > maxTitleDistance) {
if (this.verbose) Logger.debug(`Filtering out search result title distance = ${b.titleDistance}: "${b.cleanedTitle}"/"${searchTitle}"`)
return false
}
if (author) {
if (b.includesAuthor) { // If search author was found in result author then skip over leven distance check
if (this.verbose) Logger.debug(`Exact author was included in "${b.author}", Search: "${b.includesAuthor}"`)
} else if (b.authorDistance > maxAuthorDistance) {
if (this.verbose) Logger.debug(`Filtering out search result "${b.author}", author distance = ${b.authorDistance}: "${b.author}"/"${author}"`)
return b
})
.filter((b) => {
if (b.includesTitle) {
// If search title was found in result title then skip over leven distance check
if (this.verbose) Logger.debug(`Exact title was included in "${b.title}", Search: "${b.includesTitle}"`)
} else if (b.titleDistance > maxTitleDistance) {
if (this.verbose) Logger.debug(`Filtering out search result title distance = ${b.titleDistance}: "${b.cleanedTitle}"/"${searchTitle}"`)
return false
}
}
// If book total search length < 5 and was not exact match, then filter out
if (b.totalPossibleDistance < 5 && b.totalDistance > 0) return false
return true
})
if (author) {
if (b.includesAuthor) {
// If search author was found in result author then skip over leven distance check
if (this.verbose) Logger.debug(`Exact author was included in "${b.author}", Search: "${b.includesAuthor}"`)
} else if (b.authorDistance > maxAuthorDistance) {
if (this.verbose) Logger.debug(`Filtering out search result "${b.author}", author distance = ${b.authorDistance}: "${b.author}"/"${author}"`)
return false
}
}
// If book total search length < 5 and was not exact match, then filter out
if (b.totalPossibleDistance < 5 && b.totalDistance > 0) return false
return true
})
}
/**
*
* @param {string} title
* @param {string} author
* @param {number} maxTitleDistance
* @param {number} maxAuthorDistance
* @returns {Promise<Object[]>}
*/
async getOpenLibResults(title, author, maxTitleDistance, maxAuthorDistance) {
var books = await this.openLibrary.searchTitle(title)
var books = await this.openLibrary.searchTitle(title, this.#providerResponseTimeout)
if (this.verbose) Logger.debug(`OpenLib Book Search Results: ${books.length || 0}`)
if (books.errorCode) {
Logger.error(`OpenLib Search Error ${books.errorCode}`)
@ -109,8 +123,14 @@ class BookFinder {
return booksFiltered
}
/**
*
* @param {string} title
* @param {string} author
* @returns {Promise<Object[]>}
*/
async getGoogleBooksResults(title, author) {
var books = await this.googleBooks.search(title, author)
var books = await this.googleBooks.search(title, author, this.#providerResponseTimeout)
if (this.verbose) Logger.debug(`GoogleBooks Book Search Results: ${books.length || 0}`)
if (books.errorCode) {
Logger.error(`GoogleBooks Search Error ${books.errorCode}`)
@ -120,8 +140,14 @@ class BookFinder {
return books
}
/**
*
* @param {string} title
* @param {string} author
* @returns {Promise<Object[]>}
*/
async getFantLabResults(title, author) {
var books = await this.fantLab.search(title, author)
var books = await this.fantLab.search(title, author, this.#providerResponseTimeout)
if (this.verbose) Logger.debug(`FantLab Book Search Results: ${books.length || 0}`)
if (books.errorCode) {
Logger.error(`FantLab Search Error ${books.errorCode}`)
@ -131,40 +157,58 @@ class BookFinder {
return books
}
/**
*
* @param {string} search
* @returns {Promise<Object[]>}
*/
async getAudiobookCoversResults(search) {
const covers = await this.audiobookCovers.search(search)
const covers = await this.audiobookCovers.search(search, this.#providerResponseTimeout)
if (this.verbose) Logger.debug(`AudiobookCovers Search Results: ${covers.length || 0}`)
return covers || []
}
async getiTunesAudiobooksResults(title, author) {
return this.iTunesApi.searchAudiobooks(title)
/**
*
* @param {string} title
* @returns {Promise<Object[]>}
*/
async getiTunesAudiobooksResults(title) {
return this.iTunesApi.searchAudiobooks(title, this.#providerResponseTimeout)
}
/**
*
* @param {string} title
* @param {string} author
* @param {string} asin
* @param {string} provider
* @returns {Promise<Object[]>}
*/
async getAudibleResults(title, author, asin, provider) {
const region = provider.includes('.') ? provider.split('.').pop() : ''
const books = await this.audible.search(title, author, asin, region)
const books = await this.audible.search(title, author, asin, region, this.#providerResponseTimeout)
if (this.verbose) Logger.debug(`Audible Book Search Results: ${books.length || 0}`)
if (!books) return []
return books
}
/**
*
* @param {string} title
* @param {string} author
* @param {string} providerSlug
*
* @param {string} title
* @param {string} author
* @param {string} isbn
* @param {string} providerSlug
* @returns {Promise<Object[]>}
*/
async getCustomProviderResults(title, author, providerSlug) {
const books = await this.customProviderAdapter.search(title, author, providerSlug, 'book')
async getCustomProviderResults(title, author, isbn, providerSlug) {
const books = await this.customProviderAdapter.search(title, author, isbn, providerSlug, 'book', this.#providerResponseTimeout)
if (this.verbose) Logger.debug(`Custom provider '${providerSlug}' Search Results: ${books.length || 0}`)
return books
}
static TitleCandidates = class {
constructor(cleanAuthor) {
this.candidates = new Set()
this.cleanAuthor = cleanAuthor
@ -178,13 +222,13 @@ class BookFinder {
title = this.#removeAuthorFromTitle(title)
const titleTransformers = [
[/([,:;_]| by ).*/g, ''], // Remove subtitle
[/(^| )\d+k(bps)?( |$)/, ' '], // Remove bitrate
[/([,:;_]| by ).*/g, ''], // Remove subtitle
[/(^| )\d+k(bps)?( |$)/, ' '], // Remove bitrate
[/ (2nd|3rd|\d+th)\s+ed(\.|ition)?/g, ''], // Remove edition
[/(^| |\.)(m4b|m4a|mp3)( |$)/g, ''], // Remove file-type
[/ a novel.*$/g, ''], // Remove "a novel"
[/(^| )(un)?abridged( |$)/g, ' '], // Remove "unabridged/abridged"
[/^\d+ | \d+$/g, ''], // Remove preceding/trailing numbers
[/(^| |\.)(m4b|m4a|mp3)( |$)/g, ''], // Remove file-type
[/ a novel.*$/g, ''], // Remove "a novel"
[/(^| )(un)?abridged( |$)/g, ' '], // Remove "unabridged/abridged"
[/^\d+ | \d+$/g, ''] // Remove preceding/trailing numbers
]
// Main variant
@ -196,8 +240,7 @@ class BookFinder {
let candidate = cleanTitle
for (const transformer of titleTransformers)
candidate = candidate.replace(transformer[0], transformer[1]).trim()
for (const transformer of titleTransformers) candidate = candidate.replace(transformer[0], transformer[1]).trim()
if (candidate != cleanTitle) {
if (candidate) {
@ -239,7 +282,7 @@ class BookFinder {
#removeAuthorFromTitle(title) {
if (!this.cleanAuthor) return title
const authorRe = new RegExp(`(^| | by |)${escapeRegExp(this.cleanAuthor)}(?= |$)`, "g")
const authorRe = new RegExp(`(^| | by |)${escapeRegExp(this.cleanAuthor)}(?= |$)`, 'g')
const authorCleanedTitle = cleanAuthorForCompares(title)
const authorCleanedTitleWithoutAuthor = authorCleanedTitle.replace(authorRe, '')
if (authorCleanedTitleWithoutAuthor !== authorCleanedTitle) {
@ -296,7 +339,7 @@ class BookFinder {
promises.push(this.validateAuthor(candidate))
}
const results = [...new Set(await Promise.all(promises))]
filteredCandidates = results.filter(author => author)
filteredCandidates = results.filter((author) => author)
// If no valid candidates were found, add back an aggresively cleaned author version
if (!filteredCandidates.length && this.cleanAuthor) filteredCandidates.push(this.agressivelyCleanAuthor)
// Always add an empty author candidate
@ -311,17 +354,16 @@ class BookFinder {
}
}
/**
* Search for books including fuzzy searches
*
*
* @param {Object} libraryItem
* @param {string} provider
* @param {string} title
* @param {string} author
* @param {string} isbn
* @param {string} asin
* @param {{titleDistance:number, authorDistance:number, maxFuzzySearches:number}} options
* @param {string} provider
* @param {string} title
* @param {string} author
* @param {string} isbn
* @param {string} asin
* @param {{titleDistance:number, authorDistance:number, maxFuzzySearches:number}} options
* @returns {Promise<Object[]>}
*/
async search(libraryItem, provider, title, author, isbn, asin, options = {}) {
@ -333,11 +375,10 @@ class BookFinder {
// Custom providers are assumed to be correct
if (provider.startsWith('custom-')) {
return this.getCustomProviderResults(title, author, provider)
return this.getCustomProviderResults(title, author, isbn, provider)
}
if (!title)
return books
if (!title) return books
books = await this.runSearch(title, author, provider, asin, maxTitleDistance, maxAuthorDistance)
@ -352,17 +393,14 @@ class BookFinder {
let authorCandidates = new BookFinder.AuthorCandidates(cleanAuthor, this.audnexus)
// Remove underscores and parentheses with their contents, and replace with a separator
const cleanTitle = title.replace(/\[.*?\]|\(.*?\)|{.*?}|_/g, " - ")
const cleanTitle = title.replace(/\[.*?\]|\(.*?\)|{.*?}|_/g, ' - ')
// Split title into hypen-separated parts
const titleParts = cleanTitle.split(/ - | -|- /)
for (const titlePart of titleParts)
authorCandidates.add(titlePart)
for (const titlePart of titleParts) authorCandidates.add(titlePart)
authorCandidates = await authorCandidates.getCandidates()
loop_author:
for (const authorCandidate of authorCandidates) {
loop_author: for (const authorCandidate of authorCandidates) {
let titleCandidates = new BookFinder.TitleCandidates(authorCandidate)
for (const titlePart of titleParts)
titleCandidates.add(titlePart)
for (const titlePart of titleParts) titleCandidates.add(titlePart)
titleCandidates = titleCandidates.getCandidates()
for (const titleCandidate of titleCandidates) {
if (titleCandidate == title && authorCandidate == author) continue // We already tried this
@ -392,10 +430,10 @@ class BookFinder {
/**
* Search for books
*
* @param {string} title
* @param {string} author
* @param {string} provider
*
* @param {string} title
* @param {string} author
* @param {string} provider
* @param {string} asin only used for audible providers
* @param {number} maxTitleDistance only used for openlibrary provider
* @param {number} maxAuthorDistance only used for openlibrary provider
@ -411,7 +449,7 @@ class BookFinder {
} else if (provider.startsWith('audible')) {
books = await this.getAudibleResults(title, author, asin, provider)
} else if (provider === 'itunes') {
books = await this.getiTunesAudiobooksResults(title, author)
books = await this.getiTunesAudiobooksResults(title)
} else if (provider === 'openlibrary') {
books = await this.getOpenLibResults(title, author, maxTitleDistance, maxAuthorDistance)
} else if (provider === 'fantlab') {
@ -447,7 +485,7 @@ class BookFinder {
covers.push(result.cover)
}
})
return [...(new Set(covers))]
return [...new Set(covers)]
}
findChapters(asin, region) {
@ -467,7 +505,7 @@ function stripSubtitle(title) {
function replaceAccentedChars(str) {
try {
return str.normalize('NFD').replace(/[\u0300-\u036f]/g, "")
return str.normalize('NFD').replace(/[\u0300-\u036f]/g, '')
} catch (error) {
Logger.error('[BookFinder] str normalize error', error)
return str
@ -482,7 +520,7 @@ function cleanTitleForCompares(title) {
let stripped = stripSubtitle(title)
// Remove text in paranthesis (i.e. "Ender's Game (Ender's Saga)" becomes "Ender's Game")
let cleaned = stripped.replace(/ *\([^)]*\) */g, "")
let cleaned = stripped.replace(/ *\([^)]*\) */g, '')
// Remove single quotes (i.e. "Ender's Game" becomes "Enders Game")
cleaned = cleaned.replace(/'/g, '')

View file

@ -15,7 +15,7 @@ var ffCodecRegexp = /^\s*([D\.])([E\.])([VAS])([I\.])([L\.])([S\.]) ([^ ]+) +(.*
var ffEncodersRegexp = /\(encoders:([^\)]+)\)/;
var ffDecodersRegexp = /\(decoders:([^\)]+)\)/;
var encodersRegexp = /^\s*([VAS\.])([F\.])([S\.])([X\.])([B\.])([D\.]) ([^ ]+) +(.*)$/;
var formatRegexp = /^\s*([D ])([E ]) ([^ ]+) +(.*)$/;
var formatRegexp = /^\s*([D ])([E ])\s+([^ ]+)\s+(.*)$/;
var lineBreakRegexp = /\r\n|\r|\n/;
var filterRegexp = /^(?: [T\.][S\.][C\.] )?([^ ]+) +(AA?|VV?|\|)->(AA?|VV?|\|) +(.*)$/;

View file

@ -22,6 +22,16 @@ class ApiCacheManager {
this.cache.clear()
}
/**
* Reset hooks and clear cache. Used when applying backups
*/
reset() {
Logger.info(`[ApiCacheManager] Resetting cache`)
this.init()
this.cache.clear()
}
get middleware() {
return (req, res, next) => {
const key = { user: req.user.username, url: req.url }

View file

@ -146,23 +146,73 @@ class BackupManager {
}
}
async requestApplyBackup(backup, res) {
/**
*
* @param {import('./ApiCacheManager')} apiCacheManager
* @param {Backup} backup
* @param {import('express').Response} res
*/
async requestApplyBackup(apiCacheManager, backup, res) {
Logger.info(`[BackupManager] Applying backup at "${backup.fullPath}"`)
const zip = new StreamZip.async({ file: backup.fullPath })
const entries = await zip.entries()
// Ensure backup has an absdatabase.sqlite file
if (!Object.keys(entries).includes('absdatabase.sqlite')) {
Logger.error(`[BackupManager] Cannot apply old backup ${backup.fullPath}`)
await zip.close()
return res.status(500).send('Invalid backup file. Does not include absdatabase.sqlite. This might be from an older Audiobookshelf server.')
}
await Database.disconnect()
await zip.extract('absdatabase.sqlite', global.ConfigPath)
const dbPath = Path.join(global.ConfigPath, 'absdatabase.sqlite')
const tempDbPath = Path.join(global.ConfigPath, 'absdatabase-temp.sqlite')
// Extract backup sqlite file to temporary path
await zip.extract('absdatabase.sqlite', tempDbPath)
Logger.info(`[BackupManager] Extracted backup sqlite db to temp path ${tempDbPath}`)
// Verify extract - Abandon backup if sqlite file did not extract
if (!await fs.pathExists(tempDbPath)) {
Logger.error(`[BackupManager] Sqlite file not found after extract - abandon backup apply and reconnect db`)
await zip.close()
await Database.reconnect()
return res.status(500).send('Failed to extract sqlite db from backup')
}
// Attempt to remove existing db file
try {
await fs.remove(dbPath)
} catch (error) {
// Abandon backup and remove extracted sqlite file if unable to remove existing db file
Logger.error(`[BackupManager] Unable to overwrite existing db file - abandon backup apply and reconnect db`, error)
await fs.remove(tempDbPath)
await zip.close()
await Database.reconnect()
return res.status(500).send(`Failed to overwrite sqlite db: ${error?.message || 'Unknown Error'}`)
}
// Rename temp db
await fs.move(tempDbPath, dbPath)
Logger.info(`[BackupManager] Saved backup sqlite file at "${dbPath}"`)
// Extract /metadata/items and /metadata/authors folders
await zip.extract('metadata-items/', this.ItemsMetadataPath)
await zip.extract('metadata-authors/', this.AuthorsMetadataPath)
await zip.close()
// Reconnect db
await Database.reconnect()
// Reset api cache, set hooks again
await apiCacheManager.reset()
res.sendStatus(200)
// Triggers browser refresh for all clients
SocketAuthority.emitter('backup_applied')
}

View file

@ -11,8 +11,8 @@ const fileUtils = require('../utils/fileUtils')
class BinaryManager {
defaultRequiredBinaries = [
{ name: 'ffmpeg', envVariable: 'FFMPEG_PATH', validVersions: ['5.1', '6'] },
{ name: 'ffprobe', envVariable: 'FFPROBE_PATH', validVersions: ['5.1', '6'] }
{ name: 'ffmpeg', envVariable: 'FFMPEG_PATH', validVersions: ['5.1'] },
{ name: 'ffprobe', envVariable: 'FFPROBE_PATH', validVersions: ['5.1'] }
]
constructor(requiredBinaries = this.defaultRequiredBinaries) {
@ -24,7 +24,14 @@ class BinaryManager {
}
async init() {
// Optional skip binaries check
if (process.env.SKIP_BINARIES_CHECK === '1') {
Logger.info('[BinaryManager] Skipping check for binaries')
return
}
if (this.initialized) return
const missingBinaries = await this.findRequiredBinaries()
if (missingBinaries.length == 0) return
await this.removeOldBinaries(missingBinaries)
@ -135,7 +142,7 @@ class BinaryManager {
if (!binaries.length) return
Logger.info(`[BinaryManager] Installing binaries: ${binaries.join(', ')}`)
let destination = await fileUtils.isWritable(this.mainInstallPath) ? this.mainInstallPath : this.altInstallPath
await ffbinaries.downloadBinaries(binaries, { destination, version: '6.1', force: true })
await ffbinaries.downloadBinaries(binaries, { destination, version: '5.1', force: true })
Logger.info(`[BinaryManager] Binaries installed to ${destination}`)
}

View file

@ -144,8 +144,13 @@ class PlaybackSessionManager {
session.currentTime = sessionJson.currentTime
session.timeListening = sessionJson.timeListening
session.updatedAt = sessionJson.updatedAt
session.date = date.format(new Date(), 'YYYY-MM-DD')
session.dayOfWeek = date.format(new Date(), 'dddd')
let jsDate = new Date(sessionJson.updatedAt)
if (isNaN(jsDate)) {
jsDate = new Date()
}
session.date = date.format(jsDate, 'YYYY-MM-DD')
session.dayOfWeek = date.format(jsDate, 'dddd')
Logger.debug(`[PlaybackSessionManager] Updated session for "${session.displayTitle}" (${session.id})`)
await Database.updatePlaybackSession(session)

View file

@ -32,7 +32,7 @@ class PodcastManager {
}
getEpisodeDownloadsInQueue(libraryItemId) {
return this.downloadQueue.filter(d => d.libraryItemId === libraryItemId)
return this.downloadQueue.filter((d) => d.libraryItemId === libraryItemId)
}
clearDownloadQueue(libraryItemId = null) {
@ -44,12 +44,12 @@ class PodcastManager {
} else {
var itemDownloads = this.getEpisodeDownloadsInQueue(libraryItemId)
Logger.info(`[PodcastManager] Clearing downloads in queue for item "${libraryItemId}" (${itemDownloads.length})`)
this.downloadQueue = this.downloadQueue.filter(d => d.libraryItemId !== libraryItemId)
this.downloadQueue = this.downloadQueue.filter((d) => d.libraryItemId !== libraryItemId)
}
}
async downloadPodcastEpisodes(libraryItem, episodesToDownload, isAutoDownload) {
let index = Math.max(...libraryItem.media.episodes.filter(ep => ep.index == null || isNaN(ep.index)).map(ep => Number(ep.index))) + 1
let index = Math.max(...libraryItem.media.episodes.filter((ep) => ep.index == null || isNaN(ep.index)).map((ep) => Number(ep.index))) + 1
for (const ep of episodesToDownload) {
const newPe = new PodcastEpisode()
newPe.setData(ep, index++)
@ -72,7 +72,7 @@ class PodcastManager {
const taskDescription = `Downloading episode "${podcastEpisodeDownload.podcastEpisode.title}".`
const taskData = {
libraryId: podcastEpisodeDownload.libraryId,
libraryItemId: podcastEpisodeDownload.libraryItemId,
libraryItemId: podcastEpisodeDownload.libraryItemId
}
const task = TaskManager.createAndAddTask('download-podcast-episode', 'Downloading Episode', taskDescription, false, taskData)
@ -104,10 +104,12 @@ class PodcastManager {
})
} else {
// Download episode only
success = await downloadFile(this.currentDownload.url, this.currentDownload.targetPath).then(() => true).catch((error) => {
Logger.error(`[PodcastManager] Podcast Episode download failed`, error)
return false
})
success = await downloadFile(this.currentDownload.url, this.currentDownload.targetPath)
.then(() => true)
.catch((error) => {
Logger.error(`[PodcastManager] Podcast Episode download failed`, error)
return false
})
}
if (success) {
@ -156,7 +158,7 @@ class PodcastManager {
podcastEpisode.audioFile = audioFile
if (audioFile.chapters?.length) {
podcastEpisode.chapters = audioFile.chapters.map(ch => ({ ...ch }))
podcastEpisode.chapters = audioFile.chapters.map((ch) => ({ ...ch }))
}
libraryItem.media.addPodcastEpisode(podcastEpisode)
@ -181,7 +183,8 @@ class PodcastManager {
podcastEpisodeExpanded.libraryItem = libraryItem.toJSONExpanded()
SocketAuthority.emitter('episode_added', podcastEpisodeExpanded)
if (this.currentDownload.isAutoDownload) { // Notifications only for auto downloaded episodes
if (this.currentDownload.isAutoDownload) {
// Notifications only for auto downloaded episodes
this.notificationManager.onPodcastEpisodeDownloaded(libraryItem, podcastEpisode)
}
@ -191,12 +194,14 @@ class PodcastManager {
async removeOldestEpisode(libraryItem, episodeIdJustDownloaded) {
var smallestPublishedAt = 0
var oldestEpisode = null
libraryItem.media.episodesWithPubDate.filter(ep => ep.id !== episodeIdJustDownloaded).forEach((ep) => {
if (!smallestPublishedAt || ep.publishedAt < smallestPublishedAt) {
smallestPublishedAt = ep.publishedAt
oldestEpisode = ep
}
})
libraryItem.media.episodesWithPubDate
.filter((ep) => ep.id !== episodeIdJustDownloaded)
.forEach((ep) => {
if (!smallestPublishedAt || ep.publishedAt < smallestPublishedAt) {
smallestPublishedAt = ep.publishedAt
oldestEpisode = ep
}
})
// TODO: Should we check for open playback sessions for this episode?
// TODO: remove all user progress for this episode
if (oldestEpisode?.audioFile) {
@ -246,7 +251,8 @@ class PodcastManager {
var newEpisodes = await this.checkPodcastForNewEpisodes(libraryItem, dateToCheckForEpisodesAfter, libraryItem.media.maxNewEpisodesToDownload)
Logger.debug(`[PodcastManager] runEpisodeCheck: ${newEpisodes?.length || 'N/A'} episodes found`)
if (!newEpisodes) { // Failed
if (!newEpisodes) {
// Failed
// Allow up to MaxFailedEpisodeChecks failed attempts before disabling auto download
if (!this.failedCheckMap[libraryItem.id]) this.failedCheckMap[libraryItem.id] = 0
this.failedCheckMap[libraryItem.id]++
@ -285,7 +291,7 @@ class PodcastManager {
}
// Filter new and not already has
let newEpisodes = feed.episodes.filter(ep => ep.publishedAt > dateToCheckForEpisodesAfter && !podcastLibraryItem.media.checkHasEpisodeByFeedUrl(ep.enclosure.url))
let newEpisodes = feed.episodes.filter((ep) => ep.publishedAt > dateToCheckForEpisodesAfter && !podcastLibraryItem.media.checkHasEpisodeByFeedEpisode(ep))
if (maxNewEpisodes > 0) {
newEpisodes = newEpisodes.slice(0, maxNewEpisodes)
@ -322,7 +328,7 @@ class PodcastManager {
}
const matches = []
feed.episodes.forEach(ep => {
feed.episodes.forEach((ep) => {
if (!ep.title) return
const epTitle = ep.title.toLowerCase().trim()
@ -370,7 +376,7 @@ class PodcastManager {
/**
* OPML file string for podcasts in a library
* @param {import('../models/Podcast')[]} podcasts
* @param {import('../models/Podcast')[]} podcasts
* @returns {string} XML string
*/
generateOPMLFileText(podcasts) {
@ -383,7 +389,7 @@ class PodcastManager {
return {
currentDownload: _currentDownload?.toJSONForClient(),
queue: this.downloadQueue.filter(item => !libraryId || item.libraryId === libraryId).map(item => item.toJSONForClient())
queue: this.downloadQueue.filter((item) => !libraryId || item.libraryId === libraryId).map((item) => item.toJSONForClient())
}
}
}

View file

@ -26,11 +26,6 @@ class Author extends Model {
this.createdAt
}
static async getOldAuthors() {
const authors = await this.findAll()
return authors.map(au => au.getOldAuthor())
}
getOldAuthor() {
return new oldAuthor({
id: this.id,
@ -85,7 +80,7 @@ class Author extends Model {
/**
* Get oldAuthor by id
* @param {string} authorId
* @param {string} authorId
* @returns {Promise<oldAuthor>}
*/
static async getOldById(authorId) {
@ -96,7 +91,7 @@ class Author extends Model {
/**
* Check if author exists
* @param {string} authorId
* @param {string} authorId
* @returns {Promise<boolean>}
*/
static async checkExistsById(authorId) {
@ -106,60 +101,67 @@ class Author extends Model {
/**
* Get old author by name and libraryId. name case insensitive
* TODO: Look for authors ignoring punctuation
*
* @param {string} authorName
* @param {string} libraryId
*
* @param {string} authorName
* @param {string} libraryId
* @returns {Promise<oldAuthor>}
*/
static async getOldByNameAndLibrary(authorName, libraryId) {
const author = (await this.findOne({
where: [
where(fn('lower', col('name')), authorName.toLowerCase()),
{
libraryId
}
]
}))?.getOldAuthor()
const author = (
await this.findOne({
where: [
where(fn('lower', col('name')), authorName.toLowerCase()),
{
libraryId
}
]
})
)?.getOldAuthor()
return author
}
/**
* Initialize model
* @param {import('../Database').sequelize} sequelize
* @param {import('../Database').sequelize} sequelize
*/
static init(sequelize) {
super.init({
id: {
type: DataTypes.UUID,
defaultValue: DataTypes.UUIDV4,
primaryKey: true
},
name: DataTypes.STRING,
lastFirst: DataTypes.STRING,
asin: DataTypes.STRING,
description: DataTypes.TEXT,
imagePath: DataTypes.STRING
}, {
sequelize,
modelName: 'author',
indexes: [
{
fields: [{
name: 'name',
collate: 'NOCASE'
}]
super.init(
{
id: {
type: DataTypes.UUID,
defaultValue: DataTypes.UUIDV4,
primaryKey: true
},
// {
// fields: [{
// name: 'lastFirst',
// collate: 'NOCASE'
// }]
// },
{
fields: ['libraryId']
}
]
})
name: DataTypes.STRING,
lastFirst: DataTypes.STRING,
asin: DataTypes.STRING,
description: DataTypes.TEXT,
imagePath: DataTypes.STRING
},
{
sequelize,
modelName: 'author',
indexes: [
{
fields: [
{
name: 'name',
collate: 'NOCASE'
}
]
},
// {
// fields: [{
// name: 'lastFirst',
// collate: 'NOCASE'
// }]
// },
{
fields: ['libraryId']
}
]
}
)
const { library } = sequelize.models
library.hasMany(Author, {

View file

@ -7,7 +7,7 @@ const Logger = require('../Logger')
* @property {string} ebookFormat
* @property {number} addedAt
* @property {number} updatedAt
* @property {{filename:string, ext:string, path:string, relPath:string, size:number, mtimeMs:number, ctimeMs:number, birthtimeMs:number}} metadata
* @property {{filename:string, ext:string, path:string, relPath:strFing, size:number, mtimeMs:number, ctimeMs:number, birthtimeMs:number}} metadata
*/
/**
@ -21,13 +21,13 @@ const Logger = require('../Logger')
/**
* @typedef SeriesExpandedProperties
* @property {{sequence:string}} bookSeries
*
*
* @typedef {import('./Series') & SeriesExpandedProperties} SeriesExpanded
*
*
* @typedef BookExpandedProperties
* @property {import('./Author')[]} authors
* @property {SeriesExpanded[]} series
*
*
* @typedef {Book & BookExpandedProperties} BookExpanded
*/
@ -112,29 +112,31 @@ class Book extends Model {
const bookExpanded = libraryItemExpanded.media
let authors = []
if (bookExpanded.authors?.length) {
authors = bookExpanded.authors.map(au => {
authors = bookExpanded.authors.map((au) => {
return {
id: au.id,
name: au.name
}
})
} else if (bookExpanded.bookAuthors?.length) {
authors = bookExpanded.bookAuthors.map(ba => {
if (ba.author) {
return {
id: ba.author.id,
name: ba.author.name
authors = bookExpanded.bookAuthors
.map((ba) => {
if (ba.author) {
return {
id: ba.author.id,
name: ba.author.name
}
} else {
Logger.error(`[Book] Invalid bookExpanded bookAuthors: no author`, ba)
return null
}
} else {
Logger.error(`[Book] Invalid bookExpanded bookAuthors: no author`, ba)
return null
}
}).filter(a => a)
})
.filter((a) => a)
}
let series = []
if (bookExpanded.series?.length) {
series = bookExpanded.series.map(se => {
series = bookExpanded.series.map((se) => {
return {
id: se.id,
name: se.name,
@ -142,18 +144,20 @@ class Book extends Model {
}
})
} else if (bookExpanded.bookSeries?.length) {
series = bookExpanded.bookSeries.map(bs => {
if (bs.series) {
return {
id: bs.series.id,
name: bs.series.name,
sequence: bs.sequence
series = bookExpanded.bookSeries
.map((bs) => {
if (bs.series) {
return {
id: bs.series.id,
name: bs.series.name,
sequence: bs.sequence
}
} else {
Logger.error(`[Book] Invalid bookExpanded bookSeries: no series`, bs)
return null
}
} else {
Logger.error(`[Book] Invalid bookExpanded bookSeries: no series`, bs)
return null
}
}).filter(s => s)
})
.filter((s) => s)
}
return {
@ -185,7 +189,7 @@ class Book extends Model {
}
/**
* @param {object} oldBook
* @param {object} oldBook
* @returns {boolean} true if updated
*/
static saveFromOld(oldBook) {
@ -194,10 +198,12 @@ class Book extends Model {
where: {
id: book.id
}
}).then(result => result[0] > 0).catch((error) => {
Logger.error(`[Book] Failed to save book ${book.id}`, error)
return false
})
.then((result) => result[0] > 0)
.catch((error) => {
Logger.error(`[Book] Failed to save book ${book.id}`, error)
return false
})
}
static getFromOld(oldBook) {
@ -219,7 +225,7 @@ class Book extends Model {
ebookFile: oldBook.ebookFile?.toJSON() || null,
coverPath: oldBook.coverPath,
duration: oldBook.duration,
audioFiles: oldBook.audioFiles?.map(af => af.toJSON()) || [],
audioFiles: oldBook.audioFiles?.map((af) => af.toJSON()) || [],
chapters: oldBook.chapters,
tags: oldBook.tags,
genres: oldBook.metadata.genres
@ -229,12 +235,12 @@ class Book extends Model {
getAbsMetadataJson() {
return {
tags: this.tags || [],
chapters: this.chapters?.map(c => ({ ...c })) || [],
chapters: this.chapters?.map((c) => ({ ...c })) || [],
title: this.title,
subtitle: this.subtitle,
authors: this.authors.map(a => a.name),
authors: this.authors.map((a) => a.name),
narrators: this.narrators,
series: this.series.map(se => {
series: this.series.map((se) => {
const sequence = se.bookSeries?.sequence || ''
if (!sequence) return se.name
return `${se.name} #${sequence}`
@ -254,61 +260,66 @@ class Book extends Model {
/**
* Initialize model
* @param {import('../Database').sequelize} sequelize
* @param {import('../Database').sequelize} sequelize
*/
static init(sequelize) {
super.init({
id: {
type: DataTypes.UUID,
defaultValue: DataTypes.UUIDV4,
primaryKey: true
},
title: DataTypes.STRING,
titleIgnorePrefix: DataTypes.STRING,
subtitle: DataTypes.STRING,
publishedYear: DataTypes.STRING,
publishedDate: DataTypes.STRING,
publisher: DataTypes.STRING,
description: DataTypes.TEXT,
isbn: DataTypes.STRING,
asin: DataTypes.STRING,
language: DataTypes.STRING,
explicit: DataTypes.BOOLEAN,
abridged: DataTypes.BOOLEAN,
coverPath: DataTypes.STRING,
duration: DataTypes.FLOAT,
super.init(
{
id: {
type: DataTypes.UUID,
defaultValue: DataTypes.UUIDV4,
primaryKey: true
},
title: DataTypes.STRING,
titleIgnorePrefix: DataTypes.STRING,
subtitle: DataTypes.STRING,
publishedYear: DataTypes.STRING,
publishedDate: DataTypes.STRING,
publisher: DataTypes.STRING,
description: DataTypes.TEXT,
isbn: DataTypes.STRING,
asin: DataTypes.STRING,
language: DataTypes.STRING,
explicit: DataTypes.BOOLEAN,
abridged: DataTypes.BOOLEAN,
coverPath: DataTypes.STRING,
duration: DataTypes.FLOAT,
narrators: DataTypes.JSON,
audioFiles: DataTypes.JSON,
ebookFile: DataTypes.JSON,
chapters: DataTypes.JSON,
tags: DataTypes.JSON,
genres: DataTypes.JSON
}, {
sequelize,
modelName: 'book',
indexes: [
{
fields: [{
name: 'title',
collate: 'NOCASE'
}]
},
// {
// fields: [{
// name: 'titleIgnorePrefix',
// collate: 'NOCASE'
// }]
// },
{
fields: ['publishedYear']
},
// {
// fields: ['duration']
// }
]
})
narrators: DataTypes.JSON,
audioFiles: DataTypes.JSON,
ebookFile: DataTypes.JSON,
chapters: DataTypes.JSON,
tags: DataTypes.JSON,
genres: DataTypes.JSON
},
{
sequelize,
modelName: 'book',
indexes: [
{
fields: [
{
name: 'title',
collate: 'NOCASE'
}
]
},
// {
// fields: [{
// name: 'titleIgnorePrefix',
// collate: 'NOCASE'
// }]
// },
{
fields: ['publishedYear']
}
// {
// fields: ['duration']
// }
]
}
)
}
}
module.exports = Book
module.exports = Book

View file

@ -25,21 +25,24 @@ class BookAuthor extends Model {
/**
* Initialize model
* @param {import('../Database').sequelize} sequelize
* @param {import('../Database').sequelize} sequelize
*/
static init(sequelize) {
super.init({
id: {
type: DataTypes.UUID,
defaultValue: DataTypes.UUIDV4,
primaryKey: true
super.init(
{
id: {
type: DataTypes.UUID,
defaultValue: DataTypes.UUIDV4,
primaryKey: true
}
},
{
sequelize,
modelName: 'bookAuthor',
timestamps: true,
updatedAt: false
}
}, {
sequelize,
modelName: 'bookAuthor',
timestamps: true,
updatedAt: false
})
)
// Super Many-to-Many
// ref: https://sequelize.org/docs/v6/advanced-association-concepts/advanced-many-to-many/#the-best-of-both-worlds-the-super-many-to-many-relationship
@ -58,4 +61,4 @@ class BookAuthor extends Model {
BookAuthor.belongsTo(author)
}
}
module.exports = BookAuthor
module.exports = BookAuthor

View file

@ -27,22 +27,25 @@ class BookSeries extends Model {
/**
* Initialize model
* @param {import('../Database').sequelize} sequelize
* @param {import('../Database').sequelize} sequelize
*/
static init(sequelize) {
super.init({
id: {
type: DataTypes.UUID,
defaultValue: DataTypes.UUIDV4,
primaryKey: true
super.init(
{
id: {
type: DataTypes.UUID,
defaultValue: DataTypes.UUIDV4,
primaryKey: true
},
sequence: DataTypes.STRING
},
sequence: DataTypes.STRING
}, {
sequelize,
modelName: 'bookSeries',
timestamps: true,
updatedAt: false
})
{
sequelize,
modelName: 'bookSeries',
timestamps: true,
updatedAt: false
}
)
// Super Many-to-Many
// ref: https://sequelize.org/docs/v6/advanced-association-concepts/advanced-many-to-many/#the-best-of-both-worlds-the-super-many-to-many-relationship
@ -62,4 +65,4 @@ class BookSeries extends Model {
}
}
module.exports = BookSeries
module.exports = BookSeries

View file

@ -2,7 +2,6 @@ const { DataTypes, Model, Sequelize } = require('sequelize')
const oldCollection = require('../objects/Collection')
class Collection extends Model {
constructor(values, options) {
super(values, options)
@ -20,27 +19,13 @@ class Collection extends Model {
/** @type {Date} */
this.createdAt
}
/**
* Get all old collections
* @returns {Promise<oldCollection[]>}
*/
static async getOldCollections() {
const collections = await this.findAll({
include: {
model: this.sequelize.models.book,
include: this.sequelize.models.libraryItem
},
order: [[this.sequelize.models.book, this.sequelize.models.collectionBook, 'order', 'ASC']]
})
return collections.map(c => this.getOldCollection(c))
}
/**
* Get all old collections toJSONExpanded, items filtered for user permissions
* @param {[oldUser]} user
* @param {[string]} libraryId
* @param {[string[]]} include
* @returns {Promise<object[]>} oldCollection.toJSONExpanded
* @param {oldUser} [user]
* @param {string} [libraryId]
* @param {string[]} [include]
* @returns {Promise<oldCollection[]>} oldCollection.toJSONExpanded
*/
static async getOldCollectionsJsonExpanded(user, libraryId, include) {
let collectionWhere = null
@ -78,8 +63,7 @@ class Collection extends Model {
through: {
attributes: ['sequence']
}
},
}
]
},
...collectionIncludes
@ -87,11 +71,84 @@ class Collection extends Model {
order: [[this.sequelize.models.book, this.sequelize.models.collectionBook, 'order', 'ASC']]
})
// TODO: Handle user permission restrictions on initial query
return collections.map(c => {
const oldCollection = this.getOldCollection(c)
return collections
.map((c) => {
const oldCollection = this.getOldCollection(c)
// Filter books using user permissions
const books = c.books?.filter(b => {
// Filter books using user permissions
const books =
c.books?.filter((b) => {
if (user) {
if (b.tags?.length && !user.checkCanAccessLibraryItemWithTags(b.tags)) {
return false
}
if (b.explicit === true && !user.canAccessExplicitContent) {
return false
}
}
return true
}) || []
// Map to library items
const libraryItems = books.map((b) => {
const libraryItem = b.libraryItem
delete b.libraryItem
libraryItem.media = b
return this.sequelize.models.libraryItem.getOldLibraryItem(libraryItem)
})
// Users with restricted permissions will not see this collection
if (!books.length && oldCollection.books.length) {
return null
}
const collectionExpanded = oldCollection.toJSONExpanded(libraryItems)
// Map feed if found
if (c.feeds?.length) {
collectionExpanded.rssFeed = this.sequelize.models.feed.getOldFeed(c.feeds[0])
}
return collectionExpanded
})
.filter((c) => c)
}
/**
* Get old collection toJSONExpanded, items filtered for user permissions
* @param {oldUser} [user]
* @param {string[]} [include]
* @returns {Promise<oldCollection>} oldCollection.toJSONExpanded
*/
async getOldJsonExpanded(user, include) {
this.books =
(await this.getBooks({
include: [
{
model: this.sequelize.models.libraryItem
},
{
model: this.sequelize.models.author,
through: {
attributes: []
}
},
{
model: this.sequelize.models.series,
through: {
attributes: ['sequence']
}
}
],
order: [Sequelize.literal('`collectionBook.order` ASC')]
})) || []
const oldCollection = this.sequelize.models.collection.getOldCollection(this)
// Filter books using user permissions
// TODO: Handle user permission restrictions on initial query
const books =
this.books?.filter((b) => {
if (user) {
if (b.tags?.length && !user.checkCanAccessLibraryItemWithTags(b.tags)) {
return false
@ -103,77 +160,8 @@ class Collection extends Model {
return true
}) || []
// Map to library items
const libraryItems = books.map(b => {
const libraryItem = b.libraryItem
delete b.libraryItem
libraryItem.media = b
return this.sequelize.models.libraryItem.getOldLibraryItem(libraryItem)
})
// Users with restricted permissions will not see this collection
if (!books.length && oldCollection.books.length) {
return null
}
const collectionExpanded = oldCollection.toJSONExpanded(libraryItems)
// Map feed if found
if (c.feeds?.length) {
collectionExpanded.rssFeed = this.sequelize.models.feed.getOldFeed(c.feeds[0])
}
return collectionExpanded
}).filter(c => c)
}
/**
* Get old collection toJSONExpanded, items filtered for user permissions
* @param {[oldUser]} user
* @param {[string[]]} include
* @returns {Promise<object>} oldCollection.toJSONExpanded
*/
async getOldJsonExpanded(user, include) {
this.books = await this.getBooks({
include: [
{
model: this.sequelize.models.libraryItem
},
{
model: this.sequelize.models.author,
through: {
attributes: []
}
},
{
model: this.sequelize.models.series,
through: {
attributes: ['sequence']
}
},
],
order: [Sequelize.literal('`collectionBook.order` ASC')]
}) || []
const oldCollection = this.sequelize.models.collection.getOldCollection(this)
// Filter books using user permissions
// TODO: Handle user permission restrictions on initial query
const books = this.books?.filter(b => {
if (user) {
if (b.tags?.length && !user.checkCanAccessLibraryItemWithTags(b.tags)) {
return false
}
if (b.explicit === true && !user.canAccessExplicitContent) {
return false
}
}
return true
}) || []
// Map to library items
const libraryItems = books.map(b => {
const libraryItems = books.map((b) => {
const libraryItem = b.libraryItem
delete b.libraryItem
libraryItem.media = b
@ -199,11 +187,11 @@ class Collection extends Model {
/**
* Get old collection from Collection
* @param {Collection} collectionExpanded
* @param {Collection} collectionExpanded
* @returns {oldCollection}
*/
static getOldCollection(collectionExpanded) {
const libraryItemIds = collectionExpanded.books?.map(b => b.libraryItem?.id || null).filter(lid => lid) || []
const libraryItemIds = collectionExpanded.books?.map((b) => b.libraryItem?.id || null).filter((lid) => lid) || []
return new oldCollection({
id: collectionExpanded.id,
libraryId: collectionExpanded.libraryId,
@ -215,6 +203,11 @@ class Collection extends Model {
})
}
/**
*
* @param {oldCollection} oldCollection
* @returns {Promise<Collection>}
*/
static createFromOld(oldCollection) {
const collection = this.getFromOld(oldCollection)
return this.create(collection)
@ -239,7 +232,7 @@ class Collection extends Model {
/**
* Get old collection by id
* @param {string} collectionId
* @param {string} collectionId
* @returns {Promise<oldCollection|null>} returns null if not found
*/
static async getOldById(collectionId) {
@ -260,34 +253,34 @@ class Collection extends Model {
* @returns {Promise<oldCollection>}
*/
async getOld() {
this.books = await this.getBooks({
include: [
{
model: this.sequelize.models.libraryItem
},
{
model: this.sequelize.models.author,
through: {
attributes: []
this.books =
(await this.getBooks({
include: [
{
model: this.sequelize.models.libraryItem
},
{
model: this.sequelize.models.author,
through: {
attributes: []
}
},
{
model: this.sequelize.models.series,
through: {
attributes: ['sequence']
}
}
},
{
model: this.sequelize.models.series,
through: {
attributes: ['sequence']
}
},
],
order: [Sequelize.literal('`collectionBook.order` ASC')]
}) || []
],
order: [Sequelize.literal('`collectionBook.order` ASC')]
})) || []
return this.sequelize.models.collection.getOldCollection(this)
}
/**
* Remove all collections belonging to library
* @param {string} libraryId
* @param {string} libraryId
* @returns {Promise<number>} number of collections destroyed
*/
static async removeAllForLibrary(libraryId) {
@ -299,38 +292,26 @@ class Collection extends Model {
})
}
static async getAllForBook(bookId) {
const collections = await this.findAll({
include: {
model: this.sequelize.models.book,
where: {
id: bookId
},
required: true,
include: this.sequelize.models.libraryItem
},
order: [[this.sequelize.models.book, this.sequelize.models.collectionBook, 'order', 'ASC']]
})
return collections.map(c => this.getOldCollection(c))
}
/**
* Initialize model
* @param {import('../Database').sequelize} sequelize
* @param {import('../Database').sequelize} sequelize
*/
static init(sequelize) {
super.init({
id: {
type: DataTypes.UUID,
defaultValue: DataTypes.UUIDV4,
primaryKey: true
super.init(
{
id: {
type: DataTypes.UUID,
defaultValue: DataTypes.UUIDV4,
primaryKey: true
},
name: DataTypes.STRING,
description: DataTypes.TEXT
},
name: DataTypes.STRING,
description: DataTypes.TEXT
}, {
sequelize,
modelName: 'collection'
})
{
sequelize,
modelName: 'collection'
}
)
const { library } = sequelize.models
@ -339,4 +320,4 @@ class Collection extends Model {
}
}
module.exports = Collection
module.exports = Collection

View file

@ -26,19 +26,22 @@ class CollectionBook extends Model {
}
static init(sequelize) {
super.init({
id: {
type: DataTypes.UUID,
defaultValue: DataTypes.UUIDV4,
primaryKey: true
super.init(
{
id: {
type: DataTypes.UUID,
defaultValue: DataTypes.UUIDV4,
primaryKey: true
},
order: DataTypes.INTEGER
},
order: DataTypes.INTEGER
}, {
sequelize,
timestamps: true,
updatedAt: false,
modelName: 'collectionBook'
})
{
sequelize,
timestamps: true,
updatedAt: false,
modelName: 'collectionBook'
}
)
// Super Many-to-Many
// ref: https://sequelize.org/docs/v6/advanced-association-concepts/advanced-many-to-many/#the-best-of-both-worlds-the-super-many-to-many-relationship
@ -58,4 +61,4 @@ class CollectionBook extends Model {
}
}
module.exports = CollectionBook
module.exports = CollectionBook

View file

@ -114,26 +114,29 @@ class Device extends Model {
/**
* Initialize model
* @param {import('../Database').sequelize} sequelize
* @param {import('../Database').sequelize} sequelize
*/
static init(sequelize) {
super.init({
id: {
type: DataTypes.UUID,
defaultValue: DataTypes.UUIDV4,
primaryKey: true
super.init(
{
id: {
type: DataTypes.UUID,
defaultValue: DataTypes.UUIDV4,
primaryKey: true
},
deviceId: DataTypes.STRING,
clientName: DataTypes.STRING, // e.g. Abs Web, Abs Android
clientVersion: DataTypes.STRING, // e.g. Server version or mobile version
ipAddress: DataTypes.STRING,
deviceName: DataTypes.STRING, // e.g. Windows 10 Chrome, Google Pixel 6, Apple iPhone 10,3
deviceVersion: DataTypes.STRING, // e.g. Browser version or Android SDK
extraData: DataTypes.JSON
},
deviceId: DataTypes.STRING,
clientName: DataTypes.STRING, // e.g. Abs Web, Abs Android
clientVersion: DataTypes.STRING, // e.g. Server version or mobile version
ipAddress: DataTypes.STRING,
deviceName: DataTypes.STRING, // e.g. Windows 10 Chrome, Google Pixel 6, Apple iPhone 10,3
deviceVersion: DataTypes.STRING, // e.g. Browser version or Android SDK
extraData: DataTypes.JSON
}, {
sequelize,
modelName: 'device'
})
{
sequelize,
modelName: 'device'
}
)
const { user } = sequelize.models
@ -144,4 +147,4 @@ class Device extends Model {
}
}
module.exports = Device
module.exports = Device

View file

@ -58,7 +58,7 @@ class Feed extends Model {
model: this.sequelize.models.feedEpisode
}
})
return feeds.map(f => this.getOldFeed(f))
return feeds.map((f) => this.getOldFeed(f))
}
/**
@ -117,7 +117,7 @@ class Feed extends Model {
entityType: 'libraryItem'
}
})
return feeds.map(f => f.entityId).filter(f => f) || []
return feeds.map((f) => f.entityId).filter((f) => f) || []
}
/**
@ -179,7 +179,7 @@ class Feed extends Model {
// Remove and update existing feed episodes
for (const feedEpisode of existingFeed.feedEpisodes) {
const oldFeedEpisode = oldFeedEpisodes.find(ep => ep.id === feedEpisode.id)
const oldFeedEpisode = oldFeedEpisodes.find((ep) => ep.id === feedEpisode.id)
// Episode removed
if (!oldFeedEpisode) {
feedEpisode.destroy()
@ -200,7 +200,7 @@ class Feed extends Model {
// Add new feed episodes
for (const episode of oldFeedEpisodes) {
if (!existingFeed.feedEpisodes.some(fe => fe.id === episode.id)) {
if (!existingFeed.feedEpisodes.some((fe) => fe.id === episode.id)) {
await this.sequelize.models.feedEpisode.createFromOld(feedObj.id, episode)
hasUpdates = true
}
@ -258,41 +258,44 @@ class Feed extends Model {
/**
* Initialize model
*
*
* Polymorphic association: Feeds can be created from LibraryItem, Collection, Playlist or Series
* @see https://sequelize.org/docs/v6/advanced-association-concepts/polymorphic-associations/
*
* @param {import('../Database').sequelize} sequelize
*
* @param {import('../Database').sequelize} sequelize
*/
static init(sequelize) {
super.init({
id: {
type: DataTypes.UUID,
defaultValue: DataTypes.UUIDV4,
primaryKey: true
super.init(
{
id: {
type: DataTypes.UUID,
defaultValue: DataTypes.UUIDV4,
primaryKey: true
},
slug: DataTypes.STRING,
entityType: DataTypes.STRING,
entityId: DataTypes.UUIDV4,
entityUpdatedAt: DataTypes.DATE,
serverAddress: DataTypes.STRING,
feedURL: DataTypes.STRING,
imageURL: DataTypes.STRING,
siteURL: DataTypes.STRING,
title: DataTypes.STRING,
description: DataTypes.TEXT,
author: DataTypes.STRING,
podcastType: DataTypes.STRING,
language: DataTypes.STRING,
ownerName: DataTypes.STRING,
ownerEmail: DataTypes.STRING,
explicit: DataTypes.BOOLEAN,
preventIndexing: DataTypes.BOOLEAN,
coverPath: DataTypes.STRING
},
slug: DataTypes.STRING,
entityType: DataTypes.STRING,
entityId: DataTypes.UUIDV4,
entityUpdatedAt: DataTypes.DATE,
serverAddress: DataTypes.STRING,
feedURL: DataTypes.STRING,
imageURL: DataTypes.STRING,
siteURL: DataTypes.STRING,
title: DataTypes.STRING,
description: DataTypes.TEXT,
author: DataTypes.STRING,
podcastType: DataTypes.STRING,
language: DataTypes.STRING,
ownerName: DataTypes.STRING,
ownerEmail: DataTypes.STRING,
explicit: DataTypes.BOOLEAN,
preventIndexing: DataTypes.BOOLEAN,
coverPath: DataTypes.STRING
}, {
sequelize,
modelName: 'feed'
})
{
sequelize,
modelName: 'feed'
}
)
const { user, libraryItem, collection, series, playlist } = sequelize.models
@ -335,7 +338,7 @@ class Feed extends Model {
})
Feed.belongsTo(playlist, { foreignKey: 'entityId', constraints: false })
Feed.addHook('afterFind', findResult => {
Feed.addHook('afterFind', (findResult) => {
if (!findResult) return
if (!Array.isArray(findResult)) findResult = [findResult]
@ -368,4 +371,4 @@ class Feed extends Model {
}
}
module.exports = Feed
module.exports = Feed

View file

@ -65,9 +65,9 @@ class FeedEpisode extends Model {
/**
* Create feed episode from old model
*
* @param {string} feedId
* @param {Object} oldFeedEpisode
*
* @param {string} feedId
* @param {Object} oldFeedEpisode
* @returns {Promise<FeedEpisode>}
*/
static createFromOld(feedId, oldFeedEpisode) {
@ -98,33 +98,36 @@ class FeedEpisode extends Model {
/**
* Initialize model
* @param {import('../Database').sequelize} sequelize
* @param {import('../Database').sequelize} sequelize
*/
static init(sequelize) {
super.init({
id: {
type: DataTypes.UUID,
defaultValue: DataTypes.UUIDV4,
primaryKey: true
super.init(
{
id: {
type: DataTypes.UUID,
defaultValue: DataTypes.UUIDV4,
primaryKey: true
},
title: DataTypes.STRING,
author: DataTypes.STRING,
description: DataTypes.TEXT,
siteURL: DataTypes.STRING,
enclosureURL: DataTypes.STRING,
enclosureType: DataTypes.STRING,
enclosureSize: DataTypes.BIGINT,
pubDate: DataTypes.STRING,
season: DataTypes.STRING,
episode: DataTypes.STRING,
episodeType: DataTypes.STRING,
duration: DataTypes.FLOAT,
filePath: DataTypes.STRING,
explicit: DataTypes.BOOLEAN
},
title: DataTypes.STRING,
author: DataTypes.STRING,
description: DataTypes.TEXT,
siteURL: DataTypes.STRING,
enclosureURL: DataTypes.STRING,
enclosureType: DataTypes.STRING,
enclosureSize: DataTypes.BIGINT,
pubDate: DataTypes.STRING,
season: DataTypes.STRING,
episode: DataTypes.STRING,
episodeType: DataTypes.STRING,
duration: DataTypes.FLOAT,
filePath: DataTypes.STRING,
explicit: DataTypes.BOOLEAN
}, {
sequelize,
modelName: 'feedEpisode'
})
{
sequelize,
modelName: 'feedEpisode'
}
)
const { feed } = sequelize.models
@ -135,4 +138,4 @@ class FeedEpisode extends Model {
}
}
module.exports = FeedEpisode
module.exports = FeedEpisode

View file

@ -10,7 +10,8 @@ const oldLibrary = require('../objects/Library')
* @property {boolean} skipMatchingMediaWithIsbn
* @property {string} autoScanCronExpression
* @property {boolean} audiobooksOnly
* @property {boolean} hideSingleBookSeries Do not show series that only have 1 book
* @property {boolean} hideSingleBookSeries Do not show series that only have 1 book
* @property {boolean} onlyShowLaterBooksInContinueSeries Skip showing books that are earlier than the max sequence read
* @property {string[]} metadataPrecedence
*/
@ -53,16 +54,16 @@ class Library extends Model {
include: this.sequelize.models.libraryFolder,
order: [['displayOrder', 'ASC']]
})
return libraries.map(lib => this.getOldLibrary(lib))
return libraries.map((lib) => this.getOldLibrary(lib))
}
/**
* Convert expanded Library to oldLibrary
* @param {Library} libraryExpanded
* @param {Library} libraryExpanded
* @returns {Promise<oldLibrary>}
*/
static getOldLibrary(libraryExpanded) {
const folders = libraryExpanded.libraryFolders.map(folder => {
const folders = libraryExpanded.libraryFolders.map((folder) => {
return {
id: folder.id,
fullPath: folder.path,
@ -89,13 +90,13 @@ class Library extends Model {
}
/**
* @param {object} oldLibrary
* @param {object} oldLibrary
* @returns {Library|null}
*/
static async createFromOld(oldLibrary) {
const library = this.getFromOld(oldLibrary)
library.libraryFolders = oldLibrary.folders.map(folder => {
library.libraryFolders = oldLibrary.folders.map((folder) => {
return {
id: folder.id,
path: folder.fullPath
@ -112,8 +113,8 @@ class Library extends Model {
/**
* Update library and library folders
* @param {object} oldLibrary
* @returns
* @param {object} oldLibrary
* @returns
*/
static async updateFromOld(oldLibrary) {
const existingLibrary = await this.findByPk(oldLibrary.id, {
@ -126,7 +127,7 @@ class Library extends Model {
const library = this.getFromOld(oldLibrary)
const libraryFolders = oldLibrary.folders.map(folder => {
const libraryFolders = oldLibrary.folders.map((folder) => {
return {
id: folder.id,
path: folder.fullPath,
@ -134,7 +135,7 @@ class Library extends Model {
}
})
for (const libraryFolder of libraryFolders) {
const existingLibraryFolder = existingLibrary.libraryFolders.find(lf => lf.id === libraryFolder.id)
const existingLibraryFolder = existingLibrary.libraryFolders.find((lf) => lf.id === libraryFolder.id)
if (!existingLibraryFolder) {
await this.sequelize.models.libraryFolder.create(libraryFolder)
} else if (existingLibraryFolder.path !== libraryFolder.path) {
@ -142,7 +143,7 @@ class Library extends Model {
}
}
const libraryFoldersRemoved = existingLibrary.libraryFolders.filter(lf => !libraryFolders.some(_lf => _lf.id === lf.id))
const libraryFoldersRemoved = existingLibrary.libraryFolders.filter((lf) => !libraryFolders.some((_lf) => _lf.id === lf.id))
for (const existingLibraryFolder of libraryFoldersRemoved) {
await existingLibraryFolder.destroy()
}
@ -176,8 +177,8 @@ class Library extends Model {
/**
* Destroy library by id
* @param {string} libraryId
* @returns
* @param {string} libraryId
* @returns
*/
static removeById(libraryId) {
return this.destroy({
@ -196,12 +197,12 @@ class Library extends Model {
attributes: ['id', 'displayOrder'],
order: [['displayOrder', 'ASC']]
})
return libraries.map(l => l.id)
return libraries.map((l) => l.id)
}
/**
* Find Library by primary key & return oldLibrary
* @param {string} libraryId
* @param {string} libraryId
* @returns {Promise<oldLibrary|null>} Returns null if not found
*/
static async getOldById(libraryId) {
@ -243,29 +244,32 @@ class Library extends Model {
/**
* Initialize model
* @param {import('../Database').sequelize} sequelize
* @param {import('../Database').sequelize} sequelize
*/
static init(sequelize) {
super.init({
id: {
type: DataTypes.UUID,
defaultValue: DataTypes.UUIDV4,
primaryKey: true
super.init(
{
id: {
type: DataTypes.UUID,
defaultValue: DataTypes.UUIDV4,
primaryKey: true
},
name: DataTypes.STRING,
displayOrder: DataTypes.INTEGER,
icon: DataTypes.STRING,
mediaType: DataTypes.STRING,
provider: DataTypes.STRING,
lastScan: DataTypes.DATE,
lastScanVersion: DataTypes.STRING,
settings: DataTypes.JSON,
extraData: DataTypes.JSON
},
name: DataTypes.STRING,
displayOrder: DataTypes.INTEGER,
icon: DataTypes.STRING,
mediaType: DataTypes.STRING,
provider: DataTypes.STRING,
lastScan: DataTypes.DATE,
lastScanVersion: DataTypes.STRING,
settings: DataTypes.JSON,
extraData: DataTypes.JSON
}, {
sequelize,
modelName: 'library'
})
{
sequelize,
modelName: 'library'
}
)
}
}
module.exports = Library
module.exports = Library

View file

@ -16,33 +16,25 @@ class LibraryFolder extends Model {
this.updatedAt
}
/**
* Gets all library folder path strings
* @returns {Promise<string[]>} array of library folder paths
*/
static async getAllLibraryFolderPaths() {
const libraryFolders = await this.findAll({
attributes: ['path']
})
return libraryFolders.map(l => l.path)
}
/**
* Initialize model
* @param {import('../Database').sequelize} sequelize
* @param {import('../Database').sequelize} sequelize
*/
static init(sequelize) {
super.init({
id: {
type: DataTypes.UUID,
defaultValue: DataTypes.UUIDV4,
primaryKey: true
super.init(
{
id: {
type: DataTypes.UUID,
defaultValue: DataTypes.UUIDV4,
primaryKey: true
},
path: DataTypes.STRING
},
path: DataTypes.STRING
}, {
sequelize,
modelName: 'libraryFolder'
})
{
sequelize,
modelName: 'libraryFolder'
}
)
const { library } = sequelize.models
library.hasMany(LibraryFolder, {
@ -52,4 +44,4 @@ class LibraryFolder extends Model {
}
}
module.exports = LibraryFolder
module.exports = LibraryFolder

View file

@ -1,8 +1,12 @@
const { DataTypes, Model, WhereOptions } = require('sequelize')
const Path = require('path')
const { DataTypes, Model } = require('sequelize')
const fsExtra = require('../libs/fsExtra')
const Logger = require('../Logger')
const oldLibraryItem = require('../objects/LibraryItem')
const libraryFilters = require('../utils/queries/libraryFilters')
const { areEquivalent } = require('../utils/index')
const { filePathToPOSIX, getFileTimestampsWithIno } = require('../utils/fileUtils')
const LibraryFile = require('../objects/files/LibraryFile')
const Book = require('./Book')
const Podcast = require('./Podcast')
@ -17,8 +21,8 @@ const Podcast = require('./Podcast')
/**
* @typedef LibraryItemExpandedProperties
* @property {Book.BookExpanded|Podcast.PodcastExpanded} media
*
* @property {Book.BookExpanded|Podcast.PodcastExpanded} media
*
* @typedef {LibraryItem & LibraryItemExpandedProperties} LibraryItemExpanded
*/
@ -73,7 +77,7 @@ class LibraryItem extends Model {
/**
* Gets library items partially expanded, not including podcast episodes
* @todo temporary solution
*
*
* @param {number} offset
* @param {number} limit
* @returns {Promise<LibraryItem[]>} LibraryItem
@ -116,7 +120,7 @@ class LibraryItem extends Model {
/**
* Currently unused because this is too slow and uses too much mem
* @param {[WhereOptions]} where
* @param {import('sequelize').WhereOptions} [where]
* @returns {Array<objects.LibraryItem>} old library items
*/
static async getAllOldLibraryItems(where = null) {
@ -150,13 +154,13 @@ class LibraryItem extends Model {
}
]
})
return libraryItems.map(ti => this.getOldLibraryItem(ti))
return libraryItems.map((ti) => this.getOldLibraryItem(ti))
}
/**
* Convert an expanded LibraryItem into an old library item
*
* @param {Model<LibraryItem>} libraryItemExpanded
*
* @param {Model<LibraryItem>} libraryItemExpanded
* @returns {oldLibraryItem}
*/
static getOldLibraryItem(libraryItemExpanded) {
@ -227,8 +231,8 @@ class LibraryItem extends Model {
/**
* Updates libraryItem, book, authors and series from old library item
*
* @param {oldLibraryItem} oldLibraryItem
*
* @param {oldLibraryItem} oldLibraryItem
* @returns {Promise<boolean>} true if updates were made
*/
static async fullUpdateFromOld(oldLibraryItem) {
@ -276,14 +280,14 @@ class LibraryItem extends Model {
for (const existingPodcastEpisode of existingPodcastEpisodes) {
// Episode was removed
if (!updatedPodcastEpisodes.some(ep => ep.id === existingPodcastEpisode.id)) {
if (!updatedPodcastEpisodes.some((ep) => ep.id === existingPodcastEpisode.id)) {
Logger.debug(`[LibraryItem] "${libraryItemExpanded.media.title}" episode "${existingPodcastEpisode.title}" was removed`)
await existingPodcastEpisode.destroy()
hasUpdates = true
}
}
for (const updatedPodcastEpisode of updatedPodcastEpisodes) {
const existingEpisodeMatch = existingPodcastEpisodes.find(ep => ep.id === updatedPodcastEpisode.id)
const existingEpisodeMatch = existingPodcastEpisodes.find((ep) => ep.id === updatedPodcastEpisode.id)
if (!existingEpisodeMatch) {
Logger.debug(`[LibraryItem] "${libraryItemExpanded.media.title}" episode "${updatedPodcastEpisode.title}" was added`)
await this.sequelize.models.podcastEpisode.createFromOld(updatedPodcastEpisode)
@ -312,19 +316,20 @@ class LibraryItem extends Model {
const existingAuthors = libraryItemExpanded.media.authors || []
const existingSeriesAll = libraryItemExpanded.media.series || []
const updatedAuthors = oldLibraryItem.media.metadata.authors || []
const uniqueUpdatedAuthors = updatedAuthors.filter((au, idx) => updatedAuthors.findIndex((a) => a.id === au.id) === idx)
const updatedSeriesAll = oldLibraryItem.media.metadata.series || []
for (const existingAuthor of existingAuthors) {
// Author was removed from Book
if (!updatedAuthors.some(au => au.id === existingAuthor.id)) {
if (!uniqueUpdatedAuthors.some((au) => au.id === existingAuthor.id)) {
Logger.debug(`[LibraryItem] "${libraryItemExpanded.media.title}" author "${existingAuthor.name}" was removed`)
await this.sequelize.models.bookAuthor.removeByIds(existingAuthor.id, libraryItemExpanded.media.id)
hasUpdates = true
}
}
for (const updatedAuthor of updatedAuthors) {
for (const updatedAuthor of uniqueUpdatedAuthors) {
// Author was added
if (!existingAuthors.some(au => au.id === updatedAuthor.id)) {
if (!existingAuthors.some((au) => au.id === updatedAuthor.id)) {
Logger.debug(`[LibraryItem] "${libraryItemExpanded.media.title}" author "${updatedAuthor.name}" was added`)
await this.sequelize.models.bookAuthor.create({ authorId: updatedAuthor.id, bookId: libraryItemExpanded.media.id })
hasUpdates = true
@ -332,7 +337,7 @@ class LibraryItem extends Model {
}
for (const existingSeries of existingSeriesAll) {
// Series was removed
if (!updatedSeriesAll.some(se => se.id === existingSeries.id)) {
if (!updatedSeriesAll.some((se) => se.id === existingSeries.id)) {
Logger.debug(`[LibraryItem] "${libraryItemExpanded.media.title}" series "${existingSeries.name}" was removed`)
await this.sequelize.models.bookSeries.removeByIds(existingSeries.id, libraryItemExpanded.media.id)
hasUpdates = true
@ -340,7 +345,7 @@ class LibraryItem extends Model {
}
for (const updatedSeries of updatedSeriesAll) {
// Series was added/updated
const existingSeriesMatch = existingSeriesAll.find(se => se.id === updatedSeries.id)
const existingSeriesMatch = existingSeriesAll.find((se) => se.id === updatedSeries.id)
if (!existingSeriesMatch) {
Logger.debug(`[LibraryItem] "${libraryItemExpanded.media.title}" series "${updatedSeries.name}" was added`)
await this.sequelize.models.bookSeries.create({ seriesId: updatedSeries.id, bookId: libraryItemExpanded.media.id, sequence: updatedSeries.sequence })
@ -378,6 +383,9 @@ class LibraryItem extends Model {
if (!areEquivalent(updatedLibraryItem[key], existingValue, true)) {
Logger.debug(`[LibraryItem] "${libraryItemExpanded.media.title}" ${key} updated from ${existingValue} to ${updatedLibraryItem[key]}`)
hasLibraryItemUpdates = true
if (key === 'updatedAt') {
libraryItemExpanded.changed('updatedAt', true)
}
}
}
if (hasLibraryItemUpdates) {
@ -405,13 +413,14 @@ class LibraryItem extends Model {
isInvalid: !!oldLibraryItem.isInvalid,
mtime: oldLibraryItem.mtimeMs,
ctime: oldLibraryItem.ctimeMs,
updatedAt: oldLibraryItem.updatedAt,
birthtime: oldLibraryItem.birthtimeMs,
size: oldLibraryItem.size,
lastScan: oldLibraryItem.lastScan,
lastScanVersion: oldLibraryItem.scanVersion,
libraryId: oldLibraryItem.libraryId,
libraryFolderId: oldLibraryItem.folderId,
libraryFiles: oldLibraryItem.libraryFiles?.map(lf => lf.toJSON()) || [],
libraryFiles: oldLibraryItem.libraryFiles?.map((lf) => lf.toJSON()) || [],
extraData
}
}
@ -426,8 +435,8 @@ class LibraryItem extends Model {
}
/**
*
* @param {string} libraryItemId
*
* @param {string} libraryItemId
* @returns {Promise<LibraryItemExpanded>}
*/
static async getExpandedById(libraryItemId) {
@ -476,7 +485,7 @@ class LibraryItem extends Model {
/**
* Get old library item by id
* @param {string} libraryItemId
* @param {string} libraryItemId
* @returns {oldLibraryItem}
*/
static async getOldById(libraryItemId) {
@ -525,9 +534,9 @@ class LibraryItem extends Model {
/**
* Get library items using filter and sort
* @param {oldLibrary} library
* @param {oldUser} user
* @param {object} options
* @param {oldLibrary} library
* @param {oldUser} user
* @param {object} options
* @returns {object} { libraryItems:oldLibraryItem[], count:number }
*/
static async getByFilterAndSort(library, user, options) {
@ -536,7 +545,7 @@ class LibraryItem extends Model {
Logger.debug(`Loaded ${libraryItems.length} of ${count} items for libary page in ${((Date.now() - start) / 1000).toFixed(2)}s`)
return {
libraryItems: libraryItems.map(li => {
libraryItems: libraryItems.map((li) => {
const oldLibraryItem = this.getOldLibraryItem(li).toJSONMinified()
if (li.collapsedSeries) {
oldLibraryItem.collapsedSeries = li.collapsedSeries
@ -565,10 +574,10 @@ class LibraryItem extends Model {
/**
* Get home page data personalized shelves
* @param {oldLibrary} library
* @param {oldUser} user
* @param {string[]} include
* @param {number} limit
* @param {oldLibrary} library
* @param {oldUser} user
* @param {string[]} include
* @param {number} limit
* @returns {object[]} array of shelf objects
*/
static async getPersonalizedShelves(library, user, include, limit) {
@ -579,8 +588,8 @@ class LibraryItem extends Model {
// "Continue Listening" shelf
const itemsInProgressPayload = await libraryFilters.getMediaItemsInProgress(library, user, include, limit, false)
if (itemsInProgressPayload.items.length) {
const ebookOnlyItemsInProgress = itemsInProgressPayload.items.filter(li => li.media.isEBookOnly)
const audioOnlyItemsInProgress = itemsInProgressPayload.items.filter(li => !li.media.isEBookOnly)
const ebookOnlyItemsInProgress = itemsInProgressPayload.items.filter((li) => li.media.isEBookOnly)
const audioOnlyItemsInProgress = itemsInProgressPayload.items.filter((li) => !li.media.isEBookOnly)
shelves.push({
id: 'continue-listening',
@ -688,8 +697,8 @@ class LibraryItem extends Model {
// "Listen Again" shelf
const mediaFinishedPayload = await libraryFilters.getMediaFinished(library, user, include, limit)
if (mediaFinishedPayload.items.length) {
const ebookOnlyItemsInProgress = mediaFinishedPayload.items.filter(li => li.media.isEBookOnly)
const audioOnlyItemsInProgress = mediaFinishedPayload.items.filter(li => !li.media.isEBookOnly)
const ebookOnlyItemsInProgress = mediaFinishedPayload.items.filter((li) => li.media.isEBookOnly)
const audioOnlyItemsInProgress = mediaFinishedPayload.items.filter((li) => !li.media.isEBookOnly)
shelves.push({
id: 'listen-again',
@ -739,27 +748,27 @@ class LibraryItem extends Model {
/**
* Get book library items for author, optional use user permissions
* @param {oldAuthor} author
* @param {[oldUser]} user
* @param {[oldUser]} user
* @returns {Promise<oldLibraryItem[]>}
*/
static async getForAuthor(author, user = null) {
const { libraryItems } = await libraryFilters.getLibraryItemsForAuthor(author, user, undefined, undefined)
return libraryItems.map(li => this.getOldLibraryItem(li))
return libraryItems.map((li) => this.getOldLibraryItem(li))
}
/**
* Get book library items in a collection
* @param {oldCollection} collection
* @param {oldCollection} collection
* @returns {Promise<oldLibraryItem[]>}
*/
static async getForCollection(collection) {
const libraryItems = await libraryFilters.getLibraryItemsForCollection(collection)
return libraryItems.map(li => this.getOldLibraryItem(li))
return libraryItems.map((li) => this.getOldLibraryItem(li))
}
/**
* Check if library item exists
* @param {string} libraryItemId
* @param {string} libraryItemId
* @returns {Promise<boolean>}
*/
static async checkExistsById(libraryItemId) {
@ -767,13 +776,15 @@ class LibraryItem extends Model {
}
/**
*
* @param {WhereOptions} where
*
* @param {import('sequelize').WhereOptions} where
* @param {import('sequelize').BindOrReplacements} replacements
* @returns {Object} oldLibraryItem
*/
static async findOneOld(where) {
static async findOneOld(where, replacements = {}) {
const libraryItem = await this.findOne({
where,
replacements,
include: [
{
model: this.sequelize.models.book,
@ -811,8 +822,8 @@ class LibraryItem extends Model {
}
/**
*
* @param {import('sequelize').FindOptions} options
*
* @param {import('sequelize').FindOptions} options
* @returns {Promise<Book|Podcast>}
*/
getMedia(options) {
@ -821,57 +832,203 @@ class LibraryItem extends Model {
return this[mixinMethodName](options)
}
/**
*
* @returns {Promise<Book|Podcast>}
*/
getMediaExpanded() {
if (this.mediaType === 'podcast') {
return this.getMedia({
include: [
{
model: this.sequelize.models.podcastEpisode
}
]
})
} else {
return this.getMedia({
include: [
{
model: this.sequelize.models.author,
through: {
attributes: []
}
},
{
model: this.sequelize.models.series,
through: {
attributes: ['sequence']
}
}
],
order: [
[this.sequelize.models.author, this.sequelize.models.bookAuthor, 'createdAt', 'ASC'],
[this.sequelize.models.series, 'bookSeries', 'createdAt', 'ASC']
]
})
}
}
/**
*
* @returns {Promise}
*/
async saveMetadataFile() {
let metadataPath = Path.join(global.MetadataPath, 'items', this.id)
let storeMetadataWithItem = global.ServerSettings.storeMetadataWithItem
if (storeMetadataWithItem && !this.isFile) {
metadataPath = this.path
} else {
// Make sure metadata book dir exists
storeMetadataWithItem = false
await fsExtra.ensureDir(metadataPath)
}
const metadataFilePath = Path.join(metadataPath, `metadata.${global.ServerSettings.metadataFileFormat}`)
// Expanded with series, authors, podcastEpisodes
const mediaExpanded = this.media || (await this.getMediaExpanded())
let jsonObject = {}
if (this.mediaType === 'book') {
jsonObject = {
tags: mediaExpanded.tags || [],
chapters: mediaExpanded.chapters?.map((c) => ({ ...c })) || [],
title: mediaExpanded.title,
subtitle: mediaExpanded.subtitle,
authors: mediaExpanded.authors.map((a) => a.name),
narrators: mediaExpanded.narrators,
series: mediaExpanded.series.map((se) => {
const sequence = se.bookSeries?.sequence || ''
if (!sequence) return se.name
return `${se.name} #${sequence}`
}),
genres: mediaExpanded.genres || [],
publishedYear: mediaExpanded.publishedYear,
publishedDate: mediaExpanded.publishedDate,
publisher: mediaExpanded.publisher,
description: mediaExpanded.description,
isbn: mediaExpanded.isbn,
asin: mediaExpanded.asin,
language: mediaExpanded.language,
explicit: !!mediaExpanded.explicit,
abridged: !!mediaExpanded.abridged
}
} else {
jsonObject = {
tags: mediaExpanded.tags || [],
title: mediaExpanded.title,
author: mediaExpanded.author,
description: mediaExpanded.description,
releaseDate: mediaExpanded.releaseDate,
genres: mediaExpanded.genres || [],
feedURL: mediaExpanded.feedURL,
imageURL: mediaExpanded.imageURL,
itunesPageURL: mediaExpanded.itunesPageURL,
itunesId: mediaExpanded.itunesId,
itunesArtistId: mediaExpanded.itunesArtistId,
asin: mediaExpanded.asin,
language: mediaExpanded.language,
explicit: !!mediaExpanded.explicit,
podcastType: mediaExpanded.podcastType
}
}
return fsExtra
.writeFile(metadataFilePath, JSON.stringify(jsonObject, null, 2))
.then(async () => {
// Add metadata.json to libraryFiles array if it is new
let metadataLibraryFile = this.libraryFiles.find((lf) => lf.metadata.path === filePathToPOSIX(metadataFilePath))
if (storeMetadataWithItem) {
if (!metadataLibraryFile) {
const newLibraryFile = new LibraryFile()
await newLibraryFile.setDataFromPath(metadataFilePath, `metadata.json`)
metadataLibraryFile = newLibraryFile.toJSON()
this.libraryFiles.push(metadataLibraryFile)
} else {
const fileTimestamps = await getFileTimestampsWithIno(metadataFilePath)
if (fileTimestamps) {
metadataLibraryFile.metadata.mtimeMs = fileTimestamps.mtimeMs
metadataLibraryFile.metadata.ctimeMs = fileTimestamps.ctimeMs
metadataLibraryFile.metadata.size = fileTimestamps.size
metadataLibraryFile.ino = fileTimestamps.ino
}
}
const libraryItemDirTimestamps = await getFileTimestampsWithIno(this.path)
if (libraryItemDirTimestamps) {
this.mtime = libraryItemDirTimestamps.mtimeMs
this.ctime = libraryItemDirTimestamps.ctimeMs
let size = 0
this.libraryFiles.forEach((lf) => (size += !isNaN(lf.metadata.size) ? Number(lf.metadata.size) : 0))
this.size = size
await this.save()
}
}
Logger.debug(`Success saving abmetadata to "${metadataFilePath}"`)
return metadataLibraryFile
})
.catch((error) => {
Logger.error(`Failed to save json file at "${metadataFilePath}"`, error)
return null
})
}
/**
* Initialize model
* @param {import('../Database').sequelize} sequelize
* @param {import('../Database').sequelize} sequelize
*/
static init(sequelize) {
super.init({
id: {
type: DataTypes.UUID,
defaultValue: DataTypes.UUIDV4,
primaryKey: true
super.init(
{
id: {
type: DataTypes.UUID,
defaultValue: DataTypes.UUIDV4,
primaryKey: true
},
ino: DataTypes.STRING,
path: DataTypes.STRING,
relPath: DataTypes.STRING,
mediaId: DataTypes.UUIDV4,
mediaType: DataTypes.STRING,
isFile: DataTypes.BOOLEAN,
isMissing: DataTypes.BOOLEAN,
isInvalid: DataTypes.BOOLEAN,
mtime: DataTypes.DATE(6),
ctime: DataTypes.DATE(6),
birthtime: DataTypes.DATE(6),
size: DataTypes.BIGINT,
lastScan: DataTypes.DATE,
lastScanVersion: DataTypes.STRING,
libraryFiles: DataTypes.JSON,
extraData: DataTypes.JSON
},
ino: DataTypes.STRING,
path: DataTypes.STRING,
relPath: DataTypes.STRING,
mediaId: DataTypes.UUIDV4,
mediaType: DataTypes.STRING,
isFile: DataTypes.BOOLEAN,
isMissing: DataTypes.BOOLEAN,
isInvalid: DataTypes.BOOLEAN,
mtime: DataTypes.DATE(6),
ctime: DataTypes.DATE(6),
birthtime: DataTypes.DATE(6),
size: DataTypes.BIGINT,
lastScan: DataTypes.DATE,
lastScanVersion: DataTypes.STRING,
libraryFiles: DataTypes.JSON,
extraData: DataTypes.JSON
}, {
sequelize,
modelName: 'libraryItem',
indexes: [
{
fields: ['createdAt']
},
{
fields: ['mediaId']
},
{
fields: ['libraryId', 'mediaType']
},
{
fields: ['libraryId', 'mediaId', 'mediaType']
},
{
fields: ['birthtime']
},
{
fields: ['mtime']
}
]
})
{
sequelize,
modelName: 'libraryItem',
indexes: [
{
fields: ['createdAt']
},
{
fields: ['mediaId']
},
{
fields: ['libraryId', 'mediaType']
},
{
fields: ['libraryId', 'mediaId', 'mediaType']
},
{
fields: ['birthtime']
},
{
fields: ['mtime']
}
]
}
)
const { library, libraryFolder, book, podcast } = sequelize.models
library.hasMany(LibraryItem)
@ -898,7 +1055,7 @@ class LibraryItem extends Model {
})
LibraryItem.belongsTo(podcast, { foreignKey: 'mediaId', constraints: false })
LibraryItem.addHook('afterFind', findResult => {
LibraryItem.addHook('afterFind', (findResult) => {
if (!findResult) return
if (!Array.isArray(findResult)) findResult = [findResult]
@ -918,7 +1075,7 @@ class LibraryItem extends Model {
}
})
LibraryItem.addHook('afterDestroy', async instance => {
LibraryItem.addHook('afterDestroy', async (instance) => {
if (!instance) return
const media = await instance.getMedia()
if (media) {

View file

@ -100,38 +100,41 @@ class MediaProgress extends Model {
/**
* Initialize model
*
*
* Polymorphic association: Book has many MediaProgress. PodcastEpisode has many MediaProgress.
* @see https://sequelize.org/docs/v6/advanced-association-concepts/polymorphic-associations/
*
* @param {import('../Database').sequelize} sequelize
*
* @param {import('../Database').sequelize} sequelize
*/
static init(sequelize) {
super.init({
id: {
type: DataTypes.UUID,
defaultValue: DataTypes.UUIDV4,
primaryKey: true
super.init(
{
id: {
type: DataTypes.UUID,
defaultValue: DataTypes.UUIDV4,
primaryKey: true
},
mediaItemId: DataTypes.UUIDV4,
mediaItemType: DataTypes.STRING,
duration: DataTypes.FLOAT,
currentTime: DataTypes.FLOAT,
isFinished: DataTypes.BOOLEAN,
hideFromContinueListening: DataTypes.BOOLEAN,
ebookLocation: DataTypes.STRING,
ebookProgress: DataTypes.FLOAT,
finishedAt: DataTypes.DATE,
extraData: DataTypes.JSON
},
mediaItemId: DataTypes.UUIDV4,
mediaItemType: DataTypes.STRING,
duration: DataTypes.FLOAT,
currentTime: DataTypes.FLOAT,
isFinished: DataTypes.BOOLEAN,
hideFromContinueListening: DataTypes.BOOLEAN,
ebookLocation: DataTypes.STRING,
ebookProgress: DataTypes.FLOAT,
finishedAt: DataTypes.DATE,
extraData: DataTypes.JSON
}, {
sequelize,
modelName: 'mediaProgress',
indexes: [
{
fields: ['updatedAt']
}
]
})
{
sequelize,
modelName: 'mediaProgress',
indexes: [
{
fields: ['updatedAt']
}
]
}
)
const { book, podcastEpisode, user } = sequelize.models
@ -153,7 +156,7 @@ class MediaProgress extends Model {
})
MediaProgress.belongsTo(podcastEpisode, { foreignKey: 'mediaItemId', constraints: false })
MediaProgress.addHook('afterFind', findResult => {
MediaProgress.addHook('afterFind', (findResult) => {
if (!findResult) return
if (!Array.isArray(findResult)) findResult = [findResult]
@ -181,4 +184,4 @@ class MediaProgress extends Model {
}
}
module.exports = MediaProgress
module.exports = MediaProgress

View file

@ -2,7 +2,6 @@ const { DataTypes, Model } = require('sequelize')
const oldPlaybackSession = require('../objects/PlaybackSession')
class PlaybackSession extends Model {
constructor(values, options) {
super(values, options)
@ -62,7 +61,7 @@ class PlaybackSession extends Model {
}
]
})
return playbackSessions.map(session => this.getOldPlaybackSession(session))
return playbackSessions.map((session) => this.getOldPlaybackSession(session))
}
static async getById(sessionId) {
@ -170,35 +169,38 @@ class PlaybackSession extends Model {
/**
* Initialize model
* @param {import('../Database').sequelize} sequelize
* @param {import('../Database').sequelize} sequelize
*/
static init(sequelize) {
super.init({
id: {
type: DataTypes.UUID,
defaultValue: DataTypes.UUIDV4,
primaryKey: true
super.init(
{
id: {
type: DataTypes.UUID,
defaultValue: DataTypes.UUIDV4,
primaryKey: true
},
mediaItemId: DataTypes.UUIDV4,
mediaItemType: DataTypes.STRING,
displayTitle: DataTypes.STRING,
displayAuthor: DataTypes.STRING,
duration: DataTypes.FLOAT,
playMethod: DataTypes.INTEGER,
mediaPlayer: DataTypes.STRING,
startTime: DataTypes.FLOAT,
currentTime: DataTypes.FLOAT,
serverVersion: DataTypes.STRING,
coverPath: DataTypes.STRING,
timeListening: DataTypes.INTEGER,
mediaMetadata: DataTypes.JSON,
date: DataTypes.STRING,
dayOfWeek: DataTypes.STRING,
extraData: DataTypes.JSON
},
mediaItemId: DataTypes.UUIDV4,
mediaItemType: DataTypes.STRING,
displayTitle: DataTypes.STRING,
displayAuthor: DataTypes.STRING,
duration: DataTypes.FLOAT,
playMethod: DataTypes.INTEGER,
mediaPlayer: DataTypes.STRING,
startTime: DataTypes.FLOAT,
currentTime: DataTypes.FLOAT,
serverVersion: DataTypes.STRING,
coverPath: DataTypes.STRING,
timeListening: DataTypes.INTEGER,
mediaMetadata: DataTypes.JSON,
date: DataTypes.STRING,
dayOfWeek: DataTypes.STRING,
extraData: DataTypes.JSON
}, {
sequelize,
modelName: 'playbackSession'
})
{
sequelize,
modelName: 'playbackSession'
}
)
const { book, podcastEpisode, user, device, library } = sequelize.models
@ -229,7 +231,7 @@ class PlaybackSession extends Model {
})
PlaybackSession.belongsTo(podcastEpisode, { foreignKey: 'mediaItemId', constraints: false })
PlaybackSession.addHook('afterFind', findResult => {
PlaybackSession.addHook('afterFind', (findResult) => {
if (!findResult) return
if (!Array.isArray(findResult)) findResult = [findResult]

View file

@ -23,41 +23,21 @@ class Playlist extends Model {
this.updatedAt
}
static async getOldPlaylists() {
const playlists = await this.findAll({
include: {
model: this.sequelize.models.playlistMediaItem,
include: [
{
model: this.sequelize.models.book,
include: this.sequelize.models.libraryItem
},
{
model: this.sequelize.models.podcastEpisode,
include: {
model: this.sequelize.models.podcast,
include: this.sequelize.models.libraryItem
}
}
]
},
order: [['playlistMediaItems', 'order', 'ASC']]
})
return playlists.map(p => this.getOldPlaylist(p))
}
static getOldPlaylist(playlistExpanded) {
const items = playlistExpanded.playlistMediaItems.map(pmi => {
const libraryItemId = pmi.mediaItem?.podcast?.libraryItem?.id || pmi.mediaItem?.libraryItem?.id || null
if (!libraryItemId) {
Logger.error(`[Playlist] Invalid playlist media item - No library item id found`, JSON.stringify(pmi, null, 2))
return null
}
return {
episodeId: pmi.mediaItemType === 'podcastEpisode' ? pmi.mediaItemId : '',
libraryItemId
}
}).filter(pmi => pmi)
const items = playlistExpanded.playlistMediaItems
.map((pmi) => {
const mediaItem = pmi.mediaItem || pmi.dataValues?.mediaItem
const libraryItemId = mediaItem?.podcast?.libraryItem?.id || mediaItem?.libraryItem?.id || null
if (!libraryItemId) {
Logger.error(`[Playlist] Invalid playlist media item - No library item id found`, JSON.stringify(pmi, null, 2))
return null
}
return {
episodeId: pmi.mediaItemType === 'podcastEpisode' ? pmi.mediaItemId : '',
libraryItemId
}
})
.filter((pmi) => pmi)
return new oldPlaylist({
id: playlistExpanded.id,
@ -73,29 +53,30 @@ class Playlist extends Model {
/**
* Get old playlist toJSONExpanded
* @param {[string[]]} include
* @returns {Promise<object>} oldPlaylist.toJSONExpanded
* @param {string[]} [include]
* @returns {Promise<oldPlaylist>} oldPlaylist.toJSONExpanded
*/
async getOldJsonExpanded(include) {
this.playlistMediaItems = await this.getPlaylistMediaItems({
include: [
{
model: this.sequelize.models.book,
include: this.sequelize.models.libraryItem
},
{
model: this.sequelize.models.podcastEpisode,
include: {
model: this.sequelize.models.podcast,
this.playlistMediaItems =
(await this.getPlaylistMediaItems({
include: [
{
model: this.sequelize.models.book,
include: this.sequelize.models.libraryItem
},
{
model: this.sequelize.models.podcastEpisode,
include: {
model: this.sequelize.models.podcast,
include: this.sequelize.models.libraryItem
}
}
}
],
order: [['order', 'ASC']]
}) || []
],
order: [['order', 'ASC']]
})) || []
const oldPlaylist = this.sequelize.models.playlist.getOldPlaylist(this)
const libraryItemIds = oldPlaylist.items.map(i => i.libraryItemId)
const libraryItemIds = oldPlaylist.items.map((i) => i.libraryItemId)
let libraryItems = await this.sequelize.models.libraryItem.getAllOldLibraryItems({
id: libraryItemIds
@ -138,7 +119,7 @@ class Playlist extends Model {
/**
* Get playlist by id
* @param {string} playlistId
* @param {string} playlistId
* @returns {Promise<oldPlaylist|null>} returns null if not found
*/
static async getById(playlistId) {
@ -167,12 +148,13 @@ class Playlist extends Model {
}
/**
* Get playlists for user and optionally for library
* @param {string} userId
* @param {[string]} libraryId optional
* @returns {Promise<Playlist[]>}
* Get old playlists for user and optionally for library
*
* @param {string} userId
* @param {string} [libraryId]
* @returns {Promise<oldPlaylist[]>}
*/
static async getPlaylistsForUserAndLibrary(userId, libraryId = null) {
static async getOldPlaylistsForUserAndLibrary(userId, libraryId = null) {
if (!userId && !libraryId) return []
const whereQuery = {}
if (userId) {
@ -181,7 +163,7 @@ class Playlist extends Model {
if (libraryId) {
whereQuery.libraryId = libraryId
}
const playlists = await this.findAll({
const playlistsExpanded = await this.findAll({
where: whereQuery,
include: {
model: this.sequelize.models.playlistMediaItem,
@ -204,14 +186,44 @@ class Playlist extends Model {
['playlistMediaItems', 'order', 'ASC']
]
})
return playlists
const oldPlaylists = []
for (const playlistExpanded of playlistsExpanded) {
const oldPlaylist = this.getOldPlaylist(playlistExpanded)
const libraryItems = []
for (const pmi of playlistExpanded.playlistMediaItems) {
let mediaItem = pmi.mediaItem || pmi.dataValues.mediaItem
if (!mediaItem) {
Logger.error(`[Playlist] Invalid playlist media item - No media item found`, JSON.stringify(mediaItem, null, 2))
continue
}
let libraryItem = mediaItem.libraryItem || mediaItem.podcast?.libraryItem
if (mediaItem.podcast) {
libraryItem.media = mediaItem.podcast
libraryItem.media.podcastEpisodes = [mediaItem]
delete mediaItem.podcast.libraryItem
} else {
libraryItem.media = mediaItem
delete mediaItem.libraryItem
}
const oldLibraryItem = this.sequelize.models.libraryItem.getOldLibraryItem(libraryItem)
libraryItems.push(oldLibraryItem)
}
const oldPlaylistJson = oldPlaylist.toJSONExpanded(libraryItems)
oldPlaylists.push(oldPlaylistJson)
}
return oldPlaylists
}
/**
* Get number of playlists for a user and library
* @param {string} userId
* @param {string} libraryId
* @returns
* @param {string} userId
* @param {string} libraryId
* @returns
*/
static async getNumPlaylistsForUserAndLibrary(userId, libraryId) {
return this.count({
@ -224,7 +236,7 @@ class Playlist extends Model {
/**
* Get all playlists for mediaItemIds
* @param {string[]} mediaItemIds
* @param {string[]} mediaItemIds
* @returns {Promise<Playlist[]>}
*/
static async getPlaylistsForMediaItemIds(mediaItemIds) {
@ -263,9 +275,9 @@ class Playlist extends Model {
const playlists = []
for (const playlistMediaItem of playlistMediaItemsExpanded) {
const playlist = playlistMediaItem.playlist
if (playlists.some(p => p.id === playlist.id)) continue
if (playlists.some((p) => p.id === playlist.id)) continue
playlist.playlistMediaItems = playlist.playlistMediaItems.map(pmi => {
playlist.playlistMediaItems = playlist.playlistMediaItems.map((pmi) => {
if (pmi.mediaItemType === 'book' && pmi.book !== undefined) {
pmi.mediaItem = pmi.book
pmi.dataValues.mediaItem = pmi.dataValues.book
@ -286,21 +298,24 @@ class Playlist extends Model {
/**
* Initialize model
* @param {import('../Database').sequelize} sequelize
* @param {import('../Database').sequelize} sequelize
*/
static init(sequelize) {
super.init({
id: {
type: DataTypes.UUID,
defaultValue: DataTypes.UUIDV4,
primaryKey: true
super.init(
{
id: {
type: DataTypes.UUID,
defaultValue: DataTypes.UUIDV4,
primaryKey: true
},
name: DataTypes.STRING,
description: DataTypes.TEXT
},
name: DataTypes.STRING,
description: DataTypes.TEXT
}, {
sequelize,
modelName: 'playlist'
})
{
sequelize,
modelName: 'playlist'
}
)
const { library, user } = sequelize.models
library.hasMany(Playlist)
@ -311,14 +326,14 @@ class Playlist extends Model {
})
Playlist.belongsTo(user)
Playlist.addHook('afterFind', findResult => {
Playlist.addHook('afterFind', (findResult) => {
if (!findResult) return
if (!Array.isArray(findResult)) findResult = [findResult]
for (const instance of findResult) {
if (instance.playlistMediaItems?.length) {
instance.playlistMediaItems = instance.playlistMediaItems.map(pmi => {
instance.playlistMediaItems = instance.playlistMediaItems.map((pmi) => {
if (pmi.mediaItemType === 'book' && pmi.book !== undefined) {
pmi.mediaItem = pmi.book
pmi.dataValues.mediaItem = pmi.dataValues.book
@ -334,10 +349,9 @@ class Playlist extends Model {
return pmi
})
}
}
})
}
}
module.exports = Playlist
module.exports = Playlist

View file

@ -35,24 +35,27 @@ class PlaylistMediaItem extends Model {
/**
* Initialize model
* @param {import('../Database').sequelize} sequelize
* @param {import('../Database').sequelize} sequelize
*/
static init(sequelize) {
super.init({
id: {
type: DataTypes.UUID,
defaultValue: DataTypes.UUIDV4,
primaryKey: true
super.init(
{
id: {
type: DataTypes.UUID,
defaultValue: DataTypes.UUIDV4,
primaryKey: true
},
mediaItemId: DataTypes.UUIDV4,
mediaItemType: DataTypes.STRING,
order: DataTypes.INTEGER
},
mediaItemId: DataTypes.UUIDV4,
mediaItemType: DataTypes.STRING,
order: DataTypes.INTEGER
}, {
sequelize,
timestamps: true,
updatedAt: false,
modelName: 'playlistMediaItem'
})
{
sequelize,
timestamps: true,
updatedAt: false,
modelName: 'playlistMediaItem'
}
)
const { book, podcastEpisode, playlist } = sequelize.models
@ -74,7 +77,7 @@ class PlaylistMediaItem extends Model {
})
PlaylistMediaItem.belongsTo(podcastEpisode, { foreignKey: 'mediaItemId', constraints: false })
PlaylistMediaItem.addHook('afterFind', findResult => {
PlaylistMediaItem.addHook('afterFind', (findResult) => {
if (!findResult) return
if (!Array.isArray(findResult)) findResult = [findResult]

View file

@ -3,7 +3,7 @@ const { DataTypes, Model } = require('sequelize')
/**
* @typedef PodcastExpandedProperties
* @property {import('./PodcastEpisode')[]} podcastEpisodes
*
*
* @typedef {Podcast & PodcastExpandedProperties} PodcastExpanded
*/
@ -61,7 +61,7 @@ class Podcast extends Model {
static getOldPodcast(libraryItemExpanded) {
const podcastExpanded = libraryItemExpanded.media
const podcastEpisodes = podcastExpanded.podcastEpisodes?.map(ep => ep.getOldPodcastEpisode(libraryItemExpanded.id).toJSON()).sort((a, b) => a.index - b.index)
const podcastEpisodes = podcastExpanded.podcastEpisodes?.map((ep) => ep.getOldPodcastEpisode(libraryItemExpanded.id).toJSON()).sort((a, b) => a.index - b.index)
return {
id: podcastExpanded.id,
libraryItemId: libraryItemExpanded.id,
@ -140,42 +140,45 @@ class Podcast extends Model {
/**
* Initialize model
* @param {import('../Database').sequelize} sequelize
* @param {import('../Database').sequelize} sequelize
*/
static init(sequelize) {
super.init({
id: {
type: DataTypes.UUID,
defaultValue: DataTypes.UUIDV4,
primaryKey: true
},
title: DataTypes.STRING,
titleIgnorePrefix: DataTypes.STRING,
author: DataTypes.STRING,
releaseDate: DataTypes.STRING,
feedURL: DataTypes.STRING,
imageURL: DataTypes.STRING,
description: DataTypes.TEXT,
itunesPageURL: DataTypes.STRING,
itunesId: DataTypes.STRING,
itunesArtistId: DataTypes.STRING,
language: DataTypes.STRING,
podcastType: DataTypes.STRING,
explicit: DataTypes.BOOLEAN,
super.init(
{
id: {
type: DataTypes.UUID,
defaultValue: DataTypes.UUIDV4,
primaryKey: true
},
title: DataTypes.STRING,
titleIgnorePrefix: DataTypes.STRING,
author: DataTypes.STRING,
releaseDate: DataTypes.STRING,
feedURL: DataTypes.STRING,
imageURL: DataTypes.STRING,
description: DataTypes.TEXT,
itunesPageURL: DataTypes.STRING,
itunesId: DataTypes.STRING,
itunesArtistId: DataTypes.STRING,
language: DataTypes.STRING,
podcastType: DataTypes.STRING,
explicit: DataTypes.BOOLEAN,
autoDownloadEpisodes: DataTypes.BOOLEAN,
autoDownloadSchedule: DataTypes.STRING,
lastEpisodeCheck: DataTypes.DATE,
maxEpisodesToKeep: DataTypes.INTEGER,
maxNewEpisodesToDownload: DataTypes.INTEGER,
coverPath: DataTypes.STRING,
tags: DataTypes.JSON,
genres: DataTypes.JSON
}, {
sequelize,
modelName: 'podcast'
})
autoDownloadEpisodes: DataTypes.BOOLEAN,
autoDownloadSchedule: DataTypes.STRING,
lastEpisodeCheck: DataTypes.DATE,
maxEpisodesToKeep: DataTypes.INTEGER,
maxNewEpisodesToDownload: DataTypes.INTEGER,
coverPath: DataTypes.STRING,
tags: DataTypes.JSON,
genres: DataTypes.JSON
},
{
sequelize,
modelName: 'podcast'
}
)
}
}
module.exports = Podcast
module.exports = Podcast

View file

@ -54,7 +54,7 @@ class PodcastEpisode extends Model {
}
/**
* @param {string} libraryItemId
* @param {string} libraryItemId
* @returns {oldPodcastEpisode}
*/
getOldPodcastEpisode(libraryItemId = null) {
@ -125,40 +125,43 @@ class PodcastEpisode extends Model {
/**
* Initialize model
* @param {import('../Database').sequelize} sequelize
* @param {import('../Database').sequelize} sequelize
*/
static init(sequelize) {
super.init({
id: {
type: DataTypes.UUID,
defaultValue: DataTypes.UUIDV4,
primaryKey: true
},
index: DataTypes.INTEGER,
season: DataTypes.STRING,
episode: DataTypes.STRING,
episodeType: DataTypes.STRING,
title: DataTypes.STRING,
subtitle: DataTypes.STRING(1000),
description: DataTypes.TEXT,
pubDate: DataTypes.STRING,
enclosureURL: DataTypes.STRING,
enclosureSize: DataTypes.BIGINT,
enclosureType: DataTypes.STRING,
publishedAt: DataTypes.DATE,
super.init(
{
id: {
type: DataTypes.UUID,
defaultValue: DataTypes.UUIDV4,
primaryKey: true
},
index: DataTypes.INTEGER,
season: DataTypes.STRING,
episode: DataTypes.STRING,
episodeType: DataTypes.STRING,
title: DataTypes.STRING,
subtitle: DataTypes.STRING(1000),
description: DataTypes.TEXT,
pubDate: DataTypes.STRING,
enclosureURL: DataTypes.STRING,
enclosureSize: DataTypes.BIGINT,
enclosureType: DataTypes.STRING,
publishedAt: DataTypes.DATE,
audioFile: DataTypes.JSON,
chapters: DataTypes.JSON,
extraData: DataTypes.JSON
}, {
sequelize,
modelName: 'podcastEpisode',
indexes: [
{
fields: ['createdAt']
}
]
})
audioFile: DataTypes.JSON,
chapters: DataTypes.JSON,
extraData: DataTypes.JSON
},
{
sequelize,
modelName: 'podcastEpisode',
indexes: [
{
fields: ['createdAt']
}
]
}
)
const { podcast } = sequelize.models
podcast.hasMany(PodcastEpisode, {
@ -168,4 +171,4 @@ class PodcastEpisode extends Model {
}
}
module.exports = PodcastEpisode
module.exports = PodcastEpisode

View file

@ -24,7 +24,7 @@ class Series extends Model {
static async getAllOldSeries() {
const series = await this.findAll()
return series.map(se => se.getOldSeries())
return series.map((se) => se.getOldSeries())
}
getOldSeries() {
@ -77,7 +77,7 @@ class Series extends Model {
/**
* Get oldSeries by id
* @param {string} seriesId
* @param {string} seriesId
* @returns {Promise<oldSeries>}
*/
static async getOldById(seriesId) {
@ -88,7 +88,7 @@ class Series extends Model {
/**
* Check if series exists
* @param {string} seriesId
* @param {string} seriesId
* @returns {Promise<boolean>}
*/
static async checkExistsById(seriesId) {
@ -97,58 +97,65 @@ class Series extends Model {
/**
* Get old series by name and libraryId. name case insensitive
*
* @param {string} seriesName
* @param {string} libraryId
*
* @param {string} seriesName
* @param {string} libraryId
* @returns {Promise<oldSeries>}
*/
static async getOldByNameAndLibrary(seriesName, libraryId) {
const series = (await this.findOne({
where: [
where(fn('lower', col('name')), seriesName.toLowerCase()),
{
libraryId
}
]
}))?.getOldSeries()
const series = (
await this.findOne({
where: [
where(fn('lower', col('name')), seriesName.toLowerCase()),
{
libraryId
}
]
})
)?.getOldSeries()
return series
}
/**
* Initialize model
* @param {import('../Database').sequelize} sequelize
* @param {import('../Database').sequelize} sequelize
*/
static init(sequelize) {
super.init({
id: {
type: DataTypes.UUID,
defaultValue: DataTypes.UUIDV4,
primaryKey: true
},
name: DataTypes.STRING,
nameIgnorePrefix: DataTypes.STRING,
description: DataTypes.TEXT
}, {
sequelize,
modelName: 'series',
indexes: [
{
fields: [{
name: 'name',
collate: 'NOCASE'
}]
super.init(
{
id: {
type: DataTypes.UUID,
defaultValue: DataTypes.UUIDV4,
primaryKey: true
},
// {
// fields: [{
// name: 'nameIgnorePrefix',
// collate: 'NOCASE'
// }]
// },
{
fields: ['libraryId']
}
]
})
name: DataTypes.STRING,
nameIgnorePrefix: DataTypes.STRING,
description: DataTypes.TEXT
},
{
sequelize,
modelName: 'series',
indexes: [
{
fields: [
{
name: 'name',
collate: 'NOCASE'
}
]
},
// {
// fields: [{
// name: 'nameIgnorePrefix',
// collate: 'NOCASE'
// }]
// },
{
fields: ['libraryId']
}
]
}
)
const { library } = sequelize.models
library.hasMany(Series, {
@ -158,4 +165,4 @@ class Series extends Model {
}
}
module.exports = Series
module.exports = Series

View file

@ -19,12 +19,11 @@ class Setting extends Model {
}
static async getOldSettings() {
const settings = (await this.findAll()).map(se => se.value)
const settings = (await this.findAll()).map((se) => se.value)
const emailSettingsJson = settings.find(se => se.id === 'email-settings')
const serverSettingsJson = settings.find(se => se.id === 'server-settings')
const notificationSettingsJson = settings.find(se => se.id === 'notification-settings')
const emailSettingsJson = settings.find((se) => se.id === 'email-settings')
const serverSettingsJson = settings.find((se) => se.id === 'server-settings')
const notificationSettingsJson = settings.find((se) => se.id === 'notification-settings')
return {
settings,
@ -43,20 +42,23 @@ class Setting extends Model {
/**
* Initialize model
* @param {import('../Database').sequelize} sequelize
* @param {import('../Database').sequelize} sequelize
*/
static init(sequelize) {
super.init({
key: {
type: DataTypes.STRING,
primaryKey: true
super.init(
{
key: {
type: DataTypes.STRING,
primaryKey: true
},
value: DataTypes.JSON
},
value: DataTypes.JSON
}, {
sequelize,
modelName: 'setting'
})
{
sequelize,
modelName: 'setting'
}
)
}
}
module.exports = Setting
module.exports = Setting

View file

@ -1,4 +1,4 @@
const uuidv4 = require("uuid").v4
const uuidv4 = require('uuid').v4
const sequelize = require('sequelize')
const Logger = require('../Logger')
const oldUser = require('../objects/user/User')
@ -45,17 +45,17 @@ class User extends Model {
const users = await this.findAll({
include: this.sequelize.models.mediaProgress
})
return users.map(u => this.getOldUser(u))
return users.map((u) => this.getOldUser(u))
}
/**
* Get old user model from new
*
* @param {Object} userExpanded
*
* @param {Object} userExpanded
* @returns {oldUser}
*/
static getOldUser(userExpanded) {
const mediaProgress = userExpanded.mediaProgresses.map(mp => mp.getOldMediaProgress())
const mediaProgress = userExpanded.mediaProgresses.map((mp) => mp.getOldMediaProgress())
const librariesAccessible = userExpanded.permissions?.librariesAccessible || []
const itemTagsSelected = userExpanded.permissions?.itemTagsSelected || []
@ -86,8 +86,8 @@ class User extends Model {
}
/**
*
* @param {oldUser} oldUser
*
* @param {oldUser} oldUser
* @returns {Promise<User>}
*/
static createFromOld(oldUser) {
@ -97,8 +97,8 @@ class User extends Model {
/**
* Update User from old user model
*
* @param {oldUser} oldUser
*
* @param {oldUser} oldUser
* @param {boolean} [hooks=true] Run before / after bulk update hooks?
* @returns {Promise<boolean>}
*/
@ -109,16 +109,18 @@ class User extends Model {
where: {
id: user.id
}
}).then((result) => result[0] > 0).catch((error) => {
Logger.error(`[User] Failed to save user ${oldUser.id}`, error)
return false
})
.then((result) => result[0] > 0)
.catch((error) => {
Logger.error(`[User] Failed to save user ${oldUser.id}`, error)
return false
})
}
/**
* Get new User model from old
*
* @param {oldUser} oldUser
*
* @param {oldUser} oldUser
* @returns {Object}
*/
static getFromOld(oldUser) {
@ -160,9 +162,9 @@ class User extends Model {
/**
* Create root user
* @param {string} username
* @param {string} pash
* @param {Auth} auth
* @param {string} username
* @param {string} pash
* @param {Auth} auth
* @returns {Promise<oldUser>}
*/
static async createRootUser(username, pash, auth) {
@ -185,15 +187,15 @@ class User extends Model {
/**
* Create user from openid userinfo
* @param {Object} userinfo
* @param {Auth} auth
* @param {Object} userinfo
* @param {Auth} auth
* @returns {Promise<oldUser>}
*/
static async createUserFromOpenIdUserInfo(userinfo, auth) {
const userId = uuidv4()
// TODO: Ensure username is unique?
const username = userinfo.preferred_username || userinfo.name || userinfo.sub
const email = (userinfo.email && userinfo.email_verified) ? userinfo.email : null
const email = userinfo.email && userinfo.email_verified ? userinfo.email : null
const token = await auth.generateAccessToken({ id: userId, username })
@ -218,7 +220,7 @@ class User extends Model {
/**
* Get a user by id or by the old database id
* @temp User ids were updated in v2.3.0 migration and old API tokens may still use that id
* @param {string} userId
* @param {string} userId
* @returns {Promise<oldUser|null>} null if not found
*/
static async getUserByIdOrOldId(userId) {
@ -244,7 +246,7 @@ class User extends Model {
/**
* Get user by username case insensitive
* @param {string} username
* @param {string} username
* @returns {Promise<oldUser|null>} returns null if not found
*/
static async getUserByUsername(username) {
@ -263,7 +265,7 @@ class User extends Model {
/**
* Get user by email case insensitive
* @param {string} username
* @param {string} username
* @returns {Promise<oldUser|null>} returns null if not found
*/
static async getUserByEmail(email) {
@ -282,7 +284,7 @@ class User extends Model {
/**
* Get user by id
* @param {string} userId
* @param {string} userId
* @returns {Promise<oldUser|null>} returns null if not found
*/
static async getUserById(userId) {
@ -296,7 +298,7 @@ class User extends Model {
/**
* Get user by openid sub
* @param {string} sub
* @param {string} sub
* @returns {Promise<oldUser|null>} returns null if not found
*/
static async getUserByOpenIDSub(sub) {
@ -317,7 +319,7 @@ class User extends Model {
const users = await this.findAll({
attributes: ['id', 'username']
})
return users.map(u => {
return users.map((u) => {
return {
id: u.id,
username: u.username
@ -340,37 +342,40 @@ class User extends Model {
/**
* Initialize model
* @param {import('../Database').sequelize} sequelize
* @param {import('../Database').sequelize} sequelize
*/
static init(sequelize) {
super.init({
id: {
type: DataTypes.UUID,
defaultValue: DataTypes.UUIDV4,
primaryKey: true
super.init(
{
id: {
type: DataTypes.UUID,
defaultValue: DataTypes.UUIDV4,
primaryKey: true
},
username: DataTypes.STRING,
email: DataTypes.STRING,
pash: DataTypes.STRING,
type: DataTypes.STRING,
token: DataTypes.STRING,
isActive: {
type: DataTypes.BOOLEAN,
defaultValue: false
},
isLocked: {
type: DataTypes.BOOLEAN,
defaultValue: false
},
lastSeen: DataTypes.DATE,
permissions: DataTypes.JSON,
bookmarks: DataTypes.JSON,
extraData: DataTypes.JSON
},
username: DataTypes.STRING,
email: DataTypes.STRING,
pash: DataTypes.STRING,
type: DataTypes.STRING,
token: DataTypes.STRING,
isActive: {
type: DataTypes.BOOLEAN,
defaultValue: false
},
isLocked: {
type: DataTypes.BOOLEAN,
defaultValue: false
},
lastSeen: DataTypes.DATE,
permissions: DataTypes.JSON,
bookmarks: DataTypes.JSON,
extraData: DataTypes.JSON
}, {
sequelize,
modelName: 'user'
})
{
sequelize,
modelName: 'user'
}
)
}
}
module.exports = User
module.exports = User

View file

@ -195,7 +195,7 @@ class Stream extends EventEmitter {
var current_chunk = []
var last_seg_in_chunk = -1
var segments = Array.from(this.segmentsCreated).sort((a, b) => a - b);
var segments = Array.from(this.segmentsCreated).sort((a, b) => a - b)
var lastSegment = segments[segments.length - 1]
if (lastSegment > this.furthestSegmentCreated) {
this.furthestSegmentCreated = lastSegment
@ -254,8 +254,14 @@ class Stream extends EventEmitter {
this.ffmpeg = Ffmpeg()
this.furthestSegmentCreated = 0
var adjustedStartTime = Math.max(this.startTime - this.maxSeekBackTime, 0)
var trackStartTime = await writeConcatFile(this.tracks, this.concatFilesPath, adjustedStartTime)
const adjustedStartTime = Math.max(this.startTime - this.maxSeekBackTime, 0)
const trackStartTime = await writeConcatFile(this.tracks, this.concatFilesPath, adjustedStartTime)
if (trackStartTime == null) {
// Close stream show error
this.ffmpeg = null
this.close('Failed to write stream concat file')
return
}
this.ffmpeg.addInput(this.concatFilesPath)
// seek_timestamp : https://ffmpeg.org/ffmpeg.html
@ -342,8 +348,9 @@ class Stream extends EventEmitter {
Logger.error('Ffmpeg Err', '"' + err.message + '"')
// Temporary workaround for https://github.com/advplyr/audiobookshelf/issues/172 and https://github.com/advplyr/audiobookshelf/issues/2157
const aacErrorMsg = 'ffmpeg exited with code 1:'
if (audioCodec === 'copy' && this.isAACEncodable && err.message?.startsWith(aacErrorMsg)) {
const aacErrorMsg = 'ffmpeg exited with code 1'
const errorMessageSuggestsReEncode = err.message?.startsWith(aacErrorMsg) && !err.message?.includes('No such file or directory')
if (audioCodec === 'copy' && this.isAACEncodable && errorMessageSuggestsReEncode) {
Logger.info(`[Stream] Re-attempting stream with AAC encode`)
this.transcodeOptions.forceAAC = true
this.reset(this.startTime)

View file

@ -1,4 +1,4 @@
const uuidv4 = require("uuid").v4
const uuidv4 = require('uuid').v4
const { areEquivalent, copyValue } = require('../../utils/index')
const AudioFile = require('../files/AudioFile')
const AudioTrack = require('../files/AudioTrack')
@ -47,7 +47,7 @@ class PodcastEpisode {
this.enclosure = episode.enclosure ? { ...episode.enclosure } : null
this.guid = episode.guid || null
this.pubDate = episode.pubDate
this.chapters = episode.chapters?.map(ch => ({ ...ch })) || []
this.chapters = episode.chapters?.map((ch) => ({ ...ch })) || []
this.audioFile = episode.audioFile ? new AudioFile(episode.audioFile) : null
this.publishedAt = episode.publishedAt
this.addedAt = episode.addedAt
@ -74,7 +74,7 @@ class PodcastEpisode {
enclosure: this.enclosure ? { ...this.enclosure } : null,
guid: this.guid,
pubDate: this.pubDate,
chapters: this.chapters.map(ch => ({ ...ch })),
chapters: this.chapters.map((ch) => ({ ...ch })),
audioFile: this.audioFile?.toJSON() || null,
publishedAt: this.publishedAt,
addedAt: this.addedAt,
@ -98,7 +98,7 @@ class PodcastEpisode {
enclosure: this.enclosure ? { ...this.enclosure } : null,
guid: this.guid,
pubDate: this.pubDate,
chapters: this.chapters.map(ch => ({ ...ch })),
chapters: this.chapters.map((ch) => ({ ...ch })),
audioFile: this.audioFile?.toJSON() || null,
audioTrack: this.audioTrack?.toJSON() || null,
publishedAt: this.publishedAt,
@ -121,7 +121,9 @@ class PodcastEpisode {
get duration() {
return this.audioFile?.duration || 0
}
get size() { return this.audioFile?.metadata.size || 0 }
// Size of the attached audio file (audioFile.metadata.size — presumably bytes,
// TODO confirm against AudioFileMetadata); 0 when no audio file is set
get size() {
  return this.audioFile?.metadata.size || 0
}
get enclosureUrl() {
return this.enclosure?.url || null
}
@ -151,9 +153,9 @@ class PodcastEpisode {
let hasUpdates = false
for (const key in this.toJSON()) {
let newValue = payload[key]
if (newValue === "") newValue = null
if (newValue === '') newValue = null
let existingValue = this[key]
if (existingValue === "") existingValue = null
if (existingValue === '') existingValue = null
if (newValue != undefined && !areEquivalent(newValue, existingValue)) {
this[key] = copyValue(newValue)
@ -177,7 +179,7 @@ class PodcastEpisode {
}
checkEqualsEnclosureUrl(url) {
if (!this.enclosure || !this.enclosure.url) return false
if (!this.enclosure?.url) return false
return this.enclosure.url == url
}
}

View file

@ -32,7 +32,6 @@ class AudioFile {
this.metaTags = null
this.manuallyVerified = false
this.invalid = false
this.exclude = false
this.error = null
@ -53,7 +52,6 @@ class AudioFile {
trackNumFromFilename: this.trackNumFromFilename,
discNumFromFilename: this.discNumFromFilename,
manuallyVerified: !!this.manuallyVerified,
invalid: !!this.invalid,
exclude: !!this.exclude,
error: this.error || null,
format: this.format,
@ -78,7 +76,6 @@ class AudioFile {
this.addedAt = data.addedAt
this.updatedAt = data.updatedAt
this.manuallyVerified = !!data.manuallyVerified
this.invalid = !!data.invalid
this.exclude = !!data.exclude
this.error = data.error || null
@ -112,10 +109,6 @@ class AudioFile {
}
}
get isValidTrack() {
return !this.invalid && !this.exclude
}
// New scanner creates AudioFile from AudioFileScanner
setDataFromProbe(libraryFile, probeData) {
this.ino = libraryFile.ino || null

View file

@ -17,7 +17,6 @@ class Book {
this.audioFiles = []
this.chapters = []
this.missingParts = []
this.ebookFile = null
this.lastCoverSearch = null
@ -36,7 +35,6 @@ class Book {
this.tags = [...book.tags]
this.audioFiles = book.audioFiles.map(f => new AudioFile(f))
this.chapters = book.chapters.map(c => ({ ...c }))
this.missingParts = book.missingParts ? [...book.missingParts] : []
this.ebookFile = book.ebookFile ? new EBookFile(book.ebookFile) : null
this.lastCoverSearch = book.lastCoverSearch || null
this.lastCoverSearchQuery = book.lastCoverSearchQuery || null
@ -51,7 +49,6 @@ class Book {
tags: [...this.tags],
audioFiles: this.audioFiles.map(f => f.toJSON()),
chapters: this.chapters.map(c => ({ ...c })),
missingParts: [...this.missingParts],
ebookFile: this.ebookFile ? this.ebookFile.toJSON() : null
}
}
@ -65,8 +62,6 @@ class Book {
numTracks: this.tracks.length,
numAudioFiles: this.audioFiles.length,
numChapters: this.chapters.length,
numMissingParts: this.missingParts.length,
numInvalidAudioFiles: this.invalidAudioFiles.length,
duration: this.duration,
size: this.size,
ebookFormat: this.ebookFile?.ebookFormat
@ -85,7 +80,6 @@ class Book {
duration: this.duration,
size: this.size,
tracks: this.tracks.map(t => t.toJSON()),
missingParts: [...this.missingParts],
ebookFile: this.ebookFile?.toJSON() || null
}
}
@ -109,11 +103,8 @@ class Book {
get hasMediaEntities() {
return !!this.tracks.length || this.ebookFile
}
get invalidAudioFiles() {
return this.audioFiles.filter(af => af.invalid)
}
get includedAudioFiles() {
return this.audioFiles.filter(af => !af.exclude && !af.invalid)
return this.audioFiles.filter(af => !af.exclude)
}
get tracks() {
let startOffset = 0
@ -238,7 +229,6 @@ class Book {
this.audioFiles = orderedFileData.map((fileData) => {
const audioFile = this.audioFiles.find(af => af.ino === fileData.ino)
audioFile.manuallyVerified = true
audioFile.invalid = false
audioFile.error = null
if (fileData.exclude !== undefined) {
audioFile.exclude = !!fileData.exclude
@ -257,7 +247,6 @@ class Book {
rebuildTracks() {
Logger.debug(`[Book] Tracks being rebuilt...!`)
this.audioFiles.sort((a, b) => a.index - b.index)
this.missingParts = []
}
// Only checks container format

View file

@ -42,7 +42,13 @@ class Podcast {
this.autoDownloadSchedule = podcast.autoDownloadSchedule || '0 * * * *' // Added in 2.1.3 so default to hourly
this.lastEpisodeCheck = podcast.lastEpisodeCheck || 0
this.maxEpisodesToKeep = podcast.maxEpisodesToKeep || 0
this.maxNewEpisodesToDownload = podcast.maxNewEpisodesToDownload || 3
// Default is 3 but 0 is allowed
if (typeof podcast.maxNewEpisodesToDownload !== 'number') {
this.maxNewEpisodesToDownload = 3
} else {
this.maxNewEpisodesToDownload = podcast.maxNewEpisodesToDownload
}
}
toJSON() {
@ -52,7 +58,7 @@ class Podcast {
metadata: this.metadata.toJSON(),
coverPath: this.coverPath,
tags: [...this.tags],
episodes: this.episodes.map(e => e.toJSON()),
episodes: this.episodes.map((e) => e.toJSON()),
autoDownloadEpisodes: this.autoDownloadEpisodes,
autoDownloadSchedule: this.autoDownloadSchedule,
lastEpisodeCheck: this.lastEpisodeCheck,
@ -84,7 +90,7 @@ class Podcast {
metadata: this.metadata.toJSONExpanded(),
coverPath: this.coverPath,
tags: [...this.tags],
episodes: this.episodes.map(e => e.toJSONExpanded()),
episodes: this.episodes.map((e) => e.toJSONExpanded()),
autoDownloadEpisodes: this.autoDownloadEpisodes,
autoDownloadSchedule: this.autoDownloadSchedule,
lastEpisodeCheck: this.lastEpisodeCheck,
@ -115,7 +121,7 @@ class Podcast {
get size() {
var total = 0
this.episodes.forEach((ep) => total += ep.size)
this.episodes.forEach((ep) => (total += ep.size))
return total
}
get hasMediaEntities() {
@ -123,7 +129,7 @@ class Podcast {
}
get duration() {
let total = 0
this.episodes.forEach((ep) => total += ep.duration)
this.episodes.forEach((ep) => (total += ep.duration))
return total
}
get numTracks() {
@ -139,7 +145,7 @@ class Podcast {
return largestPublishedAt
}
get episodesWithPubDate() {
return this.episodes.filter(ep => !!ep.publishedAt)
return this.episodes.filter((ep) => !!ep.publishedAt)
}
update(payload) {
@ -163,7 +169,7 @@ class Podcast {
}
updateEpisode(id, payload) {
var episode = this.episodes.find(ep => ep.id == id)
var episode = this.episodes.find((ep) => ep.id == id)
if (!episode) return false
return episode.update(payload)
}
@ -176,15 +182,15 @@ class Podcast {
}
removeFileWithInode(inode) {
const hasEpisode = this.episodes.some(ep => ep.audioFile.ino === inode)
const hasEpisode = this.episodes.some((ep) => ep.audioFile.ino === inode)
if (hasEpisode) {
this.episodes = this.episodes.filter(ep => ep.audioFile.ino !== inode)
this.episodes = this.episodes.filter((ep) => ep.audioFile.ino !== inode)
}
return hasEpisode
}
findFileWithInode(inode) {
var episode = this.episodes.find(ep => ep.audioFile.ino === inode)
var episode = this.episodes.find((ep) => ep.audioFile.ino === inode)
if (episode) return episode.audioFile
return null
}
@ -202,21 +208,23 @@ class Podcast {
}
checkHasEpisode(episodeId) {
return this.episodes.some(ep => ep.id === episodeId)
return this.episodes.some((ep) => ep.id === episodeId)
}
checkHasEpisodeByFeedUrl(url) {
return this.episodes.some(ep => ep.checkEqualsEnclosureUrl(url))
checkHasEpisodeByFeedEpisode(feedEpisode) {
const guid = feedEpisode.guid
const url = feedEpisode.enclosure.url
return this.episodes.some((ep) => (ep.guid && ep.guid === guid) || ep.checkEqualsEnclosureUrl(url))
}
// Only checks container format
checkCanDirectPlay(payload, episodeId) {
var episode = this.episodes.find(ep => ep.id === episodeId)
var episode = this.episodes.find((ep) => ep.id === episodeId)
if (!episode) return false
return episode.checkCanDirectPlay(payload)
}
getDirectPlayTracklist(episodeId) {
var episode = this.episodes.find(ep => ep.id === episodeId)
var episode = this.episodes.find((ep) => ep.id === episodeId)
if (!episode) return false
return episode.getDirectPlayTracklist()
}
@ -235,15 +243,15 @@ class Podcast {
}
removeEpisode(episodeId) {
const episode = this.episodes.find(ep => ep.id === episodeId)
const episode = this.episodes.find((ep) => ep.id === episodeId)
if (episode) {
this.episodes = this.episodes.filter(ep => ep.id !== episodeId)
this.episodes = this.episodes.filter((ep) => ep.id !== episodeId)
}
return episode
}
getPlaybackTitle(episodeId) {
var episode = this.episodes.find(ep => ep.id == episodeId)
var episode = this.episodes.find((ep) => ep.id == episodeId)
if (!episode) return this.metadata.title
return episode.title
}
@ -253,7 +261,7 @@ class Podcast {
}
getEpisodeDuration(episodeId) {
var episode = this.episodes.find(ep => ep.id == episodeId)
var episode = this.episodes.find((ep) => ep.id == episodeId)
if (!episode) return 0
return episode.duration
}
@ -262,13 +270,13 @@ class Podcast {
if (!episodeId) return null
// Support old episode ids for mobile downloads
if (episodeId.startsWith('ep_')) return this.episodes.find(ep => ep.oldEpisodeId == episodeId)
if (episodeId.startsWith('ep_')) return this.episodes.find((ep) => ep.oldEpisodeId == episodeId)
return this.episodes.find(ep => ep.id == episodeId)
return this.episodes.find((ep) => ep.id == episodeId)
}
getChapters(episodeId) {
return this.getEpisode(episodeId)?.chapters?.map(ch => ({ ...ch })) || []
return this.getEpisode(episodeId)?.chapters?.map((ch) => ({ ...ch })) || []
}
}
module.exports = Podcast

View file

@ -16,6 +16,7 @@ class EmailSettings {
this.host = null
this.port = 465
this.secure = true
this.rejectUnauthorized = true
this.user = null
this.pass = null
this.testAddress = null
@ -33,11 +34,17 @@ class EmailSettings {
this.host = settings.host
this.port = settings.port
this.secure = !!settings.secure
this.rejectUnauthorized = !!settings.rejectUnauthorized
this.user = settings.user
this.pass = settings.pass
this.testAddress = settings.testAddress
this.fromAddress = settings.fromAddress
this.ereaderDevices = settings.ereaderDevices?.map(d => ({ ...d })) || []
this.ereaderDevices = settings.ereaderDevices?.map((d) => ({ ...d })) || []
// rejectUnauthorized added after v2.10.1 - defaults to true
if (settings.rejectUnauthorized === undefined) {
this.rejectUnauthorized = true
}
}
toJSON() {
@ -46,11 +53,12 @@ class EmailSettings {
host: this.host,
port: this.port,
secure: this.secure,
rejectUnauthorized: this.rejectUnauthorized,
user: this.user,
pass: this.pass,
testAddress: this.testAddress,
fromAddress: this.fromAddress,
ereaderDevices: this.ereaderDevices.map(d => ({ ...d }))
ereaderDevices: this.ereaderDevices.map((d) => ({ ...d }))
}
}
@ -62,27 +70,30 @@ class EmailSettings {
else payload.port = Number(payload.port)
}
if (payload.secure !== undefined) payload.secure = !!payload.secure
if (payload.rejectUnauthorized !== undefined) payload.rejectUnauthorized = !!payload.rejectUnauthorized
if (payload.ereaderDevices !== undefined && !Array.isArray(payload.ereaderDevices)) payload.ereaderDevices = undefined
if (payload.ereaderDevices?.length) {
// Validate ereader devices
payload.ereaderDevices = payload.ereaderDevices.map((device) => {
if (!device.name || !device.email) {
Logger.error(`[EmailSettings] Update ereader device is invalid`, device)
return null
}
if (!device.availabilityOption || !['adminOrUp', 'userOrUp', 'guestOrUp', 'specificUsers'].includes(device.availabilityOption)) {
device.availabilityOption = 'adminOrUp'
}
if (device.availabilityOption === 'specificUsers' && !device.users?.length) {
device.availabilityOption = 'adminOrUp'
}
if (device.availabilityOption !== 'specificUsers' && device.users?.length) {
device.users = []
}
return device
}).filter(d => d)
payload.ereaderDevices = payload.ereaderDevices
.map((device) => {
if (!device.name || !device.email) {
Logger.error(`[EmailSettings] Update ereader device is invalid`, device)
return null
}
if (!device.availabilityOption || !['adminOrUp', 'userOrUp', 'guestOrUp', 'specificUsers'].includes(device.availabilityOption)) {
device.availabilityOption = 'adminOrUp'
}
if (device.availabilityOption === 'specificUsers' && !device.users?.length) {
device.availabilityOption = 'adminOrUp'
}
if (device.availabilityOption !== 'specificUsers' && device.users?.length) {
device.users = []
}
return device
})
.filter((d) => d)
}
let hasUpdates = false
@ -105,6 +116,10 @@ class EmailSettings {
host: this.host,
secure: this.secure
}
// Only set to true for port 465 (https://nodemailer.com/smtp/#tls-options)
if (this.port !== 465) {
payload.secure = false
}
if (this.port) payload.port = this.port
if (this.user && this.pass !== undefined) {
payload.auth = {
@ -112,14 +127,20 @@ class EmailSettings {
pass: this.pass
}
}
// Allow self-signed certs (https://nodemailer.com/smtp/#3-allow-self-signed-certificates)
if (!this.rejectUnauthorized) {
payload.tls = {
rejectUnauthorized: false
}
}
return payload
}
/**
*
* @param {EreaderDeviceObject} device
* @param {import('../user/User')} user
*
* @param {EreaderDeviceObject} device
* @param {import('../user/User')} user
* @returns {boolean}
*/
checkUserCanAccessDevice(device, user) {
@ -136,8 +157,8 @@ class EmailSettings {
/**
* Get ereader devices accessible to user
*
* @param {import('../user/User')} user
*
* @param {import('../user/User')} user
* @returns {EreaderDeviceObject[]}
*/
getEReaderDevices(user) {
@ -146,12 +167,12 @@ class EmailSettings {
/**
* Get ereader device by name
*
* @param {string} deviceName
*
* @param {string} deviceName
* @returns {EreaderDeviceObject}
*/
getEReaderDevice(deviceName) {
return this.ereaderDevices.find(d => d.name === deviceName)
return this.ereaderDevices.find((d) => d.name === deviceName)
}
}
module.exports = EmailSettings
module.exports = EmailSettings

View file

@ -8,7 +8,9 @@ class LibrarySettings {
this.skipMatchingMediaWithIsbn = false
this.autoScanCronExpression = null
this.audiobooksOnly = false
this.hideSingleBookSeries = false // Do not show series that only have 1 book
this.epubsAllowScriptedContent = false
this.hideSingleBookSeries = false // Do not show series that only have 1 book
this.onlyShowLaterBooksInContinueSeries = false // Skip showing books that are earlier than the max sequence read
this.metadataPrecedence = ['folderStructure', 'audioMetatags', 'nfoFile', 'txtFiles', 'opfFile', 'absMetadata']
this.podcastSearchRegion = 'us'
@ -24,7 +26,9 @@ class LibrarySettings {
this.skipMatchingMediaWithIsbn = !!settings.skipMatchingMediaWithIsbn
this.autoScanCronExpression = settings.autoScanCronExpression || null
this.audiobooksOnly = !!settings.audiobooksOnly
this.epubsAllowScriptedContent = !!settings.epubsAllowScriptedContent
this.hideSingleBookSeries = !!settings.hideSingleBookSeries
this.onlyShowLaterBooksInContinueSeries = !!settings.onlyShowLaterBooksInContinueSeries
if (settings.metadataPrecedence) {
this.metadataPrecedence = [...settings.metadataPrecedence]
} else {
@ -42,7 +46,9 @@ class LibrarySettings {
skipMatchingMediaWithIsbn: this.skipMatchingMediaWithIsbn,
autoScanCronExpression: this.autoScanCronExpression,
audiobooksOnly: this.audiobooksOnly,
epubsAllowScriptedContent: this.epubsAllowScriptedContent,
hideSingleBookSeries: this.hideSingleBookSeries,
onlyShowLaterBooksInContinueSeries: this.onlyShowLaterBooksInContinueSeries,
metadataPrecedence: [...this.metadataPrecedence],
podcastSearchRegion: this.podcastSearchRegion
}
@ -64,4 +70,4 @@ class LibrarySettings {
return hasUpdates
}
}
module.exports = LibrarySettings
module.exports = LibrarySettings

View file

@ -1,6 +1,7 @@
const packageJson = require('../../../package.json')
const { BookshelfView } = require('../../utils/constants')
const Logger = require('../../Logger')
const User = require('../user/User')
class ServerSettings {
constructor(settings) {
@ -67,11 +68,14 @@ class ServerSettings {
this.authOpenIDLogoutURL = null
this.authOpenIDClientID = null
this.authOpenIDClientSecret = null
this.authOpenIDTokenSigningAlgorithm = 'RS256'
this.authOpenIDButtonText = 'Login with OpenId'
this.authOpenIDAutoLaunch = false
this.authOpenIDAutoRegister = false
this.authOpenIDMatchExistingBy = null
this.authOpenIDMobileRedirectURIs = ['audiobookshelf://oauth']
this.authOpenIDGroupClaim = ''
this.authOpenIDAdvancedPermsClaim = ''
if (settings) {
this.construct(settings)
@ -124,11 +128,14 @@ class ServerSettings {
this.authOpenIDLogoutURL = settings.authOpenIDLogoutURL || null
this.authOpenIDClientID = settings.authOpenIDClientID || null
this.authOpenIDClientSecret = settings.authOpenIDClientSecret || null
this.authOpenIDTokenSigningAlgorithm = settings.authOpenIDTokenSigningAlgorithm || 'RS256'
this.authOpenIDButtonText = settings.authOpenIDButtonText || 'Login with OpenId'
this.authOpenIDAutoLaunch = !!settings.authOpenIDAutoLaunch
this.authOpenIDAutoRegister = !!settings.authOpenIDAutoRegister
this.authOpenIDMatchExistingBy = settings.authOpenIDMatchExistingBy || null
this.authOpenIDMobileRedirectURIs = settings.authOpenIDMobileRedirectURIs || ['audiobookshelf://oauth']
this.authOpenIDGroupClaim = settings.authOpenIDGroupClaim || ''
this.authOpenIDAdvancedPermsClaim = settings.authOpenIDAdvancedPermsClaim || ''
if (!Array.isArray(this.authActiveAuthMethods)) {
this.authActiveAuthMethods = ['local']
@ -212,11 +219,14 @@ class ServerSettings {
authOpenIDLogoutURL: this.authOpenIDLogoutURL,
authOpenIDClientID: this.authOpenIDClientID, // Do not return to client
authOpenIDClientSecret: this.authOpenIDClientSecret, // Do not return to client
authOpenIDTokenSigningAlgorithm: this.authOpenIDTokenSigningAlgorithm,
authOpenIDButtonText: this.authOpenIDButtonText,
authOpenIDAutoLaunch: this.authOpenIDAutoLaunch,
authOpenIDAutoRegister: this.authOpenIDAutoRegister,
authOpenIDMatchExistingBy: this.authOpenIDMatchExistingBy,
authOpenIDMobileRedirectURIs: this.authOpenIDMobileRedirectURIs // Do not return to client
authOpenIDMobileRedirectURIs: this.authOpenIDMobileRedirectURIs, // Do not return to client
authOpenIDGroupClaim: this.authOpenIDGroupClaim, // Do not return to client
authOpenIDAdvancedPermsClaim: this.authOpenIDAdvancedPermsClaim // Do not return to client
}
}
@ -226,6 +236,8 @@ class ServerSettings {
delete json.authOpenIDClientID
delete json.authOpenIDClientSecret
delete json.authOpenIDMobileRedirectURIs
delete json.authOpenIDGroupClaim
delete json.authOpenIDAdvancedPermsClaim
return json
}
@ -243,7 +255,8 @@ class ServerSettings {
this.authOpenIDUserInfoURL &&
this.authOpenIDJwksURL &&
this.authOpenIDClientID &&
this.authOpenIDClientSecret
this.authOpenIDClientSecret &&
this.authOpenIDTokenSigningAlgorithm
}
get authenticationSettings() {
@ -258,11 +271,16 @@ class ServerSettings {
authOpenIDLogoutURL: this.authOpenIDLogoutURL,
authOpenIDClientID: this.authOpenIDClientID, // Do not return to client
authOpenIDClientSecret: this.authOpenIDClientSecret, // Do not return to client
authOpenIDTokenSigningAlgorithm: this.authOpenIDTokenSigningAlgorithm,
authOpenIDButtonText: this.authOpenIDButtonText,
authOpenIDAutoLaunch: this.authOpenIDAutoLaunch,
authOpenIDAutoRegister: this.authOpenIDAutoRegister,
authOpenIDMatchExistingBy: this.authOpenIDMatchExistingBy,
authOpenIDMobileRedirectURIs: this.authOpenIDMobileRedirectURIs // Do not return to client
authOpenIDMobileRedirectURIs: this.authOpenIDMobileRedirectURIs, // Do not return to client
authOpenIDGroupClaim: this.authOpenIDGroupClaim, // Do not return to client
authOpenIDAdvancedPermsClaim: this.authOpenIDAdvancedPermsClaim, // Do not return to client
authOpenIDSamplePermissions: User.getSampleAbsPermissions()
}
}

View file

@ -268,6 +268,111 @@ class User {
return hasUpdates
}
// List of expected permission properties from the client.
// Maps external payload keys to either a boolean key on this.permissions or,
// for the two array entries at the bottom, to an array property on the user.
// Consumed by updatePermissionsFromExternalJSON and getSampleAbsPermissions.
static permissionMapping = {
  canDownload: 'download',
  canUpload: 'upload',
  canDelete: 'delete',
  canUpdate: 'update',
  canAccessExplicitContent: 'accessExplicitContent',
  canAccessAllLibraries: 'accessAllLibraries',
  canAccessAllTags: 'accessAllTags',
  tagsAreDenylist: 'selectedTagsNotAccessible',
  // Direct mapping for array-based permissions
  allowedLibraries: 'librariesAccessible',
  allowedTags: 'itemTagsSelected'
}
/**
 * Update user permissions from external JSON (e.g. an OIDC advanced-permissions claim).
 * Boolean permissions not present in the payload are reset to false; the two
 * array permissions are synced unless the corresponding accessAll* flag is set.
 *
 * @param {Object} absPermissions JSON containing user permissions
 *   (see getSampleAbsPermissions for the expected shape)
 * @returns {boolean} true if updates were made
 * @throws {Error} when the payload has an unknown key or a non-string array entry
 */
updatePermissionsFromExternalJSON(absPermissions) {
  let hasUpdates = false
  let updatedUserPermissions = {}

  // Initialize all permissions to false first
  Object.keys(User.permissionMapping).forEach(mappingKey => {
    const userPermKey = User.permissionMapping[mappingKey]
    if (typeof this.permissions[userPermKey] === 'boolean') {
      updatedUserPermissions[userPermKey] = false // Default to false for boolean permissions
    }
  })

  // Map the boolean permissions from absPermissions
  Object.keys(absPermissions).forEach(absKey => {
    const userPermKey = User.permissionMapping[absKey]
    if (!userPermKey) {
      // Any key outside permissionMapping is rejected outright
      throw new Error(`Unexpected permission property: ${absKey}`)
    }
    // Array-based keys (allowedLibraries/allowedTags) are skipped here and
    // handled explicitly below
    if (updatedUserPermissions[userPermKey] !== undefined) {
      updatedUserPermissions[userPermKey] = !!absPermissions[absKey]
    }
  })

  // Update user permissions if changes were made
  // NOTE(review): JSON.stringify comparison is key-order sensitive, so an
  // equal set of permissions in a different key order reports an update
  if (JSON.stringify(this.permissions) !== JSON.stringify(updatedUserPermissions)) {
    this.permissions = updatedUserPermissions
    hasUpdates = true
  }

  // Handle allowedLibraries: accessAllLibraries wins and clears the explicit list
  if (this.permissions.accessAllLibraries) {
    if (this.librariesAccessible.length) {
      this.librariesAccessible = []
      hasUpdates = true
    }
  } else if (absPermissions.allowedLibraries?.length && absPermissions.allowedLibraries.join(',') !== this.librariesAccessible.join(',')) {
    if (absPermissions.allowedLibraries.some(lid => typeof lid !== 'string')) {
      throw new Error('Invalid permission property "allowedLibraries", expecting array of strings')
    }

    this.librariesAccessible = absPermissions.allowedLibraries
    hasUpdates = true
  }

  // Handle allowedTags: accessAllTags wins and clears the explicit list
  if (this.permissions.accessAllTags) {
    if (this.itemTagsSelected.length) {
      this.itemTagsSelected = []
      hasUpdates = true
    }
  } else if (absPermissions.allowedTags?.length && absPermissions.allowedTags.join(',') !== this.itemTagsSelected.join(',')) {
    if (absPermissions.allowedTags.some(tag => typeof tag !== 'string')) {
      throw new Error('Invalid permission property "allowedTags", expecting array of strings')
    }

    this.itemTagsSelected = absPermissions.allowedTags
    hasUpdates = true
  }

  return hasUpdates
}
/**
* Get a sample to show how a JSON for updatePermissionsFromExternalJSON should look like
*
* @returns {string} JSON string
*/
static getSampleAbsPermissions() {
// Start with a template object where all permissions are false for simplicity
const samplePermissions = Object.keys(User.permissionMapping).reduce((acc, key) => {
// For array-based permissions, provide a sample array
if (key === 'allowedLibraries') {
acc[key] = [`5406ba8a-16e1-451d-96d7-4931b0a0d966`, `918fd848-7c1d-4a02-818a-847435a879ca`]
} else if (key === 'allowedTags') {
acc[key] = [`ExampleTag`, `AnotherTag`, `ThirdTag`]
} else {
acc[key] = false
}
return acc
}, {})
return JSON.stringify(samplePermissions, null, 2) // Pretty print the JSON
}
/**
* Get first available library id for user
*

View file

@ -1,139 +1,176 @@
const axios = require('axios')
const axios = require('axios').default
const htmlSanitizer = require('../utils/htmlSanitizer')
const Logger = require('../Logger')
class Audible {
constructor() {
this.regionMap = {
'us': '.com',
'ca': '.ca',
'uk': '.co.uk',
'au': '.com.au',
'fr': '.fr',
'de': '.de',
'jp': '.co.jp',
'it': '.it',
'in': '.in',
'es': '.es'
}
// Default response timeout (ms) for provider requests; individual calls may
// override it via their timeout parameter
#responseTimeout = 30000

constructor() {
  // Region code -> Audible TLD suffix — presumably used to build
  // region-specific provider URLs; confirm against callers
  this.regionMap = {
    us: '.com',
    ca: '.ca',
    uk: '.co.uk',
    au: '.com.au',
    fr: '.fr',
    de: '.de',
    jp: '.co.jp',
    it: '.it',
    in: '.in',
    es: '.es'
  }
}
/**
* Audible will sometimes send sequences with "Book 1" or "2, Dramatized Adaptation"
* @see https://github.com/advplyr/audiobookshelf/issues/2380
* @see https://github.com/advplyr/audiobookshelf/issues/1339
*
* @param {string} seriesName
* @param {string} sequence
* @returns {string}
*/
cleanSeriesSequence(seriesName, sequence) {
if (!sequence) return ''
// match any number with optional decimal (e.g, 1 or 1.5 or .5)
let numberFound = sequence.match(/\.\d+|\d+(?:\.\d+)?/)
let updatedSequence = numberFound ? numberFound[0] : sequence
if (sequence !== updatedSequence) {
Logger.debug(`[Audible] Series "${seriesName}" sequence was cleaned from "${sequence}" to "${updatedSequence}"`)
}
return updatedSequence
}
cleanResult(item) {
const { title, subtitle, asin, authors, narrators, publisherName, summary, releaseDate, image, genres, seriesPrimary, seriesSecondary, language, runtimeLengthMin, formatType } = item
const series = []
if (seriesPrimary) {
series.push({
series: seriesPrimary.name,
sequence: this.cleanSeriesSequence(seriesPrimary.name, seriesPrimary.position || '')
})
}
if (seriesSecondary) {
series.push({
series: seriesSecondary.name,
sequence: this.cleanSeriesSequence(seriesSecondary.name, seriesSecondary.position || '')
})
}
/**
* Audible will sometimes send sequences with "Book 1" or "2, Dramatized Adaptation"
* @see https://github.com/advplyr/audiobookshelf/issues/2380
* @see https://github.com/advplyr/audiobookshelf/issues/1339
*
* @param {string} seriesName
* @param {string} sequence
* @returns {string}
*/
cleanSeriesSequence(seriesName, sequence) {
if (!sequence) return ''
let updatedSequence = sequence.replace(/Book /, '').trim()
if (updatedSequence.includes(' ')) {
updatedSequence = updatedSequence.split(' ').shift().replace(/,$/, '')
}
if (sequence !== updatedSequence) {
Logger.debug(`[Audible] Series "${seriesName}" sequence was cleaned from "${sequence}" to "${updatedSequence}"`)
}
return updatedSequence
const genresFiltered = genres ? genres.filter((g) => g.type == 'genre').map((g) => g.name) : []
const tagsFiltered = genres ? genres.filter((g) => g.type == 'tag').map((g) => g.name) : []
return {
title,
subtitle: subtitle || null,
author: authors ? authors.map(({ name }) => name).join(', ') : null,
narrator: narrators ? narrators.map(({ name }) => name).join(', ') : null,
publisher: publisherName,
publishedYear: releaseDate ? releaseDate.split('-')[0] : null,
description: summary ? htmlSanitizer.stripAllTags(summary) : null,
cover: image,
asin,
genres: genresFiltered.length ? genresFiltered : null,
tags: tagsFiltered.length ? tagsFiltered.join(', ') : null,
series: series.length ? series : null,
language: language ? language.charAt(0).toUpperCase() + language.slice(1) : null,
duration: runtimeLengthMin && !isNaN(runtimeLengthMin) ? Number(runtimeLengthMin) : 0,
region: item.region || null,
rating: item.rating || null,
abridged: formatType === 'abridged'
}
}
/**
* Test if a search title matches an ASIN. Supports lowercase letters
*
* @param {string} title
* @returns {boolean}
*/
isProbablyAsin(title) {
return /^[0-9A-Za-z]{10}$/.test(title)
}
/**
*
* @param {string} asin
* @param {string} region
* @param {number} [timeout] response timeout in ms
* @returns {Promise<Object[]>}
*/
asinSearch(asin, region, timeout = this.#responseTimeout) {
if (!asin) return []
if (!timeout || isNaN(timeout)) timeout = this.#responseTimeout
asin = encodeURIComponent(asin.toUpperCase())
var regionQuery = region ? `?region=${region}` : ''
var url = `https://api.audnex.us/books/${asin}${regionQuery}`
Logger.debug(`[Audible] ASIN url: ${url}`)
return axios
.get(url, {
timeout
})
.then((res) => {
if (!res || !res.data || !res.data.asin) return null
return res.data
})
.catch((error) => {
Logger.error('[Audible] ASIN search error', error)
return []
})
}
/**
*
* @param {string} title
* @param {string} author
* @param {string} asin
* @param {string} region
* @param {number} [timeout] response timeout in ms
* @returns {Promise<Object[]>}
*/
async search(title, author, asin, region, timeout = this.#responseTimeout) {
if (region && !this.regionMap[region]) {
Logger.error(`[Audible] search: Invalid region ${region}`)
region = ''
}
if (!timeout || isNaN(timeout)) timeout = this.#responseTimeout
let items
if (asin) {
items = [await this.asinSearch(asin, region, timeout)]
}
cleanResult(item) {
const { title, subtitle, asin, authors, narrators, publisherName, summary, releaseDate, image, genres, seriesPrimary, seriesSecondary, language, runtimeLengthMin, formatType } = item
const series = []
if (seriesPrimary) {
series.push({
series: seriesPrimary.name,
sequence: this.cleanSeriesSequence(seriesPrimary.name, seriesPrimary.position || '')
})
}
if (seriesSecondary) {
series.push({
series: seriesSecondary.name,
sequence: this.cleanSeriesSequence(seriesSecondary.name, seriesSecondary.position || '')
})
}
const genresFiltered = genres ? genres.filter(g => g.type == "genre").map(g => g.name) : []
const tagsFiltered = genres ? genres.filter(g => g.type == "tag").map(g => g.name) : []
return {
title,
subtitle: subtitle || null,
author: authors ? authors.map(({ name }) => name).join(', ') : null,
narrator: narrators ? narrators.map(({ name }) => name).join(', ') : null,
publisher: publisherName,
publishedYear: releaseDate ? releaseDate.split('-')[0] : null,
description: summary ? htmlSanitizer.stripAllTags(summary) : null,
cover: image,
asin,
genres: genresFiltered.length ? genresFiltered : null,
tags: tagsFiltered.length ? tagsFiltered.join(', ') : null,
series: series.length ? series : null,
language: language ? language.charAt(0).toUpperCase() + language.slice(1) : null,
duration: runtimeLengthMin && !isNaN(runtimeLengthMin) ? Number(runtimeLengthMin) : 0,
region: item.region || null,
rating: item.rating || null,
abridged: formatType === 'abridged'
}
if (!items && this.isProbablyAsin(title)) {
items = [await this.asinSearch(title, region, timeout)]
}
/**
 * Heuristic test for whether a search title is actually an ASIN:
 * exactly 10 uppercase alphanumeric characters.
 * NOTE(review): lowercase letters are rejected here — presumably titles are
 * expected to carry uppercase ASINs; verify against callers.
 *
 * @param {string} title
 * @returns {boolean}
 */
isProbablyAsin(title) {
  return /^[0-9A-Z]{10}$/.test(title)
}
asinSearch(asin, region) {
asin = encodeURIComponent(asin)
var regionQuery = region ? `?region=${region}` : ''
var url = `https://api.audnex.us/books/${asin}${regionQuery}`
Logger.debug(`[Audible] ASIN url: ${url}`)
return axios.get(url).then((res) => {
if (!res || !res.data || !res.data.asin) return null
return res.data
}).catch(error => {
Logger.error('[Audible] ASIN search error', error)
return []
if (!items) {
const queryObj = {
num_results: '10',
products_sort_by: 'Relevance',
title: title
}
if (author) queryObj.author = author
const queryString = new URLSearchParams(queryObj).toString()
const tld = region ? this.regionMap[region] : '.com'
const url = `https://api.audible${tld}/1.0/catalog/products?${queryString}`
Logger.debug(`[Audible] Search url: ${url}`)
items = await axios
.get(url, {
timeout
})
.then((res) => {
if (!res?.data?.products) return null
return Promise.all(res.data.products.map((result) => this.asinSearch(result.asin, region, timeout)))
})
.catch((error) => {
Logger.error('[Audible] query search error', error)
return []
})
}
async search(title, author, asin, region) {
if (region && !this.regionMap[region]) {
Logger.error(`[Audible] search: Invalid region ${region}`)
region = ''
}
let items
if (asin) {
items = [await this.asinSearch(asin, region)]
}
if (!items && this.isProbablyAsin(title)) {
items = [await this.asinSearch(title, region)]
}
if (!items) {
const queryObj = {
num_results: '10',
products_sort_by: 'Relevance',
title: title
}
if (author) queryObj.author = author
const queryString = (new URLSearchParams(queryObj)).toString()
const tld = region ? this.regionMap[region] : '.com'
const url = `https://api.audible${tld}/1.0/catalog/products?${queryString}`
Logger.debug(`[Audible] Search url: ${url}`)
items = await axios.get(url).then((res) => {
if (!res || !res.data || !res.data.products) return null
return Promise.all(res.data.products.map(result => this.asinSearch(result.asin, region)))
}).catch(error => {
Logger.error('[Audible] query search error', error)
return []
})
}
return items ? items.map(item => this.cleanResult(item)) : []
}
return items?.map((item) => this.cleanResult(item)) || []
}
}
module.exports = Audible
module.exports = Audible

View file

@ -2,22 +2,32 @@ const axios = require('axios')
const Logger = require('../Logger')
class AudiobookCovers {
constructor() { }
#responseTimeout = 30000
constructor() {}
/**
*
* @param {string} search
* @param {number} [timeout]
* @returns {Promise<{cover: string}[]>}
*/
async search(search, timeout = this.#responseTimeout) {
if (!timeout || isNaN(timeout)) timeout = this.#responseTimeout
async search(search) {
const url = `https://api.audiobookcovers.com/cover/bytext/`
const params = new URLSearchParams([['q', search]])
const items = await axios.get(url, { params }).then((res) => {
if (!res || !res.data) return []
return res.data
}).catch(error => {
Logger.error('[AudiobookCovers] Cover search error', error)
return []
})
return items.map(item => ({ cover: item.versions.png.original }))
const items = await axios
.get(url, {
params,
timeout
})
.then((res) => res?.data || [])
.catch((error) => {
Logger.error('[AudiobookCovers] Cover search error', error)
return []
})
return items.map((item) => ({ cover: item.versions.png.original }))
}
}
module.exports = AudiobookCovers

View file

@ -1,4 +1,4 @@
const axios = require('axios')
const axios = require('axios').default
const { levenshteinDistance } = require('../utils/index')
const Logger = require('../Logger')
const Throttle = require('p-throttle')
@ -15,7 +15,7 @@ class Audnexus {
static _instance = null
constructor() {
// ensures Audnexus class is singleton
// ensures Audnexus class is singleton
if (Audnexus._instance) {
return Audnexus._instance
}
@ -25,7 +25,7 @@ class Audnexus {
// Rate limit is 100 requests per minute.
// @see https://github.com/laxamentumtech/audnexus#-deployment-
this.limiter = Throttle({
// Setting the limit to 1 allows for a short pause between requests that is imperceptible to the end user.
// Setting the limit to 1 allows for a short pause between requests that is imperceptible to the end user.
// A larger limit will grab blocks faster and then wait for the alloted time(interval) before
// fetching another batch, but with a discernable pause from the user perspective.
limit: 1,
@ -37,10 +37,10 @@ class Audnexus {
}
/**
*
* @param {string} name
* @param {string} region
* @returns {Promise<{asin:string, name:string}[]>}
*
* @param {string} name
* @param {string} region
* @returns {Promise<{asin:string, name:string}[]>}
*/
authorASINsRequest(name, region) {
const searchParams = new URLSearchParams()
@ -60,9 +60,9 @@ class Audnexus {
}
/**
*
* @param {string} asin
* @param {string} region
*
* @param {string} asin
* @param {string} region
* @returns {Promise<AuthorSearchObj>}
*/
authorRequest(asin, region) {
@ -73,17 +73,17 @@ class Audnexus {
Logger.info(`[Audnexus] Searching for author "${authorRequestUrl}"`)
return this._processRequest(this.limiter(() => axios.get(authorRequestUrl)))
.then((res) => res.data)
.catch((error) => {
.then(res => res.data)
.catch(error => {
Logger.error(`[Audnexus] Author request failed for ${asin}`, error)
return null
})
}
/**
*
* @param {string} asin
* @param {string} region
*
* @param {string} asin
* @param {string} region
* @returns {Promise<AuthorSearchObj>}
*/
async findAuthorByASIN(asin, region) {
@ -99,10 +99,10 @@ class Audnexus {
}
/**
*
* @param {string} name
* @param {string} region
* @param {number} maxLevenshtein
*
* @param {string} name
* @param {string} region
* @param {number} maxLevenshtein
* @returns {Promise<AuthorSearchObj>}
*/
async findAuthorByName(name, region, maxLevenshtein = 3) {
@ -138,8 +138,8 @@ class Audnexus {
Logger.debug(`[Audnexus] Get chapters for ASIN ${asin}/${region}`)
return this._processRequest(this.limiter(() => axios.get(`${this.baseUrl}/books/${asin}/chapters?region=${region}`)))
.then((res) => res.data)
.catch((error) => {
.then(res => res.data)
.catch(error => {
Logger.error(`[Audnexus] Chapter ASIN request failed for ${asin}/${region}`, error)
return null
})
@ -150,8 +150,7 @@ class Audnexus {
*/
async _processRequest(request) {
try {
const response = await request()
return response
return await request()
} catch (error) {
if (error.response?.status === 429) {
const retryAfter = parseInt(error.response.headers?.['retry-after'], 10) || 5

View file

@ -1,93 +1,91 @@
const axios = require('axios').default
const Database = require('../Database')
const axios = require('axios')
const Logger = require('../Logger')
class CustomProviderAdapter {
constructor() { }
#responseTimeout = 30000
/**
*
* @param {string} title
* @param {string} author
* @param {string} providerSlug
* @param {string} mediaType
* @returns {Promise<Object[]>}
*/
async search(title, author, providerSlug, mediaType) {
const providerId = providerSlug.split('custom-')[1]
const provider = await Database.customMetadataProviderModel.findByPk(providerId)
constructor() {}
if (!provider) {
throw new Error("Custom provider not found for the given id")
}
/**
*
* @param {string} title
* @param {string} author
* @param {string} isbn
* @param {string} providerSlug
* @param {string} mediaType
* @param {number} [timeout] response timeout in ms
* @returns {Promise<Object[]>}
*/
async search(title, author, isbn, providerSlug, mediaType, timeout = this.#responseTimeout) {
if (!timeout || isNaN(timeout)) timeout = this.#responseTimeout
// Setup query params
const queryObj = {
mediaType,
query: title
}
if (author) {
queryObj.author = author
}
const queryString = (new URLSearchParams(queryObj)).toString()
const providerId = providerSlug.split('custom-')[1]
const provider = await Database.customMetadataProviderModel.findByPk(providerId)
// Setup headers
const axiosOptions = {}
if (provider.authHeaderValue) {
axiosOptions.headers = {
'Authorization': provider.authHeaderValue
}
}
const matches = await axios.get(`${provider.url}/search?${queryString}}`, axiosOptions).then((res) => {
if (!res?.data || !Array.isArray(res.data.matches)) return null
return res.data.matches
}).catch(error => {
Logger.error('[CustomMetadataProvider] Search error', error)
return []
})
if (!matches) {
throw new Error("Custom provider returned malformed response")
}
// re-map keys to throw out
return matches.map(({
title,
subtitle,
author,
narrator,
publisher,
publishedYear,
description,
cover,
isbn,
asin,
genres,
tags,
series,
language,
duration
}) => {
return {
title,
subtitle,
author,
narrator,
publisher,
publishedYear,
description,
cover,
isbn,
asin,
genres,
tags: tags?.join(',') || null,
series: series?.length ? series : null,
language,
duration
}
})
if (!provider) {
throw new Error('Custom provider not found for the given id')
}
// Setup query params
const queryObj = {
mediaType,
query: title
}
if (author) {
queryObj.author = author
}
if (isbn) {
queryObj.isbn = isbn
}
const queryString = new URLSearchParams(queryObj).toString()
// Setup headers
const axiosOptions = {
timeout
}
if (provider.authHeaderValue) {
axiosOptions.headers = {
Authorization: provider.authHeaderValue
}
}
const matches = await axios
.get(`${provider.url}/search?${queryString}`, axiosOptions)
.then((res) => {
if (!res?.data || !Array.isArray(res.data.matches)) return null
return res.data.matches
})
.catch((error) => {
Logger.error('[CustomMetadataProvider] Search error', error)
return []
})
if (!matches) {
throw new Error('Custom provider returned malformed response')
}
// re-map keys to throw out
return matches.map(({ title, subtitle, author, narrator, publisher, publishedYear, description, cover, isbn, asin, genres, tags, series, language, duration }) => {
return {
title,
subtitle,
author,
narrator,
publisher,
publishedYear,
description,
cover,
isbn,
asin,
genres,
tags: tags?.join(',') || null,
series: series?.length ? series : null,
language,
duration
}
})
}
}
module.exports = CustomProviderAdapter
module.exports = CustomProviderAdapter

View file

@ -2,6 +2,7 @@ const axios = require('axios')
const Logger = require('../Logger')
class FantLab {
#responseTimeout = 30000
// 7 - other
// 11 - essay
// 12 - article
@ -22,28 +23,47 @@ class FantLab {
_filterWorkType = [7, 11, 12, 22, 23, 24, 25, 26, 46, 47, 49, 51, 52, 55, 56, 57]
_baseUrl = 'https://api.fantlab.ru'
constructor() { }
constructor() {}
/**
* @param {string} title
* @param {string} author
* @param {number} [timeout] response timeout in ms
* @returns {Promise<Object[]>}
**/
async search(title, author, timeout = this.#responseTimeout) {
if (!timeout || isNaN(timeout)) timeout = this.#responseTimeout
async search(title, author) {
let searchString = encodeURIComponent(title)
if (author) {
searchString += encodeURIComponent(' ' + author)
}
const url = `${this._baseUrl}/search-works?q=${searchString}&page=1&onlymatches=1`
Logger.debug(`[FantLab] Search url: ${url}`)
const items = await axios.get(url).then((res) => {
return res.data || []
}).catch(error => {
Logger.error('[FantLab] search error', error)
return []
})
const items = await axios
.get(url, {
timeout
})
.then((res) => {
return res.data || []
})
.catch((error) => {
Logger.error('[FantLab] search error', error)
return []
})
return Promise.all(items.map(async item => await this.getWork(item))).then(resArray => {
return resArray.filter(res => res)
return Promise.all(items.map(async (item) => await this.getWork(item, timeout))).then((resArray) => {
return resArray.filter((res) => res)
})
}
async getWork(item) {
/**
* @param {Object} item
* @param {number} [timeout] response timeout in ms
* @returns {Promise<Object>}
**/
async getWork(item, timeout = this.#responseTimeout) {
if (!timeout || isNaN(timeout)) timeout = this.#responseTimeout
const { work_id, work_type_id } = item
if (this._filterWorkType.includes(work_type_id)) {
@ -51,23 +71,34 @@ class FantLab {
}
const url = `${this._baseUrl}/work/${work_id}/extended`
const bookData = await axios.get(url).then((resp) => {
return resp.data || null
}).catch((error) => {
Logger.error(`[FantLab] work info request for url "${url}" error`, error)
return null
})
const bookData = await axios
.get(url, {
timeout
})
.then((resp) => {
return resp.data || null
})
.catch((error) => {
Logger.error(`[FantLab] work info request for url "${url}" error`, error)
return null
})
return this.cleanBookData(bookData)
return this.cleanBookData(bookData, timeout)
}
async cleanBookData(bookData) {
/**
*
* @param {Object} bookData
* @param {number} [timeout]
* @returns {Promise<Object>}
*/
async cleanBookData(bookData, timeout = this.#responseTimeout) {
let { authors, work_name_alts, work_id, work_name, work_year, work_description, image, classificatory, editions_blocks } = bookData
const subtitle = Array.isArray(work_name_alts) ? work_name_alts[0] : null
const authorNames = authors.map(au => (au.name || '').trim()).filter(au => au)
const authorNames = authors.map((au) => (au.name || '').trim()).filter((au) => au)
const imageAndIsbn = await this.tryGetCoverFromEditions(editions_blocks)
const imageAndIsbn = await this.tryGetCoverFromEditions(editions_blocks, timeout)
const imageToUse = imageAndIsbn?.imageUrl || image
@ -88,7 +119,7 @@ class FantLab {
tryGetGenres(classificatory) {
if (!classificatory || !classificatory.genre_group) return []
const genresGroup = classificatory.genre_group.find(group => group.genre_group_id == 1) // genres and subgenres
const genresGroup = classificatory.genre_group.find((group) => group.genre_group_id == 1) // genres and subgenres
// genre_group_id=2 - General Characteristics
// genre_group_id=3 - Arena
@ -108,10 +139,16 @@ class FantLab {
tryGetSubGenres(rootGenre) {
if (!rootGenre.genre || !rootGenre.genre.length) return []
return rootGenre.genre.map(g => g.label).filter(g => g)
return rootGenre.genre.map((g) => g.label).filter((g) => g)
}
async tryGetCoverFromEditions(editions) {
/**
*
* @param {Object} editions
* @param {number} [timeout]
* @returns {Promise<{imageUrl: string, isbn: string}>}
*/
async tryGetCoverFromEditions(editions, timeout = this.#responseTimeout) {
if (!editions) {
return null
}
@ -129,24 +166,37 @@ class FantLab {
const isbn = lastEdition['isbn'] || null // get only from paper edition
return {
imageUrl: await this.getCoverFromEdition(editionId),
imageUrl: await this.getCoverFromEdition(editionId, timeout),
isbn
}
}
async getCoverFromEdition(editionId) {
/**
*
* @param {number} editionId
* @param {number} [timeout]
* @returns {Promise<string>}
*/
async getCoverFromEdition(editionId, timeout = this.#responseTimeout) {
if (!editionId) return null
if (!timeout || isNaN(timeout)) timeout = this.#responseTimeout
const url = `${this._baseUrl}/edition/${editionId}`
const editionInfo = await axios.get(url).then((resp) => {
return resp.data || null
}).catch(error => {
Logger.error(`[FantLab] search cover from edition with url "${url}" error`, error)
return null
})
const editionInfo = await axios
.get(url, {
timeout
})
.then((resp) => {
return resp.data || null
})
.catch((error) => {
Logger.error(`[FantLab] search cover from edition with url "${url}" error`, error)
return null
})
return editionInfo?.image || null
}
}
module.exports = FantLab
module.exports = FantLab

View file

@ -2,12 +2,14 @@ const axios = require('axios')
const Logger = require('../Logger')
class GoogleBooks {
constructor() { }
#responseTimeout = 30000
constructor() {}
extractIsbn(industryIdentifiers) {
if (!industryIdentifiers || !industryIdentifiers.length) return null
var isbnObj = industryIdentifiers.find(i => i.type === 'ISBN_13') || industryIdentifiers.find(i => i.type === 'ISBN_10')
var isbnObj = industryIdentifiers.find((i) => i.type === 'ISBN_13') || industryIdentifiers.find((i) => i.type === 'ISBN_10')
if (isbnObj && isbnObj.identifier) return isbnObj.identifier
return null
}
@ -38,24 +40,38 @@ class GoogleBooks {
}
}
async search(title, author) {
/**
* Search for a book by title and author
* @param {string} title
* @param {string} author
* @param {number} [timeout] response timeout in ms
* @returns {Promise<Object[]>}
**/
async search(title, author, timeout = this.#responseTimeout) {
if (!timeout || isNaN(timeout)) timeout = this.#responseTimeout
title = encodeURIComponent(title)
var queryString = `q=intitle:${title}`
let queryString = `q=intitle:${title}`
if (author) {
author = encodeURIComponent(author)
queryString += `+inauthor:${author}`
}
var url = `https://www.googleapis.com/books/v1/volumes?${queryString}`
const url = `https://www.googleapis.com/books/v1/volumes?${queryString}`
Logger.debug(`[GoogleBooks] Search url: ${url}`)
var items = await axios.get(url).then((res) => {
if (!res || !res.data || !res.data.items) return []
return res.data.items
}).catch(error => {
Logger.error('[GoogleBooks] Volume search error', error)
return []
})
return items.map(item => this.cleanResult(item))
const items = await axios
.get(url, {
timeout
})
.then((res) => {
if (!res || !res.data || !res.data.items) return []
return res.data.items
})
.catch((error) => {
Logger.error('[GoogleBooks] Volume search error', error)
return []
})
return items.map((item) => this.cleanResult(item))
}
}
module.exports = GoogleBooks
module.exports = GoogleBooks

View file

@ -1,17 +1,31 @@
var axios = require('axios')
const axios = require('axios').default
class OpenLibrary {
#responseTimeout = 30000
constructor() {
this.baseUrl = 'https://openlibrary.org'
}
get(uri) {
return axios.get(`${this.baseUrl}/${uri}`).then((res) => {
return res.data
}).catch((error) => {
console.error('Failed', error)
return false
})
/**
*
* @param {string} uri
* @param {number} timeout
* @returns {Promise<Object>}
*/
get(uri, timeout = this.#responseTimeout) {
if (!timeout || isNaN(timeout)) timeout = this.#responseTimeout
return axios
.get(`${this.baseUrl}/${uri}`, {
timeout
})
.then((res) => {
return res.data
})
.catch((error) => {
console.error('Failed', error)
return null
})
}
async isbnLookup(isbn) {
@ -33,7 +47,7 @@ class OpenLibrary {
}
}
if (!worksData.covers) worksData.covers = []
var coverImages = worksData.covers.filter(c => c > 0).map(c => `https://covers.openlibrary.org/b/id/${c}-L.jpg`)
var coverImages = worksData.covers.filter((c) => c > 0).map((c) => `https://covers.openlibrary.org/b/id/${c}-L.jpg`)
var description = null
if (worksData.description) {
if (typeof worksData.description === 'string') {
@ -73,27 +87,35 @@ class OpenLibrary {
}
async search(query) {
var queryString = Object.keys(query).map(key => key + '=' + query[key]).join('&')
var queryString = Object.keys(query)
.map((key) => key + '=' + query[key])
.join('&')
var lookupData = await this.get(`/search.json?${queryString}`)
if (!lookupData) {
return {
errorCode: 404
}
}
var searchDocs = await Promise.all(lookupData.docs.map(d => this.cleanSearchDoc(d)))
var searchDocs = await Promise.all(lookupData.docs.map((d) => this.cleanSearchDoc(d)))
return searchDocs
}
async searchTitle(title) {
title = encodeURIComponent(title);
var lookupData = await this.get(`/search.json?title=${title}`)
/**
*
* @param {string} title
* @param {number} timeout
* @returns {Promise<Object[]>}
*/
async searchTitle(title, timeout = this.#responseTimeout) {
title = encodeURIComponent(title)
var lookupData = await this.get(`/search.json?title=${title}`, timeout)
if (!lookupData) {
return {
errorCode: 404
}
}
var searchDocs = await Promise.all(lookupData.docs.map(d => this.cleanSearchDoc(d)))
var searchDocs = await Promise.all(lookupData.docs.map((d) => this.cleanSearchDoc(d)))
return searchDocs
}
}
module.exports = OpenLibrary
module.exports = OpenLibrary

View file

@ -28,19 +28,24 @@ const htmlSanitizer = require('../utils/htmlSanitizer')
*/
class iTunes {
constructor() { }
#responseTimeout = 30000
constructor() {}
/**
* @see https://developer.apple.com/library/archive/documentation/AudioVideo/Conceptual/iTuneSearchAPI/Searching.html
*
* @param {iTunesSearchParams} options
*
* @param {iTunesSearchParams} options
* @param {number} [timeout] response timeout in ms
* @returns {Promise<Object[]>}
*/
search(options) {
search(options, timeout = this.#responseTimeout) {
if (!options.term) {
Logger.error('[iTunes] Invalid search options - no term')
return []
}
if (!timeout || isNaN(timeout)) timeout = this.#responseTimeout
const query = {
term: options.term,
media: options.media,
@ -49,12 +54,18 @@ class iTunes {
limit: options.limit,
country: options.country
}
return axios.get('https://itunes.apple.com/search', { params: query }).then((response) => {
return response.data.results || []
}).catch((error) => {
Logger.error(`[iTunes] search request error`, error)
return []
})
return axios
.get('https://itunes.apple.com/search', {
params: query,
timeout
})
.then((response) => {
return response.data.results || []
})
.catch((error) => {
Logger.error(`[iTunes] search request error`, error)
return []
})
}
// Example cover art: https://is1-ssl.mzstatic.com/image/thumb/Music118/v4/cb/ea/73/cbea739b-ff3b-11c4-fb93-7889fbec7390/9781598874983_cover.jpg/100x100bb.jpg
@ -65,20 +76,22 @@ class iTunes {
return data.artworkUrl600
}
// Should already be sorted from small to large
var artworkSizes = Object.keys(data).filter(key => key.startsWith('artworkUrl')).map(key => {
return {
url: data[key],
size: Number(key.replace('artworkUrl', ''))
}
})
var artworkSizes = Object.keys(data)
.filter((key) => key.startsWith('artworkUrl'))
.map((key) => {
return {
url: data[key],
size: Number(key.replace('artworkUrl', ''))
}
})
if (!artworkSizes.length) return null
// Return next biggest size > 600
var nextBestSize = artworkSizes.find(size => size.size > 600)
var nextBestSize = artworkSizes.find((size) => size.size > 600)
if (nextBestSize) return nextBestSize.url
// Find square artwork
var squareArtwork = artworkSizes.find(size => size.url.includes(`${size.size}x${size.size}bb`))
var squareArtwork = artworkSizes.find((size) => size.url.includes(`${size.size}x${size.size}bb`))
// Square cover replace with 600x600bb
if (squareArtwork) {
@ -106,15 +119,21 @@ class iTunes {
}
}
searchAudiobooks(term) {
return this.search({ term, entity: 'audiobook', media: 'audiobook' }).then((results) => {
/**
*
* @param {string} term
* @param {number} [timeout] response timeout in ms
* @returns {Promise<Object[]>}
*/
searchAudiobooks(term, timeout = this.#responseTimeout) {
return this.search({ term, entity: 'audiobook', media: 'audiobook' }, timeout).then((results) => {
return results.map(this.cleanAudiobook.bind(this))
})
}
/**
*
* @param {Object} data
*
* @param {Object} data
* @returns {iTunesPodcastSearchResult}
*/
cleanPodcast(data) {
@ -136,13 +155,14 @@ class iTunes {
}
/**
*
* @param {string} term
* @param {{country:string}} options
*
* @param {string} term
* @param {{country:string}} options
* @param {number} [timeout] response timeout in ms
* @returns {Promise<iTunesPodcastSearchResult[]>}
*/
searchPodcasts(term, options = {}) {
return this.search({ term, entity: 'podcast', media: 'podcast', ...options }).then((results) => {
searchPodcasts(term, options = {}, timeout = this.#responseTimeout) {
return this.search({ term, entity: 'podcast', media: 'podcast', ...options }, timeout).then((results) => {
return results.map(this.cleanPodcast.bind(this))
})
}

View file

@ -166,6 +166,7 @@ class ApiRouter {
//
this.router.get('/me', MeController.getCurrentUser.bind(this))
this.router.get('/me/listening-sessions', MeController.getListeningSessions.bind(this))
this.router.get('/me/item/listening-sessions/:libraryItemId/:episodeId?', MeController.getItemListeningSessions.bind(this))
this.router.get('/me/listening-stats', MeController.getListeningStats.bind(this))
this.router.get('/me/progress/:id/remove-from-continue-listening', MeController.removeItemFromContinueListening.bind(this))
this.router.get('/me/progress/:id/:episodeId?', MeController.getMediaProgress.bind(this))
@ -425,9 +426,9 @@ class ApiRouter {
/**
* Used when a series is removed from a book
* Series is removed if it only has 1 book
*
*
* @param {string} bookId
* @param {string[]} seriesIds
* @param {string[]} seriesIds
*/
async checkRemoveEmptySeries(bookId, seriesIds) {
if (!seriesIds?.length) return
@ -455,7 +456,7 @@ class ApiRouter {
/**
* Remove an empty series & close an open RSS feed
* @param {import('../models/Series')} series
* @param {import('../models/Series')} series
*/
async removeEmptySeries(series) {
await this.rssFeedManager.closeFeedForEntityId(series.id)
@ -474,6 +475,11 @@ class ApiRouter {
return userSessions.sort((a, b) => b.updatedAt - a.updatedAt)
}
async getUserItemListeningSessionsHelper(userId, mediaItemId) {
const userSessions = await Database.getPlaybackSessions({ userId, mediaItemId })
return userSessions.sort((a, b) => b.updatedAt - a.updatedAt)
}
async getUserListeningStatsHelpers(userId) {
const today = date.format(new Date(), 'YYYY-MM-DD')
@ -531,6 +537,7 @@ class ApiRouter {
const authorName = (mediaMetadata.authors[i].name || '').trim()
if (!authorName) {
Logger.error(`[ApiRouter] Invalid author object, no name`, mediaMetadata.authors[i])
mediaMetadata.authors[i].id = null
continue
}
@ -559,6 +566,8 @@ class ApiRouter {
mediaMetadata.authors[i].id = author.id
}
}
// Remove authors without an id
mediaMetadata.authors = mediaMetadata.authors.filter(au => !!au.id)
if (newAuthors.length) {
await Database.createBulkAuthors(newAuthors)
SocketAuthority.emitter('authors_added', newAuthors.map(au => au.toJSON()))
@ -572,6 +581,7 @@ class ApiRouter {
const seriesName = (mediaMetadata.series[i].name || '').trim()
if (!seriesName) {
Logger.error(`[ApiRouter] Invalid series object, no name`, mediaMetadata.series[i])
mediaMetadata.series[i].id = null
continue
}
@ -600,6 +610,8 @@ class ApiRouter {
mediaMetadata.series[i].id = seriesItem.id
}
}
// Remove series without an id
mediaMetadata.series = mediaMetadata.series.filter(se => se.id)
if (newSeries.length) {
await Database.createBulkSeries(newSeries)
SocketAuthority.emitter('multiple_series_added', newSeries.map(se => se.toJSON()))

View file

@ -8,11 +8,11 @@ const LibraryItem = require('../models/LibraryItem')
const AudioFile = require('../objects/files/AudioFile')
class AudioFileScanner {
constructor() { }
constructor() {}
/**
* Is array of numbers sequential, i.e. 1, 2, 3, 4
* @param {number[]} nums
* @param {number[]} nums
* @returns {boolean}
*/
isSequential(nums) {
@ -27,8 +27,8 @@ class AudioFileScanner {
}
/**
* Remove
* @param {number[]} nums
* Remove
* @param {number[]} nums
* @returns {number[]}
*/
removeDupes(nums) {
@ -44,8 +44,8 @@ class AudioFileScanner {
/**
* Order audio files by track/disc number
* @param {string} libraryItemRelPath
* @param {import('../models/Book').AudioFileObject[]} audioFiles
* @param {string} libraryItemRelPath
* @param {import('../models/Book').AudioFileObject[]} audioFiles
* @returns {import('../models/Book').AudioFileObject[]}
*/
runSmartTrackOrder(libraryItemRelPath, audioFiles) {
@ -103,8 +103,8 @@ class AudioFileScanner {
/**
* Get track and disc number from audio filename
* @param {{title:string, subtitle:string, series:string, sequence:string, publishedYear:string, narrators:string}} mediaMetadataFromScan
* @param {LibraryItem.LibraryFileObject} audioLibraryFile
* @param {{title:string, subtitle:string, series:string, sequence:string, publishedYear:string, narrators:string}} mediaMetadataFromScan
* @param {LibraryItem.LibraryFileObject} audioLibraryFile
* @returns {{trackNumber:number, discNumber:number}}
*/
getTrackAndDiscNumberFromFilename(mediaMetadataFromScan, audioLibraryFile) {
@ -146,10 +146,10 @@ class AudioFileScanner {
}
/**
*
* @param {string} mediaType
* @param {LibraryItem.LibraryFileObject} libraryFile
* @param {{title:string, subtitle:string, series:string, sequence:string, publishedYear:string, narrators:string}} mediaMetadataFromScan
*
* @param {string} mediaType
* @param {LibraryItem.LibraryFileObject} libraryFile
* @param {{title:string, subtitle:string, series:string, sequence:string, publishedYear:string, narrators:string}} mediaMetadataFromScan
* @returns {Promise<AudioFile>}
*/
async scan(mediaType, libraryFile, mediaMetadataFromScan) {
@ -181,7 +181,7 @@ class AudioFileScanner {
/**
* Scan LibraryFiles and return AudioFiles
* @param {string} mediaType
* @param {import('./LibraryItemScanData')} libraryItemScanData
* @param {import('./LibraryItemScanData')} libraryItemScanData
* @param {LibraryItem.LibraryFileObject[]} audioLibraryFiles
* @returns {Promise<AudioFile[]>}
*/
@ -193,15 +193,15 @@ class AudioFileScanner {
for (let i = batch; i < Math.min(batch + batchSize, audioLibraryFiles.length); i++) {
proms.push(this.scan(mediaType, audioLibraryFiles[i], libraryItemScanData.mediaMetadata))
}
results.push(...await Promise.all(proms).then((scanResults) => scanResults.filter(sr => sr)))
results.push(...(await Promise.all(proms).then((scanResults) => scanResults.filter((sr) => sr))))
}
return results
}
/**
*
* @param {AudioFile} audioFile
*
* @param {AudioFile} audioFile
* @returns {object}
*/
probeAudioFile(audioFile) {
@ -211,10 +211,10 @@ class AudioFileScanner {
/**
* Set book metadata & chapters from audio file meta tags
*
*
* @param {string} bookTitle
* @param {import('../models/Book').AudioFileObject} audioFile
* @param {Object} bookMetadata
* @param {import('../models/Book').AudioFileObject} audioFile
* @param {Object} bookMetadata
* @param {import('./LibraryScan')} libraryScan
*/
setBookMetadataFromAudioMetaTags(bookTitle, audioFiles, bookMetadata, libraryScan) {
@ -243,7 +243,7 @@ class AudioFileScanner {
{
tag: 'tagAlbum',
altTag: 'tagTitle',
key: 'title',
key: 'title'
},
{
tag: 'tagArtist',
@ -311,9 +311,9 @@ class AudioFileScanner {
/**
* Set podcast metadata from first audio file
*
* @param {import('../models/Book').AudioFileObject} audioFile
* @param {Object} podcastMetadata
*
* @param {import('../models/Book').AudioFileObject} audioFile
* @param {Object} podcastMetadata
* @param {import('./LibraryScan')} libraryScan
*/
setPodcastMetadataFromAudioMetaTags(audioFile, podcastMetadata, libraryScan) {
@ -343,7 +343,7 @@ class AudioFileScanner {
},
{
tag: 'tagPodcastType',
key: 'podcastType',
key: 'podcastType'
}
]
@ -370,7 +370,7 @@ class AudioFileScanner {
}
/**
*
*
* @param {import('../models/PodcastEpisode')} podcastEpisode Not the model when creating new podcast
* @param {import('./ScanLogger')} scanLogger
*/
@ -378,7 +378,7 @@ class AudioFileScanner {
const MetadataMapArray = [
{
tag: 'tagComment',
altTag: 'tagSubtitle',
altTag: 'tagDescription',
key: 'description'
},
{
@ -391,7 +391,7 @@ class AudioFileScanner {
},
{
tag: 'tagDisc',
key: 'season',
key: 'season'
},
{
tag: 'tagTrack',
@ -446,7 +446,7 @@ class AudioFileScanner {
/**
* @param {string} bookTitle
* @param {AudioFile[]} audioFiles
* @param {AudioFile[]} audioFiles
* @param {import('./LibraryScan')} libraryScan
* @returns {import('../models/Book').ChapterObject[]}
*/
@ -464,12 +464,7 @@ class AudioFileScanner {
// If first audio file has embedded chapters then use embedded chapters
if (audioFiles[0].chapters?.length) {
// If all files chapters are the same, then only make chapters for the first file
if (
audioFiles.length === 1 ||
audioFiles.length > 1 &&
audioFiles[0].chapters.length === audioFiles[1].chapters?.length &&
audioFiles[0].chapters.every((c, i) => c.title === audioFiles[1].chapters[i].title && c.start === audioFiles[1].chapters[i].start)
) {
if (audioFiles.length === 1 || (audioFiles.length > 1 && audioFiles[0].chapters.length === audioFiles[1].chapters?.length && audioFiles[0].chapters.every((c, i) => c.title === audioFiles[1].chapters[i].title && c.start === audioFiles[1].chapters[i].start))) {
libraryScan.addLog(LogLevel.DEBUG, `setChapters: Using embedded chapters in first audio file ${audioFiles[0].metadata?.path}`)
chapters = audioFiles[0].chapters.map((c) => ({ ...c }))
} else {
@ -479,12 +474,13 @@ class AudioFileScanner {
audioFiles.forEach((file) => {
if (file.duration) {
const afChapters = file.chapters?.map((c) => ({
...c,
id: c.id + currChapterId,
start: c.start + currStartTime,
end: c.end + currStartTime,
})) ?? []
const afChapters =
file.chapters?.map((c) => ({
...c,
id: c.id + currChapterId,
start: c.start + currStartTime,
end: c.end + currStartTime
})) ?? []
chapters = chapters.concat(afChapters)
currChapterId += file.chapters?.length ?? 0
@ -494,12 +490,11 @@ class AudioFileScanner {
return chapters
}
} else if (audioFiles.length > 1) {
// In some cases the ID3 title tag for each file is the chapter title, the criteria to determine if this will be used
// 1. Every audio file has an ID3 title tag set
// 2. None of the title tags are the same as the book title
// 3. Every ID3 title tag is unique
const metaTagTitlesFound = [...new Set(audioFiles.map(af => af.metaTags?.tagTitle).filter(tagTitle => !!tagTitle && tagTitle !== bookTitle))]
const metaTagTitlesFound = [...new Set(audioFiles.map((af) => af.metaTags?.tagTitle).filter((tagTitle) => !!tagTitle && tagTitle !== bookTitle))]
const useMetaTagAsTitle = metaTagTitlesFound.length === audioFiles.length
// Build chapters from audio files
@ -528,8 +523,8 @@ class AudioFileScanner {
/**
* Parse a genre string into multiple genres
* @example "Fantasy;Sci-Fi;History" => ["Fantasy", "Sci-Fi", "History"]
*
* @param {string} genreTag
*
* @param {string} genreTag
* @returns {string[]}
*/
parseGenresString(genreTag) {
@ -537,10 +532,13 @@ class AudioFileScanner {
const separators = ['/', '//', ';']
for (let i = 0; i < separators.length; i++) {
if (genreTag.includes(separators[i])) {
return genreTag.split(separators[i]).map(genre => genre.trim()).filter(g => !!g)
return genreTag
.split(separators[i])
.map((genre) => genre.trim())
.filter((g) => !!g)
}
}
return [genreTag]
}
}
module.exports = new AudioFileScanner()
module.exports = new AudioFileScanner()

View file

@ -20,6 +20,7 @@ const LibraryScan = require("./LibraryScan")
const OpfFileScanner = require('./OpfFileScanner')
const NfoFileScanner = require('./NfoFileScanner')
const AbsMetadataFileScanner = require('./AbsMetadataFileScanner')
const EBookFile = require("../objects/files/EBookFile")
/**
* Metadata for books pulled from files
@ -84,7 +85,7 @@ class BookScanner {
// Update audio files that were modified
if (libraryItemData.audioLibraryFilesModified.length) {
let scannedAudioFiles = await AudioFileScanner.executeMediaFileScans(existingLibraryItem.mediaType, libraryItemData, libraryItemData.audioLibraryFilesModified)
let scannedAudioFiles = await AudioFileScanner.executeMediaFileScans(existingLibraryItem.mediaType, libraryItemData, libraryItemData.audioLibraryFilesModified.map(lf => lf.new))
media.audioFiles = media.audioFiles.map((audioFileObj) => {
let matchedScannedAudioFile = scannedAudioFiles.find(saf => saf.metadata.path === audioFileObj.metadata.path)
if (!matchedScannedAudioFile) {
@ -138,11 +139,25 @@ class BookScanner {
}
// Check if cover was removed
if (media.coverPath && !libraryItemData.imageLibraryFiles.some(lf => lf.metadata.path === media.coverPath) && !(await fsExtra.pathExists(media.coverPath))) {
if (media.coverPath && libraryItemData.imageLibraryFilesRemoved.some(lf => lf.metadata.path === media.coverPath) && !(await fsExtra.pathExists(media.coverPath))) {
media.coverPath = null
hasMediaChanges = true
}
// Update cover if it was modified
if (media.coverPath && libraryItemData.imageLibraryFilesModified.length) {
let coverMatch = libraryItemData.imageLibraryFilesModified.find(iFile => iFile.old.metadata.path === media.coverPath)
if (coverMatch) {
const coverPath = coverMatch.new.metadata.path
if (coverPath !== media.coverPath) {
libraryScan.addLog(LogLevel.DEBUG, `Updating book cover "${media.coverPath}" => "${coverPath}" for book "${media.title}"`)
media.coverPath = coverPath
media.changed('coverPath', true)
hasMediaChanges = true
}
}
}
// Check if cover is not set and image files were found
if (!media.coverPath && libraryItemData.imageLibraryFiles.length) {
// Prefer using a cover image with the name "cover" otherwise use the first image
@ -157,6 +172,19 @@ class BookScanner {
hasMediaChanges = true
}
// Update ebook if it was modified
if (media.ebookFile && libraryItemData.ebookLibraryFilesModified.length) {
let ebookMatch = libraryItemData.ebookLibraryFilesModified.find(eFile => eFile.old.metadata.path === media.ebookFile.metadata.path)
if (ebookMatch) {
const ebookFile = new EBookFile(ebookMatch.new)
ebookFile.ebookFormat = ebookFile.metadata.ext.slice(1).toLowerCase()
libraryScan.addLog(LogLevel.DEBUG, `Updating book ebook file "${media.ebookFile.metadata.path}" => "${ebookFile.metadata.path}" for book "${media.title}"`)
media.ebookFile = ebookFile.toJSON()
media.changed('ebookFile', true)
hasMediaChanges = true
}
}
// Check if ebook is not set and ebooks were found
if (!media.ebookFile && !librarySettings.audiobooksOnly && libraryItemData.ebookLibraryFiles.length) {
// Prefer to use an epub ebook then fallback to the first ebook found
@ -186,11 +214,11 @@ class BookScanner {
// Check for authors added
for (const authorName of bookMetadata.authors) {
if (!media.authors.some(au => au.name === authorName)) {
const existingAuthor = Database.libraryFilterData[libraryItemData.libraryId].authors.find(au => au.name === authorName)
if (existingAuthor) {
const existingAuthorId = await Database.getAuthorIdByName(libraryItemData.libraryId, authorName)
if (existingAuthorId) {
await Database.bookAuthorModel.create({
bookId: media.id,
authorId: existingAuthor.id
authorId: existingAuthorId
})
libraryScan.addLog(LogLevel.DEBUG, `Updating book "${bookMetadata.title}" added author "${authorName}"`)
authorsUpdated = true
@ -221,11 +249,11 @@ class BookScanner {
for (const seriesObj of bookMetadata.series) {
const existingBookSeries = media.series.find(se => se.name === seriesObj.name)
if (!existingBookSeries) {
const existingSeries = Database.libraryFilterData[libraryItemData.libraryId].series.find(se => se.name === seriesObj.name)
if (existingSeries) {
const existingSeriesId = await Database.getSeriesIdByName(libraryItemData.libraryId, seriesObj.name)
if (existingSeriesId) {
await Database.bookSeriesModel.create({
bookId: media.id,
seriesId: existingSeries.id,
seriesId: existingSeriesId,
sequence: seriesObj.sequence
})
libraryScan.addLog(LogLevel.DEBUG, `Updating book "${bookMetadata.title}" added series "${seriesObj.name}"${seriesObj.sequence ? ` with sequence "${seriesObj.sequence}"` : ''}`)
@ -443,10 +471,10 @@ class BookScanner {
}
if (bookMetadata.authors.length) {
for (const authorName of bookMetadata.authors) {
const matchingAuthor = Database.libraryFilterData[libraryItemData.libraryId].authors.find(au => au.name === authorName)
if (matchingAuthor) {
const matchingAuthorId = await Database.getAuthorIdByName(libraryItemData.libraryId, authorName)
if (matchingAuthorId) {
bookObject.bookAuthors.push({
authorId: matchingAuthor.id
authorId: matchingAuthorId
})
} else {
// New author
@ -463,10 +491,10 @@ class BookScanner {
if (bookMetadata.series.length) {
for (const seriesObj of bookMetadata.series) {
if (!seriesObj.name) continue
const matchingSeries = Database.libraryFilterData[libraryItemData.libraryId].series.find(se => se.name === seriesObj.name)
if (matchingSeries) {
const matchingSeriesId = await Database.getSeriesIdByName(libraryItemData.libraryId, seriesObj.name)
if (matchingSeriesId) {
bookObject.bookSeries.push({
seriesId: matchingSeries.id,
seriesId: matchingSeriesId,
sequence: seriesObj.sequence
})
} else {

View file

@ -4,6 +4,12 @@ const LibraryItem = require('../models/LibraryItem')
const globals = require('../utils/globals')
class LibraryItemScanData {
/**
* @typedef LibraryFileModifiedObject
* @property {LibraryItem.LibraryFileObject} old
* @property {LibraryItem.LibraryFileObject} new
*/
constructor(data) {
/** @type {string} */
this.libraryFolderId = data.libraryFolderId
@ -39,7 +45,7 @@ class LibraryItemScanData {
this.libraryFilesRemoved = []
/** @type {LibraryItem.LibraryFileObject[]} */
this.libraryFilesAdded = []
/** @type {LibraryItem.LibraryFileObject[]} */
/** @type {LibraryFileModifiedObject[]} */
this.libraryFilesModified = []
}
@ -77,9 +83,9 @@ class LibraryItemScanData {
return (this.audioLibraryFilesRemoved.length + this.audioLibraryFilesAdded.length + this.audioLibraryFilesModified.length) > 0
}
/** @type {LibraryItem.LibraryFileObject[]} */
/** @type {LibraryFileModifiedObject[]} */
get audioLibraryFilesModified() {
return this.libraryFilesModified.filter(lf => globals.SupportedAudioTypes.includes(lf.metadata.ext?.slice(1).toLowerCase() || ''))
return this.libraryFilesModified.filter(lf => globals.SupportedAudioTypes.includes(lf.old.metadata.ext?.slice(1).toLowerCase() || ''))
}
/** @type {LibraryItem.LibraryFileObject[]} */
@ -97,12 +103,42 @@ class LibraryItemScanData {
return this.libraryFiles.filter(lf => globals.SupportedAudioTypes.includes(lf.metadata.ext?.slice(1).toLowerCase() || ''))
}
/** @type {LibraryFileModifiedObject[]} */
get imageLibraryFilesModified() {
return this.libraryFilesModified.filter(lf => globals.SupportedImageTypes.includes(lf.old.metadata.ext?.slice(1).toLowerCase() || ''))
}
/** @type {LibraryItem.LibraryFileObject[]} */
get imageLibraryFilesRemoved() {
return this.libraryFilesRemoved.filter(lf => globals.SupportedImageTypes.includes(lf.metadata.ext?.slice(1).toLowerCase() || ''))
}
/** @type {LibraryItem.LibraryFileObject[]} */
get imageLibraryFilesAdded() {
return this.libraryFilesAdded.filter(lf => globals.SupportedImageTypes.includes(lf.metadata.ext?.slice(1).toLowerCase() || ''))
}
/** @type {LibraryItem.LibraryFileObject[]} */
get imageLibraryFiles() {
return this.libraryFiles.filter(lf => globals.SupportedImageTypes.includes(lf.metadata.ext?.slice(1).toLowerCase() || ''))
}
/** @type {import('../objects/files/LibraryFile')[]} */
/** @type {LibraryFileModifiedObject[]} */
get ebookLibraryFilesModified() {
return this.libraryFilesModified.filter(lf => globals.SupportedEbookTypes.includes(lf.old.metadata.ext?.slice(1).toLowerCase() || ''))
}
/** @type {LibraryItem.LibraryFileObject[]} */
get ebookLibraryFilesRemoved() {
return this.libraryFilesRemoved.filter(lf => globals.SupportedEbookTypes.includes(lf.metadata.ext?.slice(1).toLowerCase() || ''))
}
/** @type {LibraryItem.LibraryFileObject[]} */
get ebookLibraryFilesAdded() {
return this.libraryFilesAdded.filter(lf => globals.SupportedEbookTypes.includes(lf.metadata.ext?.slice(1).toLowerCase() || ''))
}
/** @type {LibraryItem.LibraryFileObject[]} */
get ebookLibraryFiles() {
return this.libraryFiles.filter(lf => globals.SupportedEbookTypes.includes(lf.metadata.ext?.slice(1).toLowerCase() || ''))
}
@ -153,7 +189,7 @@ class LibraryItemScanData {
existingLibraryItem[key] = this[key]
this.hasChanges = true
if (key === 'relPath') {
if (key === 'relPath' || key === 'path') {
this.hasPathChange = true
}
}
@ -202,8 +238,9 @@ class LibraryItemScanData {
this.hasChanges = true
} else {
libraryFilesAdded = libraryFilesAdded.filter(lf => lf !== matchingLibraryFile)
let existingLibraryFileBefore = structuredClone(existingLibraryFile)
if (this.compareUpdateLibraryFile(existingLibraryItem.path, existingLibraryFile, matchingLibraryFile, libraryScan)) {
this.libraryFilesModified.push(existingLibraryFile)
this.libraryFilesModified.push({old: existingLibraryFileBefore, new: existingLibraryFile})
this.hasChanges = true
}
}

View file

@ -21,10 +21,10 @@ class LibraryItemScanner {
* Scan single library item
*
* @param {string} libraryItemId
* @param {{relPath:string, path:string}} [renamedPaths] used by watcher when item folder was renamed
* @param {{relPath:string, path:string}} [updateLibraryItemDetails] used by watcher when item folder was renamed
* @returns {number} ScanResult
*/
async scanLibraryItem(libraryItemId, renamedPaths = null) {
async scanLibraryItem(libraryItemId, updateLibraryItemDetails = null) {
// TODO: Add task manager
const libraryItem = await Database.libraryItemModel.findByPk(libraryItemId)
if (!libraryItem) {
@ -32,11 +32,12 @@ class LibraryItemScanner {
return ScanResult.NOTHING
}
const libraryFolderId = updateLibraryItemDetails?.libraryFolderId || libraryItem.libraryFolderId
const library = await Database.libraryModel.findByPk(libraryItem.libraryId, {
include: {
model: Database.libraryFolderModel,
where: {
id: libraryItem.libraryFolderId
id: libraryFolderId
}
}
})
@ -51,11 +52,11 @@ class LibraryItemScanner {
const scanLogger = new ScanLogger()
scanLogger.verbose = true
scanLogger.setData('libraryItem', renamedPaths?.relPath || libraryItem.relPath)
scanLogger.setData('libraryItem', updateLibraryItemDetails?.relPath || libraryItem.relPath)
const libraryItemPath = renamedPaths?.path || fileUtils.filePathToPOSIX(libraryItem.path)
const libraryItemPath = updateLibraryItemDetails?.path || fileUtils.filePathToPOSIX(libraryItem.path)
const folder = library.libraryFolders[0]
const libraryItemScanData = await this.getLibraryItemScanData(libraryItemPath, library, folder, false)
const libraryItemScanData = await this.getLibraryItemScanData(libraryItemPath, library, folder, updateLibraryItemDetails?.isFile || false)
let libraryItemDataUpdated = await libraryItemScanData.checkLibraryItemData(libraryItem, scanLogger)

View file

@ -154,7 +154,11 @@ class LibraryScanner {
let libraryItemData = libraryItemDataFound.find(lid => lid.path === existingLibraryItem.path)
if (!libraryItemData) {
// Fallback to finding matching library item with matching inode value
libraryItemData = libraryItemDataFound.find(lid => lid.ino === existingLibraryItem.ino)
libraryItemData = libraryItemDataFound.find(lid =>
ItemToItemInoMatch(lid, existingLibraryItem) ||
ItemToFileInoMatch(lid, existingLibraryItem) ||
ItemToFileInoMatch(existingLibraryItem, lid)
)
if (libraryItemData) {
libraryScan.addLog(LogLevel.INFO, `Library item with path "${existingLibraryItem.path}" was not found, but library item inode "${existingLibraryItem.ino}" was found at path "${libraryItemData.path}"`)
}
@ -522,22 +526,25 @@ class LibraryScanner {
// Check if book dir group is already an item
let existingLibraryItem = await Database.libraryItemModel.findOneOld({
libraryId: library.id,
path: potentialChildDirs
})
let renamedPaths = {}
let updatedLibraryItemDetails = {}
if (!existingLibraryItem) {
const dirIno = await fileUtils.getIno(fullPath)
existingLibraryItem = await Database.libraryItemModel.findOneOld({
ino: dirIno
})
const isSingleMedia = isSingleMediaFile(fileUpdateGroup, itemDir)
existingLibraryItem =
await findLibraryItemByItemToItemInoMatch(library.id, fullPath) ||
await findLibraryItemByItemToFileInoMatch(library.id, fullPath, isSingleMedia) ||
await findLibraryItemByFileToItemInoMatch(library.id, fullPath, isSingleMedia, fileUpdateGroup[itemDir])
if (existingLibraryItem) {
Logger.debug(`[LibraryScanner] scanFolderUpdates: Library item found by inode value=${dirIno}. "${existingLibraryItem.relPath} => ${itemDir}"`)
// Update library item paths for scan
existingLibraryItem.path = fullPath
existingLibraryItem.relPath = itemDir
renamedPaths.path = fullPath
renamedPaths.relPath = itemDir
updatedLibraryItemDetails.path = fullPath
updatedLibraryItemDetails.relPath = itemDir
updatedLibraryItemDetails.libraryFolderId = folder.id
updatedLibraryItemDetails.isFile = isSingleMedia
}
}
if (existingLibraryItem) {
@ -554,10 +561,9 @@ class LibraryScanner {
continue
}
}
// Scan library item for updates
Logger.debug(`[LibraryScanner] Folder update for relative path "${itemDir}" is in library item "${existingLibraryItem.media.metadata.title}" - scan for updates`)
itemGroupingResults[itemDir] = await LibraryItemScanner.scanLibraryItem(existingLibraryItem.id, renamedPaths)
itemGroupingResults[itemDir] = await LibraryItemScanner.scanLibraryItem(existingLibraryItem.id, updatedLibraryItemDetails)
continue
} else if (library.settings.audiobooksOnly && !hasAudioFiles(fileUpdateGroup, itemDir)) {
Logger.debug(`[LibraryScanner] Folder update for relative path "${itemDir}" has no audio files`)
@ -594,6 +600,14 @@ class LibraryScanner {
}
module.exports = new LibraryScanner()
/**
 * True when libraryItem1 is a single-file item and its inode matches the
 * inode of any library file belonging to libraryItem2.
 *
 * @param {Object} libraryItem1 - item expected to be a single media file (reads `isFile` and `ino`)
 * @param {Object} libraryItem2 - item whose `libraryFiles` inodes are searched
 * @returns {boolean}
 */
function ItemToFileInoMatch(libraryItem1, libraryItem2) {
  const targetIno = libraryItem1.ino
  const fileHasTargetIno = (libraryFile) => libraryFile.ino === targetIno
  return libraryItem1.isFile && libraryItem2.libraryFiles.some(fileHasTargetIno)
}
/**
 * True when both library items share the same inode value.
 *
 * @param {Object} libraryItem1 - item with an `ino` property
 * @param {Object} libraryItem2 - item with an `ino` property
 * @returns {boolean}
 */
function ItemToItemInoMatch(libraryItem1, libraryItem2) {
  const { ino: firstIno } = libraryItem1
  const { ino: secondIno } = libraryItem2
  return firstIno === secondIno
}
function hasAudioFiles(fileUpdateGroup, itemDir) {
return isSingleMediaFile(fileUpdateGroup, itemDir) ?
scanUtils.checkFilepathIsAudioFile(fileUpdateGroup[itemDir]) :
@ -603,3 +617,55 @@ function hasAudioFiles(fileUpdateGroup, itemDir) {
/**
 * A file update group entry represents a single media file (rather than an
 * item directory) when the group key maps to itself.
 *
 * @param {Object} fileUpdateGroup - map of item dir name to its file list, or of a filename to itself
 * @param {string} itemDir - key to check
 * @returns {boolean}
 */
function isSingleMediaFile(fileUpdateGroup, itemDir) {
  const groupValue = fileUpdateGroup[itemDir]
  return groupValue === itemDir
}
/**
 * Look up a library item in the given library whose own inode matches the
 * inode of the entry at fullPath (detects an item folder that was renamed/moved).
 *
 * @param {string} libraryId
 * @param {string} fullPath - absolute path of the item on disk
 * @returns {Promise<Object|null>} matching library item, or null when the
 *   inode could not be read or no item matched
 */
async function findLibraryItemByItemToItemInoMatch(libraryId, fullPath) {
  const inode = await fileUtils.getIno(fullPath)
  if (!inode) return null

  const matchedLibraryItem = await Database.libraryItemModel.findOneOld({
    libraryId,
    ino: inode
  })
  if (matchedLibraryItem) {
    Logger.debug(`[LibraryScanner] Found library item with matching inode "${inode}" at path "${matchedLibraryItem.path}"`)
  }
  return matchedLibraryItem
}
/**
 * Look up a library item in the given library that has a library file whose
 * inode matches the inode of the single media file at fullPath. Detects a
 * media file that was moved out of an item folder into another folder.
 *
 * @param {string} libraryId
 * @param {string} fullPath - absolute path of the media file on disk
 * @param {boolean} isSingleMedia - only single media files are considered; otherwise returns null
 * @returns {Promise<Object|null>} matching library item, or null when not applicable,
 *   the inode could not be read, or no item matched
 */
async function findLibraryItemByItemToFileInoMatch(libraryId, fullPath, isSingleMedia) {
  if (!isSingleMedia) return null
  // check if it was moved from another folder by comparing the ino to the library files
  const ino = await fileUtils.getIno(fullPath)
  if (!ino) return null
  // Query counts libraryFiles JSON entries whose "ino" equals :inode (SQLite json_each);
  // json_valid guards against malformed rows in the JSON column
  const existingLibraryItem = await Database.libraryItemModel.findOneOld([
    {
      libraryId: libraryId
    },
    sequelize.where(sequelize.literal('(SELECT count(*) FROM json_each(libraryFiles) WHERE json_valid(json_each.value) AND json_each.value->>"$.ino" = :inode)'), {
      [sequelize.Op.gt]: 0
    })
  ], {
    inode: ino
  })
  if (existingLibraryItem)
    Logger.debug(`[LibraryScanner] Found library item with a library file matching inode "${ino}" at path "${existingLibraryItem.path}"`)
  return existingLibraryItem
}
/**
 * Look up a library item in the given library whose own inode matches the
 * inode of one of the scanned files inside the directory at fullPath. Detects
 * a single media file item that was moved from the library root folder into a
 * directory of its own.
 *
 * @param {string} libraryId
 * @param {string} fullPath - absolute path of the item directory on disk
 * @param {boolean} isSingleMedia - single media files are not considered; returns null for them
 * @param {string[]} itemFiles - filenames found inside the item directory
 * @returns {Promise<Object|null>} matching library item, or null when not applicable,
 *   no inode could be read, or no item matched
 */
async function findLibraryItemByFileToItemInoMatch(libraryId, fullPath, isSingleMedia, itemFiles) {
  if (isSingleMedia) return null
  // check if it was moved from the root folder by comparing the ino to the ino of the scanned files
  // Stat all files in parallel (independent I/O) instead of awaiting one-by-one,
  // then drop files whose inode could not be read (getIno resolves to null on error)
  const itemFileInos = (await Promise.all(itemFiles.map((itemFile) => fileUtils.getIno(Path.posix.join(fullPath, itemFile))))).filter((ino) => !!ino)
  if (!itemFileInos.length) return null
  const existingLibraryItem = await Database.libraryItemModel.findOneOld({
    libraryId: libraryId,
    ino: {
      [sequelize.Op.in]: itemFileInos
    }
  })
  if (existingLibraryItem) {
    Logger.debug(`[LibraryScanner] Found library item with inode matching one of "${itemFileInos.join(',')}" at path "${existingLibraryItem.path}"`)
  }
  return existingLibraryItem
}

View file

@ -71,7 +71,7 @@ class PodcastScanner {
// Update audio files that were modified
if (libraryItemData.audioLibraryFilesModified.length) {
let scannedAudioFiles = await AudioFileScanner.executeMediaFileScans(existingLibraryItem.mediaType, libraryItemData, libraryItemData.audioLibraryFilesModified)
let scannedAudioFiles = await AudioFileScanner.executeMediaFileScans(existingLibraryItem.mediaType, libraryItemData, libraryItemData.audioLibraryFilesModified.map(lf => lf.new))
for (const podcastEpisode of existingPodcastEpisodes) {
let matchedScannedAudioFile = scannedAudioFiles.find(saf => saf.metadata.path === podcastEpisode.audioFile.metadata.path)
@ -132,11 +132,25 @@ class PodcastScanner {
let hasMediaChanges = false
// Check if cover was removed
if (media.coverPath && !libraryItemData.imageLibraryFiles.some(lf => lf.metadata.path === media.coverPath)) {
if (media.coverPath && libraryItemData.imageLibraryFilesRemoved.some(lf => lf.metadata.path === media.coverPath)) {
media.coverPath = null
hasMediaChanges = true
}
// Update cover if it was modified
if (media.coverPath && libraryItemData.imageLibraryFilesModified.length) {
let coverMatch = libraryItemData.imageLibraryFilesModified.find(iFile => iFile.old.metadata.path === media.coverPath)
if (coverMatch) {
const coverPath = coverMatch.new.metadata.path
if (coverPath !== media.coverPath) {
libraryScan.addLog(LogLevel.DEBUG, `Updating podcast cover "${media.coverPath}" => "${coverPath}" for podcast "${media.title}"`)
media.coverPath = coverPath
media.changed('coverPath', true)
hasMediaChanges = true
}
}
}
// Check if cover is not set and image files were found
if (!media.coverPath && libraryItemData.imageLibraryFiles.length) {
// Prefer using a cover image with the name "cover" otherwise use the first image

View file

@ -359,7 +359,7 @@ class Scanner {
}
offset += limit
hasMoreChunks = libraryItems.length < limit
hasMoreChunks = libraryItems.length === limit
let oldLibraryItems = libraryItems.map(li => Database.libraryItemModel.getOldLibraryItem(li))
const shouldContinue = await this.matchLibraryItemsChunk(library, oldLibraryItems, libraryScan)

View file

@ -35,9 +35,14 @@ async function writeConcatFile(tracks, outputPath, startTime = 0) {
return line
})
var inputstr = trackPaths.join('\n\n')
await fs.writeFile(outputPath, inputstr)
return firstTrackStartTime
try {
await fs.writeFile(outputPath, inputstr)
return firstTrackStartTime
} catch (error) {
Logger.error(`[ffmpegHelpers] Failed to write stream concat file at "${outputPath}"`, error)
return null
}
}
module.exports.writeConcatFile = writeConcatFile
@ -104,7 +109,8 @@ module.exports.downloadPodcastEpisode = (podcastEpisodeDownload) => {
const ffmpeg = Ffmpeg(response.data)
ffmpeg.addOption('-loglevel debug') // Debug logs printed on error
ffmpeg.outputOptions(
'-c', 'copy',
'-c:a', 'copy',
'-map', '0:a',
'-metadata', 'podcast=1'
)

View file

@ -7,13 +7,12 @@ const rra = require('../libs/recursiveReaddirAsync')
const Logger = require('../Logger')
const { AudioMimeType } = require('./constants')
/**
* Make sure folder separator is POSIX for Windows file paths. e.g. "C:\Users\Abs" becomes "C:/Users/Abs"
*
* @param {String} path - Ugly file path
* @return {String} Pretty posix file path
*/
* Make sure folder separator is POSIX for Windows file paths. e.g. "C:\Users\Abs" becomes "C:/Users/Abs"
*
* @param {String} path - Ugly file path
* @return {String} Pretty posix file path
*/
const filePathToPOSIX = (path) => {
if (!global.isWin || !path) return path
return path.replace(/\\/g, '/')
@ -22,9 +21,9 @@ module.exports.filePathToPOSIX = filePathToPOSIX
/**
* Check path is a child of or equal to another path
*
* @param {string} parentPath
* @param {string} childPath
*
* @param {string} parentPath
* @param {string} childPath
* @returns {boolean}
*/
function isSameOrSubPath(parentPath, childPath) {
@ -33,8 +32,8 @@ function isSameOrSubPath(parentPath, childPath) {
if (parentPath === childPath) return true
const relativePath = Path.relative(parentPath, childPath)
return (
relativePath === '' // Same path (e.g. parentPath = '/a/b/', childPath = '/a/b')
|| !relativePath.startsWith('..') && !Path.isAbsolute(relativePath) // Sub path
relativePath === '' || // Same path (e.g. parentPath = '/a/b/', childPath = '/a/b')
(!relativePath.startsWith('..') && !Path.isAbsolute(relativePath)) // Sub path
)
}
module.exports.isSameOrSubPath = isSameOrSubPath
@ -59,7 +58,7 @@ async function getFileTimestampsWithIno(path) {
ino: String(stat.ino)
}
} catch (err) {
Logger.error('[fileUtils] Failed to getFileTimestampsWithIno', err)
Logger.error(`[fileUtils] Failed to getFileTimestampsWithIno for path "${path}"`, err)
return false
}
}
@ -67,8 +66,8 @@ module.exports.getFileTimestampsWithIno = getFileTimestampsWithIno
/**
* Get file size
*
* @param {string} path
*
* @param {string} path
* @returns {Promise<number>}
*/
module.exports.getFileSize = async (path) => {
@ -77,8 +76,8 @@ module.exports.getFileSize = async (path) => {
/**
* Get file mtimeMs
*
* @param {string} path
*
* @param {string} path
* @returns {Promise<number>} epoch timestamp
*/
module.exports.getFileMTimeMs = async (path) => {
@ -91,8 +90,8 @@ module.exports.getFileMTimeMs = async (path) => {
}
/**
*
* @param {string} filepath
*
* @param {string} filepath
* @returns {boolean}
*/
async function checkPathIsFile(filepath) {
@ -106,16 +105,19 @@ async function checkPathIsFile(filepath) {
module.exports.checkPathIsFile = checkPathIsFile
function getIno(path) {
return fs.stat(path, { bigint: true }).then((data => String(data.ino))).catch((err) => {
Logger.error('[Utils] Failed to get ino for path', path, err)
return null
})
return fs
.stat(path, { bigint: true })
.then((data) => String(data.ino))
.catch((err) => {
Logger.error('[Utils] Failed to get ino for path', path, err)
return null
})
}
module.exports.getIno = getIno
/**
* Read contents of file
* @param {string} path
* @param {string} path
* @returns {string}
*/
async function readTextFile(path) {
@ -144,8 +146,8 @@ module.exports.bytesPretty = bytesPretty
/**
* Get array of files inside dir
* @param {string} path
* @param {string} [relPathToReplace]
* @param {string} path
* @param {string} [relPathToReplace]
* @returns {{name:string, path:string, dirpath:string, reldirpath:string, fullpath:string, extension:string, deep:number}[]}
*/
async function recurseFiles(path, relPathToReplace = null) {
@ -177,55 +179,58 @@ async function recurseFiles(path, relPathToReplace = null) {
const directoriesToIgnore = []
list = list.filter((item) => {
if (item.error) {
Logger.error(`[fileUtils] Recurse files file "${item.fullname}" has error`, item.error)
return false
}
list = list
.filter((item) => {
if (item.error) {
Logger.error(`[fileUtils] Recurse files file "${item.fullname}" has error`, item.error)
return false
}
const relpath = item.fullname.replace(relPathToReplace, '')
let reldirname = Path.dirname(relpath)
if (reldirname === '.') reldirname = ''
const dirname = Path.dirname(item.fullname)
const relpath = item.fullname.replace(relPathToReplace, '')
let reldirname = Path.dirname(relpath)
if (reldirname === '.') reldirname = ''
const dirname = Path.dirname(item.fullname)
// Directory has a file named ".ignore" flag directory and ignore
if (item.name === '.ignore' && reldirname && reldirname !== '.' && !directoriesToIgnore.includes(dirname)) {
Logger.debug(`[fileUtils] .ignore found - ignoring directory "${reldirname}"`)
directoriesToIgnore.push(dirname)
return false
}
// Directory has a file named ".ignore" flag directory and ignore
if (item.name === '.ignore' && reldirname && reldirname !== '.' && !directoriesToIgnore.includes(dirname)) {
Logger.debug(`[fileUtils] .ignore found - ignoring directory "${reldirname}"`)
directoriesToIgnore.push(dirname)
return false
}
if (item.extension === '.part') {
Logger.debug(`[fileUtils] Ignoring .part file "${relpath}"`)
return false
}
if (item.extension === '.part') {
Logger.debug(`[fileUtils] Ignoring .part file "${relpath}"`)
return false
}
// Ignore any file if a directory or the filename starts with "."
if (relpath.split('/').find(p => p.startsWith('.'))) {
Logger.debug(`[fileUtils] Ignoring path has . "${relpath}"`)
return false
}
// Ignore any file if a directory or the filename starts with "."
if (relpath.split('/').find((p) => p.startsWith('.'))) {
Logger.debug(`[fileUtils] Ignoring path has . "${relpath}"`)
return false
}
return true
}).filter(item => {
// Filter out items in ignore directories
if (directoriesToIgnore.some(dir => item.fullname.startsWith(dir))) {
Logger.debug(`[fileUtils] Ignoring path in dir with .ignore "${item.fullname}"`)
return false
}
return true
}).map((item) => {
var isInRoot = (item.path + '/' === relPathToReplace)
return {
name: item.name,
path: item.fullname.replace(relPathToReplace, ''),
dirpath: item.path,
reldirpath: isInRoot ? '' : item.path.replace(relPathToReplace, ''),
fullpath: item.fullname,
extension: item.extension,
deep: item.deep
}
})
return true
})
.filter((item) => {
// Filter out items in ignore directories
if (directoriesToIgnore.some((dir) => item.fullname.startsWith(dir))) {
Logger.debug(`[fileUtils] Ignoring path in dir with .ignore "${item.fullname}"`)
return false
}
return true
})
.map((item) => {
var isInRoot = item.path + '/' === relPathToReplace
return {
name: item.name,
path: item.fullname.replace(relPathToReplace, ''),
dirpath: item.path,
reldirpath: isInRoot ? '' : item.path.replace(relPathToReplace, ''),
fullpath: item.fullname,
extension: item.extension,
deep: item.deep
}
})
// Sort from least deep to most
list.sort((a, b) => a.deep - b.deep)
@ -237,8 +242,8 @@ module.exports.recurseFiles = recurseFiles
/**
* Download file from web to local file system
* Uses SSRF filter to prevent internal URLs
*
* @param {string} url
*
* @param {string} url
* @param {string} filepath path to download the file to
* @param {Function} [contentTypeFilter] validate content type before writing
* @returns {Promise}
@ -251,33 +256,35 @@ module.exports.downloadFile = (url, filepath, contentTypeFilter = null) => {
method: 'GET',
responseType: 'stream',
timeout: 30000,
httpAgent: ssrfFilter(url),
httpsAgent: ssrfFilter(url)
}).then((response) => {
// Validate content type
if (contentTypeFilter && !contentTypeFilter?.(response.headers?.['content-type'])) {
return reject(new Error(`Invalid content type "${response.headers?.['content-type'] || ''}"`))
}
// Write to filepath
const writer = fs.createWriteStream(filepath)
response.data.pipe(writer)
writer.on('finish', resolve)
writer.on('error', reject)
}).catch((err) => {
Logger.error(`[fileUtils] Failed to download file "${filepath}"`, err)
reject(err)
httpAgent: global.DisableSsrfRequestFilter ? null : ssrfFilter(url),
httpsAgent: global.DisableSsrfRequestFilter ? null : ssrfFilter(url)
})
.then((response) => {
// Validate content type
if (contentTypeFilter && !contentTypeFilter?.(response.headers?.['content-type'])) {
return reject(new Error(`Invalid content type "${response.headers?.['content-type'] || ''}"`))
}
// Write to filepath
const writer = fs.createWriteStream(filepath)
response.data.pipe(writer)
writer.on('finish', resolve)
writer.on('error', reject)
})
.catch((err) => {
Logger.error(`[fileUtils] Failed to download file "${filepath}"`, err)
reject(err)
})
})
}
/**
* Download image file from web to local file system
* Response header must have content-type of image/ (excluding svg)
*
* @param {string} url
* @param {string} filepath
*
* @param {string} url
* @param {string} filepath
* @returns {Promise}
*/
module.exports.downloadImageFile = (url, filepath) => {
@ -350,22 +357,28 @@ module.exports.getAudioMimeTypeFromExtname = (extname) => {
module.exports.removeFile = (path) => {
if (!path) return false
return fs.remove(path).then(() => true).catch((error) => {
Logger.error(`[fileUtils] Failed remove file "${path}"`, error)
return false
})
return fs
.remove(path)
.then(() => true)
.catch((error) => {
Logger.error(`[fileUtils] Failed remove file "${path}"`, error)
return false
})
}
module.exports.encodeUriPath = (path) => {
const uri = new URL(path, "file://")
const uri = new URL('/', 'file://')
// we assign the path here to assure that URL control characters like # are
// actually interpreted as part of the URL path
uri.pathname = path
return uri.pathname
}
/**
* Check if directory is writable.
* This method is necessary because fs.access(directory, fs.constants.W_OK) does not work on Windows
*
* @param {string} directory
*
* @param {string} directory
* @returns {Promise<boolean>}
*/
module.exports.isWritable = async (directory) => {
@ -382,7 +395,7 @@ module.exports.isWritable = async (directory) => {
/**
* Get Windows drives as array e.g. ["C:/", "F:/"]
*
*
* @returns {Promise<string[]>}
*/
module.exports.getWindowsDrives = async () => {
@ -395,7 +408,11 @@ module.exports.getWindowsDrives = async () => {
reject(error)
return
}
let drives = stdout?.split(/\r?\n/).map(line => line.trim()).filter(line => line).slice(1)
let drives = stdout
?.split(/\r?\n/)
.map((line) => line.trim())
.filter((line) => line)
.slice(1)
const validDrives = []
for (const drive of drives) {
let drivepath = drive + '/'
@ -412,33 +429,35 @@ module.exports.getWindowsDrives = async () => {
/**
* Get array of directory paths in a directory
*
* @param {string} dirPath
*
* @param {string} dirPath
* @param {number} level
* @returns {Promise<{ path:string, dirname:string, level:number }[]>}
*/
module.exports.getDirectoriesInPath = async (dirPath, level) => {
try {
const paths = await fs.readdir(dirPath)
let dirs = await Promise.all(paths.map(async dirname => {
const fullPath = Path.join(dirPath, dirname)
let dirs = await Promise.all(
paths.map(async (dirname) => {
const fullPath = Path.join(dirPath, dirname)
const lstat = await fs.lstat(fullPath).catch((error) => {
Logger.debug(`Failed to lstat "${fullPath}"`, error)
return null
const lstat = await fs.lstat(fullPath).catch((error) => {
Logger.debug(`Failed to lstat "${fullPath}"`, error)
return null
})
if (!lstat?.isDirectory()) return null
return {
path: this.filePathToPOSIX(fullPath),
dirname,
level
}
})
if (!lstat?.isDirectory()) return null
return {
path: this.filePathToPOSIX(fullPath),
dirname,
level
}
}))
dirs = dirs.filter(d => d)
)
dirs = dirs.filter((d) => d)
return dirs
} catch (error) {
Logger.error('Failed to readdir', dirPath, error)
return []
}
}
}

View file

@ -114,7 +114,9 @@ module.exports.reqSupportsWebp = (req) => {
module.exports.areEquivalent = areEquivalent
module.exports.copyValue = (val) => {
if (!val) return val === false ? false : null
if (val === undefined || val === '') return null
else if (!val) return val
if (!this.isObject(val)) return val
if (Array.isArray(val)) {

View file

@ -18,7 +18,10 @@ async function extractFileFromEpub(epubPath, filepath) {
Logger.error(`[parseEpubMetadata] Failed to extract ${filepath} from epub at "${epubPath}"`, error)
})
const filedata = data?.toString('utf8')
await zip.close()
await zip.close().catch((error) => {
Logger.error(`[parseEpubMetadata] Failed to close zip`, error)
})
return filedata
}
@ -68,6 +71,9 @@ async function parse(ebookFile) {
Logger.debug(`Parsing metadata from epub at "${epubPath}"`)
// Entrypoint of the epub that contains the filepath to the package document (opf file)
const containerJson = await extractXmlToJson(epubPath, 'META-INF/container.xml')
if (!containerJson) {
return null
}
// Get package document opf filepath from container.xml
const packageDocPath = containerJson.container?.rootfiles?.[0]?.rootfile?.[0]?.$?.['full-path']

View file

@ -220,8 +220,8 @@ module.exports.parsePodcastRssFeedXml = async (xml, excludeEpisodeMetadata = fal
/**
* Get podcast RSS feed as JSON
* Uses SSRF filter to prevent internal URLs
*
* @param {string} feedUrl
*
* @param {string} feedUrl
* @param {boolean} [excludeEpisodeMetadata=false]
* @returns {Promise}
*/
@ -234,37 +234,38 @@ module.exports.getPodcastFeed = (feedUrl, excludeEpisodeMetadata = false) => {
timeout: 12000,
responseType: 'arraybuffer',
headers: { Accept: 'application/rss+xml, application/xhtml+xml, application/xml, */*;q=0.8' },
httpAgent: ssrfFilter(feedUrl),
httpsAgent: ssrfFilter(feedUrl)
}).then(async (data) => {
// Adding support for ios-8859-1 encoded RSS feeds.
// See: https://github.com/advplyr/audiobookshelf/issues/1489
const contentType = data.headers?.['content-type'] || '' // e.g. text/xml; charset=iso-8859-1
if (contentType.toLowerCase().includes('iso-8859-1')) {
data.data = data.data.toString('latin1')
} else {
data.data = data.data.toString()
}
if (!data?.data) {
Logger.error(`[podcastUtils] getPodcastFeed: Invalid podcast feed request response (${feedUrl})`)
return null
}
Logger.debug(`[podcastUtils] getPodcastFeed for "${feedUrl}" success - parsing xml`)
const payload = await this.parsePodcastRssFeedXml(data.data, excludeEpisodeMetadata)
if (!payload) {
return null
}
// RSS feed may be a private RSS feed
payload.podcast.metadata.feedUrl = feedUrl
return payload.podcast
}).catch((error) => {
Logger.error('[podcastUtils] getPodcastFeed Error', error)
return null
httpAgent: global.DisableSsrfRequestFilter ? null : ssrfFilter(feedUrl),
httpsAgent: global.DisableSsrfRequestFilter ? null : ssrfFilter(feedUrl)
})
.then(async (data) => {
// Adding support for ios-8859-1 encoded RSS feeds.
// See: https://github.com/advplyr/audiobookshelf/issues/1489
const contentType = data.headers?.['content-type'] || '' // e.g. text/xml; charset=iso-8859-1
if (contentType.toLowerCase().includes('iso-8859-1')) {
data.data = data.data.toString('latin1')
} else {
data.data = data.data.toString()
}
if (!data?.data) {
Logger.error(`[podcastUtils] getPodcastFeed: Invalid podcast feed request response (${feedUrl})`)
return null
}
Logger.debug(`[podcastUtils] getPodcastFeed for "${feedUrl}" success - parsing xml`)
const payload = await this.parsePodcastRssFeedXml(data.data, excludeEpisodeMetadata)
if (!payload) {
return null
}
// RSS feed may be a private RSS feed
payload.podcast.metadata.feedUrl = feedUrl
return payload.podcast
})
.catch((error) => {
Logger.error('[podcastUtils] getPodcastFeed Error', error)
return null
})
}
// Return array of episodes ordered by closest match (Levenshtein distance of 6 or less)
@ -283,7 +284,7 @@ module.exports.findMatchingEpisodesInFeed = (feed, searchTitle) => {
}
const matches = []
feed.episodes.forEach(ep => {
feed.episodes.forEach((ep) => {
if (!ep.title) return
const epTitle = ep.title.toLowerCase().trim()

View file

@ -20,7 +20,7 @@ function tryGrabBitRate(stream, all_streams, total_bit_rate) {
var tagDuration = stream.tags.DURATION || stream.tags['DURATION-eng'] || stream.tags['DURATION_eng']
var tagBytes = stream.tags.NUMBER_OF_BYTES || stream.tags['NUMBER_OF_BYTES-eng'] || stream.tags['NUMBER_OF_BYTES_eng']
if (tagDuration && tagBytes && !isNaN(tagDuration) && !isNaN(tagBytes)) {
var bps = Math.floor(Number(tagBytes) * 8 / Number(tagDuration))
var bps = Math.floor((Number(tagBytes) * 8) / Number(tagDuration))
if (bps && !isNaN(bps)) {
return bps
}
@ -33,7 +33,7 @@ function tryGrabBitRate(stream, all_streams, total_bit_rate) {
estimated_bit_rate -= Number(stream.bit_rate)
}
})
if (!all_streams.find(s => s.codec_type === 'audio' && s.bit_rate && Number(s.bit_rate) > estimated_bit_rate)) {
if (!all_streams.find((s) => s.codec_type === 'audio' && s.bit_rate && Number(s.bit_rate) > estimated_bit_rate)) {
return estimated_bit_rate
} else {
return total_bit_rate
@ -73,7 +73,7 @@ function tryGrabChannelLayout(stream) {
function tryGrabTags(stream, ...tags) {
if (!stream.tags) return null
for (let i = 0; i < tags.length; i++) {
const tagKey = Object.keys(stream.tags).find(t => t.toLowerCase() === tags[i].toLowerCase())
const tagKey = Object.keys(stream.tags).find((t) => t.toLowerCase() === tags[i].toLowerCase())
const value = stream.tags[tagKey]
if (value && value.trim()) return value.trim()
}
@ -101,7 +101,7 @@ function parseMediaStreamInfo(stream, all_streams, total_bit_rate) {
if (info.type === 'video') {
info.profile = stream.profile || null
info.is_avc = (stream.is_avc !== '0' && stream.is_avc !== 'false')
info.is_avc = stream.is_avc !== '0' && stream.is_avc !== 'false'
info.pix_fmt = stream.pix_fmt || null
info.frame_rate = tryGrabFrameRate(stream)
info.width = !isNaN(stream.width) ? Number(stream.width) : null
@ -123,7 +123,6 @@ function isNullOrNaN(val) {
return val === null || isNaN(val)
}
/* Example chapter object
* {
"id": 71,
@ -137,23 +136,28 @@ function isNullOrNaN(val) {
}
* }
*/
function parseChapters(chapters) {
if (!chapters) return []
let index = 0
return chapters.map(chap => {
let title = chap['TAG:title'] || chap.title || ''
if (!title && chap.tags?.title) title = chap.tags.title
function parseChapters(_chapters) {
if (!_chapters) return []
const timebase = chap.time_base?.includes('/') ? Number(chap.time_base.split('/')[1]) : 1
const start = !isNullOrNaN(chap.start_time) ? Number(chap.start_time) : !isNullOrNaN(chap.start) ? Number(chap.start) / timebase : 0
const end = !isNullOrNaN(chap.end_time) ? Number(chap.end_time) : !isNullOrNaN(chap.end) ? Number(chap.end) / timebase : 0
return {
id: index++,
start,
end,
title
}
})
return _chapters
.map((chap) => {
let title = chap['TAG:title'] || chap.title || ''
if (!title && chap.tags?.title) title = chap.tags.title
const timebase = chap.time_base?.includes('/') ? Number(chap.time_base.split('/')[1]) : 1
const start = !isNullOrNaN(chap.start_time) ? Number(chap.start_time) : !isNullOrNaN(chap.start) ? Number(chap.start) / timebase : 0
const end = !isNullOrNaN(chap.end_time) ? Number(chap.end_time) : !isNullOrNaN(chap.end) ? Number(chap.end) / timebase : 0
return {
start,
end,
title
}
})
.sort((a, b) => a.start - b.start)
.map((chap, index) => {
chap.id = index
return chap
})
}
function parseTags(format, verbose) {
@ -210,7 +214,7 @@ function parseTags(format, verbose) {
file_tag_movement: tryGrabTags(format, 'movement', 'mvin'),
file_tag_genre1: tryGrabTags(format, 'tmp_genre1', 'genre1'),
file_tag_genre2: tryGrabTags(format, 'tmp_genre2', 'genre2'),
file_tag_overdrive_media_marker: tryGrabTags(format, 'OverDrive MediaMarkers'),
file_tag_overdrive_media_marker: tryGrabTags(format, 'OverDrive MediaMarkers')
}
for (const key in tags) {
if (!tags[key]) {
@ -224,7 +228,7 @@ function parseTags(format, verbose) {
function getDefaultAudioStream(audioStreams) {
if (!audioStreams || !audioStreams.length) return null
if (audioStreams.length === 1) return audioStreams[0]
var defaultStream = audioStreams.find(a => a.is_default)
var defaultStream = audioStreams.find((a) => a.is_default)
if (!defaultStream) return audioStreams[0]
return defaultStream
}
@ -248,9 +252,9 @@ function parseProbeData(data, verbose = false) {
cleanedData.rawTags = format.tags
}
const cleaned_streams = streams.map(s => parseMediaStreamInfo(s, streams, cleanedData.bit_rate))
cleanedData.video_stream = cleaned_streams.find(s => s.type === 'video')
const audioStreams = cleaned_streams.filter(s => s.type === 'audio')
const cleaned_streams = streams.map((s) => parseMediaStreamInfo(s, streams, cleanedData.bit_rate))
cleanedData.video_stream = cleaned_streams.find((s) => s.type === 'video')
const audioStreams = cleaned_streams.filter((s) => s.type === 'audio')
cleanedData.audio_stream = getDefaultAudioStream(audioStreams)
if (cleanedData.audio_stream && cleanedData.video_stream) {
@ -280,8 +284,8 @@ function parseProbeData(data, verbose = false) {
/**
* Run ffprobe on audio filepath
* @param {string} filepath
* @param {boolean} [verbose=false]
* @param {string} filepath
* @param {boolean} [verbose=false]
* @returns {import('../scanner/MediaProbeData')|{error:string}}
*/
function probe(filepath, verbose = false) {
@ -290,7 +294,7 @@ function probe(filepath, verbose = false) {
}
return ffprobe(filepath)
.then(raw => {
.then((raw) => {
if (raw.error) {
return {
error: raw.error.string
@ -318,7 +322,7 @@ module.exports.probe = probe
/**
* Ffprobe for audio file path
*
*
* @param {string} filepath
* @returns {Object} ffprobe json output
*/
@ -327,11 +331,10 @@ function rawProbe(filepath) {
ffprobe.FFPROBE_PATH = process.env.FFPROBE_PATH
}
return ffprobe(filepath)
.catch((err) => {
return {
error: err
}
})
return ffprobe(filepath).catch((err) => {
return {
error: err
}
})
}
module.exports.rawProbe = rawProbe
module.exports.rawProbe = rawProbe

View file

@ -3,45 +3,61 @@ const Database = require('../../Database')
module.exports = {
/**
* Get authors with count of num books
* @param {string} libraryId
* @returns {{id:string, name:string, count:number}}
* Get authors total count
*
* @param {string} libraryId
* @returns {Promise<number>} count
*/
async getAuthorsWithCount(libraryId) {
const authors = await Database.authorModel.findAll({
where: [
{
libraryId
},
Sequelize.where(Sequelize.literal('count'), {
[Sequelize.Op.gt]: 0
})
],
attributes: [
'id',
'name',
[Sequelize.literal('(SELECT count(*) FROM bookAuthors ba WHERE ba.authorId = author.id)'), 'count']
],
order: [
['count', 'DESC']
]
async getAuthorsTotalCount(libraryId) {
const authorsCount = await Database.authorModel.count({
where: {
libraryId: libraryId
}
})
return authors.map(au => {
return authorsCount
},
/**
* Get authors with count of num books
*
* @param {string} libraryId
* @param {number} limit
* @returns {Promise<{id:string, name:string, count:number}>}
*/
async getAuthorsWithCount(libraryId, limit) {
const authors = await Database.bookAuthorModel.findAll({
include: [
{
model: Database.authorModel,
as: 'author', // Use the correct alias as defined in your associations
attributes: ['name'],
where: {
libraryId: libraryId
}
}
],
attributes: ['authorId', [Sequelize.fn('COUNT', Sequelize.col('authorId')), 'count']],
group: ['authorId', 'author.id'], // Include 'author.id' to satisfy GROUP BY with JOIN
order: [[Sequelize.literal('count'), 'DESC']],
limit: limit
})
return authors.map((au) => {
return {
id: au.id,
name: au.name,
count: au.dataValues.count
id: au.authorId,
name: au.author.name,
count: au.get('count') // Use get method to access aliased attributes
}
})
},
/**
* Search authors
* @param {string} libraryId
* @param {string} query
*
* @param {string} libraryId
* @param {string} query
* @param {number} limit
* @param {number} offset
* @returns {object[]} oldAuthor with numBooks
* @returns {Promise<Object[]>} oldAuthor with numBooks
*/
async search(libraryId, query, limit, offset) {
const authors = await Database.authorModel.findAll({
@ -52,9 +68,7 @@ module.exports = {
libraryId
},
attributes: {
include: [
[Sequelize.literal('(SELECT count(*) FROM bookAuthors ba WHERE ba.authorId = author.id)'), 'numBooks']
]
include: [[Sequelize.literal('(SELECT count(*) FROM bookAuthors ba WHERE ba.authorId = author.id)'), 'numBooks']]
},
limit,
offset

View file

@ -75,7 +75,7 @@ module.exports = {
/**
* Get library items for most recently added shelf
* @param {oldLibrary} library
* @param {import('../../objects/Library')} library
* @param {oldUser} user
* @param {string[]} include
* @param {number} limit
@ -120,14 +120,14 @@ module.exports = {
/**
* Get library items for continue series shelf
* @param {string} library
* @param {import('../../objects/Library')} library
* @param {oldUser} user
* @param {string[]} include
* @param {number} limit
* @returns {object} { libraryItems:LibraryItem[], count:number }
*/
async getLibraryItemsContinueSeries(library, user, include, limit) {
const { libraryItems, count } = await libraryItemsBookFilters.getContinueSeriesLibraryItems(library.id, user, include, limit, 0)
const { libraryItems, count } = await libraryItemsBookFilters.getContinueSeriesLibraryItems(library, user, include, limit, 0)
return {
libraryItems: libraryItems.map(li => {
const oldLibraryItem = Database.libraryItemModel.getOldLibraryItem(li).toJSONMinified()
@ -145,7 +145,7 @@ module.exports = {
/**
* Get library items or podcast episodes for the "Listen Again" and "Read Again" shelf
* @param {oldLibrary} library
* @param {import('../../objects/Library')} library
* @param {oldUser} user
* @param {string[]} include
* @param {number} limit
@ -451,7 +451,7 @@ module.exports = {
libraryId: libraryId
}
},
attributes: ['tags', 'genres']
attributes: ['tags', 'genres', 'language']
})
for (const podcast of podcasts) {
if (podcast.tags?.length) {
@ -460,6 +460,9 @@ module.exports = {
if (podcast.genres?.length) {
podcast.genres.forEach((genre) => data.genres.add(genre))
}
if (podcast.language) {
data.languages.add(podcast.language)
}
}
} else {
const books = await Database.bookModel.findAll({

View file

@ -34,6 +34,10 @@ module.exports = {
attributes: ['sequence']
}
}
],
order: [
[Database.authorModel, Database.bookAuthorModel, 'createdAt', 'ASC'],
[Database.seriesModel, 'bookSeries', 'createdAt', 'ASC']
]
})
for (const book of booksWithTag) {
@ -68,7 +72,7 @@ module.exports = {
/**
* Get all library items that have genres
* @param {string[]} genres
* @returns {Promise<LibraryItem[]>}
* @returns {Promise<import('../../models/LibraryItem')[]>}
*/
async getAllLibraryItemsWithGenres(genres) {
const libraryItems = []

View file

@ -7,7 +7,7 @@ const { asciiOnlyToLowerCase } = require('../index')
module.exports = {
/**
* User permissions to restrict books for explicit content & tags
* @param {import('../../objects/user/User')} user
* @param {import('../../objects/user/User')} user
* @returns {{ bookWhere:Sequelize.WhereOptions, replacements:object }}
*/
getUserPermissionBookWhereQuery(user) {
@ -25,9 +25,11 @@ module.exports = {
if (user.permissions.selectedTagsNotAccessible) {
bookWhere.push(Sequelize.where(Sequelize.literal(`(SELECT count(*) FROM json_each(tags) WHERE json_valid(tags) AND json_each.value IN (:userTagsSelected))`), 0))
} else {
bookWhere.push(Sequelize.where(Sequelize.literal(`(SELECT count(*) FROM json_each(tags) WHERE json_valid(tags) AND json_each.value IN (:userTagsSelected))`), {
[Sequelize.Op.gte]: 1
}))
bookWhere.push(
Sequelize.where(Sequelize.literal(`(SELECT count(*) FROM json_each(tags) WHERE json_valid(tags) AND json_each.value IN (:userTagsSelected))`), {
[Sequelize.Op.gte]: 1
})
)
}
}
return {
@ -39,8 +41,8 @@ module.exports = {
/**
* When collapsing series and filtering by progress
* different where options are required
*
* @param {string} value
*
* @param {string} value
* @returns {Sequelize.WhereOptions}
*/
getCollapseSeriesMediaProgressFilter(value) {
@ -90,8 +92,8 @@ module.exports = {
/**
* Get where options for Book model
* @param {string} group
* @param {[string]} value
* @param {string} group
* @param {[string]} value
* @returns {object} { Sequelize.WhereOptions, string[] }
*/
getMediaGroupQuery(group, value) {
@ -170,7 +172,7 @@ module.exports = {
Sequelize.where(Sequelize.fn('json_array_length', Sequelize.col('audioFiles')), 0),
{
'$mediaProgresses.isFinished$': true,
'ebookFile': {
ebookFile: {
[Sequelize.Op.not]: null
}
}
@ -204,6 +206,10 @@ module.exports = {
mediaWhere['ebookFile'] = {
[Sequelize.Op.not]: null
}
} else if (value == 'no-ebook') {
mediaWhere['ebookFile'] = {
[Sequelize.Op.eq]: null
}
}
} else if (group === 'missing') {
if (['asin', 'isbn', 'subtitle', 'publishedYear', 'description', 'publisher', 'language', 'cover'].includes(value)) {
@ -228,8 +234,8 @@ module.exports = {
/**
* Get sequelize order
* @param {string} sortBy
* @param {boolean} sortDesc
* @param {string} sortBy
* @param {boolean} sortDesc
* @param {boolean} collapseseries
* @returns {Sequelize.order}
*/
@ -274,18 +280,14 @@ module.exports = {
* When collapsing series get first book in each series
* to know which books to exclude from primary query.
* Additionally use this query to get the number of books in each series
*
* @param {Sequelize.ModelStatic} bookFindOptions
* @param {Sequelize.WhereOptions} seriesWhere
*
* @param {Sequelize.ModelStatic} bookFindOptions
* @param {Sequelize.WhereOptions} seriesWhere
* @returns {object} { booksToExclude, bookSeriesToInclude }
*/
async getCollapseSeriesBooksToExclude(bookFindOptions, seriesWhere) {
const allSeries = await Database.seriesModel.findAll({
attributes: [
'id',
'name',
[Sequelize.literal('(SELECT count(*) FROM bookSeries bs WHERE bs.seriesId = series.id)'), 'numBooks']
],
attributes: ['id', 'name', [Sequelize.literal('(SELECT count(*) FROM bookSeries bs WHERE bs.seriesId = series.id)'), 'numBooks']],
distinct: true,
subQuery: false,
where: seriesWhere,
@ -300,23 +302,22 @@ module.exports = {
required: true
}
],
order: [
Sequelize.literal('CAST(`books.bookSeries.sequence` AS FLOAT) ASC NULLS LAST')
]
order: [Sequelize.literal('CAST(`books.bookSeries.sequence` AS FLOAT) ASC NULLS LAST')]
})
const bookSeriesToInclude = []
const booksToInclude = []
let booksToExclude = []
allSeries.forEach(s => {
allSeries.forEach((s) => {
let found = false
for (let book of s.books) {
if (!found && !booksToInclude.includes(book.id)) {
booksToInclude.push(book.id)
bookSeriesToInclude.push({
id: book.bookSeries.id,
numBooks: s.dataValues.numBooks
numBooks: s.dataValues.numBooks,
libraryItemIds: s.books?.map((b) => b.libraryItem.id) || []
})
booksToExclude = booksToExclude.filter(bid => bid !== book.id)
booksToExclude = booksToExclude.filter((bid) => bid !== book.id)
found = true
} else if (!booksToExclude.includes(book.id) && !booksToInclude.includes(book.id)) {
booksToExclude.push(book.id)
@ -328,16 +329,16 @@ module.exports = {
/**
* Get library items for book media type using filter and sort
* @param {string} libraryId
* @param {string} libraryId
* @param {[oldUser]} user
* @param {[string]} filterGroup
* @param {[string]} filterValue
* @param {string} sortBy
* @param {string} sortDesc
* @param {[string]} filterGroup
* @param {[string]} filterValue
* @param {string} sortBy
* @param {string} sortDesc
* @param {boolean} collapseseries
* @param {string[]} include
* @param {number} limit
* @param {number} offset
* @param {number} limit
* @param {number} offset
* @param {boolean} isHomePage for home page shelves
* @returns {object} { libraryItems:LibraryItem[], count:number }
*/
@ -359,15 +360,11 @@ module.exports = {
let bookAttributes = null
if (sortBy === 'media.metadata.authorNameLF') {
bookAttributes = {
include: [
[Sequelize.literal(`(SELECT group_concat(a.lastFirst, ", ") FROM authors AS a, bookAuthors as ba WHERE ba.authorId = a.id AND ba.bookId = book.id)`), 'author_name']
]
include: [[Sequelize.literal(`(SELECT group_concat(lastFirst, ", ") FROM (SELECT a.lastFirst FROM authors AS a, bookAuthors as ba WHERE ba.authorId = a.id AND ba.bookId = book.id ORDER BY ba.createdAt ASC))`), 'author_name']]
}
} else if (sortBy === 'media.metadata.authorName') {
bookAttributes = {
include: [
[Sequelize.literal(`(SELECT group_concat(a.name, ", ") FROM authors AS a, bookAuthors as ba WHERE ba.authorId = a.id AND ba.bookId = book.id)`), 'author_name']
]
include: [[Sequelize.literal(`(SELECT group_concat(name, ", ") FROM (SELECT a.name FROM authors AS a, bookAuthors as ba WHERE ba.authorId = a.id AND ba.bookId = book.id ORDER BY ba.createdAt ASC))`), 'author_name']]
}
}
@ -382,9 +379,7 @@ module.exports = {
model: Database.seriesModel,
attributes: ['id', 'name', 'nameIgnorePrefix']
},
order: [
['createdAt', 'ASC']
],
order: [['createdAt', 'ASC']],
separate: true
}
@ -395,9 +390,7 @@ module.exports = {
model: Database.authorModel,
attributes: ['id', 'name']
},
order: [
['createdAt', 'ASC']
],
order: [['createdAt', 'ASC']],
separate: true
}
@ -421,6 +414,10 @@ module.exports = {
libraryItemWhere['libraryFiles'] = {
[Sequelize.Op.substring]: `"isSupplementary":true`
}
} else if (filterGroup === 'ebooks' && filterValue === 'no-supplementary') {
libraryItemWhere['libraryFiles'] = {
[Sequelize.Op.notLike]: Sequelize.literal(`\'%"isSupplementary":true%\'`)
}
} else if (filterGroup === 'missing' && filterValue === 'authors') {
authorInclude = {
model: Database.authorModel,
@ -488,7 +485,7 @@ module.exports = {
})
} else if (filterGroup === 'recent') {
libraryItemWhere['createdAt'] = {
[Sequelize.Op.gte]: new Date(new Date() - (60 * 24 * 60 * 60 * 1000)) // 60 days ago
[Sequelize.Op.gte]: new Date(new Date() - 60 * 24 * 60 * 60 * 1000) // 60 days ago
}
}
@ -543,9 +540,9 @@ module.exports = {
// When collapsing series and sorting by title then use the series name instead of the book title
// for this set an attribute "display_title" to use in sorting
if (global.ServerSettings.sortingIgnorePrefix) {
bookAttributes.include.push([Sequelize.literal(`IFNULL((SELECT s.nameIgnorePrefix FROM bookSeries AS bs, series AS s WHERE bs.seriesId = s.id AND bs.bookId = book.id AND bs.id IN (${bookSeriesToInclude.map(v => `"${v.id}"`).join(', ')})), titleIgnorePrefix)`), 'display_title'])
bookAttributes.include.push([Sequelize.literal(`IFNULL((SELECT s.nameIgnorePrefix FROM bookSeries AS bs, series AS s WHERE bs.seriesId = s.id AND bs.bookId = book.id AND bs.id IN (${bookSeriesToInclude.map((v) => `"${v.id}"`).join(', ')})), titleIgnorePrefix)`), 'display_title'])
} else {
bookAttributes.include.push([Sequelize.literal(`IFNULL((SELECT s.name FROM bookSeries AS bs, series AS s WHERE bs.seriesId = s.id AND bs.bookId = book.id AND bs.id IN (${bookSeriesToInclude.map(v => `"${v.id}"`).join(', ')})), \`book\`.\`title\`)`), 'display_title'])
bookAttributes.include.push([Sequelize.literal(`IFNULL((SELECT s.name FROM bookSeries AS bs, series AS s WHERE bs.seriesId = s.id AND bs.bookId = book.id AND bs.id IN (${bookSeriesToInclude.map((v) => `"${v.id}"`).join(', ')})), \`book\`.\`title\`)`), 'display_title'])
}
}
@ -590,15 +587,16 @@ module.exports = {
// For showing details of collapsed series
if (collapseseries && book.bookSeries?.length) {
const collapsedSeries = book.bookSeries.find(bs => collapseSeriesBookSeries.some(cbs => cbs.id === bs.id))
const collapsedSeries = book.bookSeries.find((bs) => collapseSeriesBookSeries.some((cbs) => cbs.id === bs.id))
if (collapsedSeries) {
const collapseSeriesObj = collapseSeriesBookSeries.find(csbs => csbs.id === collapsedSeries.id)
const collapseSeriesObj = collapseSeriesBookSeries.find((csbs) => csbs.id === collapsedSeries.id)
libraryItem.collapsedSeries = {
id: collapsedSeries.series.id,
name: collapsedSeries.series.name,
nameIgnorePrefix: collapsedSeries.series.nameIgnorePrefix,
sequence: collapsedSeries.sequence,
numBooks: collapseSeriesObj?.numBooks || 0
numBooks: collapseSeriesObj?.numBooks || 0,
libraryItemIds: collapseSeriesObj?.libraryItemIds || []
}
}
}
@ -625,14 +623,15 @@ module.exports = {
* 2. Has no books in progress
* 3. Has at least 1 unfinished book
* TODO: Reduce queries
* @param {string} libraryId
* @param {oldUser} user
* @param {string[]} include
* @param {number} limit
* @param {number} offset
* @returns {object} { libraryItems:LibraryItem[], count:number }
* @param {import('../../objects/Library')} library
* @param {import('../../objects/user/User')} user
* @param {string[]} include
* @param {number} limit
* @param {number} offset
* @returns {{ libraryItems:import('../../models/LibraryItem')[], count:number }}
*/
async getContinueSeriesLibraryItems(libraryId, user, include, limit, offset) {
async getContinueSeriesLibraryItems(library, user, include, limit, offset) {
const libraryId = library.id
const libraryItemIncludes = []
if (include.includes('rssfeed')) {
libraryItemIncludes.push({
@ -646,6 +645,16 @@ module.exports = {
const userPermissionBookWhere = this.getUserPermissionBookWhereQuery(user)
bookWhere.push(...userPermissionBookWhere.bookWhere)
let includeAttributes = [[Sequelize.literal('(SELECT max(mp.updatedAt) FROM bookSeries bs, mediaProgresses mp WHERE mp.mediaItemId = bs.bookId AND mp.userId = :userId AND bs.seriesId = series.id)'), 'recent_progress']]
let booksNotFinishedQuery = `SELECT count(*) FROM bookSeries bs LEFT OUTER JOIN mediaProgresses mp ON mp.mediaItemId = bs.bookId AND mp.userId = :userId WHERE bs.seriesId = series.id AND (mp.isFinished = 0 OR mp.isFinished IS NULL)`
if (library.settings.onlyShowLaterBooksInContinueSeries) {
const maxSequenceQuery = `(SELECT CAST(max(bs.sequence) as FLOAT) FROM bookSeries bs, mediaProgresses mp WHERE mp.mediaItemId = bs.bookId AND mp.isFinished = 1 AND mp.userId = :userId AND bs.seriesId = series.id)`
includeAttributes.push([Sequelize.literal(`${maxSequenceQuery}`), 'maxSequence'])
booksNotFinishedQuery = booksNotFinishedQuery + ` AND CAST(bs.sequence as FLOAT) > ${maxSequenceQuery}`
}
const { rows: series, count } = await Database.seriesModel.findAndCountAll({
where: [
{
@ -659,17 +668,15 @@ module.exports = {
Sequelize.where(Sequelize.literal(`(SELECT count(*) FROM mediaProgresses mp, bookSeries bs WHERE bs.seriesId = series.id AND mp.mediaItemId = bs.bookId AND mp.userId = :userId AND mp.isFinished = 1)`), {
[Sequelize.Op.gte]: 1
}),
// Has at least 1 book not finished
Sequelize.where(Sequelize.literal(`(SELECT count(*) FROM bookSeries bs LEFT OUTER JOIN mediaProgresses mp ON mp.mediaItemId = bs.bookId AND mp.userId = :userId WHERE bs.seriesId = series.id AND (mp.isFinished = 0 OR mp.isFinished IS NULL))`), {
// Has at least 1 book not finished (that has a sequence number higher than the highest already read, if library config is toggled)
Sequelize.where(Sequelize.literal(`(${booksNotFinishedQuery})`), {
[Sequelize.Op.gte]: 1
}),
// Has no books in progress
Sequelize.where(Sequelize.literal(`(SELECT count(*) FROM mediaProgresses mp, bookSeries bs WHERE mp.mediaItemId = bs.bookId AND mp.userId = :userId AND bs.seriesId = series.id AND mp.isFinished = 0 AND mp.currentTime > 0)`), 0)
],
attributes: {
include: [
[Sequelize.literal('(SELECT max(mp.updatedAt) FROM bookSeries bs, mediaProgresses mp WHERE mp.mediaItemId = bs.bookId AND mp.userId = :userId AND bs.seriesId = series.id)'), 'recent_progress']
]
include: includeAttributes
},
replacements: {
userId: user.id,
@ -680,9 +687,7 @@ module.exports = {
attributes: ['bookId', 'sequence'],
separate: true,
subQuery: false,
order: [
[Sequelize.literal('CAST(sequence AS FLOAT) ASC NULLS LAST')]
],
order: [[Sequelize.literal('CAST(sequence AS FLOAT) ASC NULLS LAST')]],
where: {
'$book.mediaProgresses.isFinished$': {
[Sequelize.Op.or]: [null, 0]
@ -712,31 +717,44 @@ module.exports = {
]
}
},
order: [
[Sequelize.literal('recent_progress DESC')]
],
order: [[Sequelize.literal('recent_progress DESC')]],
distinct: true,
subQuery: false,
limit,
offset
})
const libraryItems = series.map(s => {
if (!s.bookSeries.length) return null // this is only possible if user has restricted books in series
const libraryItem = s.bookSeries[0].book.libraryItem.toJSON()
const book = s.bookSeries[0].book.toJSON()
delete book.libraryItem
libraryItem.series = {
id: s.id,
name: s.name,
sequence: s.bookSeries[0].sequence
}
if (libraryItem.feeds?.length) {
libraryItem.rssFeed = libraryItem.feeds[0]
}
libraryItem.media = book
return libraryItem
}).filter(s => s)
const libraryItems = series
.map((s) => {
if (!s.bookSeries.length) return null // this is only possible if user has restricted books in series
let bookIndex = 0
// if the library setting is toggled, only show later entries in series, otherwise skip
if (library.settings.onlyShowLaterBooksInContinueSeries) {
bookIndex = s.bookSeries.findIndex(function (b) {
return parseFloat(b.dataValues.sequence) > s.dataValues.maxSequence
})
if (bookIndex === -1) {
// no later books than maxSequence
return null
}
}
const libraryItem = s.bookSeries[bookIndex].book.libraryItem.toJSON()
const book = s.bookSeries[bookIndex].book.toJSON()
delete book.libraryItem
libraryItem.series = {
id: s.id,
name: s.name,
sequence: s.bookSeries[bookIndex].sequence
}
if (libraryItem.feeds?.length) {
libraryItem.rssFeed = libraryItem.feeds[0]
}
libraryItem.media = book
return libraryItem
})
.filter((s) => s)
return {
libraryItems,
@ -748,10 +766,10 @@ module.exports = {
* Get book library items for the "Discover" shelf
* Random selection of books that are not started
* - only includes the first book of a not-started series
* @param {string} libraryId
* @param {oldUser} user
* @param {string[]} include
* @param {number} limit
* @param {string} libraryId
* @param {oldUser} user
* @param {string[]} include
* @param {number} limit
* @returns {object} {libraryItems:LibraryItem, count:number}
*/
async getDiscoverLibraryItems(libraryId, user, include, limit) {
@ -779,9 +797,7 @@ module.exports = {
model: Database.bookModel,
where: userPermissionBookWhere.bookWhere
},
order: [
[Sequelize.literal('CAST(sequence AS FLOAT) ASC NULLS LAST')]
],
order: [[Sequelize.literal('CAST(sequence AS FLOAT) ASC NULLS LAST')]],
limit: 1
},
subQuery: false,
@ -789,7 +805,7 @@ module.exports = {
order: Database.sequelize.random()
})
const booksFromSeriesToInclude = seriesNotStarted.map(se => se.bookSeries?.[0]?.bookId).filter(bid => bid)
const booksFromSeriesToInclude = seriesNotStarted.map((se) => se.bookSeries?.[0]?.bookId).filter((bid) => bid)
// optional include rssFeed
const libraryItemIncludes = []
@ -881,7 +897,7 @@ module.exports = {
/**
* Get book library items in a collection
* @param {oldCollection} collection
* @param {oldCollection} collection
* @returns {Promise<LibraryItem[]>}
*/
async getLibraryItemsForCollection(collection) {
@ -925,22 +941,22 @@ module.exports = {
/**
* Get library items for series
* @param {import('../../objects/entities/Series')} oldSeries
* @param {import('../../objects/user/User')} [oldUser]
* @param {import('../../objects/entities/Series')} oldSeries
* @param {import('../../objects/user/User')} [oldUser]
* @returns {Promise<import('../../objects/LibraryItem')[]>}
*/
async getLibraryItemsForSeries(oldSeries, oldUser) {
const { libraryItems } = await this.getFilteredLibraryItems(oldSeries.libraryId, oldUser, 'series', oldSeries.id, null, null, false, [], null, null)
return libraryItems.map(li => Database.libraryItemModel.getOldLibraryItem(li))
return libraryItems.map((li) => Database.libraryItemModel.getOldLibraryItem(li))
},
/**
* Search books, authors, series
* @param {import('../../objects/user/User')} oldUser
* @param {import('../../objects/Library')} oldLibrary
* @param {string} query
* @param {number} limit
* @param {number} offset
* @param {import('../../objects/Library')} oldLibrary
* @param {string} query
* @param {number} limit
* @param {number} offset
* @returns {{book:object[], narrators:object[], authors:object[], tags:object[], series:object[]}}
*/
async search(oldUser, oldLibrary, query, limit, offset) {
@ -1119,7 +1135,7 @@ module.exports = {
/**
* Genres with num books
* @param {string} libraryId
* @param {string} libraryId
* @returns {{genre:string, count:number}[]}
*/
async getGenresWithCount(libraryId) {
@ -1141,7 +1157,7 @@ module.exports = {
/**
* Get stats for book library
* @param {string} libraryId
* @param {string} libraryId
* @returns {Promise<{ totalSize:number, totalDuration:number, numAudioFiles:number, totalItems:number}>}
*/
async getBookLibraryStats(libraryId) {
@ -1155,8 +1171,8 @@ module.exports = {
/**
* Get longest books in library
* @param {string} libraryId
* @param {number} limit
* @param {string} libraryId
* @param {number} limit
* @returns {Promise<{ id:string, title:string, duration:number }[]>}
*/
async getLongestBooks(libraryId, limit) {
@ -1169,12 +1185,10 @@ module.exports = {
libraryId
}
},
order: [
['duration', 'DESC']
],
order: [['duration', 'DESC']],
limit
})
return books.map(book => {
return books.map((book) => {
return {
id: book.libraryItem.id,
title: book.title,
@ -1182,4 +1196,4 @@ module.exports = {
}
})
}
}
}

View file

@ -51,6 +51,8 @@ module.exports = {
[Sequelize.Op.gte]: 1
})
replacements.filterValue = value
} else if (group === 'languages') {
mediaWhere['language'] = value
}
return {