mirror of https://github.com/advplyr/audiobookshelf.git
synced 2025-08-03 09:44:41 +02:00

Merge branch 'master' into authorSort

This commit is contained in: commit e50b06183e
109 changed files with 4845 additions and 967 deletions

server/Auth.js (271 changes)
@@ -76,12 +76,16 @@ class Auth {
       return
     }

+    // Custom req timeout see: https://github.com/panva/node-openid-client/blob/main/docs/README.md#customizing
+    OpenIDClient.custom.setHttpOptionsDefaults({ timeout: 10000 })
+
     const openIdIssuerClient = new OpenIDClient.Issuer({
       issuer: global.ServerSettings.authOpenIDIssuerURL,
       authorization_endpoint: global.ServerSettings.authOpenIDAuthorizationURL,
       token_endpoint: global.ServerSettings.authOpenIDTokenURL,
       userinfo_endpoint: global.ServerSettings.authOpenIDUserInfoURL,
-      jwks_uri: global.ServerSettings.authOpenIDJwksURL
+      jwks_uri: global.ServerSettings.authOpenIDJwksURL,
+      end_session_endpoint: global.ServerSettings.authOpenIDLogoutURL
     }).Client
     const openIdClient = new openIdIssuerClient({
       client_id: global.ServerSettings.authOpenIDClientID,
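For orientation, a minimal self-contained sketch of the same pattern with node-openid-client (endpoint URLs and client values are illustrative, not the project's settings):

const { Issuer, custom } = require('openid-client')

// Apply a request timeout before any HTTP calls (10s, matching the diff)
custom.setHttpOptionsDefaults({ timeout: 10000 })

// Build the Issuer from explicit endpoint metadata instead of OIDC discovery
const issuer = new Issuer({
  issuer: 'https://sso.example.com',
  authorization_endpoint: 'https://sso.example.com/authorize',
  token_endpoint: 'https://sso.example.com/token',
  userinfo_endpoint: 'https://sso.example.com/userinfo',
  jwks_uri: 'https://sso.example.com/jwks',
  end_session_endpoint: 'https://sso.example.com/logout' // what enables client.endSessionUrl() later
})

const client = new issuer.Client({
  client_id: 'abs',
  client_secret: 'secret',
  redirect_uris: ['https://abs.example.com/auth/openid/callback'],
  response_types: ['code']
})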
@@ -153,6 +157,9 @@ class Auth {
       return
     }

+    // We also have to save the id_token for later (used for logout) because we cannot set cookies here
+    user.openid_id_token = tokenset.id_token
+
     // permit login
     return done(null, user)
   }))
@@ -183,49 +190,48 @@ class Auth {
   }

   /**
-   * Stores the client's choice how the login callback should happen in temp cookies
+   * Returns whether the given auth method is API based.
+   *
+   * @param {string} authMethod
+   * @returns {boolean}
+   */
+  isAuthMethodAPIBased(authMethod) {
+    return ['api', 'openid-mobile'].includes(authMethod)
+  }
+
+  /**
+   * Stores the client's choice of login callback method in temporary cookies.
+   *
+   * The `authMethod` parameter specifies the authentication strategy and can have the following values:
+   * - 'local': Standard authentication
+   * - 'api': Authentication for API use
+   * - 'openid': OpenID authentication directly over web
+   * - 'openid-mobile': OpenID authentication, but done via a mobile device
    *
    * @param {import('express').Request} req
    * @param {import('express').Response} res
+   * @param {string} authMethod - The authentication method, default is 'local'.
    */
-  paramsToCookies(req, res) {
-    // Set if isRest flag is set or if mobile oauth flow is used
-    if (req.query.isRest?.toLowerCase() == 'true' || req.query.redirect_uri) {
-      // store the isRest flag to the is_rest cookie
-      res.cookie('is_rest', 'true', {
-        maxAge: 120000, // 2 min
-        httpOnly: true
-      })
-    } else {
-      // no isRest-flag set -> set is_rest cookie to false
-      res.cookie('is_rest', 'false', {
-        maxAge: 120000, // 2 min
-        httpOnly: true
-      })
+  paramsToCookies(req, res, authMethod = 'local') {
+    const TWO_MINUTES = 120000 // 2 minutes in milliseconds
+    const callback = req.query.redirect_uri || req.query.callback

-      // persist state if passed in
+    // Additional handling for non-API based authMethod
+    if (!this.isAuthMethodAPIBased(authMethod)) {
+      // Store 'auth_state' if present in the request
       if (req.query.state) {
-        res.cookie('auth_state', req.query.state, {
-          maxAge: 120000, // 2 min
-          httpOnly: true
-        })
+        res.cookie('auth_state', req.query.state, { maxAge: TWO_MINUTES, httpOnly: true })
       }

-      const callback = req.query.redirect_uri || req.query.callback
-
-      // check if we are missing a callback parameter - we need one if isRest=false
+      // Validate and store the callback URL
       if (!callback) {
-        res.status(400).send({
-          message: 'No callback parameter'
-        })
-        return
+        return res.status(400).send({ message: 'No callback parameter' })
       }
-      // store the callback url to the auth_cb cookie
-      res.cookie('auth_cb', callback, {
-        maxAge: 120000, // 2 min
-        httpOnly: true
-      })
+      res.cookie('auth_cb', callback, { maxAge: TWO_MINUTES, httpOnly: true })
     }

+    // Store the authentication method for the long term
+    res.cookie('auth_method', authMethod, { maxAge: 1000 * 60 * 60 * 24 * 365 * 10, httpOnly: true })
   }

   /**
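A hedged sketch of how these cookies connect the two halves of the flow; the handler shape is illustrative, only the cookie names and values come from the diff:

// Before redirecting to the SSO provider:
//   this.paramsToCookies(req, res, isMobileFlow ? 'openid-mobile' : 'openid')
// When the provider redirects back, the stored auth_method decides the response shape:
router.get('/auth/openid/callback', /* passport middleware */ (req, res) => {
  if (this.isAuthMethodAPIBased(req.cookies.auth_method)) {
    res.json({ user: req.user }) // API and mobile clients receive JSON
  } else {
    res.redirect(req.cookies.auth_cb) // browsers are sent to the stored callback URL
  }
})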
@@ -239,7 +245,7 @@ class Auth {
     // get userLogin json (information about the user, server and the session)
     const data_json = await this.getUserLoginResponsePayload(req.user)

-    if (req.cookies.is_rest === 'true') {
+    if (this.isAuthMethodAPIBased(req.cookies.auth_method)) {
       // REST request - send data
       res.json(data_json)
     } else {
@@ -269,109 +275,105 @@ class Auth {

     // openid strategy login route (this redirects to the configured openid login provider)
     router.get('/auth/openid', (req, res, next) => {
-      // Get the OIDC client from the strategy
-      // We need to call the client manually, because the strategy does not support forwarding the code challenge
-      // for API or mobile clients
-      const oidcStrategy = passport._strategy('openid-client')
-      const client = oidcStrategy._client
-      const sessionKey = oidcStrategy._key
-
       try {
-        // helper function from openid-client
-        function pick(object, ...paths) {
-          const obj = {}
-          for (const path of paths) {
-            if (object[path] !== undefined) {
-              obj[path] = object[path]
-            }
-          }
-          return obj
-        }
+        const protocol = req.secure || req.get('x-forwarded-proto') === 'https' ? 'https' : 'http'
+        const hostUrl = new URL(`${protocol}://${req.get('host')}`)
+        const isMobileFlow = req.query.response_type === 'code' || req.query.redirect_uri || req.query.code_challenge

         // Only allow code flow (for mobile clients)
         if (req.query.response_type && req.query.response_type !== 'code') {
           Logger.debug(`[Auth] OIDC Invalid response_type=${req.query.response_type}`)
           return res.status(400).send('Invalid response_type, only code supported')
         }

+        // Get the OIDC client from the strategy
+        // We need to call the client manually, because the strategy does not support forwarding the code challenge
+        // for API or mobile clients
+        const oidcStrategy = passport._strategy('openid-client')
-        const protocol = (req.secure || req.get('x-forwarded-proto') === 'https') ? 'https' : 'http'
+        // Generate a state on web flow or if no state supplied
+        const state = (!isMobileFlow || !req.query.state) ? OpenIDClient.generators.random() : req.query.state

-        let mobile_redirect_uri = null
-
-        // The client wishes a different redirect_uri
-        // We will allow it if it is in the whitelist, by saving it into this.openIdAuthSession and setting the redirect uri to /auth/openid/mobile-redirect
-        // where we will handle the redirect to it
-        if (req.query.redirect_uri) {
-          // Check if the redirect_uri is in the whitelist
-          if (Database.serverSettings.authOpenIDMobileRedirectURIs.includes(req.query.redirect_uri) ||
-            (Database.serverSettings.authOpenIDMobileRedirectURIs.length === 1 && Database.serverSettings.authOpenIDMobileRedirectURIs[0] === '*')) {
-            oidcStrategy._params.redirect_uri = new URL(`${protocol}://${req.get('host')}/auth/openid/mobile-redirect`).toString()
-            mobile_redirect_uri = req.query.redirect_uri
-          } else {
-            Logger.debug(`[Auth] Invalid redirect_uri=${req.query.redirect_uri} - not in whitelist`)
+        // Redirect URL for the SSO provider
+        let redirectUri
+        if (isMobileFlow) {
+          // Mobile flow requires a redirect uri
+          // If it is in the whitelist, we will save it into this.openIdAuthSession and set the redirect uri to /auth/openid/mobile-redirect
+          // where we will handle the redirect to it
+          if (!req.query.redirect_uri || !isValidRedirectUri(req.query.redirect_uri)) {
+            Logger.debug(`[Auth] Invalid redirect_uri=${req.query.redirect_uri}`)
             return res.status(400).send('Invalid redirect_uri')
           }
+          // We cannot save the supplied redirect_uri in the session, because the mobile client uses a browser instead of the API
+          // for the request to mobile-redirect, and as such the session is not shared
+          this.openIdAuthSession.set(state, { mobile_redirect_uri: req.query.redirect_uri })
+
+          redirectUri = new URL('/auth/openid/mobile-redirect', hostUrl).toString()
         } else {
-          oidcStrategy._params.redirect_uri = new URL(`${protocol}://${req.get('host')}/auth/openid/callback`).toString()
-        }
+          redirectUri = new URL('/auth/openid/callback', hostUrl).toString()

-        Logger.debug(`[Auth] Oidc redirect_uri=${oidcStrategy._params.redirect_uri}`)
+        const client = oidcStrategy._client
+        const sessionKey = oidcStrategy._key

-        let code_challenge
-        let code_challenge_method
-
-        // If code_challenge is provided, expect that code_verifier will be handled by the client (mobile app)
-        // The web frontend of ABS does not need to do a PKCE itself, because it never handles the "code" of the oauth flow
-        // and as such will not send a code challenge, we will then generate one
-        if (req.query.code_challenge) {
-          code_challenge = req.query.code_challenge
-          code_challenge_method = req.query.code_challenge_method || 'S256'
-
-          if (!['S256', 'plain'].includes(code_challenge_method)) {
-            return res.status(400).send('Invalid code_challenge_method')
+          if (req.query.state) {
+            Logger.debug(`[Auth] Invalid state - not allowed on web openid flow`)
+            return res.status(400).send('Invalid state, not allowed on web flow')
           }
-        } else {
-          // If no code_challenge is provided, assume a web application flow and generate one
-          const code_verifier = OpenIDClient.generators.codeVerifier()
-          code_challenge = OpenIDClient.generators.codeChallenge(code_verifier)
-          code_challenge_method = 'S256'
-
-          // Store the code_verifier in the session for later use in the token exchange
-          req.session[sessionKey] = { ...req.session[sessionKey], code_verifier }
         }
+        oidcStrategy._params.redirect_uri = redirectUri
+        Logger.debug(`[Auth] OIDC redirect_uri=${redirectUri}`)

-        const params = {
-          state: OpenIDClient.generators.random(),
-          // Other params by the passport strategy
-          ...oidcStrategy._params
-        }
-
-        if (!params.nonce && params.response_type.includes('id_token')) {
-          params.nonce = OpenIDClient.generators.random()
-        }
+        let { code_challenge, code_challenge_method, code_verifier } = generatePkce(req, isMobileFlow)

         req.session[sessionKey] = {
           ...req.session[sessionKey],
-          ...pick(params, 'nonce', 'state', 'max_age', 'response_type'),
+          state: state,
+          max_age: oidcStrategy._params.max_age,
+          response_type: 'code',
+          code_verifier: code_verifier, // not null if web flow
+          mobile: req.query.redirect_uri, // Used in the abs callback later, set mobile if redirect_uri is filled out
+          sso_redirect_uri: oidcStrategy._params.redirect_uri // Save the redirect_uri (for the SSO Provider) for the callback
         }

-        // We cannot save redirect_uri in the session, because the mobile client uses a browser instead of the API
-        // for the request to mobile-redirect and as such the session is not shared
-        this.openIdAuthSession.set(params.state, { mobile_redirect_uri: mobile_redirect_uri })
-
         // Now get the URL to direct to
         const authorizationUrl = client.authorizationUrl({
-          ...params,
-          scope: 'openid profile email',
+          ...oidcStrategy._params,
+          state: state,
+          response_type: 'code',
           code_challenge,
           code_challenge_method
         })

         // params (isRest, callback) to a cookie that will be sent to the client
-        this.paramsToCookies(req, res)
+        this.paramsToCookies(req, res, isMobileFlow ? 'openid-mobile' : 'openid')

         // Redirect the user agent (browser) to the authorization URL
         res.redirect(authorizationUrl)
       } catch (error) {
         Logger.error(`[Auth] Error in /auth/openid route: ${error}`)
         res.status(500).send('Internal Server Error')
       }

+      function generatePkce(req, isMobileFlow) {
+        if (isMobileFlow) {
+          if (!req.query.code_challenge) {
+            throw new Error('code_challenge required for mobile flow (PKCE)')
+          }
+          if (req.query.code_challenge_method && req.query.code_challenge_method !== 'S256') {
+            throw new Error('Only S256 code_challenge_method supported')
+          }
+          return {
+            code_challenge: req.query.code_challenge,
+            code_challenge_method: req.query.code_challenge_method || 'S256'
+          }
+        } else {
+          const code_verifier = OpenIDClient.generators.codeVerifier()
+          const code_challenge = OpenIDClient.generators.codeChallenge(code_verifier)
+          return { code_challenge, code_challenge_method: 'S256', code_verifier }
+        }
+      }
+
+      function isValidRedirectUri(uri) {
+        // Check if the redirect_uri is in the whitelist
+        return Database.serverSettings.authOpenIDMobileRedirectURIs.includes(uri) ||
+          (Database.serverSettings.authOpenIDMobileRedirectURIs.length === 1 && Database.serverSettings.authOpenIDMobileRedirectURIs[0] === '*')
+      }
     })

     // This will be the oauth2 callback route for mobile clients
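For reference, the PKCE split this route implements, as a minimal sketch built on openid-client's generators:

const { generators } = require('openid-client')

// Web flow: the server creates and keeps the verifier, then proves it at the token endpoint
const code_verifier = generators.codeVerifier()
const code_challenge = generators.codeChallenge(code_verifier) // BASE64URL(SHA256(code_verifier))

// Mobile flow: the app sends only code_challenge and keeps code_verifier to itself,
// because the app, not this server, performs the token exchange.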
@@ -453,6 +455,12 @@ class Auth {
         if (loginError) {
           return handleAuthError(isMobile, 500, 'Error during login', `[Auth] Error in openid callback: ${loginError}`)
         }

+        // The id_token does not provide access to the user, but is used to identify the user to the SSO provider;
+        // instead it contains a JWT with userinfo like user email, username, etc.
+        // The client will get to know it anyway in the logout url according to the oauth2 spec,
+        // so it is safe to send it to the client, but we use strict settings
+        res.cookie('openid_id_token', user.openid_id_token, { maxAge: 1000 * 60 * 60 * 24 * 365 * 10, httpOnly: true, secure: true, sameSite: 'Strict' })
         next()
       })
     }
@@ -521,7 +529,46 @@ class Auth {
       if (err) {
         res.sendStatus(500)
       } else {
-        res.sendStatus(200)
+        const authMethod = req.cookies.auth_method
+
+        res.clearCookie('auth_method')
+
+        if (authMethod === 'openid' || authMethod === 'openid-mobile') {
+          // If we are using openid, we need to redirect to the logout endpoint
+          // node-openid-client does not support doing it over passport
+          const oidcStrategy = passport._strategy('openid-client')
+          const client = oidcStrategy._client
+
+          let postLogoutRedirectUri = null
+
+          if (authMethod === 'openid') {
+            const protocol = (req.secure || req.get('x-forwarded-proto') === 'https') ? 'https' : 'http'
+            const host = req.get('host')
+            // TODO: ABS currently does not support subfolder installations
+            // If we want to support it we need to include a config for the server url
+            postLogoutRedirectUri = `${protocol}://${host}/login`
+          }
+          // else for openid-mobile we keep postLogoutRedirectUri null
+          // It would be nice to redirect to the app here, but for example Authentik does not implement
+          // the post_logout_redirect_uri parameter at all, and for other providers
+          // we would also need to implement (and even first somehow discover, for 3rd-party apps)
+          // the correct app link like audiobookshelf://login (and maybe also provide a redirect like mobile-redirect).
+          // Instead, because it is null (and this way the parameter will be omitted completely), the client/app can simply append something like
+          // &post_logout_redirect_uri=audiobookshelf://login to the received logout url by itself, which is the simplest solution
+          // (The URL needs to be whitelisted in the config of the SSO/ID provider)
+
+          const logoutUrl = client.endSessionUrl({
+            id_token_hint: req.cookies.openid_id_token,
+            post_logout_redirect_uri: postLogoutRedirectUri
+          })
+
+          res.clearCookie('openid_id_token')
+
+          // Tell the user agent (browser) to redirect to the authentication provider's logout URL
+          res.send({ redirect_url: logoutUrl })
+        } else {
+          res.sendStatus(200)
+        }
       }
     })
   })
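A hypothetical sketch of the client-side handling that the comment above describes (endpoint, token, and app link are illustrative):

// Mobile app: request logout, then append its own whitelisted app link,
// since the server deliberately omitted post_logout_redirect_uri
const res = await fetch('https://abs.example.com/logout', { method: 'POST', headers: { Authorization: `Bearer ${token}` } })
const { redirect_url } = await res.json()
const logoutUrl = `${redirect_url}&post_logout_redirect_uri=${encodeURIComponent('audiobookshelf://login')}`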
@@ -612,7 +659,7 @@ class Auth {
    * Checks if a username and password tuple is valid and the user active.
    * @param {string} username
    * @param {string} password
-   * @param {function} done
+   * @param {Promise<function>} done
    */
   async localAuthCheckUserPw(username, password, done) {
     // Load the user given its username
@@ -654,7 +701,7 @@ class Auth {
   /**
    * Hashes a password with bcrypt.
    * @param {string} password
-   * @returns {string} hash
+   * @returns {Promise<string>} hash
    */
   hashPass(password) {
     return new Promise((resolve) => {
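The body of hashPass falls outside this excerpt; a plausible sketch that matches the new Promise<string> annotation, assuming a bcrypt-style callback API imported at the top of the file (the cost factor is an assumption):

hashPass(password) {
  return new Promise((resolve) => {
    // Hash with a fixed cost factor and resolve the digest; resolve null on failure
    bcrypt.hash(password, 8, (err, hash) => {
      if (err) resolve(null)
      else resolve(hash)
    })
  })
}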
@@ -688,8 +735,8 @@ class Auth {
   /**
    *
    * @param {string} password
-   * @param {*} user
-   * @returns {boolean}
+   * @param {import('./models/User')} user
+   * @returns {Promise<boolean>}
    */
   comparePassword(password, user) {
     if (user.type === 'root' && !password && !user.pash) return true

server/Database.js
@@ -132,6 +132,11 @@ class Database {
     return this.models.playbackSession
   }

+  /** @type {typeof import('./models/CustomMetadataProvider')} */
+  get customMetadataProviderModel() {
+    return this.models.customMetadataProvider
+  }
+
   /**
    * Check if db file exists
    * @returns {boolean}
@@ -245,6 +250,7 @@ class Database {
     require('./models/Feed').init(this.sequelize)
     require('./models/FeedEpisode').init(this.sequelize)
     require('./models/Setting').init(this.sequelize)
+    require('./models/CustomMetadataProvider').init(this.sequelize)

     return this.sequelize.sync({ force, alter: false })
   }
@@ -413,10 +419,21 @@ class Database {
     await this.models.libraryItem.fullCreateFromOld(oldLibraryItem)
   }

+  /**
+   * Save metadata file and update library item
+   *
+   * @param {import('./objects/LibraryItem')} oldLibraryItem
+   * @returns {Promise<boolean>}
+   */
   async updateLibraryItem(oldLibraryItem) {
     if (!this.sequelize) return false
     await oldLibraryItem.saveMetadata()
-    return this.models.libraryItem.fullUpdateFromOld(oldLibraryItem)
+    const updated = await this.models.libraryItem.fullUpdateFromOld(oldLibraryItem)
+    // Clear library filter data cache
+    if (updated) {
+      delete this.libraryFilterData[oldLibraryItem.libraryId]
+    }
+    return updated
   }

   async removeLibraryItem(libraryItemId) {
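The deleted entry is rebuilt lazily on the next read. A sketch of the assumed read side (only this.libraryFilterData appears in the diff; the helper name is illustrative):

async getLibraryFilterData(library) {
  // Memoize per library; updateLibraryItem() above invalidates the entry on change
  if (!this.libraryFilterData[library.id]) {
    this.libraryFilterData[library.id] = await libraryFilters.getFilterData(library.mediaType, library.id)
  }
  return this.libraryFilterData[library.id]
}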
server/Logger.js
@@ -3,13 +3,17 @@ const { LogLevel } = require('./utils/constants')

 class Logger {
   constructor() {
+    /** @type {import('./managers/LogManager')} */
+    this.logManager = null
+
     this.isDev = process.env.NODE_ENV !== 'production'
     this.logLevel = !this.isDev ? LogLevel.INFO : LogLevel.TRACE
     this.socketListeners = []
-
-    this.logManager = null
   }

+  /**
+   * @returns {string}
+   */
   get timestamp() {
     return date.format(new Date(), 'YYYY-MM-DD HH:mm:ss.SSS')
   }
@@ -23,6 +27,9 @@ class Logger {
     return 'UNKNOWN'
   }

+  /**
+   * @returns {string}
+   */
   get source() {
     try {
       throw new Error()
@@ -62,24 +69,32 @@ class Logger {
     this.socketListeners = this.socketListeners.filter(s => s.id !== socketId)
   }

-  handleLog(level, args) {
+  /**
+   *
+   * @param {number} level
+   * @param {string[]} args
+   * @param {string} src
+   */
+  async handleLog(level, args, src) {
     const logObj = {
       timestamp: this.timestamp,
-      source: this.source,
+      source: src,
       message: args.join(' '),
       levelName: this.getLogLevelString(level),
       level
     }

-    if (level >= this.logLevel && this.logManager) {
-      this.logManager.logToFile(logObj)
-    }
-
     // Emit log to sockets that are listening to log events
     this.socketListeners.forEach((socketListener) => {
       if (socketListener.level <= level) {
         socketListener.socket.emit('log', logObj)
       }
     })
+
+    // Save log to file
+    if (level >= this.logLevel) {
+      await this.logManager.logToFile(logObj)
+    }
   }

   setLogLevel(level) {
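A sketch of a client subscribing to these log events over socket.io (event names follow this diff and the SocketAuthority hunk below; the numeric level is an assumption):

const { io } = require('socket.io-client')
const socket = io('https://abs.example.com')

// Ask the server to forward logs at this level and above to this socket
socket.emit('set_log_listener', 1)
socket.on('log', (logObj) => {
  console.log(`${logObj.timestamp} ${logObj.levelName} (${logObj.source}): ${logObj.message}`)
})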
@@ -90,41 +105,47 @@ class Logger {
   trace(...args) {
     if (this.logLevel > LogLevel.TRACE) return
     console.trace(`[${this.timestamp}] TRACE:`, ...args)
-    this.handleLog(LogLevel.TRACE, args)
+    this.handleLog(LogLevel.TRACE, args, this.source)
   }

   debug(...args) {
     if (this.logLevel > LogLevel.DEBUG) return
     console.debug(`[${this.timestamp}] DEBUG:`, ...args, `(${this.source})`)
-    this.handleLog(LogLevel.DEBUG, args)
+    this.handleLog(LogLevel.DEBUG, args, this.source)
   }

   info(...args) {
     if (this.logLevel > LogLevel.INFO) return
     console.info(`[${this.timestamp}] INFO:`, ...args)
-    this.handleLog(LogLevel.INFO, args)
+    this.handleLog(LogLevel.INFO, args, this.source)
   }

   warn(...args) {
     if (this.logLevel > LogLevel.WARN) return
     console.warn(`[${this.timestamp}] WARN:`, ...args, `(${this.source})`)
-    this.handleLog(LogLevel.WARN, args)
+    this.handleLog(LogLevel.WARN, args, this.source)
   }

   error(...args) {
     if (this.logLevel > LogLevel.ERROR) return
     console.error(`[${this.timestamp}] ERROR:`, ...args, `(${this.source})`)
-    this.handleLog(LogLevel.ERROR, args)
+    this.handleLog(LogLevel.ERROR, args, this.source)
   }

+  /**
+   * Fatal errors are ones that exit the process
+   * Fatal logs are saved to crash_logs.txt
+   *
+   * @param {...any} args
+   */
   fatal(...args) {
     console.error(`[${this.timestamp}] FATAL:`, ...args, `(${this.source})`)
-    this.handleLog(LogLevel.FATAL, args)
+    return this.handleLog(LogLevel.FATAL, args, this.source)
   }

   note(...args) {
     console.log(`[${this.timestamp}] NOTE:`, ...args)
-    this.handleLog(LogLevel.NOTE, args)
+    this.handleLog(LogLevel.NOTE, args, this.source)
   }
 }
 module.exports = new Logger()

server/Server.js
@@ -2,9 +2,9 @@ const Path = require('path')
 const Sequelize = require('sequelize')
 const express = require('express')
 const http = require('http')
+const util = require('util')
 const fs = require('./libs/fsExtra')
 const fileUpload = require('./libs/expressFileupload')
-const rateLimit = require('./libs/expressRateLimit')
 const cookieParser = require("cookie-parser")

 const { version } = require('../package.json')
@@ -21,11 +21,11 @@ const SocketAuthority = require('./SocketAuthority')
 const ApiRouter = require('./routers/ApiRouter')
 const HlsRouter = require('./routers/HlsRouter')

+const LogManager = require('./managers/LogManager')
 const NotificationManager = require('./managers/NotificationManager')
 const EmailManager = require('./managers/EmailManager')
 const AbMergeManager = require('./managers/AbMergeManager')
 const CacheManager = require('./managers/CacheManager')
-const LogManager = require('./managers/LogManager')
 const BackupManager = require('./managers/BackupManager')
 const PlaybackSessionManager = require('./managers/PlaybackSessionManager')
 const PodcastManager = require('./managers/PodcastManager')
@@ -67,7 +67,6 @@ class Server {
     this.notificationManager = new NotificationManager()
     this.emailManager = new EmailManager()
     this.backupManager = new BackupManager()
-    this.logManager = new LogManager()
     this.abMergeManager = new AbMergeManager()
     this.playbackSessionManager = new PlaybackSessionManager()
     this.podcastManager = new PodcastManager(this.watcher, this.notificationManager)
@@ -81,7 +80,7 @@ class Server {
     this.apiRouter = new ApiRouter(this)
     this.hlsRouter = new HlsRouter(this.auth, this.playbackSessionManager)

-    Logger.logManager = this.logManager
+    Logger.logManager = new LogManager()

     this.server = null
     this.io = null
@@ -102,10 +101,13 @@ class Server {
    */
   async init() {
     Logger.info('[Server] Init v' + version)

     await this.playbackSessionManager.removeOrphanStreams()

     await Database.init(false)

+    await Logger.logManager.init()
+
     // Create token secret if does not exist (Added v2.1.0)
     if (!Database.serverSettings.tokenSecret) {
       await this.auth.initTokenSecret()
@@ -115,7 +117,6 @@ class Server {
     await CacheManager.ensureCachePaths()

     await this.backupManager.init()
-    await this.logManager.init()
     await this.rssFeedManager.init()

     const libraries = await Database.libraryModel.getAllOldLibraries()
@@ -135,8 +136,41 @@ class Server {
     }
   }

+  /**
+   * Listen for SIGINT and uncaught exceptions
+   */
+  initProcessEventListeners() {
+    let sigintAlreadyReceived = false
+    process.on('SIGINT', async () => {
+      if (!sigintAlreadyReceived) {
+        sigintAlreadyReceived = true
+        Logger.info('SIGINT (Ctrl+C) received. Shutting down...')
+        await this.stop()
+        Logger.info('Server stopped. Exiting.')
+      } else {
+        Logger.info('SIGINT (Ctrl+C) received again. Exiting immediately.')
+      }
+      process.exit(0)
+    })
+
+    /**
+     * @see https://nodejs.org/api/process.html#event-uncaughtexceptionmonitor
+     */
+    process.on('uncaughtExceptionMonitor', async (error, origin) => {
+      await Logger.fatal(`[Server] Uncaught exception origin: ${origin}, error:`, util.format('%O', error))
+    })
+
+    /**
+     * @see https://nodejs.org/api/process.html#event-unhandledrejection
+     */
+    process.on('unhandledRejection', async (reason, promise) => {
+      await Logger.fatal(`[Server] Unhandled rejection: ${reason}, promise:`, util.format('%O', promise))
+      process.exit(1)
+    })
+  }
+
   async start() {
     Logger.info('=== Starting Server ===')
+    this.initProcessEventListeners()
     await this.init()

     const app = express()
@@ -252,8 +286,6 @@ class Server {
     ]
     dyanimicRoutes.forEach((route) => router.get(route, (req, res) => res.sendFile(Path.join(distPath, 'index.html'))))

-    // router.post('/login', passport.authenticate('local', this.auth.login), this.auth.loginResult.bind(this))
-    // router.post('/logout', this.authMiddleware.bind(this), this.logout.bind(this))
     router.post('/init', (req, res) => {
       if (Database.hasRootUser) {
         Logger.error(`[Server] attempt to init server when server already has a root user`)
@@ -284,19 +316,6 @@ class Server {
     })
     app.get('/healthcheck', (req, res) => res.sendStatus(200))

-    let sigintAlreadyReceived = false
-    process.on('SIGINT', async () => {
-      if (!sigintAlreadyReceived) {
-        sigintAlreadyReceived = true
-        Logger.info('SIGINT (Ctrl+C) received. Shutting down...')
-        await this.stop()
-        Logger.info('Server stopped. Exiting.')
-      } else {
-        Logger.info('SIGINT (Ctrl+C) received again. Exiting immediately.')
-      }
-      process.exit(0)
-    })
-
     this.server.listen(this.Port, this.Host, () => {
       if (this.Host) Logger.info(`Listening on http://${this.Host}:${this.Port}`)
       else Logger.info(`Listening on port :${this.Port}`)
@@ -379,30 +398,6 @@ class Server {
     }
   }

-  // First time login rate limit is hit
-  loginLimitReached(req, res, options) {
-    Logger.error(`[Server] Login rate limit (${options.max}) was hit for ip ${req.ip}`)
-    options.message = 'Too many attempts. Login temporarily locked.'
-  }
-
-  getLoginRateLimiter() {
-    return rateLimit({
-      windowMs: Database.serverSettings.rateLimitLoginWindow, // 5 minutes
-      max: Database.serverSettings.rateLimitLoginRequests,
-      skipSuccessfulRequests: true,
-      onLimitReached: this.loginLimitReached
-    })
-  }
-
-  logout(req, res) {
-    if (req.body.socketId) {
-      Logger.info(`[Server] User ${req.user ? req.user.username : 'Unknown'} is logging out with socket ${req.body.socketId}`)
-      SocketAuthority.logout(req.body.socketId)
-    }
-
-    res.sendStatus(200)
-  }
-
   /**
    * Gracefully stop server
    * Stops watcher and socket server

server/SocketAuthority.js
@@ -116,7 +116,6 @@ class SocketAuthority {
     // Logs
     socket.on('set_log_listener', (level) => Logger.addSocketListener(socket, level))
     socket.on('remove_log_listener', () => Logger.removeSocketListener(socket.id))
-    socket.on('fetch_daily_logs', () => this.Server.logManager.socketRequestDailyLogs(socket))

     // Sent automatically from socket.io clients
     socket.on('disconnect', (reason) => {
@@ -220,25 +219,6 @@ class SocketAuthority {
     client.socket.emit('init', initialPayload)
   }

-  logout(socketId) {
-    // Strip user and client from client and client socket
-    if (socketId && this.clients[socketId]) {
-      const client = this.clients[socketId]
-      const clientSocket = client.socket
-      Logger.debug(`[SocketAuthority] Found user client ${clientSocket.id}, Has user: ${!!client.user}, Socket has client: ${!!clientSocket.sheepClient}`)
-
-      if (client.user) {
-        Logger.debug('[SocketAuthority] User Offline ' + client.user.username)
-        this.adminEmitter('user_offline', client.user.toJSONForPublic())
-      }
-
-      delete this.clients[socketId].user
-      if (clientSocket && clientSocket.sheepClient) delete this.clients[socketId].socket.sheepClient
-    } else if (socketId) {
-      Logger.warn(`[SocketAuthority] No client for socket ${socketId}`)
-    }
-  }
-
   cancelScan(id) {
     Logger.debug('[SocketAuthority] Cancel scan', id)
     this.Server.cancelLibraryScan(id)
server/controllers/CustomMetadataProviderController.js (new file, 117 lines)
@@ -0,0 +1,117 @@
+const Logger = require('../Logger')
+const SocketAuthority = require('../SocketAuthority')
+const Database = require('../Database')
+
+const { validateUrl } = require('../utils/index')
+
+//
+// This is a controller for routes that don't have a home yet :(
+//
+class CustomMetadataProviderController {
+  constructor() { }
+
+  /**
+   * GET: /api/custom-metadata-providers
+   *
+   * @param {import('express').Request} req
+   * @param {import('express').Response} res
+   */
+  async getAll(req, res) {
+    const providers = await Database.customMetadataProviderModel.findAll()
+
+    res.json({
+      providers
+    })
+  }
+
+  /**
+   * POST: /api/custom-metadata-providers
+   *
+   * @param {import('express').Request} req
+   * @param {import('express').Response} res
+   */
+  async create(req, res) {
+    const { name, url, mediaType, authHeaderValue } = req.body
+
+    if (!name || !url || !mediaType) {
+      return res.status(400).send('Invalid request body')
+    }
+
+    const validUrl = validateUrl(url)
+    if (!validUrl) {
+      Logger.error(`[CustomMetadataProviderController] Invalid url "${url}"`)
+      return res.status(400).send('Invalid url')
+    }
+
+    const provider = await Database.customMetadataProviderModel.create({
+      name,
+      mediaType,
+      url,
+      authHeaderValue: !authHeaderValue ? null : authHeaderValue,
+    })
+
+    // TODO: Necessary to emit to all clients?
+    SocketAuthority.emitter('custom_metadata_provider_added', provider.toClientJson())
+
+    res.json({
+      provider
+    })
+  }
+
+  /**
+   * DELETE: /api/custom-metadata-providers/:id
+   *
+   * @param {import('express').Request} req
+   * @param {import('express').Response} res
+   */
+  async delete(req, res) {
+    const slug = `custom-${req.params.id}`
+
+    /** @type {import('../models/CustomMetadataProvider')} */
+    const provider = req.customMetadataProvider
+    const providerClientJson = provider.toClientJson()
+
+    const fallbackProvider = provider.mediaType === 'book' ? 'google' : 'itunes'
+
+    await provider.destroy()
+
+    // Libraries using this provider fall back to the default provider
+    await Database.libraryModel.update({
+      provider: fallbackProvider
+    }, {
+      where: {
+        provider: slug
+      }
+    })
+
+    // TODO: Necessary to emit to all clients?
+    SocketAuthority.emitter('custom_metadata_provider_removed', providerClientJson)
+
+    res.sendStatus(200)
+  }
+
+  /**
+   * Middleware that requires admin or up
+   *
+   * @param {import('express').Request} req
+   * @param {import('express').Response} res
+   * @param {import('express').NextFunction} next
+   */
+  async middleware(req, res, next) {
+    if (!req.user.isAdminOrUp) {
+      Logger.warn(`[CustomMetadataProviderController] Non-admin user "${req.user.username}" attempted to access route "${req.path}"`)
+      return res.sendStatus(403)
+    }
+
+    // If id param then add req.customMetadataProvider
+    if (req.params.id) {
+      req.customMetadataProvider = await Database.customMetadataProviderModel.findByPk(req.params.id)
+      if (!req.customMetadataProvider) {
+        return res.sendStatus(404)
+      }
+    }
+
+    next()
+  }
+}
+module.exports = new CustomMetadataProviderController()
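A hedged sketch of exercising the new endpoints (base URL and auth header are illustrative):

const headers = { 'Content-Type': 'application/json', Authorization: `Bearer ${apiToken}` }

// Register a custom book-metadata provider
await fetch('https://abs.example.com/api/custom-metadata-providers', {
  method: 'POST',
  headers,
  body: JSON.stringify({ name: 'My Provider', url: 'https://meta.example.com', mediaType: 'book' })
})

// List registered providers
const { providers } = await (await fetch('https://abs.example.com/api/custom-metadata-providers', { headers })).json()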
server/controllers/LibraryController.js
@@ -33,6 +33,14 @@ class LibraryController {
       return res.status(500).send('Invalid request')
     }

+    // Validate that the custom provider exists if given any
+    if (newLibraryPayload.provider?.startsWith('custom-')) {
+      if (!await Database.customMetadataProviderModel.checkExistsBySlug(newLibraryPayload.provider)) {
+        Logger.error(`[LibraryController] Custom metadata provider "${newLibraryPayload.provider}" does not exist`)
+        return res.status(400).send('Custom metadata provider does not exist')
+      }
+    }
+
     // Validate folder paths exist or can be created & resolve rel paths
     // returns 400 if a folder fails to access
     newLibraryPayload.folders = newLibraryPayload.folders.map(f => {
@@ -86,19 +94,27 @@ class LibraryController {
     })
   }

+  /**
+   * GET: /api/libraries/:id
+   *
+   * @param {import('express').Request} req
+   * @param {import('express').Response} res
+   */
   async findOne(req, res) {
     const includeArray = (req.query.include || '').split(',')
     if (includeArray.includes('filterdata')) {
       const filterdata = await libraryFilters.getFilterData(req.library.mediaType, req.library.id)
+      const customMetadataProviders = await Database.customMetadataProviderModel.getForClientByMediaType(req.library.mediaType)

       return res.json({
         filterdata,
         issues: filterdata.numIssues,
         numUserPlaylists: await Database.playlistModel.getNumPlaylistsForUserAndLibrary(req.user.id, req.library.id),
+        customMetadataProviders,
         library: req.library
       })
     }
-    return res.json(req.library)
+    res.json(req.library)
   }

   /**
@@ -115,6 +131,14 @@ class LibraryController {
   async update(req, res) {
     const library = req.library

+    // Validate that the custom provider exists if given any
+    if (req.body.provider?.startsWith('custom-')) {
+      if (!await Database.customMetadataProviderModel.checkExistsBySlug(req.body.provider)) {
+        Logger.error(`[LibraryController] Custom metadata provider "${req.body.provider}" does not exist`)
+        return res.status(400).send('Custom metadata provider does not exist')
+      }
+    }
+
     // Validate new folder paths exist or can be created & resolve rel paths
     // returns 400 if a new folder fails to access
     if (req.body.folders) {

server/controllers/LibraryItemController.js
@@ -124,6 +124,11 @@ class LibraryItemController {
     const libraryItem = req.libraryItem
     const mediaPayload = req.body

+    if (mediaPayload.url) {
+      await LibraryItemController.prototype.uploadCover.bind(this)(req, res, false)
+      if (res.writableEnded) return
+    }
+
     // Book specific
     if (libraryItem.isBook) {
       await this.createAuthorsAndSeriesForItemUpdate(mediaPayload, libraryItem.libraryId)
@@ -146,7 +151,7 @@ class LibraryItemController {
       seriesRemoved = libraryItem.media.metadata.series.filter(se => !seriesIdsInUpdate.includes(se.id))
     }

-    const hasUpdates = libraryItem.media.update(mediaPayload)
+    const hasUpdates = libraryItem.media.update(mediaPayload) || mediaPayload.url
     if (hasUpdates) {
       libraryItem.updatedAt = Date.now()
@@ -171,7 +176,7 @@ class LibraryItemController {
   }

   // POST: api/items/:id/cover
-  async uploadCover(req, res) {
+  async uploadCover(req, res, updateAndReturnJson = true) {
     if (!req.user.canUpload) {
       Logger.warn('User attempted to upload a cover without permission', req.user)
       return res.sendStatus(403)
@@ -196,12 +201,14 @@ class LibraryItemController {
       return res.status(500).send('Unknown error occurred')
     }

-    await Database.updateLibraryItem(libraryItem)
-    SocketAuthority.emitter('item_updated', libraryItem.toJSONExpanded())
-    res.json({
-      success: true,
-      cover: result.cover
-    })
+    if (updateAndReturnJson) {
+      await Database.updateLibraryItem(libraryItem)
+      SocketAuthority.emitter('item_updated', libraryItem.toJSONExpanded())
+      res.json({
+        success: true,
+        cover: result.cover
+      })
+    }
   }

   // PATCH: api/items/:id/cover
@@ -276,6 +283,9 @@ class LibraryItemController {
       return res.sendStatus(404)
     }

+    if (req.query.ts)
+      res.set('Cache-Control', 'private, max-age=86400')
+
     if (raw) { // any value
       if (global.XAccel) {
         const encodedURI = encodeUriPath(global.XAccel + libraryItem.media.coverPath)

server/controllers/MeController.js
@@ -336,7 +336,7 @@ class MeController {
   }

   /**
-   * GET: /api/stats/year/:year
+   * GET: /api/me/stats/year/:year
    *
    * @param {import('express').Request} req
    * @param {import('express').Response} res

server/controllers/MiscController.js
@@ -633,7 +633,7 @@ class MiscController {
     } else if (key === 'authOpenIDMobileRedirectURIs') {
       function isValidRedirectURI(uri) {
         if (typeof uri !== 'string') return false
-        const pattern = new RegExp('^\\w+://[\\w.-]+$', 'i')
+        const pattern = new RegExp('^\\w+://[\\w\\.-]+(/[\\w\\./-]*)*$', 'i')
         return pattern.test(uri)
       }
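The effect of the loosened pattern, as a quick check (URIs illustrative):

const oldPattern = new RegExp('^\\w+://[\\w.-]+$', 'i')
const newPattern = new RegExp('^\\w+://[\\w\\.-]+(/[\\w\\./-]*)*$', 'i')

oldPattern.test('audiobookshelf://oauth')          // true
oldPattern.test('audiobookshelf://oauth/callback') // false - path segments were rejected
newPattern.test('audiobookshelf://oauth/callback') // true - path segments are now allowed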
@@ -699,7 +699,7 @@ class MiscController {
   }

   /**
-   * GET: /api/me/stats/year/:year
+   * GET: /api/stats/year/:year
    *
    * @param {import('express').Request} req
    * @param {import('express').Response} res
@@ -717,5 +717,23 @@ class MiscController {
     const stats = await adminStats.getStatsForYear(year)
     res.json(stats)
   }

+  /**
+   * GET: /api/logger-data
+   * admin or up
+   *
+   * @param {import('express').Request} req
+   * @param {import('express').Response} res
+   */
+  async getLoggerData(req, res) {
+    if (!req.user.isAdminOrUp) {
+      Logger.error(`[MiscController] Non-admin user "${req.user.username}" attempted to get logger data`)
+      return res.sendStatus(403)
+    }
+
+    res.json({
+      currentDailyLogs: Logger.logManager.getMostRecentCurrentDailyLogs()
+    })
+  }
 }
 module.exports = new MiscController()

server/controllers/SearchController.js
@@ -43,12 +43,15 @@ class SearchController {
    */
   async findPodcasts(req, res) {
     const term = req.query.term
+    const country = req.query.country || 'us'
     if (!term) {
       Logger.error('[SearchController] Invalid request query param "term" is required')
       return res.status(400).send('Invalid request query param "term" is required')
     }

-    const results = await PodcastFinder.search(term)
+    const results = await PodcastFinder.search(term, {
+      country
+    })
     res.json(results)
   }

server/controllers/SessionController.js
@@ -161,7 +161,7 @@ class SessionController {
    * @typedef batchDeleteReqBody
    * @property {string[]} sessions
    *
-   * @param {import('express').Request<{}, {}, batchDeleteReqBody, {}} req
+   * @param {import('express').Request<{}, {}, batchDeleteReqBody, {}>} req
    * @param {import('express').Response} res
    */
   async batchDelete(req, res) {

server/controllers/UserController.js
@@ -194,6 +194,23 @@ class UserController {
     })
   }

+  /**
+   * PATCH: /api/users/:id/openid-unlink
+   *
+   * @param {import('express').Request} req
+   * @param {import('express').Response} res
+   */
+  async unlinkFromOpenID(req, res) {
+    Logger.debug(`[UserController] Unlinking user "${req.reqUser.username}" from OpenID with sub "${req.reqUser.authOpenIDSub}"`)
+    req.reqUser.authOpenIDSub = null
+    if (await Database.userModel.updateFromOld(req.reqUser)) {
+      SocketAuthority.clientEmitter(req.user.id, 'user_updated', req.reqUser.toJSONForBrowser())
+      res.sendStatus(200)
+    } else {
+      res.sendStatus(500)
+    }
+  }
+
   // GET: api/users/:id/listening-sessions
   async getListeningSessions(req, res) {
     var listeningSessions = await this.getUserListeningSessionsHelper(req.params.id)

server/finders/AuthorFinder.js
@@ -15,12 +15,19 @@ class AuthorFinder {
     return this.audnexus.findAuthorByASIN(asin, region)
   }

+  /**
+   *
+   * @param {string} name
+   * @param {string} region
+   * @param {Object} [options={}]
+   * @returns {Promise<import('../providers/Audnexus').AuthorSearchObj>}
+   */
   async findAuthorByName(name, region, options = {}) {
     if (!name) return null
     const maxLevenshtein = !isNaN(options.maxLevenshtein) ? Number(options.maxLevenshtein) : 3

     const author = await this.audnexus.findAuthorByName(name, region, maxLevenshtein)
-    if (!author || !author.name) {
+    if (!author?.name) {
       return null
     }
     return author
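Example use of the options parameter (author name and value are illustrative):

// Tighter matching than the default Levenshtein distance of 3
const author = await authorFinder.findAuthorByName('Brandon Sanderson', 'us', { maxLevenshtein: 2 })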
server/finders/BookFinder.js
@@ -5,6 +5,7 @@ const iTunes = require('../providers/iTunes')
 const Audnexus = require('../providers/Audnexus')
 const FantLab = require('../providers/FantLab')
 const AudiobookCovers = require('../providers/AudiobookCovers')
+const CustomProviderAdapter = require('../providers/CustomProviderAdapter')
 const Logger = require('../Logger')
 const { levenshteinDistance, escapeRegExp } = require('../utils/index')
@@ -17,6 +18,7 @@ class BookFinder {
     this.audnexus = new Audnexus()
     this.fantLab = new FantLab()
     this.audiobookCovers = new AudiobookCovers()
+    this.customProviderAdapter = new CustomProviderAdapter()

     this.providers = ['google', 'itunes', 'openlibrary', 'fantlab', 'audiobookcovers', 'audible', 'audible.ca', 'audible.uk', 'audible.au', 'audible.fr', 'audible.de', 'audible.jp', 'audible.it', 'audible.in', 'audible.es']
@@ -147,6 +149,20 @@ class BookFinder {
     return books
   }

+  /**
+   *
+   * @param {string} title
+   * @param {string} author
+   * @param {string} providerSlug
+   * @returns {Promise<Object[]>}
+   */
+  async getCustomProviderResults(title, author, providerSlug) {
+    const books = await this.customProviderAdapter.search(title, author, providerSlug, 'book')
+    if (this.verbose) Logger.debug(`Custom provider '${providerSlug}' Search Results: ${books.length || 0}`)
+
+    return books
+  }
+
   static TitleCandidates = class {

     constructor(cleanAuthor) {
@@ -315,6 +331,11 @@ class BookFinder {
     const maxFuzzySearches = !isNaN(options.maxFuzzySearches) ? Number(options.maxFuzzySearches) : 5
     let numFuzzySearches = 0

+    // Custom providers are assumed to be correct
+    if (provider.startsWith('custom-')) {
+      return this.getCustomProviderResults(title, author, provider)
+    }
+
     if (!title)
       return books
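A sketch of the early return (the provider slug is a placeholder for a registered custom provider's id):

// A library configured with a 'custom-' provider skips the fuzzy-search pipeline entirely
const books = await bookFinder.getCustomProviderResults('Dune', 'Frank Herbert', 'custom-<id>')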
@@ -397,8 +418,7 @@ class BookFinder {
       books = await this.getFantLabResults(title, author)
     } else if (provider === 'audiobookcovers') {
       books = await this.getAudiobookCoversResults(title)
-    }
-    else {
+    } else {
       books = await this.getGoogleBooksResults(title, author)
     }
     return books

server/finders/PodcastFinder.js
@@ -6,10 +6,16 @@ class PodcastFinder {
     this.iTunesApi = new iTunes()
   }

+  /**
+   *
+   * @param {string} term
+   * @param {{country:string}} options
+   * @returns {Promise<import('../providers/iTunes').iTunesPodcastSearchResult[]>}
+   */
   async search(term, options = {}) {
     if (!term) return null
     Logger.debug(`[iTunes] Searching for podcast with term "${term}"`)
-    var results = await this.iTunesApi.searchPodcasts(term, options)
+    const results = await this.iTunesApi.searchPodcasts(term, options)
     Logger.debug(`[iTunes] Podcast search for "${term}" returned ${results.length} results`)
     return results
   }
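Example of the new country option (term and country are illustrative; 'us' stays the default):

const results = await podcastFinder.search('Zeit Verbrechen', { country: 'de' })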
server/libs/expressRateLimit/LICENSE (deleted file)
@@ -1,20 +0,0 @@
-# MIT License
-
-Copyright 2021 Nathan Friedly
-
-Permission is hereby granted, free of charge, to any person obtaining a copy of
-this software and associated documentation files (the "Software"), to deal in
-the Software without restriction, including without limitation the rights to
-use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of
-the Software, and to permit persons to whom the Software is furnished to do so,
-subject to the following conditions:
-
-The above copyright notice and this permission notice shall be included in all
-copies or substantial portions of the Software.
-
-THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
-IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS
-FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR
-COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER
-IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN
-CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.

server/libs/expressRateLimit/index.js (deleted file)
@@ -1,196 +0,0 @@
-"use strict";
-
-//
-// modified for use in audiobookshelf
-// Source: https://github.com/nfriedly/express-rate-limit
-//
-
-const MemoryStore = require("./memory-store");
-
-function RateLimit(options) {
-  options = Object.assign(
-    {
-      windowMs: 60 * 1000, // milliseconds - how long to keep records of requests in memory
-      max: 5, // max number of recent connections during `window` milliseconds before sending a 429 response
-      message: "Too many requests, please try again later.",
-      statusCode: 429, // 429 status = Too Many Requests (RFC 6585)
-      headers: true, //Send custom rate limit header with limit and remaining
-      draft_polli_ratelimit_headers: false, //Support for the new RateLimit standardization headers
-      // ability to manually decide if request was successful. Used when `skipSuccessfulRequests` and/or `skipFailedRequests` are set to `true`
-      requestWasSuccessful: function (req, res) {
-        return res.statusCode < 400;
-      },
-      skipFailedRequests: false, // Do not count failed requests
-      skipSuccessfulRequests: false, // Do not count successful requests
-      // allows to create custom keys (by default user IP is used)
-      keyGenerator: function (req /*, res*/) {
-        if (!req.ip) {
-          console.error(
-            "express-rate-limit: req.ip is undefined - you can avoid this by providing a custom keyGenerator function, but it may be indicative of a larger issue."
-          );
-        }
-        return req.ip;
-      },
-      skip: function (/*req, res*/) {
-        return false;
-      },
-      handler: function (req, res /*, next, optionsUsed*/) {
-        res.status(options.statusCode).send(options.message);
-      },
-      onLimitReached: function (/*req, res, optionsUsed*/) { },
-      requestPropertyName: "rateLimit", // Parameter name appended to req object
-    },
-    options
-  );
-
-  // store to use for persisting rate limit data
-  options.store = options.store || new MemoryStore(options.windowMs);
-
-  // ensure that the store has the incr method
-  if (
-    typeof options.store.incr !== "function" ||
-    typeof options.store.resetKey !== "function" ||
-    (options.skipFailedRequests &&
-      typeof options.store.decrement !== "function")
-  ) {
-    throw new Error("The store is not valid.");
-  }
-
-  ["global", "delayMs", "delayAfter"].forEach((key) => {
-    // note: this doesn't trigger if delayMs or delayAfter are set to 0, because that essentially disables them
-    if (options[key]) {
-      throw new Error(
-        `The ${key} option was removed from express-rate-limit v3.`
-      );
-    }
-  });
-
-  function rateLimit(req, res, next) {
-    Promise.resolve(options.skip(req, res))
-      .then((skip) => {
-        if (skip) {
-          return next();
-        }
-
-        const key = options.keyGenerator(req, res);
-
-        options.store.incr(key, function (err, current, resetTime) {
-          if (err) {
-            return next(err);
-          }
-
-          const maxResult =
-            typeof options.max === "function"
-              ? options.max(req, res)
-              : options.max;
-
-          Promise.resolve(maxResult)
-            .then((max) => {
-              req[options.requestPropertyName] = {
-                limit: max,
-                current: current,
-                remaining: Math.max(max - current, 0),
-                resetTime: resetTime,
-              };
-
-              if (options.headers && !res.headersSent) {
-                res.setHeader("X-RateLimit-Limit", max);
-                res.setHeader(
-                  "X-RateLimit-Remaining",
-                  req[options.requestPropertyName].remaining
-                );
-                if (resetTime instanceof Date) {
-                  // if we have a resetTime, also provide the current date to help avoid issues with incorrect clocks
-                  res.setHeader("Date", new Date().toUTCString());
-                  res.setHeader(
-                    "X-RateLimit-Reset",
-                    Math.ceil(resetTime.getTime() / 1000)
-                  );
-                }
-              }
-              if (options.draft_polli_ratelimit_headers && !res.headersSent) {
-                res.setHeader("RateLimit-Limit", max);
-                res.setHeader(
-                  "RateLimit-Remaining",
-                  req[options.requestPropertyName].remaining
-                );
-                if (resetTime) {
-                  const deltaSeconds = Math.ceil(
-                    (resetTime.getTime() - Date.now()) / 1000
-                  );
-                  res.setHeader("RateLimit-Reset", Math.max(0, deltaSeconds));
-                }
-              }
-
-              if (
-                options.skipFailedRequests ||
-                options.skipSuccessfulRequests
-              ) {
-                let decremented = false;
-                const decrementKey = () => {
-                  if (!decremented) {
-                    options.store.decrement(key);
-                    decremented = true;
-                  }
-                };
-
-                if (options.skipFailedRequests) {
-                  res.on("finish", function () {
-                    if (!options.requestWasSuccessful(req, res)) {
-                      decrementKey();
-                    }
-                  });
-
-                  res.on("close", () => {
-                    if (!res.finished) {
-                      decrementKey();
-                    }
-                  });
-
-                  res.on("error", () => decrementKey());
-                }
-
-                if (options.skipSuccessfulRequests) {
-                  res.on("finish", function () {
-                    if (options.requestWasSuccessful(req, res)) {
-                      options.store.decrement(key);
-                    }
-                  });
-                }
-              }
-
-              if (max && current === max + 1) {
-                options.onLimitReached(req, res, options);
-              }
-
-              if (max && current > max) {
-                if (options.headers && !res.headersSent) {
-                  res.setHeader(
-                    "Retry-After",
-                    Math.ceil(options.windowMs / 1000)
-                  );
-                }
-                return options.handler(req, res, next, options);
-              }
-
-              next();
-
-              return null;
-            })
-            .catch(next);
-        });
-
-        return null;
-      })
-      .catch(next);
-  }
-
-  rateLimit.resetKey = options.store.resetKey.bind(options.store);
-
-  // Backward compatibility function
-  rateLimit.resetIp = rateLimit.resetKey;
-
-  return rateLimit;
-}
-
-module.exports = RateLimit;

server/libs/expressRateLimit/memory-store.js (deleted file)
@@ -1,47 +0,0 @@
-"use strict";
-
-function calculateNextResetTime(windowMs) {
-  const d = new Date();
-  d.setMilliseconds(d.getMilliseconds() + windowMs);
-  return d;
-}
-
-function MemoryStore(windowMs) {
-  let hits = {};
-  let resetTime = calculateNextResetTime(windowMs);
-
-  this.incr = function (key, cb) {
-    if (hits[key]) {
-      hits[key]++;
-    } else {
-      hits[key] = 1;
-    }
-
-    cb(null, hits[key], resetTime);
-  };
-
-  this.decrement = function (key) {
-    if (hits[key]) {
-      hits[key]--;
-    }
-  };
-
-  // export an API to allow hits all IPs to be reset
-  this.resetAll = function () {
-    hits = {};
-    resetTime = calculateNextResetTime(windowMs);
-  };
-
-  // export an API to allow hits from one IP to be reset
-  this.resetKey = function (key) {
-    delete hits[key];
-  };
-
-  // simply reset ALL hits every windowMs
-  const interval = setInterval(this.resetAll, windowMs);
-  if (interval.unref) {
-    interval.unref();
-  }
-}
-
-module.exports = MemoryStore;
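For context, a short demonstration of the removed store's fixed-window behavior (key value illustrative):

const store = new MemoryStore(60000)
// All hits within the same 60s window share one reset time;
// the interval-driven resetAll() then wipes every counter at once
store.incr('203.0.113.7', (err, current, resetTime) => {
  console.log(current, resetTime) // 1, roughly now + 60s
})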
server/managers/BinaryManager.js
@@ -1,3 +1,6 @@
+const child_process = require('child_process')
+const { promisify } = require('util')
+const exec = promisify(child_process.exec)
 const path = require('path')
 const which = require('../libs/which')
 const fs = require('../libs/fsExtra')
@ -8,67 +11,143 @@ const fileUtils = require('../utils/fileUtils')

class BinaryManager {

  defaultRequiredBinaries = [
    { name: 'ffmpeg', envVariable: 'FFMPEG_PATH' },
    { name: 'ffprobe', envVariable: 'FFPROBE_PATH' }
    { name: 'ffmpeg', envVariable: 'FFMPEG_PATH', validVersions: ['5.1'] },
    { name: 'ffprobe', envVariable: 'FFPROBE_PATH', validVersions: ['5.1'] }
  ]

  constructor(requiredBinaries = this.defaultRequiredBinaries) {
    this.requiredBinaries = requiredBinaries
    this.mainInstallPath = process.pkg ? path.dirname(process.execPath) : global.appRoot
    this.altInstallPath = global.ConfigPath
    this.initialized = false
    this.exec = exec
  }

  async init() {
    if (this.initialized) return
    const missingBinaries = await this.findRequiredBinaries()
    if (missingBinaries.length == 0) return
    await this.removeOldBinaries(missingBinaries)
    await this.install(missingBinaries)
    const missingBinariesAfterInstall = await this.findRequiredBinaries()
    if (missingBinariesAfterInstall.length != 0) {
    if (missingBinariesAfterInstall.length) {
      Logger.error(`[BinaryManager] Failed to find or install required binaries: ${missingBinariesAfterInstall.join(', ')}`)
      process.exit(1)
    }
    this.initialized = true
  }

  /**
   * Remove old/invalid binaries in main or alt install path
   *
   * @param {string[]} binaryNames
   */
  async removeOldBinaries(binaryNames) {
    for (const binaryName of binaryNames) {
      const executable = this.getExecutableFileName(binaryName)
      const mainInstallPath = path.join(this.mainInstallPath, executable)
      if (await fs.pathExists(mainInstallPath)) {
        Logger.debug(`[BinaryManager] Removing old binary: ${mainInstallPath}`)
        await fs.remove(mainInstallPath)
      }
      const altInstallPath = path.join(this.altInstallPath, executable)
      if (await fs.pathExists(altInstallPath)) {
        Logger.debug(`[BinaryManager] Removing old binary: ${altInstallPath}`)
        await fs.remove(altInstallPath)
      }
    }
  }

  /**
   * Find required binaries and return array of binary names that are missing
   *
   * @returns {Promise<string[]>}
   */
  async findRequiredBinaries() {
    const missingBinaries = []
    for (const binary of this.requiredBinaries) {
      const binaryPath = await this.findBinary(binary.name, binary.envVariable)
      const binaryPath = await this.findBinary(binary.name, binary.envVariable, binary.validVersions)
      if (binaryPath) {
        Logger.info(`[BinaryManager] Found ${binary.name} at ${binaryPath}`)
        Logger.info(`[BinaryManager] Found valid binary ${binary.name} at ${binaryPath}`)
        if (process.env[binary.envVariable] !== binaryPath) {
          Logger.info(`[BinaryManager] Updating process.env.${binary.envVariable}`)
          process.env[binary.envVariable] = binaryPath
        }
      } else {
        Logger.info(`[BinaryManager] ${binary.name} not found`)
        Logger.info(`[BinaryManager] ${binary.name} not found or version too old`)
        missingBinaries.push(binary.name)
      }
    }
    return missingBinaries
  }

  async findBinary(name, envVariable) {
    const executable = name + (process.platform == 'win32' ? '.exe' : '')
  /**
   * Find absolute path for binary
   *
   * @param {string} name
   * @param {string} envVariable
   * @param {string[]} [validVersions]
   * @returns {Promise<string>} Path to binary
   */
  async findBinary(name, envVariable, validVersions = []) {
    const executable = this.getExecutableFileName(name)
    // 1. check path specified in environment variable
    const defaultPath = process.env[envVariable]
    if (defaultPath && await fs.pathExists(defaultPath)) return defaultPath
    if (await this.isBinaryGood(defaultPath, validVersions)) return defaultPath
    // 2. find the first instance of the binary in the PATH environment variable
    const whichPath = which.sync(executable, { nothrow: true })
    if (whichPath) return whichPath
    if (await this.isBinaryGood(whichPath, validVersions)) return whichPath
    // 3. check main install path (binary root dir)
    const mainInstallPath = path.join(this.mainInstallPath, executable)
    if (await fs.pathExists(mainInstallPath)) return mainInstallPath
    if (await this.isBinaryGood(mainInstallPath, validVersions)) return mainInstallPath
    // 4. check alt install path (/config)
    const altInstallPath = path.join(this.altInstallPath, executable)
    if (await fs.pathExists(altInstallPath)) return altInstallPath
    if (await this.isBinaryGood(altInstallPath, validVersions)) return altInstallPath
    return null
  }

  /**
   * Check binary path exists and optionally check version is valid
   *
   * @param {string} binaryPath
   * @param {string[]} [validVersions]
   * @returns {Promise<boolean>}
   */
  async isBinaryGood(binaryPath, validVersions = []) {
    if (!binaryPath || !await fs.pathExists(binaryPath)) return false
    if (!validVersions.length) return true
    try {
      const { stdout } = await this.exec('"' + binaryPath + '"' + ' -version')
      const version = stdout.match(/version\s([\d\.]+)/)?.[1]
      if (!version) return false
      return validVersions.some(validVersion => version.startsWith(validVersion))
    } catch (err) {
      Logger.error(`[BinaryManager] Failed to check version of ${binaryPath}`)
      return false
    }
  }

  /**
   *
   * @param {string[]} binaries
   */
  async install(binaries) {
    if (binaries.length == 0) return
    if (!binaries.length) return
    Logger.info(`[BinaryManager] Installing binaries: ${binaries.join(', ')}`)
    let destination = await fileUtils.isWritable(this.mainInstallPath) ? this.mainInstallPath : this.altInstallPath
    await ffbinaries.downloadBinaries(binaries, { destination })
    await ffbinaries.downloadBinaries(binaries, { destination, version: '5.1', force: true })
    Logger.info(`[BinaryManager] Binaries installed to ${destination}`)
  }

  /**
   * Append .exe to binary name for Windows
   *
   * @param {string} name
   * @returns {string}
   */
  getExecutableFileName(name) {
    return name + (process.platform == 'win32' ? '.exe' : '')
  }
}

module.exports = BinaryManager
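The version gate in isBinaryGood() is a simple prefix match on the string parsed out of `<binary> -version`. A sketch of what that evaluates to; the sample stdout is assumed:

const stdout = 'ffmpeg version 5.1.4-0+deb12u1 Copyright (c) 2000-2023 the FFmpeg developers' // assumed output
const version = stdout.match(/version\s([\d\.]+)/)?.[1] // -> '5.1.4'
const validVersions = ['5.1']
console.log(validVersions.some(v => version.startsWith(v))) // true, so the binary is accepted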
@ -1,19 +1,34 @@

const Path = require('path')
const fs = require('../libs/fsExtra')

const Logger = require('../Logger')
const DailyLog = require('../objects/DailyLog')

const Logger = require('../Logger')
const { LogLevel } = require('../utils/constants')

const TAG = '[LogManager]'

/**
 * @typedef LogObject
 * @property {string} timestamp
 * @property {string} source
 * @property {string} message
 * @property {string} levelName
 * @property {number} level
 */

class LogManager {
  constructor() {
    this.DailyLogPath = Path.posix.join(global.MetadataPath, 'logs', 'daily')
    this.ScanLogPath = Path.posix.join(global.MetadataPath, 'logs', 'scans')

    /** @type {DailyLog} */
    this.currentDailyLog = null

    /** @type {LogObject[]} */
    this.dailyLogBuffer = []

    /** @type {string[]} */
    this.dailyLogFiles = []
  }

@ -26,12 +41,12 @@ class LogManager {
    await fs.ensureDir(this.ScanLogPath)
  }

  async ensureScanLogDir() {
    if (!(await fs.pathExists(this.ScanLogPath))) {
      await fs.mkdir(this.ScanLogPath)
    }
  }

  /**
   * 1. Ensure log directories exist
   * 2. Load daily log files
   * 3. Remove old daily log files
   * 4. Create/set current daily log file
   */
  async init() {
    await this.ensureLogDirs()

@ -46,11 +61,11 @@ class LogManager {
    }
  }

    // set current daily log file or create if does not exist
    const currentDailyLogFilename = DailyLog.getCurrentDailyLogFilename()
    Logger.info(TAG, `Init current daily log filename: ${currentDailyLogFilename}`)

    this.currentDailyLog = new DailyLog()
    this.currentDailyLog.setData({ dailyLogDirPath: this.DailyLogPath })
    this.currentDailyLog = new DailyLog(this.DailyLogPath)

    if (this.dailyLogFiles.includes(currentDailyLogFilename)) {
      Logger.debug(TAG, `Daily log file already exists - set in Logger`)

@ -59,7 +74,7 @@ class LogManager {
      this.dailyLogFiles.push(this.currentDailyLog.filename)
    }

    // Log buffered Logs
    // Log buffered daily logs
    if (this.dailyLogBuffer.length) {
      this.dailyLogBuffer.forEach((logObj) => {
        this.currentDailyLog.appendLog(logObj)

@ -68,9 +83,12 @@ class LogManager {
    }
  }

  /**
   * Load all daily log filenames in /metadata/logs/daily
   */
  async scanLogFiles() {
    const dailyFiles = await fs.readdir(this.DailyLogPath)
    if (dailyFiles && dailyFiles.length) {
    if (dailyFiles?.length) {
      dailyFiles.forEach((logFile) => {
        if (Path.extname(logFile) === '.txt') {
          Logger.debug('Daily Log file found', logFile)

@ -83,30 +101,38 @@ class LogManager {
    this.dailyLogFiles.sort()
  }

  async removeOldestLog() {
    if (!this.dailyLogFiles.length) return
    const oldestLog = this.dailyLogFiles[0]
    return this.removeLogFile(oldestLog)
  }

  /**
   *
   * @param {string} filename
   */
  async removeLogFile(filename) {
    const fullPath = Path.join(this.DailyLogPath, filename)
    const exists = await fs.pathExists(fullPath)
    if (!exists) {
      Logger.error(TAG, 'Invalid log dne ' + fullPath)
      this.dailyLogFiles = this.dailyLogFiles.filter(dlf => dlf.filename !== filename)
      this.dailyLogFiles = this.dailyLogFiles.filter(dlf => dlf !== filename)
    } else {
      try {
        await fs.unlink(fullPath)
        Logger.info(TAG, 'Removed daily log: ' + filename)
        this.dailyLogFiles = this.dailyLogFiles.filter(dlf => dlf.filename !== filename)
        this.dailyLogFiles = this.dailyLogFiles.filter(dlf => dlf !== filename)
      } catch (error) {
        Logger.error(TAG, 'Failed to unlink log file ' + fullPath)
      }
    }
  }

  logToFile(logObj) {
  /**
   *
   * @param {LogObject} logObj
   */
  async logToFile(logObj) {
    // Fatal crashes get logged to a separate file
    if (logObj.level === LogLevel.FATAL) {
      await this.logCrashToFile(logObj)
    }

    // Buffer when logging before daily logs have been initialized
    if (!this.currentDailyLog) {
      this.dailyLogBuffer.push(logObj)
      return

@ -114,25 +140,39 @@ class LogManager {

    // Check log rolls to next day
    if (this.currentDailyLog.id !== DailyLog.getCurrentDateString()) {
      const newDailyLog = new DailyLog()
      newDailyLog.setData({ dailyLogDirPath: this.DailyLogPath })
      this.currentDailyLog = newDailyLog
      this.currentDailyLog = new DailyLog(this.DailyLogPath)
      if (this.dailyLogFiles.length > this.loggerDailyLogsToKeep) {
        this.removeOldestLog()
        // Remove oldest log
        this.removeLogFile(this.dailyLogFiles[0])
      }
    }

    // Append log line to log file
    this.currentDailyLog.appendLog(logObj)
    return this.currentDailyLog.appendLog(logObj)
  }

  socketRequestDailyLogs(socket) {
    if (!this.currentDailyLog) {
      return
    }
  /**
   *
   * @param {LogObject} logObj
   */
  async logCrashToFile(logObj) {
    const line = JSON.stringify(logObj) + '\n'

    const lastLogs = this.currentDailyLog.logs.slice(-5000)
    socket.emit('daily_logs', lastLogs)
    const logsDir = Path.join(global.MetadataPath, 'logs')
    await fs.ensureDir(logsDir)
    const crashLogPath = Path.join(logsDir, 'crash_logs.txt')
    return fs.writeFile(crashLogPath, line, { flag: "a+" }).catch((error) => {
      console.log('[LogManager] Failed to append crash log', error)
    })
  }

  /**
   * Most recent 5000 daily logs
   *
   * @returns {string}
   */
  getMostRecentCurrentDailyLogs() {
    return this.currentDailyLog?.logs.slice(-5000) || ''
  }
}
module.exports = LogManager
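For reference, a LogObject as described by the typedef above, with values assumed for illustration; FATAL-level entries are additionally appended to crash_logs.txt by logCrashToFile():

const logObj = {
  timestamp: '2023-12-01 12:00:00.000', // assumed format
  source: 'Server.js:123',
  message: 'Example log entry',
  levelName: 'INFO',
  level: 2 // numeric LogLevel constant; exact values live in utils/constants
}
await logManager.logToFile(logObj) // buffered until init() has created the daily log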
103 server/models/CustomMetadataProvider.js Normal file
@ -0,0 +1,103 @@

const { DataTypes, Model } = require('sequelize')

/**
 * @typedef ClientCustomMetadataProvider
 * @property {UUIDV4} id
 * @property {string} name
 * @property {string} url
 * @property {string} slug
 */

class CustomMetadataProvider extends Model {
  constructor(values, options) {
    super(values, options)

    /** @type {UUIDV4} */
    this.id
    /** @type {string} */
    this.mediaType
    /** @type {string} */
    this.name
    /** @type {string} */
    this.url
    /** @type {string} */
    this.authHeaderValue
    /** @type {Object} */
    this.extraData
    /** @type {Date} */
    this.createdAt
    /** @type {Date} */
    this.updatedAt
  }

  getSlug() {
    return `custom-${this.id}`
  }

  /**
   * Safe for clients
   * @returns {ClientCustomMetadataProvider}
   */
  toClientJson() {
    return {
      id: this.id,
      name: this.name,
      mediaType: this.mediaType,
      slug: this.getSlug()
    }
  }

  /**
   * Get providers for client by media type
   * Currently only available for "book" media type
   *
   * @param {string} mediaType
   * @returns {Promise<ClientCustomMetadataProvider[]>}
   */
  static async getForClientByMediaType(mediaType) {
    if (mediaType !== 'book') return []
    const customMetadataProviders = await this.findAll({
      where: {
        mediaType
      }
    })
    return customMetadataProviders.map(cmp => cmp.toClientJson())
  }

  /**
   * Check if provider exists by slug
   *
   * @param {string} providerSlug
   * @returns {Promise<boolean>}
   */
  static async checkExistsBySlug(providerSlug) {
    const providerId = providerSlug?.split?.('custom-')[1]
    if (!providerId) return false

    return (await this.count({ where: { id: providerId } })) > 0
  }

  /**
   * Initialize model
   * @param {import('../Database').sequelize} sequelize
   */
  static init(sequelize) {
    super.init({
      id: {
        type: DataTypes.UUID,
        defaultValue: DataTypes.UUIDV4,
        primaryKey: true
      },
      name: DataTypes.STRING,
      mediaType: DataTypes.STRING,
      url: DataTypes.STRING,
      authHeaderValue: DataTypes.STRING,
      extraData: DataTypes.JSON
    }, {
      sequelize,
      modelName: 'customMetadataProvider'
    })
  }
}

module.exports = CustomMetadataProvider
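The slug format ties getSlug() and checkExistsBySlug() together: the client-facing provider id is just the row id with a `custom-` prefix. A round-trip sketch; the UUID is made up:

const provider = { id: '7d1c2a9e-0000-0000-0000-000000000000' } // assumed row
const slug = `custom-${provider.id}`      // what getSlug() returns
const parsedId = slug.split('custom-')[1] // what checkExistsBySlug() extracts
console.log(parsedId === provider.id)     // true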
@ -225,6 +225,12 @@ class LibraryItem extends Model {
    return newLibraryItem
  }

  /**
   * Updates libraryItem, book, authors and series from old library item
   *
   * @param {oldLibraryItem} oldLibraryItem
   * @returns {Promise<boolean>} true if updates were made
   */
  static async fullUpdateFromOld(oldLibraryItem) {
    const libraryItemExpanded = await this.findByPk(oldLibraryItem.id, {
      include: [

@ -306,17 +312,18 @@ class LibraryItem extends Model {
    const existingAuthors = libraryItemExpanded.media.authors || []
    const existingSeriesAll = libraryItemExpanded.media.series || []
    const updatedAuthors = oldLibraryItem.media.metadata.authors || []
    const uniqueUpdatedAuthors = updatedAuthors.filter((au, idx) => updatedAuthors.findIndex(a => a.id === au.id) === idx)
    const updatedSeriesAll = oldLibraryItem.media.metadata.series || []

    for (const existingAuthor of existingAuthors) {
      // Author was removed from Book
      if (!updatedAuthors.some(au => au.id === existingAuthor.id)) {
      if (!uniqueUpdatedAuthors.some(au => au.id === existingAuthor.id)) {
        Logger.debug(`[LibraryItem] "${libraryItemExpanded.media.title}" author "${existingAuthor.name}" was removed`)
        await this.sequelize.models.bookAuthor.removeByIds(existingAuthor.id, libraryItemExpanded.media.id)
        hasUpdates = true
      }
    }
    for (const updatedAuthor of updatedAuthors) {
    for (const updatedAuthor of uniqueUpdatedAuthors) {
      // Author was added
      if (!existingAuthors.some(au => au.id === updatedAuthor.id)) {
        Logger.debug(`[LibraryItem] "${libraryItemExpanded.media.title}" author "${updatedAuthor.name}" was added`)

@ -372,6 +379,9 @@ class LibraryItem extends Model {
      if (!areEquivalent(updatedLibraryItem[key], existingValue, true)) {
        Logger.debug(`[LibraryItem] "${libraryItemExpanded.media.title}" ${key} updated from ${existingValue} to ${updatedLibraryItem[key]}`)
        hasLibraryItemUpdates = true
        if (key === 'updatedAt') {
          libraryItemExpanded.changed('updatedAt', true)
        }
      }
    }
    if (hasLibraryItemUpdates) {

@ -399,6 +409,7 @@ class LibraryItem extends Model {
      isInvalid: !!oldLibraryItem.isInvalid,
      mtime: oldLibraryItem.mtimeMs,
      ctime: oldLibraryItem.ctimeMs,
      updatedAt: oldLibraryItem.updatedAt,
      birthtime: oldLibraryItem.birthtimeMs,
      size: oldLibraryItem.size,
      lastScan: oldLibraryItem.lastScan,
@ -118,7 +118,9 @@ class PlaybackSession extends Model {

  static createFromOld(oldPlaybackSession) {
    const playbackSession = this.getFromOld(oldPlaybackSession)
    return this.create(playbackSession)
    return this.create(playbackSession, {
      silent: true
    })
  }

  static updateFromOld(oldPlaybackSession) {

@ -126,7 +128,8 @@ class PlaybackSession extends Model {
    return this.update(playbackSession, {
      where: {
        id: playbackSession.id
      }
      },
      silent: true
    })
  }
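The `silent: true` option is standard Sequelize: it skips bumping the updatedAt timestamp on create/update, so restored sessions keep their original modification time rather than the time of the sync. The same pattern in isolation, with model and values assumed:

await PlaybackSession.update(values, {
  where: { id: sessionId },
  silent: true // do not touch updatedAt
})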
@ -1,23 +1,28 @@

const Path = require('path')
const date = require('../libs/dateAndTime')
const fs = require('../libs/fsExtra')
const { readTextFile } = require('../utils/fileUtils')
const fileUtils = require('../utils/fileUtils')
const Logger = require('../Logger')

class DailyLog {
  constructor() {
    this.id = null
    this.datePretty = null
  /**
   *
   * @param {string} dailyLogDirPath Path to daily logs /metadata/logs/daily
   */
  constructor(dailyLogDirPath) {
    this.id = date.format(new Date(), 'YYYY-MM-DD')

    this.dailyLogDirPath = null
    this.filename = null
    this.path = null
    this.fullPath = null
    this.dailyLogDirPath = dailyLogDirPath
    this.filename = this.id + '.txt'
    this.fullPath = Path.join(this.dailyLogDirPath, this.filename)

    this.createdAt = null
    this.createdAt = Date.now()

    /** @type {import('../managers/LogManager').LogObject[]} */
    this.logs = []
    /** @type {string[]} */
    this.bufferedLogLines = []

    this.locked = false
  }

@ -32,8 +37,6 @@ class DailyLog {
  toJSON() {
    return {
      id: this.id,
      datePretty: this.datePretty,
      path: this.path,
      dailyLogDirPath: this.dailyLogDirPath,
      fullPath: this.fullPath,
      filename: this.filename,

@ -41,36 +44,34 @@ class DailyLog {
    }
  }

  setData(data) {
    this.id = date.format(new Date(), 'YYYY-MM-DD')
    this.datePretty = date.format(new Date(), 'ddd, MMM D YYYY')

    this.dailyLogDirPath = data.dailyLogDirPath

    this.filename = this.id + '.txt'
    this.path = Path.join('backups', this.filename)
    this.fullPath = Path.join(this.dailyLogDirPath, this.filename)

    this.createdAt = Date.now()
  }

  async appendBufferedLogs() {
    var buffered = [...this.bufferedLogLines]
  /**
   * Append all buffered lines to daily log file
   */
  appendBufferedLogs() {
    let buffered = [...this.bufferedLogLines]
    this.bufferedLogLines = []

    var oneBigLog = ''
    let oneBigLog = ''
    buffered.forEach((logLine) => {
      oneBigLog += logLine
    })
    this.appendLogLine(oneBigLog)
    return this.appendLogLine(oneBigLog)
  }

  async appendLog(logObj) {
  /**
   *
   * @param {import('../managers/LogManager').LogObject} logObj
   */
  appendLog(logObj) {
    this.logs.push(logObj)
    var line = JSON.stringify(logObj) + '\n'
    this.appendLogLine(line)
    return this.appendLogLine(JSON.stringify(logObj) + '\n')
  }

  /**
   * Append log to daily log file
   *
   * @param {string} line
   */
  async appendLogLine(line) {
    if (this.locked) {
      this.bufferedLogLines.push(line)

@ -84,24 +85,29 @@ class DailyLog {

    this.locked = false
    if (this.bufferedLogLines.length) {
      this.appendBufferedLogs()
      await this.appendBufferedLogs()
    }
  }

  /**
   * Load all logs from file
   * Parses lines and re-saves the file if bad lines are removed
   */
  async loadLogs() {
    var exists = await fs.pathExists(this.fullPath)
    if (!exists) {
    if (!await fs.pathExists(this.fullPath)) {
      console.error('Daily log does not exist')
      return
    }

    var text = await readTextFile(this.fullPath)
    const text = await fileUtils.readTextFile(this.fullPath)

    var hasFailures = false
    let hasFailures = false

    var logLines = text.split(/\r?\n/)
    let logLines = text.split(/\r?\n/)
    // remove last log if empty
    if (logLines.length && !logLines[logLines.length - 1]) logLines = logLines.slice(0, -1)

    // JSON parse log lines
    this.logs = logLines.map(t => {
      if (!t) {
        hasFailures = true

@ -118,7 +124,7 @@ class DailyLog {

    // Rewrite log file to remove errors
    if (hasFailures) {
      var newLogLines = this.logs.map(l => JSON.stringify(l)).join('\n') + '\n'
      const newLogLines = this.logs.map(l => JSON.stringify(l)).join('\n') + '\n'
      await fs.writeFile(this.fullPath, newLogLines)
      console.log('Re-Saved log file to remove bad lines')
    }
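Worth noting from the code above: appendLogLine() buffers lines while `locked` is true (a write is in flight) and flushes the buffer via appendBufferedLogs() once the write completes, so concurrent appends cannot interleave inside the file. A sketch of the observable behavior, with the path and timing assumed:

const log = new DailyLog('/metadata/logs/daily') // path assumed
log.appendLog({ message: 'first' })  // starts a write, sets locked = true
log.appendLog({ message: 'second' }) // arrives while locked, lands in bufferedLogLines
// when the first write finishes: locked = false and the buffer is flushed in order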
@ -42,7 +42,13 @@ class Podcast {
    this.autoDownloadSchedule = podcast.autoDownloadSchedule || '0 * * * *' // Added in 2.1.3 so default to hourly
    this.lastEpisodeCheck = podcast.lastEpisodeCheck || 0
    this.maxEpisodesToKeep = podcast.maxEpisodesToKeep || 0
    this.maxNewEpisodesToDownload = podcast.maxNewEpisodesToDownload || 3

    // Default is 3 but 0 is allowed
    if (typeof podcast.maxNewEpisodesToDownload !== 'number') {
      this.maxNewEpisodesToDownload = 3
    } else {
      this.maxNewEpisodesToDownload = podcast.maxNewEpisodesToDownload
    }
  }

  toJSON() {
@ -10,6 +10,7 @@ class LibrarySettings {
    this.audiobooksOnly = false
    this.hideSingleBookSeries = false // Do not show series that only have 1 book
    this.metadataPrecedence = ['folderStructure', 'audioMetatags', 'nfoFile', 'txtFiles', 'opfFile', 'absMetadata']
    this.podcastSearchRegion = 'us'

    if (settings) {
      this.construct(settings)

@ -30,6 +31,7 @@ class LibrarySettings {
      // Added in v2.4.5
      this.metadataPrecedence = ['folderStructure', 'audioMetatags', 'nfoFile', 'txtFiles', 'opfFile', 'absMetadata']
    }
    this.podcastSearchRegion = settings.podcastSearchRegion || 'us'
  }

  toJSON() {

@ -41,7 +43,8 @@ class LibrarySettings {
      autoScanCronExpression: this.autoScanCronExpression,
      audiobooksOnly: this.audiobooksOnly,
      hideSingleBookSeries: this.hideSingleBookSeries,
      metadataPrecedence: [...this.metadataPrecedence]
      metadataPrecedence: [...this.metadataPrecedence],
      podcastSearchRegion: this.podcastSearchRegion
    }
  }
@ -113,7 +113,7 @@ class ServerSettings {
    this.version = settings.version || null
    this.buildNumber = settings.buildNumber || 0 // Added v2.4.5

    this.authLoginCustomMessage = settings.authLoginCustomMessage || null // Added v2.7.3
    this.authLoginCustomMessage = settings.authLoginCustomMessage || null // Added v2.8.0
    this.authActiveAuthMethods = settings.authActiveAuthMethods || ['local']

    this.authOpenIDIssuerURL = settings.authOpenIDIssuerURL || null
@ -117,7 +117,8 @@ class User {
      createdAt: this.createdAt,
      permissions: this.permissions,
      librariesAccessible: [...this.librariesAccessible],
      itemTagsSelected: [...this.itemTagsSelected]
      itemTagsSelected: [...this.itemTagsSelected],
      hasOpenIDLink: !!this.authOpenIDSub
    }
    if (minimal) {
      delete json.mediaProgress
@ -2,15 +2,30 @@ const axios = require('axios')
const { levenshteinDistance } = require('../utils/index')
const Logger = require('../Logger')

/**
 * @typedef AuthorSearchObj
 * @property {string} asin
 * @property {string} description
 * @property {string} image
 * @property {string} name
 */

class Audnexus {
  constructor() {
    this.baseUrl = 'https://api.audnex.us'
  }

  /**
   *
   * @param {string} name
   * @param {string} region
   * @returns {Promise<{asin:string, name:string}[]>}
   */
  authorASINsRequest(name, region) {
    name = encodeURIComponent(name)
    const regionQuery = region ? `&region=${region}` : ''
    const authorRequestUrl = `${this.baseUrl}/authors?name=${name}${regionQuery}`
    const searchParams = new URLSearchParams()
    searchParams.set('name', name)
    if (region) searchParams.set('region', region)
    const authorRequestUrl = `${this.baseUrl}/authors?${searchParams.toString()}`
    Logger.info(`[Audnexus] Searching for author "${authorRequestUrl}"`)
    return axios.get(authorRequestUrl).then((res) => {
      return res.data || []

@ -20,6 +35,12 @@ class Audnexus {
    })
  }

  /**
   *
   * @param {string} asin
   * @param {string} region
   * @returns {Promise<AuthorSearchObj>}
   */
  authorRequest(asin, region) {
    asin = encodeURIComponent(asin)
    const regionQuery = region ? `?region=${region}` : ''

@ -33,6 +54,12 @@ class Audnexus {
    })
  }

  /**
   *
   * @param {string} asin
   * @param {string} region
   * @returns {Promise<AuthorSearchObj>}
   */
  async findAuthorByASIN(asin, region) {
    const author = await this.authorRequest(asin, region)
    if (!author) {

@ -46,14 +73,28 @@ class Audnexus {
    }
  }

  /**
   *
   * @param {string} name
   * @param {string} region
   * @param {number} maxLevenshtein
   * @returns {Promise<AuthorSearchObj>}
   */
  async findAuthorByName(name, region, maxLevenshtein = 3) {
    Logger.debug(`[Audnexus] Looking up author by name ${name}`)
    const asins = await this.authorASINsRequest(name, region)
    const matchingAsin = asins.find(obj => levenshteinDistance(obj.name, name) <= maxLevenshtein)
    if (!matchingAsin) {
    const authorAsinObjs = await this.authorASINsRequest(name, region)

    let closestMatch = null
    authorAsinObjs.forEach((authorAsinObj) => {
      authorAsinObj.levenshteinDistance = levenshteinDistance(authorAsinObj.name, name)
      if (!closestMatch || closestMatch.levenshteinDistance > authorAsinObj.levenshteinDistance) {
        closestMatch = authorAsinObj
      }
    })
    if (!closestMatch || closestMatch.levenshteinDistance > maxLevenshtein) {
      return null
    }
    const author = await this.authorRequest(matchingAsin.asin)
    const author = await this.authorRequest(closestMatch.asin)
    if (!author) {
      return null
    }
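The rewritten findAuthorByName() now scans all candidates for the smallest Levenshtein distance instead of taking the first one under the cutoff. Two worked distances against the default maxLevenshtein of 3; the names are chosen for illustration:

const { levenshteinDistance } = require('../utils/index')
console.log(levenshteinDistance('Brandon Sandersen', 'Brandon Sanderson')) // 1 -> accepted (<= 3)
console.log(levenshteinDistance('JRR Tolkien', 'J. R. R. Tolkien'))        // 5 -> rejected (> 3)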
93 server/providers/CustomProviderAdapter.js Normal file
@ -0,0 +1,93 @@

const Database = require('../Database')
const axios = require('axios')
const Logger = require('../Logger')

class CustomProviderAdapter {
  constructor() { }

  /**
   *
   * @param {string} title
   * @param {string} author
   * @param {string} providerSlug
   * @param {string} mediaType
   * @returns {Promise<Object[]>}
   */
  async search(title, author, providerSlug, mediaType) {
    const providerId = providerSlug.split('custom-')[1]
    const provider = await Database.customMetadataProviderModel.findByPk(providerId)

    if (!provider) {
      throw new Error("Custom provider not found for the given id")
    }

    // Setup query params
    const queryObj = {
      mediaType,
      query: title
    }
    if (author) {
      queryObj.author = author
    }
    const queryString = (new URLSearchParams(queryObj)).toString()

    // Setup headers
    const axiosOptions = {}
    if (provider.authHeaderValue) {
      axiosOptions.headers = {
        'Authorization': provider.authHeaderValue
      }
    }

    const matches = await axios.get(`${provider.url}/search?${queryString}`, axiosOptions).then((res) => {
      if (!res?.data || !Array.isArray(res.data.matches)) return null
      return res.data.matches
    }).catch(error => {
      Logger.error('[CustomMetadataProvider] Search error', error)
      return []
    })

    if (!matches) {
      throw new Error("Custom provider returned malformed response")
    }

    // re-map keys to throw out
    return matches.map(({
      title,
      subtitle,
      author,
      narrator,
      publisher,
      publishedYear,
      description,
      cover,
      isbn,
      asin,
      genres,
      tags,
      series,
      language,
      duration
    }) => {
      return {
        title,
        subtitle,
        author,
        narrator,
        publisher,
        publishedYear,
        description,
        cover,
        isbn,
        asin,
        genres,
        tags: tags?.join(',') || null,
        series: series?.length ? series : null,
        language,
        duration
      }
    })
  }
}

module.exports = CustomProviderAdapter
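For a provider implementer, the adapter above expects the /search endpoint to answer with a top-level `matches` array; anything else is treated as malformed. A minimal valid response body, with all field values assumed:

// GET <provider.url>/search?query=...&mediaType=book
// {
//   "matches": [
//     {
//       "title": "Example Title",
//       "author": "Example Author",
//       "tags": ["fiction"],   // joined to 'fiction' by the adapter
//       "series": []           // an empty array becomes null
//     }
//   ]
// }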
@ -2,16 +2,46 @@ const axios = require('axios')
const Logger = require('../Logger')
const htmlSanitizer = require('../utils/htmlSanitizer')

/**
 * @typedef iTunesSearchParams
 * @property {string} term
 * @property {string} country
 * @property {string} media
 * @property {string} entity
 * @property {number} limit
 */

/**
 * @typedef iTunesPodcastSearchResult
 * @property {string} id
 * @property {string} artistId
 * @property {string} title
 * @property {string} artistName
 * @property {string} description
 * @property {string} descriptionPlain
 * @property {string} releaseDate
 * @property {string[]} genres
 * @property {string} cover
 * @property {string} feedUrl
 * @property {string} pageUrl
 * @property {boolean} explicit
 */

class iTunes {
  constructor() { }

  // https://developer.apple.com/library/archive/documentation/AudioVideo/Conceptual/iTuneSearchAPI/Searching.html
  /**
   * @see https://developer.apple.com/library/archive/documentation/AudioVideo/Conceptual/iTuneSearchAPI/Searching.html
   *
   * @param {iTunesSearchParams} options
   * @returns {Promise<Object[]>}
   */
  search(options) {
    if (!options.term) {
      Logger.error('[iTunes] Invalid search options - no term')
      return []
    }
    var query = {
    const query = {
      term: options.term,
      media: options.media,
      entity: options.entity,

@ -82,6 +112,11 @@ class iTunes {
    })
  }

  /**
   *
   * @param {Object} data
   * @returns {iTunesPodcastSearchResult}
   */
  cleanPodcast(data) {
    return {
      id: data.collectionId,

@ -100,6 +135,12 @@ class iTunes {
    }
  }

  /**
   *
   * @param {string} term
   * @param {{country:string}} options
   * @returns {Promise<iTunesPodcastSearchResult[]>}
   */
  searchPodcasts(term, options = {}) {
    return this.search({ term, entity: 'podcast', media: 'podcast', ...options }).then((results) => {
      return results.map(this.cleanPodcast.bind(this))
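Usage sketch for the typed search above; the term and country are assumed:

const itunes = new iTunes()
itunes.searchPodcasts('science podcast', { country: 'us' }).then((results) => {
  // each entry is an iTunesPodcastSearchResult; feedUrl is what the podcast scanner needs
  console.log(results[0]?.feedUrl)
})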
@ -28,6 +28,7 @@ const SearchController = require('../controllers/SearchController')
const CacheController = require('../controllers/CacheController')
const ToolsController = require('../controllers/ToolsController')
const RSSFeedController = require('../controllers/RSSFeedController')
const CustomMetadataProviderController = require('../controllers/CustomMetadataProviderController')
const MiscController = require('../controllers/MiscController')

const Author = require('../objects/entities/Author')

@ -129,7 +130,7 @@ class ApiRouter {
    this.router.get('/users/:id', UserController.middleware.bind(this), UserController.findOne.bind(this))
    this.router.patch('/users/:id', UserController.middleware.bind(this), UserController.update.bind(this))
    this.router.delete('/users/:id', UserController.middleware.bind(this), UserController.delete.bind(this))

    this.router.patch('/users/:id/openid-unlink', UserController.middleware.bind(this), UserController.unlinkFromOpenID.bind(this))
    this.router.get('/users/:id/listening-sessions', UserController.middleware.bind(this), UserController.getListeningSessions.bind(this))
    this.router.get('/users/:id/listening-stats', UserController.middleware.bind(this), UserController.getListeningStats.bind(this))

@ -299,6 +300,14 @@ class ApiRouter {
    this.router.post('/feeds/series/:seriesId/open', RSSFeedController.middleware.bind(this), RSSFeedController.openRSSFeedForSeries.bind(this))
    this.router.post('/feeds/:id/close', RSSFeedController.middleware.bind(this), RSSFeedController.closeRSSFeed.bind(this))

    //
    // Custom Metadata Provider routes
    //
    this.router.get('/custom-metadata-providers', CustomMetadataProviderController.middleware.bind(this), CustomMetadataProviderController.getAll.bind(this))
    this.router.post('/custom-metadata-providers', CustomMetadataProviderController.middleware.bind(this), CustomMetadataProviderController.create.bind(this))
    this.router.delete('/custom-metadata-providers/:id', CustomMetadataProviderController.middleware.bind(this), CustomMetadataProviderController.delete.bind(this))

    //
    // Misc Routes
    //

@ -318,6 +327,7 @@ class ApiRouter {
    this.router.patch('/auth-settings', MiscController.updateAuthSettings.bind(this))
    this.router.post('/watcher/update', MiscController.updateWatchedPath.bind(this))
    this.router.get('/stats/year/:year', MiscController.getAdminStatsForYear.bind(this))
    this.router.get('/logger-data', MiscController.getLoggerData.bind(this))
  }

  //
@ -134,10 +134,13 @@ class LibraryScan {
  }

  async saveLog() {
    await Logger.logManager.ensureScanLogDir()
    const scanLogDir = Path.join(global.MetadataPath, 'logs', 'scans')

    const logDir = Path.join(global.MetadataPath, 'logs', 'scans')
    const outputPath = Path.join(logDir, this.logFilename)
    if (!(await fs.pathExists(scanLogDir))) {
      await fs.mkdir(scanLogDir)
    }

    const outputPath = Path.join(scanLogDir, this.logFilename)
    const logLines = [JSON.stringify(this.toJSON())]
    this.logs.forEach(l => {
      logLines.push(JSON.stringify(l))
@ -101,8 +101,8 @@ module.exports.downloadPodcastEpisode = (podcastEpisodeDownload) => {
  })
  if (!response) return resolve(false)

  const ffmpeg = Ffmpeg(response.data)
  ffmpeg.addOption('-loglevel debug') // Debug logs printed on error
  ffmpeg.outputOptions(
    '-c', 'copy',
    '-metadata', 'podcast=1'

@ -110,6 +110,7 @@ module.exports.downloadPodcastEpisode = (podcastEpisodeDownload) => {

  const podcastMetadata = podcastEpisodeDownload.libraryItem.media.metadata
  const podcastEpisode = podcastEpisodeDownload.podcastEpisode
  const finalSizeInBytes = Number(podcastEpisode.enclosure?.length || 0)

  const taggings = {
    'album': podcastMetadata.title,

@ -147,13 +148,30 @@ module.exports.downloadPodcastEpisode = (podcastEpisodeDownload) => {

  ffmpeg.addOutput(podcastEpisodeDownload.targetPath)

  const stderrLines = []
  ffmpeg.on('stderr', (stderrLine) => {
    if (typeof stderrLine === 'string') {
      stderrLines.push(stderrLine)
    }
  })
  ffmpeg.on('start', (cmd) => {
    Logger.debug(`[FfmpegHelpers] downloadPodcastEpisode: Cmd: ${cmd}`)
  })
  ffmpeg.on('error', (err, stdout, stderr) => {
    Logger.error(`[FfmpegHelpers] downloadPodcastEpisode: Error ${err} ${stdout} ${stderr}`)
  ffmpeg.on('error', (err) => {
    Logger.error(`[FfmpegHelpers] downloadPodcastEpisode: Error ${err}`)
    if (stderrLines.length) {
      Logger.error(`Full stderr dump for episode url "${podcastEpisodeDownload.url}": ${stderrLines.join('\n')}`)
    }
    resolve(false)
  })
  ffmpeg.on('progress', (progress) => {
    let progressPercent = 0
    if (finalSizeInBytes && progress.targetSize && !isNaN(progress.targetSize)) {
      const finalSizeInKb = Math.floor(finalSizeInBytes / 1000)
      progressPercent = Math.min(1, progress.targetSize / finalSizeInKb) * 100
    }
    Logger.debug(`[FfmpegHelpers] downloadPodcastEpisode: Progress estimate ${progressPercent.toFixed(0)}% (${progress?.targetSize || 'N/A'} KB) for "${podcastEpisodeDownload.url}"`)
  })
  ffmpeg.on('end', () => {
    Logger.debug(`[FfmpegHelpers] downloadPodcastEpisode: Complete`)
    resolve(podcastEpisodeDownload.targetPath)
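The progress estimate compares ffmpeg's reported targetSize (data written so far, in KB) against the expected size taken from the RSS enclosure. Worked numbers, with both sizes assumed:

const finalSizeInBytes = 52400000 // from podcastEpisode.enclosure.length (assumed)
const finalSizeInKb = Math.floor(finalSizeInBytes / 1000) // 52400
const targetSize = 26200 // KB written so far, reported by ffmpeg (assumed)
const progressPercent = Math.min(1, targetSize / finalSizeInKb) * 100 // 50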
@ -366,15 +366,16 @@ module.exports.encodeUriPath = (path) => {
 * This method is necessary because fs.access(directory, fs.constants.W_OK) does not work on Windows
 *
 * @param {string} directory
 * @returns {boolean}
 * @returns {Promise<boolean>}
 */
module.exports.isWritable = async (directory) => {
  try {
    const accessTestFile = path.join(directory, 'accessTest')
    const accessTestFile = Path.join(directory, 'accessTest')
    await fs.writeFile(accessTestFile, '')
    await fs.remove(accessTestFile)
    return true
  } catch (err) {
    Logger.info(`[fileUtils] Directory is not writable "${directory}"`, err)
    return false
  }
}
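This is the helper BinaryManager uses to pick an install destination; the write-then-delete probe works on Windows where fs.access() does not. Usage sketch, with both paths assumed:

const fileUtils = require('../utils/fileUtils')
const destination = (await fileUtils.isWritable('/app')) ? '/app' : '/config' // paths assumed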
@ -114,7 +114,9 @@ module.exports.reqSupportsWebp = (req) => {
module.exports.areEquivalent = areEquivalent

module.exports.copyValue = (val) => {
  if (!val) return val === false ? false : null
  if (val === undefined || val === '') return null
  else if (!val) return val

  if (!this.isObject(val)) return val

  if (Array.isArray(val)) {
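The rewritten guard changes how falsy values copy: previously every falsy input except `false` collapsed to null; now only `undefined` and the empty string do, and other falsy values pass through unchanged. Behavior comparison, with values chosen for illustration:

// old: copyValue(0)      -> null   (0 is falsy and not === false)
// new: copyValue(0)      -> 0      (only undefined and '' map to null)
// both: copyValue(false) -> false
// both: copyValue(null)  -> null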
@ -1,9 +0,0 @@

const { parentPort } = require("worker_threads")
const prober = require('./prober')

parentPort.on("message", async ({ mediaPath }) => {
  const results = await prober.probe(mediaPath)
  parentPort.postMessage({
    data: results,
  })
})
@ -110,7 +110,7 @@ module.exports = {
    })

    // Filter out bad genres like "audiobook" and "audio book"
    const genres = (ls.mediaMetadata.genres || []).filter(g => !g.toLowerCase().includes('audiobook') && !g.toLowerCase().includes('audio book'))
    const genres = (ls.mediaMetadata.genres || []).filter(g => g && !g.toLowerCase().includes('audiobook') && !g.toLowerCase().includes('audio book'))
    genres.forEach((genre) => {
      if (!genreListeningMap[genre]) genreListeningMap[genre] = 0
      genreListeningMap[genre] += (ls.timeListening || 0)
@ -204,6 +204,10 @@ module.exports = {
      mediaWhere['ebookFile'] = {
        [Sequelize.Op.not]: null
      }
    } else if (value == 'no-ebook') {
      mediaWhere['ebookFile'] = {
        [Sequelize.Op.eq]: null
      }
    }
  } else if (group === 'missing') {
    if (['asin', 'isbn', 'subtitle', 'publishedYear', 'description', 'publisher', 'language', 'cover'].includes(value)) {

@ -421,6 +425,10 @@ module.exports = {
      libraryItemWhere['libraryFiles'] = {
        [Sequelize.Op.substring]: `"isSupplementary":true`
      }
    } else if (filterGroup === 'ebooks' && filterValue === 'no-supplementary') {
      libraryItemWhere['libraryFiles'] = {
        [Sequelize.Op.notLike]: Sequelize.literal(`\'%"isSupplementary":true%\'`),
      }
    } else if (filterGroup === 'missing' && filterValue === 'authors') {
      authorInclude = {
        model: Database.authorModel,
@ -141,7 +141,7 @@ module.exports = {
    })

    // Filter out bad genres like "audiobook" and "audio book"
    const genres = (ls.mediaMetadata.genres || []).filter(g => !g.toLowerCase().includes('audiobook') && !g.toLowerCase().includes('audio book'))
    const genres = (ls.mediaMetadata.genres || []).filter(g => g && !g.toLowerCase().includes('audiobook') && !g.toLowerCase().includes('audio book'))
    genres.forEach((genre) => {
      if (!genreListeningMap[genre]) genreListeningMap[genre] = 0
      genreListeningMap[genre] += listeningSessionListeningTime