Merge branch 'advplyr:master' into audible-confidence-score

commit 5017e7ce9e by mikiher, 2025-06-16 10:26:58 +03:00 (committed by GitHub)
GPG key ID: B5690EEEBB952194
54 changed files with 1639 additions and 254 deletions


@@ -765,6 +765,15 @@ class Database {
if (badSessionsRemoved > 0) {
Logger.warn(`Removed ${badSessionsRemoved} sessions that were 3 seconds or less`)
}
// Remove mediaProgresses with duplicate mediaItemId (remove the oldest updatedAt)
// const [duplicateMediaProgresses] = await this.sequelize.query(`SELECT id, mediaItemId FROM mediaProgresses WHERE (mediaItemId, userId, updatedAt) IN (SELECT mediaItemId, userId, MIN(updatedAt) FROM mediaProgresses GROUP BY mediaItemId, userId HAVING COUNT(*) > 1)`)
// for (const duplicateMediaProgress of duplicateMediaProgresses) {
// Logger.warn(`Found duplicate mediaProgress for mediaItem "${duplicateMediaProgress.mediaItemId}" - removing it`)
// await this.mediaProgressModel.destroy({
// where: { id: duplicateMediaProgress.id }
// })
// }
}
async createTextSearchQuery(query) {


@@ -12,6 +12,7 @@ const { version } = require('../package.json')
// Utils
const fileUtils = require('./utils/fileUtils')
const { toNumber } = require('./utils/index')
const Logger = require('./Logger')
const Auth = require('./Auth')
@@ -84,12 +85,8 @@ class Server {
global.DisableSsrfRequestFilter = (url) => whitelistedUrls.includes(new URL(url).hostname)
}
}
if (process.env.PODCAST_DOWNLOAD_TIMEOUT) {
global.PodcastDownloadTimeout = process.env.PODCAST_DOWNLOAD_TIMEOUT
} else {
global.PodcastDownloadTimeout = 30000
}
global.PodcastDownloadTimeout = toNumber(process.env.PODCAST_DOWNLOAD_TIMEOUT, 30000)
global.MaxFailedEpisodeChecks = toNumber(process.env.MAX_FAILED_EPISODE_CHECKS, 24)
if (!fs.pathExistsSync(global.ConfigPath)) {
fs.mkdirSync(global.ConfigPath)
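
The PODCAST_DOWNLOAD_TIMEOUT branch above is collapsed into a single toNumber call that parses the environment variable and falls back to a default when it is unset or not numeric. A minimal sketch of that pattern, assuming a helper roughly like the one below (the standalone implementation is an illustration, not the project's exact utility):

// Assumed shape of the toNumber helper: parse the value, fall back when it is not a number
const toNumber = (val, fallback = 0) => {
  const num = Number(val)
  return isNaN(num) ? fallback : num
}

global.PodcastDownloadTimeout = toNumber(process.env.PODCAST_DOWNLOAD_TIMEOUT, 30000)
global.MaxFailedEpisodeChecks = toNumber(process.env.MAX_FAILED_EPISODE_CHECKS, 24)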


@@ -84,49 +84,73 @@ class FileSystemController {
*/
async checkPathExists(req, res) {
if (!req.user.canUpload) {
Logger.error(`[FileSystemController] Non-admin user "${req.user.username}" attempting to check path exists`)
Logger.error(`[FileSystemController] User "${req.user.username}" without upload permissions attempting to check path exists`)
return res.sendStatus(403)
}
const { filepath, directory, folderPath } = req.body
const { directory, folderPath } = req.body
if (!directory?.length || typeof directory !== 'string' || !folderPath?.length || typeof folderPath !== 'string') {
Logger.error(`[FileSystemController] Invalid request body: ${JSON.stringify(req.body)}`)
return res.status(400).json({
error: 'Invalid request body'
})
}
if (!filepath?.length || typeof filepath !== 'string') {
// Check that library folder exists
const libraryFolder = await Database.libraryFolderModel.findOne({
where: {
path: folderPath
}
})
if (!libraryFolder) {
Logger.error(`[FileSystemController] Library folder not found: ${folderPath}`)
return res.sendStatus(404)
}
if (!req.user.checkCanAccessLibrary(libraryFolder.libraryId)) {
Logger.error(`[FileSystemController] User "${req.user.username}" attempting to check path exists for library "${libraryFolder.libraryId}" without access`)
return res.sendStatus(403)
}
let filepath = Path.join(libraryFolder.path, directory)
filepath = fileUtils.filePathToPOSIX(filepath)
// Ensure filepath is inside library folder (prevents directory traversal)
if (!filepath.startsWith(libraryFolder.path)) {
Logger.error(`[FileSystemController] Filepath is not inside library folder: ${filepath}`)
return res.sendStatus(400)
}
const exists = await fs.pathExists(filepath)
if (exists) {
if (await fs.pathExists(filepath)) {
return res.json({
exists: true
})
}
// If directory and folderPath are passed in, check if a library item exists in a subdirectory
// Check if a library item exists in a subdirectory
// See: https://github.com/advplyr/audiobookshelf/issues/4146
if (typeof directory === 'string' && typeof folderPath === 'string' && directory.length > 0 && folderPath.length > 0) {
const cleanedDirectory = directory.split('/').filter(Boolean).join('/')
if (cleanedDirectory.includes('/')) {
// Can only be 2 levels deep
const possiblePaths = []
const subdir = Path.dirname(directory)
possiblePaths.push(fileUtils.filePathToPOSIX(Path.join(folderPath, subdir)))
if (subdir.includes('/')) {
possiblePaths.push(fileUtils.filePathToPOSIX(Path.join(folderPath, Path.dirname(subdir))))
}
const cleanedDirectory = directory.split('/').filter(Boolean).join('/')
if (cleanedDirectory.includes('/')) {
// Can only be 2 levels deep
const possiblePaths = []
const subdir = Path.dirname(directory)
possiblePaths.push(fileUtils.filePathToPOSIX(Path.join(folderPath, subdir)))
if (subdir.includes('/')) {
possiblePaths.push(fileUtils.filePathToPOSIX(Path.join(folderPath, Path.dirname(subdir))))
}
const libraryItem = await Database.libraryItemModel.findOne({
where: {
path: possiblePaths
}
const libraryItem = await Database.libraryItemModel.findOne({
where: {
path: possiblePaths
}
})
if (libraryItem) {
return res.json({
exists: true,
libraryItemTitle: libraryItem.title
})
if (libraryItem) {
return res.json({
exists: true,
libraryItemTitle: libraryItem.title
})
}
}
}
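
The rewritten checkPathExists resolves the requested directory against the library folder path and rejects anything that escapes it. A minimal sketch of that traversal guard, using a simplified stand-in for fileUtils.filePathToPOSIX (names below are illustrative):

const Path = require('path')

// Simplified stand-in for fileUtils.filePathToPOSIX
const filePathToPOSIX = (p) => p.replace(/\\/g, '/')

function isInsideLibraryFolder(libraryFolderPath, directory) {
  // Join and normalize, then require the result to stay under the library folder
  const candidate = filePathToPOSIX(Path.join(libraryFolderPath, directory))
  return candidate.startsWith(libraryFolderPath)
}

// isInsideLibraryFolder('/audiobooks', 'Author/Book')      -> true
// isInsideLibraryFolder('/audiobooks', '../../etc/passwd') -> false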


@@ -59,6 +59,12 @@ class MiscController {
if (!library) {
return res.status(404).send('Library not found')
}
if (!req.user.checkCanAccessLibrary(library.id)) {
Logger.error(`[MiscController] User "${req.user.username}" attempting to upload to library "${library.id}" without access`)
return res.sendStatus(403)
}
const folder = library.libraryFolders.find((fold) => fold.id === folderId)
if (!folder) {
return res.status(404).send('Folder not found')


@@ -9,6 +9,7 @@ const fs = require('../libs/fsExtra')
const { getPodcastFeed, findMatchingEpisodes } = require('../utils/podcastUtils')
const { getFileTimestampsWithIno, filePathToPOSIX } = require('../utils/fileUtils')
const { validateUrl } = require('../utils/index')
const htmlSanitizer = require('../utils/htmlSanitizer')
const Scanner = require('../scanner/Scanner')
const CoverManager = require('../managers/CoverManager')
@@ -404,6 +405,15 @@ class PodcastController {
const supportedStringKeys = ['title', 'subtitle', 'description', 'pubDate', 'episode', 'season', 'episodeType']
for (const key in req.body) {
if (supportedStringKeys.includes(key) && typeof req.body[key] === 'string') {
// Sanitize description HTML
if (key === 'description' && req.body[key]) {
const sanitizedDescription = htmlSanitizer.sanitize(req.body[key])
if (sanitizedDescription !== req.body[key]) {
Logger.debug(`[PodcastController] Sanitized description from "${req.body[key]}" to "${sanitizedDescription}"`)
req.body[key] = sanitizedDescription
}
}
updatePayload[key] = req.body[key]
} else if (key === 'chapters' && Array.isArray(req.body[key]) && req.body[key].every((ch) => typeof ch === 'object' && ch.title && ch.start)) {
updatePayload[key] = req.body[key]
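
Episode descriptions submitted through this endpoint now pass through the HTML sanitizer before being stored, mirroring the Book and Podcast model changes further down. A minimal sketch of the pattern, with sanitize standing in for htmlSanitizer.sanitize from the diff:

// Hedged sketch: `sanitize` stands in for htmlSanitizer.sanitize
function sanitizeDescriptionField(body, sanitize, logger = console) {
  if (typeof body.description !== 'string' || !body.description) return body
  const sanitized = sanitize(body.description)
  if (sanitized !== body.description) {
    // Only log and overwrite when the sanitizer actually changed something
    logger.debug(`Sanitized description from "${body.description}" to "${sanitized}"`)
    body.description = sanitized
  }
  return body
}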

File diff suppressed because one or more lines are too long


@@ -203,7 +203,15 @@ class AbMergeManager {
// Move library item tracks to cache
for (const [index, trackPath] of task.data.originalTrackPaths.entries()) {
const trackFilename = Path.basename(trackPath)
const moveToPath = Path.join(task.data.itemCachePath, trackFilename)
let moveToPath = Path.join(task.data.itemCachePath, trackFilename)
// If the track is the same as the temp file, we need to rename it to avoid overwriting it
if (task.data.tempFilepath === moveToPath) {
const trackExtname = Path.extname(task.data.tempFilepath)
const newTrackFilename = Path.basename(task.data.tempFilepath, trackExtname) + '.backup' + trackExtname
moveToPath = Path.join(task.data.itemCachePath, newTrackFilename)
}
Logger.debug(`[AbMergeManager] Backing up original track "${trackPath}" to ${moveToPath}`)
if (index === 0) {
// copy the first track to the cache directory
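
When the original track has the same path as the merge temp file, the backup name gets a ".backup" suffix inserted before the extension so the copy cannot overwrite the temp file. A worked sketch of that renaming with hypothetical paths:

const Path = require('path')

// Hypothetical values for illustration only
const itemCachePath = '/metadata/cache/items/abc123'
const tempFilepath = '/metadata/cache/items/abc123/book.m4b'

const trackExtname = Path.extname(tempFilepath)            // '.m4b'
const baseName = Path.basename(tempFilepath, trackExtname) // 'book'
const moveToPath = Path.join(itemCachePath, baseName + '.backup' + trackExtname)
// moveToPath -> '/metadata/cache/items/abc123/book.backup.m4b'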


@@ -71,6 +71,54 @@ class NotificationManager {
this.triggerNotification('onBackupCompleted', eventData)
}
/**
* Handles scheduled episode download RSS feed request failed
*
* @param {string} feedUrl
* @param {number} numFailed
* @param {string} title
*/
async onRSSFeedFailed(feedUrl, numFailed, title) {
if (!Database.notificationSettings.isUseable) return
if (!Database.notificationSettings.getHasActiveNotificationsForEvent('onRSSFeedFailed')) {
Logger.debug(`[NotificationManager] onRSSFeedFailed: No active notifications`)
return
}
Logger.debug(`[NotificationManager] onRSSFeedFailed: RSS feed request failed for ${feedUrl}`)
const eventData = {
feedUrl: feedUrl,
numFailed: numFailed || 0,
title: title || 'Unknown Title'
}
this.triggerNotification('onRSSFeedFailed', eventData)
}
/**
* Handles scheduled episode downloads disabled due to too many failed attempts
*
* @param {string} feedUrl
* @param {number} numFailed
* @param {string} title
*/
async onRSSFeedDisabled(feedUrl, numFailed, title) {
if (!Database.notificationSettings.isUseable) return
if (!Database.notificationSettings.getHasActiveNotificationsForEvent('onRSSFeedDisabled')) {
Logger.debug(`[NotificationManager] onRSSFeedDisabled: No active notifications`)
return
}
Logger.debug(`[NotificationManager] onRSSFeedDisabled: Podcast scheduled episode download disabled due to ${numFailed} failed requests for ${feedUrl}`)
const eventData = {
feedUrl: feedUrl,
numFailed: numFailed || 0,
title: title || 'Unknown Title'
}
this.triggerNotification('onRSSFeedDisabled', eventData)
}
/**
*
* @param {string} errorMsg


@@ -107,7 +107,7 @@ class PlaybackSessionManager {
const syncResults = []
for (const sessionJson of sessions) {
Logger.info(`[PlaybackSessionManager] Syncing local session "${sessionJson.displayTitle}" (${sessionJson.id})`)
Logger.info(`[PlaybackSessionManager] Syncing local session "${sessionJson.displayTitle}" (${sessionJson.id}) (updatedAt: ${sessionJson.updatedAt})`)
const result = await this.syncLocalSession(user, sessionJson, deviceInfo)
syncResults.push(result)
}
@@ -230,9 +230,9 @@ class PlaybackSessionManager {
let userProgressForItem = user.getMediaProgress(mediaItemId)
if (userProgressForItem) {
if (userProgressForItem.updatedAt.valueOf() > session.updatedAt) {
Logger.debug(`[PlaybackSessionManager] Not updating progress for "${session.displayTitle}" because it has been updated more recently`)
Logger.info(`[PlaybackSessionManager] Not updating progress for "${session.displayTitle}" because it has been updated more recently (${userProgressForItem.updatedAt.valueOf()} > ${session.updatedAt}) (incoming currentTime: ${session.currentTime}) (current currentTime: ${userProgressForItem.currentTime})`)
} else {
Logger.debug(`[PlaybackSessionManager] Updating progress for "${session.displayTitle}" with current time ${session.currentTime} (previously ${userProgressForItem.currentTime})`)
Logger.info(`[PlaybackSessionManager] Updating progress for "${session.displayTitle}" with current time ${session.currentTime} (previously ${userProgressForItem.currentTime})`)
const updateResponse = await user.createUpdateMediaProgressFromPayload({
libraryItemId: libraryItem.id,
episodeId: session.episodeId,
@@ -246,7 +246,7 @@ class PlaybackSessionManager {
}
}
} else {
Logger.debug(`[PlaybackSessionManager] Creating new media progress for media item "${session.displayTitle}"`)
Logger.info(`[PlaybackSessionManager] Creating new media progress for media item "${session.displayTitle}"`)
const updateResponse = await user.createUpdateMediaProgressFromPayload({
libraryItemId: libraryItem.id,
episodeId: session.episodeId,
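
The sync applies a last-write-wins rule: the incoming local session only overwrites the server's media progress when the server copy is not newer. A minimal sketch of that comparison (the server value is a Date, the client value epoch milliseconds, per the code above; the helper name is illustrative):

// Last-write-wins check for local session sync
function shouldApplyIncomingProgress(serverProgress, incomingSession) {
  // serverProgress.updatedAt: Date; incomingSession.updatedAt: epoch millis from the client
  return serverProgress.updatedAt.valueOf() <= incomingSession.updatedAt
}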


@@ -30,7 +30,7 @@ class PodcastManager {
this.currentDownload = null
this.failedCheckMap = {}
this.MaxFailedEpisodeChecks = 24
this.MaxFailedEpisodeChecks = global.MaxFailedEpisodeChecks
}
getEpisodeDownloadsInQueue(libraryItemId) {
@@ -345,12 +345,14 @@ class PodcastManager {
// Allow up to MaxFailedEpisodeChecks failed attempts before disabling auto download
if (!this.failedCheckMap[libraryItem.id]) this.failedCheckMap[libraryItem.id] = 0
this.failedCheckMap[libraryItem.id]++
if (this.failedCheckMap[libraryItem.id] >= this.MaxFailedEpisodeChecks) {
if (this.MaxFailedEpisodeChecks !== 0 && this.failedCheckMap[libraryItem.id] >= this.MaxFailedEpisodeChecks) {
Logger.error(`[PodcastManager] runEpisodeCheck ${this.failedCheckMap[libraryItem.id]} failed attempts at checking episodes for "${libraryItem.media.title}" - disabling auto download`)
void NotificationManager.onRSSFeedDisabled(libraryItem.media.feedURL, this.failedCheckMap[libraryItem.id], libraryItem.media.title)
libraryItem.media.autoDownloadEpisodes = false
delete this.failedCheckMap[libraryItem.id]
} else {
Logger.warn(`[PodcastManager] runEpisodeCheck ${this.failedCheckMap[libraryItem.id]} failed attempts at checking episodes for "${libraryItem.media.title}"`)
void NotificationManager.onRSSFeedFailed(libraryItem.media.feedURL, this.failedCheckMap[libraryItem.id], libraryItem.media.title)
}
} else if (newEpisodes.length) {
delete this.failedCheckMap[libraryItem.id]
@@ -384,7 +386,17 @@ class PodcastManager {
Logger.error(`[PodcastManager] checkPodcastForNewEpisodes no feed url for ${podcastLibraryItem.media.title} (ID: ${podcastLibraryItem.id})`)
return null
}
const feed = await getPodcastFeed(podcastLibraryItem.media.feedURL)
const feed = await Promise.race([
getPodcastFeed(podcastLibraryItem.media.feedURL),
new Promise((_, reject) =>
// The added second is to make sure that axios can fail first and only falls back later
setTimeout(() => reject(new Error('Timeout. getPodcastFeed seemed to timeout but not triggering the timeout.')), global.PodcastDownloadTimeout + 1000)
)
]).catch((error) => {
Logger.error(`[PodcastManager] checkPodcastForNewEpisodes failed to fetch feed for ${podcastLibraryItem.media.title} (ID: ${podcastLibraryItem.id}):`, error)
return null
})
if (!feed?.episodes) {
Logger.error(`[PodcastManager] checkPodcastForNewEpisodes invalid feed payload for ${podcastLibraryItem.media.title} (ID: ${podcastLibraryItem.id})`, feed)
return null
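
The feed fetch is raced against a timer set one second longer than the axios timeout, so a request that hangs without ever triggering axios's own timeout cannot stall the scheduled episode check. A minimal sketch of the pattern (the wrapper name is illustrative; getPodcastFeed and the global timeout come from the diff):

const { getPodcastFeed } = require('../utils/podcastUtils')

// Illustrative wrapper: reject if the wrapped promise does not settle in time
function withTimeout(promise, timeoutMs) {
  return Promise.race([
    promise,
    new Promise((_, reject) => setTimeout(() => reject(new Error(`Timed out after ${timeoutMs} ms`)), timeoutMs))
  ])
}

async function fetchFeedOrNull(feedURL) {
  // The extra second gives axios's own timeout a chance to fail first
  return withTimeout(getPodcastFeed(feedURL), global.PodcastDownloadTimeout + 1000).catch((error) => {
    console.error(`Feed request failed or timed out for ${feedURL}`, error)
    return null
  })
}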


@@ -377,8 +377,17 @@ class Book extends Model {
if (typeof payload.metadata[key] == 'number') {
payload.metadata[key] = String(payload.metadata[key])
}
if ((typeof payload.metadata[key] === 'string' || payload.metadata[key] === null) && this[key] !== payload.metadata[key]) {
// Sanitize description HTML
if (key === 'description' && payload.metadata[key]) {
const sanitizedDescription = htmlSanitizer.sanitize(payload.metadata[key])
if (sanitizedDescription !== payload.metadata[key]) {
Logger.debug(`[Book] "${this.title}" Sanitized description from "${payload.metadata[key]}" to "${sanitizedDescription}"`)
payload.metadata[key] = sanitizedDescription
}
}
this[key] = payload.metadata[key] || null
if (key === 'title') {


@@ -222,13 +222,13 @@ class MediaProgress extends Model {
const markAsFinishedPercentComplete = Number(progressPayload.markAsFinishedPercentComplete) / 100
shouldMarkAsFinished = markAsFinishedPercentComplete < this.progress
if (shouldMarkAsFinished) {
Logger.debug(`[MediaProgress] Marking media progress as finished because progress (${this.progress}) is greater than ${markAsFinishedPercentComplete}`)
Logger.info(`[MediaProgress] Marking media progress as finished because progress (${this.progress}) is greater than ${markAsFinishedPercentComplete} (media item ${this.mediaItemId})`)
}
} else {
const markAsFinishedTimeRemaining = isNullOrNaN(progressPayload.markAsFinishedTimeRemaining) ? 10 : Number(progressPayload.markAsFinishedTimeRemaining)
shouldMarkAsFinished = timeRemaining < markAsFinishedTimeRemaining
if (shouldMarkAsFinished) {
Logger.debug(`[MediaProgress] Marking media progress as finished because time remaining (${timeRemaining}) is less than ${markAsFinishedTimeRemaining} seconds`)
Logger.info(`[MediaProgress] Marking media progress as finished because time remaining (${timeRemaining}) is less than ${markAsFinishedTimeRemaining} seconds (media item ${this.mediaItemId})`)
}
}
}
@@ -246,9 +246,11 @@ class MediaProgress extends Model {
// For local sync
if (progressPayload.lastUpdate) {
this.updatedAt = progressPayload.lastUpdate
Logger.info(`[MediaProgress] Manually setting updatedAt to ${this.updatedAt} (media item ${this.mediaItemId})`)
this.changed('updatedAt', true)
}
return this.save()
return this.save({ silent: !!progressPayload.lastUpdate })
}
}
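
For local syncs the payload's lastUpdate is written straight into updatedAt; changed('updatedAt', true) marks the field dirty so Sequelize persists the manual value, and save({ silent: true }) keeps Sequelize from bumping updatedAt back to the current time. A minimal sketch of that Sequelize idiom on any timestamped model instance:

// Persist a client-supplied timestamp instead of letting Sequelize set "now"
async function saveWithManualTimestamp(instance, lastUpdate) {
  if (lastUpdate) {
    instance.updatedAt = lastUpdate     // e.g. epoch millis or Date from the client payload
    instance.changed('updatedAt', true) // mark dirty so the manual value is written
  }
  // silent: true skips the automatic updatedAt refresh on save
  return instance.save({ silent: !!lastUpdate })
}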


@@ -2,6 +2,7 @@ const { DataTypes, Model } = require('sequelize')
const { getTitlePrefixAtEnd, getTitleIgnorePrefix } = require('../utils')
const Logger = require('../Logger')
const libraryItemsPodcastFilters = require('../utils/queries/libraryItemsPodcastFilters')
const htmlSanitizer = require('../utils/htmlSanitizer')
/**
* @typedef PodcastExpandedProperties
@@ -215,6 +216,15 @@ class Podcast extends Model {
newKey = 'itunesPageURL'
}
if ((typeof payload.metadata[key] === 'string' || payload.metadata[key] === null) && payload.metadata[key] !== this[newKey]) {
// Sanitize description HTML
if (key === 'description' && payload.metadata[key]) {
const sanitizedDescription = htmlSanitizer.sanitize(payload.metadata[key])
if (sanitizedDescription !== payload.metadata[key]) {
Logger.debug(`[Podcast] "${this.title}" Sanitized description from "${payload.metadata[key]}" to "${sanitizedDescription}"`)
payload.metadata[key] = sanitizedDescription
}
}
this[newKey] = payload.metadata[key] || null
if (key === 'title') {


@@ -41,7 +41,7 @@ class Audible {
}
cleanResult(item) {
const { title, subtitle, asin, authors, narrators, publisherName, summary, releaseDate, image, genres, seriesPrimary, seriesSecondary, language, runtimeLengthMin, formatType } = item
const { title, subtitle, asin, authors, narrators, publisherName, summary, releaseDate, image, genres, seriesPrimary, seriesSecondary, language, runtimeLengthMin, formatType, isbn } = item
const series = []
if (seriesPrimary) {
@@ -70,6 +70,7 @@ class Audible {
description: summary || null,
cover: image,
asin,
isbn,
genres: genresFiltered.length ? genresFiltered : null,
tags: tagsFiltered.length ? tagsFiltered.join(', ') : null,
series: series.length ? series : null,


@@ -370,7 +370,7 @@ class Scanner {
let numEpisodesUpdated = 0
for (const episode of episodesToQuickMatch) {
const episodeMatches = findMatchingEpisodesInFeed(feed, episode.title)
const episodeMatches = findMatchingEpisodesInFeed(feed, episode.title, 0.1)
if (episodeMatches?.length) {
const wasUpdated = await this.updateEpisodeWithMatch(episode, episodeMatches[0].episode, options)
if (wasUpdated) numEpisodesUpdated++


@@ -103,18 +103,39 @@ module.exports.resizeImage = resizeImage
*/
module.exports.downloadPodcastEpisode = (podcastEpisodeDownload) => {
return new Promise(async (resolve) => {
const response = await axios({
url: podcastEpisodeDownload.url,
method: 'GET',
responseType: 'stream',
headers: {
'User-Agent': 'audiobookshelf (+https://audiobookshelf.org)'
},
timeout: global.PodcastDownloadTimeout
}).catch((error) => {
Logger.error(`[ffmpegHelpers] Failed to download podcast episode with url "${podcastEpisodeDownload.url}"`, error)
return null
})
// Some podcasts fail due to user agent strings
// See: https://github.com/advplyr/audiobookshelf/issues/3246 (requires iTMS user agent)
// See: https://github.com/advplyr/audiobookshelf/issues/4401 (requires no iTMS user agent)
const userAgents = ['audiobookshelf (+https://audiobookshelf.org; like iTMS)', 'audiobookshelf (+https://audiobookshelf.org)']
let response = null
let lastError = null
for (const userAgent of userAgents) {
try {
response = await axios({
url: podcastEpisodeDownload.url,
method: 'GET',
responseType: 'stream',
headers: {
'User-Agent': userAgent
},
timeout: global.PodcastDownloadTimeout
})
Logger.debug(`[ffmpegHelpers] Successfully connected with User-Agent: ${userAgent}`)
break
} catch (error) {
lastError = error
Logger.warn(`[ffmpegHelpers] Failed to download podcast episode with User-Agent "${userAgent}" for url "${podcastEpisodeDownload.url}"`, error.message)
// If this is the last attempt, log the full error
if (userAgent === userAgents[userAgents.length - 1]) {
Logger.error(`[ffmpegHelpers] All User-Agent attempts failed for url "${podcastEpisodeDownload.url}"`, lastError)
}
}
}
if (!response) {
return resolve({
success: false


@@ -60,6 +60,38 @@ module.exports.notificationData = {
errorMsg: 'Example error message'
}
},
{
name: 'onRSSFeedFailed',
requiresLibrary: true,
description: 'Triggered when the RSS feed request fails for an automatic episode download',
descriptionKey: 'NotificationOnRSSFeedFailedDescription',
variables: ['feedUrl', 'numFailed', 'title'],
defaults: {
title: 'RSS Feed Request Failed',
body: 'Failed to request RSS feed for {{title}}.\nFeed URL: {{feedUrl}}\nNumber of failed attempts: {{numFailed}}'
},
testData: {
title: 'Test RSS Feed',
feedUrl: 'https://example.com/rss',
numFailed: 3
}
},
{
name: 'onRSSFeedDisabled',
requiresLibrary: true,
description: 'Triggered when automatic episode downloads are disabled due to too many failed attempts',
descriptionKey: 'NotificationOnRSSFeedDisabledDescription',
variables: ['feedUrl', 'numFailed', 'title'],
defaults: {
title: 'Podcast Episode Download Schedule Disabled',
body: 'Automatic episode downloads for {{title}} have been disabled due to too many failed RSS feed requests.\nFeed URL: {{feedUrl}}\nNumber of failed attempts: {{numFailed}}'
},
testData: {
title: 'Test RSS Feed',
feedUrl: 'https://example.com/rss',
numFailed: 5
}
},
{
name: 'onTest',
requiresLibrary: false,
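
Each notification's defaults.body uses double-brace placeholders filled from the event data passed to triggerNotification. A hypothetical substitution sketch (the project's real template handling may differ):

// Hypothetical {{key}} substitution for notification templates
function fillNotificationTemplate(template, eventData) {
  return template.replace(/\{\{(\w+)\}\}/g, (placeholder, key) => (eventData[key] !== undefined ? String(eventData[key]) : placeholder))
}

fillNotificationTemplate('Failed to request RSS feed for {{title}}.\nFeed URL: {{feedUrl}}', {
  title: 'Test RSS Feed',
  feedUrl: 'https://example.com/rss',
  numFailed: 3
})
// -> 'Failed to request RSS feed for Test RSS Feed.\nFeed URL: https://example.com/rss'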


@@ -1,8 +1,9 @@
const axios = require('axios')
const ssrfFilter = require('ssrf-req-filter')
const Logger = require('../Logger')
const { xmlToJSON, levenshteinDistance, timestampToSeconds } = require('./index')
const { xmlToJSON, timestampToSeconds } = require('./index')
const htmlSanitizer = require('../utils/htmlSanitizer')
const Fuse = require('../libs/fusejs')
/**
* @typedef RssPodcastChapter
@@ -205,7 +206,7 @@ function extractEpisodeData(item) {
} else if (typeof guidItem?._ === 'string') {
episode.guid = guidItem._
} else {
Logger.error(`[podcastUtils] Invalid guid ${item['guid']} for ${episode.enclosure.url}`)
Logger.error(`[podcastUtils] Invalid guid for ${episode.enclosure.url}`, item['guid'])
}
}
@@ -407,7 +408,7 @@ module.exports.getPodcastFeed = (feedUrl, excludeEpisodeMetadata = false) => {
})
}
// Return array of episodes ordered by closest match (Levenshtein distance of 6 or less)
// Return array of episodes ordered by closest match using fuse.js
module.exports.findMatchingEpisodes = async (feedUrl, searchTitle) => {
const feed = await this.getPodcastFeed(feedUrl).catch(() => {
return null
@@ -420,32 +421,29 @@ module.exports.findMatchingEpisodes = async (feedUrl, searchTitle) => {
*
* @param {RssPodcast} feed
* @param {string} searchTitle
* @returns {Array<{ episode: RssPodcastEpisode, levenshtein: number }>}
* @param {number} [threshold=0.4] - 0.0 for perfect match, 1.0 for match anything
* @returns {Array<{ episode: RssPodcastEpisode }>}
*/
module.exports.findMatchingEpisodesInFeed = (feed, searchTitle) => {
searchTitle = searchTitle.toLowerCase().trim()
module.exports.findMatchingEpisodesInFeed = (feed, searchTitle, threshold = 0.4) => {
if (!feed?.episodes) {
return null
}
const fuseOptions = {
ignoreDiacritics: true,
threshold,
keys: [
{ name: 'title', weight: 0.7 }, // prefer match in title
{ name: 'subtitle', weight: 0.3 }
]
}
const fuse = new Fuse(feed.episodes, fuseOptions)
const matches = []
feed.episodes.forEach((ep) => {
if (!ep.title) return
const epTitle = ep.title.toLowerCase().trim()
if (epTitle === searchTitle) {
matches.push({
episode: ep,
levenshtein: 0
})
} else {
const levenshtein = levenshteinDistance(searchTitle, epTitle, true)
if (levenshtein <= 6 && epTitle.length > levenshtein) {
matches.push({
episode: ep,
levenshtein
})
}
}
fuse.search(searchTitle).forEach((match) => {
matches.push({
episode: match.item
})
})
return matches.sort((a, b) => a.levenshtein - b.levenshtein)
return matches
}
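
Episode matching now uses fuse.js fuzzy search with weighted title and subtitle keys instead of the previous Levenshtein cutoff, and results come back ordered by match quality. A small usage sketch with made-up episodes (the require path for the bundled fuse.js copy and the option semantics are taken from the diff):

const Fuse = require('../libs/fusejs')

const episodes = [
  { title: 'Episode 12: The Great Fire', subtitle: 'History hour' },
  { title: 'Episode 13: Aftermath', subtitle: 'History hour' }
]

const fuse = new Fuse(episodes, {
  ignoreDiacritics: true,
  threshold: 0.4, // 0.0 = perfect match only, 1.0 = match anything
  keys: [
    { name: 'title', weight: 0.7 }, // prefer matches in the title
    { name: 'subtitle', weight: 0.3 }
  ]
})

// Each result wraps the original episode as .item, already ordered by score
const matches = fuse.search('great fire').map((result) => ({ episode: result.item }))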


@@ -264,9 +264,15 @@ module.exports = {
} else if (sortBy === 'media.metadata.publishedYear') {
return [[Sequelize.literal(`CAST(\`book\`.\`publishedYear\` AS INTEGER)`), dir]]
} else if (sortBy === 'media.metadata.authorNameLF') {
return [[Sequelize.literal('`libraryItem`.`authorNamesLastFirst` COLLATE NOCASE'), dir]]
return [
[Sequelize.literal('`libraryItem`.`authorNamesLastFirst` COLLATE NOCASE'), dir],
[Sequelize.literal('`libraryItem`.`title` COLLATE NOCASE'), dir]
]
} else if (sortBy === 'media.metadata.authorName') {
return [[Sequelize.literal('`libraryItem`.`authorNamesFirstLast` COLLATE NOCASE'), dir]]
return [
[Sequelize.literal('`libraryItem`.`authorNamesFirstLast` COLLATE NOCASE'), dir],
[Sequelize.literal('`libraryItem`.`title` COLLATE NOCASE'), dir]
]
} else if (sortBy === 'media.metadata.title') {
if (collapseseries) {
return [[Sequelize.literal('display_title COLLATE NOCASE'), dir]]


@@ -149,11 +149,12 @@ module.exports = {
libraryId
}
const libraryItemIncludes = []
if (includeRSSFeed) {
if (filterGroup === 'feed-open' || includeRSSFeed) {
const rssFeedRequired = filterGroup === 'feed-open'
libraryItemIncludes.push({
model: Database.feedModel,
required: filterGroup === 'feed-open',
separate: true
required: rssFeedRequired,
separate: !rssFeedRequired
})
}
if (filterGroup === 'issues') {
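
The feed association is now included whenever the caller asks for RSS feeds or filters on open feeds. When the feed is required (the inner join behind the feed-open filter), the include is not loaded as a separate query, presumably because a separately-loaded include cannot enforce that join; separate loading is kept only for the optional case. An illustrative restatement of the include with that reasoning as comments:

const rssFeedRequired = filterGroup === 'feed-open'
libraryItemIncludes.push({
  model: Database.feedModel,
  required: rssFeedRequired, // inner join only when filtering to items with an open feed
  separate: !rssFeedRequired // a separately-loaded include cannot act as an inner-join filter
})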