Merge remote-tracking branch 'remotes/upstream/master' into allow-mrss-item-enclosures-for-podcasts

Toni Barth 2025-01-20 10:28:09 +01:00
commit bdd8e5bb58
116 changed files with 4080 additions and 3726 deletions

View file

@ -189,8 +189,14 @@ class CbzStreamZipComicBookExtractor extends AbstractComicBookExtractor {
}
close() {
this.archive?.close()
Logger.debug(`[CbzStreamZipComicBookExtractor] Closed comic book "${this.comicPath}"`)
this.archive
?.close()
.then(() => {
Logger.debug(`[CbzStreamZipComicBookExtractor] Closed comic book "${this.comicPath}"`)
})
.catch((error) => {
Logger.error(`[CbzStreamZipComicBookExtractor] Failed to close comic book "${this.comicPath}"`, error)
})
}
}
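A minimal sketch of the new close() pattern in the hunk above, assuming archive.close() returns a Promise (as node-stream-zip's async API does); the class name and console logging are illustrative, not part of the diff.

class ExampleExtractor {
  constructor(archive, comicPath) {
    this.archive = archive
    this.comicPath = comicPath
  }

  close() {
    // Optional chaining short-circuits the whole chain when open() was never called,
    // so neither .then nor .catch runs on an undefined archive.
    this.archive
      ?.close()
      .then(() => console.debug(`Closed comic book "${this.comicPath}"`))
      .catch((error) => console.error(`Failed to close comic book "${this.comicPath}"`, error))
  }
}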

View file

@ -5,11 +5,10 @@ const fs = require('../libs/fsExtra')
const Path = require('path')
const Logger = require('../Logger')
const { filePathToPOSIX, copyToExisting } = require('./fileUtils')
const LibraryItem = require('../objects/LibraryItem')
function escapeSingleQuotes(path) {
// return path.replace(/'/g, '\'\\\'\'')
return filePathToPOSIX(path).replace(/ /g, '\\ ').replace(/'/g, "\\'")
// A ' within a quoted string is escaped with '\'' in ffmpeg (see https://www.ffmpeg.org/ffmpeg-utils.html#Quoting-and-escaping)
return filePathToPOSIX(path).replace(/'/g, "'\\''")
}
// Returns first track start time
@ -33,7 +32,7 @@ async function writeConcatFile(tracks, outputPath, startTime = 0) {
var tracksToInclude = tracks.filter((t) => t.index >= trackToStartWithIndex)
var trackPaths = tracksToInclude.map((t) => {
var line = 'file ' + escapeSingleQuotes(t.metadata.path) + '\n' + `duration ${t.duration}`
var line = "file '" + escapeSingleQuotes(t.metadata.path) + "'\n" + `duration ${t.duration}`
return line
})
var inputstr = trackPaths.join('\n\n')
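An illustrative check of the new quoting (path and duration invented): a path containing a single quote should produce a concat-file entry that follows ffmpeg's '\'' escaping rule.

const escapeSingleQuotes = (path) => path.replace(/'/g, "'\\''") // filePathToPOSIX step omitted here

const line = "file '" + escapeSingleQuotes("/audio/It's a Test.mp3") + "'\n" + 'duration 123.45'
console.log(line)
// file '/audio/It'\''s a Test.mp3'
// duration 123.45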
@ -97,6 +96,11 @@ async function resizeImage(filePath, outputPath, width, height) {
}
module.exports.resizeImage = resizeImage
/**
*
* @param {import('../objects/PodcastEpisodeDownload')} podcastEpisodeDownload
* @returns {Promise<{success: boolean, isFfmpegError?: boolean}>}
*/
module.exports.downloadPodcastEpisode = (podcastEpisodeDownload) => {
return new Promise(async (resolve) => {
const response = await axios({
@ -106,44 +110,49 @@ module.exports.downloadPodcastEpisode = (podcastEpisodeDownload) => {
headers: {
'User-Agent': 'audiobookshelf (+https://audiobookshelf.org)'
},
timeout: 30000
timeout: global.PodcastDownloadTimeout
}).catch((error) => {
Logger.error(`[ffmpegHelpers] Failed to download podcast episode with url "${podcastEpisodeDownload.url}"`, error)
return null
})
if (!response) return resolve(false)
if (!response) {
return resolve({
success: false
})
}
/** @type {import('../libs/fluentFfmpeg/index').FfmpegCommand} */
const ffmpeg = Ffmpeg(response.data)
ffmpeg.addOption('-loglevel debug') // Debug logs printed on error
ffmpeg.outputOptions('-c:a', 'copy', '-map', '0:a', '-metadata', 'podcast=1')
const podcastMetadata = podcastEpisodeDownload.libraryItem.media.metadata
const podcastEpisode = podcastEpisodeDownload.podcastEpisode
/** @type {import('../models/Podcast')} */
const podcast = podcastEpisodeDownload.libraryItem.media
const podcastEpisode = podcastEpisodeDownload.rssPodcastEpisode
const finalSizeInBytes = Number(podcastEpisode.enclosure?.length || 0)
const taggings = {
album: podcastMetadata.title,
'album-sort': podcastMetadata.title,
artist: podcastMetadata.author,
'artist-sort': podcastMetadata.author,
album: podcast.title,
'album-sort': podcast.title,
artist: podcast.author,
'artist-sort': podcast.author,
comment: podcastEpisode.description,
subtitle: podcastEpisode.subtitle,
disc: podcastEpisode.season,
genre: podcastMetadata.genres.length ? podcastMetadata.genres.join(';') : null,
language: podcastMetadata.language,
MVNM: podcastMetadata.title,
genre: podcast.genres.length ? podcast.genres.join(';') : null,
language: podcast.language,
MVNM: podcast.title,
MVIN: podcastEpisode.episode,
track: podcastEpisode.episode,
'series-part': podcastEpisode.episode,
title: podcastEpisode.title,
'title-sort': podcastEpisode.title,
year: podcastEpisode.pubYear,
year: podcastEpisodeDownload.pubYear,
date: podcastEpisode.pubDate,
releasedate: podcastEpisode.pubDate,
'itunes-id': podcastMetadata.itunesId,
'podcast-type': podcastMetadata.type,
'episode-type': podcastMetadata.episodeType
'itunes-id': podcast.itunesId,
'podcast-type': podcast.podcastType,
'episode-type': podcastEpisode.episodeType
}
for (const tag in taggings) {
@ -172,7 +181,10 @@ module.exports.downloadPodcastEpisode = (podcastEpisodeDownload) => {
if (stderrLines.length) {
Logger.error(`Full stderr dump for episode url "${podcastEpisodeDownload.url}": ${stderrLines.join('\n')}`)
}
resolve(false)
resolve({
success: false,
isFfmpegError: true
})
})
ffmpeg.on('progress', (progress) => {
let progressPercent = 0
@ -184,7 +196,9 @@ module.exports.downloadPodcastEpisode = (podcastEpisodeDownload) => {
})
ffmpeg.on('end', () => {
Logger.debug(`[FfmpegHelpers] downloadPodcastEpisode: Complete`)
resolve(podcastEpisodeDownload.targetPath)
resolve({
success: true
})
})
ffmpeg.run()
})
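A hedged sketch of consuming the new result object inside an async caller; the caller name and the retry policy are assumptions, not taken from this diff.

const result = await ffmpegHelpers.downloadPodcastEpisode(podcastEpisodeDownload)
if (!result.success) {
  if (result.isFfmpegError) {
    // The download itself worked but ffmpeg failed, so the raw enclosure file may still be usable.
  } else {
    // The HTTP request failed; retrying later may make sense.
  }
}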
@ -359,28 +373,26 @@ function escapeFFMetadataValue(value) {
/**
* Retrieves the FFmpeg metadata object for a given library item.
*
* @param {LibraryItem} libraryItem - The library item containing the media metadata.
* @param {import('../models/LibraryItem')} libraryItem - The library item containing the media metadata.
* @param {number} audioFilesLength - The length of the audio files.
* @returns {Object} - The FFmpeg metadata object.
*/
function getFFMetadataObject(libraryItem, audioFilesLength) {
const metadata = libraryItem.media.metadata
const ffmetadata = {
title: metadata.title,
artist: metadata.authorName,
album_artist: metadata.authorName,
album: (metadata.title || '') + (metadata.subtitle ? `: ${metadata.subtitle}` : ''),
TIT3: metadata.subtitle, // mp3 only
genre: metadata.genres?.join('; '),
date: metadata.publishedYear,
comment: metadata.description,
description: metadata.description,
composer: metadata.narratorName,
copyright: metadata.publisher,
publisher: metadata.publisher, // mp3 only
title: libraryItem.media.title,
artist: libraryItem.media.authorName,
album_artist: libraryItem.media.authorName,
album: (libraryItem.media.title || '') + (libraryItem.media.subtitle ? `: ${libraryItem.media.subtitle}` : ''),
TIT3: libraryItem.media.subtitle, // mp3 only
genre: libraryItem.media.genres?.join('; '),
date: libraryItem.media.publishedYear,
comment: libraryItem.media.description,
description: libraryItem.media.description,
composer: (libraryItem.media.narrators || []).join(', '),
copyright: libraryItem.media.publisher,
publisher: libraryItem.media.publisher, // mp3 only
TRACKTOTAL: `${audioFilesLength}`, // mp3 only
grouping: metadata.series?.map((s) => s.name + (s.sequence ? ` #${s.sequence}` : '')).join('; ')
grouping: libraryItem.media.series?.map((s) => s.name + (s.bookSeries.sequence ? ` #${s.bookSeries.sequence}` : '')).join('; ')
}
Object.keys(ffmetadata).forEach((key) => {
if (!ffmetadata[key]) {
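An illustrative call against the new model shape (all values invented), showing roughly what getFFMetadataObject returns once the falsy-key cleanup at the end of this hunk has run:

const exampleItem = {
  media: {
    title: 'Example Book',
    subtitle: 'A Subtitle',
    authorName: 'Jane Doe',
    narrators: ['John Roe'],
    genres: ['Fiction'],
    publishedYear: '2020',
    description: 'Sample description.',
    publisher: 'Example Press',
    series: [{ name: 'Example Series', bookSeries: { sequence: '1' } }]
  }
}
getFFMetadataObject(exampleItem, 2)
// => {
//   title: 'Example Book', artist: 'Jane Doe', album_artist: 'Jane Doe',
//   album: 'Example Book: A Subtitle', TIT3: 'A Subtitle', genre: 'Fiction',
//   date: '2020', comment: 'Sample description.', description: 'Sample description.',
//   composer: 'John Roe', copyright: 'Example Press', publisher: 'Example Press',
//   TRACKTOTAL: '2', grouping: 'Example Series #1'
// }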
@ -396,7 +408,7 @@ module.exports.getFFMetadataObject = getFFMetadataObject
/**
* Merges audio files into a single output file using FFmpeg.
*
* @param {Array} audioTracks - The audio tracks to merge.
* @param {import('../models/Book').AudioFileObject[]} audioTracks - The audio tracks to merge.
* @param {number} duration - The total duration of the audio tracks.
* @param {string} itemCachePath - The path to the item cache.
* @param {string} outputFilePath - The path to the output file.

View file

@ -6,35 +6,41 @@ const naturalSort = createNewSortInstance({
})
module.exports = {
getSeriesFromBooks(books, filterSeries, hideSingleBookSeries) {
/**
*
* @param {import('../models/LibraryItem')[]} libraryItems
* @param {*} filterSeries
* @param {*} hideSingleBookSeries
* @returns
*/
getSeriesFromBooks(libraryItems, filterSeries, hideSingleBookSeries) {
const _series = {}
const seriesToFilterOut = {}
books.forEach((libraryItem) => {
libraryItems.forEach((libraryItem) => {
// get all book series for the item that are not already filtered out
const bookSeries = (libraryItem.media.metadata.series || []).filter((se) => !seriesToFilterOut[se.id])
if (!bookSeries.length) return
const allBookSeries = (libraryItem.media.series || []).filter((se) => !seriesToFilterOut[se.id])
if (!allBookSeries.length) return
bookSeries.forEach((bookSeriesObj) => {
// const series = allSeries.find(se => se.id === bookSeriesObj.id)
const abJson = libraryItem.toJSONMinified()
abJson.sequence = bookSeriesObj.sequence
allBookSeries.forEach((bookSeries) => {
const abJson = libraryItem.toOldJSONMinified()
abJson.sequence = bookSeries.bookSeries.sequence
if (filterSeries) {
abJson.filterSeriesSequence = libraryItem.media.metadata.getSeries(filterSeries).sequence
const series = libraryItem.media.series.find((se) => se.id === filterSeries)
abJson.filterSeriesSequence = series.bookSeries.sequence
}
if (!_series[bookSeriesObj.id]) {
_series[bookSeriesObj.id] = {
id: bookSeriesObj.id,
name: bookSeriesObj.name,
nameIgnorePrefix: getTitlePrefixAtEnd(bookSeriesObj.name),
nameIgnorePrefixSort: getTitleIgnorePrefix(bookSeriesObj.name),
if (!_series[bookSeries.id]) {
_series[bookSeries.id] = {
id: bookSeries.id,
name: bookSeries.name,
nameIgnorePrefix: getTitlePrefixAtEnd(bookSeries.name),
nameIgnorePrefixSort: getTitleIgnorePrefix(bookSeries.name),
type: 'series',
books: [abJson],
totalDuration: isNullOrNaN(abJson.media.duration) ? 0 : Number(abJson.media.duration)
}
} else {
_series[bookSeriesObj.id].books.push(abJson)
_series[bookSeriesObj.id].totalDuration += isNullOrNaN(abJson.media.duration) ? 0 : Number(abJson.media.duration)
_series[bookSeries.id].books.push(abJson)
_series[bookSeries.id].totalDuration += isNullOrNaN(abJson.media.duration) ? 0 : Number(abJson.media.duration)
}
})
})
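Illustrative shape of one _series entry built by the loop above (values made up; the exact output of the prefix helpers is an assumption):

const exampleSeriesEntry = {
  id: 'ser_abc123',
  name: 'The Example Series',
  nameIgnorePrefix: 'Example Series, The', // prefix moved to the end
  nameIgnorePrefixSort: 'Example Series', // prefix stripped for sorting
  type: 'series',
  books: [], // toOldJSONMinified() items, each given a sequence (and filterSeriesSequence when filtering)
  totalDuration: 12345.6
}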
@ -52,6 +58,13 @@ module.exports = {
})
},
/**
*
* @param {import('../models/LibraryItem')[]} libraryItems
* @param {string} filterSeries - series id
* @param {boolean} hideSingleBookSeries
* @returns
*/
collapseBookSeries(libraryItems, filterSeries, hideSingleBookSeries) {
// Get series from the library items. If this list is being collapsed after filtering for a series,
// don't collapse that series, only books that are in other series.
@ -123,8 +136,9 @@ module.exports = {
let libraryItems = books
.map((book) => {
const libraryItem = book.libraryItem
delete book.libraryItem
libraryItem.media = book
return Database.libraryItemModel.getOldLibraryItem(libraryItem)
return libraryItem
})
.filter((li) => {
return user.checkCanAccessLibraryItem(li)
@ -143,15 +157,18 @@ module.exports = {
if (!payload.sortBy || payload.sortBy === 'sequence') {
sortArray = [
{
[direction]: (li) => li.media.metadata.getSeries(seriesId).sequence
[direction]: (li) => {
const series = li.media.series.find((se) => se.id === seriesId)
return series.bookSeries.sequence
}
},
{
// If no series sequence then fall back to sorting by title (or collapsed series name for sub-series)
[direction]: (li) => {
if (sortingIgnorePrefix) {
return li.collapsedSeries?.nameIgnorePrefix || li.media.metadata.titleIgnorePrefix
return li.collapsedSeries?.nameIgnorePrefix || li.media.titleIgnorePrefix
} else {
return li.collapsedSeries?.name || li.media.metadata.title
return li.collapsedSeries?.name || li.media.title
}
}
}
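A hedged sketch of how a sortArray like this is typically applied with the fast-sort instance created as naturalSort at the top of this file; seriesId, the item list, and the ascending direction are assumed from the surrounding code.

const sortedItems = naturalSort(libraryItems).by([
  { asc: (li) => li.media.series.find((se) => se.id === seriesId)?.bookSeries.sequence },
  { asc: (li) => li.collapsedSeries?.name || li.media.title } // fallback when there is no sequence
])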
@ -174,9 +191,9 @@ module.exports = {
[direction]: (li) => {
if (payload.sortBy === 'media.metadata.title') {
if (sortingIgnorePrefix) {
return li.collapsedSeries?.nameIgnorePrefix || li.media.metadata.titleIgnorePrefix
return li.collapsedSeries?.nameIgnorePrefix || li.media.titleIgnorePrefix
} else {
return li.collapsedSeries?.name || li.media.metadata.title
return li.collapsedSeries?.name || li.media.title
}
} else {
return payload.sortBy.split('.').reduce((a, b) => a[b], li)
@ -194,12 +211,12 @@ module.exports = {
return Promise.all(
libraryItems.map(async (li) => {
const filteredSeries = li.media.metadata.getSeries(seriesId)
const json = li.toJSONMinified()
const filteredSeries = li.media.series.find((se) => se.id === seriesId)
const json = li.toOldJSONMinified()
json.media.metadata.series = {
id: filteredSeries.id,
name: filteredSeries.name,
sequence: filteredSeries.sequence
sequence: filteredSeries.bookSeries.sequence
}
if (li.collapsedSeries) {

View file

@ -1200,7 +1200,7 @@ async function migrationPatchNewColumns(queryInterface) {
*/
async function handleOldLibraryItems(ctx) {
const oldLibraryItems = await oldDbFiles.loadOldData('libraryItems')
const libraryItems = await ctx.models.libraryItem.getAllOldLibraryItems()
const libraryItems = await ctx.models.libraryItem.findAllExpandedWhere()
const bulkUpdateItems = []
const bulkUpdateEpisodes = []
@ -1218,8 +1218,8 @@ async function handleOldLibraryItems(ctx) {
}
})
if (libraryItem.media.episodes?.length && matchingOldLibraryItem.media.episodes?.length) {
for (const podcastEpisode of libraryItem.media.episodes) {
if (libraryItem.media.podcastEpisodes?.length && matchingOldLibraryItem.media.episodes?.length) {
for (const podcastEpisode of libraryItem.media.podcastEpisodes) {
// Find matching old episode by audio file ino
const matchingOldPodcastEpisode = matchingOldLibraryItem.media.episodes.find((oep) => oep.audioFile?.ino && oep.audioFile.ino === podcastEpisode.audioFile?.ino)
if (matchingOldPodcastEpisode) {

View file

@ -43,7 +43,9 @@ async function parse(ebookFile) {
archive = createComicBookExtractor(comicPath)
await archive.open()
const filePaths = await archive.getFilePaths()
const filePaths = await archive.getFilePaths().catch((error) => {
Logger.error(`[parseComicMetadata] Failed to get file paths from comic at "${comicPath}"`, error)
})
// Sort the file paths in a natural order to get the first image
filePaths.sort((a, b) => {

View file

@ -4,11 +4,77 @@ const Logger = require('../Logger')
const { xmlToJSON, levenshteinDistance } = require('./index')
const htmlSanitizer = require('../utils/htmlSanitizer')
/**
* @typedef RssPodcastEpisode
* @property {string} title
* @property {string} subtitle
* @property {string} description
* @property {string} descriptionPlain
* @property {string} pubDate
* @property {string} episodeType
* @property {string} season
* @property {string} episode
* @property {string} author
* @property {string} duration
* @property {string} explicit
* @property {number} publishedAt - Unix timestamp
* @property {{ url: string, type?: string, length?: string }} enclosure
* @property {string} guid
* @property {string} chaptersUrl
* @property {string} chaptersType
*/
/**
* @typedef RssPodcastMetadata
* @property {string} title
* @property {string} language
* @property {string} explicit
* @property {string} author
* @property {string} pubDate
* @property {string} link
* @property {string} image
* @property {string[]} categories
* @property {string} feedUrl
* @property {string} description
* @property {string} descriptionPlain
* @property {string} type
*/
/**
* @typedef RssPodcast
* @property {RssPodcastMetadata} metadata
* @property {RssPodcastEpisode[]} episodes
* @property {number} numEpisodes
*/
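A hedged usage sketch for the typedefs above; the require path, variable names, and the surrounding async context are assumptions. The enclosure fields (url, type, length) come straight from the feed's <enclosure> tag.

const podcastUtils = require('./podcastUtils') // path assumed

const feed = await podcastUtils.getPodcastFeed('https://example.com/feed.xml') // RssPodcast | null
if (feed) {
  const episode = feed.episodes[0]
  console.log(episode.title, episode.enclosure?.url, episode.enclosure?.length)
}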
function extractFirstArrayItem(json, key) {
if (!json[key]?.length) return null
return json[key][0]
}
function extractStringOrStringify(json) {
try {
if (typeof json[Object.keys(json)[0]]?.[0] === 'string') {
return json[Object.keys(json)[0]][0]
}
// Handles case where html was included without being wrapped in CDATA
return JSON.stringify(json)
} catch {
return ''
}
}
function extractFirstArrayItemString(json, key) {
const item = extractFirstArrayItem(json, key)
if (!item) return ''
if (typeof item === 'object') {
if (item?.['_'] && typeof item['_'] === 'string') return item['_']
return extractStringOrStringify(item)
}
return typeof item === 'string' ? item : ''
}
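Illustrative inputs for the helper above (values invented), covering the plain-string, itunes text-node, and unwrapped-HTML cases:

extractFirstArrayItemString({ description: ['Plain text'] }, 'description') // => 'Plain text'
extractFirstArrayItemString({ description: [{ _: 'Text node', $: { type: 'html' } }] }, 'description') // => 'Text node'
extractFirstArrayItemString({ description: [{ p: ['<b>html</b>'] }] }, 'description') // => '<b>html</b>' via extractStringOrStringify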
function extractImage(channel) {
if (!channel.image || !channel.image.url || !channel.image.url.length) {
if (!channel['itunes:image'] || !channel['itunes:image'].length || !channel['itunes:image'][0]['$']) {
@ -58,7 +124,7 @@ function extractPodcastMetadata(channel) {
}
if (channel['description']) {
const rawDescription = extractFirstArrayItem(channel, 'description') || ''
const rawDescription = extractFirstArrayItemString(channel, 'description')
metadata.description = htmlSanitizer.sanitize(rawDescription.trim())
metadata.descriptionPlain = htmlSanitizer.stripAllTags(rawDescription.trim())
}
@ -106,7 +172,8 @@ function extractEpisodeData(item) {
// Supposed to be the plaintext description but not always followed
if (item['description']) {
const rawDescription = extractFirstArrayItem(item, 'description') || ''
const rawDescription = extractFirstArrayItemString(item, 'description')
if (!episode.description) episode.description = htmlSanitizer.sanitize(rawDescription.trim())
episode.descriptionPlain = htmlSanitizer.stripAllTags(rawDescription.trim())
}
@ -136,9 +203,7 @@ function extractEpisodeData(item) {
const arrayFields = ['title', 'itunes:episodeType', 'itunes:season', 'itunes:episode', 'itunes:author', 'itunes:duration', 'itunes:explicit', 'itunes:subtitle']
arrayFields.forEach((key) => {
const cleanKey = key.split(':').pop()
let value = extractFirstArrayItem(item, key)
if (value?.['_']) value = value['_']
episode[cleanKey] = value
episode[cleanKey] = extractFirstArrayItemString(item, key)
})
return episode
}
@ -227,7 +292,7 @@ module.exports.parsePodcastRssFeedXml = async (xml, excludeEpisodeMetadata = fal
*
* @param {string} feedUrl
* @param {boolean} [excludeEpisodeMetadata=false]
* @returns {Promise}
* @returns {Promise<RssPodcast|null>}
*/
module.exports.getPodcastFeed = (feedUrl, excludeEpisodeMetadata = false) => {
Logger.debug(`[podcastUtils] getPodcastFeed for "${feedUrl}"`)
@ -242,7 +307,7 @@ module.exports.getPodcastFeed = (feedUrl, excludeEpisodeMetadata = false) => {
return axios({
url: feedUrl,
method: 'GET',
timeout: 12000,
timeout: global.PodcastDownloadTimeout,
responseType: 'arraybuffer',
headers: {
Accept: 'application/rss+xml, application/xhtml+xml, application/xml, */*;q=0.8',
@ -291,6 +356,12 @@ module.exports.findMatchingEpisodes = async (feedUrl, searchTitle) => {
return this.findMatchingEpisodesInFeed(feed, searchTitle)
}
/**
*
* @param {RssPodcast} feed
* @param {string} searchTitle
* @returns {Array<{ episode: RssPodcastEpisode, levenshtein: number }>}
*/
module.exports.findMatchingEpisodesInFeed = (feed, searchTitle) => {
searchTitle = searchTitle.toLowerCase().trim()
if (!feed?.episodes) {
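A hedged usage sketch (caller and variable names assumed): match feed episodes against a search title and get back episode/levenshtein pairs as described by the JSDoc above.

const matches = podcastUtils.findMatchingEpisodesInFeed(feed, 'Episode 42: The Answer')
for (const { episode, levenshtein } of matches) {
  console.log(levenshtein, episode.title)
}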

View file

@ -143,6 +143,7 @@ function parseChapters(_chapters) {
.map((chap) => {
let title = chap['TAG:title'] || chap.title || ''
if (!title && chap.tags?.title) title = chap.tags.title
title = title.trim()
const timebase = chap.time_base?.includes('/') ? Number(chap.time_base.split('/')[1]) : 1
const start = !isNullOrNaN(chap.start_time) ? Number(chap.start_time) : !isNullOrNaN(chap.start) ? Number(chap.start) / timebase : 0
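An illustrative chapter object (ffprobe-style fields assumed) showing the new trim together with the existing time_base handling:

const chap = { 'TAG:title': '  Chapter 1 ', time_base: '1/1000', start: 15000 }
// title -> 'Chapter 1' after title.trim(); start -> 15000 / 1000 = 15 seconds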

View file

@ -18,7 +18,7 @@ module.exports = {
* @param {string} libraryId
* @param {import('../../models/User')} user
* @param {object} options
* @returns {object} { libraryItems:LibraryItem[], count:number }
* @returns {Promise<{ libraryItems:import('../../models/LibraryItem')[], count:number }>}
*/
async getFilteredLibraryItems(libraryId, user, options) {
const { filterBy, sortBy, sortDesc, limit, offset, collapseseries, include, mediaType } = options
@ -52,7 +52,7 @@ module.exports = {
const { libraryItems, count } = await libraryItemsBookFilters.getFilteredLibraryItems(library.id, user, 'progress', 'in-progress', 'progress', true, false, include, limit, 0, true)
return {
items: libraryItems.map((li) => {
const oldLibraryItem = Database.libraryItemModel.getOldLibraryItem(li).toJSONMinified()
const oldLibraryItem = li.toOldJSONMinified()
if (li.rssFeed) {
oldLibraryItem.rssFeed = li.rssFeed.toOldJSONMinified()
}
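The same substitution repeats throughout this file, so it is sketched once here (rssFeed handling assumed identical on each shelf): the new LibraryItem model serializes itself to the legacy API shape instead of being converted through Database.libraryItemModel.getOldLibraryItem first.

const items = libraryItems.map((li) => {
  const oldLibraryItem = li.toOldJSONMinified()
  if (li.rssFeed) oldLibraryItem.rssFeed = li.rssFeed.toOldJSONMinified()
  return oldLibraryItem
})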
@ -68,7 +68,7 @@ module.exports = {
return {
count,
items: libraryItems.map((li) => {
const oldLibraryItem = Database.libraryItemModel.getOldLibraryItem(li).toJSONMinified()
const oldLibraryItem = li.toOldJSONMinified()
oldLibraryItem.recentEpisode = li.recentEpisode
return oldLibraryItem
})
@ -89,7 +89,7 @@ module.exports = {
const { libraryItems, count } = await libraryItemsBookFilters.getFilteredLibraryItems(library.id, user, 'recent', null, 'addedAt', true, false, include, limit, 0)
return {
libraryItems: libraryItems.map((li) => {
const oldLibraryItem = Database.libraryItemModel.getOldLibraryItem(li).toJSONMinified()
const oldLibraryItem = li.toOldJSONMinified()
if (li.rssFeed) {
oldLibraryItem.rssFeed = li.rssFeed.toOldJSONMinified()
}
@ -107,7 +107,7 @@ module.exports = {
const { libraryItems, count } = await libraryItemsPodcastFilters.getFilteredLibraryItems(library.id, user, 'recent', null, 'addedAt', true, include, limit, 0)
return {
libraryItems: libraryItems.map((li) => {
const oldLibraryItem = Database.libraryItemModel.getOldLibraryItem(li).toJSONMinified()
const oldLibraryItem = li.toOldJSONMinified()
if (li.rssFeed) {
oldLibraryItem.rssFeed = li.rssFeed.toOldJSONMinified()
}
@ -136,7 +136,7 @@ module.exports = {
const { libraryItems, count } = await libraryItemsBookFilters.getContinueSeriesLibraryItems(library, user, include, limit, 0)
return {
libraryItems: libraryItems.map((li) => {
const oldLibraryItem = Database.libraryItemModel.getOldLibraryItem(li).toJSONMinified()
const oldLibraryItem = li.toOldJSONMinified()
if (li.rssFeed) {
oldLibraryItem.rssFeed = li.rssFeed.toOldJSONMinified()
}
@ -166,7 +166,7 @@ module.exports = {
const { libraryItems, count } = await libraryItemsBookFilters.getFilteredLibraryItems(library.id, user, 'progress', 'finished', 'progress', true, false, include, limit, 0)
return {
items: libraryItems.map((li) => {
const oldLibraryItem = Database.libraryItemModel.getOldLibraryItem(li).toJSONMinified()
const oldLibraryItem = li.toOldJSONMinified()
if (li.rssFeed) {
oldLibraryItem.rssFeed = li.rssFeed.toOldJSONMinified()
}
@ -182,7 +182,7 @@ module.exports = {
return {
count,
items: libraryItems.map((li) => {
const oldLibraryItem = Database.libraryItemModel.getOldLibraryItem(li).toJSONMinified()
const oldLibraryItem = li.toOldJSONMinified()
oldLibraryItem.recentEpisode = li.recentEpisode
return oldLibraryItem
})
@ -293,15 +293,17 @@ module.exports = {
})
oldSeries.books = s.bookSeries
.map((bs) => {
const libraryItem = bs.book.libraryItem?.toJSON()
const libraryItem = bs.book.libraryItem
if (!libraryItem) {
Logger.warn(`Book series book has no libraryItem`, bs, bs.book, 'series=', series)
return null
}
delete bs.book.libraryItem
bs.book.authors = [] // Not needed
bs.book.series = [] // Not needed
libraryItem.media = bs.book
const oldLibraryItem = Database.libraryItemModel.getOldLibraryItem(libraryItem).toJSONMinified()
const oldLibraryItem = libraryItem.toOldJSONMinified()
return oldLibraryItem
})
.filter((b) => b)
@ -373,7 +375,7 @@ module.exports = {
const { libraryItems, count } = await libraryItemsBookFilters.getDiscoverLibraryItems(library.id, user, include, limit)
return {
libraryItems: libraryItems.map((li) => {
const oldLibraryItem = Database.libraryItemModel.getOldLibraryItem(li).toJSONMinified()
const oldLibraryItem = li.toOldJSONMinified()
if (li.rssFeed) {
oldLibraryItem.rssFeed = li.rssFeed.toOldJSONMinified()
}
@ -400,7 +402,7 @@ module.exports = {
return {
count,
libraryItems: libraryItems.map((li) => {
const oldLibraryItem = Database.libraryItemModel.getOldLibraryItem(li).toJSONMinified()
const oldLibraryItem = li.toOldJSONMinified()
oldLibraryItem.recentEpisode = li.recentEpisode
return oldLibraryItem
})
@ -413,7 +415,7 @@ module.exports = {
* @param {import('../../models/User')} user
* @param {number} limit
* @param {number} offset
* @returns {Promise<{ libraryItems:import('../../objects/LibraryItem')[], count:number }>}
* @returns {Promise<{ libraryItems:import('../../models/LibraryItem')[], count:number }>}
*/
async getLibraryItemsForAuthor(author, user, limit, offset) {
const { libraryItems, count } = await libraryItemsBookFilters.getFilteredLibraryItems(author.libraryId, user, 'authors', author.id, 'addedAt', true, false, [], limit, offset)

View file

@ -349,7 +349,7 @@ module.exports = {
* @param {number} limit
* @param {number} offset
* @param {boolean} isHomePage for home page shelves
* @returns {object} { libraryItems:LibraryItem[], count:number }
* @returns {{ libraryItems: import('../../models/LibraryItem')[], count: number }}
*/
async getFilteredLibraryItems(libraryId, user, filterGroup, filterValue, sortBy, sortDesc, collapseseries, include, limit, offset, isHomePage = false) {
// TODO: Handle collapse sub-series
@ -583,8 +583,8 @@ module.exports = {
})
const libraryItems = books.map((bookExpanded) => {
const libraryItem = bookExpanded.libraryItem.toJSON()
const book = bookExpanded.toJSON()
const libraryItem = bookExpanded.libraryItem
const book = bookExpanded
if (filterGroup === 'series' && book.series?.length) {
// For showing sequence on book cover when filtering for series
@ -596,27 +596,37 @@ module.exports = {
}
delete book.libraryItem
delete book.authors
delete book.series
book.series =
book.bookSeries?.map((bs) => {
const series = bs.series
delete bs.series
series.bookSeries = bs
return series
}) || []
delete book.bookSeries
book.authors = book.bookAuthors?.map((ba) => ba.author) || []
delete book.bookAuthors
// For showing details of collapsed series
if (collapseseries && book.bookSeries?.length) {
const collapsedSeries = book.bookSeries.find((bs) => collapseSeriesBookSeries.some((cbs) => cbs.id === bs.id))
if (collapseseries && book.series?.length) {
const collapsedSeries = book.series.find((bs) => collapseSeriesBookSeries.some((cbs) => cbs.id === bs.bookSeries.id))
if (collapsedSeries) {
const collapseSeriesObj = collapseSeriesBookSeries.find((csbs) => csbs.id === collapsedSeries.id)
const collapseSeriesObj = collapseSeriesBookSeries.find((csbs) => csbs.id === collapsedSeries.bookSeries.id)
libraryItem.collapsedSeries = {
id: collapsedSeries.series.id,
name: collapsedSeries.series.name,
nameIgnorePrefix: collapsedSeries.series.nameIgnorePrefix,
sequence: collapsedSeries.sequence,
id: collapsedSeries.id,
name: collapsedSeries.name,
nameIgnorePrefix: collapsedSeries.nameIgnorePrefix,
sequence: collapsedSeries.bookSeries.sequence,
numBooks: collapseSeriesObj?.numBooks || 0,
libraryItemIds: collapseSeriesObj?.libraryItemIds || []
}
}
}
if (bookExpanded.libraryItem.feeds?.length) {
libraryItem.rssFeed = bookExpanded.libraryItem.feeds[0]
if (libraryItem.feeds?.length) {
libraryItem.rssFeed = libraryItem.feeds[0]
}
if (includeMediaItemShare) {
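A hedged sketch of the bookSeries-to-series remap introduced above (shapes inferred from the diff, sample values invented): each BookSeries join row is folded into its Series instance so callers can read series[n].bookSeries.sequence.

const bookSeriesRows = [{ id: 'bs1', sequence: '2', series: { id: 'ser1', name: 'Example Series' } }]
const series = bookSeriesRows.map((bs) => {
  const s = bs.series
  delete bs.series
  s.bookSeries = bs
  return s
})
// => [{ id: 'ser1', name: 'Example Series', bookSeries: { id: 'bs1', sequence: '2' } }]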
@ -646,7 +656,7 @@ module.exports = {
* @param {string[]} include
* @param {number} limit
* @param {number} offset
* @returns {{ libraryItems:import('../../models/LibraryItem')[], count:number }}
* @returns {Promise<{ libraryItems:import('../../models/LibraryItem')[], count:number }>}
*/
async getContinueSeriesLibraryItems(library, user, include, limit, offset) {
const libraryId = library.id
@ -758,16 +768,19 @@ module.exports = {
}
}
const libraryItem = s.bookSeries[bookIndex].book.libraryItem.toJSON()
const book = s.bookSeries[bookIndex].book.toJSON()
const libraryItem = s.bookSeries[bookIndex].book.libraryItem
const book = s.bookSeries[bookIndex].book
delete book.libraryItem
book.series = []
libraryItem.series = {
id: s.id,
name: s.name,
sequence: s.bookSeries[bookIndex].sequence
}
if (s.bookSeries[bookIndex].book.libraryItem.feeds?.length) {
libraryItem.rssFeed = s.bookSeries[bookIndex].book.libraryItem.feeds[0]
if (libraryItem.feeds?.length) {
libraryItem.rssFeed = libraryItem.feeds[0]
}
libraryItem.media = book
return libraryItem
@ -788,7 +801,7 @@ module.exports = {
* @param {import('../../models/User')} user
* @param {string[]} include
* @param {number} limit
* @returns {object} {libraryItems:LibraryItem, count:number}
* @returns {Promise<{ libraryItems: import('../../models/LibraryItem')[], count: number }>}
*/
async getDiscoverLibraryItems(libraryId, user, include, limit) {
const userPermissionBookWhere = this.getUserPermissionBookWhereQuery(user)
@ -895,13 +908,26 @@ module.exports = {
// Step 3: Map books to library items
const libraryItems = books.map((bookExpanded) => {
const libraryItem = bookExpanded.libraryItem.toJSON()
const book = bookExpanded.toJSON()
const libraryItem = bookExpanded.libraryItem
const book = bookExpanded
delete book.libraryItem
book.series =
book.bookSeries?.map((bs) => {
const series = bs.series
delete bs.series
series.bookSeries = bs
return series
}) || []
delete book.bookSeries
book.authors = book.bookAuthors?.map((ba) => ba.author) || []
delete book.bookAuthors
libraryItem.media = book
if (bookExpanded.libraryItem.feeds?.length) {
libraryItem.rssFeed = bookExpanded.libraryItem.feeds[0]
if (libraryItem.feeds?.length) {
libraryItem.rssFeed = libraryItem.feeds[0]
}
return libraryItem
@ -961,11 +987,11 @@ module.exports = {
* Get library items for series
* @param {import('../../models/Series')} series
* @param {import('../../models/User')} [user]
* @returns {Promise<import('../../objects/LibraryItem')[]>}
* @returns {Promise<import('../../models/LibraryItem')[]>}
*/
async getLibraryItemsForSeries(series, user) {
const { libraryItems } = await this.getFilteredLibraryItems(series.libraryId, user, 'series', series.id, null, null, false, [], null, null)
return libraryItems.map((li) => Database.libraryItemModel.getOldLibraryItem(li))
return libraryItems
},
/**
@ -1040,9 +1066,21 @@ module.exports = {
for (const book of books) {
const libraryItem = book.libraryItem
delete book.libraryItem
book.series = book.bookSeries.map((bs) => {
const series = bs.series
delete bs.series
series.bookSeries = bs
return series
})
delete book.bookSeries
book.authors = book.bookAuthors.map((ba) => ba.author)
delete book.bookAuthors
libraryItem.media = book
itemMatches.push({
libraryItem: Database.libraryItemModel.getOldLibraryItem(libraryItem).toJSONExpanded()
libraryItem: libraryItem.toOldJSONExpanded()
})
}
@ -1132,7 +1170,9 @@ module.exports = {
const books = series.bookSeries.map((bs) => {
const libraryItem = bs.book.libraryItem
libraryItem.media = bs.book
return Database.libraryItemModel.getOldLibraryItem(libraryItem).toJSON()
libraryItem.media.authors = []
libraryItem.media.series = []
return libraryItem.toOldJSON()
})
seriesMatches.push({
series: series.toOldJSON(),

View file

@ -107,7 +107,7 @@ module.exports = {
* @param {string[]} include
* @param {number} limit
* @param {number} offset
* @returns {object} { libraryItems:LibraryItem[], count:number }
* @returns {Promise<{ libraryItems: import('../../models/LibraryItem')[], count: number }>}
*/
async getFilteredLibraryItems(libraryId, user, filterGroup, filterValue, sortBy, sortDesc, include, limit, offset) {
const includeRSSFeed = include.includes('rssfeed')
@ -175,16 +175,19 @@ module.exports = {
})
const libraryItems = podcasts.map((podcastExpanded) => {
const libraryItem = podcastExpanded.libraryItem.toJSON()
const podcast = podcastExpanded.toJSON()
const libraryItem = podcastExpanded.libraryItem
const podcast = podcastExpanded
delete podcast.libraryItem
if (podcastExpanded.libraryItem.feeds?.length) {
libraryItem.rssFeed = podcastExpanded.libraryItem.feeds[0]
if (libraryItem.feeds?.length) {
libraryItem.rssFeed = libraryItem.feeds[0]
}
if (podcast.numEpisodesIncomplete) {
libraryItem.numEpisodesIncomplete = podcast.numEpisodesIncomplete
if (podcast.dataValues.numEpisodesIncomplete) {
libraryItem.numEpisodesIncomplete = podcast.dataValues.numEpisodesIncomplete
}
if (podcast.dataValues.numEpisodes) {
podcast.numEpisodes = podcast.dataValues.numEpisodes
}
libraryItem.media = podcast
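A hedged reading of the dataValues access above: numEpisodes and numEpisodesIncomplete are aggregate columns added to the query, so on a Sequelize instance they are only present on dataValues rather than as declared model attributes. Illustrative access:

const numIncomplete = podcastExpanded.dataValues.numEpisodesIncomplete || 0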
@ -209,7 +212,7 @@ module.exports = {
* @param {number} limit
* @param {number} offset
* @param {boolean} isHomePage for home page shelves
* @returns {object} {libraryItems:LibraryItem[], count:number}
* @returns {Promise<{ libraryItems: import('../../models/LibraryItem')[], count: number }>}
*/
async getFilteredPodcastEpisodes(libraryId, user, filterGroup, filterValue, sortBy, sortDesc, limit, offset, isHomePage = false) {
if (sortBy === 'progress' && filterGroup !== 'progress') {
@ -289,11 +292,12 @@ module.exports = {
})
const libraryItems = podcastEpisodes.map((ep) => {
const libraryItem = ep.podcast.libraryItem.toJSON()
const podcast = ep.podcast.toJSON()
const libraryItem = ep.podcast.libraryItem
const podcast = ep.podcast
delete podcast.libraryItem
libraryItem.media = podcast
libraryItem.recentEpisode = ep.getOldPodcastEpisode(libraryItem.id).toJSON()
libraryItem.recentEpisode = ep.toOldJSON(libraryItem.id)
return libraryItem
})
@ -362,8 +366,9 @@ module.exports = {
const libraryItem = podcast.libraryItem
delete podcast.libraryItem
libraryItem.media = podcast
libraryItem.media.podcastEpisodes = []
itemMatches.push({
libraryItem: Database.libraryItemModel.getOldLibraryItem(libraryItem).toJSONExpanded()
libraryItem: libraryItem.toOldJSONExpanded()
})
}
@ -455,13 +460,14 @@ module.exports = {
})
const episodeResults = episodes.map((ep) => {
const libraryItem = ep.podcast.libraryItem
libraryItem.media = ep.podcast
const oldPodcast = Database.podcastModel.getOldPodcast(libraryItem)
const oldPodcastEpisode = ep.getOldPodcastEpisode(libraryItem.id).toJSONExpanded()
oldPodcastEpisode.podcast = oldPodcast
oldPodcastEpisode.libraryId = libraryItem.libraryId
return oldPodcastEpisode
ep.podcast.podcastEpisodes = [] // Not needed
const oldPodcastJson = ep.podcast.toOldJSON(ep.podcast.libraryItem.id)
const oldPodcastEpisodeJson = ep.toOldJSONExpanded(ep.podcast.libraryItem.id)
oldPodcastEpisodeJson.podcast = oldPodcastJson
oldPodcastEpisodeJson.libraryId = ep.podcast.libraryItem.libraryId
return oldPodcastEpisodeJson
})
return episodeResults

View file

@ -162,6 +162,12 @@ module.exports = {
include: [
{
model: Database.libraryItemModel
},
{
model: Database.authorModel
},
{
model: Database.seriesModel
}
]
},
@ -195,10 +201,10 @@ module.exports = {
})
})
oldSeries.books = s.bookSeries.map((bs) => {
const libraryItem = bs.book.libraryItem.toJSON()
const libraryItem = bs.book.libraryItem
delete bs.book.libraryItem
libraryItem.media = bs.book
const oldLibraryItem = Database.libraryItemModel.getOldLibraryItem(libraryItem).toJSONMinified()
const oldLibraryItem = libraryItem.toOldJSONMinified()
return oldLibraryItem
})
allOldSeries.push(oldSeries)