Merge branch 'advplyr:master' into dewyer/add-custom-metadata-provider

FlyinPancake 2024-01-13 01:08:23 +01:00 committed by GitHub
commit 6ef4944d89
31 changed files with 764 additions and 309 deletions


@ -182,11 +182,11 @@ class Database {
if (process.env.QUERY_LOGGING === "log") {
// Setting QUERY_LOGGING=log will log all Sequelize queries before they run
Logger.info(`[Database] Query logging enabled`)
logging = (query) => Logger.dev(`Running the following query:\n ${query}`)
logging = (query) => Logger.debug(`Running the following query:\n ${query}`)
} else if (process.env.QUERY_LOGGING === "benchmark") {
// Setting QUERY_LOGGING=benchmark will log all Sequelize queries and their execution times, after they run
Logger.info(`[Database] Query benchmarking enabled`)
logging = (query, time) => Logger.dev(`Ran the following query in ${time}ms:\n ${query}`)
logging = (query, time) => Logger.debug(`Ran the following query in ${time}ms:\n ${query}`)
benchmark = true
}
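For orientation, `logging` and `benchmark` are standard Sequelize constructor options that this block builds up. The sketch below is not part of the diff; it only illustrates how such options are typically passed when the connection is created (the dialect and storage path are assumptions):

// Minimal sketch, assuming SQLite and the callbacks built above
const { Sequelize } = require('sequelize')
const Logger = require('./Logger') // location assumed

const logging = (query, time) => Logger.debug(`Ran the following query in ${time}ms:\n ${query}`)
const benchmark = true // tells Sequelize to pass the execution time to the logging callback

const sequelize = new Sequelize({
  dialect: 'sqlite',
  storage: '/config/absdatabase.sqlite', // hypothetical path for illustration
  logging,
  benchmark
})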


@ -5,7 +5,6 @@ class Logger {
constructor() {
this.isDev = process.env.NODE_ENV !== 'production'
this.logLevel = !this.isDev ? LogLevel.INFO : LogLevel.TRACE
this.hideDevLogs = process.env.HIDE_DEV_LOGS === undefined ? !this.isDev : process.env.HIDE_DEV_LOGS === '1'
this.socketListeners = []
this.logManager = null
@ -88,15 +87,6 @@ class Logger {
this.debug(`Set Log Level to ${this.levelString}`)
}
/**
* Only to console and only for development
* @param {...any} args
*/
dev(...args) {
if (this.hideDevLogs) return
console.log(`[${this.timestamp}] DEV:`, ...args)
}
trace(...args) {
if (this.logLevel > LogLevel.TRACE) return
console.trace(`[${this.timestamp}] TRACE:`, ...args)


@ -1,31 +1,69 @@
const Path = require('path')
const Logger = require('../Logger')
const Database = require('../Database')
const fs = require('../libs/fsExtra')
const { toNumber } = require('../utils/index')
const fileUtils = require('../utils/fileUtils')
class FileSystemController {
constructor() { }
/**
*
* @param {import('express').Request} req
* @param {import('express').Response} res
*/
async getPaths(req, res) {
if (!req.user.isAdminOrUp) {
Logger.error(`[FileSystemController] Non-admin user attempting to get filesystem paths`, req.user)
return res.sendStatus(403)
}
const excludedDirs = ['node_modules', 'client', 'server', '.git', 'static', 'build', 'dist', 'metadata', 'config', 'sys', 'proc'].map(dirname => {
return Path.sep + dirname
})
const relpath = req.query.path
const level = toNumber(req.query.level, 0)
// Do not include existing mapped library paths in response
const libraryFoldersPaths = await Database.libraryFolderModel.getAllLibraryFolderPaths()
libraryFoldersPaths.forEach((path) => {
let dir = path || ''
if (dir.includes(global.appRoot)) dir = dir.replace(global.appRoot, '')
excludedDirs.push(dir)
// Validate path. Must be absolute
if (relpath && (!Path.isAbsolute(relpath) || !await fs.pathExists(relpath))) {
Logger.error(`[FileSystemController] Invalid path in query string "${relpath}"`)
return res.status(400).send('Invalid "path" query string')
}
Logger.debug(`[FileSystemController] Getting file paths at ${relpath || 'root'} (${level})`)
let directories = []
// Windows returns drives first
if (global.isWin) {
if (relpath) {
directories = await fileUtils.getDirectoriesInPath(relpath, level)
} else {
const drives = await fileUtils.getWindowsDrives().catch((error) => {
Logger.error(`[FileSystemController] Failed to get windows drives`, error)
return []
})
if (drives.length) {
directories = drives.map(d => {
return {
path: d,
dirname: d,
level: 0
}
})
}
}
} else {
directories = await fileUtils.getDirectoriesInPath(relpath || '/', level)
}
// Exclude some dirs from this project to be cleaner in Docker
const excludedDirs = ['node_modules', 'client', 'server', '.git', 'static', 'build', 'dist', 'metadata', 'config', 'sys', 'proc', '.devcontainer', '.nyc_output', '.github', '.vscode'].map(dirname => {
return fileUtils.filePathToPOSIX(Path.join(global.appRoot, dirname))
})
directories = directories.filter(dir => {
return !excludedDirs.includes(dir.path)
})
res.json({
directories: await this.getDirectories(global.appRoot, '/', excludedDirs)
posix: !global.isWin,
directories
})
}
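For reference, a hedged illustration of the JSON this handler now returns. The route, host, and directory names are assumptions for illustration; `path` and `level` are the query parameters read above, and `posix` reflects `!global.isWin`:

// e.g. GET /api/filesystem?path=/audiobooks&level=1 on a Linux host might yield:
{
  "posix": true,
  "directories": [
    { "path": "/audiobooks/fiction", "dirname": "fiction", "level": 1 },
    { "path": "/audiobooks/nonfiction", "dirname": "nonfiction", "level": 1 }
  ]
}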


@ -7,6 +7,8 @@ const imageType = require('../libs/imageType')
const globals = require('../utils/globals')
const { downloadImageFile, filePathToPOSIX, checkPathIsFile } = require('../utils/fileUtils')
const { extractCoverArt } = require('../utils/ffmpegHelpers')
const parseEbookMetadata = require('../utils/parsers/parseEbookMetadata')
const CacheManager = require('../managers/CacheManager')
class CoverManager {
@ -234,6 +236,7 @@ class CoverManager {
/**
* Extract cover art from audio file and save for library item
*
* @param {import('../models/Book').AudioFileObject[]} audioFiles
* @param {string} libraryItemId
* @param {string} [libraryItemPath] null for isFile library items
@ -268,6 +271,44 @@ class CoverManager {
return null
}
/**
* Extract cover art from ebook and save for library item
*
* @param {import('../utils/parsers/parseEbookMetadata').EBookFileScanData} ebookFileScanData
* @param {string} libraryItemId
* @param {string} [libraryItemPath] null for isFile library items
* @returns {Promise<string>} returns cover path
*/
async saveEbookCoverArt(ebookFileScanData, libraryItemId, libraryItemPath) {
if (!ebookFileScanData?.ebookCoverPath) return null
let coverDirPath = null
if (global.ServerSettings.storeCoverWithItem && libraryItemPath) {
coverDirPath = libraryItemPath
} else {
coverDirPath = Path.posix.join(global.MetadataPath, 'items', libraryItemId)
}
await fs.ensureDir(coverDirPath)
let extname = Path.extname(ebookFileScanData.ebookCoverPath) || '.jpg'
if (extname === '.jpeg') extname = '.jpg'
const coverFilename = `cover${extname}`
const coverFilePath = Path.join(coverDirPath, coverFilename)
// TODO: Overwrite if exists?
const coverAlreadyExists = await fs.pathExists(coverFilePath)
if (coverAlreadyExists) {
Logger.warn(`[CoverManager] Extract embedded cover art but cover already exists for "${coverFilePath}" - overwriting`)
}
const success = await parseEbookMetadata.extractCoverImage(ebookFileScanData, coverFilePath)
if (success) {
await CacheManager.purgeCoverCache(libraryItemId)
return coverFilePath
}
return null
}
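A hedged usage sketch of the new method; the variable names here are illustrative, and the real call sites appear in the BookScanner hunks further down in this diff:

// Inside an async scan step, assuming `media` and `libraryItem` as in BookScanner
const ebookFileScanData = await parseEbookMetadata.parse(media.ebookFile)
if (ebookFileScanData?.ebookCoverPath) {
  const libraryItemDir = libraryItem.isFile ? null : libraryItem.path
  const coverPath = await CoverManager.saveEbookCoverArt(ebookFileScanData, libraryItem.id, libraryItemDir)
  if (coverPath) media.coverPath = coverPath
}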
/**
*
* @param {string} url


@ -233,7 +233,7 @@ class Library extends Model {
for (let i = 0; i < libraries.length; i++) {
const library = libraries[i]
if (library.displayOrder !== i + 1) {
Logger.dev(`[Library] Updating display order of library from ${library.displayOrder} to ${i + 1}`)
Logger.debug(`[Library] Updating display order of library from ${library.displayOrder} to ${i + 1}`)
await library.update({ displayOrder: i + 1 }).catch((error) => {
Logger.error(`[Library] Failed to update library display order to ${i + 1}`, error)
})


@ -264,7 +264,7 @@ class LibraryItem extends Model {
for (const existingPodcastEpisode of existingPodcastEpisodes) {
// Episode was removed
if (!updatedPodcastEpisodes.some(ep => ep.id === existingPodcastEpisode.id)) {
Logger.dev(`[LibraryItem] "${libraryItemExpanded.media.title}" episode "${existingPodcastEpisode.title}" was removed`)
Logger.debug(`[LibraryItem] "${libraryItemExpanded.media.title}" episode "${existingPodcastEpisode.title}" was removed`)
await existingPodcastEpisode.destroy()
hasUpdates = true
}
@ -272,7 +272,7 @@ class LibraryItem extends Model {
for (const updatedPodcastEpisode of updatedPodcastEpisodes) {
const existingEpisodeMatch = existingPodcastEpisodes.find(ep => ep.id === updatedPodcastEpisode.id)
if (!existingEpisodeMatch) {
Logger.dev(`[LibraryItem] "${libraryItemExpanded.media.title}" episode "${updatedPodcastEpisode.title}" was added`)
Logger.debug(`[LibraryItem] "${libraryItemExpanded.media.title}" episode "${updatedPodcastEpisode.title}" was added`)
await this.sequelize.models.podcastEpisode.createFromOld(updatedPodcastEpisode)
hasUpdates = true
} else {
@ -283,7 +283,7 @@ class LibraryItem extends Model {
if (existingValue instanceof Date) existingValue = existingValue.valueOf()
if (!areEquivalent(updatedEpisodeCleaned[key], existingValue, true)) {
Logger.dev(`[LibraryItem] "${libraryItemExpanded.media.title}" episode "${existingEpisodeMatch.title}" ${key} was updated from "${existingValue}" to "${updatedEpisodeCleaned[key]}"`)
Logger.debug(`[LibraryItem] "${libraryItemExpanded.media.title}" episode "${existingEpisodeMatch.title}" ${key} was updated from "${existingValue}" to "${updatedEpisodeCleaned[key]}"`)
episodeHasUpdates = true
}
}
@ -304,7 +304,7 @@ class LibraryItem extends Model {
for (const existingAuthor of existingAuthors) {
// Author was removed from Book
if (!updatedAuthors.some(au => au.id === existingAuthor.id)) {
Logger.dev(`[LibraryItem] "${libraryItemExpanded.media.title}" author "${existingAuthor.name}" was removed`)
Logger.debug(`[LibraryItem] "${libraryItemExpanded.media.title}" author "${existingAuthor.name}" was removed`)
await this.sequelize.models.bookAuthor.removeByIds(existingAuthor.id, libraryItemExpanded.media.id)
hasUpdates = true
}
@ -312,7 +312,7 @@ class LibraryItem extends Model {
for (const updatedAuthor of updatedAuthors) {
// Author was added
if (!existingAuthors.some(au => au.id === updatedAuthor.id)) {
Logger.dev(`[LibraryItem] "${libraryItemExpanded.media.title}" author "${updatedAuthor.name}" was added`)
Logger.debug(`[LibraryItem] "${libraryItemExpanded.media.title}" author "${updatedAuthor.name}" was added`)
await this.sequelize.models.bookAuthor.create({ authorId: updatedAuthor.id, bookId: libraryItemExpanded.media.id })
hasUpdates = true
}
@ -320,7 +320,7 @@ class LibraryItem extends Model {
for (const existingSeries of existingSeriesAll) {
// Series was removed
if (!updatedSeriesAll.some(se => se.id === existingSeries.id)) {
Logger.dev(`[LibraryItem] "${libraryItemExpanded.media.title}" series "${existingSeries.name}" was removed`)
Logger.debug(`[LibraryItem] "${libraryItemExpanded.media.title}" series "${existingSeries.name}" was removed`)
await this.sequelize.models.bookSeries.removeByIds(existingSeries.id, libraryItemExpanded.media.id)
hasUpdates = true
}
@ -329,11 +329,11 @@ class LibraryItem extends Model {
// Series was added/updated
const existingSeriesMatch = existingSeriesAll.find(se => se.id === updatedSeries.id)
if (!existingSeriesMatch) {
Logger.dev(`[LibraryItem] "${libraryItemExpanded.media.title}" series "${updatedSeries.name}" was added`)
Logger.debug(`[LibraryItem] "${libraryItemExpanded.media.title}" series "${updatedSeries.name}" was added`)
await this.sequelize.models.bookSeries.create({ seriesId: updatedSeries.id, bookId: libraryItemExpanded.media.id, sequence: updatedSeries.sequence })
hasUpdates = true
} else if (existingSeriesMatch.bookSeries.sequence !== updatedSeries.sequence) {
Logger.dev(`[LibraryItem] "${libraryItemExpanded.media.title}" series "${updatedSeries.name}" sequence was updated from "${existingSeriesMatch.bookSeries.sequence}" to "${updatedSeries.sequence}"`)
Logger.debug(`[LibraryItem] "${libraryItemExpanded.media.title}" series "${updatedSeries.name}" sequence was updated from "${existingSeriesMatch.bookSeries.sequence}" to "${updatedSeries.sequence}"`)
await existingSeriesMatch.bookSeries.update({ id: updatedSeries.id, sequence: updatedSeries.sequence })
hasUpdates = true
}
@ -346,7 +346,7 @@ class LibraryItem extends Model {
if (existingValue instanceof Date) existingValue = existingValue.valueOf()
if (!areEquivalent(updatedMedia[key], existingValue, true)) {
Logger.dev(`[LibraryItem] "${libraryItemExpanded.media.title}" ${libraryItemExpanded.mediaType}.${key} updated from ${existingValue} to ${updatedMedia[key]}`)
Logger.debug(`[LibraryItem] "${libraryItemExpanded.media.title}" ${libraryItemExpanded.mediaType}.${key} updated from ${existingValue} to ${updatedMedia[key]}`)
hasMediaUpdates = true
}
}
@ -363,7 +363,7 @@ class LibraryItem extends Model {
if (existingValue instanceof Date) existingValue = existingValue.valueOf()
if (!areEquivalent(updatedLibraryItem[key], existingValue, true)) {
Logger.dev(`[LibraryItem] "${libraryItemExpanded.media.title}" ${key} updated from ${existingValue} to ${updatedLibraryItem[key]}`)
Logger.debug(`[LibraryItem] "${libraryItemExpanded.media.title}" ${key} updated from ${existingValue} to ${updatedLibraryItem[key]}`)
hasLibraryItemUpdates = true
}
}
@ -541,7 +541,7 @@ class LibraryItem extends Model {
})
}
}
Logger.dev(`Loaded ${itemsInProgressPayload.items.length} of ${itemsInProgressPayload.count} items for "Continue Listening/Reading" in ${((Date.now() - fullStart) / 1000).toFixed(2)}s`)
Logger.debug(`Loaded ${itemsInProgressPayload.items.length} of ${itemsInProgressPayload.count} items for "Continue Listening/Reading" in ${((Date.now() - fullStart) / 1000).toFixed(2)}s`)
let start = Date.now()
if (library.isBook) {
@ -558,7 +558,7 @@ class LibraryItem extends Model {
total: continueSeriesPayload.count
})
}
Logger.dev(`Loaded ${continueSeriesPayload.libraryItems.length} of ${continueSeriesPayload.count} items for "Continue Series" in ${((Date.now() - start) / 1000).toFixed(2)}s`)
Logger.debug(`Loaded ${continueSeriesPayload.libraryItems.length} of ${continueSeriesPayload.count} items for "Continue Series" in ${((Date.now() - start) / 1000).toFixed(2)}s`)
} else if (library.isPodcast) {
// "Newest Episodes" shelf
const newestEpisodesPayload = await libraryFilters.getNewestPodcastEpisodes(library, user, limit)
@ -572,7 +572,7 @@ class LibraryItem extends Model {
total: newestEpisodesPayload.count
})
}
Logger.dev(`Loaded ${newestEpisodesPayload.libraryItems.length} of ${newestEpisodesPayload.count} episodes for "Newest Episodes" in ${((Date.now() - start) / 1000).toFixed(2)}s`)
Logger.debug(`Loaded ${newestEpisodesPayload.libraryItems.length} of ${newestEpisodesPayload.count} episodes for "Newest Episodes" in ${((Date.now() - start) / 1000).toFixed(2)}s`)
}
start = Date.now()
@ -588,7 +588,7 @@ class LibraryItem extends Model {
total: mostRecentPayload.count
})
}
Logger.dev(`Loaded ${mostRecentPayload.libraryItems.length} of ${mostRecentPayload.count} items for "Recently Added" in ${((Date.now() - start) / 1000).toFixed(2)}s`)
Logger.debug(`Loaded ${mostRecentPayload.libraryItems.length} of ${mostRecentPayload.count} items for "Recently Added" in ${((Date.now() - start) / 1000).toFixed(2)}s`)
if (library.isBook) {
start = Date.now()
@ -604,7 +604,7 @@ class LibraryItem extends Model {
total: seriesMostRecentPayload.count
})
}
Logger.dev(`Loaded ${seriesMostRecentPayload.series.length} of ${seriesMostRecentPayload.count} series for "Recent Series" in ${((Date.now() - start) / 1000).toFixed(2)}s`)
Logger.debug(`Loaded ${seriesMostRecentPayload.series.length} of ${seriesMostRecentPayload.count} series for "Recent Series" in ${((Date.now() - start) / 1000).toFixed(2)}s`)
start = Date.now()
// "Discover" shelf
@ -619,7 +619,7 @@ class LibraryItem extends Model {
total: discoverLibraryItemsPayload.count
})
}
Logger.dev(`Loaded ${discoverLibraryItemsPayload.libraryItems.length} of ${discoverLibraryItemsPayload.count} items for "Discover" in ${((Date.now() - start) / 1000).toFixed(2)}s`)
Logger.debug(`Loaded ${discoverLibraryItemsPayload.libraryItems.length} of ${discoverLibraryItemsPayload.count} items for "Discover" in ${((Date.now() - start) / 1000).toFixed(2)}s`)
}
start = Date.now()
@ -650,7 +650,7 @@ class LibraryItem extends Model {
})
}
}
Logger.dev(`Loaded ${mediaFinishedPayload.items.length} of ${mediaFinishedPayload.count} items for "Listen/Read Again" in ${((Date.now() - start) / 1000).toFixed(2)}s`)
Logger.debug(`Loaded ${mediaFinishedPayload.items.length} of ${mediaFinishedPayload.count} items for "Listen/Read Again" in ${((Date.now() - start) / 1000).toFixed(2)}s`)
if (library.isBook) {
start = Date.now()
@ -666,7 +666,7 @@ class LibraryItem extends Model {
total: newestAuthorsPayload.count
})
}
Logger.dev(`Loaded ${newestAuthorsPayload.authors.length} of ${newestAuthorsPayload.count} authors for "Newest Authors" in ${((Date.now() - start) / 1000).toFixed(2)}s`)
Logger.debug(`Loaded ${newestAuthorsPayload.authors.length} of ${newestAuthorsPayload.count} authors for "Newest Authors" in ${((Date.now() - start) / 1000).toFixed(2)}s`)
}
Logger.debug(`Loaded ${shelves.length} personalized shelves in ${((Date.now() - fullStart) / 1000).toFixed(2)}s`)


@ -324,35 +324,6 @@ class ApiRouter {
this.router.delete('/custom-metadata-providers/admin/:id', MiscController.deleteCustomMetadataProviders.bind(this))
}
async getDirectories(dir, relpath, excludedDirs, level = 0) {
try {
const paths = await fs.readdir(dir)
let dirs = await Promise.all(paths.map(async dirname => {
const fullPath = Path.join(dir, dirname)
const path = Path.join(relpath, dirname)
const isDir = (await fs.lstat(fullPath)).isDirectory()
if (isDir && !excludedDirs.includes(path) && dirname !== 'node_modules') {
return {
path,
dirname,
fullPath,
level,
dirs: level < 4 ? (await this.getDirectories(fullPath, path, excludedDirs, level + 1)) : []
}
} else {
return false
}
}))
dirs = dirs.filter(d => d)
return dirs
} catch (error) {
Logger.error('Failed to readdir', dir, error)
return []
}
}
//
// Helper Methods
//


@ -36,6 +36,8 @@ class AbsMetadataFileScanner {
for (const key in abMetadata) {
// TODO: When to override with null or empty arrays?
if (abMetadata[key] === undefined || abMetadata[key] === null) continue
if (key === 'authors' && !abMetadata.authors?.length) continue
if (key === 'genres' && !abMetadata.genres?.length) continue
if (key === 'tags' && !abMetadata.tags?.length) continue
if (key === 'chapters' && !abMetadata.chapters?.length) continue
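A small hedged illustration (data invented) of the effect of the added guards: empty arrays in metadata.json no longer overwrite values that were already set from a higher-precedence source:

const abMetadata = { title: 'Example Title', authors: [], genres: ['Sci-Fi'], tags: null }
// title and genres are applied; authors (empty array) is skipped by the new guard,
// and tags (null) was already skipped by the existing null/undefined check above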


@ -3,8 +3,8 @@ const Path = require('path')
const sequelize = require('sequelize')
const { LogLevel } = require('../utils/constants')
const { getTitleIgnorePrefix, areEquivalent } = require('../utils/index')
const abmetadataGenerator = require('../utils/generators/abmetadataGenerator')
const parseNameString = require('../utils/parsers/parseNameString')
const parseEbookMetadata = require('../utils/parsers/parseEbookMetadata')
const globals = require('../utils/globals')
const AudioFileScanner = require('./AudioFileScanner')
const Database = require('../Database')
@ -170,7 +170,9 @@ class BookScanner {
hasMediaChanges = true
}
const bookMetadata = await this.getBookMetadataFromScanData(media.audioFiles, libraryItemData, libraryScan, librarySettings, existingLibraryItem.id)
const ebookFileScanData = await parseEbookMetadata.parse(media.ebookFile)
const bookMetadata = await this.getBookMetadataFromScanData(media.audioFiles, ebookFileScanData, libraryItemData, libraryScan, librarySettings, existingLibraryItem.id)
let authorsUpdated = false
const bookAuthorsRemoved = []
let seriesUpdated = false
@ -317,24 +319,34 @@ class BookScanner {
})
}
// If no cover then extract cover from audio file if available OR search for cover if enabled in server settings
// If no cover then extract cover from audio file OR from ebook
const libraryItemDir = existingLibraryItem.isFile ? null : existingLibraryItem.path
if (!media.coverPath) {
const libraryItemDir = existingLibraryItem.isFile ? null : existingLibraryItem.path
const extractedCoverPath = await CoverManager.saveEmbeddedCoverArt(media.audioFiles, existingLibraryItem.id, libraryItemDir)
let extractedCoverPath = await CoverManager.saveEmbeddedCoverArt(media.audioFiles, existingLibraryItem.id, libraryItemDir)
if (extractedCoverPath) {
libraryScan.addLog(LogLevel.DEBUG, `Updating book "${bookMetadata.title}" extracted embedded cover art from audio file to path "${extractedCoverPath}"`)
media.coverPath = extractedCoverPath
hasMediaChanges = true
} else if (Database.serverSettings.scannerFindCovers) {
const authorName = media.authors.map(au => au.name).filter(au => au).join(', ')
const coverPath = await this.searchForCover(existingLibraryItem.id, libraryItemDir, media.title, authorName, libraryScan)
if (coverPath) {
media.coverPath = coverPath
} else if (ebookFileScanData?.ebookCoverPath) {
extractedCoverPath = await CoverManager.saveEbookCoverArt(ebookFileScanData, existingLibraryItem.id, libraryItemDir)
if (extractedCoverPath) {
libraryScan.addLog(LogLevel.DEBUG, `Updating book "${bookMetadata.title}" extracted embedded cover art from ebook file to path "${extractedCoverPath}"`)
media.coverPath = extractedCoverPath
hasMediaChanges = true
}
}
}
// If no cover then search for cover if enabled in server settings
if (!media.coverPath && Database.serverSettings.scannerFindCovers) {
const authorName = media.authors.map(au => au.name).filter(au => au).join(', ')
const coverPath = await this.searchForCover(existingLibraryItem.id, libraryItemDir, media.title, authorName, libraryScan)
if (coverPath) {
media.coverPath = coverPath
hasMediaChanges = true
}
}
existingLibraryItem.media = media
let libraryItemUpdated = false
@ -408,12 +420,14 @@ class BookScanner {
return null
}
let ebookFileScanData = null
if (ebookLibraryFile) {
ebookLibraryFile = ebookLibraryFile.toJSON()
ebookLibraryFile.ebookFormat = ebookLibraryFile.metadata.ext.slice(1).toLowerCase()
ebookFileScanData = await parseEbookMetadata.parse(ebookLibraryFile)
}
const bookMetadata = await this.getBookMetadataFromScanData(scannedAudioFiles, libraryItemData, libraryScan, librarySettings)
const bookMetadata = await this.getBookMetadataFromScanData(scannedAudioFiles, ebookFileScanData, libraryItemData, libraryScan, librarySettings)
bookMetadata.explicit = !!bookMetadata.explicit // Ensure boolean
bookMetadata.abridged = !!bookMetadata.abridged // Ensure boolean
@ -481,19 +495,28 @@ class BookScanner {
}
}
// If cover was not found in folder then check embedded covers in audio files OR search for cover
// If cover was not found in folder then check embedded covers in audio files OR ebook file
const libraryItemDir = libraryItemObj.isFile ? null : libraryItemObj.path
if (!bookObject.coverPath) {
const libraryItemDir = libraryItemObj.isFile ? null : libraryItemObj.path
// Extract and save embedded cover art
const extractedCoverPath = await CoverManager.saveEmbeddedCoverArt(scannedAudioFiles, libraryItemObj.id, libraryItemDir)
let extractedCoverPath = await CoverManager.saveEmbeddedCoverArt(scannedAudioFiles, libraryItemObj.id, libraryItemDir)
if (extractedCoverPath) {
libraryScan.addLog(LogLevel.DEBUG, `Extracted embedded cover from audio file at "${extractedCoverPath}" for book "${bookObject.title}"`)
bookObject.coverPath = extractedCoverPath
} else if (Database.serverSettings.scannerFindCovers) {
const authorName = bookMetadata.authors.join(', ')
bookObject.coverPath = await this.searchForCover(libraryItemObj.id, libraryItemDir, bookObject.title, authorName, libraryScan)
} else if (ebookFileScanData?.ebookCoverPath) {
extractedCoverPath = await CoverManager.saveEbookCoverArt(ebookFileScanData, libraryItemObj.id, libraryItemDir)
if (extractedCoverPath) {
libraryScan.addLog(LogLevel.DEBUG, `Extracted embedded cover from ebook file at "${extractedCoverPath}" for book "${bookObject.title}"`)
bookObject.coverPath = extractedCoverPath
}
}
}
// If cover not found then search for cover if enabled in settings
if (!bookObject.coverPath && Database.serverSettings.scannerFindCovers) {
const authorName = bookMetadata.authors.join(', ')
bookObject.coverPath = await this.searchForCover(libraryItemObj.id, libraryItemDir, bookObject.title, authorName, libraryScan)
}
libraryItemObj.book = bookObject
const libraryItem = await Database.libraryItemModel.create(libraryItemObj, {
include: {
@ -570,13 +593,14 @@ class BookScanner {
/**
*
* @param {import('../models/Book').AudioFileObject[]} audioFiles
* @param {import('../utils/parsers/parseEbookMetadata').EBookFileScanData} ebookFileScanData
* @param {import('./LibraryItemScanData')} libraryItemData
* @param {LibraryScan} libraryScan
* @param {import('../models/Library').LibrarySettingsObject} librarySettings
* @param {string} [existingLibraryItemId]
* @returns {Promise<BookMetadataObject>}
*/
async getBookMetadataFromScanData(audioFiles, libraryItemData, libraryScan, librarySettings, existingLibraryItemId = null) {
async getBookMetadataFromScanData(audioFiles, ebookFileScanData, libraryItemData, libraryScan, librarySettings, existingLibraryItemId = null) {
// First set book metadata from folder/file names
const bookMetadata = {
title: libraryItemData.mediaMetadata.title, // required
@ -599,7 +623,7 @@ class BookScanner {
coverPath: undefined
}
const bookMetadataSourceHandler = new BookScanner.BookMetadataSourceHandler(bookMetadata, audioFiles, libraryItemData, libraryScan, existingLibraryItemId)
const bookMetadataSourceHandler = new BookScanner.BookMetadataSourceHandler(bookMetadata, audioFiles, ebookFileScanData, libraryItemData, libraryScan, existingLibraryItemId)
const metadataPrecedence = librarySettings.metadataPrecedence || ['folderStructure', 'audioMetatags', 'nfoFile', 'txtFiles', 'opfFile', 'absMetadata']
libraryScan.addLog(LogLevel.DEBUG, `"${bookMetadata.title}" Getting metadata with precedence [${metadataPrecedence.join(', ')}]`)
for (const metadataSource of metadataPrecedence) {
@ -627,13 +651,15 @@ class BookScanner {
*
* @param {Object} bookMetadata
* @param {import('../models/Book').AudioFileObject[]} audioFiles
* @param {import('../utils/parsers/parseEbookMetadata').EBookFileScanData} ebookFileScanData
* @param {import('./LibraryItemScanData')} libraryItemData
* @param {LibraryScan} libraryScan
* @param {string} existingLibraryItemId
*/
constructor(bookMetadata, audioFiles, libraryItemData, libraryScan, existingLibraryItemId) {
constructor(bookMetadata, audioFiles, ebookFileScanData, libraryItemData, libraryScan, existingLibraryItemId) {
this.bookMetadata = bookMetadata
this.audioFiles = audioFiles
this.ebookFileScanData = ebookFileScanData
this.libraryItemData = libraryItemData
this.libraryScan = libraryScan
this.existingLibraryItemId = existingLibraryItemId
@ -647,13 +673,42 @@ class BookScanner {
}
/**
* Metadata from audio file meta tags
* Metadata from audio file meta tags OR metadata from ebook file
*/
audioMetatags() {
if (!this.audioFiles.length) return
// Modifies bookMetadata with metadata mapped from audio file meta tags
const bookTitle = this.bookMetadata.title || this.libraryItemData.mediaMetadata.title
AudioFileScanner.setBookMetadataFromAudioMetaTags(bookTitle, this.audioFiles, this.bookMetadata, this.libraryScan)
if (this.audioFiles.length) {
// Modifies bookMetadata with metadata mapped from audio file meta tags
const bookTitle = this.bookMetadata.title || this.libraryItemData.mediaMetadata.title
AudioFileScanner.setBookMetadataFromAudioMetaTags(bookTitle, this.audioFiles, this.bookMetadata, this.libraryScan)
} else if (this.ebookFileScanData) {
const ebookMetdataObject = this.ebookFileScanData.metadata
for (const key in ebookMetdataObject) {
if (key === 'tags') {
if (ebookMetdataObject.tags.length) {
this.bookMetadata.tags = ebookMetdataObject.tags
}
} else if (key === 'genres') {
if (ebookMetdataObject.genres.length) {
this.bookMetadata.genres = ebookMetdataObject.genres
}
} else if (key === 'authors') {
if (ebookMetdataObject.authors?.length) {
this.bookMetadata.authors = ebookMetdataObject.authors
}
} else if (key === 'narrators') {
if (ebookMetdataObject.narrators?.length) {
this.bookMetadata.narrators = ebookMetdataObject.narrators
}
} else if (key === 'series') {
if (ebookMetdataObject.series?.length) {
this.bookMetadata.series = ebookMetdataObject.series
}
} else if (ebookMetdataObject[key] && key !== 'sequence') {
this.bookMetadata[key] = ebookMetdataObject[key]
}
}
}
return null
}
/**
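A hedged example (values invented) of the `ebookFileScanData.metadata` object that the new branch above consumes when a book has no audio files; the keys mirror the OPF parser output shown later in this diff:

const exampleEbookMetadata = {
  title: 'Example Title',
  subtitle: null,
  authors: ['Jane Author'],
  narrators: [],
  series: [{ name: 'Example Series', sequence: '2' }],
  genres: ['Fantasy'],
  tags: [],
  language: 'en',
  description: 'An example description.'
}
// With the mapping above, title, authors, series, genres, language and description
// are copied onto bookMetadata; the empty narrators and tags arrays are skipped,
// and the null subtitle is ignored by the final truthiness check.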

View file

@ -2,7 +2,6 @@ const uuidv4 = require("uuid").v4
const Path = require('path')
const { LogLevel } = require('../utils/constants')
const { getTitleIgnorePrefix } = require('../utils/index')
const abmetadataGenerator = require('../utils/generators/abmetadataGenerator')
const AudioFileScanner = require('./AudioFileScanner')
const Database = require('../Database')
const { filePathToPOSIX, getFileTimestampsWithIno } = require('../utils/fileUtils')


@ -1,6 +1,7 @@
const axios = require('axios')
const Path = require('path')
const ssrfFilter = require('ssrf-req-filter')
const exec = require('child_process').exec
const fs = require('../libs/fsExtra')
const rra = require('../libs/recursiveReaddirAsync')
const Logger = require('../Logger')
@ -378,3 +379,65 @@ module.exports.isWritable = async (directory) => {
}
}
/**
* Get Windows drives as array e.g. ["C:/", "F:/"]
*
* @returns {Promise<string[]>}
*/
module.exports.getWindowsDrives = async () => {
if (!global.isWin) {
return []
}
return new Promise((resolve, reject) => {
exec('wmic logicaldisk get name', async (error, stdout, stderr) => {
if (error) {
reject(error)
return
}
let drives = stdout?.split(/\r?\n/).map(line => line.trim()).filter(line => line).slice(1)
const validDrives = []
for (const drive of drives) {
let drivepath = drive + '/'
if (await fs.pathExists(drivepath)) {
validDrives.push(drivepath)
} else {
Logger.error(`Invalid drive ${drivepath}`)
}
}
resolve(validDrives)
})
})
}
/**
* Get array of directory paths in a directory
*
* @param {string} dirPath
* @param {number} level
* @returns {Promise<{ path:string, dirname:string, level:number }[]>}
*/
module.exports.getDirectoriesInPath = async (dirPath, level) => {
try {
const paths = await fs.readdir(dirPath)
let dirs = await Promise.all(paths.map(async dirname => {
const fullPath = Path.join(dirPath, dirname)
const lstat = await fs.lstat(fullPath).catch((error) => {
Logger.debug(`Failed to lstat "${fullPath}"`, error)
return null
})
if (!lstat?.isDirectory()) return null
return {
path: this.filePathToPOSIX(fullPath),
dirname,
level
}
}))
dirs = dirs.filter(d => d)
return dirs
} catch (error) {
Logger.error('Failed to readdir', dirPath, error)
return []
}
}
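A hedged usage sketch of the two new helpers; the require path and directory names are assumptions, and the return shapes follow the JSDoc above:

const fileUtils = require('./utils/fileUtils') // location assumed

async function listTopLevelDirs() {
  if (global.isWin) {
    // e.g. ['C:/', 'D:/'] after filtering out drives that do not exist
    return fileUtils.getWindowsDrives()
  }
  // e.g. [{ path: '/audiobooks/fiction', dirname: 'fiction', level: 0 }, ...]
  return fileUtils.getDirectoriesInPath('/audiobooks', 0)
}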


@ -1,4 +1,5 @@
const xml = require('../../libs/xml')
const escapeForXML = require('../../libs/xml/escapeForXML')
/**
* Generate OPML file string for podcasts in a library
@ -12,18 +13,18 @@ module.exports.generate = (podcasts, indent = true) => {
if (!podcast.feedURL) return
const feedAttributes = {
type: 'rss',
text: podcast.title,
title: podcast.title,
xmlUrl: podcast.feedURL
text: escapeForXML(podcast.title),
title: escapeForXML(podcast.title),
xmlUrl: escapeForXML(podcast.feedURL)
}
if (podcast.description) {
feedAttributes.description = podcast.description
feedAttributes.description = escapeForXML(podcast.description)
}
if (podcast.itunesPageUrl) {
feedAttributes.htmlUrl = podcast.itunesPageUrl
feedAttributes.htmlUrl = escapeForXML(podcast.itunesPageUrl)
}
if (podcast.language) {
feedAttributes.language = podcast.language
feedAttributes.language = escapeForXML(podcast.language)
}
bodyItems.push({
outline: {


@ -0,0 +1,42 @@
const parseEpubMetadata = require('./parseEpubMetadata')
/**
* @typedef EBookFileScanData
* @property {string} path
* @property {string} ebookFormat
* @property {string} ebookCoverPath internal image path
* @property {import('../../scanner/BookScanner').BookMetadataObject} metadata
*/
/**
* Parse metadata from ebook file
*
* @param {import('../../models/Book').EBookFileObject} ebookFile
* @returns {Promise<EBookFileScanData>}
*/
async function parse(ebookFile) {
if (!ebookFile) return null
if (ebookFile.ebookFormat === 'epub') {
return parseEpubMetadata.parse(ebookFile.metadata.path)
}
return null
}
module.exports.parse = parse
/**
* Extract cover from ebook file
*
* @param {EBookFileScanData} ebookFileScanData
* @param {string} outputCoverPath
* @returns {Promise<boolean>}
*/
async function extractCoverImage(ebookFileScanData, outputCoverPath) {
if (!ebookFileScanData?.ebookCoverPath) return false
if (ebookFileScanData.ebookFormat === 'epub') {
return parseEpubMetadata.extractCoverImage(ebookFileScanData.path, ebookFileScanData.ebookCoverPath, outputCoverPath)
}
return false
}
module.exports.extractCoverImage = extractCoverImage
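A hedged usage sketch of this new module; the ebook file object and output path are invented, and only the `ebookFormat` and `metadata.path` fields shown are actually read by `parse()`:

const parseEbookMetadata = require('./utils/parsers/parseEbookMetadata') // location assumed

async function example() {
  const ebookFile = { ebookFormat: 'epub', metadata: { path: '/audiobooks/example/book.epub' } }
  const scanData = await parseEbookMetadata.parse(ebookFile) // EBookFileScanData or null
  if (scanData?.ebookCoverPath) {
    // copies the embedded cover image out of the epub; resolves to true on success
    await parseEbookMetadata.extractCoverImage(scanData, '/metadata/items/li_abc123/cover.jpg')
  }
  return scanData?.metadata
}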


@ -0,0 +1,109 @@
const Path = require('path')
const Logger = require('../../Logger')
const StreamZip = require('../../libs/nodeStreamZip')
const parseOpfMetadata = require('./parseOpfMetadata')
const { xmlToJSON } = require('../index')
/**
* Extract file from epub and return string content
*
* @param {string} epubPath
* @param {string} filepath
* @returns {Promise<string>}
*/
async function extractFileFromEpub(epubPath, filepath) {
const zip = new StreamZip.async({ file: epubPath })
const data = await zip.entryData(filepath).catch((error) => {
Logger.error(`[parseEpubMetadata] Failed to extract ${filepath} from epub at "${epubPath}"`, error)
})
const filedata = data?.toString('utf8')
await zip.close()
return filedata
}
/**
* Extract an XML file from epub and return JSON
*
* @param {string} epubPath
* @param {string} xmlFilepath
* @returns {Promise<Object>}
*/
async function extractXmlToJson(epubPath, xmlFilepath) {
const filedata = await extractFileFromEpub(epubPath, xmlFilepath)
if (!filedata) return null
return xmlToJSON(filedata)
}
/**
* Extract cover image from epub return true if success
*
* @param {string} epubPath
* @param {string} epubImageFilepath
* @param {string} outputCoverPath
* @returns {Promise<boolean>}
*/
async function extractCoverImage(epubPath, epubImageFilepath, outputCoverPath) {
const zip = new StreamZip.async({ file: epubPath })
const success = await zip.extract(epubImageFilepath, outputCoverPath).then(() => true).catch((error) => {
Logger.error(`[parseEpubMetadata] Failed to extract image ${epubImageFilepath} from epub at "${epubPath}"`, error)
return false
})
await zip.close()
return success
}
module.exports.extractCoverImage = extractCoverImage
/**
* Parse metadata from epub
*
* @param {string} epubPath
* @returns {Promise<import('./parseEbookMetadata').EBookFileScanData>}
*/
async function parse(epubPath) {
Logger.debug(`Parsing metadata from epub at "${epubPath}"`)
// Entrypoint of the epub that contains the filepath to the package document (opf file)
const containerJson = await extractXmlToJson(epubPath, 'META-INF/container.xml')
// Get package document opf filepath from container.xml
const packageDocPath = containerJson.container?.rootfiles?.[0]?.rootfile?.[0]?.$?.['full-path']
if (!packageDocPath) {
Logger.error(`Failed to get package doc path in Container.xml`, JSON.stringify(containerJson, null, 2))
return null
}
// Extract package document to JSON
const packageJson = await extractXmlToJson(epubPath, packageDocPath)
if (!packageJson) {
return null
}
// Parse metadata from package document opf file
const opfMetadata = parseOpfMetadata.parseOpfMetadataJson(packageJson)
if (!opfMetadata) {
Logger.error(`Unable to parse metadata in package doc with json`, JSON.stringify(packageJson, null, 2))
return null
}
const payload = {
path: epubPath,
ebookFormat: 'epub',
metadata: opfMetadata
}
// Attempt to find filepath to cover image
const manifestFirstImage = packageJson.package?.manifest?.[0]?.item?.find(item => item.$?.['media-type']?.startsWith('image/'))
let coverImagePath = manifestFirstImage?.$?.href
if (coverImagePath) {
const packageDirname = Path.dirname(packageDocPath)
payload.ebookCoverPath = Path.posix.join(packageDirname, coverImagePath)
} else {
Logger.warn(`Cover image not found in manifest for epub at "${epubPath}"`)
}
return payload
}
module.exports.parse = parse
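For orientation, a hedged illustration (structure abridged, values invented) of the `xmlToJSON` output that the `packageDocPath` lookup above walks; `$` holds an element's attributes and child elements arrive as arrays:

const containerJson = {
  container: {
    rootfiles: [
      {
        rootfile: [
          { $: { 'full-path': 'OEBPS/content.opf', 'media-type': 'application/oebps-package+xml' } }
        ]
      }
    ]
  }
}
// containerJson.container?.rootfiles?.[0]?.rootfile?.[0]?.$?.['full-path'] => 'OEBPS/content.opf'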


@ -103,15 +103,24 @@ function fetchSeries(metadataMeta) {
if (!metadataMeta) return []
const result = []
for (let i = 0; i < metadataMeta.length; i++) {
if (metadataMeta[i].$?.name === "calibre:series" && metadataMeta[i].$.content?.trim()) {
if (metadataMeta[i].$?.name === 'calibre:series' && metadataMeta[i].$.content?.trim()) {
const name = metadataMeta[i].$.content.trim()
let sequence = null
if (metadataMeta[i + 1]?.$?.name === "calibre:series_index" && metadataMeta[i + 1].$?.content?.trim()) {
if (metadataMeta[i + 1]?.$?.name === 'calibre:series_index' && metadataMeta[i + 1].$?.content?.trim()) {
sequence = metadataMeta[i + 1].$.content.trim()
}
result.push({ name, sequence })
}
}
// If one series was found with no series_index then check if any series_index meta can be found
// this is to support when calibre:series_index is not directly underneath calibre:series
if (result.length === 1 && !result[0].sequence) {
const seriesIndexMeta = metadataMeta.find(m => m.$?.name === 'calibre:series_index' && m.$.content?.trim())
if (seriesIndexMeta) {
result[0].sequence = seriesIndexMeta.$.content.trim()
}
}
return result
}
@ -136,11 +145,7 @@ function stripPrefix(str) {
return str.split(':').pop()
}
module.exports.parseOpfMetadataXML = async (xml) => {
const json = await xmlToJSON(xml)
if (!json) return null
module.exports.parseOpfMetadataJson = (json) => {
// Handle <package ...> or with prefix <ns0:package ...>
const packageKey = Object.keys(json).find(key => stripPrefix(key) === 'package')
if (!packageKey) return null
@ -167,7 +172,7 @@ module.exports.parseOpfMetadataXML = async (xml) => {
const creators = parseCreators(metadata)
const authors = (fetchCreators(creators, 'aut') || []).map(au => au?.trim()).filter(au => au)
const narrators = (fetchNarrators(creators, metadata) || []).map(nrt => nrt?.trim()).filter(nrt => nrt)
const data = {
return {
title: fetchTitle(metadata),
subtitle: fetchSubtitle(metadata),
authors,
@ -182,5 +187,10 @@ module.exports.parseOpfMetadataXML = async (xml) => {
series: fetchSeries(metadataMeta),
tags: fetchTags(metadata)
}
return data
}
module.exports.parseOpfMetadataXML = async (xml) => {
const json = await xmlToJSON(xml)
if (!json) return null
return this.parseOpfMetadataJson(json)
}
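A hedged example (entries invented) of calibre `<meta>` entries as they look after `xmlToJSON`, showing the case the new fallback handles, where `calibre:series_index` does not immediately follow `calibre:series`:

const metadataMeta = [
  { $: { name: 'calibre:series', content: 'Example Series' } },
  { $: { name: 'calibre:rating', content: '8' } },
  { $: { name: 'calibre:series_index', content: '3' } }
]
// fetchSeries(metadataMeta) => [{ name: 'Example Series', sequence: '3' }]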


@ -233,7 +233,7 @@ module.exports.getPodcastFeed = (feedUrl, excludeEpisodeMetadata = false) => {
method: 'GET',
timeout: 12000,
responseType: 'arraybuffer',
headers: { Accept: 'application/rss+xml, application/xhtml+xml, application/xml' },
headers: { Accept: 'application/rss+xml, application/xhtml+xml, application/xml, */*;q=0.8' },
httpAgent: ssrfFilter(feedUrl),
httpsAgent: ssrfFilter(feedUrl)
}).then(async (data) => {