Mirror of https://github.com/advplyr/audiobookshelf.git (synced 2025-07-02 21:44:56 +02:00)

parent 0c168b3da4
commit 04f92c33c2

18 changed files with 258 additions and 149 deletions
@@ -24,6 +24,9 @@ class BackupManager {
    this.scheduleTask = null

    this.backups = []

    // If backup exceeds this value it will be aborted
    this.MaxBytesBeforeAbort = 1000000000 // ~ 1GB
  }

  get serverSettings() {
@@ -191,6 +194,7 @@ class BackupManager {
  }

  async runBackup() {
    // Check if Metadata Path is inside Config Path (otherwise there will be an infinite loop as the archiver tries to zip itself)
    Logger.info(`[BackupManager] Running Backup`)
    var metadataBooksPath = this.serverSettings.backupMetadataCovers ? Path.join(this.MetadataPath, 'books') : null
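The comment in runBackup() refers to guarding against the archiver zipping its own output when the metadata path sits inside the config path. A containment check of that kind can be sketched as below; the isPathInside helper is hypothetical and for illustration only, not code from this commit.

    const Path = require('path')

    // Hypothetical helper (illustration only): true if `child` resolves to a location inside `parent`
    function isPathInside(child, parent) {
      const rel = Path.relative(Path.resolve(parent), Path.resolve(child))
      return !!rel && !rel.startsWith('..') && !Path.isAbsolute(rel)
    }

    // e.g. isPathInside('/config/metadata', '/config') === true -> zipping /config would recurse into its own backup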
@@ -233,6 +237,7 @@ class BackupManager {

  async removeBackup(backup) {
    try {
      Logger.debug(`[BackupManager] Removing Backup "${backup.fullPath}"`)
      await fs.remove(backup.fullPath)
      this.backups = this.backups.filter(b => b.id !== backup.id)
      Logger.info(`[BackupManager] Backup "${backup.id}" Removed`)
@@ -263,6 +268,15 @@ class BackupManager {
        Logger.debug('Data has been drained')
      })

      output.on('finish', () => {
        Logger.debug('Write Stream Finished')
      })

      output.on('error', (err) => {
        Logger.debug('Write Stream Error', err)
        reject(err)
      })

      // good practice to catch warnings (ie stat failures and other non-blocking errors)
      archive.on('warning', function (err) {
        if (err.code === 'ENOENT') {
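These stream handlers live inside a Promise that wraps the archiver run. A minimal standalone sketch of that pattern, assuming only the archiver package and Node's fs (the function name and paths are illustrative, not the project's API):

    const fs = require('fs')
    const archiver = require('archiver')

    // Sketch: resolve when the output stream closes, reject on stream or archiver errors
    function zipDirectory(sourceDir, outPath) {
      return new Promise((resolve, reject) => {
        const output = fs.createWriteStream(outPath)
        const archive = archiver('zip', { zlib: { level: 9 } })

        output.on('close', () => resolve(archive.pointer())) // total bytes written
        output.on('error', reject)
        archive.on('error', reject)

        archive.pipe(output)
        archive.directory(sourceDir, false)
        archive.finalize()
      })
    }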
@@ -279,6 +293,16 @@ class BackupManager {
        Logger.error(`[BackupManager] Archiver error: ${err.message}`)
        reject(err)
      })
      archive.on('progress', ({ fs: fsobj }) => {
        if (fsobj.processedBytes > this.MaxBytesBeforeAbort) {
          Logger.error(`[BackupManager] Archiver is too large - aborting to prevent endless loop, Bytes Processed: ${fsobj.processedBytes}`)
          archive.abort()
          setTimeout(() => {
            this.removeBackup(backup)
            output.destroy('Backup too large') // Promise is rejected in write stream error evt
          }, 500)
        }
      })

      // pipe archive data to the file
      archive.pipe(output)
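The abort path relies on the write stream's 'error' handler to reject the backup promise: stream.destroy(err) tears the stream down and emits 'error' with the given value. A tiny standalone sketch of that behaviour (the path is hypothetical):

    const fs = require('fs')

    const out = fs.createWriteStream('/tmp/example.zip') // hypothetical path
    out.on('error', (err) => console.log('write stream error:', err)) // where the surrounding promise would reject
    out.destroy(new Error('Backup too large')) // emits 'error' on the stream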