From a5e8b451b2a5af7a937ec578d7d67cbecc6e9a97 Mon Sep 17 00:00:00 2001
From: Mert <101130780+mertalev@users.noreply.github.com>
Date: Wed, 22 May 2024 23:58:29 -0400
Subject: [PATCH] feat(server): qsv hardware decoding and tone-mapping (#9689)

* qsv hw decoding and tone-mapping

* fix vaapi

* add tests

* formatting

* handle device name without path
---
 server/src/services/media.service.spec.ts | 68 +++++++++++++++++++++++
 server/src/services/media.service.ts      |  7 ++-
 server/src/utils/media.ts                 | 65 ++++++++++++++++++++--
 3 files changed, 132 insertions(+), 8 deletions(-)

diff --git a/server/src/services/media.service.spec.ts b/server/src/services/media.service.spec.ts
index 044ca764e7..3c8200944e 100644
--- a/server/src/services/media.service.spec.ts
+++ b/server/src/services/media.service.spec.ts
@@ -1482,6 +1482,74 @@ describe(MediaService.name, () => {
       expect(mediaMock.transcode).not.toHaveBeenCalled();
     });
 
+    it('should use hardware decoding for qsv if enabled', async () => {
+      storageMock.readdir.mockResolvedValue(['renderD128']);
+      mediaMock.probe.mockResolvedValue(probeStub.matroskaContainer);
+      systemMock.get.mockResolvedValue({
+        ffmpeg: { accel: TranscodeHWAccel.QSV, accelDecode: true },
+      });
+      assetMock.getByIds.mockResolvedValue([assetStub.video]);
+
+      await sut.handleVideoConversion({ id: assetStub.video.id });
+
+      expect(mediaMock.transcode).toHaveBeenCalledWith(
+        '/original/path.ext',
+        'upload/encoded-video/user-id/as/se/asset-id.mp4',
+        {
+          inputOptions: expect.arrayContaining(['-hwaccel qsv', '-async_depth 4', '-threads 1']),
+          outputOptions: expect.arrayContaining([
+            expect.stringContaining('scale_qsv=-1:720:async_depth=4:mode=hq:format=nv12'),
+          ]),
+          twoPass: false,
+        },
+      );
+    });
+
+    it('should use hardware tone-mapping for qsv if hardware decoding is enabled and should tone map', async () => {
+      storageMock.readdir.mockResolvedValue(['renderD128']);
+      mediaMock.probe.mockResolvedValue(probeStub.videoStreamHDR);
+      systemMock.get.mockResolvedValue({
+        ffmpeg: { accel: TranscodeHWAccel.QSV, accelDecode: true },
+      });
+      assetMock.getByIds.mockResolvedValue([assetStub.video]);
+
+      await sut.handleVideoConversion({ id: assetStub.video.id });
+
+      expect(mediaMock.transcode).toHaveBeenCalledWith(
+        '/original/path.ext',
+        'upload/encoded-video/user-id/as/se/asset-id.mp4',
+        {
+          inputOptions: expect.arrayContaining(['-hwaccel qsv', '-async_depth 4', '-threads 1']),
+          outputOptions: expect.arrayContaining([
+            expect.stringContaining(
+              'hwmap=derive_device=opencl,tonemap_opencl=desat=0:format=nv12:matrix=bt709:primaries=bt709:range=pc:tonemap=hable:transfer=bt709,hwmap=derive_device=vaapi:reverse=1',
+            ),
+          ]),
+          twoPass: false,
+        },
+      );
+    });
+
+    it('should use preferred device for qsv when hardware decoding', async () => {
+      storageMock.readdir.mockResolvedValue(['renderD128', 'renderD129', 'renderD130']);
+      mediaMock.probe.mockResolvedValue(probeStub.matroskaContainer);
+      systemMock.get.mockResolvedValue({
+        ffmpeg: { accel: TranscodeHWAccel.QSV, accelDecode: true, preferredHwDevice: 'renderD129' },
+      });
+      assetMock.getByIds.mockResolvedValue([assetStub.video]);
+
+      await sut.handleVideoConversion({ id: assetStub.video.id });
+      expect(mediaMock.transcode).toHaveBeenCalledWith(
+        '/original/path.ext',
+        'upload/encoded-video/user-id/as/se/asset-id.mp4',
+        {
+          inputOptions: expect.arrayContaining(['-hwaccel qsv', '-qsv_device /dev/dri/renderD129']),
+          outputOptions: expect.any(Array),
+          twoPass: false,
+        },
+      );
+    });
+
     it('should set options for vaapi', async () => {
       storageMock.readdir.mockResolvedValue(['renderD128']);
       mediaMock.probe.mockResolvedValue(probeStub.matroskaContainer);
diff --git a/server/src/services/media.service.ts b/server/src/services/media.service.ts
index dc252b4c1c..7e52fe384c 100644
--- a/server/src/services/media.service.ts
+++ b/server/src/services/media.service.ts
@@ -38,7 +38,8 @@ import {
   HEVCConfig,
   NvencHwDecodeConfig,
   NvencSwDecodeConfig,
-  QSVConfig,
+  QsvHwDecodeConfig,
+  QsvSwDecodeConfig,
   RkmppHwDecodeConfig,
   RkmppSwDecodeConfig,
   ThumbnailConfig,
@@ -499,7 +500,9 @@ export class MediaService {
         break;
       }
       case TranscodeHWAccel.QSV: {
-        handler = new QSVConfig(config, await this.getDevices());
+        handler = config.accelDecode
+          ? new QsvHwDecodeConfig(config, await this.getDevices())
+          : new QsvSwDecodeConfig(config, await this.getDevices());
         break;
       }
       case TranscodeHWAccel.VAAPI: {
diff --git a/server/src/utils/media.ts b/server/src/utils/media.ts
index 6c51c0d303..268f1f60ce 100644
--- a/server/src/utils/media.ts
+++ b/server/src/utils/media.ts
@@ -302,10 +302,10 @@ export class BaseHWConfig extends BaseConfig implements VideoCodecHWConfig {
     return this.config.gopSize;
   }
 
-  getPreferredHardwareDevice(): string | null {
+  getPreferredHardwareDevice(): string | undefined {
     const device = this.config.preferredHwDevice;
     if (device === 'auto') {
-      return null;
+      return;
    }
 
     const deviceName = device.replace('/dev/dri/', '');
@@ -313,7 +313,7 @@ export class BaseHWConfig extends BaseConfig implements VideoCodecHWConfig {
       throw new Error(`Device '${device}' does not exist`);
    }
 
-    return device;
+    return `/dev/dri/${deviceName}`;
   }
 }
 
@@ -567,7 +567,7 @@ export class NvencHwDecodeConfig extends NvencSwDecodeConfig {
   }
 }
 
-export class QSVConfig extends BaseHWConfig {
+export class QsvSwDecodeConfig extends BaseHWConfig {
   getBaseInputOptions() {
     if (this.devices.length === 0) {
       throw new Error('No QSV device found');
@@ -575,7 +575,7 @@ export class QSVConfig extends BaseHWConfig {
     }
 
     let qsvString = '';
     const hwDevice = this.getPreferredHardwareDevice();
-    if (hwDevice !== null) {
+    if (hwDevice) {
       qsvString = `,child_device=${hwDevice}`;
     }
@@ -643,6 +643,59 @@ export class QSVConfig extends BaseHWConfig {
   }
 }
 
+export class QsvHwDecodeConfig extends QsvSwDecodeConfig {
+  getBaseInputOptions() {
+    if (this.devices.length === 0) {
+      throw new Error('No QSV device found');
+    }
+
+    const options = ['-hwaccel qsv', '-async_depth 4', '-threads 1'];
+    const hwDevice = this.getPreferredHardwareDevice();
+    if (hwDevice) {
+      options.push(`-qsv_device ${hwDevice}`);
+    }
+
+    return options;
+  }
+
+  getFilterOptions(videoStream: VideoStreamInfo) {
+    const options = [];
+    if (this.shouldScale(videoStream) || !this.shouldToneMap(videoStream)) {
+      let scaling = `scale_qsv=${this.getScaling(videoStream)}:async_depth=4:mode=hq`;
+      if (!this.shouldToneMap(videoStream)) {
+        scaling += ':format=nv12';
+      }
+      options.push(scaling);
+    }
+
+    options.push(...this.getToneMapping(videoStream));
+    return options;
+  }
+
+  getToneMapping(videoStream: VideoStreamInfo): string[] {
+    if (!this.shouldToneMap(videoStream)) {
+      return [];
+    }
+
+    const colors = this.getColors();
+    const tonemapOptions = [
+      'desat=0',
+      'format=nv12',
+      `matrix=${colors.matrix}`,
+      `primaries=${colors.primaries}`,
+      'range=pc',
+      `tonemap=${this.config.tonemap}`,
+      `transfer=${colors.transfer}`,
+    ];
+
+    return [
+      'hwmap=derive_device=opencl',
+      `tonemap_opencl=${tonemapOptions.join(':')}`,
+      'hwmap=derive_device=vaapi:reverse=1',
+    ];
+  }
+}
+
 export class VAAPIConfig extends BaseHWConfig {
   getBaseInputOptions() {
     if (this.devices.length === 0) {
@@ -650,7 +703,7 @@ export class VAAPIConfig extends BaseHWConfig {
     }
 
     let hwDevice = this.getPreferredHardwareDevice();
-    if (hwDevice === null) {
+    if (!hwDevice) {
       hwDevice = `/dev/dri/${this.devices[0]}`;
     }
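
Reviewer note, not part of the patch: the self-contained TypeScript sketch below mirrors the filter-chain logic that QsvHwDecodeConfig.getFilterOptions adds above, so the strings asserted in the new tests are easier to read. The SketchStream shape, the qsvFilters helper, and the hard-coded 720p target and bt709 color values are illustrative assumptions; the real class derives scaling, tone-mapping, and colors from the probed stream and the ffmpeg config.

// Illustrative sketch only (simplified stand-ins for the real config class).
interface SketchStream {
  height: number;
  isHDR: boolean; // stands in for shouldToneMap(videoStream)
}

function qsvFilters(stream: SketchStream, targetHeight = 720, tonemap = 'hable'): string[] {
  const filters: string[] = [];

  // Scale on the GPU. Request nv12 output only when no tone-mapping follows,
  // because the OpenCL tone-map stage sets the pixel format itself.
  const needsScale = stream.height > targetHeight;
  if (needsScale || !stream.isHDR) {
    let scale = `scale_qsv=-1:${targetHeight}:async_depth=4:mode=hq`;
    if (!stream.isHDR) {
      scale += ':format=nv12';
    }
    filters.push(scale);
  }

  if (stream.isHDR) {
    // Hop from the hardware frames to OpenCL, tone-map HDR down to BT.709,
    // then map back (reverse=1) so the QSV encoder can consume the frames.
    filters.push(
      'hwmap=derive_device=opencl',
      `tonemap_opencl=desat=0:format=nv12:matrix=bt709:primaries=bt709:range=pc:tonemap=${tonemap}:transfer=bt709`,
      'hwmap=derive_device=vaapi:reverse=1',
    );
  }

  return filters;
}

// A 2160p HDR input yields the scale_qsv stage followed by the
// hwmap -> tonemap_opencl -> hwmap round trip asserted in the tone-mapping test.
console.log(qsvFilters({ height: 2160, isHDR: true }).join(','));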