diff --git a/server/src/ffmpeg/builder/options/hardwareAcceleration/VaapiOptions.ts b/server/src/ffmpeg/builder/options/hardwareAcceleration/VaapiOptions.ts index 3244502c..9fd91007 100644 --- a/server/src/ffmpeg/builder/options/hardwareAcceleration/VaapiOptions.ts +++ b/server/src/ffmpeg/builder/options/hardwareAcceleration/VaapiOptions.ts @@ -6,11 +6,25 @@ export class VaapiHardwareAccelerationOption extends GlobalOption { constructor( private vaapiDevice: string, private canHardwardDecode: boolean, + private withOpenclDerivation: boolean = false, ) { super(); } options(): string[] { + if (this.withOpenclDerivation) { + // Use named device init so that OpenCL can be derived from the VAAPI + // device, which is required for hwmap=derive_device=opencl to work. + const initDevices = [ + '-init_hw_device', + `vaapi=va:${this.vaapiDevice}`, + '-init_hw_device', + 'opencl=ocl@va', + ]; + return this.canHardwardDecode + ? [...initDevices, '-hwaccel', 'vaapi', '-hwaccel_device', 'va'] + : initDevices; + } return this.canHardwardDecode ? 
['-hwaccel', 'vaapi', '-vaapi_device', this.vaapiDevice] : ['-vaapi_device', this.vaapiDevice]; diff --git a/server/src/ffmpeg/builder/pipeline/hardware/VaapiPipeline.local.test.ts b/server/src/ffmpeg/builder/pipeline/hardware/VaapiPipeline.local.test.ts index 2387b3ae..80335984 100644 --- a/server/src/ffmpeg/builder/pipeline/hardware/VaapiPipeline.local.test.ts +++ b/server/src/ffmpeg/builder/pipeline/hardware/VaapiPipeline.local.test.ts @@ -1,6 +1,10 @@ +import dayjs from 'dayjs'; +import duration from 'dayjs/plugin/duration.js'; import path from 'node:path'; import { fileURLToPath } from 'node:url'; import { FileStreamSource } from '../../../../stream/types.ts'; + +dayjs.extend(duration); import { createTempWorkdir, probeFile, @@ -9,13 +13,46 @@ import { import { binaries, vaapiInfo, + vaapiOpenclSupported, vaapiTest, } from '../../../../testing/ffmpeg/FfmpegTestFixtures.ts'; -import { AudioFormats, FileOutputLocation } from '../../constants.ts'; -import { PixelFormatYuv420P } from '../../format/PixelFormat.ts'; -import { AudioInputSource } from '../../input/AudioInputSource.ts'; +import { + EmptyFfmpegCapabilities, + FfmpegCapabilities, +} from '../../capabilities/FfmpegCapabilities.ts'; +import { + VaapiEntrypoint, + VaapiHardwareCapabilities, + VaapiProfileEntrypoint, + VaapiProfiles, +} from '../../capabilities/VaapiHardwareCapabilities.ts'; +import { + AudioFormats, + ColorPrimaries, + ColorRanges, + ColorSpaces, + ColorTransferFormats, + FileOutputLocation, + VideoFormats, +} from '../../constants.ts'; +import { ColorFormat } from '../../format/ColorFormat.ts'; +import { + PixelFormatYuv420P, + PixelFormatYuv420P10Le, +} from '../../format/PixelFormat.ts'; +import { + AudioInputFilterSource, + AudioInputSource, +} from '../../input/AudioInputSource.ts'; +import { LavfiVideoInputSource } from '../../input/LavfiVideoInputSource.ts'; import { VideoInputSource } from '../../input/VideoInputSource.ts'; -import { AudioStream, VideoStream } from 
'../../MediaStream.ts'; +import { WatermarkInputSource } from '../../input/WatermarkInputSource.ts'; +import { + AudioStream, + StillImageStream, + VideoStream, +} from '../../MediaStream.ts'; +import { KnownFfmpegFilters } from '../../options/KnownFfmpegOptions.ts'; import { AudioState } from '../../state/AudioState.ts'; import { DefaultPipelineOptions, @@ -23,6 +60,7 @@ import { } from '../../state/FfmpegState.ts'; import { FrameState } from '../../state/FrameState.ts'; import { FrameSize } from '../../types.ts'; +import { TONEMAP_ENABLED, TUNARR_ENV_VARS } from '../../../../util/env.ts'; import { VaapiPipelineBuilder } from './VaapiPipelineBuilder.ts'; const fixturesDir = path.join( @@ -33,8 +71,87 @@ const fixturesDir = path.join( const Fixtures = { video720p: path.join(fixturesDir, '720p_h264.ts'), video1080p: path.join(fixturesDir, '1080p_h264.ts'), + video480p43: path.join(fixturesDir, '480p_h264.ts'), + videoHdr720p: path.join(fixturesDir, '720p_hevc_hdr10.ts'), + watermark: path.join(fixturesDir, 'watermark.png'), } as const; +// Limit output to 1 second in all integration tests to keep runs fast +const testDuration = dayjs.duration(1, 'second'); + +// ─── Shared helpers ─────────────────────────────────────────────────────────── + +function makeVideoInput(inputPath: string, frameSize: FrameSize) { + return VideoInputSource.withStream( + new FileStreamSource(inputPath), + VideoStream.create({ + codec: 'h264', + displayAspectRatio: '16:9', + frameSize, + index: 0, + pixelFormat: new PixelFormatYuv420P(), + providedSampleAspectRatio: null, + colorFormat: null, + }), + ); +} + +function makeAudioInput(inputPath: string) { + return AudioInputSource.withStream( + new FileStreamSource(inputPath), + AudioStream.create({ + channels: 2, + codec: 'aac', + index: 1, + }), + AudioState.create({ + audioEncoder: AudioFormats.Aac, + audioChannels: 2, + audioBitrate: 192, + audioBufferSize: 384, + }), + ); +} + +function make43VideoInput(inputPath: string) { + return 
VideoInputSource.withStream( + new FileStreamSource(inputPath), + VideoStream.create({ + codec: 'h264', + profile: 'main', + displayAspectRatio: '4:3', + frameSize: FrameSize.withDimensions(640, 480), + index: 0, + pixelFormat: new PixelFormatYuv420P(), + providedSampleAspectRatio: null, + colorFormat: null, + }), + ); +} + +function makeHdrVideoInput(inputPath: string) { + return VideoInputSource.withStream( + new FileStreamSource(inputPath), + VideoStream.create({ + codec: VideoFormats.Hevc, + profile: 'main 10', + displayAspectRatio: '16:9', + frameSize: FrameSize.withDimensions(1280, 720), + index: 0, + pixelFormat: new PixelFormatYuv420P10Le(), + providedSampleAspectRatio: '1:1', + colorFormat: new ColorFormat({ + colorRange: ColorRanges.Tv, + colorSpace: ColorSpaces.Bt2020nc, + colorPrimaries: ColorPrimaries.Bt2020, + colorTransfer: ColorTransferFormats.Smpte2084, + }), + }), + ); +} + +// ─── Existing basic tests ───────────────────────────────────────────────────── + describe.skipIf(!binaries || !vaapiInfo)( 'VaapiPipelineBuilder integration', () => { @@ -47,39 +164,6 @@ describe.skipIf(!binaries || !vaapiInfo)( afterAll(() => cleanup()); - function makeVideoInput(inputPath: string, frameSize: FrameSize) { - return VideoInputSource.withStream( - new FileStreamSource(inputPath), - VideoStream.create({ - codec: 'h264', - profile: 'high', - displayAspectRatio: '16:9', - frameSize, - index: 0, - pixelFormat: new PixelFormatYuv420P(), - providedSampleAspectRatio: null, - colorFormat: null, - }), - ); - } - - function makeAudioInput(inputPath: string) { - return AudioInputSource.withStream( - new FileStreamSource(inputPath), - AudioStream.create({ - channels: 2, - codec: 'aac', - index: 1, - }), - AudioState.create({ - audioEncoder: AudioFormats.Aac, - audioChannels: 2, - audioBitrate: 192, - audioBufferSize: 384, - }), - ); - } - vaapiTest( 'basic h264 vaapi transcode', async ({ binaryCapabilities, ffmpegVersion, resolvedVaapi }) => { @@ -103,11 +187,13 @@ 
describe.skipIf(!binaries || !vaapiInfo)( isAnamorphic: false, scaledSize: FrameSize.withDimensions(1280, 720), paddedSize: FrameSize.withDimensions(1280, 720), + pixelFormat: new PixelFormatYuv420P(), }); const outputPath = path.join(workdir, 'vaapi_transcode.ts'); const pipeline = builder.build( FfmpegState.create({ + duration: testDuration, version: ffmpegVersion, outputLocation: FileOutputLocation(outputPath, true), vaapiDevice: resolvedVaapi.device, @@ -154,11 +240,13 @@ describe.skipIf(!binaries || !vaapiInfo)( isAnamorphic: false, scaledSize: FrameSize.withDimensions(1280, 720), paddedSize: FrameSize.withDimensions(1280, 720), + pixelFormat: new PixelFormatYuv420P(), }); const outputPath = path.join(workdir, 'vaapi_scale.ts'); const pipeline = builder.build( FfmpegState.create({ + duration: testDuration, version: ffmpegVersion, outputLocation: FileOutputLocation(outputPath, true), vaapiDevice: resolvedVaapi.device, @@ -211,6 +299,7 @@ describe.skipIf(!binaries || !vaapiInfo)( const outputPath = path.join(workdir, 'vaapi_copy.ts'); const pipeline = builder.build( FfmpegState.create({ + duration: testDuration, version: ffmpegVersion, outputLocation: FileOutputLocation(outputPath, true), vaapiDevice: resolvedVaapi.device, @@ -235,3 +324,1103 @@ describe.skipIf(!binaries || !vaapiInfo)( ); }, ); + +// ─── Pipeline Options ───────────────────────────────────────────────────────── + +describe.skipIf(!binaries || !vaapiInfo)( + 'VaapiPipelineBuilder pipeline options integration', + () => { + let workdir: string; + let cleanup: () => Promise<void>; + + beforeAll(async () => { + ({ dir: workdir, cleanup } = await createTempWorkdir()); + }); + + afterAll(() => cleanup()); + + vaapiTest( + 'hardware decoding disabled', + async ({ binaryCapabilities, ffmpegVersion, resolvedVaapi }) => { + const video = makeVideoInput( + Fixtures.video720p, + FrameSize.withDimensions(1280, 720), + ); + const audio = makeAudioInput(Fixtures.video720p); + + const builder = new
VaapiPipelineBuilder( + resolvedVaapi.capabilities, + binaryCapabilities, + video, + audio, + null, + null, + null, + ); + + const outputPath = path.join(workdir, 'decode_disabled.ts'); + const pipeline = builder.build( + FfmpegState.create({ + duration: testDuration, + version: ffmpegVersion, + outputLocation: FileOutputLocation(outputPath, true), + vaapiDevice: resolvedVaapi.device, + }), + new FrameState({ + isAnamorphic: false, + scaledSize: FrameSize.withDimensions(1280, 720), + paddedSize: FrameSize.withDimensions(1280, 720), + }), + // Also disable encoding: sw-decode + hw-encode without hwupload is a + // known pipeline bug; this tests graceful sw-only fallback instead. + { + ...DefaultPipelineOptions, + disableHardwareDecoding: true, + disableHardwareEncoding: true, + }, + ); + + const { exitCode, stderr } = runFfmpegWithPipeline( + binaries!.ffmpeg, + pipeline.getCommandArgs(), + ); + + expect( + exitCode, + `Pipeline command failed: ${pipeline.getCommandArgs().join(' ')}\n${stderr}`, + ).toBe(0); + const probe = probeFile(binaries!.ffprobe, outputPath); + expect(probe.streams.some((s) => s.codec_type === 'video')).toBe(true); + }, + ); + + vaapiTest( + 'hardware encoding disabled', + async ({ binaryCapabilities, ffmpegVersion, resolvedVaapi }) => { + const video = makeVideoInput( + Fixtures.video720p, + FrameSize.withDimensions(1280, 720), + ); + const audio = makeAudioInput(Fixtures.video720p); + + const builder = new VaapiPipelineBuilder( + resolvedVaapi.capabilities, + binaryCapabilities, + video, + audio, + null, + null, + null, + ); + + const outputPath = path.join(workdir, 'encode_disabled.ts'); + const pipeline = builder.build( + FfmpegState.create({ + duration: testDuration, + version: ffmpegVersion, + outputLocation: FileOutputLocation(outputPath, true), + vaapiDevice: resolvedVaapi.device, + }), + new FrameState({ + isAnamorphic: false, + scaledSize: FrameSize.withDimensions(1280, 720), + paddedSize: FrameSize.withDimensions(1280, 720), + }), + 
{ ...DefaultPipelineOptions, disableHardwareEncoding: true }, + ); + + const { exitCode, stderr } = runFfmpegWithPipeline( + binaries!.ffmpeg, + pipeline.getCommandArgs(), + ); + + expect( + exitCode, + `Pipeline command failed: ${pipeline.getCommandArgs().join(' ')}\n${stderr}`, + ).toBe(0); + const probe = probeFile(binaries!.ffprobe, outputPath); + expect(probe.streams.some((s) => s.codec_type === 'video')).toBe(true); + }, + ); + + vaapiTest( + 'hardware filters disabled', + async ({ binaryCapabilities, ffmpegVersion, resolvedVaapi }) => { + const video = makeVideoInput( + Fixtures.video720p, + FrameSize.withDimensions(1280, 720), + ); + const audio = makeAudioInput(Fixtures.video720p); + + const builder = new VaapiPipelineBuilder( + resolvedVaapi.capabilities, + binaryCapabilities, + video, + audio, + null, + null, + null, + ); + + const outputPath = path.join(workdir, 'filters_disabled.ts'); + const pipeline = builder.build( + FfmpegState.create({ + duration: testDuration, + version: ffmpegVersion, + outputLocation: FileOutputLocation(outputPath, true), + vaapiDevice: resolvedVaapi.device, + }), + new FrameState({ + isAnamorphic: false, + scaledSize: FrameSize.withDimensions(1280, 720), + paddedSize: FrameSize.withDimensions(1280, 720), + }), + // Also disable encoding: sw-decode + hw-encode without hwupload is a + // known pipeline bug; this tests graceful sw-only fallback instead. 
+ { + ...DefaultPipelineOptions, + disableHardwareFilters: true, + disableHardwareEncoding: true, + }, + ); + + const { exitCode, stderr } = runFfmpegWithPipeline( + binaries!.ffmpeg, + pipeline.getCommandArgs(), + ); + + expect( + exitCode, + `Pipeline command failed: ${pipeline.getCommandArgs().join(' ')}\n${stderr}`, + ).toBe(0); + const probe = probeFile(binaries!.ffprobe, outputPath); + expect(probe.streams.some((s) => s.codec_type === 'video')).toBe(true); + }, + ); + + vaapiTest( + 'lavfi error text with noise audio', + async ({ binaryCapabilities, ffmpegVersion, resolvedVaapi }) => { + const audioState = AudioState.create({ + audioEncoder: AudioFormats.Aac, + audioChannels: 2, + audioBitrate: 192, + audioBufferSize: 384, + }); + + const builder = new VaapiPipelineBuilder( + resolvedVaapi.capabilities, + binaryCapabilities, + LavfiVideoInputSource.errorText(FrameSize.FHD, 'Error', 'Test'), + AudioInputFilterSource.noise(audioState), + null, + null, + null, + ); + + const outputPath = path.join(workdir, 'lavfi_error_text.ts'); + const pipeline = builder.build( + FfmpegState.create({ + duration: testDuration, + version: ffmpegVersion, + outputLocation: FileOutputLocation(outputPath, true), + vaapiDevice: resolvedVaapi.device, + }), + new FrameState({ + isAnamorphic: false, + scaledSize: FrameSize.FHD, + paddedSize: FrameSize.FHD, + videoFormat: VideoFormats.H264, + pixelFormat: new PixelFormatYuv420P(), + }), + DefaultPipelineOptions, + ); + + const { exitCode, stderr } = runFfmpegWithPipeline( + binaries!.ffmpeg, + pipeline.getCommandArgs(), + ); + + expect( + exitCode, + `Pipeline command failed: ${pipeline.getCommandArgs().join(' ')}\n${stderr}`, + ).toBe(0); + const probe = probeFile(binaries!.ffprobe, outputPath); + expect(probe.streams.some((s) => s.codec_type === 'video')).toBe(true); + }, + ); + }, +); + +// ─── Padding ────────────────────────────────────────────────────────────────── + +describe.skipIf(!binaries || !vaapiInfo)( + 
'VaapiPipelineBuilder pad integration', + () => { + let workdir: string; + let cleanup: () => Promise<void>; + + beforeAll(async () => { + ({ dir: workdir, cleanup } = await createTempWorkdir()); + }); + + afterAll(() => cleanup()); + + afterEach(() => { + delete process.env[TUNARR_ENV_VARS.DISABLE_VAAPI_PAD]; + }); + + // 4:3 → scaledSize=1440x1080, paddedSize=1920x1080 (needs pillarboxing) + function makePadFrameState() { + const videoStream = VideoStream.create({ + codec: 'h264', + profile: 'main', + displayAspectRatio: '4:3', + frameSize: FrameSize.withDimensions(640, 480), + index: 0, + pixelFormat: new PixelFormatYuv420P(), + providedSampleAspectRatio: null, + colorFormat: null, + }); + return new FrameState({ + isAnamorphic: false, + scaledSize: videoStream.squarePixelFrameSize(FrameSize.FHD), + paddedSize: FrameSize.FHD, + pixelFormat: new PixelFormatYuv420P(), + videoFormat: VideoFormats.H264, + }); + } + + vaapiTest( + '4:3 source padded to 1920x1080 (real capabilities)', + async ({ binaryCapabilities, ffmpegVersion, resolvedVaapi }) => { + const video = make43VideoInput(Fixtures.video480p43); + const audio = makeAudioInput(Fixtures.video480p43); + + const builder = new VaapiPipelineBuilder( + resolvedVaapi.capabilities, + binaryCapabilities, + video, + audio, + null, + null, + null, + ); + + const outputPath = path.join(workdir, 'pad_real_caps.ts'); + const pipeline = builder.build( + FfmpegState.create({ + duration: testDuration, + version: ffmpegVersion, + outputLocation: FileOutputLocation(outputPath, true), + vaapiDevice: resolvedVaapi.device, + }), + makePadFrameState(), + DefaultPipelineOptions, + ); + + const { exitCode, stderr } = runFfmpegWithPipeline( + binaries!.ffmpeg, + pipeline.getCommandArgs(), + ); + + expect( + exitCode, + `Pipeline command failed: ${pipeline.getCommandArgs().join(' ')}\n${stderr}`, + ).toBe(0); + + const probe = probeFile(binaries!.ffprobe, outputPath); + const videoStream = probe.streams.find((s) => s.codec_type ===
'video'); + expect(videoStream).toBeDefined(); + expect(videoStream!.width).toBe(1920); + expect(videoStream!.height).toBe(1080); + }, + ); + + vaapiTest( + '4:3 source padded to 1920x1080 (software pad, EmptyFfmpegCapabilities)', + async ({ ffmpegVersion, resolvedVaapi }) => { + const video = make43VideoInput(Fixtures.video480p43); + const audio = makeAudioInput(Fixtures.video480p43); + + const builder = new VaapiPipelineBuilder( + resolvedVaapi.capabilities, + EmptyFfmpegCapabilities, + video, + audio, + null, + null, + null, + ); + + const outputPath = path.join(workdir, 'pad_software.ts'); + const pipeline = builder.build( + FfmpegState.create({ + duration: testDuration, + version: ffmpegVersion, + outputLocation: FileOutputLocation(outputPath, true), + vaapiDevice: resolvedVaapi.device, + }), + makePadFrameState(), + DefaultPipelineOptions, + ); + + const { exitCode, stderr } = runFfmpegWithPipeline( + binaries!.ffmpeg, + pipeline.getCommandArgs(), + ); + + expect( + exitCode, + `Pipeline command failed: ${pipeline.getCommandArgs().join(' ')}\n${stderr}`, + ).toBe(0); + + const probe = probeFile(binaries!.ffprobe, outputPath); + const videoStream = probe.streams.find((s) => s.codec_type === 'video'); + expect(videoStream).toBeDefined(); + expect(videoStream!.width).toBe(1920); + expect(videoStream!.height).toBe(1080); + }, + ); + + vaapiTest( + '4:3 source padded to 1920x1080 (hardware decode disabled)', + async ({ binaryCapabilities, ffmpegVersion, resolvedVaapi }) => { + const video = make43VideoInput(Fixtures.video480p43); + const audio = makeAudioInput(Fixtures.video480p43); + + const builder = new VaapiPipelineBuilder( + resolvedVaapi.capabilities, + binaryCapabilities, + video, + audio, + null, + null, + null, + ); + + const outputPath = path.join(workdir, 'pad_decode_disabled.ts'); + const pipeline = builder.build( + FfmpegState.create({ + duration: testDuration, + version: ffmpegVersion, + outputLocation: FileOutputLocation(outputPath, true), + 
vaapiDevice: resolvedVaapi.device, + }), + makePadFrameState(), + { ...DefaultPipelineOptions, disableHardwareDecoding: true }, + ); + + const { exitCode, stderr } = runFfmpegWithPipeline( + binaries!.ffmpeg, + pipeline.getCommandArgs(), + ); + + expect( + exitCode, + `Pipeline command failed: ${pipeline.getCommandArgs().join(' ')}\n${stderr}`, + ).toBe(0); + + const probe = probeFile(binaries!.ffprobe, outputPath); + const videoStream = probe.streams.find((s) => s.codec_type === 'video'); + expect(videoStream).toBeDefined(); + expect(videoStream!.width).toBe(1920); + expect(videoStream!.height).toBe(1080); + }, + ); + + vaapiTest( + 'TUNARR_DISABLE_VAAPI_PAD=true forces software pad', + async ({ binaryCapabilities, ffmpegVersion, resolvedVaapi }) => { + process.env[TUNARR_ENV_VARS.DISABLE_VAAPI_PAD] = 'true'; + + const video = make43VideoInput(Fixtures.video480p43); + const audio = makeAudioInput(Fixtures.video480p43); + + const builder = new VaapiPipelineBuilder( + resolvedVaapi.capabilities, + binaryCapabilities, + video, + audio, + null, + null, + null, + ); + + const outputPath = path.join(workdir, 'pad_env_disabled.ts'); + const pipeline = builder.build( + FfmpegState.create({ + duration: testDuration, + version: ffmpegVersion, + outputLocation: FileOutputLocation(outputPath, true), + vaapiDevice: resolvedVaapi.device, + }), + makePadFrameState(), + DefaultPipelineOptions, + ); + + const { exitCode, stderr } = runFfmpegWithPipeline( + binaries!.ffmpeg, + pipeline.getCommandArgs(), + ); + + expect( + exitCode, + `Pipeline command failed: ${pipeline.getCommandArgs().join(' ')}\n${stderr}`, + ).toBe(0); + + const probe = probeFile(binaries!.ffprobe, outputPath); + const videoStream = probe.streams.find((s) => s.codec_type === 'video'); + expect(videoStream).toBeDefined(); + expect(videoStream!.width).toBe(1920); + expect(videoStream!.height).toBe(1080); + }, + ); + + vaapiTest( + '16:9 FHD source needs no padding (1080p fixture)', + async ({ binaryCapabilities, 
ffmpegVersion, resolvedVaapi }) => { + const video = makeVideoInput( + Fixtures.video1080p, + FrameSize.withDimensions(1920, 1080), + ); + const audio = makeAudioInput(Fixtures.video1080p); + + const builder = new VaapiPipelineBuilder( + resolvedVaapi.capabilities, + binaryCapabilities, + video, + audio, + null, + null, + null, + ); + + const outputPath = path.join(workdir, 'pad_fhd_no_pad.ts'); + const pipeline = builder.build( + FfmpegState.create({ + duration: testDuration, + version: ffmpegVersion, + outputLocation: FileOutputLocation(outputPath, true), + vaapiDevice: resolvedVaapi.device, + }), + new FrameState({ + isAnamorphic: false, + scaledSize: FrameSize.FHD, + paddedSize: FrameSize.FHD, + pixelFormat: new PixelFormatYuv420P(), + videoFormat: VideoFormats.H264, + }), + DefaultPipelineOptions, + ); + + const { exitCode, stderr } = runFfmpegWithPipeline( + binaries!.ffmpeg, + pipeline.getCommandArgs(), + ); + + expect( + exitCode, + `Pipeline command failed: ${pipeline.getCommandArgs().join(' ')}\n${stderr}`, + ).toBe(0); + const probe = probeFile(binaries!.ffprobe, outputPath); + expect(probe.streams.some((s) => s.codec_type === 'video')).toBe(true); + }, + ); + + vaapiTest( + '4:3 source with watermark overlay', + async ({ binaryCapabilities, ffmpegVersion, resolvedVaapi }) => { + const video = make43VideoInput(Fixtures.video480p43); + const audio = makeAudioInput(Fixtures.video480p43); + const watermark = new WatermarkInputSource( + new FileStreamSource(Fixtures.watermark), + StillImageStream.create({ + frameSize: FrameSize.withDimensions(100, 100), + index: 0, + }), + { + enabled: true, + position: 'top-left', + width: 100, + verticalMargin: 5, + horizontalMargin: 5, + duration: 0, + opacity: 100, + }, + ); + + const builder = new VaapiPipelineBuilder( + resolvedVaapi.capabilities, + binaryCapabilities, + video, + audio, + watermark, + null, + null, + ); + + const outputPath = path.join(workdir, 'pad_watermark.ts'); + const pipeline = builder.build( + 
FfmpegState.create({ + duration: testDuration, + version: ffmpegVersion, + outputLocation: FileOutputLocation(outputPath, true), + vaapiDevice: resolvedVaapi.device, + }), + makePadFrameState(), + DefaultPipelineOptions, + ); + + const { exitCode, stderr } = runFfmpegWithPipeline( + binaries!.ffmpeg, + pipeline.getCommandArgs(), + ); + + expect( + exitCode, + `Pipeline command failed: ${pipeline.getCommandArgs().join(' ')}\n${stderr}`, + ).toBe(0); + const probe = probeFile(binaries!.ffprobe, outputPath); + expect(probe.streams.some((s) => s.codec_type === 'video')).toBe(true); + }, + ); + }, +); + +// ─── Scaling ────────────────────────────────────────────────────────────────── + +describe.skipIf(!binaries || !vaapiInfo)( + 'VaapiPipelineBuilder scale integration', + () => { + let workdir: string; + let cleanup: () => Promise<void>; + + beforeAll(async () => { + ({ dir: workdir, cleanup } = await createTempWorkdir()); + }); + + afterAll(() => cleanup()); + + // Capabilities that include both H264 and HEVC decode/encode + function makeVaapiCapsWithH264AndHevc() { + return new VaapiHardwareCapabilities([ + new VaapiProfileEntrypoint(VaapiProfiles.H264Main, VaapiEntrypoint.Decode), + new VaapiProfileEntrypoint(VaapiProfiles.H264Main, VaapiEntrypoint.Encode), + new VaapiProfileEntrypoint(VaapiProfiles.HevcMain10, VaapiEntrypoint.Decode), + new VaapiProfileEntrypoint(VaapiProfiles.HevcMain, VaapiEntrypoint.Encode), + ]); + } + + // 4:3 640x480 → scaledSize=1440x1080, paddedSize=1920x1080 + function make43ScaleFrameState() { + const videoStream = VideoStream.create({ + codec: 'h264', + profile: 'main', + displayAspectRatio: '4:3', + frameSize: FrameSize.withDimensions(640, 480), + index: 0, + pixelFormat: new PixelFormatYuv420P(), + providedSampleAspectRatio: null, + colorFormat: null, + }); + return new FrameState({ + isAnamorphic: false, + scaledSize: videoStream.squarePixelFrameSize(FrameSize.FHD), + paddedSize: FrameSize.FHD, + pixelFormat: new PixelFormatYuv420P(), + 
videoFormat: VideoFormats.H264, + }); + } + + function assertFhdOutput(probePath: string) { + const probe = probeFile(binaries!.ffprobe, probePath); + const videoStream = probe.streams.find((s) => s.codec_type === 'video'); + expect(videoStream).toBeDefined(); + expect(videoStream!.width).toBe(1920); + expect(videoStream!.height).toBe(1080); + } + + vaapiTest( + '4:3 source hardware scale to 1920x1080', + async ({ binaryCapabilities, ffmpegVersion, resolvedVaapi }) => { + const video = make43VideoInput(Fixtures.video480p43); + const audio = makeAudioInput(Fixtures.video480p43); + + const builder = new VaapiPipelineBuilder( + makeVaapiCapsWithH264AndHevc(), + binaryCapabilities, + video, + audio, + null, + null, + null, + ); + + const outputPath = path.join(workdir, 'scale_hw_43.ts'); + const pipeline = builder.build( + FfmpegState.create({ + duration: testDuration, + version: ffmpegVersion, + outputLocation: FileOutputLocation(outputPath, true), + vaapiDevice: resolvedVaapi.device, + }), + make43ScaleFrameState(), + DefaultPipelineOptions, + ); + + const { exitCode, stderr } = runFfmpegWithPipeline( + binaries!.ffmpeg, + pipeline.getCommandArgs(), + ); + + expect( + exitCode, + `Pipeline command failed: ${pipeline.getCommandArgs().join(' ')}\n${stderr}`, + ).toBe(0); + assertFhdOutput(outputPath); + }, + ); + + vaapiTest( + '4:3 source fully software pipeline (hw decode, encode, and filters all disabled)', + async ({ binaryCapabilities, ffmpegVersion, resolvedVaapi }) => { + const video = make43VideoInput(Fixtures.video480p43); + const audio = makeAudioInput(Fixtures.video480p43); + + const builder = new VaapiPipelineBuilder( + makeVaapiCapsWithH264AndHevc(), + binaryCapabilities, + video, + audio, + null, + null, + null, + ); + + const outputPath = path.join(workdir, 'scale_sw_both_disabled.ts'); + const pipeline = builder.build( + FfmpegState.create({ + duration: testDuration, + version: ffmpegVersion, + outputLocation: FileOutputLocation(outputPath, true), + 
vaapiDevice: resolvedVaapi.device, + }), + make43ScaleFrameState(), + { + ...DefaultPipelineOptions, + disableHardwareDecoding: true, + disableHardwareEncoding: true, + disableHardwareFilters: true, + }, + ); + + const { exitCode, stderr } = runFfmpegWithPipeline( + binaries!.ffmpeg, + pipeline.getCommandArgs(), + ); + + expect( + exitCode, + `Pipeline command failed: ${pipeline.getCommandArgs().join(' ')}\n${stderr}`, + ).toBe(0); + assertFhdOutput(outputPath); + }, + ); + + vaapiTest( + '4:3 source hardware scale when both hw disabled but deinterlace enabled', + async ({ binaryCapabilities, ffmpegVersion, resolvedVaapi }) => { + const video = make43VideoInput(Fixtures.video480p43); + const audio = makeAudioInput(Fixtures.video480p43); + + const builder = new VaapiPipelineBuilder( + makeVaapiCapsWithH264AndHevc(), + binaryCapabilities, + video, + audio, + null, + null, + null, + ); + + const outputPath = path.join(workdir, 'scale_hw_deinterlace.ts'); + const pipeline = builder.build( + FfmpegState.create({ + duration: testDuration, + version: ffmpegVersion, + outputLocation: FileOutputLocation(outputPath, true), + vaapiDevice: resolvedVaapi.device, + }), + new FrameState({ + isAnamorphic: false, + scaledSize: VideoStream.create({ + codec: 'h264', + profile: 'main', + displayAspectRatio: '4:3', + frameSize: FrameSize.withDimensions(640, 480), + index: 0, + pixelFormat: new PixelFormatYuv420P(), + providedSampleAspectRatio: null, + colorFormat: null, + }).squarePixelFrameSize(FrameSize.FHD), + paddedSize: FrameSize.FHD, + pixelFormat: new PixelFormatYuv420P(), + videoFormat: VideoFormats.H264, + deinterlace: true, + }), + { + ...DefaultPipelineOptions, + disableHardwareDecoding: true, + disableHardwareEncoding: true, + }, + ); + + const { exitCode, stderr } = runFfmpegWithPipeline( + binaries!.ffmpeg, + pipeline.getCommandArgs(), + ); + + expect( + exitCode, + `Pipeline command failed: ${pipeline.getCommandArgs().join(' ')}\n${stderr}`, + ).toBe(0); + 
assertFhdOutput(outputPath); + }, + ); + + vaapiTest( + '16:9 720p source hardware scale to 1920x1080 (no padding needed)', + async ({ binaryCapabilities, ffmpegVersion, resolvedVaapi }) => { + const video = makeVideoInput( + Fixtures.video720p, + FrameSize.withDimensions(1280, 720), + ); + const audio = makeAudioInput(Fixtures.video720p); + + const builder = new VaapiPipelineBuilder( + makeVaapiCapsWithH264AndHevc(), + binaryCapabilities, + video, + audio, + null, + null, + null, + ); + + const outputPath = path.join(workdir, 'scale_sw_169_no_pad.ts'); + const pipeline = builder.build( + FfmpegState.create({ + duration: testDuration, + version: ffmpegVersion, + outputLocation: FileOutputLocation(outputPath, true), + vaapiDevice: resolvedVaapi.device, + }), + // 16:9 → scaledSize == paddedSize, no padding needed; hw decode → frames on hw → scale_vaapi + new FrameState({ + isAnamorphic: false, + scaledSize: FrameSize.FHD, + paddedSize: FrameSize.FHD, + pixelFormat: new PixelFormatYuv420P(), + videoFormat: VideoFormats.H264, + }), + DefaultPipelineOptions, + ); + + const { exitCode, stderr } = runFfmpegWithPipeline( + binaries!.ffmpeg, + pipeline.getCommandArgs(), + ); + + expect( + exitCode, + `Pipeline command failed: ${pipeline.getCommandArgs().join(' ')}\n${stderr}`, + ).toBe(0); + assertFhdOutput(outputPath); + }, + ); + + vaapiTest( + '4:3 source software scale when hardware filters disabled', + async ({ binaryCapabilities, ffmpegVersion, resolvedVaapi }) => { + const video = make43VideoInput(Fixtures.video480p43); + const audio = makeAudioInput(Fixtures.video480p43); + + const builder = new VaapiPipelineBuilder( + makeVaapiCapsWithH264AndHevc(), + binaryCapabilities, + video, + audio, + null, + null, + null, + ); + + const outputPath = path.join(workdir, 'scale_sw_filters_disabled.ts'); + const pipeline = builder.build( + FfmpegState.create({ + duration: testDuration, + version: ffmpegVersion, + outputLocation: FileOutputLocation(outputPath, true), + 
vaapiDevice: resolvedVaapi.device, + }), + make43ScaleFrameState(), + { ...DefaultPipelineOptions, disableHardwareFilters: true }, + ); + + const { exitCode, stderr } = runFfmpegWithPipeline( + binaries!.ffmpeg, + pipeline.getCommandArgs(), + ); + + expect( + exitCode, + `Pipeline command failed: ${pipeline.getCommandArgs().join(' ')}\n${stderr}`, + ).toBe(0); + assertFhdOutput(outputPath); + }, + ); + }, +); + +// ─── Tonemapping ───────────────────────────────────────────────────────────── + +const canDecodeHdr = + vaapiInfo?.capabilities.canDecode( + VideoFormats.Hevc, + 'main 10', + new PixelFormatYuv420P10Le(), + ) ?? false; + +describe.skipIf(!binaries || !vaapiInfo || !canDecodeHdr)( + 'VaapiPipelineBuilder tonemap integration', + () => { + let workdir: string; + let cleanup: () => Promise<void>; + + beforeAll(async () => { + ({ dir: workdir, cleanup } = await createTempWorkdir()); + }); + + afterAll(() => cleanup()); + + afterEach(() => { + delete process.env[TONEMAP_ENABLED]; + }); + + function makeHdrVaapiCapabilities() { + return new VaapiHardwareCapabilities([ + new VaapiProfileEntrypoint(VaapiProfiles.HevcMain10, VaapiEntrypoint.Decode), + new VaapiProfileEntrypoint(VaapiProfiles.HevcMain, VaapiEntrypoint.Encode), + new VaapiProfileEntrypoint(VaapiProfiles.H264Main, VaapiEntrypoint.Encode), + ]); + } + + function makeHdrFrameState() { + return new FrameState({ + isAnamorphic: false, + scaledSize: FrameSize.FHD, + paddedSize: FrameSize.FHD, + pixelFormat: new PixelFormatYuv420P(), + videoFormat: VideoFormats.H264, + }); + } + + vaapiTest.skipIf(!vaapiOpenclSupported)( + 'HDR10 content tonemapped via opencl', + async ({ binaryCapabilities, ffmpegVersion, resolvedVaapi }) => { + process.env[TONEMAP_ENABLED] = 'true'; + + const video = makeHdrVideoInput(Fixtures.videoHdr720p); + const audio = makeAudioInput(Fixtures.videoHdr720p); + + // Merge TonemapOpencl into real capabilities so this path is available + const capsWithOpencl = new FfmpegCapabilities(
binaryCapabilities.allOptions(), + binaryCapabilities.allVideoEncoders(), + new Set([...binaryCapabilities.allFilters(), KnownFfmpegFilters.TonemapOpencl]), + new Set(), + ); + + const builder = new VaapiPipelineBuilder( + makeHdrVaapiCapabilities(), + capsWithOpencl, + video, + audio, + null, + null, + null, + ); + + const outputPath = path.join(workdir, 'tonemap_opencl.ts'); + const pipeline = builder.build( + FfmpegState.create({ + duration: testDuration, + version: ffmpegVersion, + outputLocation: FileOutputLocation(outputPath, true), + vaapiDevice: resolvedVaapi.device, + }), + makeHdrFrameState(), + { + ...DefaultPipelineOptions, + vaapiPipelineOptions: { tonemapPreference: 'opencl' }, + }, + ); + + const { exitCode, stderr } = runFfmpegWithPipeline( + binaries!.ffmpeg, + pipeline.getCommandArgs(), + ); + + expect( + exitCode, + `Pipeline command failed: ${pipeline.getCommandArgs().join(' ')}\n${stderr}`, + ).toBe(0); + const probe = probeFile(binaries!.ffprobe, outputPath); + expect(probe.streams.some((s) => s.codec_type === 'video')).toBe(true); + }, + ); + + vaapiTest( + 'TONEMAP_ENABLED=false skips tonemapping', + async ({ binaryCapabilities, ffmpegVersion, resolvedVaapi }) => { + // No TONEMAP_ENABLED set (defaults to off) + const video = makeHdrVideoInput(Fixtures.videoHdr720p); + const audio = makeAudioInput(Fixtures.videoHdr720p); + + const builder = new VaapiPipelineBuilder( + makeHdrVaapiCapabilities(), + binaryCapabilities, + video, + audio, + null, + null, + null, + ); + + const outputPath = path.join(workdir, 'tonemap_disabled.ts'); + const pipeline = builder.build( + FfmpegState.create({ + duration: testDuration, + version: ffmpegVersion, + outputLocation: FileOutputLocation(outputPath, true), + vaapiDevice: resolvedVaapi.device, + }), + makeHdrFrameState(), + DefaultPipelineOptions, + ); + + const { exitCode, stderr } = runFfmpegWithPipeline( + binaries!.ffmpeg, + pipeline.getCommandArgs(), + ); + + expect( + exitCode, + `Pipeline command 
failed: ${pipeline.getCommandArgs().join(' ')}\n${stderr}`, + ).toBe(0); + const probe = probeFile(binaries!.ffprobe, outputPath); + expect(probe.streams.some((s) => s.codec_type === 'video')).toBe(true); + }, + ); + + vaapiTest( + 'software tonemap fallback (no hardware tonemap capability)', + async ({ ffmpegVersion, resolvedVaapi }) => { + process.env[TONEMAP_ENABLED] = 'true'; + + const video = makeHdrVideoInput(Fixtures.videoHdr720p); + const audio = makeAudioInput(Fixtures.videoHdr720p); + + const builder = new VaapiPipelineBuilder( + makeHdrVaapiCapabilities(), + EmptyFfmpegCapabilities, + video, + audio, + null, + null, + null, + ); + + const outputPath = path.join(workdir, 'tonemap_software.ts'); + const pipeline = builder.build( + FfmpegState.create({ + duration: testDuration, + version: ffmpegVersion, + outputLocation: FileOutputLocation(outputPath, true), + vaapiDevice: resolvedVaapi.device, + }), + makeHdrFrameState(), + DefaultPipelineOptions, + ); + + const { exitCode, stderr } = runFfmpegWithPipeline( + binaries!.ffmpeg, + pipeline.getCommandArgs(), + ); + + expect( + exitCode, + `Pipeline command failed: ${pipeline.getCommandArgs().join(' ')}\n${stderr}`, + ).toBe(0); + const probe = probeFile(binaries!.ffprobe, outputPath); + expect(probe.streams.some((s) => s.codec_type === 'video')).toBe(true); + }, + ); + + vaapiTest( + 'tonemap_vaapi preference', + async ({ binaryCapabilities, ffmpegVersion, resolvedVaapi }) => { + process.env[TONEMAP_ENABLED] = 'true'; + + const video = makeHdrVideoInput(Fixtures.videoHdr720p); + const audio = makeAudioInput(Fixtures.videoHdr720p); + + // Force only tonemap_vaapi to be available + const capsWithVaapiTonemap = new FfmpegCapabilities( + binaryCapabilities.allOptions(), + binaryCapabilities.allVideoEncoders(), + new Set([...binaryCapabilities.allFilters(), KnownFfmpegFilters.TonemapVaapi]), + new Set(), + ); + + const builder = new VaapiPipelineBuilder( + makeHdrVaapiCapabilities(), + capsWithVaapiTonemap, + 
video, + audio, + null, + null, + null, + ); + + const outputPath = path.join(workdir, 'tonemap_vaapi.ts'); + const pipeline = builder.build( + FfmpegState.create({ + duration: testDuration, + version: ffmpegVersion, + outputLocation: FileOutputLocation(outputPath, true), + vaapiDevice: resolvedVaapi.device, + }), + makeHdrFrameState(), + { + ...DefaultPipelineOptions, + vaapiPipelineOptions: { tonemapPreference: 'vaapi' }, + }, + ); + + const { exitCode, stderr } = runFfmpegWithPipeline( + binaries!.ffmpeg, + pipeline.getCommandArgs(), + ); + + expect( + exitCode, + `Pipeline command failed: ${pipeline.getCommandArgs().join(' ')}\n${stderr}`, + ).toBe(0); + const probe = probeFile(binaries!.ffprobe, outputPath); + expect(probe.streams.some((s) => s.codec_type === 'video')).toBe(true); + }, + ); + }, +); diff --git a/server/src/ffmpeg/builder/pipeline/hardware/VaapiPipelineBuilder.test.ts b/server/src/ffmpeg/builder/pipeline/hardware/VaapiPipelineBuilder.test.ts index a0cf2b76..a942d6bd 100644 --- a/server/src/ffmpeg/builder/pipeline/hardware/VaapiPipelineBuilder.test.ts +++ b/server/src/ffmpeg/builder/pipeline/hardware/VaapiPipelineBuilder.test.ts @@ -20,6 +20,7 @@ import { VideoFormats, } from '../../constants.ts'; import { PadFilter } from '../../filter/PadFilter.ts'; +import { ScaleFilter } from '../../filter/ScaleFilter.ts'; import { PadVaapiFilter } from '../../filter/vaapi/PadVaapiFilter.ts'; import { ScaleVaapiFilter } from '../../filter/vaapi/ScaleVaapiFilter.ts'; import { TonemapVaapiFilter } from '../../filter/vaapi/TonemapVaapiFilter.ts'; @@ -56,6 +57,59 @@ import { FrameSize } from '../../types.ts'; import { Pipeline } from '../Pipeline.ts'; import { VaapiPipelineBuilder } from './VaapiPipelineBuilder.ts'; +// ─── Shared helpers ───────────────────────────────────────────────────────── + +const fakeVersion = { + versionString: 'n7.0.2', + majorVersion: 7, + minorVersion: 0, + patchVersion: 2, + isUnknown: false, +}; + +// 16:9 FHD: 
squarePixelFrameSize(FHD) = 1920x1080 = paddedSize → no scale, no pad needed +function create169FhdVideoStream(): VideoStream { + return VideoStream.create({ + index: 0, + codec: 'h264', + profile: 'main', + pixelFormat: new PixelFormatYuv420P(), + frameSize: FrameSize.FHD, + displayAspectRatio: '16:9', + providedSampleAspectRatio: '1:1', + colorFormat: null, + }); +} + +// 4:3 640x480: squarePixelFrameSize(FHD) = 1440x1080, paddedSize = 1920x1080 +// scale IS needed (640 != 1440), padding IS needed (1440 != 1920) +function create43VideoStream(): VideoStream { + return VideoStream.create({ + index: 0, + codec: 'h264', + profile: 'main', + pixelFormat: new PixelFormatYuv420P(), + frameSize: FrameSize.withDimensions(640, 480), + displayAspectRatio: '4:3', + providedSampleAspectRatio: null, + colorFormat: null, + }); +} + +// ─── Shared env-var save/restore (applied to all describe blocks) ──────────── + +const originalEnv = process.env; + +beforeEach(() => { + process.env = { ...originalEnv }; +}); + +afterEach(() => { + process.env = originalEnv; +}); + +// ───────────────────────────────────────────────────────────────────────────── + describe('VaapiPipelineBuilder', () => { test('should work', () => { const capabilities = new VaapiHardwareCapabilities([]); @@ -477,59 +531,14 @@ describe('VaapiPipelineBuilder', () => { }); describe('VaapiPipelineBuilder pad', () => { - const originalEnv = process.env; - - beforeEach(() => { - process.env = { ...originalEnv }; - }); - - afterEach(() => { - process.env = originalEnv; - }); - - const fakeVersion = { - versionString: 'n7.0.2', - majorVersion: 7, - minorVersion: 0, - patchVersion: 2, - isUnknown: false, - }; - - // 16:9 FHD video that exactly fills the target: no padding needed - // squarePixelFrameSize(FHD) = 1920x1080 = paddedSize - function create169FhdVideoStream(): VideoStream { - return VideoStream.create({ - index: 0, - codec: 'h264', - profile: 'main', - pixelFormat: new PixelFormatYuv420P(), - frameSize: 
FrameSize.FHD, - displayAspectRatio: '16:9', - providedSampleAspectRatio: '1:1', - colorFormat: null, - }); - } - - // 4:3 video that needs pillarboxing to fit in 16:9 FHD: - // squarePixelFrameSize(FHD) = 1440x1080, paddedSize = 1920x1080 - function create43VideoStream(): VideoStream { - return VideoStream.create({ - index: 0, - codec: 'h264', - profile: 'main', - pixelFormat: new PixelFormatYuv420P(), - frameSize: FrameSize.withDimensions(640, 480), - displayAspectRatio: '4:3', - providedSampleAspectRatio: null, - colorFormat: null, - }); - } + // 4:3 video needs pillarboxing: squarePixelFrameSize(FHD) = 1440x1080, paddedSize = 1920x1080 function buildWithPad(opts: { videoStream: VideoStream; binaryCapabilities?: FfmpegCapabilities; disableHardwareDecoding?: boolean; disableHardwareEncoding?: boolean; + watermarkStream?: StillImageStream; }) { const capabilities = new VaapiHardwareCapabilities([ new VaapiProfileEntrypoint( @@ -556,12 +565,29 @@ describe('VaapiPipelineBuilder pad', () => { opts.videoStream, ); + let wm: WatermarkInputSource | null = null; + if (opts.watermarkStream) { + wm = new WatermarkInputSource( + new FileStreamSource('/path/to/watermark.png'), + opts.watermarkStream, + { + duration: 0, + enabled: true, + horizontalMargin: 0, + opacity: 1, + position: 'top-left', + verticalMargin: 0, + width: 100, + }, + ); + } + const builder = new VaapiPipelineBuilder( capabilities, binaryCapabilities, video, null, - null, + wm, null, null, ); @@ -746,26 +772,31 @@ describe('VaapiPipelineBuilder pad', () => { expect(args).not.toContain('pad_vaapi'); expect(args).not.toContain('pad='); }); + + test('hardware download after pad_vaapi with watermark', () => { + const pipeline = buildWithPad({ + videoStream: VideoStream.create({ + index: 0, + codec: VideoFormats.Mpeg4, + // profile: 'main', + pixelFormat: new PixelFormatYuv420P(), + frameSize: FrameSize.withDimensions(1920, 1050), + displayAspectRatio: '4:3', + providedSampleAspectRatio: null, + colorFormat: 
null, + }), + watermarkStream: StillImageStream.create({ + frameSize: FrameSize.withDimensions(100, 100), + index: 0, + }), + }); + + const args = pipeline.getCommandArgs(); + console.log(args.join(' ')); + }); }); describe('VaapiPipelineBuilder tonemap', () => { - const originalEnv = process.env; - const fakeVersion = { - versionString: 'n7.0.2', - majorVersion: 7, - minorVersion: 0, - patchVersion: 2, - isUnknown: false, - }; - - beforeEach(() => { - process.env = { ...originalEnv }; - }); - - afterEach(() => { - process.env = originalEnv; - }); - function createHdrVideoStream( colorFormat: ColorFormat = new ColorFormat({ colorRange: ColorRanges.Tv, @@ -983,8 +1014,11 @@ describe('VaapiPipelineBuilder tonemap', () => { }); const args = pipeline.getCommandArgs().join(' '); + console.log(args); const tonemapIndex = args.indexOf('tonemap_opencl'); - const scaleIndex = args.indexOf('scale_vaapi'); + // buildWithTonemap sets scaledSize=FHD=paddedSize, but frames are on hardware after tonemap + // → scale_vaapi is used (frames stay on hardware) + const scaleIndex = args.indexOf('scale_vaapi='); expect(tonemapIndex).toBeGreaterThan(-1); expect(scaleIndex).toBeGreaterThan(-1); @@ -1047,8 +1081,11 @@ describe('VaapiPipelineBuilder tonemap', () => { }); const args = pipeline.getCommandArgs().join(' '); + console.log(args); const tonemapIndex = args.indexOf('tonemap_opencl'); - const scaleIndex = args.indexOf('scale_vaapi'); + // buildWithTonemap sets scaledSize=FHD=paddedSize, but frames are on hardware after tonemap + // → scale_vaapi is used (frames stay on hardware) + const scaleIndex = args.indexOf('scale_vaapi='); expect(tonemapIndex).toBeGreaterThan(-1); expect(scaleIndex).toBeGreaterThan(-1); @@ -1280,9 +1317,9 @@ describe('VaapiPipelineBuilder tonemap', () => { ); }); - // This test verifies that software decode triggers a scale_vaapi because of the tonemap - // to ensure we don't excessively move frames from hardware <-> software - test('8-bit yuv420p HDR input 
uses vaapi tonemap and scale_vaapi (software decode)', () => { + // After tonemap uploads frames to hardware, condition 2 (decoder!=VAAPI && frames on Hardware) + // triggers software scale — frames are downloaded from hardware before the software scale. + test('8-bit yuv420p HDR input uses vaapi tonemap and software scale (software decode)', () => { process.env[TONEMAP_ENABLED] = 'true'; // Unusual but valid: 8-bit stream tagged with HDR color metadata @@ -1316,16 +1353,19 @@ describe('VaapiPipelineBuilder tonemap', () => { }, }); + const args = pipeline.getCommandArgs().join(' '); + console.log(args); const filters = pipeline.getComplexFilter()!.filterChain.videoFilterSteps; expect(hasVaapiTonemapFilter(pipeline)).to.eq(true); - const scaleFilter = filters.find( - (filter) => filter instanceof ScaleVaapiFilter, - ); - expect(scaleFilter).toBeDefined(); + // decoder=None, tonemap uploads to hardware → condition 2 fires → ScaleFilter (software scale) + expect(filters.some((f) => f instanceof ScaleFilter)).toBe(true); + expect(filters.some((f) => f instanceof ScaleVaapiFilter)).toBe(false); + // Frames come from hardware → ScaleFilter inserts hwdownload + expect(args).toContain('hwdownload'); + expect(args).toContain('scale='); }); - // This test verifies that hardware decode also uses scale_vaapi after vaapi tonemap - test('8-bit yuv420p HDR input uses vaapi tonemap and scale_vaapi (hardware decode)', () => { + test('8-bit yuv420p HDR input uses vaapi tonemap and hardware scale (hardware decode)', () => { process.env[TONEMAP_ENABLED] = 'true'; // Unusual but valid: 8-bit stream tagged with HDR color metadata @@ -1358,12 +1398,15 @@ describe('VaapiPipelineBuilder tonemap', () => { }, }); + const args = pipeline.getCommandArgs().join(' '); + console.log(args); const filters = pipeline.getComplexFilter()!.filterChain.videoFilterSteps; expect(hasVaapiTonemapFilter(pipeline)).to.eq(true); - const scaleFilter = filters.find( - (filter) => filter instanceof 
ScaleVaapiFilter, - ); - expect(scaleFilter).toBeDefined(); + // Frames on hardware after tonemap → scale_vaapi is used (frames stay on hardware) + expect(filters.some((f) => f instanceof ScaleVaapiFilter)).toBe(true); + expect(filters.some((f) => f instanceof ScaleFilter)).toBe(false); + expect(args).toContain('scale_vaapi='); + expect(args).not.toContain('scale='); }); describe('still image stream', () => { @@ -1450,3 +1493,364 @@ describe('VaapiPipelineBuilder tonemap', () => { }); }); }); + +describe('VaapiPipelineBuilder scale', () => { + // 16:9 1280x720 — squarePixelFrameSize(FHD) = 1920x1080 = paddedSize + // willNeedPad = false, scale IS needed (1280 != 1920) + function create169VideoStream(): VideoStream { + return VideoStream.create({ + index: 0, + codec: 'h264', + profile: 'main', + pixelFormat: new PixelFormatYuv420P(), + frameSize: FrameSize.withDimensions(1280, 720), + displayAspectRatio: '16:9', + providedSampleAspectRatio: '1:1', + colorFormat: null, + }); + } + + // 4:3 HDR HEVC stream — squarePixelFrameSize(FHD) = 1440x1080, paddedSize = 1920x1080 + // willNeedPad = true, scale is needed; HDR triggers tonemap when TONEMAP_ENABLED=true + function createHdr43VideoStream(): VideoStream { + return VideoStream.create({ + index: 0, + codec: 'hevc', + profile: 'main 10', + pixelFormat: new PixelFormatYuv420P10Le(), + frameSize: FrameSize.withDimensions(640, 480), + displayAspectRatio: '4:3', + providedSampleAspectRatio: null, + colorFormat: new ColorFormat({ + colorRange: ColorRanges.Tv, + colorSpace: ColorSpaces.Bt2020nc, + colorPrimaries: ColorPrimaries.Bt2020, + colorTransfer: ColorTransferFormats.Smpte2084, + }), + }); + } + + function buildWithScale(opts: { + videoStream: VideoStream; + binaryCapabilities?: FfmpegCapabilities; + disableHardwareDecoding?: boolean; + disableHardwareEncoding?: boolean; + disableHardwareFilters?: boolean; + deinterlace?: boolean; + }): Pipeline { + const capabilities = new VaapiHardwareCapabilities([ + new 
VaapiProfileEntrypoint(VaapiProfiles.H264Main, VaapiEntrypoint.Decode), + new VaapiProfileEntrypoint(VaapiProfiles.H264Main, VaapiEntrypoint.Encode), + new VaapiProfileEntrypoint( + VaapiProfiles.HevcMain10, + VaapiEntrypoint.Decode, + ), + new VaapiProfileEntrypoint(VaapiProfiles.HevcMain, VaapiEntrypoint.Encode), + ]); + + const binaryCapabilities = + opts.binaryCapabilities ?? + new FfmpegCapabilities( + new Set(), + new Map(), + new Set([KnownFfmpegFilters.PadVaapi]), + new Set(), + ); + + const video = VideoInputSource.withStream( + new FileStreamSource('/path/to/video.mkv'), + opts.videoStream, + ); + + const builder = new VaapiPipelineBuilder( + capabilities, + binaryCapabilities, + video, + null, + null, + null, + null, + ); + + const state = FfmpegState.create({ version: fakeVersion }); + const videoStream = video.streams[0]!; + + return builder.build( + state, + new FrameState({ + isAnamorphic: false, + scaledSize: videoStream.squarePixelFrameSize(FrameSize.FHD), + paddedSize: FrameSize.FHD, + pixelFormat: new PixelFormatYuv420P(), + videoFormat: VideoFormats.H264, + deinterlace: opts.deinterlace ?? false, + }), + { + ...DefaultPipelineOptions, + vaapiDevice: '/dev/dri/renderD128', + disableHardwareDecoding: opts.disableHardwareDecoding ?? false, + disableHardwareEncoding: opts.disableHardwareEncoding ?? false, + disableHardwareFilters: opts.disableHardwareFilters ?? false, + }, + ); + } + + function getVideoFilterSteps(pipeline: Pipeline) { + return pipeline.getComplexFilter()?.filterChain.videoFilterSteps ?? 
[]; + } + + // ─── Baseline: hardware scale ──────────────────────────────────────────────── + + test('uses scale_vaapi when VAAPI decode+encode, padding is needed, and hw pad is available', () => { + // Baseline: all conditions for software scale are false → hardware scale + const pipeline = buildWithScale({ videoStream: create43VideoStream() }); + const args = pipeline.getCommandArgs().join(' '); + console.log('scale baseline (hardware):', args); + + const filters = getVideoFilterSteps(pipeline); + expect(filters.some((f) => f instanceof ScaleVaapiFilter)).toBe(true); + expect(filters.some((f) => f instanceof ScaleFilter)).toBe(false); + expect(args).toContain('scale_vaapi='); + expect(args).not.toContain('scale='); + }); + + // ─── Condition 1: both decode and encode are None, no deinterlace ───────────── + + test('uses software scale when both hardware decode and encode are disabled (no deinterlace)', () => { + // decoderMode=None, encoderMode=None, !shouldDeinterlace → condition 1 true → software scale + const pipeline = buildWithScale({ + videoStream: create43VideoStream(), + disableHardwareDecoding: true, + disableHardwareEncoding: true, + }); + const args = pipeline.getCommandArgs().join(' '); + console.log('scale sw (both disabled, no deinterlace):', args); + + const filters = getVideoFilterSteps(pipeline); + expect(filters.some((f) => f instanceof ScaleFilter)).toBe(true); + expect(filters.some((f) => f instanceof ScaleVaapiFilter)).toBe(false); + expect(args).toContain('scale='); // 'scale=' without the vaapi suffix + expect(args).not.toContain('scale_vaapi='); + }); + + test('uses hardware scale when both decode and encode are disabled but deinterlace is requested', () => { + // decoderMode=None, encoderMode=None, shouldDeinterlace=true → condition 1 is false + // All other conditions are also false (padding needed, hw pad available, hw filters enabled) + // → hardware scale; ScaleVaapiFilter prepends format+hwupload since frames are in software + const 
pipeline = buildWithScale({ + videoStream: create43VideoStream(), + disableHardwareDecoding: true, + disableHardwareEncoding: true, + deinterlace: true, + }); + const args = pipeline.getCommandArgs().join(' '); + console.log('scale hw (both disabled + deinterlace):', args); + + const filters = getVideoFilterSteps(pipeline); + expect(filters.some((f) => f instanceof ScaleVaapiFilter)).toBe(true); + expect(filters.some((f) => f instanceof ScaleFilter)).toBe(false); + // Software frames → ScaleVaapiFilter prepends format+hwupload before scale_vaapi + expect(args).toContain('hwupload'); + expect(args).toContain('scale_vaapi='); + }); + + // ─── Condition 2: decoder != VAAPI, frame data location decides ────────────── + + test('uses hardware scale when decode is disabled and frames remain in software (no tonemap, padding needed)', () => { + // decoderMode=None (decode disabled), frames in Software (no tonemap runs) + // Condition 2: decoder!=VAAPI (true) && frameDataLocation==Hardware (FALSE) → false + // Old code would software scale here: decoder!=VAAPI && !shouldPerformTonemap && canTonemapOnHardware + // New code correctly uses hardware scale since frames are not on hardware + const pipeline = buildWithScale({ + videoStream: create43VideoStream(), // SDR, no tonemap triggered + binaryCapabilities: new FfmpegCapabilities( + new Set(), + new Map(), + // Has TonemapVaapi (so old condition canTonemapOnHardware=true would fire), + // but TONEMAP_ENABLED is false so no tonemap actually runs + new Set([KnownFfmpegFilters.PadVaapi, KnownFfmpegFilters.TonemapVaapi]), + new Set(), + ), + disableHardwareDecoding: true, + // TONEMAP_ENABLED not set → frames stay in Software + }); + const args = pipeline.getCommandArgs().join(' '); + console.log('scale hw (decode disabled, frames in sw, no tonemap):', args); + + const filters = getVideoFilterSteps(pipeline); + expect(filters.some((f) => f instanceof ScaleVaapiFilter)).toBe(true); + expect(filters.some((f) => f instanceof 
ScaleFilter)).toBe(false); + // Frames are in Software → ScaleVaapiFilter adds format+hwupload prefix + expect(args).toContain('hwupload'); + expect(args).toContain('scale_vaapi='); + expect(args).not.toContain('scale='); + }); + + test('uses software scale when decode is disabled but tonemap uploads frames to hardware', () => { + // decoderMode=None (decode disabled), tonemap runs and uploads frames to Hardware + // Condition 2: decoder!=VAAPI (true) && frameDataLocation==Hardware (TRUE after tonemap) → software scale + process.env[TONEMAP_ENABLED] = 'true'; + + const pipeline = buildWithScale({ + videoStream: createHdr43VideoStream(), // HDR + 4:3 → tonemap runs, padding needed + binaryCapabilities: new FfmpegCapabilities( + new Set(), + new Map(), + // TonemapOpencl matches default vaapiPipelineOptions.tonemapPreference='opencl' + new Set([KnownFfmpegFilters.PadVaapi, KnownFfmpegFilters.TonemapOpencl]), + new Set(), + ), + disableHardwareDecoding: true, + // After TonemapOpenclFilter.nextState: frameDataLocation = Hardware + // → condition 2 fires → software scale + }); + const args = pipeline.getCommandArgs().join(' '); + console.log('scale sw (decode disabled, frames on hw via tonemap):', args); + + const filters = getVideoFilterSteps(pipeline); + expect(filters.some((f) => f instanceof ScaleFilter)).toBe(true); + expect(filters.some((f) => f instanceof ScaleVaapiFilter)).toBe(false); + // Frames came from hardware (tonemap) → ScaleFilter adds hwdownload before software scale + expect(args).toContain('hwdownload'); + expect(args).toContain('scale='); + expect(args).not.toContain('scale_vaapi='); + }); + + // ─── Condition 3: !willNeedPad — scaledSize equals paddedSize ──────────────── + + test('uses hardware scale even when no padding is needed (16:9 source, hw decode → frames on hardware)', () => { + // 1280x720 16:9 → squarePixelFrameSize(FHD) = 1920x1080 = paddedSize → !willNeedPad = true + // But hw decode puts frames on hardware → condition 3 does NOT 
fire → scale_vaapi + const pipeline = buildWithScale({ videoStream: create169VideoStream() }); + const args = pipeline.getCommandArgs().join(' '); + console.log('scale hw (!willNeedPad but frames on hw):', args); + + const filters = getVideoFilterSteps(pipeline); + expect(filters.some((f) => f instanceof ScaleVaapiFilter)).toBe(true); + expect(filters.some((f) => f instanceof ScaleFilter)).toBe(false); + expect(args).toContain('scale_vaapi='); + expect(args).not.toContain('scale='); + }); + + // ─── Condition 4: !canPadOnHardware — no hardware pad filter available ──────── + + test('uses hardware scale when hw pad capability is not available (hw decode → frames on hardware; pad will hwdownload)', () => { + // 4:3 → needs padding; no PadVaapi/PadOpencl → canPadOnHardware=false + // But hw decode puts frames on hardware → !canPadOnHardware alone does NOT force sw scale + // scale_vaapi runs first; PadFilter then auto-prepends hwdownload before software pad + const pipeline = buildWithScale({ + videoStream: create43VideoStream(), + binaryCapabilities: new FfmpegCapabilities( + new Set(), + new Map(), + new Set(), // no pad_vaapi, no pad_opencl + new Set(), + ), + }); + const args = pipeline.getCommandArgs().join(' '); + console.log('scale hw (!canPadOnHardware but frames on hw):', args); + + const filters = getVideoFilterSteps(pipeline); + expect(filters.some((f) => f instanceof ScaleVaapiFilter)).toBe(true); + expect(filters.some((f) => f instanceof ScaleFilter)).toBe(false); + expect(args).toContain('scale_vaapi='); + expect(args).not.toContain('scale='); + }); + + test('uses hardware scale when only pad_opencl is available (not pad_vaapi)', () => { + // pad_opencl satisfies canPadOnHardware → hardware scale + const pipeline = buildWithScale({ + videoStream: create43VideoStream(), + binaryCapabilities: new FfmpegCapabilities( + new Set(), + new Map(), + new Set([KnownFfmpegFilters.PadOpencl]), + new Set(), + ), + }); + const args = pipeline.getCommandArgs().join(' 
'); + console.log('scale hw (pad_opencl satisfies canPadOnHardware):', args); + + const filters = getVideoFilterSteps(pipeline); + expect(filters.some((f) => f instanceof ScaleVaapiFilter)).toBe(true); + expect(filters.some((f) => f instanceof ScaleFilter)).toBe(false); + expect(args).toContain('scale_vaapi='); + }); + + // ─── Condition 5: disableHardwareFilters ───────────────────────────────────── + + test('uses software scale when hardware filters are disabled', () => { + // disableHardwareFilters → condition 5 fires → software scale + // Also makes canPadOnHardware=false, but condition 5 fires first + const pipeline = buildWithScale({ + videoStream: create43VideoStream(), + disableHardwareFilters: true, + }); + const args = pipeline.getCommandArgs().join(' '); + console.log('scale sw (disableHardwareFilters):', args); + + const filters = getVideoFilterSteps(pipeline); + expect(filters.some((f) => f instanceof ScaleFilter)).toBe(true); + expect(filters.some((f) => f instanceof ScaleVaapiFilter)).toBe(false); + expect(args).toContain('scale='); + expect(args).not.toContain('scale_vaapi='); + }); + + test('disableHardwareFilters overrides available pad_vaapi capability and forces software scale', () => { + // Even with pad_vaapi available, disableHardwareFilters forces software path + const pipeline = buildWithScale({ + videoStream: create43VideoStream(), + binaryCapabilities: new FfmpegCapabilities( + new Set(), + new Map(), + new Set([KnownFfmpegFilters.PadVaapi]), + new Set(), + ), + disableHardwareFilters: true, + }); + const args = pipeline.getCommandArgs().join(' '); + console.log('scale sw (disableHardwareFilters overrides pad_vaapi):', args); + + expect(args).not.toContain('scale_vaapi='); + expect(args).not.toContain('pad_vaapi'); + expect(args).toContain('scale='); + }); + + // ─── Regression: HDR content with TONEMAP_ENABLED=false ───────────────────── + + test('uses hardware scale for HDR content when TONEMAP_ENABLED is false (regression)', () => { + 
// Regression: HDR content with TONEMAP_ENABLED=false caused software scale because + // canPadOnHardware() returns false for HDR, and !canPadOnHardware was incorrectly + // forcing software scale regardless of frame location. With hw decode active, frames + // are on hardware → scale_vaapi should be used. + process.env[TONEMAP_ENABLED] = 'false'; + + const pipeline = buildWithScale({ videoStream: createHdr43VideoStream() }); + const args = pipeline.getCommandArgs().join(' '); + console.log('scale hw (HDR, TONEMAP_ENABLED=false, regression):', args); + + const filters = getVideoFilterSteps(pipeline); + expect(filters.some((f) => f instanceof ScaleVaapiFilter)).toBe(true); + expect(filters.some((f) => f instanceof ScaleFilter)).toBe(false); + expect(args).toContain('scale_vaapi='); + expect(args).not.toContain('scale='); + }); + + // ─── No scale needed ───────────────────────────────────────────────────────── + + test('produces no resize when source already matches desired scaled size', () => { + // 1920x1080 FHD source → currentState.scaledSize == desiredState.scaledSize + // setScale creates a ScaleVaapiFilter only for pixel format conversion (not resize). + // ScaleVaapiFilter with matching sizes outputs 'scale_vaapi=format=...' without dimensions. 
+ const pipeline = buildWithScale({ videoStream: create169FhdVideoStream() }); + const args = pipeline.getCommandArgs().join(' '); + console.log('no resize (sizes already match):', args); + + const filters = getVideoFilterSteps(pipeline); + // No software resize + expect(filters.some((f) => f instanceof ScaleFilter)).toBe(false); + // scale_vaapi with an actual resize includes 'force_divisible_by'; format-only does not + expect(args).not.toContain('force_divisible_by'); + expect(args).not.toContain('scale='); + }); +}); diff --git a/server/src/ffmpeg/builder/pipeline/hardware/VaapiPipelineBuilder.ts b/server/src/ffmpeg/builder/pipeline/hardware/VaapiPipelineBuilder.ts index 4e941f25..9dfc9f2b 100644 --- a/server/src/ffmpeg/builder/pipeline/hardware/VaapiPipelineBuilder.ts +++ b/server/src/ffmpeg/builder/pipeline/hardware/VaapiPipelineBuilder.ts @@ -71,6 +71,11 @@ import { import { FrameDataLocation, RateControlMode } from '../../types.ts'; export class VaapiPipelineBuilder extends SoftwarePipelineBuilder { + // Set in setHardwareAccelState(); used in setScale() to avoid a + // hwdownload+hwupload cycle that breaks with the named init_hw_device setup + // required for tonemap_opencl. + private willUseOpenclTonemap = false; + constructor( private hardwareCapabilities: BaseFfmpegHardwareCapabilities, binaryCapabilities: FfmpegCapabilities, @@ -109,8 +114,26 @@ export class VaapiPipelineBuilder extends SoftwarePipelineBuilder { } if (isNonEmptyString(ffmpegState.vaapiDevice)) { + // Use OpenCL device derivation when tonemap_opencl will be selected. + // The named init_hw_device approach is required for hwmap=derive_device=opencl + // to resolve the parent VAAPI device. Only enable it when the binary + // actually supports tonemap_opencl (i.e. OpenCL is available on the host). 
+ const { pipelineOptions } = this.context; + this.willUseOpenclTonemap = + !pipelineOptions?.disableHardwareFilters && + getBooleanEnvVar(TONEMAP_ENABLED, false) && + isVideoPipelineContext(this.context) && + isHdrContent(this.context.videoStream) && + (pipelineOptions?.vaapiPipelineOptions?.tonemapPreference ?? + 'opencl') === 'opencl' && + this.ffmpegCapabilities.hasFilter(KnownFfmpegFilters.TonemapOpencl); + this.pipelineSteps.push( - new VaapiHardwareAccelerationOption(ffmpegState.vaapiDevice, canDecode), + new VaapiHardwareAccelerationOption( + ffmpegState.vaapiDevice, + canDecode, + this.willUseOpenclTonemap, + ), ); if (isNonEmptyString(ffmpegState.vaapiDriver)) { @@ -327,7 +350,7 @@ export class VaapiPipelineBuilder extends SoftwarePipelineBuilder { HardwareAccelerationMode.Vaapi && currentState.frameDataLocation === FrameDataLocation.Software ) { - steps.push(new HardwareUploadVaapiFilter(needsVaapiSetFormat, 64)); + steps.push(new HardwareUploadVaapiFilter(needsVaapiSetFormat)); } } @@ -399,10 +422,15 @@ export class VaapiPipelineBuilder extends SoftwarePipelineBuilder { } let nextState = currentState; - const { desiredState, ffmpegState, shouldDeinterlace, videoStream } = + const { desiredState, ffmpegState, shouldDeinterlace, pipelineOptions } = this.context; let scaleOption: FilterOption; + const willNeedPad = !desiredState.scaledSize.equals( + desiredState.paddedSize, + ); + const canPadOnHardware = this.canPadOnHardware(); + if ( !currentState.scaledSize.equals(desiredState.scaledSize) && ((ffmpegState.decoderHwAccelMode === HardwareAccelerationMode.None && @@ -412,8 +440,16 @@ export class VaapiPipelineBuilder extends SoftwarePipelineBuilder { // performed a software decode, we'll have had to upload to hardware to tonemap anyway (most likely) // so try to continue on hardware if possible (ffmpegState.decoderHwAccelMode !== HardwareAccelerationMode.Vaapi && - !this.shouldPerformTonemap(videoStream) && - this.canTonemapOnHardware())) + 
currentState.frameDataLocation === FrameDataLocation.Hardware) || + // Use software scale only when frames are not already on hardware. + // If frames are on hardware (from hw decode or tonemap), keep them + // there and use scale_vaapi — downloading for a software scale and + // re-uploading is wasteful, and breaks the named-device init_hw_device + // setup used for tonemap_opencl. Pad capability does not affect the + // scale decision: if padding requires software, it can hwdownload after. + ((!willNeedPad || !canPadOnHardware) && + currentState.frameDataLocation !== FrameDataLocation.Hardware) || + pipelineOptions.disableHardwareFilters) ) { scaleOption = ScaleFilter.create( currentState, @@ -459,10 +495,9 @@ export class VaapiPipelineBuilder extends SoftwarePipelineBuilder { } // Enabled by default - const disableHardwarePad = getBooleanEnvVar( - TUNARR_ENV_VARS.DISABLE_VAAPI_PAD, - false, - ); + const disableHardwarePad = + getBooleanEnvVar(TUNARR_ENV_VARS.DISABLE_VAAPI_PAD, false) || + this.context.pipelineOptions.disableHardwareFilters; let padFilter: Maybe; if (isHdrContent(this.context.videoStream)) { padFilter = PadFilter.create(currentState, this.desiredState); @@ -645,11 +680,25 @@ export class VaapiPipelineBuilder extends SoftwarePipelineBuilder { ); } - private canTonemapOnHardware() { + private canPadOnHardware() { + if (!isVideoPipelineContext(this.context)) { + return false; + } + const disableHardwarePad = + getBooleanEnvVar(TUNARR_ENV_VARS.DISABLE_VAAPI_PAD, false) || + this.context.pipelineOptions.disableHardwareFilters; + + if (disableHardwarePad) { + return false; + } + + if (isHdrContent(this.context.videoStream)) { + return false; + } + return ( - !this.context.pipelineOptions.disableHardwareFilters && - (this.ffmpegCapabilities.hasFilter(KnownFfmpegFilters.TonemapVaapi) || - this.ffmpegCapabilities.hasFilter(KnownFfmpegFilters.TonemapOpencl)) + this.ffmpegCapabilities.hasFilter(KnownFfmpegFilters.PadVaapi) || + 
this.ffmpegCapabilities.hasFilter(KnownFfmpegFilters.PadOpencl) ); } } diff --git a/server/src/testing/ffmpeg/FfmpegIntegrationHelper.ts b/server/src/testing/ffmpeg/FfmpegIntegrationHelper.ts index 30fd7f9b..5fb56db4 100644 --- a/server/src/testing/ffmpeg/FfmpegIntegrationHelper.ts +++ b/server/src/testing/ffmpeg/FfmpegIntegrationHelper.ts @@ -11,6 +11,20 @@ import type { VaapiHardwareCapabilities } from '../../ffmpeg/builder/capabilitie import { VaapiHardwareCapabilitiesParser } from '../../ffmpeg/builder/capabilities/VaapiHardwareCapabilitiesParser.ts'; import { FfprobeMediaInfoSchema } from '../../types/ffmpeg.ts'; +function whichFirst(...candidates: string[]): string { + for (const candidate of candidates) { + try { + const result = execFileSync('which', [candidate], { + encoding: 'utf-8' as const, + }).trim(); + if (result) return result; + } catch { + // not found, try next + } + } + return ''; +} + export function discoverFfmpegBinaries(): { ffmpeg: string; ffprobe: string; @@ -18,11 +32,11 @@ export function discoverFfmpegBinaries(): { try { const ffmpeg = process.env['TUNARR_TEST_FFMPEG'] ?? - execFileSync('which', ['ffmpeg'], { encoding: 'utf-8' as const }).trim(); + whichFirst('ffmpeg7.1', 'ffmpeg'); const ffprobe = process.env['TUNARR_TEST_FFPROBE'] ?? - execFileSync('which', ['ffprobe'], { encoding: 'utf-8' as const }).trim(); + whichFirst('ffprobe7.1', 'ffprobe'); if (!ffmpeg || !ffprobe) { return null; @@ -124,6 +138,47 @@ export function probeFile( // Hardware discovery helpers // --------------------------------------------------------------------------- +/** + * Returns true if an OpenCL device can be derived from the given VAAPI device, + * which is the prerequisite for tonemap_opencl pipelines. + * + * Uses -init_hw_device vaapi=va: -init_hw_device opencl=ocl@va — the + * same device-init approach the pipeline builder uses when OpenCL tonemap is + * active. 
A synthetic lavfi source is used so no input file is required; we + * only care that device initialisation succeeds (exit 0), not that any filter + * chain runs. + */ +export function discoverVaapiOpenclSupport( + ffmpegPath: string, + device: string, +): boolean { + try { + const result = spawnSync( + ffmpegPath, + [ + '-hide_banner', + '-init_hw_device', + `vaapi=va:${device}`, + '-init_hw_device', + 'opencl=ocl@va', + '-f', + 'lavfi', + '-i', + 'nullsrc=s=64x64', + '-frames:v', + '1', + '-f', + 'null', + '-', + ], + { encoding: 'utf-8' as const }, + ); + return result.status === 0; + } catch { + return false; + } +} + export type VaapiDeviceInfo = { device: string; capabilities: VaapiHardwareCapabilities; diff --git a/server/src/testing/ffmpeg/FfmpegTestFixtures.ts b/server/src/testing/ffmpeg/FfmpegTestFixtures.ts index f1245b7a..60d12c24 100644 --- a/server/src/testing/ffmpeg/FfmpegTestFixtures.ts +++ b/server/src/testing/ffmpeg/FfmpegTestFixtures.ts @@ -11,6 +11,7 @@ import { discoverNvidiaCapabilities, discoverQsvCapabilities, discoverVaapiDevice, + discoverVaapiOpenclSupport, type VaapiDeviceInfo, } from './FfmpegIntegrationHelper.ts'; @@ -26,6 +27,11 @@ export const nvidiaCaps = binaries ? discoverNvidiaCapabilities(binaries.ffmpeg) : null; +export const vaapiOpenclSupported = + binaries && vaapiInfo + ? 
discoverVaapiOpenclSupport(binaries.ffmpeg, vaapiInfo.device) + : false; + const noopLogger = pino({ level: 'silent' }) as Logger; function makeFfmpegInfo(): FfmpegInfo { diff --git a/server/src/testing/ffmpeg/fixtures/480p_h264.ts b/server/src/testing/ffmpeg/fixtures/480p_h264.ts new file mode 100644 index 00000000..1fa4f559 Binary files /dev/null and b/server/src/testing/ffmpeg/fixtures/480p_h264.ts differ diff --git a/server/src/testing/ffmpeg/fixtures/720p_hevc_hdr10.ts b/server/src/testing/ffmpeg/fixtures/720p_hevc_hdr10.ts new file mode 100644 index 00000000..6c047b82 Binary files /dev/null and b/server/src/testing/ffmpeg/fixtures/720p_hevc_hdr10.ts differ diff --git a/server/src/testing/ffmpeg/fixtures/watermark.png b/server/src/testing/ffmpeg/fixtures/watermark.png new file mode 100644 index 00000000..a102dbb7 Binary files /dev/null and b/server/src/testing/ffmpeg/fixtures/watermark.png differ diff --git a/vitest.config.ts b/vitest.config.ts index 3386134f..fbd05673 100644 --- a/vitest.config.ts +++ b/vitest.config.ts @@ -2,6 +2,11 @@ import { defineConfig } from 'vitest/config'; export default defineConfig({ test: { - projects: ['web', 'server', 'shared'], + projects: [ + 'web/vitest.config.ts', + 'server/vitest.config.ts', + 'server/vitest.local.config.ts', + 'shared', + ], }, });