fix: ensure scaling happens on hw for vaapi when tonemap env var is false

includes many other integration tests
This commit is contained in:
Christian Benincasa
2026-03-28 11:04:52 -04:00
parent f9851f73cb
commit fcfa537279
10 changed files with 1855 additions and 133 deletions

View File

@@ -6,11 +6,25 @@ export class VaapiHardwareAccelerationOption extends GlobalOption {
constructor( constructor(
private vaapiDevice: string, private vaapiDevice: string,
private canHardwardDecode: boolean, private canHardwardDecode: boolean,
private withOpenclDerivation: boolean = false,
) { ) {
super(); super();
} }
options(): string[] { options(): string[] {
if (this.withOpenclDerivation) {
// Use named device init so that OpenCL can be derived from the VAAPI
// device, which is required for hwmap=derive_device=opencl to work.
const initDevices = [
'-init_hw_device',
`vaapi=va:${this.vaapiDevice}`,
'-init_hw_device',
'opencl=ocl@va',
];
return this.canHardwardDecode
? [...initDevices, '-hwaccel', 'vaapi', '-hwaccel_device', 'va']
: initDevices;
}
return this.canHardwardDecode return this.canHardwardDecode
? ['-hwaccel', 'vaapi', '-vaapi_device', this.vaapiDevice] ? ['-hwaccel', 'vaapi', '-vaapi_device', this.vaapiDevice]
: ['-vaapi_device', this.vaapiDevice]; : ['-vaapi_device', this.vaapiDevice];

View File

@@ -20,6 +20,7 @@ import {
VideoFormats, VideoFormats,
} from '../../constants.ts'; } from '../../constants.ts';
import { PadFilter } from '../../filter/PadFilter.ts'; import { PadFilter } from '../../filter/PadFilter.ts';
import { ScaleFilter } from '../../filter/ScaleFilter.ts';
import { PadVaapiFilter } from '../../filter/vaapi/PadVaapiFilter.ts'; import { PadVaapiFilter } from '../../filter/vaapi/PadVaapiFilter.ts';
import { ScaleVaapiFilter } from '../../filter/vaapi/ScaleVaapiFilter.ts'; import { ScaleVaapiFilter } from '../../filter/vaapi/ScaleVaapiFilter.ts';
import { TonemapVaapiFilter } from '../../filter/vaapi/TonemapVaapiFilter.ts'; import { TonemapVaapiFilter } from '../../filter/vaapi/TonemapVaapiFilter.ts';
@@ -56,6 +57,59 @@ import { FrameSize } from '../../types.ts';
import { Pipeline } from '../Pipeline.ts'; import { Pipeline } from '../Pipeline.ts';
import { VaapiPipelineBuilder } from './VaapiPipelineBuilder.ts'; import { VaapiPipelineBuilder } from './VaapiPipelineBuilder.ts';
// ─── Shared helpers ─────────────────────────────────────────────────────────
// Stubbed ffmpeg version info handed to FfmpegState.create(...) by the
// builder helpers below, so pipelines can be constructed in tests without
// probing a real ffmpeg binary.
const fakeVersion = {
versionString: 'n7.0.2',
majorVersion: 7,
minorVersion: 0,
patchVersion: 2,
isUnknown: false,
};
// 16:9 source already at FHD: squarePixelFrameSize(FHD) = 1920x1080 = paddedSize,
// so neither a scale nor a pad step is required for this stream.
function create169FhdVideoStream(): VideoStream {
  const fullHdProps = {
    index: 0,
    codec: 'h264',
    profile: 'main',
    pixelFormat: new PixelFormatYuv420P(),
    frameSize: FrameSize.FHD,
    displayAspectRatio: '16:9',
    providedSampleAspectRatio: '1:1',
    colorFormat: null,
  };
  return VideoStream.create(fullHdProps);
}
// 4:3 640x480 source: squarePixelFrameSize(FHD) = 1440x1080 while
// paddedSize = 1920x1080 — so a scale IS needed (640 != 1440) and
// pillarbox padding IS needed (1440 != 1920).
function create43VideoStream(): VideoStream {
  const sdProps = {
    index: 0,
    codec: 'h264',
    profile: 'main',
    pixelFormat: new PixelFormatYuv420P(),
    frameSize: FrameSize.withDimensions(640, 480),
    displayAspectRatio: '4:3',
    providedSampleAspectRatio: null,
    colorFormat: null,
  };
  return VideoStream.create(sdProps);
}
// ─── Shared env-var save/restore (applied to all describe blocks) ────────────
// Each test runs against a shallow copy of the original process.env so that
// per-test mutations (e.g. TONEMAP_ENABLED) never leak between tests.
const originalEnv = process.env;
beforeEach(() => {
  process.env = Object.assign({}, originalEnv);
});
afterEach(() => {
  process.env = originalEnv;
});
// ─────────────────────────────────────────────────────────────────────────────
describe('VaapiPipelineBuilder', () => { describe('VaapiPipelineBuilder', () => {
test('should work', () => { test('should work', () => {
const capabilities = new VaapiHardwareCapabilities([]); const capabilities = new VaapiHardwareCapabilities([]);
@@ -477,59 +531,14 @@ describe('VaapiPipelineBuilder', () => {
}); });
describe('VaapiPipelineBuilder pad', () => { describe('VaapiPipelineBuilder pad', () => {
const originalEnv = process.env; // 4:3 video needs pillarboxing: squarePixelFrameSize(FHD) = 1440x1080, paddedSize = 1920x1080
beforeEach(() => {
process.env = { ...originalEnv };
});
afterEach(() => {
process.env = originalEnv;
});
const fakeVersion = {
versionString: 'n7.0.2',
majorVersion: 7,
minorVersion: 0,
patchVersion: 2,
isUnknown: false,
};
// 16:9 FHD video that exactly fills the target: no padding needed
// squarePixelFrameSize(FHD) = 1920x1080 = paddedSize
function create169FhdVideoStream(): VideoStream {
return VideoStream.create({
index: 0,
codec: 'h264',
profile: 'main',
pixelFormat: new PixelFormatYuv420P(),
frameSize: FrameSize.FHD,
displayAspectRatio: '16:9',
providedSampleAspectRatio: '1:1',
colorFormat: null,
});
}
// 4:3 video that needs pillarboxing to fit in 16:9 FHD:
// squarePixelFrameSize(FHD) = 1440x1080, paddedSize = 1920x1080
function create43VideoStream(): VideoStream {
return VideoStream.create({
index: 0,
codec: 'h264',
profile: 'main',
pixelFormat: new PixelFormatYuv420P(),
frameSize: FrameSize.withDimensions(640, 480),
displayAspectRatio: '4:3',
providedSampleAspectRatio: null,
colorFormat: null,
});
}
function buildWithPad(opts: { function buildWithPad(opts: {
videoStream: VideoStream; videoStream: VideoStream;
binaryCapabilities?: FfmpegCapabilities; binaryCapabilities?: FfmpegCapabilities;
disableHardwareDecoding?: boolean; disableHardwareDecoding?: boolean;
disableHardwareEncoding?: boolean; disableHardwareEncoding?: boolean;
watermarkStream?: StillImageStream;
}) { }) {
const capabilities = new VaapiHardwareCapabilities([ const capabilities = new VaapiHardwareCapabilities([
new VaapiProfileEntrypoint( new VaapiProfileEntrypoint(
@@ -556,12 +565,29 @@ describe('VaapiPipelineBuilder pad', () => {
opts.videoStream, opts.videoStream,
); );
let wm: WatermarkInputSource | null = null;
if (opts.watermarkStream) {
wm = new WatermarkInputSource(
new FileStreamSource('/path/to/watermark.png'),
opts.watermarkStream,
{
duration: 0,
enabled: true,
horizontalMargin: 0,
opacity: 1,
position: 'top-left',
verticalMargin: 0,
width: 100,
},
);
}
const builder = new VaapiPipelineBuilder( const builder = new VaapiPipelineBuilder(
capabilities, capabilities,
binaryCapabilities, binaryCapabilities,
video, video,
null, null,
null, wm,
null, null,
null, null,
); );
@@ -746,26 +772,31 @@ describe('VaapiPipelineBuilder pad', () => {
expect(args).not.toContain('pad_vaapi'); expect(args).not.toContain('pad_vaapi');
expect(args).not.toContain('pad='); expect(args).not.toContain('pad=');
}); });
test('hardware download after pad_vaapi with watermark', () => {
const pipeline = buildWithPad({
videoStream: VideoStream.create({
index: 0,
codec: VideoFormats.Mpeg4,
// profile: 'main',
pixelFormat: new PixelFormatYuv420P(),
frameSize: FrameSize.withDimensions(1920, 1050),
displayAspectRatio: '4:3',
providedSampleAspectRatio: null,
colorFormat: null,
}),
watermarkStream: StillImageStream.create({
frameSize: FrameSize.withDimensions(100, 100),
index: 0,
}),
});
const args = pipeline.getCommandArgs();
console.log(args.join(' '));
});
}); });
describe('VaapiPipelineBuilder tonemap', () => { describe('VaapiPipelineBuilder tonemap', () => {
const originalEnv = process.env;
const fakeVersion = {
versionString: 'n7.0.2',
majorVersion: 7,
minorVersion: 0,
patchVersion: 2,
isUnknown: false,
};
beforeEach(() => {
process.env = { ...originalEnv };
});
afterEach(() => {
process.env = originalEnv;
});
function createHdrVideoStream( function createHdrVideoStream(
colorFormat: ColorFormat = new ColorFormat({ colorFormat: ColorFormat = new ColorFormat({
colorRange: ColorRanges.Tv, colorRange: ColorRanges.Tv,
@@ -983,8 +1014,11 @@ describe('VaapiPipelineBuilder tonemap', () => {
}); });
const args = pipeline.getCommandArgs().join(' '); const args = pipeline.getCommandArgs().join(' ');
console.log(args);
const tonemapIndex = args.indexOf('tonemap_opencl'); const tonemapIndex = args.indexOf('tonemap_opencl');
const scaleIndex = args.indexOf('scale_vaapi'); // buildWithTonemap sets scaledSize=FHD=paddedSize, but frames are on hardware after tonemap
// → scale_vaapi is used (frames stay on hardware)
const scaleIndex = args.indexOf('scale_vaapi=');
expect(tonemapIndex).toBeGreaterThan(-1); expect(tonemapIndex).toBeGreaterThan(-1);
expect(scaleIndex).toBeGreaterThan(-1); expect(scaleIndex).toBeGreaterThan(-1);
@@ -1047,8 +1081,11 @@ describe('VaapiPipelineBuilder tonemap', () => {
}); });
const args = pipeline.getCommandArgs().join(' '); const args = pipeline.getCommandArgs().join(' ');
console.log(args);
const tonemapIndex = args.indexOf('tonemap_opencl'); const tonemapIndex = args.indexOf('tonemap_opencl');
const scaleIndex = args.indexOf('scale_vaapi'); // buildWithTonemap sets scaledSize=FHD=paddedSize, but frames are on hardware after tonemap
// → scale_vaapi is used (frames stay on hardware)
const scaleIndex = args.indexOf('scale_vaapi=');
expect(tonemapIndex).toBeGreaterThan(-1); expect(tonemapIndex).toBeGreaterThan(-1);
expect(scaleIndex).toBeGreaterThan(-1); expect(scaleIndex).toBeGreaterThan(-1);
@@ -1280,9 +1317,9 @@ describe('VaapiPipelineBuilder tonemap', () => {
); );
}); });
// This test verifies that software decode triggers a scale_vaapi because of the tonemap // After tonemap uploads frames to hardware, condition 2 (decoder!=VAAPI && frames on Hardware)
// to ensure we don't excessively move frames from hardware <-> software // triggers software scale — frames are downloaded from hardware before the software scale.
test('8-bit yuv420p HDR input uses vaapi tonemap and scale_vaapi (software decode)', () => { test('8-bit yuv420p HDR input uses vaapi tonemap and software scale (software decode)', () => {
process.env[TONEMAP_ENABLED] = 'true'; process.env[TONEMAP_ENABLED] = 'true';
// Unusual but valid: 8-bit stream tagged with HDR color metadata // Unusual but valid: 8-bit stream tagged with HDR color metadata
@@ -1316,16 +1353,19 @@ describe('VaapiPipelineBuilder tonemap', () => {
}, },
}); });
const args = pipeline.getCommandArgs().join(' ');
console.log(args);
const filters = pipeline.getComplexFilter()!.filterChain.videoFilterSteps; const filters = pipeline.getComplexFilter()!.filterChain.videoFilterSteps;
expect(hasVaapiTonemapFilter(pipeline)).to.eq(true); expect(hasVaapiTonemapFilter(pipeline)).to.eq(true);
const scaleFilter = filters.find( // decoder=None, tonemap uploads to hardware → condition 2 fires → ScaleFilter (software scale)
(filter) => filter instanceof ScaleVaapiFilter, expect(filters.some((f) => f instanceof ScaleFilter)).toBe(true);
); expect(filters.some((f) => f instanceof ScaleVaapiFilter)).toBe(false);
expect(scaleFilter).toBeDefined(); // Frames come from hardware → ScaleFilter inserts hwdownload
expect(args).toContain('hwdownload');
expect(args).toContain('scale=');
}); });
// This test verifies that hardware decode also uses scale_vaapi after vaapi tonemap test('8-bit yuv420p HDR input uses vaapi tonemap and hardware scale (hardware decode)', () => {
test('8-bit yuv420p HDR input uses vaapi tonemap and scale_vaapi (hardware decode)', () => {
process.env[TONEMAP_ENABLED] = 'true'; process.env[TONEMAP_ENABLED] = 'true';
// Unusual but valid: 8-bit stream tagged with HDR color metadata // Unusual but valid: 8-bit stream tagged with HDR color metadata
@@ -1358,12 +1398,15 @@ describe('VaapiPipelineBuilder tonemap', () => {
}, },
}); });
const args = pipeline.getCommandArgs().join(' ');
console.log(args);
const filters = pipeline.getComplexFilter()!.filterChain.videoFilterSteps; const filters = pipeline.getComplexFilter()!.filterChain.videoFilterSteps;
expect(hasVaapiTonemapFilter(pipeline)).to.eq(true); expect(hasVaapiTonemapFilter(pipeline)).to.eq(true);
const scaleFilter = filters.find( // Frames on hardware after tonemap → scale_vaapi is used (frames stay on hardware)
(filter) => filter instanceof ScaleVaapiFilter, expect(filters.some((f) => f instanceof ScaleVaapiFilter)).toBe(true);
); expect(filters.some((f) => f instanceof ScaleFilter)).toBe(false);
expect(scaleFilter).toBeDefined(); expect(args).toContain('scale_vaapi=');
expect(args).not.toContain('scale=');
}); });
describe('still image stream', () => { describe('still image stream', () => {
@@ -1450,3 +1493,364 @@ describe('VaapiPipelineBuilder tonemap', () => {
}); });
}); });
}); });
describe('VaapiPipelineBuilder scale', () => {
  // 16:9 1280x720 — squarePixelFrameSize(FHD) = 1920x1080 = paddedSize
  // willNeedPad = false, scale IS needed (1280 != 1920)
  function create169VideoStream(): VideoStream {
    return VideoStream.create({
      index: 0,
      codec: 'h264',
      profile: 'main',
      pixelFormat: new PixelFormatYuv420P(),
      frameSize: FrameSize.withDimensions(1280, 720),
      displayAspectRatio: '16:9',
      providedSampleAspectRatio: '1:1',
      colorFormat: null,
    });
  }

  // 4:3 HDR HEVC stream — squarePixelFrameSize(FHD) = 1440x1080, paddedSize = 1920x1080
  // willNeedPad = true, scale is needed; HDR triggers tonemap when TONEMAP_ENABLED=true
  function createHdr43VideoStream(): VideoStream {
    return VideoStream.create({
      index: 0,
      codec: 'hevc',
      profile: 'main 10',
      pixelFormat: new PixelFormatYuv420P10Le(),
      frameSize: FrameSize.withDimensions(640, 480),
      displayAspectRatio: '4:3',
      providedSampleAspectRatio: null,
      colorFormat: new ColorFormat({
        colorRange: ColorRanges.Tv,
        colorSpace: ColorSpaces.Bt2020nc,
        colorPrimaries: ColorPrimaries.Bt2020,
        colorTransfer: ColorTransferFormats.Smpte2084,
      }),
    });
  }

  // Builds a pipeline with VAAPI decode/encode entrypoints for H264/HEVC and
  // pad_vaapi available by default; individual knobs are overridable per test.
  function buildWithScale(opts: {
    videoStream: VideoStream;
    binaryCapabilities?: FfmpegCapabilities;
    disableHardwareDecoding?: boolean;
    disableHardwareEncoding?: boolean;
    disableHardwareFilters?: boolean;
    deinterlace?: boolean;
  }): Pipeline {
    const capabilities = new VaapiHardwareCapabilities([
      new VaapiProfileEntrypoint(VaapiProfiles.H264Main, VaapiEntrypoint.Decode),
      new VaapiProfileEntrypoint(VaapiProfiles.H264Main, VaapiEntrypoint.Encode),
      new VaapiProfileEntrypoint(
        VaapiProfiles.HevcMain10,
        VaapiEntrypoint.Decode,
      ),
      new VaapiProfileEntrypoint(VaapiProfiles.HevcMain, VaapiEntrypoint.Encode),
    ]);
    const binaryCapabilities =
      opts.binaryCapabilities ??
      new FfmpegCapabilities(
        new Set(),
        new Map(),
        new Set([KnownFfmpegFilters.PadVaapi]),
        new Set(),
      );
    const video = VideoInputSource.withStream(
      new FileStreamSource('/path/to/video.mkv'),
      opts.videoStream,
    );
    const builder = new VaapiPipelineBuilder(
      capabilities,
      binaryCapabilities,
      video,
      null,
      null,
      null,
      null,
    );
    const state = FfmpegState.create({ version: fakeVersion });
    const videoStream = video.streams[0]!;
    return builder.build(
      state,
      new FrameState({
        isAnamorphic: false,
        scaledSize: videoStream.squarePixelFrameSize(FrameSize.FHD),
        paddedSize: FrameSize.FHD,
        pixelFormat: new PixelFormatYuv420P(),
        videoFormat: VideoFormats.H264,
        deinterlace: opts.deinterlace ?? false,
      }),
      {
        ...DefaultPipelineOptions,
        vaapiDevice: '/dev/dri/renderD128',
        disableHardwareDecoding: opts.disableHardwareDecoding ?? false,
        disableHardwareEncoding: opts.disableHardwareEncoding ?? false,
        disableHardwareFilters: opts.disableHardwareFilters ?? false,
      },
    );
  }

  function getVideoFilterSteps(pipeline: Pipeline) {
    return pipeline.getComplexFilter()?.filterChain.videoFilterSteps ?? [];
  }

  // ─── Baseline: hardware scale ────────────────────────────────────────────────
  test('uses scale_vaapi when VAAPI decode+encode, padding is needed, and hw pad is available', () => {
    // Baseline: all conditions for software scale are false → hardware scale
    const pipeline = buildWithScale({ videoStream: create43VideoStream() });
    const args = pipeline.getCommandArgs().join(' ');
    const filters = getVideoFilterSteps(pipeline);
    expect(filters.some((f) => f instanceof ScaleVaapiFilter)).toBe(true);
    expect(filters.some((f) => f instanceof ScaleFilter)).toBe(false);
    expect(args).toContain('scale_vaapi=');
    expect(args).not.toContain('scale=');
  });

  // ─── Condition 1: both decode and encode are None, no deinterlace ─────────────
  test('uses software scale when both hardware decode and encode are disabled (no deinterlace)', () => {
    // decoderMode=None, encoderMode=None, !shouldDeinterlace → condition 1 true → software scale
    const pipeline = buildWithScale({
      videoStream: create43VideoStream(),
      disableHardwareDecoding: true,
      disableHardwareEncoding: true,
    });
    const args = pipeline.getCommandArgs().join(' ');
    const filters = getVideoFilterSteps(pipeline);
    expect(filters.some((f) => f instanceof ScaleFilter)).toBe(true);
    expect(filters.some((f) => f instanceof ScaleVaapiFilter)).toBe(false);
    expect(args).toContain('scale='); // 'scale=' without the vaapi suffix
    expect(args).not.toContain('scale_vaapi=');
  });

  test('uses hardware scale when both decode and encode are disabled but deinterlace is requested', () => {
    // decoderMode=None, encoderMode=None, shouldDeinterlace=true → condition 1 is false
    // All other conditions are also false (padding needed, hw pad available, hw filters enabled)
    // → hardware scale; ScaleVaapiFilter prepends format+hwupload since frames are in software
    const pipeline = buildWithScale({
      videoStream: create43VideoStream(),
      disableHardwareDecoding: true,
      disableHardwareEncoding: true,
      deinterlace: true,
    });
    const args = pipeline.getCommandArgs().join(' ');
    const filters = getVideoFilterSteps(pipeline);
    expect(filters.some((f) => f instanceof ScaleVaapiFilter)).toBe(true);
    expect(filters.some((f) => f instanceof ScaleFilter)).toBe(false);
    // Software frames → ScaleVaapiFilter prepends format+hwupload before scale_vaapi
    expect(args).toContain('hwupload');
    expect(args).toContain('scale_vaapi=');
  });

  // ─── Condition 2: decoder != VAAPI, frame data location decides ──────────────
  test('uses hardware scale when decode is disabled and frames remain in software (no tonemap, padding needed)', () => {
    // decoderMode=None (decode disabled), frames in Software (no tonemap runs)
    // Condition 2: decoder!=VAAPI (true) && frameDataLocation==Hardware (FALSE) → false
    // Old code would software scale here: decoder!=VAAPI && !shouldPerformTonemap && canTonemapOnHardware
    // New code correctly uses hardware scale since frames are not on hardware
    const pipeline = buildWithScale({
      videoStream: create43VideoStream(), // SDR, no tonemap triggered
      binaryCapabilities: new FfmpegCapabilities(
        new Set(),
        new Map(),
        // Has TonemapVaapi (so old condition canTonemapOnHardware=true would fire),
        // but TONEMAP_ENABLED is false so no tonemap actually runs
        new Set([KnownFfmpegFilters.PadVaapi, KnownFfmpegFilters.TonemapVaapi]),
        new Set(),
      ),
      disableHardwareDecoding: true,
      // TONEMAP_ENABLED not set → frames stay in Software
    });
    const args = pipeline.getCommandArgs().join(' ');
    const filters = getVideoFilterSteps(pipeline);
    expect(filters.some((f) => f instanceof ScaleVaapiFilter)).toBe(true);
    expect(filters.some((f) => f instanceof ScaleFilter)).toBe(false);
    // Frames are in Software → ScaleVaapiFilter adds format+hwupload prefix
    expect(args).toContain('hwupload');
    expect(args).toContain('scale_vaapi=');
    expect(args).not.toContain('scale=');
  });

  test('uses software scale when decode is disabled but tonemap uploads frames to hardware', () => {
    // decoderMode=None (decode disabled), tonemap runs and uploads frames to Hardware
    // Condition 2: decoder!=VAAPI (true) && frameDataLocation==Hardware (TRUE after tonemap) → software scale
    process.env[TONEMAP_ENABLED] = 'true';
    const pipeline = buildWithScale({
      videoStream: createHdr43VideoStream(), // HDR + 4:3 → tonemap runs, padding needed
      binaryCapabilities: new FfmpegCapabilities(
        new Set(),
        new Map(),
        // TonemapOpencl matches default vaapiPipelineOptions.tonemapPreference='opencl'
        new Set([KnownFfmpegFilters.PadVaapi, KnownFfmpegFilters.TonemapOpencl]),
        new Set(),
      ),
      disableHardwareDecoding: true,
      // After TonemapOpenclFilter.nextState: frameDataLocation = Hardware
      // → condition 2 fires → software scale
    });
    const args = pipeline.getCommandArgs().join(' ');
    const filters = getVideoFilterSteps(pipeline);
    expect(filters.some((f) => f instanceof ScaleFilter)).toBe(true);
    expect(filters.some((f) => f instanceof ScaleVaapiFilter)).toBe(false);
    // Frames came from hardware (tonemap) → ScaleFilter adds hwdownload before software scale
    expect(args).toContain('hwdownload');
    expect(args).toContain('scale=');
    expect(args).not.toContain('scale_vaapi=');
  });

  // ─── Condition 3: !willNeedPad — scaledSize equals paddedSize ────────────────
  test('uses hardware scale even when no padding is needed (16:9 source, hw decode → frames on hardware)', () => {
    // 1280x720 16:9 → squarePixelFrameSize(FHD) = 1920x1080 = paddedSize → !willNeedPad = true
    // But hw decode puts frames on hardware → condition 3 does NOT fire → scale_vaapi
    const pipeline = buildWithScale({ videoStream: create169VideoStream() });
    const args = pipeline.getCommandArgs().join(' ');
    const filters = getVideoFilterSteps(pipeline);
    expect(filters.some((f) => f instanceof ScaleVaapiFilter)).toBe(true);
    expect(filters.some((f) => f instanceof ScaleFilter)).toBe(false);
    expect(args).toContain('scale_vaapi=');
    expect(args).not.toContain('scale=');
  });

  // ─── Condition 4: !canPadOnHardware — no hardware pad filter available ────────
  test('uses hardware scale when hw pad capability is not available (hw decode → frames on hardware; pad will hwdownload)', () => {
    // 4:3 → needs padding; no PadVaapi/PadOpencl → canPadOnHardware=false
    // But hw decode puts frames on hardware → !canPadOnHardware alone does NOT force sw scale
    // scale_vaapi runs first; PadFilter then auto-prepends hwdownload before software pad
    const pipeline = buildWithScale({
      videoStream: create43VideoStream(),
      binaryCapabilities: new FfmpegCapabilities(
        new Set(),
        new Map(),
        new Set(), // no pad_vaapi, no pad_opencl
        new Set(),
      ),
    });
    const args = pipeline.getCommandArgs().join(' ');
    const filters = getVideoFilterSteps(pipeline);
    expect(filters.some((f) => f instanceof ScaleVaapiFilter)).toBe(true);
    expect(filters.some((f) => f instanceof ScaleFilter)).toBe(false);
    expect(args).toContain('scale_vaapi=');
    expect(args).not.toContain('scale=');
  });

  test('uses hardware scale when only pad_opencl is available (not pad_vaapi)', () => {
    // pad_opencl satisfies canPadOnHardware → hardware scale
    const pipeline = buildWithScale({
      videoStream: create43VideoStream(),
      binaryCapabilities: new FfmpegCapabilities(
        new Set(),
        new Map(),
        new Set([KnownFfmpegFilters.PadOpencl]),
        new Set(),
      ),
    });
    const args = pipeline.getCommandArgs().join(' ');
    const filters = getVideoFilterSteps(pipeline);
    expect(filters.some((f) => f instanceof ScaleVaapiFilter)).toBe(true);
    expect(filters.some((f) => f instanceof ScaleFilter)).toBe(false);
    expect(args).toContain('scale_vaapi=');
  });

  // ─── Condition 5: disableHardwareFilters ─────────────────────────────────────
  test('uses software scale when hardware filters are disabled', () => {
    // disableHardwareFilters → condition 5 fires → software scale
    // Also makes canPadOnHardware=false, but condition 5 fires first
    const pipeline = buildWithScale({
      videoStream: create43VideoStream(),
      disableHardwareFilters: true,
    });
    const args = pipeline.getCommandArgs().join(' ');
    const filters = getVideoFilterSteps(pipeline);
    expect(filters.some((f) => f instanceof ScaleFilter)).toBe(true);
    expect(filters.some((f) => f instanceof ScaleVaapiFilter)).toBe(false);
    expect(args).toContain('scale=');
    expect(args).not.toContain('scale_vaapi=');
  });

  test('disableHardwareFilters overrides available pad_vaapi capability and forces software scale', () => {
    // Even with pad_vaapi available, disableHardwareFilters forces software path
    const pipeline = buildWithScale({
      videoStream: create43VideoStream(),
      binaryCapabilities: new FfmpegCapabilities(
        new Set(),
        new Map(),
        new Set([KnownFfmpegFilters.PadVaapi]),
        new Set(),
      ),
      disableHardwareFilters: true,
    });
    const args = pipeline.getCommandArgs().join(' ');
    expect(args).not.toContain('scale_vaapi=');
    expect(args).not.toContain('pad_vaapi');
    expect(args).toContain('scale=');
  });

  // ─── Regression: HDR content with TONEMAP_ENABLED=false ─────────────────────
  test('uses hardware scale for HDR content when TONEMAP_ENABLED is false (regression)', () => {
    // Regression: HDR content with TONEMAP_ENABLED=false caused software scale because
    // canPadOnHardware() returns false for HDR, and !canPadOnHardware was incorrectly
    // forcing software scale regardless of frame location. With hw decode active, frames
    // are on hardware → scale_vaapi should be used.
    process.env[TONEMAP_ENABLED] = 'false';
    const pipeline = buildWithScale({ videoStream: createHdr43VideoStream() });
    const args = pipeline.getCommandArgs().join(' ');
    const filters = getVideoFilterSteps(pipeline);
    expect(filters.some((f) => f instanceof ScaleVaapiFilter)).toBe(true);
    expect(filters.some((f) => f instanceof ScaleFilter)).toBe(false);
    expect(args).toContain('scale_vaapi=');
    expect(args).not.toContain('scale=');
  });

  // ─── No scale needed ─────────────────────────────────────────────────────────
  test('produces no resize when source already matches desired scaled size', () => {
    // 1920x1080 FHD source → currentState.scaledSize == desiredState.scaledSize
    // setScale creates a ScaleVaapiFilter only for pixel format conversion (not resize).
    // ScaleVaapiFilter with matching sizes outputs 'scale_vaapi=format=...' without dimensions.
    const pipeline = buildWithScale({ videoStream: create169FhdVideoStream() });
    const args = pipeline.getCommandArgs().join(' ');
    const filters = getVideoFilterSteps(pipeline);
    // No software resize
    expect(filters.some((f) => f instanceof ScaleFilter)).toBe(false);
    // scale_vaapi with an actual resize includes 'force_divisible_by'; format-only does not
    expect(args).not.toContain('force_divisible_by');
    expect(args).not.toContain('scale=');
  });
});

View File

@@ -71,6 +71,11 @@ import {
import { FrameDataLocation, RateControlMode } from '../../types.ts'; import { FrameDataLocation, RateControlMode } from '../../types.ts';
export class VaapiPipelineBuilder extends SoftwarePipelineBuilder { export class VaapiPipelineBuilder extends SoftwarePipelineBuilder {
// Set in setHardwareAccelState(); used in setScale() to avoid a
// hwdownload+hwupload cycle that breaks with the named init_hw_device setup
// required for tonemap_opencl.
private willUseOpenclTonemap = false;
constructor( constructor(
private hardwareCapabilities: BaseFfmpegHardwareCapabilities, private hardwareCapabilities: BaseFfmpegHardwareCapabilities,
binaryCapabilities: FfmpegCapabilities, binaryCapabilities: FfmpegCapabilities,
@@ -109,8 +114,26 @@ export class VaapiPipelineBuilder extends SoftwarePipelineBuilder {
} }
if (isNonEmptyString(ffmpegState.vaapiDevice)) { if (isNonEmptyString(ffmpegState.vaapiDevice)) {
// Use OpenCL device derivation when tonemap_opencl will be selected.
// The named init_hw_device approach is required for hwmap=derive_device=opencl
// to resolve the parent VAAPI device. Only enable it when the binary
// actually supports tonemap_opencl (i.e. OpenCL is available on the host).
const { pipelineOptions } = this.context;
this.willUseOpenclTonemap =
!pipelineOptions?.disableHardwareFilters &&
getBooleanEnvVar(TONEMAP_ENABLED, false) &&
isVideoPipelineContext(this.context) &&
isHdrContent(this.context.videoStream) &&
(pipelineOptions?.vaapiPipelineOptions?.tonemapPreference ??
'opencl') === 'opencl' &&
this.ffmpegCapabilities.hasFilter(KnownFfmpegFilters.TonemapOpencl);
this.pipelineSteps.push( this.pipelineSteps.push(
new VaapiHardwareAccelerationOption(ffmpegState.vaapiDevice, canDecode), new VaapiHardwareAccelerationOption(
ffmpegState.vaapiDevice,
canDecode,
this.willUseOpenclTonemap,
),
); );
if (isNonEmptyString(ffmpegState.vaapiDriver)) { if (isNonEmptyString(ffmpegState.vaapiDriver)) {
@@ -327,7 +350,7 @@ export class VaapiPipelineBuilder extends SoftwarePipelineBuilder {
HardwareAccelerationMode.Vaapi && HardwareAccelerationMode.Vaapi &&
currentState.frameDataLocation === FrameDataLocation.Software currentState.frameDataLocation === FrameDataLocation.Software
) { ) {
steps.push(new HardwareUploadVaapiFilter(needsVaapiSetFormat, 64)); steps.push(new HardwareUploadVaapiFilter(needsVaapiSetFormat));
} }
} }
@@ -399,10 +422,15 @@ export class VaapiPipelineBuilder extends SoftwarePipelineBuilder {
} }
let nextState = currentState; let nextState = currentState;
const { desiredState, ffmpegState, shouldDeinterlace, videoStream } = const { desiredState, ffmpegState, shouldDeinterlace, pipelineOptions } =
this.context; this.context;
let scaleOption: FilterOption; let scaleOption: FilterOption;
const willNeedPad = !desiredState.scaledSize.equals(
desiredState.paddedSize,
);
const canPadOnHardware = this.canPadOnHardware();
if ( if (
!currentState.scaledSize.equals(desiredState.scaledSize) && !currentState.scaledSize.equals(desiredState.scaledSize) &&
((ffmpegState.decoderHwAccelMode === HardwareAccelerationMode.None && ((ffmpegState.decoderHwAccelMode === HardwareAccelerationMode.None &&
@@ -412,8 +440,16 @@ export class VaapiPipelineBuilder extends SoftwarePipelineBuilder {
// performed a software decode, we'll have had to upload to hardware to tonemap anyway (most likely) // performed a software decode, we'll have had to upload to hardware to tonemap anyway (most likely)
// so try to continue on hardware if possible // so try to continue on hardware if possible
(ffmpegState.decoderHwAccelMode !== HardwareAccelerationMode.Vaapi && (ffmpegState.decoderHwAccelMode !== HardwareAccelerationMode.Vaapi &&
!this.shouldPerformTonemap(videoStream) && currentState.frameDataLocation === FrameDataLocation.Hardware) ||
this.canTonemapOnHardware())) // Use software scale only when frames are not already on hardware.
// If frames are on hardware (from hw decode or tonemap), keep them
// there and use scale_vaapi — downloading for a software scale and
// re-uploading is wasteful, and breaks the named-device init_hw_device
// setup used for tonemap_opencl. Pad capability does not affect the
// scale decision: if padding requires software, it can hwdownload after.
((!willNeedPad || !canPadOnHardware) &&
currentState.frameDataLocation !== FrameDataLocation.Hardware) ||
pipelineOptions.disableHardwareFilters)
) { ) {
scaleOption = ScaleFilter.create( scaleOption = ScaleFilter.create(
currentState, currentState,
@@ -459,10 +495,9 @@ export class VaapiPipelineBuilder extends SoftwarePipelineBuilder {
} }
// Enabled by default // Enabled by default
const disableHardwarePad = getBooleanEnvVar( const disableHardwarePad =
TUNARR_ENV_VARS.DISABLE_VAAPI_PAD, getBooleanEnvVar(TUNARR_ENV_VARS.DISABLE_VAAPI_PAD, false) ||
false, this.context.pipelineOptions.disableHardwareFilters;
);
let padFilter: Maybe<FilterOption>; let padFilter: Maybe<FilterOption>;
if (isHdrContent(this.context.videoStream)) { if (isHdrContent(this.context.videoStream)) {
padFilter = PadFilter.create(currentState, this.desiredState); padFilter = PadFilter.create(currentState, this.desiredState);
@@ -645,11 +680,25 @@ export class VaapiPipelineBuilder extends SoftwarePipelineBuilder {
); );
} }
private canTonemapOnHardware() { private canPadOnHardware() {
if (!isVideoPipelineContext(this.context)) {
return false;
}
const disableHardwarePad =
getBooleanEnvVar(TUNARR_ENV_VARS.DISABLE_VAAPI_PAD, false) ||
this.context.pipelineOptions.disableHardwareFilters;
if (disableHardwarePad) {
return false;
}
if (isHdrContent(this.context.videoStream)) {
return false;
}
return ( return (
!this.context.pipelineOptions.disableHardwareFilters && this.ffmpegCapabilities.hasFilter(KnownFfmpegFilters.PadVaapi) ||
(this.ffmpegCapabilities.hasFilter(KnownFfmpegFilters.TonemapVaapi) || this.ffmpegCapabilities.hasFilter(KnownFfmpegFilters.PadOpencl)
this.ffmpegCapabilities.hasFilter(KnownFfmpegFilters.TonemapOpencl))
); );
} }
} }

View File

@@ -11,6 +11,20 @@ import type { VaapiHardwareCapabilities } from '../../ffmpeg/builder/capabilitie
import { VaapiHardwareCapabilitiesParser } from '../../ffmpeg/builder/capabilities/VaapiHardwareCapabilitiesParser.ts'; import { VaapiHardwareCapabilitiesParser } from '../../ffmpeg/builder/capabilities/VaapiHardwareCapabilitiesParser.ts';
import { FfprobeMediaInfoSchema } from '../../types/ffmpeg.ts'; import { FfprobeMediaInfoSchema } from '../../types/ffmpeg.ts';
/**
 * Resolves the first of the given binary names that `which` can locate on
 * PATH. Candidates are tried in order; the absolute path of the first hit is
 * returned, or '' when none of them resolve.
 */
function whichFirst(...candidates: string[]): string {
  for (const name of candidates) {
    let resolved = '';
    try {
      resolved = execFileSync('which', [name], {
        encoding: 'utf-8' as const,
      }).trim();
    } catch {
      // `which` exits non-zero when the binary is absent; try the next one.
    }
    if (resolved) {
      return resolved;
    }
  }
  return '';
}
export function discoverFfmpegBinaries(): { export function discoverFfmpegBinaries(): {
ffmpeg: string; ffmpeg: string;
ffprobe: string; ffprobe: string;
@@ -18,11 +32,11 @@ export function discoverFfmpegBinaries(): {
try { try {
const ffmpeg = const ffmpeg =
process.env['TUNARR_TEST_FFMPEG'] ?? process.env['TUNARR_TEST_FFMPEG'] ??
execFileSync('which', ['ffmpeg'], { encoding: 'utf-8' as const }).trim(); whichFirst('ffmpeg7.1', 'ffmpeg');
const ffprobe = const ffprobe =
process.env['TUNARR_TEST_FFPROBE'] ?? process.env['TUNARR_TEST_FFPROBE'] ??
execFileSync('which', ['ffprobe'], { encoding: 'utf-8' as const }).trim(); whichFirst('ffprobe7.1', 'ffprobe');
if (!ffmpeg || !ffprobe) { if (!ffmpeg || !ffprobe) {
return null; return null;
@@ -124,6 +138,47 @@ export function probeFile(
// Hardware discovery helpers // Hardware discovery helpers
// --------------------------------------------------------------------------- // ---------------------------------------------------------------------------
/**
 * Probes whether an OpenCL device can be derived from the given VAAPI device
 * — the prerequisite for tonemap_opencl pipelines.
 *
 * Mirrors the pipeline builder's named-device initialisation
 * (`-init_hw_device vaapi=va:<device> -init_hw_device opencl=ocl@va`). A
 * synthetic lavfi source is used so no input file is needed; success is
 * judged solely on ffmpeg exiting 0 after device initialisation, not on any
 * particular filter chain running.
 */
export function discoverVaapiOpenclSupport(
  ffmpegPath: string,
  device: string,
): boolean {
  const args = [
    '-hide_banner',
    '-init_hw_device',
    `vaapi=va:${device}`,
    '-init_hw_device',
    'opencl=ocl@va',
    '-f',
    'lavfi',
    '-i',
    'nullsrc=s=64x64',
    '-frames:v',
    '1',
    '-f',
    'null',
    '-',
  ];
  try {
    const { status } = spawnSync(ffmpegPath, args, {
      encoding: 'utf-8' as const,
    });
    // status is null when the process could not be spawned at all.
    return status === 0;
  } catch {
    return false;
  }
}
export type VaapiDeviceInfo = { export type VaapiDeviceInfo = {
device: string; device: string;
capabilities: VaapiHardwareCapabilities; capabilities: VaapiHardwareCapabilities;

View File

@@ -11,6 +11,7 @@ import {
discoverNvidiaCapabilities, discoverNvidiaCapabilities,
discoverQsvCapabilities, discoverQsvCapabilities,
discoverVaapiDevice, discoverVaapiDevice,
discoverVaapiOpenclSupport,
type VaapiDeviceInfo, type VaapiDeviceInfo,
} from './FfmpegIntegrationHelper.ts'; } from './FfmpegIntegrationHelper.ts';
@@ -26,6 +27,11 @@ export const nvidiaCaps = binaries
? discoverNvidiaCapabilities(binaries.ffmpeg) ? discoverNvidiaCapabilities(binaries.ffmpeg)
: null; : null;
// True when the discovered ffmpeg can derive an OpenCL context from the
// detected VAAPI device; gates the tonemap_opencl integration tests.
export const vaapiOpenclSupported: boolean = (() => {
  if (!binaries || !vaapiInfo) {
    return false;
  }
  return discoverVaapiOpenclSupport(binaries.ffmpeg, vaapiInfo.device);
})();
const noopLogger = pino({ level: 'silent' }) as Logger; const noopLogger = pino({ level: 'silent' }) as Logger;
function makeFfmpegInfo(): FfmpegInfo { function makeFfmpegInfo(): FfmpegInfo {

Binary file not shown.

Binary file not shown.

Binary file not shown.

After

Width:  |  Height:  |  Size: 307 B

View File

@@ -2,6 +2,11 @@ import { defineConfig } from 'vitest/config';
export default defineConfig({ export default defineConfig({
test: { test: {
projects: ['web', 'server', 'shared'], projects: [
'web/vitest.config.ts',
'server/vitest.config.ts',
'server/vitest.local.config.ts',
'shared',
],
}, },
}); });