fix(qsv): ensure setpts filter is only run on software

This commit is contained in:
Christian Benincasa
2026-04-03 08:03:08 -04:00
parent 4e46aa5c9e
commit 18e84445b1
13 changed files with 222 additions and 108 deletions

View File

@@ -1,8 +1,8 @@
import type { VideoFormat } from '../constants.ts';
import { VideoFormats, type VideoFormat } from '../constants.ts';
import { AudioEncoder, BaseEncoder, VideoEncoder } from './BaseEncoder.ts';
export class CopyVideoEncoder extends VideoEncoder {
protected videoFormat: VideoFormat;
protected videoFormat: VideoFormat = VideoFormats.Copy;
constructor() {
super('copy');

View File

@@ -1,7 +1,6 @@
import dayjs from 'dayjs';
import duration from 'dayjs/plugin/duration.js';
import path from 'node:path';
import { fileURLToPath } from 'node:url';
import { FileStreamSource } from '../../../../stream/types.ts';
import {
createTempWorkdir,
@@ -10,6 +9,8 @@ import {
} from '../../../../testing/ffmpeg/FfmpegIntegrationHelper.ts';
import {
binaries,
deriveVideoStreamForFixture,
Fixtures,
qsvInfo,
qsvTest,
} from '../../../../testing/ffmpeg/FfmpegTestFixtures.ts';
@@ -18,7 +19,10 @@ import {
FileOutputLocation,
VideoFormats,
} from '../../constants.ts';
import { PixelFormatYuv420P } from '../../format/PixelFormat.ts';
import {
PixelFormatYuv420P,
PixelFormatYuv420P10Le,
} from '../../format/PixelFormat.ts';
import {
AudioInputFilterSource,
AudioInputSource,
@@ -42,24 +46,9 @@ import { QsvPipelineBuilder } from './QsvPipelineBuilder.ts';
dayjs.extend(duration);
const fixturesDir = path.join(
path.dirname(fileURLToPath(import.meta.url)),
'../../../../testing/ffmpeg/fixtures',
);
const Fixtures = {
video720p: path.join(fixturesDir, '720p_h264.ts'),
video1080p: path.join(fixturesDir, '1080p_h264.ts'),
video480p43: path.join(fixturesDir, '480p_h264.ts'),
watermark: path.join(fixturesDir, 'watermark.png'),
blackWatermark: path.join(fixturesDir, 'black_watermark.png'),
} as const;
// Limit output to 1 second in all integration tests to keep runs fast
const testDuration = dayjs.duration(1, 'second');
// ─── Shared helpers ───────────────────────────────────────────────────────────
function makeH264VideoInput(inputPath: string, frameSize: FrameSize) {
return VideoInputSource.withStream(
new FileStreamSource(inputPath),
@@ -75,6 +64,21 @@ function makeH264VideoInput(inputPath: string, frameSize: FrameSize) {
);
}
// Builds a VideoInputSource describing a 10-bit HEVC stream (yuv420p10le)
// at the given frame size for the fixture at `inputPath`. Mirrors
// makeH264VideoInput, but with the HEVC codec and a 10-bit pixel format so
// hardware decode paths for high-bit-depth content are exercised.
function makeHevc10BitVideoInput(inputPath: string, frameSize: FrameSize) {
  const source = new FileStreamSource(inputPath);
  const stream = VideoStream.create({
    codec: VideoFormats.Hevc,
    displayAspectRatio: '16:9',
    frameSize,
    index: 0,
    pixelFormat: new PixelFormatYuv420P10Le(),
    providedSampleAspectRatio: null,
    colorFormat: null,
  });
  return VideoInputSource.withStream(source, stream);
}
function make43VideoInput(inputPath: string) {
return VideoInputSource.withStream(
new FileStreamSource(inputPath),
@@ -125,8 +129,6 @@ function makeWatermark(color: 'white' | 'black' = 'white') {
);
}
// ─────────────────────────────────────────────────────────────────────────────
describe.skipIf(!binaries || !qsvInfo)('QsvPipelineBuilder integration', () => {
let workdir: string;
let cleanup: () => Promise<void>;
@@ -291,13 +293,8 @@ describe.skipIf(!binaries || !qsvInfo)('QsvPipelineBuilder integration', () => {
expect(probe.streams.some((s) => s.codec_type === 'video')).toBe(true);
},
);
});
// ─── Pixel format fix integration tests ──────────────────────────────────────
describe.skipIf(!binaries || !qsvInfo)(
'QsvPipelineBuilder pixel format fixes',
() => {
describe('pixel format fixes', () => {
let workdir: string;
let cleanup: () => Promise<void>;
@@ -590,5 +587,55 @@ describe.skipIf(!binaries || !qsvInfo)(
expect(probe.streams.some((s) => s.codec_type === 'video')).toBe(true);
},
);
},
);
});
// Regression test for the setpts fix (see commit title): an HEVC source is
// decoded through the QSV pipeline with a non-zero start offset, which — per
// the test name — exercises the setpts timestamp path that must only run on
// software frames.
qsvTest(
'hevc decoding with setpts',
async ({ binaryCapabilities, resolvedQsv, ffmpegVersion }) => {
// Probe the real HEVC fixture so the input stream metadata matches the file.
const video = await deriveVideoStreamForFixture(Fixtures.videoHevc1080p);
const audio = makeAudioInput(Fixtures.videoHevc1080p);
// Constructor trailing nulls: optional inputs not used by this test —
// NOTE(review): confirm parameter meanings against QsvPipelineBuilder.
const builder = new QsvPipelineBuilder(
resolvedQsv.capabilities,
binaryCapabilities,
video,
audio,
null,
null,
null,
);
// Desired output state: 1280x720, yuv420p, non-anamorphic.
const frameState = new FrameState({
isAnamorphic: false,
scaledSize: FrameSize.withDimensions(1280, 720),
paddedSize: FrameSize.withDimensions(1280, 720),
pixelFormat: new PixelFormatYuv420P(),
});
const outputPath = path.join(workdir, 'qsv_transcode.ts');
const pipeline = builder.build(
FfmpegState.create({
version: ffmpegVersion,
outputLocation: FileOutputLocation(outputPath, true),
vaapiDevice: resolvedQsv.device,
// 1-second start offset triggers the timestamp-rewrite (setpts) path.
start: dayjs.duration({ seconds: 1 }),
}),
frameState,
DefaultPipelineOptions,
);
const { exitCode, stderr } = runFfmpegWithPipeline(
binaries!.ffmpeg,
pipeline.getCommandArgs(),
);
// Include the full command line and stderr in the failure message so a
// broken pipeline is debuggable from CI output alone.
expect(
exitCode,
`Pipeline command failed: ${pipeline.getCommandArgs().join(' ')}\n${stderr}`,
).toBe(0);
// Sanity check: the transcoded file actually contains a video stream.
const probe = probeFile(binaries!.ffprobe, outputPath);
expect(probe.streams.some((s) => s.codec_type === 'video')).toBe(true);
},
);
});

View File

@@ -165,6 +165,18 @@ export class QsvPipelineBuilder extends SoftwarePipelineBuilder {
currentState = this.decoder.nextState(currentState);
}
currentState = this.setDeinterlace(currentState);
currentState = this.setScale(currentState);
currentState = this.setTonemap(currentState);
currentState = this.setPad(currentState);
this.setStillImageLoop();
if (currentState.frameDataLocation === FrameDataLocation.Hardware) {
const hwDownload = new HardwareDownloadFilter(currentState);
currentState = hwDownload.nextState(currentState);
this.videoInputSource.filterSteps.push(hwDownload);
}
if (this.desiredState.videoFormat !== VideoFormats.Copy) {
currentState = this.addFilterToVideoChain(
currentState,
@@ -193,21 +205,6 @@ export class QsvPipelineBuilder extends SoftwarePipelineBuilder {
}
}
currentState = this.setDeinterlace(currentState);
currentState = this.setScale(currentState);
currentState = this.setTonemap(currentState);
currentState = this.setPad(currentState);
this.setStillImageLoop();
if (
currentState.frameDataLocation === FrameDataLocation.Hardware &&
this.context.hasWatermark
) {
const hwDownload = new HardwareDownloadFilter(currentState);
currentState = hwDownload.nextState(currentState);
this.videoInputSource.filterSteps.push(hwDownload);
}
currentState = this.setWatermark(currentState);
const noEncoderSteps = every(
@@ -438,13 +435,22 @@ export class QsvPipelineBuilder extends SoftwarePipelineBuilder {
// Only emit -pix_fmt for software encoders; QSV encoders don't accept
// a -pix_fmt flag and it causes swscaler errors with hardware frames.
if (
currentState.pixelFormat?.name !== targetPixelFormat.name &&
this.ffmpegState.encoderHwAccelMode !== HardwareAccelerationMode.Qsv
) {
// TODO: Handle color params
this.pipelineSteps.push(new PixelFormatOutputOption(targetPixelFormat));
if (currentState.pixelFormat?.name !== targetPixelFormat.name) {
if (
this.ffmpegState.encoderHwAccelMode === HardwareAccelerationMode.Qsv
) {
steps.push(
new QsvFormatFilter(
targetPixelFormat.toHardwareFormat() ?? targetPixelFormat,
),
);
} else {
this.pipelineSteps.push(
new PixelFormatOutputOption(targetPixelFormat),
);
}
}
// TODO: Handle color params
this.context.filterChain.pixelFormatFilterSteps = steps;
} else if (

View File

@@ -1,10 +1,7 @@
import dayjs from 'dayjs';
import duration from 'dayjs/plugin/duration.js';
import path from 'node:path';
import { fileURLToPath } from 'node:url';
import { FileStreamSource } from '../../../../stream/types.ts';
dayjs.extend(duration);
import {
createTempWorkdir,
probeFile,
@@ -12,10 +9,12 @@ import {
} from '../../../../testing/ffmpeg/FfmpegIntegrationHelper.ts';
import {
binaries,
Fixtures,
vaapiInfo,
vaapiOpenclSupported,
vaapiTest,
} from '../../../../testing/ffmpeg/FfmpegTestFixtures.ts';
import { TONEMAP_ENABLED, TUNARR_ENV_VARS } from '../../../../util/env.ts';
import {
EmptyFfmpegCapabilities,
FfmpegCapabilities,
@@ -60,21 +59,9 @@ import {
} from '../../state/FfmpegState.ts';
import { FrameState } from '../../state/FrameState.ts';
import { FrameSize } from '../../types.ts';
import { TONEMAP_ENABLED, TUNARR_ENV_VARS } from '../../../../util/env.ts';
import { VaapiPipelineBuilder } from './VaapiPipelineBuilder.ts';
const fixturesDir = path.join(
path.dirname(fileURLToPath(import.meta.url)),
'../../../../testing/ffmpeg/fixtures',
);
const Fixtures = {
video720p: path.join(fixturesDir, '720p_h264.ts'),
video1080p: path.join(fixturesDir, '1080p_h264.ts'),
video480p43: path.join(fixturesDir, '480p_h264.ts'),
videoHdr720p: path.join(fixturesDir, '720p_hevc_hdr10.ts'),
watermark: path.join(fixturesDir, 'watermark.png'),
} as const;
dayjs.extend(duration);
// Limit output to 1 second in all integration tests to keep runs fast
const testDuration = dayjs.duration(1, 'second');
@@ -905,10 +892,22 @@ describe.skipIf(!binaries || !vaapiInfo)(
// Capabilities that include both H264 and HEVC decode/encode
// Capabilities that include both H264 and HEVC decode/encode.
// NOTE: the rendered diff merged the removed (compact) lines with the added
// (reformatted) lines, listing each entrypoint twice; the final state of the
// function contains each profile/entrypoint pair exactly once.
function makeVaapiCapsWithH264AndHevc() {
  return new VaapiHardwareCapabilities([
    new VaapiProfileEntrypoint(
      VaapiProfiles.H264Main,
      VaapiEntrypoint.Decode,
    ),
    new VaapiProfileEntrypoint(
      VaapiProfiles.H264Main,
      VaapiEntrypoint.Encode,
    ),
    new VaapiProfileEntrypoint(
      VaapiProfiles.HevcMain10,
      VaapiEntrypoint.Decode,
    ),
    new VaapiProfileEntrypoint(
      VaapiProfiles.HevcMain,
      VaapiEntrypoint.Encode,
    ),
  ]);
}
@@ -1210,9 +1209,18 @@ describe.skipIf(!binaries || !vaapiInfo || !canDecodeHdr)(
// Minimal HDR-capable VAAPI capabilities: HEVC Main10 decode plus HEVC and
// H264 encode entrypoints.
// NOTE: the rendered diff merged the removed (compact) lines with the added
// (reformatted) lines, duplicating every entrypoint; the final state of the
// function contains each profile/entrypoint pair exactly once.
function makeHdrVaapiCapabilities() {
  return new VaapiHardwareCapabilities([
    new VaapiProfileEntrypoint(
      VaapiProfiles.HevcMain10,
      VaapiEntrypoint.Decode,
    ),
    new VaapiProfileEntrypoint(
      VaapiProfiles.HevcMain,
      VaapiEntrypoint.Encode,
    ),
    new VaapiProfileEntrypoint(
      VaapiProfiles.H264Main,
      VaapiEntrypoint.Encode,
    ),
  ]);
}
@@ -1231,14 +1239,17 @@ describe.skipIf(!binaries || !vaapiInfo || !canDecodeHdr)(
async ({ binaryCapabilities, ffmpegVersion, resolvedVaapi }) => {
process.env[TONEMAP_ENABLED] = 'true';
const video = makeHdrVideoInput(Fixtures.videoHdr720p);
const audio = makeAudioInput(Fixtures.videoHdr720p);
const video = makeHdrVideoInput(Fixtures.videoHevc720p);
const audio = makeAudioInput(Fixtures.videoHevc720p);
// Merge TonemapOpencl into real capabilities so this path is available
const capsWithOpencl = new FfmpegCapabilities(
binaryCapabilities.allOptions(),
binaryCapabilities.allVideoEncoders(),
new Set([...binaryCapabilities.allFilters(), KnownFfmpegFilters.TonemapOpencl]),
new Set([
...binaryCapabilities.allFilters(),
KnownFfmpegFilters.TonemapOpencl,
]),
new Set(),
);
@@ -1285,8 +1296,8 @@ describe.skipIf(!binaries || !vaapiInfo || !canDecodeHdr)(
'TONEMAP_ENABLED=false skips tonemapping',
async ({ binaryCapabilities, ffmpegVersion, resolvedVaapi }) => {
// No TONEMAP_ENABLED set (defaults to off)
const video = makeHdrVideoInput(Fixtures.videoHdr720p);
const audio = makeAudioInput(Fixtures.videoHdr720p);
const video = makeHdrVideoInput(Fixtures.videoHevc720p);
const audio = makeAudioInput(Fixtures.videoHevc720p);
const builder = new VaapiPipelineBuilder(
makeHdrVaapiCapabilities(),
@@ -1329,8 +1340,8 @@ describe.skipIf(!binaries || !vaapiInfo || !canDecodeHdr)(
async ({ ffmpegVersion, resolvedVaapi }) => {
process.env[TONEMAP_ENABLED] = 'true';
const video = makeHdrVideoInput(Fixtures.videoHdr720p);
const audio = makeAudioInput(Fixtures.videoHdr720p);
const video = makeHdrVideoInput(Fixtures.videoHevc720p);
const audio = makeAudioInput(Fixtures.videoHevc720p);
const builder = new VaapiPipelineBuilder(
makeHdrVaapiCapabilities(),
@@ -1373,14 +1384,17 @@ describe.skipIf(!binaries || !vaapiInfo || !canDecodeHdr)(
async ({ binaryCapabilities, ffmpegVersion, resolvedVaapi }) => {
process.env[TONEMAP_ENABLED] = 'true';
const video = makeHdrVideoInput(Fixtures.videoHdr720p);
const audio = makeAudioInput(Fixtures.videoHdr720p);
const video = makeHdrVideoInput(Fixtures.videoHevc720p);
const audio = makeAudioInput(Fixtures.videoHevc720p);
// Force only tonemap_vaapi to be available
const capsWithVaapiTonemap = new FfmpegCapabilities(
binaryCapabilities.allOptions(),
binaryCapabilities.allVideoEncoders(),
new Set([...binaryCapabilities.allFilters(), KnownFfmpegFilters.TonemapVaapi]),
new Set([
...binaryCapabilities.allFilters(),
KnownFfmpegFilters.TonemapVaapi,
]),
new Set(),
);

View File

@@ -1,5 +1,4 @@
import path from 'node:path';
import { fileURLToPath } from 'node:url';
import { FileStreamSource } from '../../../../stream/types.ts';
import {
createTempWorkdir,
@@ -8,6 +7,7 @@ import {
} from '../../../../testing/ffmpeg/FfmpegIntegrationHelper.ts';
import {
binaries,
Fixtures,
nvidiaCaps,
nvidiaTest,
} from '../../../../testing/ffmpeg/FfmpegTestFixtures.ts';
@@ -25,15 +25,6 @@ import { FrameState } from '../../state/FrameState.ts';
import { FrameSize } from '../../types.ts';
import { NvidiaPipelineBuilder } from './NvidiaPipelineBuilder.ts';
const fixturesDir = path.join(
path.dirname(fileURLToPath(import.meta.url)),
'../../../../testing/ffmpeg/fixtures',
);
const Fixtures = {
video720p: path.join(fixturesDir, '720p_h264.ts'),
video1080p: path.join(fixturesDir, '1080p_h264.ts'),
} as const;
describe.skipIf(!binaries || !nvidiaCaps)(
'NvidiaPipelineBuilder integration',

View File

@@ -1,5 +1,4 @@
import path from 'node:path';
import { fileURLToPath } from 'node:url';
import { FileStreamSource } from '../../../../stream/types.ts';
import {
createTempWorkdir,
@@ -9,6 +8,7 @@ import {
import {
binaries,
ffmpegTest,
Fixtures,
} from '../../../../testing/ffmpeg/FfmpegTestFixtures.ts';
import { AudioFormats, FileOutputLocation } from '../../constants.ts';
import { PixelFormatYuv420P } from '../../format/PixelFormat.ts';
@@ -24,15 +24,6 @@ import { FrameState } from '../../state/FrameState.ts';
import { FrameSize } from '../../types.ts';
import { SoftwarePipelineBuilder } from './SoftwarePipelineBuilder.ts';
const fixturesDir = path.join(
path.dirname(fileURLToPath(import.meta.url)),
'../../../../testing/ffmpeg/fixtures',
);
const Fixtures = {
video720p: path.join(fixturesDir, '720p_h264.ts'),
video1080p: path.join(fixturesDir, '1080p_h264.ts'),
} as const;
describe.skipIf(!binaries)('SoftwarePipelineBuilder integration', () => {
let workdir: string;

View File

@@ -31,24 +31,23 @@ export function discoverFfmpegBinaries(): {
} | null {
try {
const ffmpeg =
process.env['TUNARR_TEST_FFMPEG'] ??
whichFirst('ffmpeg7.1', 'ffmpeg');
process.env['TUNARR_TEST_FFMPEG'] ?? whichFirst('ffmpeg7.1', 'ffmpeg');
const ffprobe =
process.env['TUNARR_TEST_FFPROBE'] ??
whichFirst('ffprobe7.1', 'ffprobe');
process.env['TUNARR_TEST_FFPROBE'] ?? whichFirst('ffprobe7.1', 'ffprobe');
if (!ffmpeg || !ffprobe) {
return null;
}
console.debug('Resolved ffmpeg binaries: ', ffmpeg, ffprobe);
return { ffmpeg, ffprobe };
} catch {
return null;
}
}
export async function createTempWorkdir(): Promise<{
dir: string;
cleanup: () => Promise<void>;

View File

@@ -1,3 +1,16 @@
import { VideoStream } from '@/ffmpeg/builder/MediaStream.js';
import { VideoFormats } from '@/ffmpeg/builder/constants.js';
import { ColorFormat } from '@/ffmpeg/builder/format/ColorFormat.js';
import {
KnownPixelFormats,
PixelFormatUnknown,
} from '@/ffmpeg/builder/format/PixelFormat.js';
import { VideoInputSource } from '@/ffmpeg/builder/input/VideoInputSource.js';
import { FrameSize } from '@/ffmpeg/builder/types.js';
import { FfprobeStreamDetails } from '@/stream/FfprobeStreamDetails.js';
import { FileStreamSource } from '@/stream/types.js';
import path from 'node:path';
import { fileURLToPath } from 'node:url';
import pino from 'pino';
import { test as base } from 'vitest';
import type { FfmpegCapabilities } from '../../ffmpeg/builder/capabilities/FfmpegCapabilities.ts';
@@ -15,6 +28,59 @@ import {
type VaapiDeviceInfo,
} from './FfmpegIntegrationHelper.ts';
// Absolute path to the shared fixtures directory, resolved relative to this
// module so tests work regardless of the process working directory.
const fixturesDir = path.join(
path.dirname(fileURLToPath(import.meta.url)),
'fixtures',
);
// Shared media fixture paths used across the ffmpeg integration tests,
// centralized here so individual builder test files no longer declare their
// own copies.
// NOTE(review): videoHevc720p/videoHevc1080p point at HDR10 source files per
// their filenames — confirm SDR HEVC fixtures are not also needed.
export const Fixtures = {
video720p: path.join(fixturesDir, '720p_h264.ts'),
video1080p: path.join(fixturesDir, '1080p_h264.ts'),
video480p43: path.join(fixturesDir, '480p_h264.ts'),
videoHevc720p: path.join(fixturesDir, '720p_hevc_hdr10.ts'),
videoHevc1080p: path.join(fixturesDir, '1080p_hevc_hdr10.mp4'),
watermark: path.join(fixturesDir, 'watermark.png'),
blackWatermark: path.join(fixturesDir, 'black_watermark.png'),
} as const;
export async function deriveVideoStreamForFixture(
fixturePath: string,
): Promise<VideoInputSource> {
const ffprobeStreamDetails = new FfprobeStreamDetails(makeFfmpegInfo());
const { streamDetails } = (
await ffprobeStreamDetails.getStream({ path: fixturePath })
).getOrThrow();
if (!streamDetails.videoDetails) {
throw new Error(`File at ${fixturePath} has no video streams`);
}
const videoDetails = streamDetails.videoDetails[0];
return VideoInputSource.withStream(
new FileStreamSource(fixturePath),
VideoStream.create({
codec: videoDetails.codec ?? VideoFormats.Undetermined,
profile: videoDetails.profile,
displayAspectRatio: videoDetails.displayAspectRatio,
frameSize: FrameSize.withDimensions(
videoDetails.width,
videoDetails.height,
),
pixelFormat: videoDetails.pixelFormat
? (KnownPixelFormats.forPixelFormat(videoDetails.pixelFormat) ??
PixelFormatUnknown(videoDetails.bitDepth ?? 8))
: PixelFormatUnknown(videoDetails.bitDepth ?? 8),
providedSampleAspectRatio: videoDetails.sampleAspectRatio ?? null,
frameRate: videoDetails.framerate?.toString(),
index: videoDetails.streamIndex ?? 0,
colorFormat: new ColorFormat({
colorRange: videoDetails.colorRange ?? null,
colorSpace: videoDetails.colorSpace ?? null,
colorTransfer: videoDetails.colorTransfer ?? null,
colorPrimaries: videoDetails.colorPrimaries ?? null,
}),
}),
);
}
export const binaries = discoverFfmpegBinaries();
export const vaapiInfo = discoverVaapiDevice();

Binary file not shown.

Binary file not shown.

Binary file not shown.