test: add "integration" style tests that check basic transcoding pipelines

This commit is contained in:
Christian Benincasa
2026-03-19 09:56:03 -04:00
parent ddd2613735
commit 87974b629c
20 changed files with 1419 additions and 27 deletions

View File

@@ -26,6 +26,7 @@
"run-fixer": "dotenv -e .env.development -- tsx src/index.ts fixer",
"test:watch": "vitest --typecheck.tsconfig tsconfig.test.json --watch",
"test": "vitest --typecheck.tsconfig tsconfig.test.json --run",
"test:local": "vitest --config vitest.local.config.ts --typecheck.tsconfig tsconfig.test.json --run",
"tunarr": "dotenv -e .env.development -- tsx src/index.ts",
"typecheck": "cross-env NODE_OPTIONS=--max-old-space-size=8192 tsc -p tsconfig.build.json --noEmit --diagnostics"
},

View File

@@ -83,6 +83,16 @@ const RootModule = new ContainerModule((bind) => {
bind<ISettingsDB>(KEYS.SettingsDB).toDynamicValue((ctx) => {
return ctx.container.get<() => ISettingsDB>('Factory<ISettingsDB>')();
});
bind<string>(KEYS.FFmpegPath).toDynamicValue(
(ctx) =>
ctx.container.get<ISettingsDB>(KEYS.SettingsDB).ffmpegSettings()
.ffmpegExecutablePath,
);
bind<string>(KEYS.FFprobePath).toDynamicValue(
(ctx) =>
ctx.container.get<ISettingsDB>(KEYS.SettingsDB).ffmpegSettings()
.ffprobeExecutablePath,
);
bind<typeof LoggerFactory>(KEYS.LoggerFactory).toConstantValue(LoggerFactory);

View File

@@ -70,9 +70,37 @@ export const AudioFormats = {
Flac: 'flac',
} as const;
export const OutputLocation = {
// Discriminated-union member describing an ffmpeg output written to stdout
// (i.e. piped to the caller rather than written to disk).
export type StdoutOutputLocation = {
  type: 'stdout';
};

// Shared singleton value — stdout output carries no per-use configuration,
// so one constant instance suffices for all pipelines.
export const StdoutOutputLocation: StdoutOutputLocation = {
  type: 'stdout',
};
// Discriminated-union member describing an ffmpeg output written to a file
// on disk, optionally overwriting an existing file at that path.
export type FileOutputLocation = {
  type: 'file';
  path: string;
  overwrite: boolean;
};

/**
 * Convenience factory for a {@link FileOutputLocation}.
 *
 * @param path - Destination path for the transcoded output.
 * @param overwrite - When true, ffmpeg is allowed to clobber an existing
 *   file at `path` (defaults to false).
 */
export function FileOutputLocation(
  path: string,
  overwrite: boolean = false,
): FileOutputLocation {
  const location: FileOutputLocation = {
    type: 'file',
    path,
    overwrite,
  };
  return location;
}
export const OutputLocations = {
Stdout: 'stdout',
} as const;
File: 'file',
} satisfies Record<Capitalize<OutputLocation['type']>, OutputLocation['type']>;
export type OutputLocation = FileOutputLocation | StdoutOutputLocation;
export const OutputFormatTypes = {
None: 'none',
@@ -125,8 +153,6 @@ export const ColorTransferFormats = {
export type ColorTransferFormat =
(typeof ColorTransferFormats)[keyof typeof ColorTransferFormats];
export type OutputLocation = Lowercase<keyof typeof OutputLocation>;
export type HlsOutputFormat = {
type: typeof OutputFormatTypes.Hls;
hlsOptions: HlsOptions;

View File

@@ -55,3 +55,9 @@ export class StandardFormatFlags extends ConstantGlobalOption {
super(['-fflags', '+genpts+discardcorrupt+igndts']);
}
}
export function makeConstantGlobalOption(
...args: [string, ...string[]]
): ConstantGlobalOption {
return new (class extends ConstantGlobalOption {})(args);
}

View File

@@ -61,11 +61,11 @@ import { LoggerFactory } from '@/util/logging/LoggerFactory.js';
import { getTunarrVersion } from '@/util/version.js';
import { filter, first, isNil, isNull, isUndefined, merge } from 'lodash-es';
import type { DeepReadonly, MarkRequired } from 'ts-essentials';
import { P, match } from 'ts-pattern';
import { match, P } from 'ts-pattern';
import {
AudioFormats,
OutputFormatTypes,
OutputLocation,
OutputLocations,
VideoFormats,
} from '../constants.ts';
import {
@@ -93,6 +93,7 @@ import {
} from '../options/AudioOutputOptions.ts';
import {
HideBannerOption,
makeConstantGlobalOption,
NoStdInOption,
StandardFormatFlags,
ThreadCountOption,
@@ -101,6 +102,7 @@ import {
ClosedGopOutputOption,
DoNotMapMetadataOutputOption,
FastStartOutputOption,
makeConstantOutputOption,
MapAllStreamsOutputOption,
MatroskaOutputFormatOption,
MetadataServiceNameOutputOption,
@@ -775,13 +777,10 @@ export abstract class BasePipelineBuilder implements PipelineBuilder {
this.context.videoStream.codec === VideoFormats.Mpeg2Video ||
this.desiredState.videoFormat === VideoFormats.Mpeg2Video ||
this.ffmpegState.decoderHwAccelMode ===
HardwareAccelerationMode.Videotoolbox
) {
this.pipelineSteps.push(NoSceneDetectOutputOption(1_000_000_000));
} else if (
HardwareAccelerationMode.Videotoolbox ||
this.ffmpegState.encoderHwAccelMode === HardwareAccelerationMode.None
) {
this.pipelineSteps.push(NoSceneDetectOutputOption(0));
this.pipelineSteps.push(NoSceneDetectOutputOption(1_000_000_000));
}
}
@@ -874,10 +873,18 @@ export abstract class BasePipelineBuilder implements PipelineBuilder {
this.ffmpegState.outputFormat.type !== OutputFormatTypes.Hls &&
this.ffmpegState.outputFormat.type !== OutputFormatTypes.HlsDirectV2
) {
switch (this.ffmpegState.outputLocation) {
case OutputLocation.Stdout:
switch (this.ffmpegState.outputLocation.type) {
case OutputLocations.Stdout:
this.pipelineSteps.push(PipeProtocolOutputOption());
break;
case OutputLocations.File: {
this.pipelineSteps.push(
makeConstantOutputOption([this.ffmpegState.outputLocation.path]),
);
if (this.ffmpegState.outputLocation.overwrite) {
this.pipelineSteps.push(makeConstantGlobalOption('-y'));
}
}
}
}
}

View File

@@ -0,0 +1,237 @@
import path from 'node:path';
import { fileURLToPath } from 'node:url';
import { FileStreamSource } from '../../../../stream/types.ts';
import {
createTempWorkdir,
probeFile,
runFfmpegWithPipeline,
} from '../../../../testing/ffmpeg/FfmpegIntegrationHelper.ts';
import {
binaries,
qsvInfo,
qsvTest,
} from '../../../../testing/ffmpeg/FfmpegTestFixtures.ts';
import { AudioFormats, FileOutputLocation } from '../../constants.ts';
import { PixelFormatYuv420P } from '../../format/PixelFormat.ts';
import { AudioInputSource } from '../../input/AudioInputSource.ts';
import { VideoInputSource } from '../../input/VideoInputSource.ts';
import { AudioStream, VideoStream } from '../../MediaStream.ts';
import { AudioState } from '../../state/AudioState.ts';
import {
DefaultPipelineOptions,
FfmpegState,
} from '../../state/FfmpegState.ts';
import { FrameState } from '../../state/FrameState.ts';
import { FrameSize } from '../../types.ts';
import { QsvPipelineBuilder } from './QsvPipelineBuilder.ts';
const fixturesDir = path.join(
path.dirname(fileURLToPath(import.meta.url)),
'../../../../testing/ffmpeg/fixtures',
);
const Fixtures = {
video720p: path.join(fixturesDir, '720p_h264.ts'),
video1080p: path.join(fixturesDir, '1080p_h264.ts'),
} as const;
describe.skipIf(!binaries || !qsvInfo)('QsvPipelineBuilder integration', () => {
let workdir: string;
let cleanup: () => Promise<void>;
beforeAll(async () => {
({ dir: workdir, cleanup } = await createTempWorkdir());
});
afterAll(() => cleanup());
function makeVideoInput(inputPath: string, frameSize: FrameSize) {
return VideoInputSource.withStream(
new FileStreamSource(inputPath),
VideoStream.create({
codec: 'h264',
displayAspectRatio: '16:9',
frameSize,
index: 0,
pixelFormat: new PixelFormatYuv420P(),
providedSampleAspectRatio: null,
colorFormat: null,
}),
);
}
function makeAudioInput(inputPath: string) {
return AudioInputSource.withStream(
new FileStreamSource(inputPath),
AudioStream.create({
channels: 2,
codec: 'aac',
index: 1,
}),
AudioState.create({
audioEncoder: AudioFormats.Aac,
audioChannels: 2,
audioBitrate: 192,
audioBufferSize: 384,
}),
);
}
// QsvPipelineBuilder arg order: hardwareCaps, binaryCaps, video, audio, concat, watermark, subtitle
qsvTest('basic h264 qsv transcode', async ({
binaryCapabilities,
ffmpegVersion,
resolvedQsv,
}) => {
const video = makeVideoInput(
Fixtures.video720p,
FrameSize.withDimensions(1280, 720),
);
const audio = makeAudioInput(Fixtures.video720p);
const builder = new QsvPipelineBuilder(
resolvedQsv.capabilities,
binaryCapabilities,
video,
audio,
null,
null,
null,
);
const frameState = new FrameState({
isAnamorphic: false,
scaledSize: FrameSize.withDimensions(1280, 720),
paddedSize: FrameSize.withDimensions(1280, 720),
});
const outputPath = path.join(workdir, 'qsv_transcode.ts');
const pipeline = builder.build(
FfmpegState.create({
version: ffmpegVersion,
outputLocation: FileOutputLocation(outputPath, true),
vaapiDevice: resolvedQsv.device,
}),
frameState,
DefaultPipelineOptions,
);
const { exitCode, stderr } = runFfmpegWithPipeline(
binaries!.ffmpeg,
pipeline.getCommandArgs(),
);
expect(
exitCode,
`Pipeline command failed: ${pipeline.getCommandArgs().join(' ')}\n${stderr}`,
).toBe(0);
const probe = probeFile(binaries!.ffprobe, outputPath);
expect(probe.streams.some((s) => s.codec_type === 'video')).toBe(true);
});
qsvTest('scale from 1080p to 720p via qsv', async ({
binaryCapabilities,
ffmpegVersion,
resolvedQsv,
}) => {
const video = makeVideoInput(
Fixtures.video1080p,
FrameSize.withDimensions(1920, 1080),
);
const audio = makeAudioInput(Fixtures.video1080p);
const builder = new QsvPipelineBuilder(
resolvedQsv.capabilities,
binaryCapabilities,
video,
audio,
null,
null,
null,
);
const frameState = new FrameState({
isAnamorphic: false,
scaledSize: FrameSize.withDimensions(1280, 720),
paddedSize: FrameSize.withDimensions(1280, 720),
});
const outputPath = path.join(workdir, 'qsv_scale.ts');
const pipeline = builder.build(
FfmpegState.create({
version: ffmpegVersion,
outputLocation: FileOutputLocation(outputPath, true),
vaapiDevice: resolvedQsv.device,
}),
frameState,
DefaultPipelineOptions,
);
const { exitCode, stderr } = runFfmpegWithPipeline(
binaries!.ffmpeg,
pipeline.getCommandArgs(),
);
expect(
exitCode,
`Pipeline command failed: ${pipeline.getCommandArgs().join(' ')}\n${stderr}`,
).toBe(0);
const probe = probeFile(binaries!.ffprobe, outputPath);
expect(probe.streams.some((s) => s.codec_type === 'video')).toBe(true);
});
qsvTest('copy mode (qsv pipeline, no hw transcode needed)', async ({
binaryCapabilities,
ffmpegVersion,
resolvedQsv,
}) => {
const video = makeVideoInput(
Fixtures.video720p,
FrameSize.withDimensions(1280, 720),
);
const audio = makeAudioInput(Fixtures.video720p);
const builder = new QsvPipelineBuilder(
resolvedQsv.capabilities,
binaryCapabilities,
video,
audio,
null,
null,
null,
);
const frameState = new FrameState({
isAnamorphic: false,
scaledSize: FrameSize.withDimensions(1280, 720),
paddedSize: FrameSize.withDimensions(1280, 720),
videoFormat: 'copy',
});
const outputPath = path.join(workdir, 'qsv_copy.ts');
const pipeline = builder.build(
FfmpegState.create({
version: ffmpegVersion,
outputLocation: FileOutputLocation(outputPath, true),
vaapiDevice: resolvedQsv.device,
}),
frameState,
DefaultPipelineOptions,
);
const { exitCode, stderr } = runFfmpegWithPipeline(
binaries!.ffmpeg,
pipeline.getCommandArgs(),
);
expect(
exitCode,
`Pipeline command failed: ${pipeline.getCommandArgs().join(' ')}\n${stderr}`,
).toBe(0);
const probe = probeFile(binaries!.ffprobe, outputPath);
expect(probe.streams.some((s) => s.codec_type === 'video')).toBe(true);
});
});

View File

@@ -0,0 +1,237 @@
import path from 'node:path';
import { fileURLToPath } from 'node:url';
import { FileStreamSource } from '../../../../stream/types.ts';
import {
createTempWorkdir,
probeFile,
runFfmpegWithPipeline,
} from '../../../../testing/ffmpeg/FfmpegIntegrationHelper.ts';
import {
binaries,
vaapiInfo,
vaapiTest,
} from '../../../../testing/ffmpeg/FfmpegTestFixtures.ts';
import { AudioFormats, FileOutputLocation } from '../../constants.ts';
import { PixelFormatYuv420P } from '../../format/PixelFormat.ts';
import { AudioInputSource } from '../../input/AudioInputSource.ts';
import { VideoInputSource } from '../../input/VideoInputSource.ts';
import { AudioStream, VideoStream } from '../../MediaStream.ts';
import { AudioState } from '../../state/AudioState.ts';
import {
DefaultPipelineOptions,
FfmpegState,
} from '../../state/FfmpegState.ts';
import { FrameState } from '../../state/FrameState.ts';
import { FrameSize } from '../../types.ts';
import { VaapiPipelineBuilder } from './VaapiPipelineBuilder.ts';
const fixturesDir = path.join(
path.dirname(fileURLToPath(import.meta.url)),
'../../../../testing/ffmpeg/fixtures',
);
const Fixtures = {
video720p: path.join(fixturesDir, '720p_h264.ts'),
video1080p: path.join(fixturesDir, '1080p_h264.ts'),
} as const;
describe.skipIf(!binaries || !vaapiInfo)(
'VaapiPipelineBuilder integration',
() => {
let workdir: string;
let cleanup: () => Promise<void>;
beforeAll(async () => {
({ dir: workdir, cleanup } = await createTempWorkdir());
});
afterAll(() => cleanup());
function makeVideoInput(inputPath: string, frameSize: FrameSize) {
return VideoInputSource.withStream(
new FileStreamSource(inputPath),
VideoStream.create({
codec: 'h264',
profile: 'high',
displayAspectRatio: '16:9',
frameSize,
index: 0,
pixelFormat: new PixelFormatYuv420P(),
providedSampleAspectRatio: null,
colorFormat: null,
}),
);
}
function makeAudioInput(inputPath: string) {
return AudioInputSource.withStream(
new FileStreamSource(inputPath),
AudioStream.create({
channels: 2,
codec: 'aac',
index: 1,
}),
AudioState.create({
audioEncoder: AudioFormats.Aac,
audioChannels: 2,
audioBitrate: 192,
audioBufferSize: 384,
}),
);
}
vaapiTest(
'basic h264 vaapi transcode',
async ({ binaryCapabilities, ffmpegVersion, resolvedVaapi }) => {
const video = makeVideoInput(
Fixtures.video720p,
FrameSize.withDimensions(1280, 720),
);
const audio = makeAudioInput(Fixtures.video720p);
const builder = new VaapiPipelineBuilder(
resolvedVaapi.capabilities,
binaryCapabilities,
video,
audio,
null,
null,
null,
);
const frameState = new FrameState({
isAnamorphic: false,
scaledSize: FrameSize.withDimensions(1280, 720),
paddedSize: FrameSize.withDimensions(1280, 720),
});
const outputPath = path.join(workdir, 'vaapi_transcode.ts');
const pipeline = builder.build(
FfmpegState.create({
version: ffmpegVersion,
outputLocation: FileOutputLocation(outputPath, true),
vaapiDevice: resolvedVaapi.device,
}),
frameState,
DefaultPipelineOptions,
);
const { exitCode, stderr } = runFfmpegWithPipeline(
binaries!.ffmpeg,
pipeline.getCommandArgs(),
);
expect(
exitCode,
`Pipeline command failed: ${pipeline.getCommandArgs().join(' ')}\n${stderr}`,
).toBe(0);
const probe = probeFile(binaries!.ffprobe, outputPath);
expect(probe.streams.some((s) => s.codec_type === 'video')).toBe(true);
},
);
vaapiTest(
'scale from 1080p to 720p via vaapi',
async ({ binaryCapabilities, ffmpegVersion, resolvedVaapi }) => {
const video = makeVideoInput(
Fixtures.video1080p,
FrameSize.withDimensions(1920, 1080),
);
const audio = makeAudioInput(Fixtures.video1080p);
const builder = new VaapiPipelineBuilder(
resolvedVaapi.capabilities,
binaryCapabilities,
video,
audio,
null,
null,
null,
);
const frameState = new FrameState({
isAnamorphic: false,
scaledSize: FrameSize.withDimensions(1280, 720),
paddedSize: FrameSize.withDimensions(1280, 720),
});
const outputPath = path.join(workdir, 'vaapi_scale.ts');
const pipeline = builder.build(
FfmpegState.create({
version: ffmpegVersion,
outputLocation: FileOutputLocation(outputPath, true),
vaapiDevice: resolvedVaapi.device,
}),
frameState,
DefaultPipelineOptions,
);
const { exitCode, stderr } = runFfmpegWithPipeline(
binaries!.ffmpeg,
pipeline.getCommandArgs(),
);
expect(
exitCode,
`Pipeline command failed: ${pipeline.getCommandArgs().join(' ')}\n${stderr}`,
).toBe(0);
const probe = probeFile(binaries!.ffprobe, outputPath);
expect(probe.streams.some((s) => s.codec_type === 'video')).toBe(true);
},
);
vaapiTest(
'copy mode (vaapi pipeline, no hw transcode needed)',
async ({ binaryCapabilities, ffmpegVersion, resolvedVaapi }) => {
const video = makeVideoInput(
Fixtures.video720p,
FrameSize.withDimensions(1280, 720),
);
const audio = makeAudioInput(Fixtures.video720p);
const builder = new VaapiPipelineBuilder(
resolvedVaapi.capabilities,
binaryCapabilities,
video,
audio,
null,
null,
null,
);
const frameState = new FrameState({
isAnamorphic: false,
scaledSize: FrameSize.withDimensions(1280, 720),
paddedSize: FrameSize.withDimensions(1280, 720),
videoFormat: 'copy',
});
const outputPath = path.join(workdir, 'vaapi_copy.ts');
const pipeline = builder.build(
FfmpegState.create({
version: ffmpegVersion,
outputLocation: FileOutputLocation(outputPath, true),
vaapiDevice: resolvedVaapi.device,
}),
frameState,
DefaultPipelineOptions,
);
const { exitCode, stderr } = runFfmpegWithPipeline(
binaries!.ffmpeg,
pipeline.getCommandArgs(),
);
expect(
exitCode,
`Pipeline command failed: ${pipeline.getCommandArgs().join(' ')}\n${stderr}`,
).toBe(0);
const probe = probeFile(binaries!.ffprobe, outputPath);
expect(probe.streams.some((s) => s.codec_type === 'video')).toBe(true);
},
);
},
);

View File

@@ -0,0 +1,237 @@
import path from 'node:path';
import { fileURLToPath } from 'node:url';
import { FileStreamSource } from '../../../../stream/types.ts';
import {
createTempWorkdir,
probeFile,
runFfmpegWithPipeline,
} from '../../../../testing/ffmpeg/FfmpegIntegrationHelper.ts';
import {
binaries,
nvidiaCaps,
nvidiaTest,
} from '../../../../testing/ffmpeg/FfmpegTestFixtures.ts';
import { AudioFormats, FileOutputLocation } from '../../constants.ts';
import { PixelFormatYuv420P } from '../../format/PixelFormat.ts';
import { AudioInputSource } from '../../input/AudioInputSource.ts';
import { VideoInputSource } from '../../input/VideoInputSource.ts';
import { AudioStream, VideoStream } from '../../MediaStream.ts';
import { AudioState } from '../../state/AudioState.ts';
import {
DefaultPipelineOptions,
FfmpegState,
} from '../../state/FfmpegState.ts';
import { FrameState } from '../../state/FrameState.ts';
import { FrameSize } from '../../types.ts';
import { NvidiaPipelineBuilder } from './NvidiaPipelineBuilder.ts';
const fixturesDir = path.join(
path.dirname(fileURLToPath(import.meta.url)),
'../../../../testing/ffmpeg/fixtures',
);
const Fixtures = {
video720p: path.join(fixturesDir, '720p_h264.ts'),
video1080p: path.join(fixturesDir, '1080p_h264.ts'),
} as const;
describe.skipIf(!binaries || !nvidiaCaps)(
'NvidiaPipelineBuilder integration',
() => {
let workdir: string;
let cleanup: () => Promise<void>;
beforeAll(async () => {
({ dir: workdir, cleanup } = await createTempWorkdir());
});
afterAll(() => cleanup());
function makeVideoInput(inputPath: string, frameSize: FrameSize) {
return VideoInputSource.withStream(
new FileStreamSource(inputPath),
VideoStream.create({
codec: 'h264',
displayAspectRatio: '16:9',
frameSize,
index: 0,
pixelFormat: new PixelFormatYuv420P(),
providedSampleAspectRatio: null,
colorFormat: null,
}),
);
}
function makeAudioInput(inputPath: string) {
return AudioInputSource.withStream(
new FileStreamSource(inputPath),
AudioStream.create({
channels: 2,
codec: 'aac',
index: 1,
}),
AudioState.create({
audioEncoder: AudioFormats.Aac,
audioChannels: 2,
audioBitrate: 192,
audioBufferSize: 384,
}),
);
}
// NvidiaPipelineBuilder arg order: hardwareCaps, binaryCaps, video, audio, concat, watermark, subtitle
nvidiaTest('basic h264 nvidia transcode', async ({
binaryCapabilities,
ffmpegVersion,
resolvedNvidia,
}) => {
const video = makeVideoInput(
Fixtures.video720p,
FrameSize.withDimensions(1280, 720),
);
const audio = makeAudioInput(Fixtures.video720p);
const builder = new NvidiaPipelineBuilder(
resolvedNvidia,
binaryCapabilities,
video,
audio,
null,
null,
null,
);
const frameState = new FrameState({
isAnamorphic: false,
scaledSize: FrameSize.withDimensions(1280, 720),
paddedSize: FrameSize.withDimensions(1280, 720),
});
const outputPath = path.join(workdir, 'nvidia_transcode.ts');
const pipeline = builder.build(
FfmpegState.create({
version: ffmpegVersion,
outputLocation: FileOutputLocation(outputPath, true),
}),
frameState,
DefaultPipelineOptions,
);
const { exitCode, stderr } = runFfmpegWithPipeline(
binaries!.ffmpeg,
pipeline.getCommandArgs(),
);
expect(
exitCode,
`Pipeline command failed: ${pipeline.getCommandArgs().join(' ')}\n${stderr}`,
).toBe(0);
const probe = probeFile(binaries!.ffprobe, outputPath);
expect(probe.streams.some((s) => s.codec_type === 'video')).toBe(true);
});
nvidiaTest('scale from 1080p to 720p via nvidia', async ({
binaryCapabilities,
ffmpegVersion,
resolvedNvidia,
}) => {
const video = makeVideoInput(
Fixtures.video1080p,
FrameSize.withDimensions(1920, 1080),
);
const audio = makeAudioInput(Fixtures.video1080p);
const builder = new NvidiaPipelineBuilder(
resolvedNvidia,
binaryCapabilities,
video,
audio,
null,
null,
null,
);
const frameState = new FrameState({
isAnamorphic: false,
scaledSize: FrameSize.withDimensions(1280, 720),
paddedSize: FrameSize.withDimensions(1280, 720),
});
const outputPath = path.join(workdir, 'nvidia_scale.ts');
const pipeline = builder.build(
FfmpegState.create({
version: ffmpegVersion,
outputLocation: FileOutputLocation(outputPath, true),
}),
frameState,
DefaultPipelineOptions,
);
const { exitCode, stderr } = runFfmpegWithPipeline(
binaries!.ffmpeg,
pipeline.getCommandArgs(),
);
expect(
exitCode,
`Pipeline command failed: ${pipeline.getCommandArgs().join(' ')}\n${stderr}`,
).toBe(0);
const probe = probeFile(binaries!.ffprobe, outputPath);
expect(probe.streams.some((s) => s.codec_type === 'video')).toBe(true);
});
nvidiaTest('copy mode (nvidia pipeline, no hw transcode needed)', async ({
binaryCapabilities,
ffmpegVersion,
resolvedNvidia,
}) => {
const video = makeVideoInput(
Fixtures.video720p,
FrameSize.withDimensions(1280, 720),
);
const audio = makeAudioInput(Fixtures.video720p);
const builder = new NvidiaPipelineBuilder(
resolvedNvidia,
binaryCapabilities,
video,
audio,
null,
null,
null,
);
const frameState = new FrameState({
isAnamorphic: false,
scaledSize: FrameSize.withDimensions(1280, 720),
paddedSize: FrameSize.withDimensions(1280, 720),
videoFormat: 'copy',
});
const outputPath = path.join(workdir, 'nvidia_copy.ts');
const pipeline = builder.build(
FfmpegState.create({
version: ffmpegVersion,
outputLocation: FileOutputLocation(outputPath, true),
}),
frameState,
DefaultPipelineOptions,
);
const { exitCode, stderr } = runFfmpegWithPipeline(
binaries!.ffmpeg,
pipeline.getCommandArgs(),
);
expect(
exitCode,
`Pipeline command failed: ${pipeline.getCommandArgs().join(' ')}\n${stderr}`,
).toBe(0);
const probe = probeFile(binaries!.ffprobe, outputPath);
expect(probe.streams.some((s) => s.codec_type === 'video')).toBe(true);
});
},
);

View File

@@ -0,0 +1,226 @@
import path from 'node:path';
import { fileURLToPath } from 'node:url';
import { FileStreamSource } from '../../../../stream/types.ts';
import {
createTempWorkdir,
probeFile,
runFfmpegWithPipeline,
} from '../../../../testing/ffmpeg/FfmpegIntegrationHelper.ts';
import {
binaries,
ffmpegTest,
} from '../../../../testing/ffmpeg/FfmpegTestFixtures.ts';
import { AudioFormats, FileOutputLocation } from '../../constants.ts';
import { PixelFormatYuv420P } from '../../format/PixelFormat.ts';
import { AudioInputSource } from '../../input/AudioInputSource.ts';
import { VideoInputSource } from '../../input/VideoInputSource.ts';
import { AudioStream, VideoStream } from '../../MediaStream.ts';
import { AudioState } from '../../state/AudioState.ts';
import {
DefaultPipelineOptions,
FfmpegState,
} from '../../state/FfmpegState.ts';
import { FrameState } from '../../state/FrameState.ts';
import { FrameSize } from '../../types.ts';
import { SoftwarePipelineBuilder } from './SoftwarePipelineBuilder.ts';
const fixturesDir = path.join(
path.dirname(fileURLToPath(import.meta.url)),
'../../../../testing/ffmpeg/fixtures',
);
const Fixtures = {
video720p: path.join(fixturesDir, '720p_h264.ts'),
video1080p: path.join(fixturesDir, '1080p_h264.ts'),
} as const;
describe.skipIf(!binaries)('SoftwarePipelineBuilder integration', () => {
let workdir: string;
let cleanup: () => Promise<void>;
beforeAll(async () => {
({ dir: workdir, cleanup } = await createTempWorkdir());
});
afterAll(() => cleanup());
function makeVideoInput(inputPath: string, frameSize: FrameSize) {
return VideoInputSource.withStream(
new FileStreamSource(inputPath),
VideoStream.create({
codec: 'h264',
displayAspectRatio: '16:9',
frameSize,
index: 0,
pixelFormat: new PixelFormatYuv420P(),
providedSampleAspectRatio: null,
colorFormat: null,
}),
);
}
function makeAudioInput(inputPath: string) {
return AudioInputSource.withStream(
new FileStreamSource(inputPath),
AudioStream.create({
channels: 2,
codec: 'aac',
index: 1,
}),
AudioState.create({
audioEncoder: AudioFormats.Aac,
audioChannels: 2,
audioBitrate: 192,
audioBufferSize: 384,
}),
);
}
ffmpegTest('basic h264 software transcode', async ({
binaryCapabilities,
ffmpegVersion,
}) => {
const video = makeVideoInput(
Fixtures.video720p,
FrameSize.withDimensions(1280, 720),
);
const audio = makeAudioInput(Fixtures.video720p);
const builder = new SoftwarePipelineBuilder(
video,
audio,
null,
null,
null,
binaryCapabilities,
);
const frameState = new FrameState({
isAnamorphic: false,
scaledSize: FrameSize.withDimensions(1280, 720),
paddedSize: FrameSize.withDimensions(1280, 720),
});
const outputPath = path.join(workdir, 'output_transcode.ts');
const pipeline = builder.build(
FfmpegState.create({
version: ffmpegVersion,
outputLocation: FileOutputLocation(outputPath, true),
}),
frameState,
DefaultPipelineOptions,
);
const { exitCode, stderr } = runFfmpegWithPipeline(
binaries!.ffmpeg,
pipeline.getCommandArgs(),
);
expect(
exitCode,
`Pipeline command failed: ${pipeline.getCommandArgs().join(' ')}\n${stderr}`,
).toBe(0);
const probe = probeFile(binaries!.ffprobe, outputPath);
expect(probe.streams.some((s) => s.codec_type === 'video')).toBe(true);
});
ffmpegTest('scale from 1080p to 720p', async ({
binaryCapabilities,
ffmpegVersion,
}) => {
const video = makeVideoInput(
Fixtures.video1080p,
FrameSize.withDimensions(1920, 1080),
);
const audio = makeAudioInput(Fixtures.video1080p);
const builder = new SoftwarePipelineBuilder(
video,
audio,
null,
null,
null,
binaryCapabilities,
);
const frameState = new FrameState({
isAnamorphic: false,
scaledSize: FrameSize.withDimensions(1280, 720),
paddedSize: FrameSize.withDimensions(1280, 720),
});
const outputPath = path.join(workdir, 'output_scale.ts');
const pipeline = builder.build(
FfmpegState.create({
version: ffmpegVersion,
outputLocation: FileOutputLocation(outputPath, true),
}),
frameState,
DefaultPipelineOptions,
);
const { exitCode, stderr } = runFfmpegWithPipeline(
binaries!.ffmpeg,
pipeline.getCommandArgs(),
);
expect(
exitCode,
`Pipeline command failed: ${pipeline.getCommandArgs().join(' ')}\n${stderr}`,
).toBe(0);
const probe = probeFile(binaries!.ffprobe, outputPath);
expect(probe.streams.some((s) => s.codec_type === 'video')).toBe(true);
});
ffmpegTest('copy mode (no transcode)', async ({
binaryCapabilities,
ffmpegVersion,
}) => {
const video = makeVideoInput(
Fixtures.video720p,
FrameSize.withDimensions(1280, 720),
);
const audio = makeAudioInput(Fixtures.video720p);
const builder = new SoftwarePipelineBuilder(
video,
audio,
null,
null,
null,
binaryCapabilities,
);
const frameState = new FrameState({
isAnamorphic: false,
scaledSize: FrameSize.withDimensions(1280, 720),
paddedSize: FrameSize.withDimensions(1280, 720),
videoFormat: 'copy',
});
const outputPath = path.join(workdir, 'output_copy.ts');
const pipeline = builder.build(
FfmpegState.create({
version: ffmpegVersion,
outputLocation: FileOutputLocation(outputPath, true),
}),
frameState,
DefaultPipelineOptions,
);
const { exitCode, stderr } = runFfmpegWithPipeline(
binaries!.ffmpeg,
pipeline.getCommandArgs(),
);
expect(
exitCode,
`Pipeline command failed: ${pipeline.getCommandArgs().join(' ')}\n${stderr}`,
).toBe(0);
const probe = probeFile(binaries!.ffprobe, outputPath);
expect(probe.streams.some((s) => s.codec_type === 'video')).toBe(true);
});
});

View File

@@ -7,11 +7,11 @@ import type { Duration } from 'dayjs/plugin/duration.js';
import { merge } from 'lodash-es';
import path from 'node:path';
import type { MarkRequired } from 'ts-essentials';
import type { OutputFormat } from '../constants.ts';
import type { OutputFormat, OutputLocation } from '../constants.ts';
import {
MpegTsOutputFormat,
OutputFormatTypes,
OutputLocation,
StdoutOutputLocation,
} from '../constants.ts';
export const VaapiTonemapType = ['vaapi', 'opencl'] as const;
@@ -88,7 +88,7 @@ export class FfmpegState {
vaapiDevice: Nullable<string> = null;
vaapiDriver: Nullable<string> = null;
outputFormat: OutputFormat = MpegTsOutputFormat; // TODO: No
outputLocation: OutputLocation = OutputLocation.Stdout;
outputLocation: OutputLocation = StdoutOutputLocation;
ptsOffset?: number;
tonemapHdr: boolean = false;

View File

@@ -25,7 +25,6 @@ import {
} from 'lodash-es';
import NodeCache from 'node-cache';
import { format } from 'node:util';
import type { ISettingsDB } from '../db/interfaces/ISettingsDB.ts';
import { attempt, isNonEmptyString, parseIntOrNull } from '../util/index.ts';
import { FfmpegCapabilities } from './builder/capabilities/FfmpegCapabilities.ts';
@@ -73,18 +72,11 @@ export class FfmpegInfo {
}
constructor(
@inject(KEYS.SettingsDB) private settingsDB: ISettingsDB,
@inject(KEYS.FFmpegPath) private ffmpegPath: string,
@inject(KEYS.FFprobePath) private ffprobePath: string,
@inject(KEYS.Logger) private logger: Logger,
) {}
private get ffmpegPath() {
return this.settingsDB.ffmpegSettings().ffmpegExecutablePath;
}
private get ffprobePath() {
return this.settingsDB.ffmpegSettings().ffprobeExecutablePath;
}
async seed() {
this.logger.debug('Seeding ffmpeg info');
try {

View File

@@ -0,0 +1,262 @@
import { execFileSync, spawnSync } from 'node:child_process';
import { existsSync } from 'node:fs';
import fs from 'node:fs/promises';
import os from 'node:os';
import path from 'node:path';
import z from 'zod';
import { NvidiaHardwareCapabilities } from '../../ffmpeg/builder/capabilities/NvidiaHardwareCapabilities.ts';
import { parseNvidiaModelAndArchitecture } from '../../ffmpeg/builder/capabilities/NvidiaHardwareCapabilitiesFactory.ts';
import { QsvHardwareCapabilities } from '../../ffmpeg/builder/capabilities/QsvHardwareCapabilities.ts';
import type { VaapiHardwareCapabilities } from '../../ffmpeg/builder/capabilities/VaapiHardwareCapabilities.ts';
import { VaapiHardwareCapabilitiesParser } from '../../ffmpeg/builder/capabilities/VaapiHardwareCapabilitiesParser.ts';
import { FfprobeMediaInfoSchema } from '../../types/ffmpeg.ts';
/**
 * Locates ffmpeg/ffprobe binaries for integration tests.
 *
 * Honors the TUNARR_TEST_FFMPEG / TUNARR_TEST_FFPROBE environment variables
 * first; otherwise resolves the binaries from PATH. Returns null when either
 * binary cannot be found, which callers use to skip the integration suites.
 */
export function discoverFfmpegBinaries(): {
  ffmpeg: string;
  ffprobe: string;
} | null {
  // 'which' only exists on POSIX systems; Windows ships 'where' instead.
  // Without this, discovery always threw on win32 and the suites never ran.
  const lookupCommand = process.platform === 'win32' ? 'where' : 'which';
  const resolveFromPath = (name: string): string => {
    const output = execFileSync(lookupCommand, [name], { encoding: 'utf-8' });
    // 'where' can print multiple matches; take the first line either way.
    return (output.split(/\r?\n/, 1)[0] ?? '').trim();
  };
  try {
    const ffmpeg =
      process.env['TUNARR_TEST_FFMPEG'] ?? resolveFromPath('ffmpeg');
    const ffprobe =
      process.env['TUNARR_TEST_FFPROBE'] ?? resolveFromPath('ffprobe');
    if (!ffmpeg || !ffprobe) {
      return null;
    }
    return { ffmpeg, ffprobe };
  } catch {
    // Lookup command missing or binary not on PATH — treat as "not available".
    return null;
  }
}
/**
 * Creates a unique scratch directory for a test run.
 *
 * @returns The directory path plus a cleanup callback that removes the
 *   directory and everything inside it.
 */
export async function createTempWorkdir(): Promise<{
  dir: string;
  cleanup: () => Promise<void>;
}> {
  const prefix = path.join(os.tmpdir(), 'tunarr-ffmpeg-test-');
  const dir = await fs.mkdtemp(prefix);
  const cleanup = async () => {
    await fs.rm(dir, { recursive: true, force: true });
  };
  return { dir, cleanup };
}
/**
 * Renders a short (3 second) synthetic clip with ffmpeg's lavfi test sources:
 * a 1280x720@30fps `testsrc` video track (h264) and a 1 kHz `sine` audio
 * track (aac), written to `outputPath`.
 *
 * @throws Error when ffmpeg exits non-zero.
 */
export function generateTestMediaFile(
  ffmpegPath: string,
  outputPath: string,
): void {
  const args = [
    // Synthetic video input.
    ...['-f', 'lavfi', '-i', 'testsrc=size=1280x720:rate=30,format=yuv420p'],
    // Synthetic audio input.
    ...['-f', 'lavfi', '-i', 'sine=frequency=1000:duration=3'],
    ...['-t', '3'],
    ...['-c:v', 'libx264'],
    ...['-c:a', 'aac'],
    '-y',
    outputPath,
  ];
  const proc = spawnSync(ffmpegPath, args, { stdio: 'ignore' });
  if (proc.status !== 0) {
    throw new Error(
      `Failed to generate test media file, exit code: ${proc.status}`,
    );
  }
}
/**
 * Executes ffmpeg with a fully-built pipeline argument list, capturing stderr
 * so failures can be surfaced in test assertion messages.
 *
 * @returns The process exit code (-1 when the process was killed before
 *   exiting normally) and captured stderr.
 */
export function runFfmpegWithPipeline(
  ffmpegPath: string,
  pipelineArgs: string[],
) {
  const proc = spawnSync(ffmpegPath, pipelineArgs, {
    stdio: ['ignore', 'ignore', 'pipe'],
  });
  const exitCode = proc.status ?? -1;
  if (exitCode !== 0) {
    // Echo stderr eagerly so CI logs show the failure even if the caller
    // ignores the returned value.
    console.error('ffmpeg failed with stderr: ', proc.stderr);
  }
  return { exitCode, stderr: proc.stderr };
}
/**
 * Runs ffprobe against a file and returns the parsed media info.
 *
 * @throws The spawn error when ffprobe could not be launched, an Error when
 *   it exits non-zero, or a ZodError when its JSON output fails validation.
 */
export function probeFile(
  ffprobePath: string,
  filePath: string,
): z.infer<typeof FfprobeMediaInfoSchema> {
  const result = spawnSync(
    ffprobePath,
    [
      '-hide_banner',
      '-v',
      'quiet',
      '-print_format',
      'json',
      '-show_format',
      '-show_chapters',
      '-show_streams',
      filePath,
    ],
    { encoding: 'utf-8' },
  );
  // Fail loudly here instead of letting JSON.parse choke on empty stdout
  // when ffprobe could not be launched or exited with an error.
  if (result.error) {
    throw result.error;
  }
  if (result.status !== 0) {
    throw new Error(
      `ffprobe exited with code ${result.status} for file: ${filePath}`,
    );
  }
  const parsed = FfprobeMediaInfoSchema.parse(JSON.parse(result.stdout), {
    reportInput: true,
  });
  return parsed;
}
// ---------------------------------------------------------------------------
// Hardware discovery helpers
// ---------------------------------------------------------------------------
/** A usable VAAPI device together with its parsed capabilities. */
export type VaapiDeviceInfo = {
  // DRM render node path, e.g. /dev/dri/renderD128
  device: string;
  // Capabilities extracted from vainfo output
  capabilities: VaapiHardwareCapabilities;
};
/**
 * Returns VAAPI device info if a compatible device exists and vainfo can parse
 * its capabilities, otherwise null. Reads TUNARR_TEST_VAAPI_DEVICE to override
 * the default /dev/dri/renderD128 path.
 */
export function discoverVaapiDevice(): VaapiDeviceInfo | null {
  const device =
    process.env['TUNARR_TEST_VAAPI_DEVICE'] ?? '/dev/dri/renderD128';
  try {
    if (!existsSync(device)) {
      return null;
    }
    const vainfo = spawnSync(
      'vainfo',
      ['--display', 'drm', '--device', device, '-a'],
      { encoding: 'utf-8' as const },
    );
    // Depending on the vainfo version, output lands on stdout or stderr, so
    // scan both before deciding the device is unusable.
    const combined = (vainfo.stdout ?? '') + (vainfo.stderr ?? '');
    if (!combined.includes('VAProfile')) {
      return null;
    }
    const capabilities =
      VaapiHardwareCapabilitiesParser.extractAllFromVaInfo(combined);
    if (!capabilities) {
      return null;
    }
    return { device, capabilities };
  } catch {
    return null;
  }
}
/**
 * Returns QSV capabilities if VAAPI is available and the ffmpeg binary
 * supports h264_qsv decoding, otherwise null.
 */
export function discoverQsvCapabilities(
  ffmpegPath: string,
): { device: string; capabilities: QsvHardwareCapabilities } | null {
  const vaapi = discoverVaapiDevice();
  if (vaapi === null) {
    return null;
  }
  try {
    const helpResult = spawnSync(
      ffmpegPath,
      ['-hide_banner', '-help', 'decoder=h264_qsv'],
      { encoding: 'utf-8' as const },
    );
    const helpText = helpResult.stdout ?? '';
    // When the decoder is unknown, ffmpeg exits non-zero and prints an error.
    if (helpResult.status !== 0 || helpText.includes('Codec h264_qsv is not')) {
      return null;
    }
    // Mirrors QsvHardwareCapabilitiesFactory.getDecoderOptions() parsing:
    // every "-option ..." line after the header contributes an option name.
    const optionPattern = /^-([a-z_]+)\s+.*/m;
    const decoderOptions = helpText
      .split('\n')
      .slice(1)
      .map((line) => line.trim())
      .filter((line) => line.length > 0)
      .map((line) => line.match(optionPattern)?.[1])
      .filter((opt): opt is string => opt !== undefined);
    return {
      device: vaapi.device,
      capabilities: new QsvHardwareCapabilities(
        vaapi.capabilities,
        decoderOptions,
      ),
    };
  } catch {
    return null;
  }
}
/**
 * Returns NvidiaHardwareCapabilities if an NVIDIA GPU is detectable via
 * h264_nvenc, otherwise null.
 */
export function discoverNvidiaCapabilities(
  ffmpegPath: string,
): NvidiaHardwareCapabilities | null {
  const args = [
    '-hide_banner',
    '-f',
    'lavfi',
    '-i',
    'nullsrc',
    '-c:v',
    'h264_nvenc',
    '-gpu',
    'list',
    '-f',
    'null',
    '-',
  ];
  try {
    const result = spawnSync(ffmpegPath, args, {
      encoding: 'utf-8' as const,
    });
    // ffmpeg writes the GPU list to stderr; include stdout for good measure.
    const combined = (result.stdout ?? '') + (result.stderr ?? '');
    for (const line of combined.split('\n')) {
      const gpu = parseNvidiaModelAndArchitecture(line);
      if (gpu) {
        return new NvidiaHardwareCapabilities(gpu.model, gpu.architecture);
      }
    }
    return null;
  } catch {
    return null;
  }
}

View File

@@ -0,0 +1,96 @@
import pino from 'pino';
import { test as base } from 'vitest';
import type { FfmpegCapabilities } from '../../ffmpeg/builder/capabilities/FfmpegCapabilities.ts';
import type { NvidiaHardwareCapabilities } from '../../ffmpeg/builder/capabilities/NvidiaHardwareCapabilities.ts';
import type { QsvHardwareCapabilities } from '../../ffmpeg/builder/capabilities/QsvHardwareCapabilities.ts';
import type { FfmpegVersionResult } from '../../ffmpeg/ffmpegInfo.ts';
import { FfmpegInfo } from '../../ffmpeg/ffmpegInfo.ts';
import type { Logger } from '../../util/logging/LoggerFactory.ts';
import {
discoverFfmpegBinaries,
discoverNvidiaCapabilities,
discoverQsvCapabilities,
discoverVaapiDevice,
type VaapiDeviceInfo,
} from './FfmpegIntegrationHelper.ts';
// Discover local ffmpeg/ffprobe binaries and hardware-acceleration support
// once at module load; each is null when unavailable so test files can gate
// themselves (e.g. with skipIf) on the relevant capability.
export const binaries = discoverFfmpegBinaries();
// VAAPI render device + parsed capabilities, or null when none was found.
export const vaapiInfo = discoverVaapiDevice();
// QSV requires both discovered binaries and an ffmpeg h264_qsv decoder.
export const qsvInfo = binaries
  ? discoverQsvCapabilities(binaries.ffmpeg)
  : null;
// NVIDIA capabilities discovered via ffmpeg's h264_nvenc GPU listing.
export const nvidiaCaps = binaries
  ? discoverNvidiaCapabilities(binaries.ffmpeg)
  : null;
// Silent logger so FfmpegInfo produces no output during test runs.
const noopLogger = pino({ level: 'silent' }) as Logger;
/**
 * Builds an FfmpegInfo directly from the discovered binaries, bypassing the
 * Inversify DI container. Throws with a clear message when binary discovery
 * failed, instead of the opaque TypeError a non-null assertion would cause.
 */
function makeFfmpegInfo(): FfmpegInfo {
  if (!binaries) {
    throw new Error(
      'ffmpeg/ffprobe binaries were not discovered; gate this test on `binaries` being non-null',
    );
  }
  return new FfmpegInfo(binaries.ffmpeg, binaries.ffprobe, noopLogger);
}
/**
 * Fixtures shared by ffmpeg integration tests: a seeded FfmpegInfo plus the
 * capabilities and version reported by the discovered ffmpeg binary.
 */
export type FfmpegTestFixtures = {
  ffmpegInfo: FfmpegInfo;
  binaryCapabilities: FfmpegCapabilities;
  ffmpegVersion: FfmpegVersionResult;
};
/**
 * Base test with file-scoped ffmpeg fixtures: FfmpegInfo is constructed and
 * seeded once per test file, and its capabilities/version are derived lazily.
 */
export const ffmpegTest = base.extend<FfmpegTestFixtures>({
  ffmpegInfo: [
    async ({}, provide) => {
      const info = makeFfmpegInfo();
      await info.seed();
      await provide(info);
    },
    { scope: 'file' },
  ],
  binaryCapabilities: [
    async ({ ffmpegInfo }, provide) => {
      const capabilities = await ffmpegInfo.getCapabilities();
      await provide(capabilities);
    },
    { scope: 'file' },
  ],
  ffmpegVersion: [
    async ({ ffmpegInfo }, provide) => {
      const version = await ffmpegInfo.getVersion();
      await provide(version);
    },
    { scope: 'file' },
  ],
});
/**
 * ffmpegTest extension injecting the discovered VAAPI device. Throws a
 * descriptive error when no device was found (the `!` assertion is erased at
 * runtime, so it would otherwise silently hand tests a null device).
 */
export const vaapiTest = ffmpegTest.extend<{ resolvedVaapi: VaapiDeviceInfo }>({
  resolvedVaapi: [
    async ({}, use) => {
      if (!vaapiInfo) {
        throw new Error(
          'No VAAPI device discovered; gate this test on `vaapiInfo` being non-null',
        );
      }
      await use(vaapiInfo);
    },
    { scope: 'file' },
  ],
});
/**
 * ffmpegTest extension injecting the discovered QSV device/capabilities.
 * Throws a descriptive error when QSV was not discovered (the `!` assertion
 * is erased at runtime, so it would otherwise silently pass null to tests).
 */
export const qsvTest = ffmpegTest.extend<{
  resolvedQsv: { device: string; capabilities: QsvHardwareCapabilities };
}>({
  resolvedQsv: [
    async ({}, use) => {
      if (!qsvInfo) {
        throw new Error(
          'No QSV support discovered; gate this test on `qsvInfo` being non-null',
        );
      }
      await use(qsvInfo);
    },
    { scope: 'file' },
  ],
});
/**
 * ffmpegTest extension injecting discovered NVIDIA capabilities. Throws a
 * descriptive error when no NVIDIA GPU was detected (the `!` assertion is
 * erased at runtime, so it would otherwise silently pass null to tests).
 */
export const nvidiaTest = ffmpegTest.extend<{
  resolvedNvidia: NvidiaHardwareCapabilities;
}>({
  resolvedNvidia: [
    async ({}, use) => {
      if (!nvidiaCaps) {
        throw new Error(
          'No NVIDIA GPU discovered; gate this test on `nvidiaCaps` being non-null',
        );
      }
      await use(nvidiaCaps);
    },
    { scope: 'file' },
  ],
});

Binary file not shown.

Binary file not shown.

View File

@@ -28,6 +28,8 @@ const KEYS = {
),
MediaSourceLibraryRefresher: Symbol.for('MediaSourceLibraryRefresher'),
ProgramDaoMinterFactory: Symbol.for('ProgramDaoMinterFactory'),
FFmpegPath: Symbol.for('FFmpegPath'),
FFprobePath: Symbol.for('FFprobePath'),
// Streaming
HlsSession: Symbol.for('HlsSession'),

View File

@@ -47,6 +47,7 @@
],
"exclude": [
"./src/**/*.ignore.ts",
"./streams/**/*.ts"
"./streams/**/*.ts",
"./src/testing/ffmpeg/**"
]
}

View File

@@ -43,6 +43,9 @@
"generate-env": {},
"test": {
"dependsOn": ["^build"]
},
"test:local": {
"dependsOn": ["^build"]
}
}
}

View File

@@ -12,6 +12,14 @@ export default defineConfig({
globals: true,
watch: false,
includeSource: ['src/**/*.test.ts'],
exclude: [
'**/node_modules/**',
'**/dist/**',
'**/.{idea,git,cache,output,temp}/**',
// Comment this out if developing locally and you want
// to easily debug tests right in an editor like VS Code
'**/*.local.test.ts',
],
setupFiles: [
'src/testing/matchers/PixelFormatMatcher.ts',
'src/testing/matchers/FrameSizeMatcher.ts',

View File

@@ -0,0 +1,41 @@
import swc from '@rollup/plugin-swc';
import path from 'node:path';
import { defineConfig } from 'vitest/config';
// Vitest config for "local" integration tests (*.local.test.ts) that exercise
// real ffmpeg/ffprobe binaries; invoked by the `test:local` package script.
export default defineConfig({
  resolve: {
    alias: {
      // Map `@` to the src directory for absolute-style imports.
      '@': path.resolve(__dirname, 'src'),
    },
  },
  test: {
    globals: true,
    watch: false,
    include: ['src/**/*.local.test.ts'],
    // Real transcoding runs can be slow; allow up to a minute per test.
    testTimeout: 60_000,
    silent: false,
    reporters: ['verbose'],
    typecheck: {
      tsconfig: 'tsconfig.test.json',
    },
  },
  define: {
    // Disable in-source test blocks under this config.
    'import.meta.vitest': false,
  },
  plugins: [
    // Transpile with SWC so decorators and emitted decorator metadata
    // (presumably needed by the Inversify DI setup — TODO confirm) work.
    swc({
      swc: {
        jsc: {
          parser: {
            syntax: 'typescript',
            decorators: true,
          },
          target: 'esnext',
          transform: {
            decoratorMetadata: true,
          },
        },
      },
    }),
  ],
});