feat!: expose advanced transcode parameters (#1347)

This feature is mainly to provide a migration path from legacy transcode
pipeline users to the new pipeline in preparation for the former's
deprecation and removal. In general, users should not need to use this
first set of parameters (disabling various hardware accelerated
features), as doing so implies a bug that should be fixed. However, this
should provide a smoother path for legacy holdouts to get onto the
new pipeline and allow them to experiment with the full, new pipeline,
without potentially wrecking their existing setup (via multiple
transcode configs).

Closes #1090
This commit is contained in:
Christian Benincasa
2025-08-27 16:48:46 -04:00
committed by GitHub
parent 29591a2473
commit f13e3bbefb
49 changed files with 3334 additions and 778 deletions

File diff suppressed because one or more lines are too long

6
pnpm-lock.yaml generated
View File

@@ -398,12 +398,18 @@ importers:
specifier: ^4.0.17
version: 4.0.17
devDependencies:
'@rollup/plugin-swc':
specifier: ^0.4.0
version: 0.4.0(@swc/core@1.10.9)(rollup@4.20.0)
'@types/lodash-es':
specifier: 4.17.9
version: 4.17.9
'@types/node':
specifier: 22.10.7
version: 22.10.7
'@vitest/coverage-v8':
specifier: ^3.2.4
version: 3.2.4(vitest@3.2.4(@types/debug@4.1.12)(@types/node@22.10.7))
rimraf:
specifier: ^5.0.5
version: 5.0.5

View File

@@ -0,0 +1,3 @@
ALTER TABLE `transcode_config` ADD `disable_hardware_decoder` integer DEFAULT false;--> statement-breakpoint
ALTER TABLE `transcode_config` ADD `disable_hardware_encoding` integer DEFAULT false;--> statement-breakpoint
ALTER TABLE `transcode_config` ADD `disable_hardware_filters` integer DEFAULT false;

File diff suppressed because it is too large Load Diff

View File

@@ -64,6 +64,13 @@
"when": 1748345244715,
"tag": "0008_gray_jean_grey",
"breakpoints": true
},
{
"idx": 9,
"version": "6",
"when": 1756312519937,
"tag": "0009_orange_night_nurse",
"breakpoints": true
}
]
}

View File

@@ -212,7 +212,7 @@ export const ffmpegSettingsRouter: RouterPluginCallback = (
{
schema: {
params: z.object({
id: z.string().uuid(),
id: z.uuid(),
}),
response: {
200: TranscodeConfigSchema,

View File

@@ -74,6 +74,9 @@ export class TranscodeConfigDB {
deinterlaceVideo: booleanToNumber(config.deinterlaceVideo),
disableChannelOverlay: booleanToNumber(config.disableChannelOverlay),
isDefault: booleanToNumber(config.disableChannelOverlay),
disableHardwareDecoder: booleanToNumber(config.disableHardwareDecoder),
disableHardwareEncoding: booleanToNumber(config.disableHardwareEncoding),
disableHardwareFilters: booleanToNumber(config.disableHardwareFilters),
};
return this.db
@@ -118,6 +121,15 @@ export class TranscodeConfigDB {
updatedConfig.disableChannelOverlay,
),
isDefault: booleanToNumber(updatedConfig.isDefault),
disableHardwareDecoder: booleanToNumber(
updatedConfig.disableHardwareDecoder,
),
disableHardwareEncoding: booleanToNumber(
updatedConfig.disableHardwareEncoding,
),
disableHardwareFilters: booleanToNumber(
updatedConfig.disableHardwareFilters,
),
};
return this.db
@@ -173,8 +185,7 @@ export class TranscodeConfigDB {
await tx
.deleteFrom('transcodeConfig')
.where('uuid', '=', id)
// TODO: Blocked in https://github.com/oven-sh/bun/issues/16909
// .limit(1)
.limit(1)
.execute();
return;
}
@@ -192,8 +203,7 @@ export class TranscodeConfigDB {
.updateTable('transcodeConfig')
.set('isDefault', 1)
.where('uuid', '=', newDefaultConfig.uuid)
// TODO: Blocked on https://github.com/oven-sh/bun/issues/16909
// .limit(1)
.limit(1)
.execute();
await tx
.updateTable('channel')
@@ -204,8 +214,7 @@ export class TranscodeConfigDB {
await tx
.deleteFrom('transcodeConfig')
.where('uuid', '=', id)
// TODO: Blocked on https://github.com/oven-sh/bun/issues/16909
// .limit(1)
.limit(1)
.execute();
});
}

View File

@@ -12,5 +12,8 @@ export function dbTranscodeConfigToApiSchema(
normalizeFrameRate: numberToBoolean(config.normalizeFrameRate),
deinterlaceVideo: numberToBoolean(config.deinterlaceVideo),
isDefault: numberToBoolean(config.isDefault),
disableHardwareDecoder: numberToBoolean(config.disableHardwareDecoder),
disableHardwareEncoding: numberToBoolean(config.disableHardwareEncoding),
disableHardwareFilters: numberToBoolean(config.disableHardwareFilters),
} satisfies TranscodeConfig;
}

View File

@@ -86,7 +86,7 @@ export type ErrorScreenType = TupleToUnion<typeof ErrorScreenTypes>;
export const ErrorScreenAudioTypes = ['silent', 'sine', 'whitenoise'] as const;
export type ErrorScreenAudioType = TupleToUnion<typeof ErrorScreenAudioTypes>;
export const TranscodeConfigColumns: (keyof TrannscodeConfigTable)[] = [
export const TranscodeConfigColumns: (keyof TranscodeConfigTable)[] = [
'audioBitRate',
'audioBufferSize',
'audioChannels',
@@ -115,7 +115,7 @@ export const TranscodeConfigColumns: (keyof TrannscodeConfigTable)[] = [
] as const;
type TranscodeConfigFields<Alias extends string = 'transcodeConfig'> =
readonly `${Alias}.${keyof TrannscodeConfigTable}`[];
readonly `${Alias}.${keyof TranscodeConfigTable}`[];
export const AllTranscodeConfigColumns: TranscodeConfigFields =
TranscodeConfigColumns.map((key) => `transcodeConfig.${key}` as const);
@@ -156,6 +156,10 @@ export const TranscodeConfig = sqliteTable(
.notNull(),
isDefault: integer({ mode: 'boolean' }).default(false).notNull(),
disableHardwareDecoder: integer({ mode: 'boolean' }).default(false),
disableHardwareEncoding: integer({ mode: 'boolean' }).default(false),
disableHardwareFilters: integer({ mode: 'boolean' }).default(false),
},
(table) => [
check(
@@ -191,10 +195,10 @@ export const TranscodeConfig = sqliteTable(
],
);
export type TrannscodeConfigTable = KyselifyBetter<typeof TranscodeConfig>;
export type TranscodeConfig = Selectable<TrannscodeConfigTable>;
export type NewTranscodeConfig = Insertable<TrannscodeConfigTable>;
export type TranscodeConfigUpdate = Updateable<TrannscodeConfigTable>;
export type TranscodeConfigTable = KyselifyBetter<typeof TranscodeConfig>;
export type TranscodeConfig = Selectable<TranscodeConfigTable>;
export type NewTranscodeConfig = Insertable<TranscodeConfigTable>;
export type TranscodeConfigUpdate = Updateable<TranscodeConfigTable>;
export const defaultTranscodeConfig = (
isDefault?: boolean,

View File

@@ -18,7 +18,7 @@ import type {
ChannelSubtitlePreferencesTable,
CustomShowSubtitlePreferencesTable,
} from './SubtitlePreferences.ts';
import type { TrannscodeConfigTable } from './TranscodeConfig.ts';
import type { TranscodeConfigTable } from './TranscodeConfig.ts';
export interface DB {
cachedImage: CachedImageTable;
@@ -38,7 +38,7 @@ export interface DB {
programExternalId: ProgramExternalIdTable;
programGrouping: ProgramGroupingTable;
programGroupingExternalId: ProgramGroupingExternalIdTable;
transcodeConfig: TrannscodeConfigTable;
transcodeConfig: TranscodeConfigTable;
// Legacy migration table
mikroOrmMigrations: MikroOrmMigrationsTable;

View File

@@ -51,7 +51,11 @@ import { VideoInputSource } from './builder/input/VideoInputSource.ts';
import { WatermarkInputSource } from './builder/input/WatermarkInputSource.ts';
import type { PipelineBuilderFactory } from './builder/pipeline/PipelineBuilderFactory.ts';
import { AudioState } from './builder/state/AudioState.ts';
import { FfmpegState } from './builder/state/FfmpegState.ts';
import type { PipelineOptions } from './builder/state/FfmpegState.ts';
import {
DefaultPipelineOptions,
FfmpegState,
} from './builder/state/FfmpegState.ts';
import { FrameState } from './builder/state/FrameState.ts';
import { FrameSize } from './builder/types.ts';
import type { ConcatOptions, StreamSessionCreateArgs } from './ffmpeg.ts';
@@ -249,6 +253,7 @@ export class FfmpegStreamFactory extends IFFMPEG {
videoFormat: playbackParams.videoFormat,
// videoPreset: playbackParams.video
}),
DefaultPipelineOptions,
);
pipeline.inputs.concatInput?.addOptions(
@@ -491,6 +496,14 @@ export class FfmpegStreamFactory extends IFFMPEG {
this.transcodeConfig.resolution,
);
const pipelineOptions: PipelineOptions = {
...DefaultPipelineOptions,
decoderThreadCount: this.transcodeConfig.threadCount,
encoderThreadCount: this.transcodeConfig.threadCount,
vaapiDevice: this.getVaapiDevice(),
vaapiDriver: this.getVaapiDriver(),
};
const pipeline = builder.build(
FfmpegState.create({
version: await this.ffmpegInfo.getVersion(),
@@ -520,6 +533,7 @@ export class FfmpegStreamFactory extends IFFMPEG {
videoProfile: null, // 'main', // TODO:
deinterlace: playbackParams.deinterlace,
}),
pipelineOptions,
);
return new FfmpegTranscodeSession(
@@ -651,6 +665,7 @@ export class FfmpegStreamFactory extends IFFMPEG {
videoProfile: null, // TODO:
deinterlace: false,
}),
DefaultPipelineOptions,
);
return new FfmpegTranscodeSession(
@@ -757,6 +772,7 @@ export class FfmpegStreamFactory extends IFFMPEG {
videoProfile: null, // TODO:
deinterlace: false,
}),
DefaultPipelineOptions,
);
return new FfmpegTranscodeSession(
@@ -777,7 +793,7 @@ export class FfmpegStreamFactory extends IFFMPEG {
? this.transcodeConfig.vaapiDevice
: isLinux()
? '/dev/dri/renderD128'
: undefined;
: null;
}
private getVaapiDriver() {

View File

@@ -3,7 +3,7 @@ import type { FrameState } from '@/ffmpeg/builder/state/FrameState.js';
import type { FrameSize } from '@/ffmpeg/builder/types.js';
import { FrameDataLocation } from '@/ffmpeg/builder/types.js';
import { isNonEmptyString } from '@/util/index.js';
import type { HardwareAccelerationMode } from '../../../db/schema/TranscodeConfig.ts';
import { HardwareAccelerationMode } from '../../../db/schema/TranscodeConfig.js';
import type { Nullable } from '../../../types/util.ts';
import { FilterOption } from './FilterOption.ts';
import { HardwareDownloadCudaFilter } from './nvidia/HardwareDownloadCudaFilter.ts';
@@ -23,7 +23,7 @@ export class PadFilter extends FilterOption {
super();
this.desiredPaddedSize = desiredState.paddedSize;
this.hwDownloadFilter =
decoderHwAccelMode === 'cuda'
decoderHwAccelMode === HardwareAccelerationMode.Cuda
? new HardwareDownloadCudaFilter(this.currentState.pixelFormat, null)
: new HardwareDownloadFilter(this.currentState);
this.filter = this.generateFilter();
@@ -34,7 +34,11 @@ export class PadFilter extends FilterOption {
}
static forCuda(currentState: FrameState, desiredState: FrameState) {
return new PadFilter('cuda', currentState, desiredState);
return new PadFilter(
HardwareAccelerationMode.Cuda,
currentState,
desiredState,
);
}
nextState(currentState: FrameState): FrameState {

View File

@@ -40,7 +40,10 @@ import { ReadrateInputOption } from '@/ffmpeg/builder/options/input/ReadrateInpu
import { StreamSeekInputOption } from '@/ffmpeg/builder/options/input/StreamSeekInputOption.js';
import { UserAgentInputOption } from '@/ffmpeg/builder/options/input/UserAgentInputOption.js';
import type { AudioState } from '@/ffmpeg/builder/state/AudioState.js';
import type { FfmpegState } from '@/ffmpeg/builder/state/FfmpegState.js';
import type {
FfmpegState,
PipelineOptions,
} from '@/ffmpeg/builder/state/FfmpegState.js';
import type { FrameState } from '@/ffmpeg/builder/state/FrameState.js';
import type { DataProps } from '@/ffmpeg/builder/types.js';
import { FrameDataLocation } from '@/ffmpeg/builder/types.js';
@@ -54,7 +57,7 @@ import type { Logger } from '@/util/logging/LoggerFactory.js';
import { LoggerFactory } from '@/util/logging/LoggerFactory.js';
import { getTunarrVersion } from '@/util/version.js';
import { filter, first, isNil, isNull, isUndefined, merge } from 'lodash-es';
import type { MarkRequired } from 'ts-essentials';
import type { DeepReadonly, MarkRequired } from 'ts-essentials';
import { P, match } from 'ts-pattern';
import {
OutputFormatTypes,
@@ -133,23 +136,8 @@ export type PipelineAudioFunctionArgs = {
pipelineSteps: IPipelineStep[];
};
// export type PipelineBuilderContext = {
// videoStream?: VideoStream;
// audioStream?: AudioStream;
// ffmpegState: FfmpegState;
// desiredState: FrameState;
// desiredAudioState?: AudioState;
// pipelineSteps: PipelineStep[];
// filterChain: FilterChain;
// hasWatermark: boolean;
// hasSubtitleOverlay: boolean;
// shouldDeinterlace: boolean;
// is10BitOutput: boolean;
// isIntelVaapiOrQsv: boolean;
// };
type PipelineBuilderContextProps = DataProps<PipelineBuilderContext>;
export class PipelineBuilderContext {
videoStream?: VideoStream;
audioStream?: AudioStream;
@@ -157,6 +145,8 @@ export class PipelineBuilderContext {
ffmpegState: FfmpegState;
desiredState: FrameState;
desiredAudioState?: AudioState;
pipelineOptions: DeepReadonly<PipelineOptions>;
pipelineSteps: PipelineStep[];
filterChain: FilterChain;
hasWatermark: boolean;
@@ -329,7 +319,11 @@ export abstract class BasePipelineBuilder implements PipelineBuilder {
});
}
build(ffmpegState: FfmpegState, desiredState: FrameState): Pipeline {
build(
ffmpegState: FfmpegState,
desiredState: FrameState,
pipelineOptions: PipelineOptions,
): Pipeline {
this.context = new PipelineBuilderContext({
videoStream: first(this.videoInputSource.streams),
audioStream: first(this.audioInputSource?.streams),
@@ -343,6 +337,7 @@ export abstract class BasePipelineBuilder implements PipelineBuilder {
is10BitOutput: (desiredState.pixelFormat?.bitDepth ?? 8) === 10,
shouldDeinterlace: desiredState.deinterlace,
isIntelVaapiOrQsv: false,
pipelineOptions,
});
this.logger.debug(
@@ -475,10 +470,6 @@ export abstract class BasePipelineBuilder implements PipelineBuilder {
),
);
if (isNull(this.audioInputSource)) {
this.pipelineSteps.push(new CopyAudioEncoder());
}
return new Pipeline(this.pipelineSteps, {
videoInput: this.videoInputSource,
audioInput: this.audioInputSource,

View File

@@ -1,5 +1,8 @@
import type { ConcatInputSource } from '@/ffmpeg/builder/input/ConcatInputSource.js';
import type { FfmpegState } from '@/ffmpeg/builder/state/FfmpegState.js';
import type {
FfmpegState,
PipelineOptions,
} from '@/ffmpeg/builder/state/FfmpegState.js';
import type { FrameState } from '@/ffmpeg/builder/state/FrameState.js';
import type { Nullable } from '@/types/util.js';
import type { Pipeline } from './Pipeline.ts';
@@ -29,5 +32,9 @@ export interface PipelineBuilder {
* @param currentState
* @param desiredState
*/
build(currentState: FfmpegState, desiredState: FrameState): Pipeline;
build(
currentState: FfmpegState,
desiredState: FrameState,
pipelineOptions: PipelineOptions,
): Pipeline;
}

View File

@@ -76,14 +76,22 @@ export class QsvPipelineBuilder extends SoftwarePipelineBuilder {
const { videoStream, desiredState } = this.context;
let canDecode = this.hardwareCapabilities.canDecodeVideoStream(videoStream);
let canEncode = this.hardwareCapabilities.canEncodeState(desiredState);
let canDecode = this.context.pipelineOptions?.disableHardwareDecoding
? false
: this.hardwareCapabilities.canDecodeVideoStream(videoStream);
let canEncode = this.context.pipelineOptions?.disableHardwareEncoding
? false
: this.hardwareCapabilities.canEncodeState(desiredState);
if (this.ffmpegState.outputFormat.type === OutputFormatTypes.Nut) {
if (
canEncode &&
this.ffmpegState.outputFormat.type === OutputFormatTypes.Nut
) {
canEncode = false;
}
if (
canDecode &&
(this.context.videoStream.codec === VideoFormats.H264 ||
this.context.videoStream.codec === VideoFormats.Hevc) &&
this.context.videoStream.pixelFormat?.bitDepth === 10
@@ -243,7 +251,10 @@ export class QsvPipelineBuilder extends SoftwarePipelineBuilder {
!desiredState.scaledSize.equals(desiredState.paddedSize);
let scaleFilter: FilterOption;
if (needsScale && (noHardware || onlySoftwareFilters)) {
if (
this.context.pipelineOptions.disableHardwareFilters ||
(needsScale && (noHardware || onlySoftwareFilters))
) {
scaleFilter = ScaleFilter.create(
currentState,
ffmpegState,

View File

@@ -1,6 +1,11 @@
import { FileStreamSource } from '../../../../stream/types.ts';
import { FfmpegCapabilities } from '../../capabilities/FfmpegCapabilities.ts';
import { VaapiHardwareCapabilities } from '../../capabilities/VaapiHardwareCapabilities.ts';
import {
VaapiEntrypoint,
VaapiHardwareCapabilities,
VaapiProfileEntrypoint,
VaapiProfiles,
} from '../../capabilities/VaapiHardwareCapabilities.ts';
import { PixelFormatYuv420P } from '../../format/PixelFormat.ts';
import { SubtitlesInputSource } from '../../input/SubtitlesInputSource.ts';
import { VideoInputSource } from '../../input/VideoInputSource.ts';
@@ -11,7 +16,10 @@ import {
SubtitleMethods,
VideoStream,
} from '../../MediaStream.ts';
import { FfmpegState } from '../../state/FfmpegState.ts';
import {
DefaultPipelineOptions,
FfmpegState,
} from '../../state/FfmpegState.ts';
import { FrameState } from '../../state/FrameState.ts';
import { FrameSize } from '../../types.ts';
import { VaapiPipelineBuilder } from './VaapiPipelineBuilder.ts';
@@ -86,6 +94,265 @@ describe('VaapiPipelineBuilder', () => {
paddedSize: FrameSize.FHD,
pixelFormat: new PixelFormatYuv420P(),
}),
DefaultPipelineOptions,
);
console.log(out.getCommandArgs().join(' '));
});
test('should work, decoding disabled', () => {
const capabilities = new VaapiHardwareCapabilities([
new VaapiProfileEntrypoint(
VaapiProfiles.H264Main,
VaapiEntrypoint.Decode,
),
new VaapiProfileEntrypoint(
VaapiProfiles.H264Main,
VaapiEntrypoint.Encode,
),
]);
const binaryCapabilities = new FfmpegCapabilities(
new Set(),
new Map(),
new Set(),
);
const video = VideoInputSource.withStream(
new FileStreamSource('/path/to/video.mkv'),
VideoStream.create({
codec: 'h264',
displayAspectRatio: '16:9',
frameSize: FrameSize.withDimensions(1920, 900),
index: 0,
pixelFormat: new PixelFormatYuv420P(),
sampleAspectRatio: null,
}),
);
const watermark = new WatermarkInputSource(
new FileStreamSource('/path/to/watermark.jpg'),
StillImageStream.create({
frameSize: FrameSize.withDimensions(800, 600),
index: 0,
}),
{
duration: 5,
enabled: true,
horizontalMargin: 5,
opacity: 100,
position: 'bottom-right',
verticalMargin: 5,
width: 10,
},
);
const builder = new VaapiPipelineBuilder(
capabilities,
binaryCapabilities,
video,
null,
watermark,
new SubtitlesInputSource(
new FileStreamSource('/path/to/video.mkv'),
[new EmbeddedSubtitleStream('pgs', 5, SubtitleMethods.Burn)],
SubtitleMethods.Burn,
),
null,
);
const state = FfmpegState.create({
version: {
versionString: 'n7.0.2-15-g0458a86656-20240904',
majorVersion: 7,
minorVersion: 0,
patchVersion: 2,
isUnknown: false,
},
// start: +dayjs.duration(0),
});
const out = builder.build(
state,
new FrameState({
isAnamorphic: false,
scaledSize: video.streams[0].squarePixelFrameSize(FrameSize.FHD),
paddedSize: FrameSize.FHD,
pixelFormat: new PixelFormatYuv420P(),
videoFormat: 'h264',
}),
{ ...DefaultPipelineOptions, disableHardwareDecoding: true },
);
console.log(out.getCommandArgs().join(' '));
});
test('should work, encoding disabled', () => {
const capabilities = new VaapiHardwareCapabilities([
new VaapiProfileEntrypoint(
VaapiProfiles.H264Main,
VaapiEntrypoint.Decode,
),
new VaapiProfileEntrypoint(
VaapiProfiles.H264Main,
VaapiEntrypoint.Encode,
),
]);
const binaryCapabilities = new FfmpegCapabilities(
new Set(),
new Map(),
new Set(),
);
const video = VideoInputSource.withStream(
new FileStreamSource('/path/to/video.mkv'),
VideoStream.create({
codec: 'h264',
profile: 'main',
displayAspectRatio: '16:9',
frameSize: FrameSize.withDimensions(1920, 900),
index: 0,
pixelFormat: new PixelFormatYuv420P(),
sampleAspectRatio: null,
}),
);
const watermark = new WatermarkInputSource(
new FileStreamSource('/path/to/watermark.jpg'),
StillImageStream.create({
frameSize: FrameSize.withDimensions(800, 600),
index: 0,
}),
{
duration: 0,
enabled: true,
horizontalMargin: 5,
opacity: 100,
position: 'bottom-right',
verticalMargin: 5,
width: 10,
},
);
const builder = new VaapiPipelineBuilder(
capabilities,
binaryCapabilities,
video,
null,
watermark,
// new SubtitlesInputSource(
// new FileStreamSource('/path/to/video.mkv'),
// [new EmbeddedSubtitleStream('pgs', 5, SubtitleMethods.Burn)],
// SubtitleMethods.Burn,
// ),
null,
null,
);
const state = FfmpegState.create({
version: {
versionString: 'n7.0.2-15-g0458a86656-20240904',
majorVersion: 7,
minorVersion: 0,
patchVersion: 2,
isUnknown: false,
},
// start: +dayjs.duration(0),
});
const out = builder.build(
state,
new FrameState({
isAnamorphic: false,
scaledSize: video.streams[0].squarePixelFrameSize(FrameSize.FHD),
paddedSize: FrameSize.FHD,
pixelFormat: new PixelFormatYuv420P(),
videoFormat: 'h264',
}),
{ ...DefaultPipelineOptions, disableHardwareEncoding: true },
);
console.log(out.getCommandArgs().join(' '));
});
test('should work, filters disabled', () => {
const capabilities = new VaapiHardwareCapabilities([
new VaapiProfileEntrypoint(
VaapiProfiles.H264Main,
VaapiEntrypoint.Decode,
),
new VaapiProfileEntrypoint(
VaapiProfiles.H264Main,
VaapiEntrypoint.Encode,
),
]);
const binaryCapabilities = new FfmpegCapabilities(
new Set(),
new Map(),
new Set(),
);
const video = VideoInputSource.withStream(
new FileStreamSource('/path/to/video.mkv'),
VideoStream.create({
codec: 'h264',
profile: 'main',
displayAspectRatio: '16:9',
frameSize: FrameSize.withDimensions(1920, 900),
index: 0,
pixelFormat: new PixelFormatYuv420P(),
sampleAspectRatio: null,
}),
);
const watermark = new WatermarkInputSource(
new FileStreamSource('/path/to/watermark.jpg'),
StillImageStream.create({
frameSize: FrameSize.withDimensions(800, 600),
index: 0,
}),
{
duration: 5,
enabled: true,
horizontalMargin: 5,
opacity: 100,
position: 'bottom-right',
verticalMargin: 5,
width: 10,
},
);
const builder = new VaapiPipelineBuilder(
capabilities,
binaryCapabilities,
video,
null,
watermark,
new SubtitlesInputSource(
new FileStreamSource('/path/to/video.mkv'),
[new EmbeddedSubtitleStream('pgs', 5, SubtitleMethods.Burn)],
SubtitleMethods.Burn,
),
null,
);
const state = FfmpegState.create({
version: {
versionString: 'n7.0.2-15-g0458a86656-20240904',
majorVersion: 7,
minorVersion: 0,
patchVersion: 2,
isUnknown: false,
},
// start: +dayjs.duration(0),
});
const out = builder.build(
state,
new FrameState({
isAnamorphic: false,
scaledSize: video.streams[0].squarePixelFrameSize(FrameSize.FHD),
paddedSize: FrameSize.FHD,
pixelFormat: new PixelFormatYuv420P(),
videoFormat: 'h264',
}),
{ ...DefaultPipelineOptions, disableHardwareFilters: true },
);
console.log(out.getCommandArgs().join(' '));

View File

@@ -85,11 +85,14 @@ export class VaapiPipelineBuilder extends SoftwarePipelineBuilder {
const { videoStream, desiredState, ffmpegState } = this.context;
const canDecode =
this.hardwareCapabilities.canDecodeVideoStream(videoStream);
let canEncode = this.hardwareCapabilities.canEncodeState(desiredState);
const canDecode = this.context.pipelineOptions?.disableHardwareDecoding
? false
: this.hardwareCapabilities.canDecodeVideoStream(videoStream);
let canEncode = this.context.pipelineOptions?.disableHardwareEncoding
? false
: this.hardwareCapabilities.canEncodeState(desiredState);
if (ffmpegState.outputFormat.type === OutputFormatTypes.Nut) {
if (canEncode && ffmpegState.outputFormat.type === OutputFormatTypes.Nut) {
canEncode = false;
}
@@ -164,10 +167,11 @@ export class VaapiPipelineBuilder extends SoftwarePipelineBuilder {
currentState = this.setScale(currentState);
currentState = this.setPad(currentState);
this.setStillImageLoop();
// Set crop
// TODO: Set crop
// TODO: Make vaapi driver a union
const forceSoftwareOverlay =
this.context.pipelineOptions?.disableHardwareFilters ||
(this.context.hasWatermark && this.context.isSubtitleOverlay()) ||
ffmpegState.vaapiDriver === 'radeonsi';
@@ -329,7 +333,8 @@ export class VaapiPipelineBuilder extends SoftwarePipelineBuilder {
let nextState = currentState;
if (this.context.shouldDeinterlace) {
const filter =
this.context.ffmpegState.decoderHwAccelMode === 'vaapi'
this.context.ffmpegState.decoderHwAccelMode ===
HardwareAccelerationMode.Vaapi
? new DeinterlaceVaapiFilter(currentState)
: new DeinterlaceFilter(this.context.ffmpegState, currentState);
nextState = filter.nextState(currentState);
@@ -341,7 +346,6 @@ export class VaapiPipelineBuilder extends SoftwarePipelineBuilder {
protected setScale(currentState: FrameState): FrameState {
let nextState = currentState;
const { desiredState, ffmpegState, shouldDeinterlace } = this.context;
// TODO: Watermark, subtitles, interface
let scaleOption: FilterOption;
if (
!currentState.scaledSize.equals(desiredState.scaledSize) &&

View File

@@ -47,12 +47,14 @@ export class VideoToolboxPipelineBuilder extends SoftwarePipelineBuilder {
return;
}
const canDecode = this.hardwareCapabilities.canDecodeVideoStream(
this.context.videoStream,
);
const canEncode = this.hardwareCapabilities.canEncodeState(
this.desiredState,
);
const canDecode = this.context.pipelineOptions?.disableHardwareDecoding
? false
: this.hardwareCapabilities.canDecodeVideoStream(
this.context.videoStream,
);
const canEncode = this.context.pipelineOptions?.disableHardwareEncoding
? false
: this.hardwareCapabilities.canEncodeState(this.desiredState);
this.pipelineSteps.push(new VideoToolboxHardwareAccelerationOption());

View File

@@ -1,4 +1,5 @@
import { FileStreamSource } from '../../../../stream/types.ts';
import { LoggerFactory } from '../../../../util/logging/LoggerFactory.ts';
import { FfmpegCapabilities } from '../../capabilities/FfmpegCapabilities.ts';
import { NvidiaHardwareCapabilities } from '../../capabilities/NvidiaHardwareCapabilities.ts';
import { PixelFormatYuv420P } from '../../format/PixelFormat.ts';
@@ -11,7 +12,10 @@ import {
SubtitleMethods,
VideoStream,
} from '../../MediaStream.ts';
import { FfmpegState } from '../../state/FfmpegState.ts';
import {
DefaultPipelineOptions,
FfmpegState,
} from '../../state/FfmpegState.ts';
import { FrameState } from '../../state/FrameState.ts';
import { FrameSize } from '../../types.ts';
import { NvidiaPipelineBuilder } from './NvidiaPipelineBuilder.ts';
@@ -86,6 +90,84 @@ describe('NvidiaPipelineBuilder', () => {
paddedSize: FrameSize.FHD,
pixelFormat: new PixelFormatYuv420P(),
}),
DefaultPipelineOptions,
);
console.log(out.getCommandArgs().join(' '));
});
test('should work with hardware filters disabled', () => {
const capabilities = new NvidiaHardwareCapabilities('RTX 2080 Ti', 75);
const binaryCapabilities = new FfmpegCapabilities(
new Set(),
new Map(),
new Set(),
);
const video = VideoInputSource.withStream(
new FileStreamSource('/path/to/video.mkv'),
VideoStream.create({
codec: 'h264',
displayAspectRatio: '16:9',
frameSize: FrameSize.withDimensions(1920, 900),
index: 0,
pixelFormat: new PixelFormatYuv420P(),
sampleAspectRatio: null,
}),
);
const watermark = new WatermarkInputSource(
new FileStreamSource('/path/to/watermark.jpg'),
StillImageStream.create({
frameSize: FrameSize.withDimensions(800, 600),
index: 0,
}),
{
duration: 5,
enabled: true,
horizontalMargin: 5,
opacity: 100,
position: 'bottom-right',
verticalMargin: 5,
width: 10,
},
);
const builder = new NvidiaPipelineBuilder(
capabilities,
binaryCapabilities,
video,
null,
null,
watermark,
new SubtitlesInputSource(
new FileStreamSource('/path/to/video.mkv'),
[new EmbeddedSubtitleStream('pgs', 5, SubtitleMethods.Burn)],
SubtitleMethods.Burn,
),
);
const state = FfmpegState.create({
version: {
versionString: 'n7.0.2-15-g0458a86656-20240904',
majorVersion: 7,
minorVersion: 0,
patchVersion: 2,
isUnknown: false,
},
});
const out = builder.build(
state,
new FrameState({
isAnamorphic: false,
scaledSize: video.streams[0].squarePixelFrameSize(FrameSize.FHD),
paddedSize: FrameSize.FHD,
pixelFormat: new PixelFormatYuv420P(),
}),
{
...DefaultPipelineOptions,
disableHardwareFilters: true,
},
);
console.log(out.getCommandArgs().join(' '));
@@ -164,12 +246,13 @@ describe('NvidiaPipelineBuilder', () => {
pixelFormat: new PixelFormatYuv420P(),
deinterlace: true,
}),
DefaultPipelineOptions,
);
console.log(out.getCommandArgs().join(' '));
});
test('intermittent watermark, set format on hardware scale, do not set format on hwdownload', () => {
test('intermittent watermark, set format on hardware scale, do not set format on hwdownload', async () => {
const capabilities = new NvidiaHardwareCapabilities('RTX 2080 Ti', 75);
const binaryCapabilities = new FfmpegCapabilities(
new Set(),
@@ -238,8 +321,16 @@ describe('NvidiaPipelineBuilder', () => {
pixelFormat: new PixelFormatYuv420P(),
deinterlace: false,
}),
DefaultPipelineOptions,
);
await new Promise((resolve, reject) => {
LoggerFactory.root.flush((err) => {
if (err) reject(err);
resolve(void 0);
});
});
console.log(out.getCommandArgs().join(' '));
});
});

View File

@@ -90,15 +90,20 @@ export class NvidiaPipelineBuilder extends SoftwarePipelineBuilder {
const { videoStream, ffmpegState, desiredState, pipelineSteps } =
this.context;
let canDecode = this.hardwareCapabilities.canDecodeVideoStream(videoStream);
let canEncode = this.hardwareCapabilities.canEncodeState(desiredState);
let canDecode = this.context.pipelineOptions?.disableHardwareDecoding
? false
: this.hardwareCapabilities.canDecodeVideoStream(videoStream);
let canEncode = this.context.pipelineOptions?.disableHardwareEncoding
? false
: this.hardwareCapabilities.canEncodeState(desiredState);
// Hardcode this assumption for now
if (desiredState.videoFormat === VideoFormats.Raw) {
if (canEncode && desiredState.videoFormat === VideoFormats.Raw) {
canEncode = false;
}
if (
canDecode &&
this.context.shouldDeinterlace &&
videoStream.codec === VideoFormats.Mpeg2Video
) {
@@ -215,10 +220,11 @@ export class NvidiaPipelineBuilder extends SoftwarePipelineBuilder {
// TODO: is this necessary?
// See: https://trac.ffmpeg.org/ticket/9442
const needsSoftwareWatermarkOverlay =
(this.context.hasWatermark &&
!isEmpty(this.watermarkInputSource?.watermark.fadeConfig)) ||
(isDefined(this.watermarkInputSource?.watermark.duration) &&
this.watermarkInputSource.watermark.duration > 0);
this.context.hasWatermark &&
(!isEmpty(this.watermarkInputSource?.watermark.fadeConfig) ||
(isDefined(this.watermarkInputSource?.watermark.duration) &&
this.watermarkInputSource.watermark.duration > 0) ||
this.context.pipelineOptions?.disableHardwareFilters);
// If we're certain that we're about to use a hardware overlay of some sort
// then ensure the video stream is uploaded to hardware.
@@ -226,6 +232,7 @@ export class NvidiaPipelineBuilder extends SoftwarePipelineBuilder {
currentState.frameDataLocation === FrameDataLocation.Software &&
currentState.bitDepth === 8 &&
!this.context.isSubtitleTextContext() &&
!this.context.pipelineOptions.disableHardwareFilters &&
(this.context.isSubtitleOverlay() ||
(this.context.hasWatermark && !needsSoftwareWatermarkOverlay))
) {
@@ -402,7 +409,9 @@ export class NvidiaPipelineBuilder extends SoftwarePipelineBuilder {
// ? new PixelFormatNv12(this.context.videoStream.pixelFormat.name)
// : this.context.videoStream.pixelFormat;
const padStep = PadFilter.forCuda(currentState, this.desiredState);
const padStep = this.context.pipelineOptions?.disableHardwareFilters
? PadFilter.create(currentState, this.desiredState)
: PadFilter.forCuda(currentState, this.desiredState);
nextState = padStep.nextState(nextState);
this.videoInputSource.filterSteps.push(padStep);
@@ -551,19 +560,20 @@ export class NvidiaPipelineBuilder extends SoftwarePipelineBuilder {
new PixelFormatFilter(new PixelFormatYuva420P()),
);
const needsSubtitleScale = this.videoInputSource.hasAnyFilterStep([
ScaleCudaFilter,
ScaleNppFilter,
ScaleFilter,
PadFilter,
]);
if (currentState.bitDepth === 8) {
const needsSubtitleScale = this.videoInputSource.hasAnyFilterStep([
ScaleCudaFilter,
ScaleNppFilter,
ScaleFilter,
PadFilter,
]);
const hasNpp = this.ffmpegCapabilities.hasFilter(
KnownFfmpegFilters.ScaleNpp,
);
if (needsSubtitleScale) {
if (hasNpp) {
if (hasNpp && !this.context.pipelineOptions?.disableHardwareFilters) {
// Use a hardware scale. Only scale_npp supports yuva
const hwUpload = new HardwareUploadCudaFilter(
currentState.updateFrameLocation(FrameDataLocation.Software),
@@ -572,39 +582,55 @@ export class NvidiaPipelineBuilder extends SoftwarePipelineBuilder {
FrameDataLocation.Hardware;
this.subtitleInputSource.filterSteps.push(hwUpload);
const filter = new SubtitleScaleNppFilter(
this.desiredState.paddedSize,
this.subtitleInputSource.filterSteps.push(
new SubtitleScaleNppFilter(this.desiredState.paddedSize),
);
this.subtitleInputSource.filterSteps.push(filter);
} else {
// Otherwise perform the scale on software and the upload to the GPU
// Otherwise perform the scale on software and the upload to the GPU, unless
// explicitly stated otherwise
this.subtitleInputSource.addFilter(
new ImageScaleFilter(this.desiredState.paddedSize),
);
this.subtitleInputSource.addFilter(
new HardwareUploadCudaFilter(
currentState.updateFrameLocation(FrameDataLocation.Software),
),
);
if (!this.context.pipelineOptions?.disableHardwareFilters) {
this.subtitleInputSource.addFilter(
new HardwareUploadCudaFilter(
currentState.updateFrameLocation(FrameDataLocation.Software),
),
);
this.subtitleInputSource.frameDataLocation =
FrameDataLocation.Hardware;
}
}
} else {
if (needsSubtitleScale) {
const filter = new ImageScaleFilter(this.desiredState.paddedSize);
this.subtitleInputSource.filterSteps.push(filter);
}
// if (needsSubtitleScale) {
// const filter = new ImageScaleFilter(this.desiredState.paddedSize);
// this.subtitleInputSource.filterSteps.push(filter);
// }
const hwUpload = new HardwareUploadCudaFilter(
currentState.updateFrameLocation(FrameDataLocation.Software),
);
this.subtitleInputSource.frameDataLocation =
FrameDataLocation.Hardware;
this.subtitleInputSource.filterSteps.push(hwUpload);
if (!this.context.pipelineOptions?.disableHardwareFilters) {
const hwUpload = new HardwareUploadCudaFilter(
currentState.updateFrameLocation(FrameDataLocation.Software),
);
this.subtitleInputSource.frameDataLocation =
FrameDataLocation.Hardware;
this.subtitleInputSource.filterSteps.push(hwUpload);
}
}
this.context.filterChain.subtitleOverlayFilterSteps.push(
new OverlaySubtitleCudaFilter(),
);
if (this.context.pipelineOptions?.disableHardwareFilters) {
this.context.filterChain.subtitleOverlayFilterSteps.push(
new SubtitleOverlayFilter(
this.desiredState.pixelFormat ?? new PixelFormatYuv420P(),
),
);
} else {
this.context.filterChain.subtitleOverlayFilterSteps.push(
new OverlaySubtitleCudaFilter(),
);
}
} else {
// 10-bit case
if (currentState.frameDataLocation === FrameDataLocation.Hardware) {
currentState = this.addFilterToVideoChain(
currentState,
@@ -613,14 +639,7 @@ export class NvidiaPipelineBuilder extends SoftwarePipelineBuilder {
this.videoInputSource.frameDataLocation = FrameDataLocation.Software;
}
const needsScale = this.videoInputSource.hasAnyFilterStep([
ScaleCudaFilter,
ScaleNppFilter,
ScaleFilter,
PadFilter,
]);
if (needsScale) {
if (needsSubtitleScale) {
this.subtitleInputSource.addFilter(
new ImageScaleFilter(this.desiredState.paddedSize),
);

View File

@@ -32,11 +32,13 @@ export class NvidiaScaler {
const noHardwareFilters = !desiredState.deinterlace;
const needsToPad = !currentState.paddedSize.equals(desiredState.paddedSize);
if (
decodeToSoftware &&
(needsToPad || noHardwareFilters) &&
softwareEncoder
) {
const useSoftwareScale =
(decodeToSoftware &&
(needsToPad || noHardwareFilters) &&
softwareEncoder) ||
context.pipelineOptions?.disableHardwareFilters;
if (useSoftwareScale) {
scaleStep = ScaleFilter.create(
currentState,
ffmpegState,

View File

@@ -1,10 +1,10 @@
import type { HardwareAccelerationMode } from '@/db/schema/TranscodeConfig.js';
import { HardwareAccelerationMode } from '@/db/schema/TranscodeConfig.js';
import type { DataProps } from '@/ffmpeg/builder/types.js';
import type { FfmpegVersionResult } from '@/ffmpeg/ffmpegInfo.js';
import type { Maybe, Nullable } from '@/types/util.js';
import type { FfmpegLogLevel } from '@tunarr/types/schemas';
import type { Duration } from 'dayjs/plugin/duration.js';
import { isNil, merge } from 'lodash-es';
import { merge } from 'lodash-es';
import path from 'node:path';
import type { MarkRequired } from 'ts-essentials';
import type { OutputFormat } from '../constants.ts';
@@ -14,6 +14,28 @@ import {
OutputLocation,
} from '../constants.ts';
export type PipelineOptions = {
decoderThreadCount: Nullable<number>;
encoderThreadCount: Nullable<number>;
filterThreadCount: Nullable<number>;
disableHardwareDecoding?: boolean;
disableHardwareEncoding?: boolean;
disableHardwareFilters?: boolean;
vaapiDevice: Nullable<string>;
vaapiDriver: Nullable<string>;
};
export const DefaultPipelineOptions: PipelineOptions = {
decoderThreadCount: null,
encoderThreadCount: null,
filterThreadCount: null,
disableHardwareDecoding: false,
disableHardwareEncoding: false,
disableHardwareFilters: false,
vaapiDevice: null,
vaapiDriver: null,
};
export const DefaultFfmpegState: Partial<DataProps<FfmpegState>> = {
threadCount: null,
start: null,
@@ -32,7 +54,8 @@ type FfmpegStateFields = MarkRequired<
>;
export class FfmpegState {
version: FfmpegVersionResult;
readonly version: FfmpegVersionResult;
threadCount: Nullable<number> = null;
start: Nullable<Duration> = null;
duration: Nullable<Duration> = null;
@@ -42,9 +65,9 @@ export class FfmpegState {
doNotMapMetadata: boolean;
metadataServiceName: Nullable<string> = null;
metadataServiceProvider: Nullable<string> = null;
decoderHwAccelMode: HardwareAccelerationMode = HardwareAccelerationMode.None;
encoderHwAccelMode: HardwareAccelerationMode = HardwareAccelerationMode.None;
decoderHwAccelMode: HardwareAccelerationMode = 'none';
encoderHwAccelMode: HardwareAccelerationMode = 'none';
softwareScalingAlgorithm: string = 'fast_bilinear';
softwareDeinterlaceFilter: string = 'yadif=1';
vaapiDevice: Nullable<string> = null;
@@ -109,35 +132,4 @@ export class FfmpegState {
static defaultWithVersion(version: FfmpegVersionResult) {
return this.create({ version });
}
// HACK: kinda hacky here!
isAtLeastVersion(
version: { major: number; minor?: number },
permissive: boolean = true,
) {
if (this.version.isUnknown || isNil(this.version.majorVersion)) {
return permissive;
}
const { major, minor } = version;
if (this.version.majorVersion > major) {
return true;
}
if (this.version.majorVersion === major) {
if (isNil(this.version.minorVersion)) {
return permissive;
}
// We're not looking for a minor version
if (isNil(minor)) {
return true;
}
return this.version.minorVersion >= minor;
}
return false;
}
}

View File

@@ -32,6 +32,7 @@ import Migration1746042667_AddSubtitles from './db/Migration1746042667_AddSubtit
import Migration1746123876_ReworkSubtitleFilter from './db/Migration1746123876_ReworkSubtitleFilter.ts';
import Migration1746128022_FixSubtitlePriorityType from './db/Migration1746128022_FixSubtitlePriorityType.ts';
import Migration1748345299_AddMoreProgramTypes from './db/Migration1748345299_AddMoreProgramTypes.ts';
import Migration1756312561_InitialAdvancedTranscodeConfig from './db/Migration1756312561_InitialAdvancedTranscodeConfig.ts';
export const LegacyMigrationNameToNewMigrationName = [
['Migration20240124115044', '_Legacy_Migration00'],
@@ -110,6 +111,8 @@ export class DirectMigrationProvider implements MigrationProvider {
migration1746123876: Migration1746123876_ReworkSubtitleFilter,
migration1746128022: Migration1746128022_FixSubtitlePriorityType,
migration1748345299: Migration1748345299_AddMoreProgramTypes,
migration1756312561:
Migration1756312561_InitialAdvancedTranscodeConfig,
},
wrapWithTransaction,
),

View File

@@ -0,0 +1,23 @@
import { isNonEmptyString } from '@tunarr/shared/util';
import { CompiledQuery } from 'kysely';
import type { TunarrDatabaseMigration } from '../DirectMigrationProvider.ts';
const expr = String.raw`
ALTER TABLE "transcode_config" ADD "disable_hardware_decoder" integer DEFAULT false;--> statement-breakpoint
ALTER TABLE "transcode_config" ADD "disable_hardware_encoding" integer DEFAULT false;--> statement-breakpoint
ALTER TABLE "transcode_config" ADD "disable_hardware_filters" integer DEFAULT false;
`;
export default {
async up(db) {
const queries = expr
.split('--> statement-breakpoint')
.map((s) => s.trim())
.filter(isNonEmptyString)
.map((s) => CompiledQuery.raw(s));
for (const query of queries) {
await db.executeQuery(query);
}
},
} satisfies TunarrDatabaseMigration;

View File

@@ -40,6 +40,7 @@ import {
map,
mapValues,
nth,
sumBy,
uniq,
values,
} from 'lodash-es';
@@ -432,9 +433,13 @@ export class TVGuideService {
throw new Error(channel.number + " wasn't preprocesed correctly???!?");
}
const channelDuration =
channel.duration <= 0
? sumBy(lineup.items, ({ durationMs }) => durationMs)
: channel.duration;
// How many ms we are "into" the current channel cycle
let channelProgress =
(currentUpdateTimeMs - channelStartTime) % channel.duration;
(currentUpdateTimeMs - channelStartTime) % channelDuration;
// The timestamp of the start of this cycle
const startOfCycle = currentUpdateTimeMs - channelProgress;

View File

@@ -145,10 +145,10 @@ class LoggerFactoryImpl {
file: isProduction
? undefined
: caller
? isString(caller)
? caller
: getCaller(caller)
: undefined,
? isString(caller)
? caller
: getCaller(caller)
: undefined,
caller: isProduction ? undefined : className, // Don't include this twice in production
};
const newChild = this.rootLogger.child(childOpts);

View File

@@ -1,7 +1,9 @@
import { isNil } from 'lodash-es';
export function booleanToNumber(b: boolean): number {
return b ? 1 : 0;
}
export function numberToBoolean(n: number): boolean {
return n === 0 ? false : true;
return isNil(n) || n === 0 ? false : true;
}

View File

@@ -6,7 +6,7 @@
"mikro-orm.base.config.ts"
],
"exclude": [
"./build/**/*",
"./dist/**/*",
"./tests/**/*",
"./src/**/*.test.ts",
"./src/**/*.ignore.ts",

View File

@@ -9,7 +9,7 @@
"moduleResolution": "nodenext",
"resolveJsonModule": true,
"rootDir": ".",
"outDir": "build",
"outDir": "dist",
"allowSyntheticDefaultImports": true,
"importHelpers": true,
"alwaysStrict": true,

View File

@@ -11,25 +11,25 @@
"bundle": "tsup",
"build": "tsup --dts",
"build-dev": "tsup --dts --watch",
"clean": "rimraf ./build/",
"clean": "rimraf ./dist/",
"dev": "tsup --dts --watch",
"test": "vitest --run"
},
"exports": {
".": {
"types": "./build/index.d.ts",
"default": "./build/index.js"
"types": "./dist/index.d.ts",
"default": "./dist/index.js"
},
"./constants": {
"types": "./build/constants/index.d.ts",
"default": "./build/constants/index.js"
"types": "./dist/constants/index.d.ts",
"default": "./dist/constants/index.js"
},
"./util": {
"types": "./build/util/index.d.ts",
"default": "./build/util/index.js"
"types": "./dist/util/index.d.ts",
"default": "./dist/util/index.js"
},
"./types": {
"types": "./build/types/index.d.ts"
"types": "./dist/types/index.d.ts"
}
},
"main": "index.ts",
@@ -42,8 +42,10 @@
"zod": "^4.0.17"
},
"devDependencies": {
"@rollup/plugin-swc": "^0.4.0",
"@types/lodash-es": "4.17.9",
"@types/node": "22.10.7",
"@vitest/coverage-v8": "^3.2.4",
"rimraf": "^5.0.5",
"ts-essentials": "^9.4.1",
"tsup": "^8.0.2",

View File

@@ -8,7 +8,7 @@
],
"moduleResolution": "nodenext",
"rootDir": ".",
"outDir": "build",
"outDir": "dist",
"allowSyntheticDefaultImports": true,
"importHelpers": true,
"alwaysStrict": true,
@@ -33,7 +33,7 @@
"./src/**/*.ts",
],
"exclude": [
"./build/**/*",
"./dist/**/*",
"./**/*.ignore.ts"
],
}

View File

@@ -5,7 +5,7 @@
],
"exclude": [
"vitest.config.ts",
"./build/**/*",
"./dist/**/*",
"./**/*.test.ts",
"./**/*.ignore.ts"
],

View File

@@ -10,7 +10,7 @@ export default defineConfig((opts) => ({
dts: !!opts.dts,
splitting: false,
format: 'esm',
outDir: 'build',
outDir: 'dist',
sourcemap: true,
tsconfig: 'tsconfig.prod.json',
}));

View File

@@ -4,11 +4,11 @@
"tasks": {
"bundle": {
"dependsOn": ["@tunarr/types#build"],
"outputs": ["build/**"]
"outputs": ["dist/**"]
},
"build": {
"dependsOn": ["@tunarr/types#build"],
"outputs": ["build/**"]
"outputs": ["dist/**"]
},
"build-dev": {
"dependsOn": ["@tunarr/types#build"],

View File

@@ -1,21 +1,44 @@
import swc from '@rollup/plugin-swc';
import path from 'node:path';
import { defineConfig } from 'vitest/config';
export default defineConfig({
test: {
globals: true,
includeSource: ['src/**/*.test.ts'],
typecheck: {
tsconfig: 'tsconfig.json',
resolve: {
alias: {
'@': path.resolve(__dirname, 'src'),
},
},
// define: {
// 'import.meta.vitest': false,
// },
// build: {
// lib: {
// formats: ['es', 'cjs'],
// entry: './index.ts',
// fileName: 'index',
// },
// },
test: {
globals: true,
watch: false,
coverage: {
provider: 'v8',
},
},
define: {
'import.meta.vitest': false,
},
build: {
lib: {
formats: ['es', 'cjs'],
entry: './index.ts',
fileName: 'index',
},
},
plugins: [
swc({
swc: {
jsc: {
parser: {
syntax: 'typescript',
decorators: true,
},
target: 'esnext',
transform: {
decoratorMetadata: true,
},
},
},
}),
],
});

File diff suppressed because one or more lines are too long

View File

@@ -11,9 +11,9 @@
"build-dev": "tsc --declaration --watch",
"gen-emby": "typed-openapi https://swagger.emby.media/openapi.json -r zod -o ./build/emby-generated.ts"
},
"main": "./build/src/index.js",
"module": "./build/src/index.js",
"types": "./build/src/index.d.ts",
"main": "./dist/src/index.js",
"module": "./dist/src/index.js",
"types": "./dist/src/index.d.ts",
"keywords": [],
"author": "chrisbenincasa",
"type": "module",
@@ -33,29 +33,29 @@
},
"exports": {
".": {
"types": "./build/src/index.d.ts",
"default": "./build/src/index.js"
"types": "./dist/src/index.d.ts",
"default": "./dist/src/index.js"
},
"./package.json": "./package.json",
"./schemas": {
"types": "./build/src/schemas/index.d.ts",
"default": "./build/src/schemas/index.js"
"types": "./dist/src/schemas/index.d.ts",
"default": "./dist/src/schemas/index.js"
},
"./plex": {
"types": "./build/src/plex/index.d.ts",
"default": "./build/src/plex/index.js"
"types": "./dist/src/plex/index.d.ts",
"default": "./dist/src/plex/index.js"
},
"./jellyfin": {
"types": "./build/src/jellyfin/index.d.ts",
"default": "./build/src/jellyfin/index.js"
"types": "./dist/src/jellyfin/index.d.ts",
"default": "./dist/src/jellyfin/index.js"
},
"./emby": {
"types": "./build/src/emby/index.d.ts",
"default": "./build/src/emby/index.js"
"types": "./dist/src/emby/index.d.ts",
"default": "./dist/src/emby/index.js"
},
"./api": {
"types": "./build/src/api/index.d.ts",
"default": "./build/src/api/index.js"
"types": "./dist/src/api/index.d.ts",
"default": "./dist/src/api/index.js"
}
}
}

View File

@@ -63,4 +63,7 @@ export const TranscodeConfigSchema = z.object({
errorScreen: z.enum(SupportedErrorScreens),
errorScreenAudio: z.enum(SupportedErrorAudioTypes),
isDefault: z.boolean(),
disableHardwareDecoder: z.boolean().default(false),
disableHardwareEncoding: z.boolean().default(false),
disableHardwareFilters: z.boolean().default(false),
});

View File

@@ -12,7 +12,7 @@
"declarationMap": true,
"moduleResolution": "nodenext",
"rootDir": ".",
"outDir": "build",
"outDir": "dist",
"allowSyntheticDefaultImports": true,
"importHelpers": true,
"alwaysStrict": true,
@@ -31,6 +31,6 @@
],
"exclude": [
"./**/*.test.ts",
"./build/**"
"./dist/**"
]
}

View File

@@ -11,7 +11,7 @@ export default defineConfig((opts) => ({
},
format: 'esm',
dts: !!opts.dts,
outDir: 'build',
outDir: 'dist',
splitting: false,
sourcemap: false,
target: 'esnext',

View File

@@ -14,6 +14,7 @@ import { isEmpty, trimEnd } from 'lodash-es';
import type { SyntheticEvent } from 'react';
import { useCallback, useState } from 'react';
import { deleteApiChannelsByIdSessionsMutation } from '../../generated/@tanstack/react-query.gen.ts';
import { invalidateTaggedQueries } from '../../helpers/queryUtil.ts';
import { isNonEmptyString } from '../../helpers/util.ts';
import { useCopyToClipboard } from '../../hooks/useCopyToClipboard.ts';
import { useCreateChannel } from '../../hooks/useCreateChannel.ts';
@@ -47,8 +48,7 @@ export const ChannelsTableOptionsMenu = ({
...deleteApiChannelsByIdSessionsMutation(),
onSuccess: () => {
return queryClient.invalidateQueries({
queryKey: ['Channels'],
exact: false,
predicate: invalidateTaggedQueries(['Channels']),
});
},
});

View File

@@ -0,0 +1,62 @@
import {
FormControl,
FormControlLabel,
FormHelperText,
Stack,
} from '@mui/material';
import type { TranscodeConfig } from '@tunarr/types';
import { useFormContext } from 'react-hook-form';
import { CheckboxFormController } from '../../util/TypedController.tsx';
export const TranscodeConfigAdvancedOptions = () => {
const { control } = useFormContext<TranscodeConfig>();
return (
<Stack gap={2}>
<FormControl fullWidth>
<FormControlLabel
control={
<CheckboxFormController
control={control}
name="disableHardwareDecoder"
/>
}
label={'Disable Hardware Decoding'}
/>
<FormHelperText>
Will force use of a software decoder despite hardware acceleration
settings.
</FormHelperText>
</FormControl>
<FormControl fullWidth>
<FormControlLabel
control={
<CheckboxFormController
control={control}
name="disableHardwareEncoding"
/>
}
label={'Disable Hardware Encoding'}
/>
<FormHelperText>
Will force use of a software encoder despite hardware acceleration
settings.
</FormHelperText>
</FormControl>
<FormControl fullWidth>
<FormControlLabel
control={
<CheckboxFormController
control={control}
name="disableHardwareFilters"
/>
}
label={'Disable Hardware Filters'}
/>
<FormHelperText>
Will force use of a software filters (e.g. scale, pad, etc.) despite
hardware acceleration settings.
</FormHelperText>
</FormControl>
</Stack>
);
};

View File

@@ -0,0 +1,134 @@
import {
FormControl,
InputAdornment,
InputLabel,
MenuItem,
Select,
Stack,
} from '@mui/material';
import type { TranscodeConfig } from '@tunarr/types';
import type { SupportedTranscodeAudioOutputFormats } from '@tunarr/types/schemas';
import { Controller, useFormContext } from 'react-hook-form';
import type { DropdownOption } from '../../../helpers/DropdownOption';
import { NumericFormControllerText } from '../../util/TypedController.tsx';
const AudioFormats: DropdownOption<SupportedTranscodeAudioOutputFormats>[] = [
{
description: 'AAC',
value: 'aac',
},
{
description: 'AC3',
value: 'ac3',
},
{
description: 'MP3',
value: 'mp3',
},
{
description: 'Copy',
value: 'copy',
},
] as const;
export const TranscodeConfigAudioSettingsForm = () => {
const { control } = useFormContext<TranscodeConfig>();
return (
<Stack gap={2}>
<FormControl fullWidth>
<InputLabel>Audio Format</InputLabel>
<Controller
control={control}
name="audioFormat"
render={({ field }) => (
<Select<SupportedTranscodeAudioOutputFormats>
label="Audio Format"
{...field}
>
{AudioFormats.map((opt) => (
<MenuItem key={opt.value} value={opt.value}>
{opt.description}
</MenuItem>
))}
</Select>
)}
/>
</FormControl>
<Stack direction={{ sm: 'column', md: 'row' }} gap={2} useFlexGap>
<NumericFormControllerText
control={control}
name="audioBitRate"
prettyFieldName="Audio Bitrate"
TextFieldProps={{
id: 'audio-bitrate',
label: 'Audio Bitrate',
fullWidth: true,
InputProps: {
endAdornment: (
<InputAdornment position="end">kbps</InputAdornment>
),
},
}}
/>
<NumericFormControllerText
control={control}
name="audioBufferSize"
prettyFieldName="Audio Buffer Size"
TextFieldProps={{
id: 'audio-buffer-size',
label: 'Audio Buffer Size',
fullWidth: true,
InputProps: {
endAdornment: <InputAdornment position="end">kb</InputAdornment>,
},
}}
/>
</Stack>
<Stack direction={{ sm: 'column', md: 'row' }} gap={2} useFlexGap>
<NumericFormControllerText
control={control}
name="audioVolumePercent"
prettyFieldName="Audio Volume Percent"
TextFieldProps={{
id: 'audio-volume',
label: 'Audio Volume',
fullWidth: true,
sx: { my: 1 },
helperText: 'Values higher than 100 will boost the audio.',
InputProps: {
endAdornment: <InputAdornment position="end">%</InputAdornment>,
},
}}
/>
<NumericFormControllerText
control={control}
name="audioChannels"
prettyFieldName="Audio Channels"
TextFieldProps={{
id: 'audio-bitrate',
label: 'Audio Channels',
fullWidth: true,
sx: { my: 1 },
}}
/>
</Stack>
<NumericFormControllerText
control={control}
name="audioSampleRate"
prettyFieldName="Audio Sample Rate"
TextFieldProps={{
id: 'audio-sample-rate',
label: 'Audio Sample Rate',
fullWidth: true,
sx: { my: 1 },
InputProps: {
endAdornment: <InputAdornment position="end">kHz</InputAdornment>,
},
}}
/>
</Stack>
);
};

View File

@@ -0,0 +1,99 @@
import {
FormControl,
FormHelperText,
Grid,
InputLabel,
MenuItem,
Select,
} from '@mui/material';
import type { TranscodeConfig } from '@tunarr/types';
import { Controller, useFormContext } from 'react-hook-form';
const supportedErrorScreens = [
{
value: 'pic',
string: 'Default Generic Error Image',
},
{ value: 'blank', string: 'Blank Screen' },
{ value: 'static', string: 'Static' },
{
value: 'testsrc',
string: 'Test Pattern (color bars + timer)',
},
{
value: 'text',
string: 'Detailed error (requires ffmpeg with drawtext)',
},
{
value: 'kill',
string: 'Stop stream, show errors in logs',
},
];
const supportedErrorAudio = [
{ value: 'whitenoise', string: 'White Noise' },
{ value: 'sine', string: 'Beep' },
{ value: 'silent', string: 'No Audio' },
];
export const TranscodeConfigErrorOptions = () => {
const { control } = useFormContext<TranscodeConfig>();
return (
<Grid container spacing={2}>
<Grid size={{ sm: 12, md: 6 }}>
<FormControl sx={{ mt: 2 }}>
<InputLabel id="error-screen-label">Error Screen</InputLabel>
<Controller
control={control}
name="errorScreen"
render={({ field }) => (
<Select
labelId="error-screen-label"
id="error-screen"
label="Error Screen"
{...field}
>
{supportedErrorScreens.map((error) => (
<MenuItem key={error.value} value={error.value}>
{error.string}
</MenuItem>
))}
</Select>
)}
/>
<FormHelperText>
If there are issues playing a video, Tunarr will try to use an error
screen as a placeholder while retrying loading the video every 60
seconds.
</FormHelperText>
</FormControl>
</Grid>
<Grid size={{ sm: 12, md: 6 }}>
<FormControl sx={{ mt: 2 }} fullWidth>
<InputLabel id="error-audio-label">Error Audio</InputLabel>
<Controller
control={control}
name="errorScreenAudio"
render={({ field }) => (
<Select
labelId="error-audio-label"
id="error-screen"
label="Error Audio"
fullWidth
{...field}
>
{supportedErrorAudio.map((error) => (
<MenuItem key={error.value} value={error.value}>
{error.string}
</MenuItem>
))}
</Select>
)}
/>
</FormControl>
</Grid>
</Grid>
);
};

View File

@@ -1,17 +1,8 @@
import {
CheckboxFormController,
NumericFormControllerText,
TypedController,
} from '@/components/util/TypedController';
import { TranscodeResolutionOptions } from '@/helpers/constants';
import type { DropdownOption } from '@/helpers/DropdownOption';
import {
isNonEmptyString,
resolutionFromAnyString,
resolutionToString,
} from '@/helpers/util';
import { useFfmpegSettings } from '@/hooks/settingsHooks';
import type { SelectChangeEvent } from '@mui/material';
import { isNonEmptyString } from '@/helpers/util';
import {
Box,
Button,
@@ -20,114 +11,20 @@ import {
FormControlLabel,
FormHelperText,
Grid,
InputAdornment,
InputLabel,
MenuItem,
Link as MuiLink,
Select,
Stack,
TextField,
Typography,
} from '@mui/material';
import { useSuspenseQuery } from '@tanstack/react-query';
import type {
SupportedTranscodeVideoOutputFormat,
TranscodeConfig,
} from '@tunarr/types';
import type {
SupportedHardwareAccels,
SupportedTranscodeAudioOutputFormats,
} from '@tunarr/types/schemas';
import type { TranscodeConfig } from '@tunarr/types';
import { useSnackbar } from 'notistack';
import type { FieldErrors } from 'react-hook-form';
import { Controller, useForm } from 'react-hook-form';
import { getApiFfmpegInfoOptions } from '../../../generated/@tanstack/react-query.gen.ts';
import { Controller, FormProvider, useForm } from 'react-hook-form';
import Breadcrumbs from '../../Breadcrumbs.tsx';
const VideoFormats: DropdownOption<SupportedTranscodeVideoOutputFormat>[] = [
{
description: 'H.264',
value: 'h264',
},
{
description: 'HEVC (H.265)',
value: 'hevc',
},
{
description: 'MPEG-2',
value: 'mpeg2video',
},
] as const;
const AudioFormats: DropdownOption<SupportedTranscodeAudioOutputFormats>[] = [
{
description: 'AAC',
value: 'aac',
},
{
description: 'AC3',
value: 'ac3',
},
{
description: 'MP3',
value: 'mp3',
},
{
description: 'Copy',
value: 'copy',
},
] as const;
const VideoHardwareAccelerationOptions: DropdownOption<SupportedHardwareAccels>[] =
[
{
description: 'Software (no GPU)',
value: 'none',
},
{
description: 'Nvidia (CUDA)',
value: 'cuda',
},
{
description: 'Video Acceleration API (VA-API)',
value: 'vaapi',
},
{
description: 'Intel QuickSync',
value: 'qsv',
},
{
description: 'VideoToolbox',
value: 'videotoolbox',
},
] as const;
const supportedErrorScreens = [
{
value: 'pic',
string: 'Default Generic Error Image',
},
{ value: 'blank', string: 'Blank Screen' },
{ value: 'static', string: 'Static' },
{
value: 'testsrc',
string: 'Test Pattern (color bars + timer)',
},
{
value: 'text',
string: 'Detailed error (requires ffmpeg with drawtext)',
},
{
value: 'kill',
string: 'Stop stream, show errors in logs',
},
];
const supportedErrorAudio = [
{ value: 'whitenoise', string: 'White Noise' },
{ value: 'sine', string: 'Beep' },
{ value: 'silent', string: 'No Audio' },
];
import { TranscodeConfigAdvancedOptions } from './TranscodeConfigAdvancedOptions.tsx';
import { TranscodeConfigAudioSettingsForm } from './TranscodeConfigAudioSettingsForm.tsx';
import { TranscodeConfigErrorOptions } from './TranscodeConfigErrorOptions.tsx';
import { TranscodeConfigVideoSettingsForm } from './TranscodeConfigVideoSettingsForm.tsx';
type Props = {
onSave: (config: TranscodeConfig) => Promise<TranscodeConfig>;
@@ -140,26 +37,19 @@ export const TranscodeConfigSettingsForm = ({
initialConfig,
isNew,
}: Props) => {
const { data: ffmpegSettings } = useFfmpegSettings();
const ffmpegInfo = useSuspenseQuery({
// queryKey: ['ffmpeg-info'],
// queryFn: (apiClient) => apiClient.getFfmpegInfo(),
...getApiFfmpegInfoOptions(),
});
const snackbar = useSnackbar();
const {
control,
watch,
reset,
formState: { isSubmitting, isValid, isDirty },
handleSubmit,
} = useForm<TranscodeConfig>({
const transcodeConfigForm = useForm<TranscodeConfig>({
defaultValues: initialConfig,
mode: 'onChange',
});
const {
control,
reset,
formState: { isSubmitting, isValid, isDirty },
handleSubmit,
watch,
} = transcodeConfigForm;
const hardwareAccelerationMode = watch('hardwareAccelerationMode');
@@ -185,466 +75,156 @@ export const TranscodeConfigSettingsForm = ({
console.error(errors);
};
const videoFfmpegSettings = () => {
return (
<Stack gap={2}>
<FormControl fullWidth>
<InputLabel>Video Format</InputLabel>
<Controller
control={control}
name="videoFormat"
render={({ field }) => (
<Select label="Video Format" {...field}>
{VideoFormats.map((opt) => (
<MenuItem key={opt.value} value={opt.value}>
{opt.description}
</MenuItem>
))}
</Select>
)}
/>
<FormHelperText></FormHelperText>
</FormControl>
<FormControl fullWidth>
<InputLabel>Hardware Acceleration</InputLabel>
<Controller
control={control}
name="hardwareAccelerationMode"
render={({ field }) => (
<Select label="Hardware Acceleration" {...field}>
{VideoHardwareAccelerationOptions.filter(
({ value }) =>
value === 'none' ||
ffmpegInfo.data.hardwareAccelerationTypes.includes(value),
).map((opt) => (
<MenuItem key={opt.value} value={opt.value}>
{opt.description}
</MenuItem>
))}
</Select>
)}
/>
<FormHelperText></FormHelperText>
</FormControl>
{(hardwareAccelerationMode === 'vaapi' ||
hardwareAccelerationMode === 'qsv') && (
<Controller
control={control}
name="vaapiDevice"
render={({ field }) => (
<TextField
fullWidth
label={
hardwareAccelerationMode === 'qsv'
? 'QSV Device'
: 'VA-API Device'
}
helperText={
<span>
Override the default{' '}
{hardwareAccelerationMode === 'qsv' ? 'QSV' : 'VA-API'}{' '}
device path (defaults to <code>/dev/dri/renderD128</code> on
Linux and blank otherwise)
</span>
}
{...field}
/>
)}
/>
)}
<FormControl fullWidth>
<InputLabel id="target-resolution-label">Resolution</InputLabel>
<TypedController
control={control}
name="resolution"
toFormType={resolutionFromAnyString}
valueExtractor={(e) => (e as SelectChangeEvent).target.value}
render={({ field }) => (
<Select
labelId="target-resolution-label"
id="target-resolution"
label="Resolution"
{...field}
value={resolutionToString(field.value)}
>
{TranscodeResolutionOptions.map((resolution) => (
<MenuItem key={resolution.value} value={resolution.value}>
{resolution.label}
</MenuItem>
))}
</Select>
)}
/>
</FormControl>
<Stack direction={{ sm: 'column', md: 'row' }} gap={2} useFlexGap>
<NumericFormControllerText
control={control}
name="videoBitRate"
prettyFieldName="Video Bitrate"
TextFieldProps={{
id: 'video-bitrate',
label: 'Video Bitrate',
fullWidth: true,
sx: { my: 1 },
InputProps: {
endAdornment: (
<InputAdornment position="end">kbps</InputAdornment>
),
},
}}
/>
<NumericFormControllerText
control={control}
name="videoBufferSize"
prettyFieldName="Video Buffer Size"
TextFieldProps={{
id: 'video-buffer-size',
label: 'Video Buffer Size',
fullWidth: true,
sx: { my: 1 },
InputProps: {
endAdornment: (
<InputAdornment position="end">kb</InputAdornment>
),
},
helperText: (
<>
Buffer size effects how frequently ffmpeg reconsiders the
output bitrate.{' '}
<MuiLink
target="_blank"
href="https://trac.ffmpeg.org/wiki/Limiting%20the%20output%20bitrate#Whatdoes-bufsizedo"
>
Read more
</MuiLink>
</>
),
}}
/>
</Stack>
<Stack gap={1}>
{ffmpegSettings.useNewFfmpegPipeline && (
<FormControl fullWidth>
<FormControlLabel
control={
<CheckboxFormController
control={control}
name="deinterlaceVideo"
/>
}
label={'Auto Deinterlace Video'}
/>
<FormHelperText></FormHelperText>
</FormControl>
)}
{ffmpegSettings.useNewFfmpegPipeline && (
<FormControl fullWidth>
<FormControlLabel
control={
<CheckboxFormController
control={control}
name="normalizeFrameRate"
/>
}
label={'Normalize Frame Rate'}
/>
<FormHelperText>
Output video at a constant frame rate.
</FormHelperText>
</FormControl>
)}
</Stack>
</Stack>
);
};
const audioFfmpegSettings = () => {
return (
<Stack gap={2}>
<FormControl fullWidth>
<InputLabel>Audio Format</InputLabel>
<Controller
control={control}
name="audioFormat"
render={({ field }) => (
<Select<SupportedTranscodeAudioOutputFormats>
label="Audio Format"
{...field}
>
{AudioFormats.map((opt) => (
<MenuItem key={opt.value} value={opt.value}>
{opt.description}
</MenuItem>
))}
</Select>
)}
/>
</FormControl>
<Stack direction={{ sm: 'column', md: 'row' }} gap={2} useFlexGap>
<NumericFormControllerText
control={control}
name="audioBitRate"
prettyFieldName="Audio Bitrate"
TextFieldProps={{
id: 'audio-bitrate',
label: 'Audio Bitrate',
fullWidth: true,
InputProps: {
endAdornment: (
<InputAdornment position="end">kbps</InputAdornment>
),
},
}}
/>
<NumericFormControllerText
control={control}
name="audioBufferSize"
prettyFieldName="Audio Buffer Size"
TextFieldProps={{
id: 'audio-buffer-size',
label: 'Audio Buffer Size',
fullWidth: true,
InputProps: {
endAdornment: (
<InputAdornment position="end">kb</InputAdornment>
),
},
}}
/>
</Stack>
<Stack direction={{ sm: 'column', md: 'row' }} gap={2} useFlexGap>
<NumericFormControllerText
control={control}
name="audioVolumePercent"
prettyFieldName="Audio Volume Percent"
TextFieldProps={{
id: 'audio-volume',
label: 'Audio Volume',
fullWidth: true,
sx: { my: 1 },
helperText: 'Values higher than 100 will boost the audio.',
InputProps: {
endAdornment: <InputAdornment position="end">%</InputAdornment>,
},
}}
/>
<NumericFormControllerText
control={control}
name="audioChannels"
prettyFieldName="Audio Channels"
TextFieldProps={{
id: 'audio-bitrate',
label: 'Audio Channels',
fullWidth: true,
sx: { my: 1 },
}}
/>
</Stack>
<NumericFormControllerText
control={control}
name="audioSampleRate"
prettyFieldName="Audio Sample Rate"
TextFieldProps={{
id: 'audio-sample-rate',
label: 'Audio Sample Rate',
fullWidth: true,
sx: { my: 1 },
InputProps: {
endAdornment: <InputAdornment position="end">kHz</InputAdornment>,
},
}}
/>
</Stack>
);
};
return (
<Box component="form" onSubmit={handleSubmit(saveForm, handleSubmitError)}>
<Breadcrumbs />
<Stack spacing={2}>
<Typography variant="h5">
Edit Config: "{initialConfig.name}"
</Typography>
<Divider />
<Box>
<Typography variant="h6" sx={{ mb: 2 }}>
General
<FormProvider {...transcodeConfigForm}>
<Stack spacing={2}>
<Typography variant="h5">
Edit Config: "{initialConfig.name}"
</Typography>
<Grid container columnSpacing={2}>
<Grid size={{ sm: 12, md: 6 }}>
<Controller
control={control}
name="name"
rules={{
required: true,
minLength: 1,
}}
render={({ field, fieldState: { error } }) => (
<TextField
fullWidth
label="Name"
error={!!error}
helperText={
isNonEmptyString(error?.message)
? error.message
: error?.type === 'required'
? 'Name is required'
: error?.type === 'minLength'
? 'Name is required'
: null
}
{...field}
/>
)}
/>
</Grid>
<Grid size={{ sm: 12, md: 6 }}>
<NumericFormControllerText
control={control}
name="threadCount"
prettyFieldName="Threads"
TextFieldProps={{
label: 'Threads',
fullWidth: true,
helperText: (
<>
Sets the number of threads used to decode the input
stream. Set to 0 to let ffmpeg automatically decide how
many threads to use. Read more about this option{' '}
<MuiLink
target="_blank"
href="https://ffmpeg.org/ffmpeg-codecs.html#:~:text=threads%20integer%20(decoding/encoding%2Cvideo)"
>
here
</MuiLink>
</>
),
}}
/>
</Grid>
<Grid size={12}>
<FormControl fullWidth>
<FormControlLabel
control={
<CheckboxFormController
control={control}
name="disableChannelOverlay"
/>
}
label={'Disable Watermarks'}
/>
<FormHelperText>
If set, all watermark overlays will be disabled for channels
assigned this transcode config.
</FormHelperText>
</FormControl>
</Grid>
</Grid>
</Box>
<Divider />
<Grid container columnSpacing={2}>
<Grid size={{ sm: 12, md: 6 }}>
<Typography component="h6" variant="h6" sx={{ mb: 2 }}>
Video Options
<Divider />
<Box>
<Typography variant="h6" sx={{ mb: 2 }}>
General
</Typography>
{videoFfmpegSettings()}
</Grid>
<Grid size={{ sm: 12, md: 6 }}>
<Typography component="h6" variant="h6" sx={{ mb: 2 }}>
Audio Options
</Typography>
{audioFfmpegSettings()}
</Grid>
<Grid size={12} sx={{ mt: 2 }}>
<Divider />
</Grid>
<Grid size={12}>
<Typography component="h6" variant="h6" sx={{ pt: 2, pb: 1 }}>
Error Options
</Typography>
<Grid container spacing={2}>
<Grid container columnSpacing={2}>
<Grid size={{ sm: 12, md: 6 }}>
<FormControl sx={{ mt: 2 }}>
<InputLabel id="error-screen-label">Error Screen</InputLabel>
<Controller
control={control}
name="errorScreen"
render={({ field }) => (
<Select
labelId="error-screen-label"
id="error-screen"
label="Error Screen"
{...field}
>
{supportedErrorScreens.map((error) => (
<MenuItem key={error.value} value={error.value}>
{error.string}
</MenuItem>
))}
</Select>
)}
<Controller
control={control}
name="name"
rules={{
required: true,
minLength: 1,
}}
render={({ field, fieldState: { error } }) => (
<TextField
fullWidth
label="Name"
error={!!error}
helperText={
isNonEmptyString(error?.message)
? error.message
: error?.type === 'required'
? 'Name is required'
: error?.type === 'minLength'
? 'Name is required'
: null
}
{...field}
/>
)}
/>
</Grid>
<Grid size={{ sm: 12, md: 6 }}>
<NumericFormControllerText
control={control}
name="threadCount"
prettyFieldName="Threads"
TextFieldProps={{
label: 'Threads',
fullWidth: true,
helperText: (
<>
Sets the number of threads used to decode the input
stream. Set to 0 to let ffmpeg automatically decide how
many threads to use. Read more about this option{' '}
<MuiLink
target="_blank"
href="https://ffmpeg.org/ffmpeg-codecs.html#:~:text=threads%20integer%20(decoding/encoding%2Cvideo)"
>
here
</MuiLink>
</>
),
}}
/>
</Grid>
<Grid size={12}>
<FormControl fullWidth>
<FormControlLabel
control={
<CheckboxFormController
control={control}
name="disableChannelOverlay"
/>
}
label={'Disable Watermarks'}
/>
<FormHelperText>
If there are issues playing a video, Tunarr will try to use
an error screen as a placeholder while retrying loading the
video every 60 seconds.
If set, all watermark overlays will be disabled for channels
assigned this transcode config.
</FormHelperText>
</FormControl>
</Grid>
<Grid size={{ sm: 12, md: 6 }}>
<FormControl sx={{ mt: 2 }} fullWidth>
<InputLabel id="error-audio-label">Error Audio</InputLabel>
<Controller
control={control}
name="errorScreenAudio"
render={({ field }) => (
<Select
labelId="error-audio-label"
id="error-screen"
label="Error Audio"
fullWidth
{...field}
>
{supportedErrorAudio.map((error) => (
<MenuItem key={error.value} value={error.value}>
{error.string}
</MenuItem>
))}
</Select>
)}
/>
</FormControl>
</Grid>
</Grid>
</Box>
<Divider />
<Grid container spacing={2}>
<Grid size={{ sm: 12, md: 6 }}>
<Typography component="h6" variant="h6" sx={{ mb: 2 }}>
Video Options
</Typography>
<TranscodeConfigVideoSettingsForm />
</Grid>
<Grid size={{ sm: 12, md: 6 }}>
<Typography component="h6" variant="h6" sx={{ mb: 2 }}>
Audio Options
</Typography>
<TranscodeConfigAudioSettingsForm />
</Grid>
<Grid size={12} sx={{ mt: 2 }}>
<Divider />
</Grid>
{hardwareAccelerationMode !== 'none' && (
<>
<Grid size={{ sm: 12 }}>
<Typography component="h6" variant="h6" mb={1}>
Advanced Options
</Typography>
<Typography variant="body2" sx={{ mb: 2 }}>
Advanced options relating to transcoding. In general, do not
change these unless you know what you are doing! These
settings exist in order to leave some parity with the old
dizqueTV transcode pipeline as well as to provide mechanisms
to aid in debugging streaming issues.
</Typography>
<TranscodeConfigAdvancedOptions />
</Grid>
<Grid size={12} sx={{ mt: 2 }}>
<Divider />
</Grid>
</>
)}
<Grid size={12}>
<Typography component="h6" variant="h6" sx={{ pt: 2, pb: 1 }}>
Error Options
</Typography>
<TranscodeConfigErrorOptions />
</Grid>
</Grid>
</Grid>
<Stack spacing={2} direction="row" justifyContent="right">
{(isDirty || (isDirty && !isSubmitting)) && (
<Stack spacing={2} direction="row" justifyContent="right">
{(isDirty || (isDirty && !isSubmitting)) && (
<Button
variant="outlined"
onClick={() => {
reset();
}}
>
Reset Changes
</Button>
)}
<Button
variant="outlined"
onClick={() => {
reset();
}}
variant="contained"
disabled={!isValid || isSubmitting || (!isDirty && !isNew)}
type="submit"
>
Reset Changes
Save
</Button>
)}
<Button
variant="contained"
disabled={!isValid || isSubmitting || (!isDirty && !isNew)}
type="submit"
>
Save
</Button>
</Stack>
</Stack>
</Stack>
</FormProvider>
</Box>
);
};

View File

@@ -0,0 +1,256 @@
import type {
SelectChangeEvent} from '@mui/material';
import {
FormControl,
FormControlLabel,
FormHelperText,
InputAdornment,
InputLabel,
MenuItem,
Link as MuiLink,
Select,
Stack,
TextField,
} from '@mui/material';
import { useSuspenseQuery } from '@tanstack/react-query';
import type {
SupportedTranscodeVideoOutputFormat,
TranscodeConfig,
} from '@tunarr/types';
import type { SupportedHardwareAccels } from '@tunarr/types/schemas';
import { Controller, useFormContext } from 'react-hook-form';
import { getApiFfmpegInfoOptions } from '../../../generated/@tanstack/react-query.gen.ts';
import { TranscodeResolutionOptions } from '../../../helpers/constants.ts';
import type { DropdownOption } from '../../../helpers/DropdownOption';
import {
resolutionFromAnyString,
resolutionToString,
} from '../../../helpers/util.ts';
import { useFfmpegSettings } from '../../../hooks/settingsHooks.ts';
import {
CheckboxFormController,
NumericFormControllerText,
TypedController,
} from '../../util/TypedController.tsx';
/**
 * Video output codecs selectable in the transcode config UI.
 *
 * Uses `satisfies` instead of a widening annotation: the original combined a
 * mutable-array annotation with `as const`, which is a type error (a readonly
 * tuple is not assignable to a mutable `T[]`) and would otherwise discard the
 * literal element types. `satisfies` validates the shape while preserving
 * the `as const` literal inference.
 */
const VideoFormats = [
  {
    description: 'H.264',
    value: 'h264',
  },
  {
    description: 'HEVC (H.265)',
    value: 'hevc',
  },
  {
    description: 'MPEG-2',
    value: 'mpeg2video',
  },
] as const satisfies readonly DropdownOption<SupportedTranscodeVideoOutputFormat>[];
/**
 * Hardware acceleration backends offered in the UI. Entries other than
 * 'none' are filtered at render time against the backends the detected
 * ffmpeg build reports as supported.
 *
 * `as const satisfies ...` keeps literal value types while validating the
 * shape; the previous mutable-array annotation + `as const` combination was
 * a type error (readonly tuple assigned to a mutable array type).
 */
const VideoHardwareAccelerationOptions = [
  {
    description: 'Software (no GPU)',
    value: 'none',
  },
  {
    description: 'Nvidia (CUDA)',
    value: 'cuda',
  },
  {
    description: 'Video Acceleration API (VA-API)',
    value: 'vaapi',
  },
  {
    description: 'Intel QuickSync',
    value: 'qsv',
  },
  {
    description: 'VideoToolbox',
    value: 'videotoolbox',
  },
] as const satisfies readonly DropdownOption<SupportedHardwareAccels>[];
/**
 * Video settings section of the transcode config form.
 *
 * Covers output format, hardware acceleration backend (plus a device-path
 * override for VA-API/QSV), target resolution, video bitrate and buffer
 * size, and — when the new ffmpeg pipeline is enabled — deinterlacing and
 * frame-rate normalization toggles.
 *
 * Must be rendered inside a react-hook-form `FormProvider` for a
 * `TranscodeConfig` form; it pulls `control`/`watch` from that context.
 * Uses `useSuspenseQuery`, so it suspends until ffmpeg capability info
 * has loaded.
 */
export const TranscodeConfigVideoSettingsForm = () => {
  const { data: ffmpegSettings } = useFfmpegSettings();
  // Capability info probed from the server's ffmpeg binary; used below to
  // hide hardware acceleration options the build cannot support.
  const ffmpegInfo = useSuspenseQuery({
    ...getApiFfmpegInfoOptions(),
  });
  const { control, watch } = useFormContext<TranscodeConfig>();
  // Watched (not just read) so the device-path field shows/hides reactively
  // as the user changes the acceleration mode.
  const hardwareAccelerationMode = watch('hardwareAccelerationMode');
  return (
    <Stack gap={2}>
      {/* Output video codec */}
      <FormControl fullWidth>
        <InputLabel>Video Format</InputLabel>
        <Controller
          control={control}
          name="videoFormat"
          render={({ field }) => (
            <Select label="Video Format" {...field}>
              {VideoFormats.map((opt) => (
                <MenuItem key={opt.value} value={opt.value}>
                  {opt.description}
                </MenuItem>
              ))}
            </Select>
          )}
        />
        <FormHelperText></FormHelperText>
      </FormControl>
      {/* Hardware acceleration backend. Options other than 'none' are
          filtered against what the detected ffmpeg build supports. */}
      <FormControl fullWidth>
        <InputLabel>Hardware Acceleration</InputLabel>
        <Controller
          control={control}
          name="hardwareAccelerationMode"
          render={({ field }) => (
            <Select label="Hardware Acceleration" {...field}>
              {VideoHardwareAccelerationOptions.filter(
                ({ value }) =>
                  value === 'none' ||
                  ffmpegInfo.data.hardwareAccelerationTypes.includes(value),
              ).map((opt) => (
                <MenuItem key={opt.value} value={opt.value}>
                  {opt.description}
                </MenuItem>
              ))}
            </Select>
          )}
        />
        <FormHelperText></FormHelperText>
      </FormControl>
      {/* The device-path override only applies to VA-API and QSV modes. */}
      {(hardwareAccelerationMode === 'vaapi' ||
        hardwareAccelerationMode === 'qsv') && (
        <Controller
          control={control}
          name="vaapiDevice"
          render={({ field }) => (
            <TextField
              fullWidth
              label={
                hardwareAccelerationMode === 'qsv'
                  ? 'QSV Device'
                  : 'VA-API Device'
              }
              helperText={
                <span>
                  Override the default{' '}
                  {hardwareAccelerationMode === 'qsv' ? 'QSV' : 'VA-API'} device
                  path (defaults to <code>/dev/dri/renderD128</code> on Linux
                  and blank otherwise)
                </span>
              }
              {...field}
            />
          )}
        />
      )}
      {/* Target output resolution. The form stores a resolution object, so
          the value is round-tripped through the string conversion helpers
          for the Select. */}
      <FormControl fullWidth>
        <InputLabel id="target-resolution-label">Resolution</InputLabel>
        <TypedController
          control={control}
          name="resolution"
          toFormType={resolutionFromAnyString}
          valueExtractor={(e) => (e as SelectChangeEvent).target.value}
          render={({ field }) => (
            <Select
              labelId="target-resolution-label"
              id="target-resolution"
              label="Resolution"
              {...field}
              value={resolutionToString(field.value)}
            >
              {TranscodeResolutionOptions.map((resolution) => (
                <MenuItem key={resolution.value} value={resolution.value}>
                  {resolution.label}
                </MenuItem>
              ))}
            </Select>
          )}
        />
      </FormControl>
      <Stack direction={{ sm: 'column', md: 'row' }} gap={2} useFlexGap>
        <NumericFormControllerText
          control={control}
          name="videoBitRate"
          prettyFieldName="Video Bitrate"
          TextFieldProps={{
            id: 'video-bitrate',
            label: 'Video Bitrate',
            fullWidth: true,
            sx: { my: 1 },
            InputProps: {
              endAdornment: (
                <InputAdornment position="end">kbps</InputAdornment>
              ),
            },
          }}
        />
        <NumericFormControllerText
          control={control}
          name="videoBufferSize"
          prettyFieldName="Video Buffer Size"
          TextFieldProps={{
            id: 'video-buffer-size',
            label: 'Video Buffer Size',
            fullWidth: true,
            sx: { my: 1 },
            InputProps: {
              endAdornment: <InputAdornment position="end">kb</InputAdornment>,
            },
            helperText: (
              <>
                Buffer size effects how frequently ffmpeg reconsiders the output
                bitrate.{' '}
                <MuiLink
                  target="_blank"
                  href="https://trac.ffmpeg.org/wiki/Limiting%20the%20output%20bitrate#Whatdoes-bufsizedo"
                >
                  Read more
                </MuiLink>
              </>
            ),
          }}
        />
      </Stack>
      {/* The toggles below are only shown when the new ffmpeg pipeline is
          enabled in the global ffmpeg settings. */}
      <Stack gap={1}>
        {ffmpegSettings.useNewFfmpegPipeline && (
          <FormControl fullWidth>
            <FormControlLabel
              control={
                <CheckboxFormController
                  control={control}
                  name="deinterlaceVideo"
                />
              }
              label={'Auto Deinterlace Video'}
            />
            <FormHelperText></FormHelperText>
          </FormControl>
        )}
        {ffmpegSettings.useNewFfmpegPipeline && (
          <FormControl fullWidth>
            <FormControlLabel
              control={
                <CheckboxFormController
                  control={control}
                  name="normalizeFrameRate"
                />
              }
              label={'Normalize Frame Rate'}
            />
            <FormHelperText>
              Output video at a constant frame rate.
            </FormHelperText>
          </FormControl>
        )}
      </Stack>
    </Stack>
  );
};

View File

@@ -6075,6 +6075,9 @@ export type GetApiChannelsByIdTranscodeConfigResponses = {
errorScreen: 'static' | 'pic' | 'blank' | 'testsrc' | 'text' | 'kill';
errorScreenAudio: 'silent' | 'sine' | 'whitenoise';
isDefault: boolean;
disableHardwareDecoder: boolean;
disableHardwareEncoding: boolean;
disableHardwareFilters: boolean;
};
};
@@ -13106,6 +13109,9 @@ export type GetApiTranscodeConfigsResponses = {
errorScreen: 'static' | 'pic' | 'blank' | 'testsrc' | 'text' | 'kill';
errorScreenAudio: 'silent' | 'sine' | 'whitenoise';
isDefault: boolean;
disableHardwareDecoder: boolean;
disableHardwareEncoding: boolean;
disableHardwareFilters: boolean;
}>;
};
@@ -13146,6 +13152,9 @@ export type PostApiTranscodeConfigsData = {
errorScreen: 'static' | 'pic' | 'blank' | 'testsrc' | 'text' | 'kill';
errorScreenAudio: 'silent' | 'sine' | 'whitenoise';
isDefault: boolean;
disableHardwareDecoder?: boolean;
disableHardwareEncoding?: boolean;
disableHardwareFilters?: boolean;
};
path?: never;
query?: never;
@@ -13190,6 +13199,9 @@ export type PostApiTranscodeConfigsResponses = {
errorScreen: 'static' | 'pic' | 'blank' | 'testsrc' | 'text' | 'kill';
errorScreenAudio: 'silent' | 'sine' | 'whitenoise';
isDefault: boolean;
disableHardwareDecoder: boolean;
disableHardwareEncoding: boolean;
disableHardwareFilters: boolean;
};
};
@@ -13266,6 +13278,9 @@ export type GetApiTranscodeConfigsByIdResponses = {
errorScreen: 'static' | 'pic' | 'blank' | 'testsrc' | 'text' | 'kill';
errorScreenAudio: 'silent' | 'sine' | 'whitenoise';
isDefault: boolean;
disableHardwareDecoder: boolean;
disableHardwareEncoding: boolean;
disableHardwareFilters: boolean;
};
};
@@ -13307,6 +13322,9 @@ export type PutApiTranscodeConfigsByIdData = {
errorScreen: 'static' | 'pic' | 'blank' | 'testsrc' | 'text' | 'kill';
errorScreenAudio: 'silent' | 'sine' | 'whitenoise';
isDefault: boolean;
disableHardwareDecoder?: boolean;
disableHardwareEncoding?: boolean;
disableHardwareFilters?: boolean;
};
path: {
id: string;
@@ -13353,6 +13371,9 @@ export type PutApiTranscodeConfigsByIdResponses = {
errorScreen: 'static' | 'pic' | 'blank' | 'testsrc' | 'text' | 'kill';
errorScreenAudio: 'silent' | 'sine' | 'whitenoise';
isDefault: boolean;
disableHardwareDecoder: boolean;
disableHardwareEncoding: boolean;
disableHardwareFilters: boolean;
};
};
@@ -13417,6 +13438,9 @@ export type PostApiTranscodeConfigsByIdCopyResponses = {
errorScreen: 'static' | 'pic' | 'blank' | 'testsrc' | 'text' | 'kill';
errorScreenAudio: 'silent' | 'sine' | 'whitenoise';
isDefault: boolean;
disableHardwareDecoder: boolean;
disableHardwareEncoding: boolean;
disableHardwareFilters: boolean;
};
};

View File

@@ -0,0 +1,28 @@
import type { Query } from '@tanstack/react-query';
import { first, intersection } from 'lodash-es';
import { z } from 'zod/v4';
// Expected shape of the first element of a tagged query key: an object
// carrying a list of string tags. Keys without this shape are simply
// treated as non-matching.
const queryKeySchema = z.object({
  tags: z.string().array(),
});

/**
 * Builds a tanstack-query predicate that matches queries whose key is
 * tagged with at least one of `tagsToMatch` (intended for use with
 * `queryClient.invalidateQueries({ predicate: ... })`).
 *
 * @param tagsToMatch tags to look for; an empty list matches nothing.
 * @returns a predicate over `Query` objects.
 */
export function invalidateTaggedQueries(
  tagsToMatch: string[],
): (query: Query) => boolean {
  // Nothing can ever match an empty tag list; decide once here instead of
  // re-checking on every predicate invocation.
  if (tagsToMatch.length === 0) {
    return () => false;
  }
  return (query: Query): boolean => {
    const key = first(query.queryKey);
    if (!key) {
      return false;
    }
    // safeParse instead of parse + try/catch: non-tag-shaped keys are an
    // expected case (not exceptional), so we avoid exception-based control
    // flow and the per-query console.warn noise it produced.
    const parsed = queryKeySchema.safeParse(key);
    if (!parsed.success) {
      return false;
    }
    return intersection(parsed.data.tags, tagsToMatch).length > 0;
  };
}

View File

@@ -35,6 +35,9 @@ const defaultNewTranscodeConfig: TranscodeConfig = {
videoBitDepth: 8,
videoPreset: '',
videoProfile: '',
disableHardwareDecoder: false,
disableHardwareEncoding: false,
disableHardwareFilters: false,
};
export const NewTranscodeConfigSettingsPage = () => {