Merge remote-tracking branch 'origin/main' into dev

This commit is contained in:
Christian Benincasa
2026-04-07 14:55:50 -04:00
46 changed files with 2862 additions and 1006 deletions

View File

@@ -7,7 +7,6 @@ This page describes how to get Tunarr running with various methods and installat
```
docker run \
-v "$(pwd)"/tunarr:/config/tunarr \
-v "$(pwd)"/.dizquetv:/.dizquetv \
-e "TZ=America/New_York" \
-p 8000:8000 \
chrisbenincasa/tunarr
@@ -23,25 +22,15 @@ services:
container_name: tunarr
ports:
- ${TUNARR_SERVER_PORT:-8000}:8000
# Uncomment if using the Nvidia container
# runtime: nvidia
environment:
- LOG_LEVEL=${TUNARR_LOG_LEVEL:-INFO}
# Replace this with your timezone to ensure accurate guide
# data and scheduling.
- TZ=America/New_York
# Uncomment if you'd like to adjust default config path
# - TUNARR_DATABASE_PATH=/your/path/tunarr
volumes:
# Choose a path on your host to map to /config/tunarr. This ensures
# that restarting the container will not delete your settings or DB.
- /path/to/tunarr/data:/config/tunarr
# The host path is relative to the location of the compose file
# This can also use an absolute path.
#
# Uncomment if migrating from dizquetv. Change the host path
# to the location of your dizquetv "database"
# - ./.dizquetv:/.dizquetv
```
### Docker Desktop
@@ -140,17 +129,21 @@ services:
container_name: tunarr
ports:
- ${TUNARR_SERVER_PORT:-8000}:8000
runtime: nvidia
environment:
- LOG_LEVEL=${TUNARR_LOG_LEVEL:-INFO}
- TUNARR_LOG_LEVEL=${TUNARR_LOG_LEVEL:-INFO}
- NVIDIA_VISIBLE_DEVICES=all
- TZ=America/New_York
# Uncomment if you'd like to adjust default config path
# - TUNARR_DATABASE_PATH=/your/path/tunarr
volumes:
# Choose a path on your host to map to /config/tunarr. This ensures
# that restarting the container will not delete your settings or DB.
- /path/to/tunarr/data:/config/tunarr
deploy:
resources:
reservations:
devices:
- driver: nvidia
count: all
capabilities: [gpu, video, utility]
```
### QSV (Intel) / VA-API (Video Acceleration API)
@@ -179,7 +172,7 @@ services:
ports:
- ${TUNARR_SERVER_PORT:-8000}:8000
environment:
- LOG_LEVEL=${TUNARR_LOG_LEVEL:-INFO}
- TUNARR_LOG_LEVEL=${TUNARR_LOG_LEVEL:-INFO}
- TZ=America/New_York
# Pass all render devices to container
devices:

View File

@@ -30,34 +30,32 @@ import { OnDemandChannelService } from './services/OnDemandChannelService.js';
import { TVGuideService } from './services/TvGuideService.ts';
import { CacheImageService } from './services/cacheImageService.js';
import { MediaSourceScanCoordinator } from './services/scanner/MediaSourceScanCoordinator.ts';
import { ChannelCache } from './stream/ChannelCache.js';
import { SessionManager } from './stream/SessionManager.js';
import { StreamProgramCalculator } from './stream/StreamProgramCalculator.js';
@injectable()
export class ServerContext {
@inject(ProgramConverter) public readonly programConverter: ProgramConverter;
@inject(ProgramConverter) public readonly programConverter!: ProgramConverter;
@inject(OnDemandChannelService)
public readonly onDemandChannelService: OnDemandChannelService;
@inject(KEYS.ChannelDB) public channelDB: IChannelDB;
@inject(M3uService) public m3uService: M3uService;
@inject(KEYS.SettingsDB) public settings: ISettingsDB;
public readonly onDemandChannelService!: OnDemandChannelService;
@inject(KEYS.ChannelDB) public channelDB!: IChannelDB;
@inject(M3uService) public m3uService!: M3uService;
@inject(KEYS.SettingsDB) public settings!: ISettingsDB;
@inject(FillerDB) public fillerDB!: FillerDB;
public fileCache: FileCacheService = new FileCacheService();
public cacheImageService: CacheImageService;
@inject(EventService) public eventService: EventService;
@inject(TVGuideService) public guideService: TVGuideService;
@inject(HdhrService) public hdhrService: HdhrService;
@inject(CustomShowDB) public customShowDB: CustomShowDB;
@inject(ChannelCache) public channelCache: ChannelCache;
@inject(MediaSourceDB) public mediaSourceDB: MediaSourceDB;
@inject(KEYS.ProgramDB) public programDB: IProgramDB;
@inject(TranscodeConfigDB) public transcodeConfigDB: TranscodeConfigDB;
public cacheImageService!: CacheImageService;
@inject(EventService) public eventService!: EventService;
@inject(TVGuideService) public guideService!: TVGuideService;
@inject(HdhrService) public hdhrService!: HdhrService;
@inject(CustomShowDB) public customShowDB!: CustomShowDB;
@inject(MediaSourceDB) public mediaSourceDB!: MediaSourceDB;
@inject(KEYS.ProgramDB) public programDB!: IProgramDB;
@inject(TranscodeConfigDB) public transcodeConfigDB!: TranscodeConfigDB;
@inject(SessionManager) public readonly sessionManager: SessionManager;
@inject(SessionManager) public readonly sessionManager!: SessionManager;
@inject(HealthCheckService)
public readonly healthCheckService: HealthCheckService;
public readonly healthCheckService!: HealthCheckService;
@inject(ChannelLineupMigrator)
public readonly channelLineupMigrator!: ChannelLineupMigrator;
@@ -77,7 +75,7 @@ export class ServerContext {
public readonly drizzleFactory!: interfaces.AutoFactory<DrizzleDBAccess>;
@inject(KEYS.WorkerPool)
public readonly workerPool: IWorkerPool;
public readonly workerPool!: IWorkerPool;
@inject(MeilisearchService)
public readonly searchService!: MeilisearchService;

View File

@@ -6,7 +6,8 @@ import { SavePlexProgramExternalIdsTask } from '@/tasks/plex/SavePlexProgramExte
import { DateTimeRange } from '@/types/DateTimeRange.js';
import { OpenDateTimeRange } from '@/types/OpenDateTimeRange.js';
import type { RouterPluginAsyncCallback } from '@/types/serverType.js';
import { tag } from '@tunarr/types';
import { LoggerFactory } from '@/util/logging/LoggerFactory.js';
import { LogLevels, tag } from '@tunarr/types';
import { ChannelLineupQuery } from '@tunarr/types/api';
import { ChannelLineupSchema } from '@tunarr/types/schemas';
import dayjs from 'dayjs';
@@ -44,6 +45,23 @@ export const debugApi: RouterPluginAsyncCallback = async (fastify) => {
return res.send(getHeapStatistics());
});
fastify.get(
'/debug/log',
{
schema: {
querystring: z.object({
level: z.enum(LogLevels).default('debug'),
log: z.string().optional(),
}),
},
},
async (req, res) => {
const logger = LoggerFactory.root;
logger[req.query.level](req.query.log ?? 'Test log');
return res.send('ok');
},
);
fastify.get(
'/debug/helpers/playing_at',
{

View File

@@ -49,13 +49,10 @@ import { SearchParser } from './services/search/SearchParser.ts';
import { ChannelLineupMigratorStartupTask } from './services/startup/ChannelLineupMigratorStartupTask.ts';
import { ClearM3uCacheStartupTask } from './services/startup/ClearM3uCacheStartupTask.ts';
import { GenerateGuideStartupTask } from './services/startup/GenerateGuideStartupTask.ts';
import { LoadChannelCacheStartupTask } from './services/startup/LoadChannelCacheStartupTask.ts';
import { RefreshLibrariesStartupTask } from './services/startup/RefreshLibrariesStartupTask.ts';
import { ScheduleJobsStartupTask } from './services/startup/ScheduleJobsStartupTask.ts';
import { SeedFfmpegInfoCache } from './services/startup/SeedFfmpegInfoCache.ts';
import { SeedSystemDevicesStartupTask } from './services/startup/SeedSystemDevicesStartupTask.ts';
import { StreamCacheMigratorStartupTask } from './services/startup/StreamCacheMigratorStartupTask.ts';
import { ChannelCache } from './stream/ChannelCache.ts';
import { FixerRunner } from './tasks/fixers/FixerRunner.ts';
import { ChildProcessHelper } from './util/ChildProcessHelper.ts';
import { Timer } from './util/Timer.ts';
@@ -152,7 +149,6 @@ const RootModule = new ContainerModule((bind) => {
bind<interfaces.AutoFactory<TimeSlotSchedulerService>>(
KEYS.TimeSlotSchedulerServiceFactory,
).toAutoFactory(TimeSlotSchedulerService);
bind(KEYS.ChannelCache).to(ChannelCache).inSingletonScope();
bind(KEYS.StartupTask).to(SeedSystemDevicesStartupTask).inSingletonScope();
bind(KEYS.StartupTask).to(ClearM3uCacheStartupTask).inSingletonScope();
@@ -163,8 +159,6 @@ const RootModule = new ContainerModule((bind) => {
bind(KEYS.StartupTask).to(ScheduleJobsStartupTask).inSingletonScope();
bind(KEYS.StartupTask).to(FixerRunner).inSingletonScope();
bind(KEYS.StartupTask).to(GenerateGuideStartupTask).inSingletonScope();
bind(KEYS.StartupTask).to(LoadChannelCacheStartupTask).inSingletonScope();
bind(KEYS.StartupTask).to(StreamCacheMigratorStartupTask).inSingletonScope();
bind(KEYS.StartupTask).to(RefreshLibrariesStartupTask).inSingletonScope();
if (getBooleanEnvVar(USE_WORKER_POOL_ENV_VAR, false)) {

View File

@@ -1,5 +1,4 @@
import type { IProgramDB } from '@/db/interfaces/IProgramDB.js';
import { ChannelCache } from '@/stream/ChannelCache.js';
import { KEYS } from '@/types/inject.js';
import { isNonEmptyString, programExternalIdString } from '@/util/index.js';
import {
@@ -49,7 +48,6 @@ import { DrizzleDBAccess } from './schema/index.ts';
@injectable()
export class FillerDB implements IFillerListDB {
constructor(
@inject(ChannelCache) private channelCache: ChannelCache,
@inject(KEYS.ProgramDB) private programDB: IProgramDB,
@inject(KEYS.Database) private db: Kysely<DB>,
@inject(KEYS.DrizzleDB) private drizzle: DrizzleDBAccess,
@@ -324,7 +322,6 @@ export class FillerDB implements IFillerListDB {
.execute();
});
this.channelCache.clear();
return;
}

View File

@@ -2408,6 +2408,7 @@ export class ProgramDB implements IProgramDB {
releaseDate: incoming.releaseDate,
tagline: incoming.tagline,
updatedAt: incoming.updatedAt,
state: incoming.state,
};
return head(

View File

@@ -101,6 +101,7 @@ export class ProgramGroupingMinter {
show.year ??
(show.releaseDate ? dayjs(show.releaseDate).year() : null),
rating: show.rating,
state: 'ok',
},
externalIds: this.mintExternalIdsFromIdentifiers(
mediaSource,
@@ -195,6 +196,7 @@ export class ProgramGroupingMinter {
externalKey: artist.externalId,
plot: artist.plot,
tagline: artist.tagline,
state: 'ok',
},
externalIds: this.mintExternalIdsFromIdentifiers(
mediaSource,
@@ -255,6 +257,7 @@ export class ProgramGroupingMinter {
year:
season.year ??
(season.releaseDate ? dayjs(season.releaseDate).year() : null),
state: 'ok',
},
externalIds: this.mintExternalIdsFromIdentifiers(
mediaSource,
@@ -316,6 +319,7 @@ export class ProgramGroupingMinter {
album.year ??
(album.releaseDate ? dayjs(album.releaseDate).year() : null),
artistUuid: album.artist?.uuid,
state: 'ok',
},
externalIds: this.mintExternalIdsFromIdentifiers(
mediaSource,

View File

@@ -26,7 +26,7 @@ type MediaStreamFields<T extends MediaStream> = Omit<
// semantics with class construction, but still enabling us
// to have hierarchies, methods, etc.
type AudioStreamFields = MediaStreamFields<AudioStream>;
type VideoStreamFields = StrictOmit<
export type VideoStreamFields = StrictOmit<
MediaStreamFields<VideoStream>,
'isAnamorphic' | 'sampleAspectRatio'
>;

View File

@@ -1,8 +1,8 @@
import type { VideoFormat } from '../constants.ts';
import { VideoFormats, type VideoFormat } from '../constants.ts';
import { AudioEncoder, BaseEncoder, VideoEncoder } from './BaseEncoder.ts';
export class CopyVideoEncoder extends VideoEncoder {
protected videoFormat: VideoFormat;
protected videoFormat: VideoFormat = VideoFormats.Copy;
constructor() {
super('copy');

View File

@@ -1,5 +1,6 @@
import dayjs from 'dayjs';
import duration from 'dayjs/plugin/duration.js';
import path from 'node:path';
import { fileURLToPath } from 'node:url';
import { FileStreamSource } from '../../../../stream/types.ts';
import {
createTempWorkdir,
@@ -8,14 +9,32 @@ import {
} from '../../../../testing/ffmpeg/FfmpegIntegrationHelper.ts';
import {
binaries,
deriveVideoStreamForFixture,
Fixtures,
qsvInfo,
qsvTest,
} from '../../../../testing/ffmpeg/FfmpegTestFixtures.ts';
import { AudioFormats, FileOutputLocation } from '../../constants.ts';
import { PixelFormatYuv420P } from '../../format/PixelFormat.ts';
import { AudioInputSource } from '../../input/AudioInputSource.ts';
import {
AudioFormats,
FileOutputLocation,
VideoFormats,
} from '../../constants.ts';
import {
PixelFormatYuv420P,
PixelFormatYuv420P10Le,
} from '../../format/PixelFormat.ts';
import {
AudioInputFilterSource,
AudioInputSource,
} from '../../input/AudioInputSource.ts';
import { LavfiVideoInputSource } from '../../input/LavfiVideoInputSource.ts';
import { VideoInputSource } from '../../input/VideoInputSource.ts';
import { AudioStream, VideoStream } from '../../MediaStream.ts';
import { WatermarkInputSource } from '../../input/WatermarkInputSource.ts';
import {
AudioStream,
StillImageStream,
VideoStream,
} from '../../MediaStream.ts';
import { AudioState } from '../../state/AudioState.ts';
import {
DefaultPipelineOptions,
@@ -25,15 +44,90 @@ import { FrameState } from '../../state/FrameState.ts';
import { FrameSize } from '../../types.ts';
import { QsvPipelineBuilder } from './QsvPipelineBuilder.ts';
// Resolve the on-disk fixtures directory relative to this test file.
const fixturesDir = path.join(
  path.dirname(fileURLToPath(import.meta.url)),
  '../../../../testing/ffmpeg/fixtures',
);
// Enable the duration plugin (required for dayjs.duration below).
dayjs.extend(duration);
// Local paths to the video fixtures used by these integration tests.
// NOTE(review): a `Fixtures` symbol is also imported from FfmpegTestFixtures
// above — confirm which declaration the merged file is meant to keep.
const Fixtures = {
  video720p: path.join(fixturesDir, '720p_h264.ts'),
  video1080p: path.join(fixturesDir, '1080p_h264.ts'),
} as const;
// Limit output to 1 second in all integration tests to keep runs fast
const testDuration = dayjs.duration(1, 'second');
/**
 * Builds a 16:9 h264 VideoInputSource (yuv420p) for the given fixture file.
 *
 * @param inputPath path of the fixture file on disk
 * @param frameSize dimensions of the source video
 */
function makeH264VideoInput(inputPath: string, frameSize: FrameSize) {
  const fileSource = new FileStreamSource(inputPath);
  const stream = VideoStream.create({
    index: 0,
    codec: 'h264',
    frameSize,
    displayAspectRatio: '16:9',
    pixelFormat: new PixelFormatYuv420P(),
    providedSampleAspectRatio: null,
    colorFormat: null,
  });
  return VideoInputSource.withStream(fileSource, stream);
}
/**
 * Builds a 16:9 HEVC VideoInputSource using the 10-bit yuv420p10le pixel
 * format for the given fixture file.
 *
 * @param inputPath path of the fixture file on disk
 * @param frameSize dimensions of the source video
 */
function makeHevc10BitVideoInput(inputPath: string, frameSize: FrameSize) {
  const fileSource = new FileStreamSource(inputPath);
  const stream = VideoStream.create({
    index: 0,
    codec: VideoFormats.Hevc,
    frameSize,
    displayAspectRatio: '16:9',
    pixelFormat: new PixelFormatYuv420P10Le(),
    providedSampleAspectRatio: null,
    colorFormat: null,
  });
  return VideoInputSource.withStream(fileSource, stream);
}
/**
 * Builds a 4:3, 640x480 h264 (main profile) VideoInputSource for exercising
 * the scale + pillarbox-pad paths.
 *
 * @param inputPath path of the fixture file on disk
 */
function make43VideoInput(inputPath: string) {
  const fileSource = new FileStreamSource(inputPath);
  const stream = VideoStream.create({
    index: 0,
    codec: 'h264',
    profile: 'main',
    frameSize: FrameSize.withDimensions(640, 480),
    displayAspectRatio: '4:3',
    pixelFormat: new PixelFormatYuv420P(),
    providedSampleAspectRatio: null,
    colorFormat: null,
  });
  return VideoInputSource.withStream(fileSource, stream);
}
/**
 * Builds a stereo AAC AudioInputSource (192k bitrate, 384k buffer) for the
 * given fixture file; audio lives at stream index 1 in the fixtures.
 *
 * @param inputPath path of the fixture file on disk
 */
function makeAudioInput(inputPath: string) {
  const fileSource = new FileStreamSource(inputPath);
  const stream = AudioStream.create({ index: 1, codec: 'aac', channels: 2 });
  const state = AudioState.create({
    audioEncoder: AudioFormats.Aac,
    audioChannels: 2,
    audioBitrate: 192,
    audioBufferSize: 384,
  });
  return AudioInputSource.withStream(fileSource, stream, state);
}
function makeWatermark(color: 'white' | 'black' = 'white') {
return new WatermarkInputSource(
new FileStreamSource(
color === 'white' ? Fixtures.watermark : Fixtures.blackWatermark,
),
StillImageStream.create({
frameSize: FrameSize.withDimensions(100, 100),
index: 0,
}),
{
enabled: true,
position: 'bottom-right',
width: 10,
verticalMargin: 5,
horizontalMargin: 5,
duration: 0,
opacity: 100,
},
);
}
describe.skipIf(!binaries || !qsvInfo)('QsvPipelineBuilder integration', () => {
let workdir: string;
@@ -45,193 +139,503 @@ describe.skipIf(!binaries || !qsvInfo)('QsvPipelineBuilder integration', () => {
afterAll(() => cleanup());
// Builds a 16:9 h264 (yuv420p) VideoInputSource for the given fixture file.
// NOTE(review): duplicates the module-level makeH264VideoInput helper — this
// appears to be the pre-merge copy; confirm which one the final file keeps.
function makeVideoInput(inputPath: string, frameSize: FrameSize) {
return VideoInputSource.withStream(
new FileStreamSource(inputPath),
VideoStream.create({
codec: 'h264',
displayAspectRatio: '16:9',
frameSize,
index: 0,
pixelFormat: new PixelFormatYuv420P(),
providedSampleAspectRatio: null,
colorFormat: null,
}),
);
}
// Builds a stereo AAC AudioInputSource (192k bitrate, 384k buffer) reading the
// audio track at stream index 1 of the fixture file.
// NOTE(review): duplicates the module-level makeAudioInput helper — this
// appears to be the pre-merge copy; confirm which one the final file keeps.
function makeAudioInput(inputPath: string) {
return AudioInputSource.withStream(
new FileStreamSource(inputPath),
AudioStream.create({
channels: 2,
codec: 'aac',
index: 1,
}),
AudioState.create({
audioEncoder: AudioFormats.Aac,
audioChannels: 2,
audioBitrate: 192,
audioBufferSize: 384,
}),
);
}
// QsvPipelineBuilder arg order: hardwareCaps, binaryCaps, video, audio, concat, watermark, subtitle
qsvTest('basic h264 qsv transcode', async ({
binaryCapabilities,
ffmpegVersion,
resolvedQsv,
}) => {
const video = makeVideoInput(
Fixtures.video720p,
FrameSize.withDimensions(1280, 720),
);
const audio = makeAudioInput(Fixtures.video720p);
qsvTest(
'basic h264 qsv transcode',
async ({ binaryCapabilities, ffmpegVersion, resolvedQsv }) => {
const video = makeH264VideoInput(
Fixtures.video720p,
FrameSize.withDimensions(1280, 720),
);
const audio = makeAudioInput(Fixtures.video720p);
const builder = new QsvPipelineBuilder(
resolvedQsv.capabilities,
binaryCapabilities,
video,
audio,
null,
null,
null,
);
const builder = new QsvPipelineBuilder(
resolvedQsv.capabilities,
binaryCapabilities,
video,
audio,
null,
null,
null,
);
const frameState = new FrameState({
isAnamorphic: false,
scaledSize: FrameSize.withDimensions(1280, 720),
paddedSize: FrameSize.withDimensions(1280, 720),
const frameState = new FrameState({
isAnamorphic: false,
scaledSize: FrameSize.withDimensions(1280, 720),
paddedSize: FrameSize.withDimensions(1280, 720),
});
const outputPath = path.join(workdir, 'qsv_transcode.ts');
const pipeline = builder.build(
FfmpegState.create({
version: ffmpegVersion,
outputLocation: FileOutputLocation(outputPath, true),
vaapiDevice: resolvedQsv.device,
}),
frameState,
DefaultPipelineOptions,
);
const { exitCode, stderr } = runFfmpegWithPipeline(
binaries!.ffmpeg,
pipeline.getCommandArgs(),
);
expect(
exitCode,
`Pipeline command failed: ${pipeline.getCommandArgs().join(' ')}\n${stderr}`,
).toBe(0);
const probe = probeFile(binaries!.ffprobe, outputPath);
expect(probe.streams.some((s) => s.codec_type === 'video')).toBe(true);
},
);
qsvTest(
'scale from 1080p to 720p via qsv',
async ({ binaryCapabilities, ffmpegVersion, resolvedQsv }) => {
const video = makeH264VideoInput(
Fixtures.video1080p,
FrameSize.withDimensions(1920, 1080),
);
const audio = makeAudioInput(Fixtures.video1080p);
const builder = new QsvPipelineBuilder(
resolvedQsv.capabilities,
binaryCapabilities,
video,
audio,
null,
null,
null,
);
const frameState = new FrameState({
isAnamorphic: false,
scaledSize: FrameSize.withDimensions(1280, 720),
paddedSize: FrameSize.withDimensions(1280, 720),
});
const outputPath = path.join(workdir, 'qsv_scale.ts');
const pipeline = builder.build(
FfmpegState.create({
version: ffmpegVersion,
outputLocation: FileOutputLocation(outputPath, true),
vaapiDevice: resolvedQsv.device,
}),
frameState,
DefaultPipelineOptions,
);
const { exitCode, stderr } = runFfmpegWithPipeline(
binaries!.ffmpeg,
pipeline.getCommandArgs(),
);
expect(
exitCode,
`Pipeline command failed: ${pipeline.getCommandArgs().join(' ')}\n${stderr}`,
).toBe(0);
const probe = probeFile(binaries!.ffprobe, outputPath);
expect(probe.streams.some((s) => s.codec_type === 'video')).toBe(true);
},
);
qsvTest(
'copy mode (qsv pipeline, no hw transcode needed)',
async ({ binaryCapabilities, ffmpegVersion, resolvedQsv }) => {
const video = makeH264VideoInput(
Fixtures.video720p,
FrameSize.withDimensions(1280, 720),
);
const audio = makeAudioInput(Fixtures.video720p);
const builder = new QsvPipelineBuilder(
resolvedQsv.capabilities,
binaryCapabilities,
video,
audio,
null,
null,
null,
);
const frameState = new FrameState({
isAnamorphic: false,
scaledSize: FrameSize.withDimensions(1280, 720),
paddedSize: FrameSize.withDimensions(1280, 720),
videoFormat: 'copy',
});
const outputPath = path.join(workdir, 'qsv_copy.ts');
const pipeline = builder.build(
FfmpegState.create({
version: ffmpegVersion,
outputLocation: FileOutputLocation(outputPath, true),
vaapiDevice: resolvedQsv.device,
}),
frameState,
DefaultPipelineOptions,
);
const { exitCode, stderr } = runFfmpegWithPipeline(
binaries!.ffmpeg,
pipeline.getCommandArgs(),
);
expect(
exitCode,
`Pipeline command failed: ${pipeline.getCommandArgs().join(' ')}\n${stderr}`,
).toBe(0);
const probe = probeFile(binaries!.ffprobe, outputPath);
expect(probe.streams.some((s) => s.codec_type === 'video')).toBe(true);
},
);
describe('pixel format fixes', () => {
let workdir: string;
let cleanup: () => Promise<void>;
beforeAll(async () => {
({ dir: workdir, cleanup } = await createTempWorkdir());
});
const outputPath = path.join(workdir, 'qsv_transcode.ts');
const pipeline = builder.build(
FfmpegState.create({
version: ffmpegVersion,
outputLocation: FileOutputLocation(outputPath, true),
vaapiDevice: resolvedQsv.device,
}),
frameState,
DefaultPipelineOptions,
afterAll(() => cleanup());
// Bug 3: after watermark overlay frames are in software (yuv420p); without
// the fix, bare hwupload would fail format negotiation.
qsvTest(
'QSV transcode with watermark (Bug 3: format=nv12 before hwupload)',
async ({ binaryCapabilities, ffmpegVersion, resolvedQsv }) => {
const video = makeH264VideoInput(
Fixtures.video720p,
FrameSize.withDimensions(1280, 720),
);
const audio = makeAudioInput(Fixtures.video720p);
const watermark = makeWatermark('black');
const builder = new QsvPipelineBuilder(
resolvedQsv.capabilities,
binaryCapabilities,
video,
audio,
null,
watermark,
null,
);
const frameState = new FrameState({
isAnamorphic: false,
scaledSize: FrameSize.withDimensions(1280, 720),
paddedSize: FrameSize.withDimensions(1280, 720),
});
const outputPath = path.join(workdir, 'qsv_watermark.ts');
const pipeline = builder.build(
FfmpegState.create({
duration: testDuration,
version: ffmpegVersion,
outputLocation: FileOutputLocation(outputPath, true),
vaapiDevice: resolvedQsv.device,
}),
frameState,
DefaultPipelineOptions,
);
const { exitCode, stderr } = runFfmpegWithPipeline(
binaries!.ffmpeg,
pipeline.getCommandArgs(),
);
expect(
exitCode,
`Pipeline command failed: ${pipeline.getCommandArgs().join(' ')}\n${stderr}`,
).toBe(0);
const probe = probeFile(binaries!.ffprobe, outputPath);
expect(probe.streams.some((s) => s.codec_type === 'video')).toBe(true);
},
);
const { exitCode, stderr } = runFfmpegWithPipeline(
binaries!.ffmpeg,
pipeline.getCommandArgs(),
// Bug 3: scaling + padding + watermark path — ensures the format=nv12,hwupload
// sequence is correct even when scale_qsv and pad filters also run.
qsvTest(
'QSV transcode with scaling + padding + watermark (Bug 3)',
async ({ binaryCapabilities, ffmpegVersion, resolvedQsv }) => {
const video = makeH264VideoInput(
Fixtures.video1080p,
FrameSize.withDimensions(1920, 1080),
);
const audio = makeAudioInput(Fixtures.video1080p);
const watermark = makeWatermark();
const builder = new QsvPipelineBuilder(
resolvedQsv.capabilities,
binaryCapabilities,
video,
audio,
null,
watermark,
null,
);
const frameState = new FrameState({
isAnamorphic: false,
scaledSize: FrameSize.withDimensions(1280, 720),
paddedSize: FrameSize.withDimensions(1280, 720),
});
const outputPath = path.join(workdir, 'qsv_scale_watermark.ts');
const pipeline = builder.build(
FfmpegState.create({
duration: testDuration,
version: ffmpegVersion,
outputLocation: FileOutputLocation(outputPath, true),
vaapiDevice: resolvedQsv.device,
}),
frameState,
DefaultPipelineOptions,
);
const { exitCode, stderr } = runFfmpegWithPipeline(
binaries!.ffmpeg,
pipeline.getCommandArgs(),
);
expect(
exitCode,
`Pipeline command failed: ${pipeline.getCommandArgs().join(' ')}\n${stderr}`,
).toBe(0);
const probe = probeFile(binaries!.ffprobe, outputPath);
expect(probe.streams.some((s) => s.codec_type === 'video')).toBe(true);
},
);
expect(
exitCode,
`Pipeline command failed: ${pipeline.getCommandArgs().join(' ')}\n${stderr}`,
).toBe(0);
// Bug 3: anamorphic (4:3) source forces scale + pillarbox pad + overlay +
// hwupload chain — the deepest exercise of the fix.
qsvTest(
'QSV transcode of anamorphic content with watermark (Bug 3)',
async ({ binaryCapabilities, ffmpegVersion, resolvedQsv }) => {
const video = make43VideoInput(Fixtures.video480p43);
const audio = makeAudioInput(Fixtures.video480p43);
const watermark = makeWatermark();
const probe = probeFile(binaries!.ffprobe, outputPath);
expect(probe.streams.some((s) => s.codec_type === 'video')).toBe(true);
const builder = new QsvPipelineBuilder(
resolvedQsv.capabilities,
binaryCapabilities,
video,
audio,
null,
watermark,
null,
);
// 4:3 → squarePixelFrameSize gives 1440x1080, padded to 1920x1080
const frameState = new FrameState({
isAnamorphic: false,
scaledSize: video.streams[0]!.squarePixelFrameSize(FrameSize.FHD),
paddedSize: FrameSize.FHD,
});
const outputPath = path.join(workdir, 'qsv_anamorphic_watermark.ts');
const pipeline = builder.build(
FfmpegState.create({
duration: testDuration,
version: ffmpegVersion,
outputLocation: FileOutputLocation(outputPath, true),
vaapiDevice: resolvedQsv.device,
}),
frameState,
DefaultPipelineOptions,
);
const { exitCode, stderr } = runFfmpegWithPipeline(
binaries!.ffmpeg,
pipeline.getCommandArgs(),
);
expect(
exitCode,
`Pipeline command failed: ${pipeline.getCommandArgs().join(' ')}\n${stderr}`,
).toBe(0);
const probe = probeFile(binaries!.ffprobe, outputPath);
const videoOut = probe.streams.find((s) => s.codec_type === 'video');
expect(videoOut).toBeDefined();
expect(videoOut!.width).toBe(1920);
expect(videoOut!.height).toBe(1080);
},
);
// Bug 1: LavfiVideoInputSource sets PixelFormatUnknown(); without the fix
// this produces vpp_qsv=format=unknown and ffmpeg fails.
qsvTest(
'error screen (LavfiVideoInputSource) does not produce format=unknown (Bug 1)',
async ({ binaryCapabilities, ffmpegVersion, resolvedQsv }) => {
const audioState = AudioState.create({
audioEncoder: AudioFormats.Aac,
audioChannels: 2,
audioBitrate: 192,
audioBufferSize: 384,
});
const builder = new QsvPipelineBuilder(
resolvedQsv.capabilities,
binaryCapabilities,
LavfiVideoInputSource.errorText(FrameSize.FHD, 'Error', 'Test'),
AudioInputFilterSource.noise(audioState),
null,
null,
null,
);
const outputPath = path.join(workdir, 'qsv_error_screen.ts');
const pipeline = builder.build(
FfmpegState.create({
duration: testDuration,
version: ffmpegVersion,
outputLocation: FileOutputLocation(outputPath, true),
vaapiDevice: resolvedQsv.device,
}),
new FrameState({
isAnamorphic: false,
scaledSize: FrameSize.FHD,
paddedSize: FrameSize.FHD,
videoFormat: VideoFormats.H264,
pixelFormat: new PixelFormatYuv420P(),
}),
DefaultPipelineOptions,
);
const args = pipeline.getCommandArgs();
expect(args.join(' ')).not.toContain('format=unknown');
const { exitCode, stderr } = runFfmpegWithPipeline(
binaries!.ffmpeg,
args,
);
expect(
exitCode,
`Pipeline command failed: ${args.join(' ')}\n${stderr}`,
).toBe(0);
const probe = probeFile(binaries!.ffprobe, outputPath);
expect(probe.streams.some((s) => s.codec_type === 'video')).toBe(true);
},
);
// Bug 2: -pix_fmt yuv420p is incompatible with h264_qsv operating on hardware
// frames; without the fix ffmpeg crashes with a swscaler error.
qsvTest(
'no scaling path does not emit -pix_fmt yuv420p for QSV encode (Bug 2)',
async ({ binaryCapabilities, ffmpegVersion, resolvedQsv }) => {
const video = makeH264VideoInput(
Fixtures.video720p,
FrameSize.withDimensions(1280, 720),
);
const audio = makeAudioInput(Fixtures.video720p);
const builder = new QsvPipelineBuilder(
resolvedQsv.capabilities,
binaryCapabilities,
video,
audio,
null,
null,
null,
);
const frameState = new FrameState({
isAnamorphic: false,
scaledSize: FrameSize.withDimensions(1280, 720),
paddedSize: FrameSize.withDimensions(1280, 720),
videoFormat: VideoFormats.H264,
pixelFormat: new PixelFormatYuv420P(),
});
const outputPath = path.join(workdir, 'qsv_no_scale_pix_fmt.ts');
const pipeline = builder.build(
FfmpegState.create({
duration: testDuration,
version: ffmpegVersion,
outputLocation: FileOutputLocation(outputPath, true),
vaapiDevice: resolvedQsv.device,
}),
frameState,
DefaultPipelineOptions,
);
const args = pipeline.getCommandArgs();
expect(args).not.toContain('-pix_fmt');
const { exitCode, stderr } = runFfmpegWithPipeline(
binaries!.ffmpeg,
args,
);
expect(
exitCode,
`Pipeline command failed: ${args.join(' ')}\n${stderr}`,
).toBe(0);
expect(stderr).not.toContain('swscaler');
const probe = probeFile(binaries!.ffprobe, outputPath);
expect(probe.streams.some((s) => s.codec_type === 'video')).toBe(true);
},
);
});
qsvTest('scale from 1080p to 720p via qsv', async ({
binaryCapabilities,
ffmpegVersion,
resolvedQsv,
}) => {
const video = makeVideoInput(
Fixtures.video1080p,
FrameSize.withDimensions(1920, 1080),
);
const audio = makeAudioInput(Fixtures.video1080p);
qsvTest(
'hevc decoding with setpts',
async ({ binaryCapabilities, resolvedQsv, ffmpegVersion }) => {
const video = await deriveVideoStreamForFixture(Fixtures.videoHevc1080p);
const audio = makeAudioInput(Fixtures.videoHevc1080p);
const builder = new QsvPipelineBuilder(
resolvedQsv.capabilities,
binaryCapabilities,
video,
audio,
null,
null,
null,
);
const builder = new QsvPipelineBuilder(
resolvedQsv.capabilities,
binaryCapabilities,
video,
audio,
null,
null,
null,
);
const frameState = new FrameState({
isAnamorphic: false,
scaledSize: FrameSize.withDimensions(1280, 720),
paddedSize: FrameSize.withDimensions(1280, 720),
});
const frameState = new FrameState({
isAnamorphic: false,
scaledSize: FrameSize.withDimensions(1280, 720),
paddedSize: FrameSize.withDimensions(1280, 720),
pixelFormat: new PixelFormatYuv420P(),
});
const outputPath = path.join(workdir, 'qsv_scale.ts');
const pipeline = builder.build(
FfmpegState.create({
version: ffmpegVersion,
outputLocation: FileOutputLocation(outputPath, true),
vaapiDevice: resolvedQsv.device,
}),
frameState,
DefaultPipelineOptions,
);
const outputPath = path.join(workdir, 'qsv_transcode.ts');
const pipeline = builder.build(
FfmpegState.create({
version: ffmpegVersion,
outputLocation: FileOutputLocation(outputPath, true),
vaapiDevice: resolvedQsv.device,
start: dayjs.duration({ seconds: 1 }),
}),
frameState,
DefaultPipelineOptions,
);
const { exitCode, stderr } = runFfmpegWithPipeline(
binaries!.ffmpeg,
pipeline.getCommandArgs(),
);
const { exitCode, stderr } = runFfmpegWithPipeline(
binaries!.ffmpeg,
pipeline.getCommandArgs(),
);
expect(
exitCode,
`Pipeline command failed: ${pipeline.getCommandArgs().join(' ')}\n${stderr}`,
).toBe(0);
expect(
exitCode,
`Pipeline command failed: ${pipeline.getCommandArgs().join(' ')}\n${stderr}`,
).toBe(0);
const probe = probeFile(binaries!.ffprobe, outputPath);
expect(probe.streams.some((s) => s.codec_type === 'video')).toBe(true);
});
qsvTest('copy mode (qsv pipeline, no hw transcode needed)', async ({
binaryCapabilities,
ffmpegVersion,
resolvedQsv,
}) => {
const video = makeVideoInput(
Fixtures.video720p,
FrameSize.withDimensions(1280, 720),
);
const audio = makeAudioInput(Fixtures.video720p);
const builder = new QsvPipelineBuilder(
resolvedQsv.capabilities,
binaryCapabilities,
video,
audio,
null,
null,
null,
);
const frameState = new FrameState({
isAnamorphic: false,
scaledSize: FrameSize.withDimensions(1280, 720),
paddedSize: FrameSize.withDimensions(1280, 720),
videoFormat: 'copy',
});
const outputPath = path.join(workdir, 'qsv_copy.ts');
const pipeline = builder.build(
FfmpegState.create({
version: ffmpegVersion,
outputLocation: FileOutputLocation(outputPath, true),
vaapiDevice: resolvedQsv.device,
}),
frameState,
DefaultPipelineOptions,
);
const { exitCode, stderr } = runFfmpegWithPipeline(
binaries!.ffmpeg,
pipeline.getCommandArgs(),
);
expect(
exitCode,
`Pipeline command failed: ${pipeline.getCommandArgs().join(' ')}\n${stderr}`,
).toBe(0);
const probe = probeFile(binaries!.ffprobe, outputPath);
expect(probe.streams.some((s) => s.codec_type === 'video')).toBe(true);
});
const probe = probeFile(binaries!.ffprobe, outputPath);
expect(probe.streams.some((s) => s.codec_type === 'video')).toBe(true);
},
);
});

View File

@@ -1,5 +1,6 @@
import { Watermark } from '@tunarr/types';
import dayjs from 'dayjs';
import { StrictOmit } from 'ts-essentials';
import { FileStreamSource } from '../../../../stream/types.ts';
import { TUNARR_ENV_VARS } from '../../../../util/env.ts';
import { EmptyFfmpegCapabilities } from '../../capabilities/FfmpegCapabilities.ts';
@@ -14,20 +15,22 @@ import {
ColorRanges,
ColorSpaces,
ColorTransferFormats,
VideoFormats,
} from '../../constants.ts';
import { HardwareDownloadFilter } from '../../filter/HardwareDownloadFilter.ts';
import { PixelFormatFilter } from '../../filter/PixelFormatFilter.ts';
import { HardwareUploadQsvFilter } from '../../filter/qsv/HardwareUploadQsvFilter.ts';
import { QsvFormatFilter } from '../../filter/qsv/QsvFormatFilter.ts';
import { TonemapQsvFilter } from '../../filter/qsv/TonemapQsvFilter.ts';
import { TonemapFilter } from '../../filter/TonemapFilter.ts';
import { OverlayWatermarkFilter } from '../../filter/watermark/OverlayWatermarkFilter.ts';
import { WatermarkOpacityFilter } from '../../filter/watermark/WatermarkOpacityFilter.ts';
import { WatermarkScaleFilter } from '../../filter/watermark/WatermarkScaleFilter.ts';
import { ColorFormat } from '../../format/ColorFormat.ts';
import {
PixelFormats,
PixelFormatYuv420P,
PixelFormatYuv420P10Le,
} from '../../format/PixelFormat.ts';
import { LavfiVideoInputSource } from '../../input/LavfiVideoInputSource.ts';
import { SubtitlesInputSource } from '../../input/SubtitlesInputSource.ts';
import { VideoInputSource } from '../../input/VideoInputSource.ts';
import { WatermarkInputSource } from '../../input/WatermarkInputSource.ts';
@@ -36,6 +39,7 @@ import {
StillImageStream,
SubtitleMethods,
VideoStream,
VideoStreamFields,
} from '../../MediaStream.ts';
import {
DefaultPipelineOptions,
@@ -46,6 +50,151 @@ import { FrameState } from '../../state/FrameState.ts';
import { FrameSize } from '../../types.ts';
import { QsvPipelineBuilder } from './QsvPipelineBuilder.ts';
// ─── Module-level constants ───────────────────────────────────────────────────
// Fixed ffmpeg version used across these unit tests; the parsed
// major/minor/patch fields drive any version-gated builder behavior.
const ffmpegVersion = {
  versionString: 'n7.0.2-15-g0458a86656-20240904',
  majorVersion: 7,
  minorVersion: 0,
  patchVersion: 2,
  isUnknown: false,
} as const;
// HDR color metadata (BT.2020 primaries/matrix, SMPTE 2084 (PQ) transfer,
// limited/TV range) — attached to inputs to exercise tonemapping paths.
const hdrColorFormat = new ColorFormat({
  colorRange: ColorRanges.Tv,
  colorSpace: ColorSpaces.Bt2020nc,
  colorTransfer: ColorTransferFormats.Smpte2084,
  colorPrimaries: ColorPrimaries.Bt2020,
});
// ─── Shared input factories ───────────────────────────────────────────────────
/**
 * Builds an 8-bit H264 1080p video input source.
 *
 * Pass `sar: '1:1'` to force square pixels (so the pipeline performs no
 * scaling); leave it `null` to let the pipeline decide.
 */
function makeH264VideoInput(sar: string | null = null) {
  const stream = VideoStream.create({
    index: 0,
    codec: 'h264',
    profile: 'main',
    displayAspectRatio: '16:9',
    frameSize: FrameSize.FHD,
    pixelFormat: new PixelFormatYuv420P(),
    providedSampleAspectRatio: sar,
    colorFormat: ColorFormat.unknown,
  });
  return VideoInputSource.withStream(
    new FileStreamSource('/path/to/video.mkv'),
    stream,
  );
}
/**
 * Builds a 10-bit HEVC ("main 10") 1080p video input carrying HDR color
 * metadata, for exercising tonemap and bit-depth conversion paths.
 */
function makeHevc10BitVideoInput() {
  const stream = VideoStream.create({
    index: 0,
    codec: 'hevc',
    profile: 'main 10',
    displayAspectRatio: '16:9',
    frameSize: FrameSize.FHD,
    pixelFormat: new PixelFormatYuv420P10Le(),
    providedSampleAspectRatio: null,
    colorFormat: hdrColorFormat,
  });
  return VideoInputSource.withStream(
    new FileStreamSource('/path/to/hdr-video.mkv'),
    stream,
  );
}
function makeWatermarkSource(overrides: Partial<Watermark> = {}) {
return new WatermarkInputSource(
new FileStreamSource('/path/to/watermark.png'),
StillImageStream.create({
frameSize: FrameSize.withDimensions(100, 100),
index: 1,
}),
{
duration: 0,
enabled: true,
horizontalMargin: 5,
opacity: 100,
position: 'bottom-right',
verticalMargin: 5,
width: 10,
...overrides,
} satisfies Watermark,
);
}
/**
 * Desired FrameState: the input's first stream scaled to square pixels at
 * FHD, padded to FHD, with 8-bit yuv420p output.
 */
function makeDesiredFrameState(video: VideoInputSource) {
  const primaryStream = video.streams[0]!;
  return new FrameState({
    scaledSize: primaryStream.squarePixelFrameSize(FrameSize.FHD),
    paddedSize: FrameSize.FHD,
    isAnamorphic: false,
    pixelFormat: new PixelFormatYuv420P(),
  });
}
/**
 * Builds an HEVC 1080p video input. Any stream field other than the codec
 * can be overridden via `fields` (e.g. to switch to a 10-bit pixel format).
 */
function makeHevcVideoInput(
  fields?: Partial<StrictOmit<VideoStreamFields, 'codec'>>,
) {
  const stream = VideoStream.create({
    index: 0,
    codec: VideoFormats.Hevc,
    profile: 'main',
    displayAspectRatio: '16:9',
    frameSize: FrameSize.FHD,
    pixelFormat: new PixelFormatYuv420P(),
    // SAR 1:1 means non-anamorphic: squarePixelFrameSize(FHD) == FHD,
    // so no scaling or padding is needed. The frame stays on hardware
    // from the QSV decoder until the watermark path.
    providedSampleAspectRatio: '1:1',
    colorFormat: ColorFormat.unknown,
    ...fields,
  });
  return VideoInputSource.withStream(
    new FileStreamSource('/path/to/video.mkv'),
    stream,
  );
}
// H264 with both decode and encode capabilities — frame goes to hardware.
// HEVC is listed with decode only (no HEVC encode entrypoint here).
const fullCapabilities = new VaapiHardwareCapabilities([
  new VaapiProfileEntrypoint(VaapiProfiles.H264Main, VaapiEntrypoint.Decode),
  new VaapiProfileEntrypoint(VaapiProfiles.H264Main, VaapiEntrypoint.Encode),
  new VaapiProfileEntrypoint(VaapiProfiles.HevcMain, VaapiEntrypoint.Decode),
]);
function buildPipeline(opts: {
videoInput?: VideoInputSource;
watermark?: WatermarkInputSource | null;
capabilities?: VaapiHardwareCapabilities;
pipelineOptions?: Partial<PipelineOptions>;
}) {
const video = opts.videoInput ?? makeH264VideoInput();
const builder = new QsvPipelineBuilder(
opts.capabilities ?? fullCapabilities,
EmptyFfmpegCapabilities,
video,
null,
null,
opts.watermark ?? null,
null,
);
return builder.build(
FfmpegState.create({
version: { versionString: '7.1.1', isUnknown: false },
}),
new FrameState({
isAnamorphic: false,
scaledSize: video.streams[0]!.squarePixelFrameSize(FrameSize.FHD),
paddedSize: FrameSize.FHD,
pixelFormat: new PixelFormatYuv420P(),
}),
{ ...DefaultPipelineOptions, ...(opts.pipelineOptions ?? {}) },
);
}
describe('QsvPipelineBuilder', () => {
test('should work', () => {
const capabilities = new VaapiHardwareCapabilities([]);
@@ -361,21 +510,8 @@ describe('QsvPipelineBuilder', () => {
});
describe('tonemapping', () => {
const ffmpegVersion = {
versionString: 'n7.0.2-15-g0458a86656-20240904',
majorVersion: 7,
minorVersion: 0,
patchVersion: 2,
isUnknown: false,
} as const;
const hdrColorFormat = new ColorFormat({
colorRange: ColorRanges.Tv,
colorSpace: ColorSpaces.Bt2020nc,
colorTransfer: ColorTransferFormats.Smpte2084,
colorPrimaries: ColorPrimaries.Bt2020,
});
// Capabilities covering both H264 and HEVC 10-bit decode+encode —
// needed to test hardware tonemap paths.
const fullCapabilities = new VaapiHardwareCapabilities([
new VaapiProfileEntrypoint(
VaapiProfiles.H264Main,
@@ -399,47 +535,6 @@ describe('QsvPipelineBuilder', () => {
vi.unstubAllEnvs();
});
function makeH264VideoInput() {
return VideoInputSource.withStream(
new FileStreamSource('/path/to/video.mkv'),
VideoStream.create({
codec: 'h264',
profile: 'main',
displayAspectRatio: '16:9',
frameSize: FrameSize.FHD,
index: 0,
pixelFormat: new PixelFormatYuv420P(),
providedSampleAspectRatio: null,
colorFormat: ColorFormat.unknown,
}),
);
}
function makeHevc10BitVideoInput() {
return VideoInputSource.withStream(
new FileStreamSource('/path/to/hdr-video.mkv'),
VideoStream.create({
codec: 'hevc',
displayAspectRatio: '16:9',
frameSize: FrameSize.FHD,
index: 0,
pixelFormat: new PixelFormatYuv420P10Le(),
providedSampleAspectRatio: null,
colorFormat: hdrColorFormat,
profile: 'main 10',
}),
);
}
function makeDesiredFrameState(video: VideoInputSource) {
return new FrameState({
isAnamorphic: false,
scaledSize: video.streams[0]!.squarePixelFrameSize(FrameSize.FHD),
paddedSize: FrameSize.FHD,
pixelFormat: new PixelFormatYuv420P(),
});
}
test('does not apply tonemap when TUNARR_TONEMAP_ENABLED is not set', () => {
const video = makeH264VideoInput();
@@ -588,21 +683,6 @@ describe('QsvPipelineBuilder', () => {
});
describe('initial current state', () => {
const ffmpegVersion = {
versionString: 'n7.0.2-15-g0458a86656-20240904',
majorVersion: 7,
minorVersion: 0,
patchVersion: 2,
isUnknown: false,
} as const;
const hdrColorFormat = new ColorFormat({
colorRange: ColorRanges.Tv,
colorSpace: ColorSpaces.Bt2020nc,
colorTransfer: ColorTransferFormats.Smpte2084,
colorPrimaries: ColorPrimaries.Bt2020,
});
const emptyCapabilities = new VaapiHardwareCapabilities([]);
afterEach(() => {
@@ -610,18 +690,7 @@ describe('QsvPipelineBuilder', () => {
});
test('initializes with the input pixel format when it matches the desired format', () => {
const video = VideoInputSource.withStream(
new FileStreamSource('/path/to/video.mkv'),
VideoStream.create({
codec: 'h264',
displayAspectRatio: '16:9',
frameSize: FrameSize.FHD,
index: 0,
pixelFormat: new PixelFormatYuv420P(),
providedSampleAspectRatio: null,
colorFormat: ColorFormat.unknown,
}),
);
const video = makeH264VideoInput();
const builder = new QsvPipelineBuilder(
emptyCapabilities,
@@ -635,12 +704,7 @@ describe('QsvPipelineBuilder', () => {
const out = builder.build(
FfmpegState.create({ version: ffmpegVersion }),
new FrameState({
isAnamorphic: false,
scaledSize: FrameSize.FHD,
paddedSize: FrameSize.FHD,
pixelFormat: new PixelFormatYuv420P(),
}),
makeDesiredFrameState(video),
DefaultPipelineOptions,
);
@@ -693,32 +757,22 @@ describe('QsvPipelineBuilder', () => {
},
);
// A QsvFormatFilter should be present because the initial currentState
// A PixelFormatFilter should be present because the initial currentState
// correctly reflects the 10-bit input pixel format (yuv420p10le), which
// differs from the desired 8-bit output (yuv420p).
// differs from the desired 8-bit output (yuv420p). Both HW decode and
// encode are disabled, so the frame stays on software and PixelFormatFilter
// (not QsvFormatFilter) is used for the conversion.
const pixelFormatFilterSteps =
out.getComplexFilter()?.filterChain.pixelFormatFilterSteps ?? [];
expect(
pixelFormatFilterSteps.some((s) => s instanceof QsvFormatFilter),
pixelFormatFilterSteps.some((s) => s instanceof PixelFormatFilter),
).toBe(true);
});
test('initializes with the input color format, used by software tonemap', () => {
vi.stubEnv(TUNARR_ENV_VARS.TONEMAP_ENABLED, 'true');
const video = VideoInputSource.withStream(
new FileStreamSource('/path/to/video.mkv'),
VideoStream.create({
codec: 'hevc',
profile: 'main 10',
displayAspectRatio: '16:9',
frameSize: FrameSize.FHD,
index: 0,
pixelFormat: new PixelFormatYuv420P10Le(),
providedSampleAspectRatio: null,
colorFormat: hdrColorFormat,
}),
);
const video = makeHevc10BitVideoInput();
const builder = new QsvPipelineBuilder(
emptyCapabilities,
@@ -732,12 +786,7 @@ describe('QsvPipelineBuilder', () => {
const out = builder.build(
FfmpegState.create({ version: ffmpegVersion }),
new FrameState({
isAnamorphic: false,
scaledSize: FrameSize.FHD,
paddedSize: FrameSize.FHD,
pixelFormat: new PixelFormatYuv420P(),
}),
makeDesiredFrameState(video),
{
...DefaultPipelineOptions,
disableHardwareDecoding: true,
@@ -762,15 +811,8 @@ describe('QsvPipelineBuilder', () => {
});
describe('watermark', () => {
const ffmpegVersion = {
versionString: 'n7.0.2-15-g0458a86656-20240904',
majorVersion: 7,
minorVersion: 0,
patchVersion: 2,
isUnknown: false,
} as const;
// H264 with both decode and encode capabilities — frame goes to hardware
// H264-only capabilities: frame goes to hardware for decode and encode,
// which exercises the hwdownload→overlay→hwupload watermark path.
const fullCapabilities = new VaapiHardwareCapabilities([
new VaapiProfileEntrypoint(
VaapiProfiles.H264Main,
@@ -782,52 +824,15 @@ describe('QsvPipelineBuilder', () => {
),
]);
function makeH264VideoInput() {
return VideoInputSource.withStream(
new FileStreamSource('/path/to/video.mkv'),
VideoStream.create({
codec: 'h264',
profile: 'main',
displayAspectRatio: '16:9',
frameSize: FrameSize.FHD,
index: 0,
pixelFormat: new PixelFormatYuv420P(),
// SAR 1:1 means non-anamorphic: squarePixelFrameSize(FHD) == FHD,
// so no scaling or padding is needed. The frame stays on hardware
// from the QSV decoder until the watermark path.
providedSampleAspectRatio: '1:1',
colorFormat: ColorFormat.unknown,
}),
);
}
function makeWatermarkSource(overrides: Partial<Watermark> = {}) {
return new WatermarkInputSource(
new FileStreamSource('/path/to/watermark.png'),
StillImageStream.create({
frameSize: FrameSize.withDimensions(100, 100),
index: 1,
}),
{
duration: 0,
enabled: true,
horizontalMargin: 5,
opacity: 100,
position: 'bottom-right',
verticalMargin: 5,
width: 10,
...overrides,
} satisfies Watermark,
);
}
function buildPipeline(opts: {
videoInput?: VideoInputSource;
watermark?: WatermarkInputSource | null;
capabilities?: VaapiHardwareCapabilities;
pipelineOptions?: Partial<PipelineOptions>;
}) {
const video = opts.videoInput ?? makeH264VideoInput();
// SAR 1:1 → squarePixelFrameSize(FHD) == FHD, so no scaling/padding is
// needed and the frame stays on hardware from QSV decode until watermark.
const video = opts.videoInput ?? makeH264VideoInput('1:1');
const builder = new QsvPipelineBuilder(
opts.capabilities ?? fullCapabilities,
EmptyFfmpegCapabilities,
@@ -839,12 +844,7 @@ describe('QsvPipelineBuilder', () => {
);
return builder.build(
FfmpegState.create({ version: ffmpegVersion }),
new FrameState({
isAnamorphic: false,
scaledSize: video.streams[0]!.squarePixelFrameSize(FrameSize.FHD),
paddedSize: FrameSize.FHD,
pixelFormat: new PixelFormatYuv420P(),
}),
makeDesiredFrameState(video),
{ ...DefaultPipelineOptions, ...(opts.pipelineOptions ?? {}) },
);
}
@@ -1023,6 +1023,177 @@ describe('QsvPipelineBuilder', () => {
});
});
describe('pixel format fixes', () => {
// Fix 1: a lavfi-generated error screen carries no known input pixel
// format; the builder must not emit a literal 'format=unknown' filter
// into the command line for such inputs.
test('no format=unknown for error screens (Fix 1)', () => {
  const errorScreen = LavfiVideoInputSource.errorText(
    FrameSize.FHD,
    'Error',
    'Subtitle',
  );
  const builder = new QsvPipelineBuilder(
    fullCapabilities,
    EmptyFfmpegCapabilities,
    errorScreen,
    null,
    null,
    null,
    null,
  );
  const pipeline = builder.build(
    FfmpegState.create({
      version: { versionString: '7.1.1', isUnknown: false },
    }),
    new FrameState({
      isAnamorphic: false,
      scaledSize: FrameSize.FHD,
      paddedSize: FrameSize.FHD,
      pixelFormat: new PixelFormatYuv420P(),
    }),
    DefaultPipelineOptions,
  );
  // Passing `args` as the expect message surfaces the full command on failure.
  const args = pipeline.getCommandArgs().join(' ');
  expect(args, args).not.toContain('format=unknown');
});
// Fix 2: hardware (QSV) encoders must not receive a -pix_fmt output flag.
test('no -pix_fmt with QSV encode (Fix 2)', () => {
  const commandLine = buildPipeline({}).getCommandArgs().join(' ');
  // The command line doubles as the failure message for easier debugging.
  expect(commandLine, commandLine).not.toContain('-pix_fmt');
});
test('no -pix_fmt for QSV encode without scaling (Fix 2)', () => {
  // H264 FHD input → FHD output: no scaling or padding, so no QSV scale filter.
  // Still must not emit -pix_fmt because the encoder is QSV.
  const pipeline = buildPipeline({
    pipelineOptions: { disableHardwareDecoding: true },
  });
  const commandLine = pipeline.getCommandArgs().join(' ');
  expect(commandLine, commandLine).not.toContain('-pix_fmt');
});
test('format=nv12 inserted before hwupload after watermark overlay (Fix 3)', () => {
  // Software decode leaves frame in yuv420p. After the watermark overlay,
  // hwupload needs the frame in nv12 format; without Fix 3 the conversion
  // filter was absent, causing format negotiation failures.
  const pipeline = buildPipeline({
    watermark: makeWatermarkSource(),
    pipelineOptions: { disableHardwareDecoding: true },
  });
  const pixelFormatFilterSteps =
    pipeline.getComplexFilter()!.filterChain.pixelFormatFilterSteps;
  // First assert both filters exist in the chain…
  const hwUploadIdx = pixelFormatFilterSteps.findIndex(
    (s) => s instanceof HardwareUploadQsvFilter,
  );
  expect(
    hwUploadIdx,
    'HardwareUploadQsvFilter should be present',
  ).toBeGreaterThan(-1);
  const fmtFilterIdx = pixelFormatFilterSteps.findIndex(
    (s) => s instanceof PixelFormatFilter,
  );
  expect(
    fmtFilterIdx,
    'PixelFormatFilter should be present before hwupload',
  ).toBeGreaterThan(-1);
  // …and that the format conversion is ordered before the upload.
  expect(fmtFilterIdx).toBeLessThan(hwUploadIdx);
  const fmtFilter = pixelFormatFilterSteps[
    fmtFilterIdx
  ] as PixelFormatFilter;
  expect(fmtFilter.filter).toBe(`format=${PixelFormats.NV12}`);
  // Cross-check ordering in the rendered command line as well, not just
  // the in-memory filter chain.
  const args = pipeline.getCommandArgs().join(' ');
  const nv12Idx = args.indexOf(`format=${PixelFormats.NV12}`);
  const hwuploadIdx = args.indexOf('hwupload');
  expect(nv12Idx).toBeGreaterThan(-1);
  expect(nv12Idx).toBeLessThan(hwuploadIdx);
});
test('format=p010le inserted before hwupload for 10-bit input + watermark (Fix 3)', () => {
  // 10-bit HEVC: after watermark overlay the frame is in yuv420p10le on software.
  // hwupload for QSV requires p010le; without Fix 3 the format conversion was missing.
  const pipeline = buildPipeline({
    videoInput: makeHevcVideoInput({
      pixelFormat: new PixelFormatYuv420P10Le(),
    }),
    watermark: makeWatermarkSource(),
    pipelineOptions: { disableHardwareDecoding: true },
  });
  const pixelFormatFilterSteps =
    pipeline.getComplexFilter()!.filterChain.pixelFormatFilterSteps;
  const hwUploadIdx = pixelFormatFilterSteps.findIndex(
    (s) => s instanceof HardwareUploadQsvFilter,
  );
  expect(
    hwUploadIdx,
    'HardwareUploadQsvFilter should be present',
  ).toBeGreaterThan(-1);
  // The last PixelFormatFilter before hwupload should be format=p010le
  // (earlier PixelFormatFilters in the chain are allowed).
  const fmtFiltersBeforeUpload = pixelFormatFilterSteps
    .slice(0, hwUploadIdx)
    .filter((s) => s instanceof PixelFormatFilter);
  const lastFmtFilter = fmtFiltersBeforeUpload.at(-1) as
    | PixelFormatFilter
    | undefined;
  expect(lastFmtFilter).toBeDefined();
  expect(lastFmtFilter!.filter).toBe(`format=${PixelFormats.P010}`);
  // Cross-check the rendered command line ordering as well.
  const args = pipeline.getCommandArgs().join(' ');
  const p010Idx = args.indexOf(`format=${PixelFormats.P010}`);
  const hwuploadIdx = args.indexOf('hwupload');
  expect(p010Idx).toBeGreaterThan(-1);
  expect(p010Idx).toBeLessThan(hwuploadIdx);
});
test('software-only pipeline still emits -pix_fmt when formats differ (regression guard for Fix 2)', () => {
  // With no hardware capabilities (software decode + encode) and a 10-bit HEVC
  // input transcoded to 8-bit yuv420p, the code must still emit -pix_fmt yuv420p
  // via the unconditional path (encoder is not QSV). Fix 2 only suppresses
  // -pix_fmt for QSV encoders.
  const noCapabilities = new VaapiHardwareCapabilities([]);
  const hevc10bitInput = makeHevcVideoInput({
    pixelFormat: new PixelFormatYuv420P10Le(),
  });
  const builder = new QsvPipelineBuilder(
    noCapabilities,
    EmptyFfmpegCapabilities,
    hevc10bitInput,
    null,
    null,
    null,
    null,
  );
  const pipeline = builder.build(
    FfmpegState.create({
      version: { versionString: '7.1.1', isUnknown: false },
    }),
    new FrameState({
      isAnamorphic: false,
      scaledSize: hevc10bitInput.streams[0]!.squarePixelFrameSize(
        FrameSize.FHD,
      ),
      paddedSize: FrameSize.FHD,
      pixelFormat: new PixelFormatYuv420P(),
    }),
    DefaultPipelineOptions,
  );
  // The expect message (`args`) prints the full command on failure.
  const args = pipeline.getCommandArgs().join(' ');
  expect(args, args).toContain('-pix_fmt yuv420p');
});
});
test('hwdownload bug', async () => {
const wm = new WatermarkInputSource(
new FileStreamSource('/path/to/img'),
@@ -1150,6 +1321,95 @@ describe('QsvPipelineBuilder', () => {
disableHardwareFilters: false,
vaapiDevice: null,
vaapiDriver: null,
vaapiPipelineOptions: null,
},
);
console.log(x.getCommandArgs().join(' '));
});
test('10-bit input, 8-bit output', async () => {
const builder = new QsvPipelineBuilder(
new VaapiHardwareCapabilities([
new VaapiProfileEntrypoint(
VaapiProfiles.H264Main,
VaapiEntrypoint.Decode,
),
new VaapiProfileEntrypoint(
VaapiProfiles.H264Main,
VaapiEntrypoint.Encode,
),
new VaapiProfileEntrypoint(
VaapiProfiles.HevcMain,
VaapiEntrypoint.Decode,
),
]),
EmptyFfmpegCapabilities,
makeHevcVideoInput({
frameSize: FrameSize.FHD,
pixelFormat: new PixelFormatYuv420P10Le(),
}),
null,
null,
null,
null,
);
const x = builder.build(
FfmpegState.create({
version: {
versionString: 'n7.1.1-56-gc2184b65d2-20250716',
majorVersion: 7,
minorVersion: 1,
patchVersion: 1,
versionDetails: '56-gc2184b65d2-20250716',
isUnknown: false,
},
threadCount: 0,
start: dayjs.duration({ minutes: 5, seconds: 19.253 }),
duration: dayjs.duration({ minutes: 18, seconds: 2.348 }),
logLevel: 'debug',
mapMetadata: false,
metadataServiceName: null,
metadataServiceProvider: null,
decoderHwAccelMode: 'none',
encoderHwAccelMode: 'none',
softwareScalingAlgorithm: 'bicubic',
softwareDeinterlaceFilter: 'none',
vaapiDevice: null,
vaapiDriver: null,
outputLocation: 'stdout',
ptsOffset: 0,
tonemapHdr: false,
}),
new FrameState({
scaledSize: FrameSize.FHD,
paddedSize: FrameSize.FHD,
isAnamorphic: false,
realtime: false,
videoFormat: 'h264',
videoPreset: null,
videoProfile: null,
frameRate: null,
videoTrackTimescale: 90000,
videoBitrate: 10000,
videoBufferSize: 20000,
frameDataLocation: 'unknown',
deinterlace: false,
pixelFormat: new PixelFormatYuv420P(),
colorFormat: ColorFormat.bt709,
infiniteLoop: false,
forceSoftwareOverlay: false,
}),
{
decoderThreadCount: 0,
encoderThreadCount: 0,
filterThreadCount: null,
disableHardwareDecoding: false,
disableHardwareEncoding: false,
disableHardwareFilters: false,
vaapiDevice: null,
vaapiDriver: null,
vaapiPipelineOptions: null,
},
);
console.log(x.getCommandArgs().join(' '));

View File

@@ -4,7 +4,7 @@ import type { FfmpegCapabilities } from '@/ffmpeg/builder/capabilities/FfmpegCap
import { OutputFormatTypes, VideoFormats } from '@/ffmpeg/builder/constants.js';
import type { Decoder } from '@/ffmpeg/builder/decoder/Decoder.js';
import { DecoderFactory } from '@/ffmpeg/builder/decoder/DecoderFactory.js';
import { Encoder } from '@/ffmpeg/builder/encoder/Encoder.js';
import type { Encoder } from '@/ffmpeg/builder/encoder/Encoder.js';
import { DeinterlaceFilter } from '@/ffmpeg/builder/filter/DeinterlaceFilter.js';
import type { FilterOption } from '@/ffmpeg/builder/filter/FilterOption.js';
import { HardwareDownloadFilter } from '@/ffmpeg/builder/filter/HardwareDownloadFilter.js';
@@ -21,6 +21,7 @@ import {
PixelFormatNv12,
PixelFormatP010,
PixelFormats,
PixelFormatYuv420P,
PixelFormatYuv420P10Le,
PixelFormatYuva420P,
} from '@/ffmpeg/builder/format/PixelFormat.js';
@@ -164,47 +165,46 @@ export class QsvPipelineBuilder extends SoftwarePipelineBuilder {
currentState = this.decoder.nextState(currentState);
}
currentState = this.addFilterToVideoChain(
currentState,
new ResetPtsFilter(),
);
const setFrameRate =
this.context?.videoStream.getNumericFrameRateOrDefault() ?? 24;
currentState = this.addFilterToVideoChain(
currentState,
new SetFpsFilter(setFrameRate),
);
// Remove the existing frame rate output option if the frame rate we just
// set differs from the desired output frame rate.
if (
this.desiredState.frameRate &&
this.desiredState.frameRate !== setFrameRate
) {
const idx = this.pipelineSteps.findIndex(
(step) => step instanceof FrameRateOutputOption,
);
if (idx !== -1) {
this.pipelineSteps.splice(idx, 1);
}
}
currentState = this.setDeinterlace(currentState);
currentState = this.setScale(currentState);
currentState = this.setTonemap(currentState);
currentState = this.setPad(currentState);
this.setStillImageLoop();
if (
currentState.frameDataLocation === FrameDataLocation.Hardware &&
this.context.hasWatermark
) {
if (currentState.frameDataLocation === FrameDataLocation.Hardware) {
const hwDownload = new HardwareDownloadFilter(currentState);
currentState = hwDownload.nextState(currentState);
this.videoInputSource.filterSteps.push(hwDownload);
}
if (this.desiredState.videoFormat !== VideoFormats.Copy) {
currentState = this.addFilterToVideoChain(
currentState,
new ResetPtsFilter(),
);
const setFrameRate =
this.context?.videoStream.getNumericFrameRateOrDefault() ?? 24;
currentState = this.addFilterToVideoChain(
currentState,
new SetFpsFilter(setFrameRate),
);
// Remove the existing frame rate output option if the frame rate we just
// set differs from the desired output frame rate.
if (
this.desiredState.frameRate &&
this.desiredState.frameRate !== setFrameRate
) {
const idx = this.pipelineSteps.findIndex(
(step) => step instanceof FrameRateOutputOption,
);
if (idx !== -1) {
this.pipelineSteps.splice(idx, 1);
}
}
}
currentState = this.setWatermark(currentState);
const noEncoderSteps = every(
@@ -334,70 +334,80 @@ export class QsvPipelineBuilder extends SoftwarePipelineBuilder {
step instanceof DeinterlaceQsvFilter,
);
const currentPixelFormat = currentState.pixelFormat;
let currentPixelFormat = currentState.pixelFormat;
if (
some(
this.videoInputSource.filterSteps,
(step) => !(step instanceof Encoder),
) &&
currentPixelFormat
) {
if (currentPixelFormat && currentPixelFormat.isUnknown()) {
const resolved =
currentPixelFormat.bitDepth === 10
? new PixelFormatP010()
: new PixelFormatNv12(new PixelFormatYuv420P());
currentState = currentState.update({ pixelFormat: resolved });
currentPixelFormat = resolved;
}
if (currentPixelFormat) {
let needsConversion = false;
if (currentPixelFormat.name === PixelFormats.NV12) {
needsConversion =
currentPixelFormat.unwrap().name !== targetPixelFormat.name;
if (!needsConversion) {
currentState = currentState.update({
pixelFormat: targetPixelFormat,
});
}
} else {
needsConversion = currentPixelFormat.name !== targetPixelFormat.name;
const unwrappedCurrent =
currentPixelFormat.toSoftwareFormat() ?? currentPixelFormat;
needsConversion = unwrappedCurrent.name !== targetPixelFormat.name;
if (!needsConversion) {
currentState = currentState.update({
pixelFormat: targetPixelFormat,
});
}
if (needsConversion) {
const filter = new QsvFormatFilter(currentPixelFormat);
const filterCtor =
currentState.frameDataLocation === FrameDataLocation.Hardware
? QsvFormatFilter
: PixelFormatFilter;
hasQsvFilter =
currentState.frameDataLocation === FrameDataLocation.Hardware;
const filter = new filterCtor(currentPixelFormat);
steps.push(filter);
currentState = filter.nextState(currentState);
if (currentPixelFormat.bitDepth === 8 && this.context.is10BitOutput) {
const tenbitFilter = new QsvFormatFilter(new PixelFormatP010());
const tenbitFilter = new filterCtor(new PixelFormatP010());
steps.push(tenbitFilter);
currentState = tenbitFilter.nextState(currentState);
}
hasQsvFilter = true;
}
}
if (hasQsvFilter) {
if (currentState.frameDataLocation === FrameDataLocation.Hardware) {
if (
currentState.pixelFormat?.bitDepth === 10 &&
pixelFormatToDownload?.name !== PixelFormats.YUV420P10LE
) {
pixelFormatToDownload = new PixelFormatYuv420P10Le();
currentState = currentState.update({
pixelFormat: pixelFormatToDownload,
});
} else if (
currentState.pixelFormat?.bitDepth === 8 &&
pixelFormatToDownload?.name !== PixelFormats.NV12
) {
pixelFormatToDownload = new PixelFormatNv12(pixelFormatToDownload);
currentState = currentState.update({
pixelFormat: pixelFormatToDownload,
});
}
// hasQsvFilter implies we're on hardware, but check anyway.
if (
hasQsvFilter &&
currentState.frameDataLocation === FrameDataLocation.Hardware
) {
if (
currentState.pixelFormat?.bitDepth === 10 &&
pixelFormatToDownload?.name !== PixelFormats.P010
) {
pixelFormatToDownload = new PixelFormatP010();
currentState = currentState.update({
pixelFormat: pixelFormatToDownload,
});
} else if (
currentState.pixelFormat?.bitDepth === 8 &&
pixelFormatToDownload?.name !== PixelFormats.NV12
) {
pixelFormatToDownload = new PixelFormatNv12(pixelFormatToDownload);
currentState = currentState.update({
pixelFormat: pixelFormatToDownload,
});
}
}
// If we're about to encode with software and we're in hardware,
// we'll need to download. We shouldn't have to do any more conversions
// at this point
if (
this.ffmpegState.encoderHwAccelMode === HardwareAccelerationMode.None &&
currentState.frameDataLocation === FrameDataLocation.Hardware
) {
pixelFormatToDownload = new PixelFormatNv12(pixelFormatToDownload);
// pixelFormatToDownload = new PixelFormatNv12(pixelFormatToDownload);
const hwDownloadFilter = new HardwareDownloadFilter(
currentState.update({ pixelFormat: pixelFormatToDownload }),
);
@@ -405,18 +415,56 @@ export class QsvPipelineBuilder extends SoftwarePipelineBuilder {
steps.push(hwDownloadFilter);
}
// If we're going to encode on hardware, but we're still in software,
// perform the final upload.
if (
this.ffmpegState.encoderHwAccelMode === HardwareAccelerationMode.Qsv &&
currentState.frameDataLocation === FrameDataLocation.Software
) {
const hwCompatFormat =
currentState.pixelFormat?.bitDepth === 10
? new PixelFormatP010()
: new PixelFormatNv12(new PixelFormatYuv420P());
if (currentState.pixelFormat?.name !== hwCompatFormat.name) {
const fmtFilter = new PixelFormatFilter(hwCompatFormat);
steps.push(fmtFilter);
currentState = fmtFilter.nextState(currentState);
}
steps.push(new HardwareUploadQsvFilter(64));
}
// Only emit -pix_fmt for software encoders; QSV encoders don't accept
// a -pix_fmt flag and it causes swscaler errors with hardware frames.
if (currentState.pixelFormat?.name !== targetPixelFormat.name) {
// TODO: Handle color params
this.pipelineSteps.push(new PixelFormatOutputOption(targetPixelFormat));
if (
this.ffmpegState.encoderHwAccelMode === HardwareAccelerationMode.Qsv
) {
steps.push(
new QsvFormatFilter(
targetPixelFormat.toHardwareFormat() ?? targetPixelFormat,
),
);
} else {
this.pipelineSteps.push(
new PixelFormatOutputOption(targetPixelFormat),
);
}
}
// TODO: Handle color params
this.context.filterChain.pixelFormatFilterSteps = steps;
} else if (
this.ffmpegState.encoderHwAccelMode === HardwareAccelerationMode.Qsv &&
currentState.frameDataLocation === FrameDataLocation.Software
) {
// No explicit pixel format was requested but QSV needs hardware frames.
// This happens after a watermark overlay (which outputs software yuv420p).
const hwCompatFormat =
currentState.pixelFormat?.bitDepth === 10
? new PixelFormatP010()
: new PixelFormatNv12(new PixelFormatYuv420P());
steps.push(new PixelFormatFilter(hwCompatFormat));
steps.push(new HardwareUploadQsvFilter(64));
this.context.filterChain.pixelFormatFilterSteps = steps;
}
return currentState;
@@ -472,9 +520,10 @@ export class QsvPipelineBuilder extends SoftwarePipelineBuilder {
// Fades
}
if (this.desiredState.pixelFormat) {
const pf = this.desiredState.pixelFormat.unwrap();
const pf = (
this.desiredState.pixelFormat ?? currentState.pixelFormat
)?.unwrap();
if (pf && !pf.isUnknown()) {
// Overlay
this.context.filterChain.watermarkOverlayFilterSteps.push(
new OverlayWatermarkFilter(

View File

@@ -1,5 +1,4 @@
import path from 'node:path';
import { fileURLToPath } from 'node:url';
import { FileStreamSource } from '../../../../stream/types.ts';
import {
createTempWorkdir,
@@ -8,6 +7,7 @@ import {
} from '../../../../testing/ffmpeg/FfmpegIntegrationHelper.ts';
import {
binaries,
Fixtures,
nvidiaCaps,
nvidiaTest,
} from '../../../../testing/ffmpeg/FfmpegTestFixtures.ts';
@@ -25,15 +25,6 @@ import { FrameState } from '../../state/FrameState.ts';
import { FrameSize } from '../../types.ts';
import { NvidiaPipelineBuilder } from './NvidiaPipelineBuilder.ts';
const fixturesDir = path.join(
path.dirname(fileURLToPath(import.meta.url)),
'../../../../testing/ffmpeg/fixtures',
);
const Fixtures = {
video720p: path.join(fixturesDir, '720p_h264.ts'),
video1080p: path.join(fixturesDir, '1080p_h264.ts'),
} as const;
describe.skipIf(!binaries || !nvidiaCaps)(
'NvidiaPipelineBuilder integration',

View File

@@ -1,5 +1,4 @@
import path from 'node:path';
import { fileURLToPath } from 'node:url';
import { FileStreamSource } from '../../../../stream/types.ts';
import {
createTempWorkdir,
@@ -9,6 +8,7 @@ import {
import {
binaries,
ffmpegTest,
Fixtures,
} from '../../../../testing/ffmpeg/FfmpegTestFixtures.ts';
import { AudioFormats, FileOutputLocation } from '../../constants.ts';
import { PixelFormatYuv420P } from '../../format/PixelFormat.ts';
@@ -24,15 +24,6 @@ import { FrameState } from '../../state/FrameState.ts';
import { FrameSize } from '../../types.ts';
import { SoftwarePipelineBuilder } from './SoftwarePipelineBuilder.ts';
const fixturesDir = path.join(
path.dirname(fileURLToPath(import.meta.url)),
'../../../../testing/ffmpeg/fixtures',
);
const Fixtures = {
video720p: path.join(fixturesDir, '720p_h264.ts'),
video1080p: path.join(fixturesDir, '1080p_h264.ts'),
} as const;
describe.skipIf(!binaries)('SoftwarePipelineBuilder integration', () => {
let workdir: string;

View File

@@ -1,15 +0,0 @@
import type { StreamLineupItem } from '../db/derived_types/StreamLineup.ts';
export interface IStreamLineupCache {
getProgramLastPlayTime(channelId: string, programId: string): number;
getFillerLastPlayTime(channelId: string, fillerId: string): number;
recordPlayback(
channelId: string,
t0: number,
lineupItem: StreamLineupItem,
): Promise<void>;
clear(): void;
}

View File

@@ -1,109 +0,0 @@
import { jsonSchema } from '@/types/schemas.js';
import { inject, interfaces } from 'inversify';
import { findIndex, isArray } from 'lodash-es';
import fs from 'node:fs/promises';
import path from 'node:path';
import { CurrentLineupSchemaVersion } from '../../db/derived_types/Lineup.ts';
import { GlobalOptions } from '../../globals.ts';
import { PersistentChannelCache } from '../../stream/ChannelCache.ts';
import { KEYS } from '../../types/inject.ts';
import { fileExists } from '../../util/fsUtil.ts';
import { parseIntOrNull } from '../../util/index.ts';
import { getFirstValue } from '../../util/json.ts';
import { Logger } from '../../util/logging/LoggerFactory.ts';
import { JsonFileMigrator, MigrationStep } from '../JsonFileMigrator.ts';
import { ClearStreamPlayCacheMigration } from './ClearStreamPlayCacheMigration.ts';
// Migration steps, resolved through the DI container and applied in order.
// Each step declares the schema version range it migrates (`from` -> `to`).
const MigrationSteps: interfaces.ServiceIdentifier<MigrationStep>[] = [
  ClearStreamPlayCacheMigration,
];

// Latest schema version of stream-cache.json that this build writes.
const CurrentVersion = 1;
/**
 * Migrates the on-disk stream cache file (`stream-cache.json`) to the current
 * schema version by applying each registered MigrationStep in sequence.
 */
export class StreamCacheMigrator extends JsonFileMigrator<MigrationStep> {
  constructor(
    @inject(KEYS.Logger) private logger: Logger,
    @inject(KEYS.GlobalOptions) private opts: GlobalOptions,
    @inject(PersistentChannelCache)
    private channelCache: PersistentChannelCache,
  ) {
    super(MigrationSteps);
  }

  /**
   * Runs any pending schema migrations against stream-cache.json.
   *
   * No-ops when the file does not exist, is not a JSON object, or is already
   * at the latest version. On success the migrated JSON is written back and
   * the in-memory channel cache is re-initialized from disk. Migration
   * failures are logged, never thrown.
   */
  async run(): Promise<void> {
    const cachePath = path.join(
      this.opts.databaseDirectory,
      'stream-cache.json',
    );

    if (!(await fileExists(cachePath))) {
      return;
    }

    const rawCacheContents = await fs.readFile(cachePath);
    const parsed = jsonSchema.parse(
      JSON.parse(rawCacheContents.toString('utf-8')),
    );

    // The cache must be a plain JSON object; arrays and scalars are invalid.
    if (
      (typeof parsed !== 'object' && typeof parsed !== 'function') ||
      parsed === null ||
      isArray(parsed)
    ) {
      this.logger.warn(
        'Got invalid cache JSON: %s. Expected object.',
        JSON.stringify(parsed),
      );
      return;
    }

    // A missing/unparseable version field is treated as version 0.
    const version = getFirstValue('$.version@number()', parsed, parseIntOrNull);
    let currVersion = version ?? 0;
    if (currVersion === CurrentVersion) {
      // BUGFIX: previously logged CurrentLineupSchemaVersion — the lineup
      // file's schema constant, unrelated to the stream cache's version.
      this.logger.debug(
        'Cache schema already at latest version: %d',
        CurrentVersion,
      );
      return;
    }

    let migrationIndex = findIndex(
      this.pipeline,
      ({ from }) => from === currVersion,
    );

    if (migrationIndex === -1) {
      this.logger.error(
        'Error determining which migration to start from for stream cache',
      );
      return;
    }

    try {
      // Apply migrations until the cache reaches this file's CurrentVersion.
      // BUGFIX: the loop previously ran while
      // `currVersion <= CurrentLineupSchemaVersion` (again the wrong file's
      // constant) and only terminated by running off the end of the
      // pipeline; bound it by the stream cache's own target version.
      do {
        const migration = this.pipeline?.[migrationIndex];
        if (!migration) {
          break;
        }

        await migration.migrate(parsed);
        currVersion = migration.to;
        parsed['version'] = currVersion;
        migrationIndex++;
      } while (currVersion < CurrentVersion);

      await fs.writeFile(
        path.join(this.opts.databaseDirectory, 'stream-cache.json'),
        JSON.stringify(parsed),
      );

      // Reload the persistent cache so in-memory state reflects the
      // migrated file.
      await this.channelCache.init();

      this.logger.info(
        'Successfully migrated stream cache from version %d to %d',
        version ?? 0,
        currVersion,
      );
    } catch (e) {
      this.logger.error(e, 'Error while migrating stream cache schema');
    }
  }
}

View File

@@ -276,6 +276,7 @@ type BaseProgramSearchDocument = {
director: Director[];
studio?: Studio[];
tags: string[];
state: ProgramState;
};
export type TerminalProgramSearchDocument<
@@ -302,7 +303,6 @@ export type TerminalProgramSearchDocument<
audioChannels?: number;
audioLanguages?: string[];
subtitleLanguages?: string[];
state: ProgramState;
};
export type ProgramSearchDocument =
@@ -829,6 +829,7 @@ export class MeilisearchService implements ISearchService {
),
tags: show.tags,
studio: show.studios,
state: 'ok',
};
await this.client()
@@ -879,6 +880,7 @@ export class MeilisearchService implements ISearchService {
`${eid.type}|${eid.sourceId ?? ''}|${eid.id}` satisfies MergedExternalId,
),
tags: season.tags,
state: 'ok',
parent: {
id: encodeCaseSensitiveId(season.show.uuid),
externalIds: showEids ?? [],
@@ -1002,6 +1004,7 @@ export class MeilisearchService implements ISearchService {
`${eid.type}|${eid.sourceId ?? ''}|${eid.id}` satisfies MergedExternalId,
),
tags: artist.tags,
state: 'ok',
};
await this.client()
@@ -1050,6 +1053,7 @@ export class MeilisearchService implements ISearchService {
`${eid.type}|${eid.sourceId ?? ''}|${eid.id}` satisfies MergedExternalId,
),
tags: album.tags,
state: 'ok',
parent: {
id: encodeCaseSensitiveId(album.artist.uuid),
externalIds: artistEids ?? [],

View File

@@ -1,20 +0,0 @@
import { inject, injectable } from 'inversify';
import { PersistentChannelCache } from '../../stream/ChannelCache.ts';
import { SimpleStartupTask } from './IStartupTask.ts';
@injectable()
// Startup task that loads the persisted stream cache from disk into memory
// by calling PersistentChannelCache.init().
export class LoadChannelCacheStartupTask extends SimpleStartupTask {
  // Task identifier (the class name).
  id = LoadChannelCacheStartupTask.name;
  // No ordering constraints relative to other startup tasks.
  dependencies: string[] = [];

  constructor(
    @inject(PersistentChannelCache)
    private persistentChannelCache: PersistentChannelCache,
  ) {
    super();
  }

  // Reads the cache file from disk; completion signals the task is done.
  getPromise(): Promise<void> {
    return this.persistentChannelCache.init();
  }
}

View File

@@ -1,22 +0,0 @@
import { inject, injectable } from 'inversify';
import { StreamCacheMigrator } from '../../migration/streamCache/StreamCacheMigrator.ts';
import { ClearM3uCacheStartupTask } from './ClearM3uCacheStartupTask.ts';
import { SimpleStartupTask } from './IStartupTask.ts';
@injectable()
// Startup task that runs stream-cache.json schema migrations via
// StreamCacheMigrator.run().
export class StreamCacheMigratorStartupTask extends SimpleStartupTask {
  // Task identifier (the class name).
  id: string = StreamCacheMigratorStartupTask.name;
  // Declared ordering dependency: runs after the M3U cache clear task.
  dependencies: string[] = [ClearM3uCacheStartupTask.name];

  constructor(
    @inject(StreamCacheMigrator)
    private streamCacheMigrator: StreamCacheMigrator,
  ) {
    super();
  }

  // Kicks off the migration; completion signals the task is done.
  getPromise(): Promise<void> {
    return this.streamCacheMigrator.run();
  }
}

View File

@@ -1,164 +0,0 @@
import { InMemoryCachedDbAdapter } from '@/db/json/InMemoryCachedDbAdapter.js';
import { SchemaBackedDbAdapter } from '@/db/json/SchemaBackedJsonDBAdapter.js';
import { GlobalOptions } from '@/globals.js';
import { inject, injectable } from 'inversify';
import { isUndefined } from 'lodash-es';
import { Low } from 'lowdb';
import { join } from 'node:path';
import { z } from 'zod/v4';
import {
StreamLineupItem,
isCommercialLineupItem,
} from '../db/derived_types/StreamLineup.ts';
import { IStreamLineupCache } from '../interfaces/IStreamLineupCache.ts';
import { KEYS } from '../types/inject.ts';
import { Logger } from '../util/logging/LoggerFactory.ts';
// Zod schema for the persisted stream-cache.json contents. Both caches map a
// string cache key to a numeric play-time value.
const channelCacheSchema = z.object({
  // Filler-list play timestamps, keyed by cache key.
  fillerPlayTimeCache: z.record(z.string(), z.number()).default({}),
  // Program play timestamps, keyed by cache key.
  programPlayTimeCache: z.record(z.string(), z.number()).default({}),
  // Schema version; absent on files written before versioning existed.
  version: z.number().optional(),
});

type ChannelCacheSchema = z.infer<typeof channelCacheSchema>;

// Async factory producing a PersistentChannelCache instance.
export type PersistentChannelCacheProvider =
  () => Promise<PersistentChannelCache>;
/**
 * Thin persistence layer over `stream-cache.json`, storing per-key play
 * timestamps for programs and filler lists. Backed by a lowdb instance with
 * an in-memory cache wrapped around a schema-validated JSON adapter.
 */
@injectable()
export class PersistentChannelCache {
  #db: Low<ChannelCacheSchema>;

  constructor(
    @inject(KEYS.GlobalOptions) private globalOptions: GlobalOptions,
  ) {
    const dbFile = join(
      this.globalOptions.databaseDirectory,
      'stream-cache.json',
    );
    const adapter = new InMemoryCachedDbAdapter(
      new SchemaBackedDbAdapter(channelCacheSchema, dbFile),
    );
    this.#db = new Low<ChannelCacheSchema>(adapter, {
      fillerPlayTimeCache: {},
      programPlayTimeCache: {},
    });
  }

  /** Loads the cache file from disk into memory. */
  async init() {
    return this.#db.read();
  }

  /** Last recorded play time for a program key, if any. */
  getProgramPlayTime(id: string): number | undefined {
    return this.#db.data.programPlayTimeCache[id];
  }

  /** Records a play time for a program key and persists the change. */
  setProgramPlayTime(id: string, time: number) {
    return this.#db.update((data) => {
      data.programPlayTimeCache[id] = time;
    });
  }

  /** Last recorded play time for a filler-list key, if any. */
  getFillerPlayTime(id: string): number | undefined {
    return this.#db.data.fillerPlayTimeCache[id];
  }

  /** Records a play time for a filler-list key and persists the change. */
  setFillerPlayTime(id: string, time: number) {
    return this.#db.update((data) => {
      data.fillerPlayTimeCache[id] = time;
    });
  }
}
/**
 * IStreamLineupCache implementation backed by PersistentChannelCache.
 * Records and looks up the last play time of programs and filler lists on a
 * per-channel basis.
 */
@injectable()
export class ChannelCache implements IStreamLineupCache {
  constructor(
    @inject(PersistentChannelCache)
    private persistentChannelCache: PersistentChannelCache,
    @inject(KEYS.Logger) private logger: Logger,
  ) {}

  getCurrentLineupItem(): StreamLineupItem | undefined {
    // TODO: Remove this entirely. Intentionally always undefined — the
    // cached lineup item is essentially useless at this point.
    return;
  }

  // Composite cache key: one entry per (channel, program/filler) pair.
  private getKey(channelId: string, programId: string) {
    return `${channelId}|${programId}`;
  }

  /**
   * Persists the projected end time (start + remaining duration) of the
   * lineup item under its program and/or filler-list key.
   */
  private async recordProgramPlayTime(
    channelId: string,
    lineupItem: StreamLineupItem,
    t0: number,
  ) {
    // Prefer the explicit stream duration; otherwise derive it from the
    // item's total duration minus however far in playback started.
    const remaining = isUndefined(lineupItem.streamDuration)
      ? lineupItem.duration - (lineupItem.startOffset ?? 0)
      : lineupItem.streamDuration;
    const endTime = t0 + remaining;

    if (lineupItem.type === 'program') {
      await this.persistentChannelCache.setProgramPlayTime(
        this.getKey(channelId, lineupItem.program.uuid),
        endTime,
      );
    }

    if (isCommercialLineupItem(lineupItem)) {
      await this.persistentChannelCache.setFillerPlayTime(
        this.getKey(channelId, lineupItem.fillerListId),
        endTime,
      );
    }
  }

  getProgramLastPlayTime(channelId: string, programId: string) {
    const playTime = this.persistentChannelCache.getProgramPlayTime(
      this.getKey(channelId, programId),
    );
    return playTime ?? 0;
  }

  getFillerLastPlayTime(channelId: string, fillerId: string) {
    const playTime = this.persistentChannelCache.getFillerPlayTime(
      this.getKey(channelId, fillerId),
    );
    return playTime ?? 0;
  }

  async recordPlayback(
    channelId: string,
    t0: number,
    lineupItem: StreamLineupItem,
  ) {
    try {
      await this.recordProgramPlayTime(channelId, lineupItem, t0);
      // await this.persistentChannelCache.setStreamPlayItem(channelId, {
      //   timestamp: t0,
      //   lineupItem: lineupItem,
      // });
    } catch (e) {
      // Cache failures are non-fatal; log and continue streaming.
      this.logger.warn(
        e,
        'Error while setting stream cache for lineup item: %O at %d',
        lineupItem,
        t0,
      );
    }
  }

  async clearPlayback() {
    // return await this.persistentChannelCache.clearStreamPlayItem(channelId);
  }

  // Is this necessary??
  clear() {
    // this.configCache = {};
    // this.cache = {};
    // this.channelNumbers = undefined;
  }
}

View File

@@ -31,7 +31,6 @@ import { FillerPickerV2 } from '../services/scheduling/FillerPickerV2.ts';
import type { UpdatePlexPlayStatusScheduledTaskFactory } from '../tasks/plex/UpdatePlexPlayStatusTask.ts';
import { UpdatePlexPlayStatusScheduledTask } from '../tasks/plex/UpdatePlexPlayStatusTask.ts';
import { bindFactoryFunc } from '../util/inject.ts';
import { PersistentChannelCache } from './ChannelCache.ts';
import type { ProgramStreamFactory } from './ProgramStreamFactory.ts';
import { ExternalStreamDetailsFetcherFactory } from './StreamDetailsFetcher.ts';
import { EmbyProgramStream } from './emby/EmbyProgramStream.ts';
@@ -252,8 +251,6 @@ const configure: interfaces.ContainerModuleCallBack = (bind) => {
bind(ExternalStreamDetailsFetcherFactory).toSelf().inSingletonScope();
bind(PersistentChannelCache).toSelf().inSingletonScope();
bind(KEYS.FillerPicker).to(FillerPickerV2).inSingletonScope();
};

View File

@@ -13,7 +13,6 @@ import { IProgramDB } from '../db/interfaces/IProgramDB.ts';
import { calculateStartTimeOffsets } from '../db/lineupUtil.ts';
import { ProgramPlayHistoryDB } from '../db/ProgramPlayHistoryDB.ts';
import { MediaSourceId } from '../db/schema/base.ts';
import { IStreamLineupCache } from '../interfaces/IStreamLineupCache.ts';
import { IFillerPicker } from '../services/interfaces/IFillerPicker.ts';
import {
createChannelOrm,
@@ -31,7 +30,6 @@ describe('StreamProgramCalculator', () => {
const channelDB = mock<IChannelDB>();
const programDB = mock<IProgramDB>();
const fillerPicker = mock<IFillerPicker>();
const channelCache = mock<IStreamLineupCache>();
const playHistoryDB = mock<ProgramPlayHistoryDB>();
const startTime = dayjs(new Date(2025, 8, 17, 8));
@@ -106,7 +104,6 @@ describe('StreamProgramCalculator', () => {
LoggerFactory.root,
instance(fillerDB),
instance(channelDB),
instance(channelCache),
instance(programDB),
instance(fillerPicker),
instance(playHistoryDB),
@@ -130,9 +127,13 @@ describe('StreamProgramCalculator', () => {
startOffset: +dayjs.duration(16, 'minutes'),
});
// Wait for async play history recording
await new Promise((resolve) => setTimeout(resolve, 10));
verify(
channelCache.recordPlayback(channel.uuid, +startTime, out.lineupItem),
playHistoryDB.isProgramCurrentlyPlaying(channelId, programId1, +startTime),
).once();
verify(playHistoryDB.create(anything())).once();
});
baseTest('getCurrentLineupItem filler lineup item', async () => {
@@ -140,7 +141,6 @@ describe('StreamProgramCalculator', () => {
const channelDB = mock<IChannelDB>();
const programDB = mock<IProgramDB>();
const fillerPicker = mock<IFillerPicker>();
const channelCache = mock<IStreamLineupCache>();
const playHistoryDB = mock<ProgramPlayHistoryDB>();
const startTime = dayjs(new Date(2025, 8, 17, 8));
@@ -217,7 +217,6 @@ describe('StreamProgramCalculator', () => {
LoggerFactory.root,
instance(fillerDB),
instance(channelDB),
instance(channelCache),
instance(programDB),
instance(fillerPicker),
instance(playHistoryDB),
@@ -243,9 +242,13 @@ describe('StreamProgramCalculator', () => {
type: 'commercial',
});
// Wait for async play history recording
await new Promise((resolve) => setTimeout(resolve, 10));
verify(
channelCache.recordPlayback(channel.uuid, +startTime, out.lineupItem),
playHistoryDB.isProgramCurrentlyPlaying(channelId, programId1, +startTime),
).once();
verify(playHistoryDB.create(anything())).once();
});
baseTest('getCurrentLineupItem loop filler lineup item', async () => {
@@ -253,7 +256,6 @@ describe('StreamProgramCalculator', () => {
const channelDB = mock<IChannelDB>();
const programDB = mock<IProgramDB>();
const fillerPicker = mock<IFillerPicker>();
const channelCache = mock<IStreamLineupCache>();
const playHistoryDB = mock<ProgramPlayHistoryDB>();
const startTime = dayjs(new Date(2025, 8, 17, 8));
@@ -330,7 +332,6 @@ describe('StreamProgramCalculator', () => {
LoggerFactory.root,
instance(fillerDB),
instance(channelDB),
instance(channelCache),
instance(programDB),
instance(fillerPicker),
instance(playHistoryDB),
@@ -355,9 +356,13 @@ describe('StreamProgramCalculator', () => {
duration: +dayjs.duration(22, 'minutes'),
});
// Wait for async play history recording
await new Promise((resolve) => setTimeout(resolve, 10));
verify(
channelCache.recordPlayback(channel.uuid, +startTime, out.lineupItem),
playHistoryDB.isProgramCurrentlyPlaying(channelId, programId1, +startTime),
).once();
verify(playHistoryDB.create(anything())).once();
});
baseTest('records play history for new playback', async () => {
@@ -365,7 +370,6 @@ describe('StreamProgramCalculator', () => {
const channelDB = mock<IChannelDB>();
const programDB = mock<IProgramDB>();
const fillerPicker = mock<IFillerPicker>();
const channelCache = mock<IStreamLineupCache>();
const playHistoryDB = mock<ProgramPlayHistoryDB>();
const startTime = dayjs(new Date(2025, 8, 17, 8));
@@ -440,7 +444,6 @@ describe('StreamProgramCalculator', () => {
LoggerFactory.root,
instance(fillerDB),
instance(channelDB),
instance(channelCache),
instance(programDB),
instance(fillerPicker),
instance(playHistoryDB),
@@ -475,7 +478,6 @@ describe('StreamProgramCalculator', () => {
const channelDB = mock<IChannelDB>();
const programDB = mock<IProgramDB>();
const fillerPicker = mock<IFillerPicker>();
const channelCache = mock<IStreamLineupCache>();
const playHistoryDB = mock<ProgramPlayHistoryDB>();
const startTime = dayjs(new Date(2025, 8, 17, 8));
@@ -547,7 +549,6 @@ describe('StreamProgramCalculator', () => {
LoggerFactory.root,
instance(fillerDB),
instance(channelDB),
instance(channelCache),
instance(programDB),
instance(fillerPicker),
instance(playHistoryDB),

View File

@@ -23,7 +23,6 @@ import { IFillerListDB } from '../db/interfaces/IFillerListDB.ts';
import { IProgramDB } from '../db/interfaces/IProgramDB.ts';
import { ProgramPlayHistoryDB } from '../db/ProgramPlayHistoryDB.ts';
import { OneDayMillis } from '../ffmpeg/builder/constants.ts';
import { IStreamLineupCache } from '../interfaces/IStreamLineupCache.ts';
import { IFillerPicker } from '../services/interfaces/IFillerPicker.ts';
import { WrappedError } from '../types/errors.ts';
import { devAssert } from '../util/debug.ts';
@@ -76,7 +75,6 @@ export class StreamProgramCalculator {
@inject(KEYS.Logger) private logger: Logger,
@inject(KEYS.FillerListDB) private fillerDB: IFillerListDB,
@inject(KEYS.ChannelDB) private channelDB: IChannelDB,
@inject(KEYS.ChannelCache) private channelCache: IStreamLineupCache,
@inject(KEYS.ProgramDB) private programDB: IProgramDB,
@inject(KEYS.FillerPicker)
private fillerPicker: IFillerPicker,
@@ -132,15 +130,6 @@ export class StreamProgramCalculator {
);
if (redirectChannels.includes(currentProgram.program.channel)) {
await this.channelCache.recordPlayback(channelContext.uuid, startTime, {
type: 'error',
error:
'Recursive channel redirect found: ' + redirectChannels.join(', '),
duration: 60_000,
streamDuration: 60_000,
startOffset: 0,
programBeginMs: req.startTime,
});
}
const nextChannelId = currentProgram.program.channel;
@@ -237,12 +226,6 @@ export class StreamProgramCalculator {
this.logger.trace('Got lineup item: %O', lineupItem);
}
await this.channelCache.recordPlayback(
channel.uuid,
req.startTime,
lineupItem,
);
// Record play history for content-backed items (programs and commercials/fillers)
// Only record if this is a new playback (not a duplicate request for an already-playing program)
if (

View File

@@ -15,7 +15,7 @@ import { defaultHlsOptions } from '../../ffmpeg/builder/constants.ts';
import { serverOptions } from '../../globals.ts';
import { fileExists } from '../../util/fsUtil.ts';
export const SegmentNameRegex = /data(\d{6})\..*/;
export const SegmentNameRegex = /\D+(\d+)\.(ts|mp4|vtt)/;
export abstract class BaseHlsSession<
HlsSessionOptsT extends BaseHlsSessionOptions = BaseHlsSessionOptions,

View File

@@ -920,6 +920,112 @@ describe('HlsPlaylistMutator', () => {
});
});
describe('high-water mark floor protection', () => {
  // These tests verify the invariant that HlsSession's #highestDeletedBelow
  // relies on: the before_segment_number filter acts as a hard floor, so the
  // playlist never references segments below (segmentNumber - segmentsToKeepBefore).

  const start = dayjs('2024-10-18T14:00:00.000-0400');
  // maxSegmentsToKeep is made large so trimming behavior is driven by the
  // segment-number floor rather than by the keep-count limit.
  const largeOpts = { ...defaultOpts, maxSegmentsToKeep: 20 };

  it('Scenario B: segmentNumber=0 (empty _minByIp) selects segments from the start', () => {
    // Without the high-water mark fix, stale cleanup empties _minByIp and
    // minSegmentRequested returns 0. This causes trimPlaylist to serve segments
    // from the very beginning of the playlist — all of which have been deleted.
    // This test documents the behavior that #highestDeletedBelow prevents.
    const lines = createPlaylist(100);
    const result = mutator.trimPlaylist(
      start,
      {
        type: 'before_segment_number',
        segmentNumber: 0,
        segmentsToKeepBefore: 10,
      },
      lines,
      largeOpts,
    );
    // minSeg = max(0-10, 0) = 0 → all 100 segments pass the filter (≥20) → take first 20 = segs 0..19
    expect(result.playlist).toContain('data000000.ts');
    expect(result.playlist).toContain('data000019.ts');
    expect(result.playlist).not.toContain('data000020.ts');
  });

  it('Scenario B fix: high-water mark as segmentNumber floors the playlist above deleted range', () => {
    // After deleteOldSegments(190), #highestDeletedBelow = 190.
    // Math.max(minSegmentRequested=0, highestDeletedBelow=190) = 190 is used
    // as segmentNumber, so the playlist starts at 180 (190 - keepBefore:10)
    // rather than 0, avoiding any reference to deleted segments.
    const lines = createPlaylist(300);
    const result = mutator.trimPlaylist(
      start,
      {
        type: 'before_segment_number',
        segmentNumber: 190,
        segmentsToKeepBefore: 10,
      },
      lines,
      largeOpts,
    );
    // minSeg = max(190-10, 0) = 180; segs 180..299 (120 ≥ 20) → take first 20 = segs 180..199
    expect(result.playlist).toContain('data000180.ts');
    expect(result.playlist).toContain('data000199.ts');
    expect(result.playlist).not.toContain('data000179.ts');
    expect(result.playlist).not.toContain('data000000.ts');
  });

  it('Scenario A: stale client removal jump still floors above deleted range', () => {
    // Client A was at seg 100, stale cleanup removes it, leaving Client B at seg 200.
    // deleteOldSegments(190) ran, so #highestDeletedBelow = 190.
    // Math.max(minSegmentRequested=200, highestDeletedBelow=190) = 200.
    // Playlist must not include segments below 190 (200 - keepBefore:10).
    const lines = createPlaylist(300);
    const result = mutator.trimPlaylist(
      start,
      {
        type: 'before_segment_number',
        segmentNumber: 200,
        segmentsToKeepBefore: 10,
      },
      lines,
      largeOpts,
    );
    // minSeg = max(200-10, 0) = 190; segs 190..299 (110 ≥ 20) → take first 20 = segs 190..209
    expect(result.playlist).toContain('data000190.ts');
    expect(result.playlist).toContain('data000209.ts');
    expect(result.playlist).not.toContain('data000189.ts');
    expect(result.playlist).not.toContain('data000100.ts');
  });

  it('floor does not over-trim when segmentNumber equals the live edge', () => {
    // When #highestDeletedBelow and minSegmentRequested agree (normal single-client case),
    // the playlist should include the last keepBefore segments before the current position.
    const lines = createPlaylist(50);
    const result = mutator.trimPlaylist(
      start,
      {
        type: 'before_segment_number',
        segmentNumber: 30,
        segmentsToKeepBefore: 10,
      },
      lines,
      largeOpts,
    );
    // minSeg = max(30-10, 0) = 20; segs 20..49 (30 ≥ 20) → take first 20 = segs 20..39
    expect(result.playlist).toContain('data000020.ts');
    expect(result.playlist).toContain('data000039.ts');
    expect(result.playlist).not.toContain('data000019.ts');
    expect(result.segmentCount).toBe(20);
  });
});
describe('integration with real test file', () => {
it('should parse and trim the test.m3u8 file', async () => {
const lines = (await readTestFile('test.m3u8'))

View File

@@ -25,11 +25,11 @@ import fs from 'node:fs/promises';
import path, { basename, dirname, extname } from 'node:path';
import type { DeepRequired } from 'ts-essentials';
import type { BaseHlsSessionOptions } from './BaseHlsSession.js';
import { BaseHlsSession } from './BaseHlsSession.js';
import { BaseHlsSession, SegmentNameRegex } from './BaseHlsSession.js';
import type { SubtitleRenditionInfo } from './HlsMasterPlaylistMutator.js';
import { HlsMasterPlaylistMutator } from './HlsMasterPlaylistMutator.js';
import type { HlsPlaylistFilterOptions } from './HlsPlaylistMutator.js';
import { HlsPlaylistMutator } from './HlsPlaylistMutator.js';
import { HlsMasterPlaylistMutator } from './HlsMasterPlaylistMutator.js';
import type { SubtitleRenditionInfo } from './HlsMasterPlaylistMutator.js';
export type HlsSessionProvider = (
channel: ChannelOrmWithTranscodeConfig,
@@ -56,6 +56,7 @@ export class HlsSession extends BaseHlsSession<HlsSessionOptions> {
#lastDelete: Dayjs = dayjs().subtract(1, 'year');
#isFirstTranscode = true;
#lastDiscontinuitySequence: number | undefined;
#highestDeletedBelow: number = 0;
#currentSubtitleRendition: SubtitleRenditionInfo | undefined;
constructor(
@@ -107,7 +108,10 @@ export class HlsSession extends BaseHlsSession<HlsSessionOptions> {
async trimPlaylist(filterOpts?: HlsPlaylistFilterOptions) {
filterOpts ??= {
type: 'before_segment_number',
segmentNumber: this.minSegmentRequested,
segmentNumber: Math.max(
this.minSegmentRequested,
this.#highestDeletedBelow,
),
segmentsToKeepBefore: 10,
};
return Result.attemptAsync(async () => {
@@ -133,9 +137,7 @@ export class HlsSession extends BaseHlsSession<HlsSessionOptions> {
this.channel.uuid,
this.channel.number,
);
this.deleteOldSegments(trimResult.sequence).catch((e) =>
this.logger.error(e),
);
await this.deleteOldSegments(trimResult.sequence);
this.#lastDelete = now;
}
@@ -406,6 +408,10 @@ export class HlsSession extends BaseHlsSession<HlsSessionOptions> {
}
private async deleteOldSegments(sequenceNum: number) {
this.#highestDeletedBelow = Math.max(
this.#highestDeletedBelow,
sequenceNum,
);
const workingDirectoryFiles = await fs.readdir(this._workingDirectory);
const segments = filter(
seq.collect(
@@ -414,8 +420,8 @@ export class HlsSession extends BaseHlsSession<HlsSessionOptions> {
return ext === '.ts' || ext === '.mp4' || ext === '.vtt';
}),
(file) => {
const matches = file.match(/\D+(\d+)\.(ts|mp4|vtt)/);
if (matches && matches.length > 0) {
const matches = file.match(SegmentNameRegex);
if (matches && matches.length > 1) {
return {
file,
seq: parseInt(matches[1]!),

View File

@@ -31,24 +31,23 @@ export function discoverFfmpegBinaries(): {
} | null {
try {
const ffmpeg =
process.env['TUNARR_TEST_FFMPEG'] ??
whichFirst('ffmpeg7.1', 'ffmpeg');
process.env['TUNARR_TEST_FFMPEG'] ?? whichFirst('ffmpeg7.1', 'ffmpeg');
const ffprobe =
process.env['TUNARR_TEST_FFPROBE'] ??
whichFirst('ffprobe7.1', 'ffprobe');
process.env['TUNARR_TEST_FFPROBE'] ?? whichFirst('ffprobe7.1', 'ffprobe');
if (!ffmpeg || !ffprobe) {
return null;
}
console.debug('Resolved ffmpeg binaries: ', ffmpeg, ffprobe);
return { ffmpeg, ffprobe };
} catch {
return null;
}
}
export async function createTempWorkdir(): Promise<{
dir: string;
cleanup: () => Promise<void>;

View File

@@ -1,3 +1,16 @@
import { VideoStream } from '@/ffmpeg/builder/MediaStream.js';
import { VideoFormats } from '@/ffmpeg/builder/constants.js';
import { ColorFormat } from '@/ffmpeg/builder/format/ColorFormat.js';
import {
KnownPixelFormats,
PixelFormatUnknown,
} from '@/ffmpeg/builder/format/PixelFormat.js';
import { VideoInputSource } from '@/ffmpeg/builder/input/VideoInputSource.js';
import { FrameSize } from '@/ffmpeg/builder/types.js';
import { FfprobeStreamDetails } from '@/stream/FfprobeStreamDetails.js';
import { FileStreamSource } from '@/stream/types.js';
import path from 'node:path';
import { fileURLToPath } from 'node:url';
import pino from 'pino';
import { test as base } from 'vitest';
import type { FfmpegCapabilities } from '../../ffmpeg/builder/capabilities/FfmpegCapabilities.ts';
@@ -15,6 +28,59 @@ import {
type VaapiDeviceInfo,
} from './FfmpegIntegrationHelper.ts';
// Absolute path to the media fixtures directory that sits next to this module.
const fixturesDir = path.join(
  path.dirname(fileURLToPath(import.meta.url)),
  'fixtures',
);

// Well-known test media files, keyed by a short descriptive name.
export const Fixtures = {
  video720p: path.join(fixturesDir, '720p_h264.ts'),
  video1080p: path.join(fixturesDir, '1080p_h264.ts'),
  video480p43: path.join(fixturesDir, '480p_h264.ts'),
  videoHevc720p: path.join(fixturesDir, '720p_hevc_hdr10.ts'),
  videoHevc1080p: path.join(fixturesDir, '1080p_hevc_hdr10.mp4'),
  watermark: path.join(fixturesDir, 'watermark.png'),
  blackWatermark: path.join(fixturesDir, 'black_watermark.png'),
} as const;
export async function deriveVideoStreamForFixture(
fixturePath: string,
): Promise<VideoInputSource> {
const ffprobeStreamDetails = new FfprobeStreamDetails(makeFfmpegInfo());
const { streamDetails } = (
await ffprobeStreamDetails.getStream({ path: fixturePath })
).getOrThrow();
if (!streamDetails.videoDetails) {
throw new Error(`File at ${fixturePath} has no video streams`);
}
const videoDetails = streamDetails.videoDetails[0];
return VideoInputSource.withStream(
new FileStreamSource(fixturePath),
VideoStream.create({
codec: videoDetails.codec ?? VideoFormats.Undetermined,
profile: videoDetails.profile,
displayAspectRatio: videoDetails.displayAspectRatio,
frameSize: FrameSize.withDimensions(
videoDetails.width,
videoDetails.height,
),
pixelFormat: videoDetails.pixelFormat
? (KnownPixelFormats.forPixelFormat(videoDetails.pixelFormat) ??
PixelFormatUnknown(videoDetails.bitDepth ?? 8))
: PixelFormatUnknown(videoDetails.bitDepth ?? 8),
providedSampleAspectRatio: videoDetails.sampleAspectRatio ?? null,
frameRate: videoDetails.framerate?.toString(),
index: videoDetails.streamIndex ?? 0,
colorFormat: new ColorFormat({
colorRange: videoDetails.colorRange ?? null,
colorSpace: videoDetails.colorSpace ?? null,
colorTransfer: videoDetails.colorTransfer ?? null,
colorPrimaries: videoDetails.colorPrimaries ?? null,
}),
}),
);
}
export const binaries = discoverFfmpegBinaries();
export const vaapiInfo = discoverVaapiDevice();

Binary file not shown.

Binary file not shown.

Binary file not shown.

View File

@@ -31,11 +31,6 @@ import type { SerializedLogger } from './LoggerWrapper.ts';
import { RootLoggerWrapper } from './LoggerWrapper.ts';
import { RollingLogDestination } from './RollingDestination.ts';
export const LogConfigEnvVars = {
level: 'LOG_LEVEL',
directory: 'LOG_DIRECTORY',
} as const;
export function getEnvironmentLogLevel(envVar?: string): Maybe<LogLevels> {
const envLevel = trim(
toLower(process.env[envVar ?? TUNARR_ENV_VARS.LOG_LEVEL_ENV_VAR]),
@@ -108,12 +103,12 @@ export const LogCategories = ['streaming', 'scheduling'] as const;
export type LogCategory = TupleToUnion<typeof LogCategories>;
class LoggerFactoryImpl {
private settingsDB: SettingsDB;
private settingsDB?: SettingsDB;
// private rootLogger: PinoLogger<ExtraLogLevels>;
private rootLogger!: RootLoggerWrapper;
private initialized = false;
private children: Record<string, WeakRef<Logger>> = {};
private currentStreams: MultiStreamRes<LogLevels>;
private currentStreams?: MultiStreamRes<LogLevels>;
private roller?: RollingLogDestination;
constructor() {
@@ -136,7 +131,7 @@ class LoggerFactoryImpl {
}
const currentSettings =
this.settingsDB.systemSettings().logging.logRollConfig;
this.settingsDB?.systemSettings().logging.logRollConfig;
const { level: newLevel } = this.logLevel;
const perCategoryLogLevel = this.perCategoryLogLevel;
@@ -272,12 +267,11 @@ class LoggerFactoryImpl {
return;
}
// Reset the level of the root logger and all children
// We do this by setting the level on the instance directly
// but then for multistream to work, we have to manually reset the streams
// by cloning them with new levels.
this.rootLogger.level = newLevel;
this.rootLogger.updateStreams(this.createLogStreams(newLevel));
// Reset the level of the root logger and all children.
// We set the level on every logger instance directly because pino children
// snapshot the parent's level at creation time and don't follow changes.
// For multistream to work, we also have to manually reset the streams.
this.rootLogger.updateLevel(newLevel, this.createLogStreams(newLevel));
}
private createStreams(logLevel: LogLevels): StreamEntry<LogLevels>[] {

View File

@@ -25,6 +25,7 @@ interface ILoggerWrapper {
args: GetChildLoggerArgs,
opts?: ChildLoggerOptions<LogLevels>,
): ILoggerWrapper;
updateLevel(level: LogLevels, streams: MultiStreamRes<LogLevels>): void;
updateStreams(streams: MultiStreamRes<LogLevels>): void;
logger: Logger;
traverseHierarchy(): Generator<readonly [string, SerializedLogger]>;
@@ -32,7 +33,7 @@ interface ILoggerWrapper {
}
abstract class BaseLoggerWrapper implements ILoggerWrapper {
protected children: Record<string, WeakRef<ILoggerWrapper>> = {};
protected children: Record<string, ILoggerWrapper> = {};
constructor(protected wrappedLogger: Logger) {}
@@ -41,14 +42,20 @@ abstract class BaseLoggerWrapper implements ILoggerWrapper {
opts?: ChildLoggerOptions<LogLevels>,
): ILoggerWrapper;
updateLevel(level: LogLevels, streams: MultiStreamRes<LogLevels>) {
this.wrappedLogger.level = level;
Object.assign(this.wrappedLogger[symbols.streamSym], streams);
for (const child of Object.values(this.children)) {
child.updateLevel(level, streams);
}
}
updateStreams(streams: MultiStreamRes<LogLevels>) {
Object.assign(this.wrappedLogger[symbols.streamSym], streams);
for (const childRef of Object.values(this.children)) {
const child = childRef.deref();
if (child) {
child.updateStreams(streams);
}
for (const child of Object.values(this.children)) {
child.updateStreams(streams);
}
}
@@ -67,8 +74,8 @@ abstract class BaseLoggerWrapper implements ILoggerWrapper {
}
*traverseHierarchy() {
for (const [loggerName, ref] of Object.entries(this.children)) {
const child = ref.deref();
for (const [loggerName, child] of Object.entries(this.children)) {
// const child = ref.deref();
if (!child) {
continue;
}
@@ -110,7 +117,7 @@ export class RootLoggerWrapper extends BaseLoggerWrapper {
{ level: initialLogSettings?.categoryLogLevel?.[category] },
);
const wrapped = new LoggerWrapper(categoryLogger);
this.children[`category:${category}`] = new WeakRef(wrapped);
this.children[`category:${category}`] = wrapped;
this.loggerByCategory.set(category, wrapped);
}
}
@@ -121,7 +128,7 @@ export class RootLoggerWrapper extends BaseLoggerWrapper {
): ILoggerWrapper {
const { caller, className, category, ...rest } = args;
const ref = this.children[className]?.deref();
const ref = this.children[className]; //?.deref();
if (ref) {
return ref;
}
@@ -145,15 +152,11 @@ export class RootLoggerWrapper extends BaseLoggerWrapper {
} else {
const newLogger = this.wrappedLogger.child(childOpts, opts);
const wrapped = new LoggerWrapper(newLogger);
this.children[className] = new WeakRef(wrapped);
this.children[className] = wrapped;
return wrapped;
}
}
set level(newLevel: LogLevels) {
this.wrappedLogger.level = newLevel;
}
updateCategoryLevel(
newLevel: LogLevels,
category: LogCategory,
@@ -164,8 +167,7 @@ export class RootLoggerWrapper extends BaseLoggerWrapper {
return;
}
rootCategoryLogger.logger.level = newLevel;
rootCategoryLogger.updateStreams(newStreamFn());
rootCategoryLogger.updateLevel(newLevel, newStreamFn());
}
}
@@ -189,7 +191,7 @@ export class LoggerWrapper extends BaseLoggerWrapper {
): ILoggerWrapper {
const { caller, className, ...rest } = args;
const ref = this.children[className]?.deref();
const ref = this.children[className]; //?.deref();
if (ref) {
return ref;
}
@@ -208,7 +210,7 @@ export class LoggerWrapper extends BaseLoggerWrapper {
const newChild = new LoggerWrapper(
this.wrappedLogger.child(childOpts, opts),
);
this.children[className] = new WeakRef(newChild);
this.children[className] = newChild;
return newChild;
}
}

View File

@@ -627,4 +627,26 @@ describe('searchFilterToString', () => {
const request = parsedSearchToRequest(query);
expect(searchFilterToString(request)).toEqual('genre in ["comedy"]');
});
test('starts with renders as < not literal "starts with"', () => {
const filter = {
type: 'value',
fieldSpec: {
key: 'title',
name: 'title',
op: 'starts with',
type: 'string',
value: ['The'],
},
} satisfies SearchFilter;
expect(searchFilterToString(filter)).toEqual('title < "The"');
});
test('round-trips starts with through parse and stringify', () => {
const input = 'title < "The"';
const query = parseAndCheckExpression(input);
const request = parsedSearchToRequest(query);
expect(searchFilterToString(request)).toEqual(input);
});
});

View File

@@ -234,7 +234,16 @@ const SearchExpressionLexer = new Lexer({
defaultMode: 'normalMode',
});
const StringOps = ['=', '!=', '<', '<=', 'in', 'not in', 'contains', 'not contains'] as const;
const StringOps = [
'=',
'!=',
'<',
'<=',
'in',
'not in',
'contains',
'not contains',
] as const;
type StringOps = TupleToUnion<typeof StringOps>;
const NumericOps = ['=', '!=', '<', '<=', '>', '>=', 'between'] as const;
type NumericOps = TupleToUnion<typeof NumericOps>;
@@ -364,6 +373,7 @@ const indexOperatorToSyntax: Dictionary<string> = {
contains: '~',
'not contains': '!~',
to: 'between',
'starts with': '<',
};
function normalizeReleaseDate(value: string) {

View File

@@ -14,7 +14,7 @@ import type { Dayjs } from 'dayjs';
import dayjs from 'dayjs';
import { capitalize } from 'lodash-es';
import type { ReactEventHandler } from 'react';
import { useCallback, useEffect, useMemo, useRef, useState } from 'react';
import { useCallback, useMemo, useRef, useState } from 'react';
import { useSettings } from '../store/settings/selectors.ts';
import ProgramInfoBar from './programs/ProgramInfoBar.tsx';
@@ -32,8 +32,20 @@ export const ProgramMetadataDialogContent = ({
stop,
}: Props) => {
const settings = useSettings();
const [thumbLoadState, setThumbLoadState] =
useState<ThumbLoadState>('loading');
const thumbnailImage = useMemo(() => {
return `${settings.backendUri}/api/programs/${program.uuid}/artwork/poster`;
}, [settings.backendUri, program]);
const [imageState, setImageState] = useState<{
url: string | undefined;
state: ThumbLoadState;
}>({ url: undefined, state: 'loading' });
// Derive load state: if the stored URL doesn't match the current thumbnail,
// the image hasn't loaded yet — treat it as loading without a useEffect reset.
const thumbLoadState: ThumbLoadState =
imageState.url === thumbnailImage ? imageState.state : 'loading';
const imageRef = useRef<HTMLImageElement>(null);
const theme = useTheme();
@@ -41,10 +53,6 @@ export const ProgramMetadataDialogContent = ({
const isEpisode = program && program.type === 'episode';
const imageWidth = smallViewport ? (isEpisode ? '100%' : '55%') : 240;
const thumbnailImage = useMemo(() => {
return `${settings.backendUri}/api/programs/${program.uuid}/artwork/poster`;
}, [settings.backendUri, program]);
const externalLink = useMemo(() => {
return `${settings.backendUri}/api/programs/${program.uuid}/external-link`;
}, [settings.backendUri, program]);
@@ -53,18 +61,17 @@ export const ProgramMetadataDialogContent = ({
return `${window.location.origin}/web/media/${program.type}/${program.uuid}`;
}, [program]);
useEffect(() => {
setThumbLoadState('loading');
const onLoad = useCallback(() => {
setImageState({ url: thumbnailImage, state: 'success' });
}, [thumbnailImage]);
const onLoad = useCallback(() => {
setThumbLoadState('success');
}, [setThumbLoadState]);
const onError: ReactEventHandler<HTMLImageElement> = useCallback((e) => {
console.error(e);
setThumbLoadState('error');
}, []);
const onError: ReactEventHandler<HTMLImageElement> = useCallback(
(e) => {
console.error(e);
setImageState({ url: thumbnailImage, state: 'error' });
},
[thumbnailImage],
);
const summary = useMemo(() => {
return getProgramSummary(program);
@@ -159,6 +166,7 @@ export const ProgramMetadataDialogContent = ({
borderTop: `1px solid`,
borderBottom: `1px solid`,
my: 1,
py: 1,
textAlign: ['center', 'left'],
}}
>

View File

@@ -1,7 +1,7 @@
import type { channelListOptions } from '@/types/index.ts';
import { Settings } from '@mui/icons-material';
import KeyboardArrowDownIcon from '@mui/icons-material/KeyboardArrowDown';
import { Button } from '@mui/material';
import { Button, IconButton, useMediaQuery, useTheme } from '@mui/material';
import type { Channel } from '@tunarr/types';
import { isNull } from 'lodash-es';
import { useState } from 'react';
@@ -16,6 +16,8 @@ export const ChannelOptionsButton = ({ channel, hideItems }: Props) => {
const [anchorEl, setAnchorEl] = useState<null | HTMLElement>(null);
const [channelMenu, setChannelMenu] = useState<Channel>();
const open = !isNull(anchorEl);
const theme = useTheme();
const smallViewport = useMediaQuery(theme.breakpoints.down('sm'));
const handleClick = (
event: React.MouseEvent<HTMLElement>,
@@ -31,19 +33,30 @@ export const ChannelOptionsButton = ({ channel, hideItems }: Props) => {
return (
<>
<Button
variant="outlined"
startIcon={<Settings />}
aria-controls={open ? 'channel-nav-menu' : undefined}
aria-haspopup="true"
aria-expanded={open ? 'true' : undefined}
disableRipple
disableElevation
onClick={(event) => handleClick(event, channel)}
endIcon={<KeyboardArrowDownIcon />}
>
Options
</Button>
{smallViewport ? (
<IconButton
aria-controls={open ? 'channel-nav-menu' : undefined}
aria-haspopup="true"
aria-expanded={open ? 'true' : undefined}
onClick={(event) => handleClick(event, channel)}
>
<Settings />
</IconButton>
) : (
<Button
variant="outlined"
startIcon={<Settings />}
aria-controls={open ? 'channel-nav-menu' : undefined}
aria-haspopup="true"
aria-expanded={open ? 'true' : undefined}
disableRipple
disableElevation
onClick={(event) => handleClick(event, channel)}
endIcon={<KeyboardArrowDownIcon />}
>
Options
</Button>
)}
{channelMenu && (
<ChannelOptionsMenu
anchorEl={anchorEl}

View File

@@ -6,7 +6,7 @@ import {
ContentProgramTypeSchema,
type ContentProgramType,
} from '@tunarr/types/schemas';
import { groupBy, isNil, keys, mapValues, omitBy } from 'lodash-es';
import { groupBy, isNil, keys, mapValues, omitBy, uniqBy } from 'lodash-es';
import { useMemo, useState } from 'react';
import {
extractProgramGrandparent,
@@ -103,14 +103,17 @@ export const ChannelPrograms = ({ channelId }: Props) => {
const programsByType = useMemo(
() =>
groupBy(
seq.collect(lineup, (p) => {
if (p.type === 'content' && p.id) {
return programs[p.id];
} else if (p.type === 'custom') {
return programs[p.id];
}
return;
}),
uniqBy(
seq.collect(lineup, (p) => {
if (p.type === 'content' && p.id) {
return programs[p.id];
} else if (p.type === 'custom') {
return programs[p.id];
}
return;
}),
(p) => p.id,
),
({ program }) => program.type,
),
[lineup, programs],
@@ -188,7 +191,12 @@ export const ChannelPrograms = ({ channelId }: Props) => {
return (
<>
<Tabs value={tab} onChange={(_, v) => setTab(v as number)}>
<Tabs
value={tab}
onChange={(_, v) => setTab(v as number)}
variant="scrollable"
allowScrollButtonsMobile
>
{Object.values(ContentProgramTypeSchema.enum).map((v, idx) => (
<ProgramTypeTab
key={v}

View File

@@ -79,7 +79,7 @@ export const ChannelSummaryQuickStats = ({ channelId }: Props) => {
},
}}
>
<Grid size={{ xs: 12, md: 4 }} sx={{ p: 1 }}>
<Grid size={{ xs: 12, md: 4 }} sx={{ p: [0.5, 1] }}>
<Stack direction="row">
<div>
<Typography variant="overline">Total Runtime</Typography>
@@ -88,7 +88,7 @@ export const ChannelSummaryQuickStats = ({ channelId }: Props) => {
<Box></Box>
</Stack>
</Grid>
<Grid size={{ xs: 12, md: 4 }} sx={{ p: 1 }}>
<Grid size={{ xs: 12, md: 4 }} sx={{ p: [0.5, 1] }}>
<Stack direction="row">
<div>
<Typography variant="overline">Programs</Typography>
@@ -96,7 +96,7 @@ export const ChannelSummaryQuickStats = ({ channelId }: Props) => {
</div>
</Stack>
</Grid>
<Grid size={{ xs: 12, md: 2 }} sx={{ p: 1 }}>
<Grid size={{ xs: 12, md: 2 }} sx={{ p: [0.5, 1] }}>
<Box sx={{ flex: 1 }}>
<Typography variant="overline">Stream Mode</Typography>
<Typography variant="h5">
@@ -104,7 +104,7 @@ export const ChannelSummaryQuickStats = ({ channelId }: Props) => {
</Typography>
</Box>
</Grid>
<Grid size={{ xs: 12, md: 2 }} sx={{ p: 1 }}>
<Grid size={{ xs: 12, md: 2 }} sx={{ p: [0.5, 1] }}>
<Box sx={{ flex: 1 }}>
<Typography variant="overline">
Transcode Config{' '}

View File

@@ -110,10 +110,20 @@ function ProgramDetailsDialogContent({
</Skeleton>
) : (
<DialogTitle
variant="h4"
variant={smallViewport ? 'h6' : 'h4'}
sx={{ display: 'flex', alignItems: 'center' }}
>
<Box sx={{ flex: 1 }}>{title}</Box>
<Box
sx={{
flex: 1,
minWidth: 0,
overflow: 'hidden',
textOverflow: 'ellipsis',
whiteSpace: 'nowrap',
}}
>
{title}
</Box>
<IconButton
onClick={(e) => setMoreMenuAnchorEl(e.currentTarget)}
@@ -149,6 +159,8 @@ function ProgramDetailsDialogContent({
<Tabs
value={tab}
onChange={(_, v) => setTab(v as Panels)}
variant="scrollable"
allowScrollButtonsMobile
sx={{ mb: 2 }}
>
{visibility.metadata && <Tab value={'metadata'} label="Overview" />}
@@ -332,7 +344,11 @@ export default function ProgramDetailsDialog(props: Props) {
backgroundSize: 'cover',
backgroundRepeat: 'no-repeat',
backgroundPosition: 'center',
minHeight: programType === 'episode' ? 450 : 575,
minHeight: smallViewport
? undefined
: programType === 'episode'
? 450
: 575,
},
},
}}

View File

@@ -193,12 +193,33 @@ export default function ProgramInfoBar({ program, time }: Props) {
seasonTitle,
]);
return compact(itemInfoBar).map((chip, index) => (
<React.Fragment key={index}>
<Box display="inline-block">{chip}</Box>
{index < itemInfoBar.length - 1 && (
<Box display="inline-block">&nbsp;&nbsp;&bull;&nbsp;&nbsp;</Box>
)}
</React.Fragment>
));
const compacted = compact(itemInfoBar);
return (
<Box
sx={{
display: 'flex',
flexWrap: 'wrap',
alignItems: 'center',
rowGap: 0.5,
}}
>
{compacted.map((chip, index) => (
<React.Fragment key={index}>
<Box sx={{ display: 'flex', alignItems: 'center' }}>{chip}</Box>
{index < compacted.length - 1 && (
<Box
sx={{
display: 'flex',
alignItems: 'center',
mx: 0.75,
userSelect: 'none',
}}
>
&bull;
</Box>
)}
</React.Fragment>
))}
</Box>
);
}

View File

@@ -29,21 +29,21 @@ export const ChannelSummaryPage = () => {
return (
<Stack spacing={2}>
<Breadcrumbs />
<Stack direction="row" alignItems="center" spacing={1}>
<Box>
{isNonEmptyString(channel.icon.path) ? (
<Box component="img" width={[32, 132]} src={channel.icon.path} />
) : (
<TunarrLogo style={{ width: smallViewport ? '32px' : '132px' }} />
)}
</Box>
<Box sx={{ flex: 1 }}>
<Typography variant="h4">{channel.name}</Typography>
<Stack direction="row" alignItems="flex-start" spacing={1}>
<Stack spacing={0.5} sx={{ flex: 1, minWidth: 0 }}>
<Box>
{isNonEmptyString(channel.icon.path) ? (
<Box component="img" width={[48, 132]} src={channel.icon.path} />
) : (
<TunarrLogo style={{ width: smallViewport ? '48px' : '132px' }} />
)}
</Box>
<Typography variant={smallViewport ? 'h5' : 'h4'} noWrap>
{channel.name}
</Typography>
<Typography variant="subtitle1">Channel #{channel.number}</Typography>
</Box>
</Stack>
<Stack direction="row" spacing={1} justifyContent="right">
</Stack>
<ChannelOptionsButton
channel={channel}
hideItems={['duplicate', 'delete']}

View File

@@ -1,13 +1,21 @@
import { MoreVert, PlayArrowOutlined } from '@mui/icons-material';
import {
Box,
Card,
CardContent,
Divider,
IconButton,
List,
ListItem,
ListItemIcon,
ListItemText,
Menu,
MenuItem,
Stack,
Tooltip,
Typography,
useMediaQuery,
useTheme,
} from '@mui/material';
import { useMutation, useSuspenseQuery } from '@tanstack/react-query';
import type { Task } from '@tunarr/types';
@@ -25,6 +33,8 @@ import type { Nullable } from '../../types/util.ts';
export default function TaskSettingsPage() {
const snackbar = useSnackbar();
const theme = useTheme();
const isMobile = useMediaQuery(theme.breakpoints.down('sm'));
const { data: tasks } = useSuspenseQuery({
...getApiTasksOptions(),
refetchInterval: 60 * 1000, // Check tasks every minute
@@ -172,6 +182,83 @@ export default function TaskSettingsPage() {
},
});
const renderMobileList = () => (
<List disablePadding>
{(tasks ?? []).map((task, idx) => {
const lastEpoch = maxBy(
task.scheduledTasks,
(t) => t.lastExecutionEpoch,
)?.lastExecutionEpoch;
const nextEpoch = minBy(
task.scheduledTasks,
(t) => t.nextExecutionEpoch,
)?.nextExecutionEpoch;
return (
<Box key={task.id}>
{idx > 0 && <Divider />}
<ListItem
disableGutters
secondaryAction={
<Tooltip title="Run now">
<IconButton
edge="end"
onClick={() => runTaskNow(task.id)}
sx={{ mr: 1 }}
>
<PlayArrowOutlined />
</IconButton>
</Tooltip>
}
>
<Card
elevation={0}
sx={{ flex: 1, mr: 6, bgcolor: 'transparent' }}
>
<CardContent sx={{ p: 1, '&:last-child': { pb: 1 } }}>
<Typography variant="subtitle1" fontWeight="bold">
{task.name}
</Typography>
{task.description && (
<Typography
variant="body2"
color="text.secondary"
gutterBottom
>
{task.description}
</Typography>
)}
<Stack direction="row" gap={2} mt={0.5} flexWrap="wrap">
<Box>
<Typography variant="caption" color="text.secondary">
Last run
</Typography>
<Typography variant="body2">
{lastEpoch
? dayjs(lastEpoch * 1000).format('lll')
: '-'}
</Typography>
</Box>
<Box>
<Typography variant="caption" color="text.secondary">
Next run
</Typography>
<Typography variant="body2">
{nextEpoch
? dayjs(nextEpoch * 1000).format('lll')
: '-'}
</Typography>
</Box>
</Stack>
</CardContent>
</Card>
</ListItem>
</Box>
);
})}
</List>
);
return (
<Stack gap={2}>
<Box>
@@ -181,7 +268,7 @@ export default function TaskSettingsPage() {
operations.
</Typography>
</Box>
<MRT_Table table={table} />
{isMobile ? renderMobileList() : <MRT_Table table={table} />}
{/* <TableContainer>
<Table>
<TableHead>