chore: release v0.3.1-rc.3

This commit is contained in:
2026-04-12 10:25:12 -04:00
parent 3e9b0f0290
commit 5ca33835f1
60 changed files with 2904 additions and 1598 deletions

View File

@@ -0,0 +1,67 @@
---
name: caveman
description: >
Ultra-compressed communication mode. Cuts token usage ~75% by speaking like caveman
while keeping full technical accuracy. Supports intensity levels: lite, full (default), ultra,
wenyan-lite, wenyan-full, wenyan-ultra.
Use when user says "caveman mode", "talk like caveman", "use caveman", "less tokens",
"be brief", or invokes /caveman. Also auto-triggers when token efficiency is requested.
---
Respond terse like smart caveman. All technical substance stay. Only fluff die.
## Persistence
ACTIVE EVERY RESPONSE. No revert after many turns. No filler drift. Still active if unsure. Off only: "stop caveman" / "normal mode".
Default: **full**. Switch: `/caveman lite|full|ultra`.
## Rules
Drop: articles (a/an/the), filler (just/really/basically/actually/simply), pleasantries (sure/certainly/of course/happy to), hedging. Fragments OK. Short synonyms (big not extensive, fix not "implement a solution for"). Technical terms exact. Code blocks unchanged. Errors quoted exact.
Pattern: `[thing] [action] [reason]. [next step].`
Not: "Sure! I'd be happy to help you with that. The issue you're experiencing is likely caused by..."
Yes: "Bug in auth middleware. Token expiry check use `<` not `<=`. Fix:"
## Intensity
| Level | What change |
|-------|------------|
| **lite** | No filler/hedging. Keep articles + full sentences. Professional but tight |
| **full** | Drop articles, fragments OK, short synonyms. Classic caveman |
| **ultra** | Abbreviate (DB/auth/config/req/res/fn/impl), strip conjunctions, arrows for causality (X → Y), one word when one word enough |
| **wenyan-lite** | Semi-classical. Drop filler/hedging but keep grammar structure, classical register |
| **wenyan-full** | Maximum classical terseness. Fully 文言文. 80-90% character reduction. Classical sentence patterns, verbs precede objects, subjects often omitted, classical particles (之/乃/為/其) |
| **wenyan-ultra** | Extreme abbreviation while keeping classical Chinese feel. Maximum compression, ultra terse |
Example — "Why React component re-render?"
- lite: "Your component re-renders because you create a new object reference each render. Wrap it in `useMemo`."
- full: "New object ref each render. Inline object prop = new ref = re-render. Wrap in `useMemo`."
- ultra: "Inline obj prop → new ref → re-render. `useMemo`."
- wenyan-lite: "組件頻重繪,以每繪新生對象參照故。以 useMemo 包之。"
- wenyan-full: "物出新參照致重繪。useMemo Wrap之。"
- wenyan-ultra: "新參照→重繪。useMemo Wrap。"
Example — "Explain database connection pooling."
- lite: "Connection pooling reuses open connections instead of creating new ones per request. Avoids repeated handshake overhead."
- full: "Pool reuse open DB connections. No new connection per request. Skip handshake overhead."
- ultra: "Pool = reuse DB conn. Skip handshake → fast under load."
- wenyan-full: "池reuse open connection。不每req新開。skip handshake overhead。"
- wenyan-ultra: "池reuse conn。skip handshake → fast。"
## Auto-Clarity
Drop caveman for: security warnings, irreversible action confirmations, multi-step sequences where fragment order risks misread, user asks to clarify or repeats question. Resume caveman after clear part done.
Example — destructive op:
> **Warning:** This will permanently delete all rows in the `users` table and cannot be undone.
> ```sql
> DROP TABLE users;
> ```
> Caveman resume. Verify backup exist first.
## Boundaries
Code/commits/PRs: write normal. "stop caveman" or "normal mode": revert. Level persist until changed or session end.

View File

@@ -12,7 +12,22 @@
"Bash(bash --version)", "Bash(bash --version)",
"Bash(git tag:*)", "Bash(git tag:*)",
"Bash(cargo clippy:*)", "Bash(cargo clippy:*)",
"Bash(bun run:*)" "Bash(bun run:*)",
"Bash(ls /Users/brooklyn/data/alchemist/*.md)",
"Bash(ls /Users/brooklyn/data/alchemist/docs/*.md)",
"Bash(npx skills:*)",
"Bash(find /Users/brooklyn/data/alchemist/web -name tailwind.config.* -o -name *.config.ts -o -name *.config.js)",
"Bash(just check-web:*)",
"Bash(git stash:*)",
"Bash(just test-e2e:*)",
"Bash(bunx tsc:*)",
"Bash(wait)",
"Bash(npx playwright:*)",
"Bash(just check-rust:*)",
"Bash(cargo fmt:*)",
"Bash(cargo test:*)",
"Bash(just check:*)",
"Bash(just test:*)"
] ]
} }
} }

1
.claude/skills/caveman Symbolic link
View File

@@ -0,0 +1 @@
../../.agents/skills/caveman

3
.idea/alchemist.iml generated
View File

@@ -2,7 +2,7 @@
<module type="EMPTY_MODULE" version="4"> <module type="EMPTY_MODULE" version="4">
<component name="FacetManager"> <component name="FacetManager">
<facet type="Python" name="Python facet"> <facet type="Python" name="Python facet">
<configuration sdkName="Python 3.14 (alchemist)" /> <configuration sdkName="" />
</facet> </facet>
</component> </component>
<component name="NewModuleRootManager"> <component name="NewModuleRootManager">
@@ -13,6 +13,5 @@
</content> </content>
<orderEntry type="inheritedJdk" /> <orderEntry type="inheritedJdk" />
<orderEntry type="sourceFolder" forTests="false" /> <orderEntry type="sourceFolder" forTests="false" />
<orderEntry type="library" name="Python 3.14 (alchemist) interpreter library" level="application" />
</component> </component>
</module> </module>

View File

@@ -1,6 +1,8 @@
<component name="InspectionProjectProfileManager"> <component name="InspectionProjectProfileManager">
<profile version="1.0"> <profile version="1.0">
<option name="myName" value="Project Default" /> <option name="myName" value="Project Default" />
<inspection_tool class="CyclomaticComplexityInspection" enabled="true" level="WARNING" enabled_by_default="true" />
<inspection_tool class="SqlNoDataSourceInspection" enabled="false" level="WARNING" enabled_by_default="false" /> <inspection_tool class="SqlNoDataSourceInspection" enabled="false" level="WARNING" enabled_by_default="false" />
<inspection_tool class="TsLint" enabled="true" level="WARNING" enabled_by_default="true" />
</profile> </profile>
</component> </component>

View File

@@ -2,6 +2,28 @@
All notable changes to this project will be documented in this file. All notable changes to this project will be documented in this file.
## [0.3.1-rc.3] - 2026-04-12
### New Features
#### Job Management Refactor
- **Componentized Job Manager** — extracted monolithic `JobManager.tsx` into a modular suite under `web/src/components/jobs/`, including dedicated components for the toolbar, table, and detail modal.
- **Enhanced Job Detail Modal** — rebuilt the job detail view with better loading states, smoother transitions, and improved information hierarchy for analysis, decisions, and failure reasons.
- **Job SSE Hook** — unified job-related Server-Sent Events logic into a custom `useJobSSE` hook for better state management and reduced re-renders.
#### Themes & UX
- **Midnight OLED+** — enhanced the `midnight` theme with true-black surfaces and suppressed decorative gradients to maximize OLED power savings.
- **Improved Toasts** — toast notifications now feature a high-quality backdrop blur and refined border styling for better visibility against busy backgrounds.
#### Reliability & Observability
- **Engine Lifecycle Specs** — added a comprehensive Playwright suite for validating engine transitions (Running -> Draining -> Paused -> Stopped).
- **Planner & Lifecycle Docs** — added detailed technical documentation for the transcoding planner logic and engine state machine.
- **Encode Attempt Tracking** — added a database migration to track individual encode attempts, laying the groundwork for more granular retry statistics.
#### Hardware & Performance
- **Concurrency & Speed Optimizations** — internal refinements to the executor and processor to improve hardware utilization and address reported speed issues on certain platforms.
- **Backlog Grooming** — updated `TODO.md` with a focus on validating AMF and VAAPI AV1 hardware encoders.
## [0.3.1-rc.1] - 2026-04-08 ## [0.3.1-rc.1] - 2026-04-08
### New Features ### New Features

2
Cargo.lock generated
View File

@@ -13,7 +13,7 @@ dependencies = [
[[package]] [[package]]
name = "alchemist" name = "alchemist"
version = "0.3.1-rc.1" version = "0.3.1-rc.3"
dependencies = [ dependencies = [
"anyhow", "anyhow",
"argon2", "argon2",

View File

@@ -1,6 +1,6 @@
[package] [package]
name = "alchemist" name = "alchemist"
version = "0.3.1-rc.1" version = "0.3.1-rc.3"
edition = "2024" edition = "2024"
rust-version = "1.85" rust-version = "1.85"
license = "GPL-3.0" license = "GPL-3.0"

23
TODO.md
View File

@@ -1,21 +1,8 @@
# Todo List # Todo List
Remove `src/wizard.rs` from the project, the web setup handles it.. maybe keep for CLI users? ## AMD / VAAPI / AMF
## Frontend - Validate `av1_vaapi` on real Linux VAAPI hardware — confirm encode succeeds with current args.
- Validate `av1_amf` on real Windows AMF hardware — confirm encode succeeds with current args.
- Rework the Jobs screen sorting/filter island so it uses space more intelligently on narrow screens and overflows in a controlled, intentional-looking way instead of overflowing awkwardly. - If either encoder needs quality/rate-control params, apply the same pattern as the VideoToolbox fix (add `rate_control: Option<&RateControl>` to `vaapi::append_args` and `amf::append_args`).
- Make the toast across all pages blur the background instead of reading as transparent. - Update support claims in README and UI only after validation passes.
- Fix the Jobs modal so active jobs do not show `Waiting for analysis` while encoding/remuxing is already in progress.
- Reduce the stop/drain redundancy in the header so pressing Stop does not leave both the button and the status pill saying `Stopping`.
- Make the `midnight` OLED theme truly black, without gray treatment or shared gradients.
## Backend
- Investigate why encoding is very slow on macOS even when hardware acceleration is selected.
- Investigate why so many jobs are skipped and why only one job appears to run at a time even when concurrent jobs are enabled.
- Fix the clippy error that is currently blocking CI/CD.
## Jobs / UX
- Improve failed-job explanations on the Jobs screen when the current failure summary is weak or missing.

View File

@@ -1 +1 @@
0.3.1-rc.1 0.3.1-rc.3

136
audit.md Normal file
View File

@@ -0,0 +1,136 @@
# Audit Findings
Date: 2026-04-11
## Summary
This audit focused on the highest-risk paths in Alchemist:
- queue claiming and cancellation
- media planning and execution
- conversion validation
- setup/auth exposure
- job detail and failure UX
The current automated checks were green at audit time, but several real
correctness and behavior issues remain.
## Findings
### [P1] Canceling a job during analysis can be overwritten
Relevant code:
- `src/server/jobs.rs:41`
- `src/media/pipeline.rs:927`
- `src/media/pipeline.rs:970`
- `src/orchestrator.rs:239`
`request_job_cancel()` marks `analyzing` and `resuming` jobs as
`cancelled` immediately. But the analysis/planning path can still run to
completion and later overwrite that state to `skipped`,
`encoding`/`remuxing`, or another follow-on state.
The transcoder-side `pending_cancels` check only applies around FFmpeg
spawn, so a cancel issued during analysis is not guaranteed to stop the
pipeline before state transitions are persisted.
Impact:
- a user-visible cancel can be lost
- the UI can report a cancelled job that later resumes or becomes skipped
- queue state becomes harder to trust
### [P1] VideoToolbox quality controls are effectively a no-op
Relevant code:
- `src/config.rs:85`
- `src/media/planner.rs:633`
- `src/media/ffmpeg/videotoolbox.rs:3`
- `src/conversion.rs:424`
The config still defines a VideoToolbox quality ladder, and the planner
still emits `RateControl::Cq` for VideoToolbox encoders. But the actual
VideoToolbox FFmpeg builder ignores rate-control input entirely.
The Convert workflow does the same thing by still generating `Cq` for
non-CPU/QSV encoders even though the VideoToolbox path does not consume
it.
Impact:
- quality profile does not meaningfully affect VideoToolbox jobs
- Convert quality values for VideoToolbox are misleading
- macOS throughput/quality tradeoffs are harder to reason about
### [P2] Convert does not reuse subtitle/container compatibility checks
Relevant code:
- `src/media/planner.rs:863`
- `src/media/planner.rs:904`
- `src/conversion.rs:272`
- `src/conversion.rs:366`
The main library planner explicitly rejects unsafe subtitle-copy
combinations, especially for MP4/MOV targets. The Convert flow has its
own normalization/build path and does not reuse that validation.
Impact:
- the Convert UI can accept settings that are known to fail later in FFmpeg
- conversion behavior diverges from library-job behavior
- users can hit avoidable execution-time errors instead of fast validation
### [P2] Completed job details omit metadata at the API layer
Relevant code:
- `src/server/jobs.rs:344`
- `web/src/components/JobManager.tsx:1774`
The job detail endpoint explicitly returns `metadata = None` for
`completed` jobs, even though the Jobs modal is structured to display
input metadata when available.
Impact:
- completed-job details are structurally incomplete
- the frontend needs special-case empty-state behavior
- operator confidence is lower when comparing completed jobs after the fact
### [P2] LAN-only setup is easy to misconfigure behind a local reverse proxy
Relevant code:
- `src/server/middleware.rs:269`
- `src/server/middleware.rs:300`
The setup gate uses `request_ip()` and trusts forwarded headers only when
the direct peer is local/private. If Alchemist sits behind a loopback or
LAN reverse proxy that fails to forward the real client IP, the request
falls back to the proxy peer IP and is treated as LAN-local.
Impact:
- public reverse-proxy deployments can accidentally expose setup
- behavior depends on correct proxy header forwarding
- the security model is sound in principle but fragile in deployment
## What To Fix First
1. Fix the cancel-during-analysis race.
2. Fix or redesign VideoToolbox quality handling so the UI and planner do
not promise controls that the backend ignores.
3. Reuse planner validation in Convert for subtitle/container safety.
4. Decide whether completed jobs should persist and return metadata in the
detail API.
## What To Investigate Next
1. Use runtime diagnostics to confirm whether macOS slowness is true
hardware underperformance, silent fallback, or filter overhead.
2. Verify whether “only one job at a time” is caused by actual worker
serialization or by planner eligibility/skips.
3. Review dominant skip reasons before relaxing planner heuristics.

View File

@@ -3,6 +3,28 @@ title: Changelog
description: Release history for Alchemist. description: Release history for Alchemist.
--- ---
## [0.3.1-rc.3] - 2026-04-12
### New Features
#### Job Management Refactor
- **Componentized Job Manager** — extracted monolithic `JobManager.tsx` into a modular suite under `web/src/components/jobs/`, including dedicated components for the toolbar, table, and detail modal.
- **Enhanced Job Detail Modal** — rebuilt the job detail view with better loading states, smoother transitions, and improved information hierarchy for analysis, decisions, and failure reasons.
- **Job SSE Hook** — unified job-related Server-Sent Events logic into a custom `useJobSSE` hook for better state management and reduced re-renders.
#### Themes & UX
- **Midnight OLED+** — enhanced the `midnight` theme with true-black surfaces and suppressed decorative gradients to maximize OLED power savings.
- **Improved Toasts** — toast notifications now feature a high-quality backdrop blur and refined border styling for better visibility against busy backgrounds.
#### Reliability & Observability
- **Engine Lifecycle Specs** — added a comprehensive Playwright suite for validating engine transitions (Running -> Draining -> Paused -> Stopped).
- **Planner & Lifecycle Docs** — added detailed technical documentation for the transcoding planner logic and engine state machine.
- **Encode Attempt Tracking** — added a database migration to track individual encode attempts, laying the groundwork for more granular retry statistics.
#### Hardware & Performance
- **Concurrency & Speed Optimizations** — internal refinements to the executor and processor to improve hardware utilization and address reported speed issues on certain platforms.
- **Backlog Grooming** — updated `TODO.md` with a focus on validating AMF and VAAPI AV1 hardware encoders.
## [0.3.1-rc.1] - 2026-04-08 ## [0.3.1-rc.1] - 2026-04-08
### New Features ### New Features

View File

@@ -0,0 +1,152 @@
---
title: Engine Lifecycle
description: Engine states, transitions, and job cancellation semantics.
---
The Alchemist engine is a background loop that claims queued jobs, processes them, and manages concurrent execution. This page documents all states, what triggers each transition, and the exact behavior during cancel, pause, drain, and restart.
---
## Engine states
| State | Jobs start? | Active jobs affected? | How to enter |
|-------|------------|----------------------|-------------|
| **Running** | Yes | Not affected | Resume, restart |
| **Paused** (manual) | No | Not cancelled | Header → Stop, `POST /api/engine/pause` |
| **Paused** (scheduler) | No | Not cancelled | Schedule window activates |
| **Draining** | No | Run to completion | Header → Stop (while running), `POST /api/engine/drain` |
| **Restarting** | No (briefly) | Cancelled | `POST /api/engine/restart` |
| **Shutdown** | No | Force-cancelled | Process exit / SIGTERM |
Paused-manual and paused-scheduler are independent. Both must be cleared for jobs to start again.
---
## State transitions
```
Resume
┌──────────────────────────────┐
│ ▼
Paused ◄─── Pause ─────── Running ──── Drain ───► Draining
│ ▲ │ │
│ Restart │ └─── Shutdown ──► Shutdown
│ ┌──────────┐ │
└─────►│ Restart │────────┘
└──────────┘
(brief pause,
cancel in-flight,
then resume)
```
### Pause
- Sets `manual_paused = true`.
- The claim loop polls every 2 seconds and blocks while paused.
- Active jobs continue until they finish naturally.
- Does **not** affect draining state.
### Resume
- Clears `manual_paused`.
- Does **not** clear `scheduler_paused` (scheduler manages its own flag).
- The claim loop immediately resumes on the next iteration.
- Does **not** cancel the drain if draining.
### Drain
- Sets `draining = true` without setting `paused`.
- No new jobs are claimed.
- Active jobs run to completion.
- When `in_flight_jobs` reaches zero: drain completes, `draining` is cleared, engine transitions to **Paused** (manual).
### Restart
1. Pause (set `manual_paused = true`).
2. Cancel all in-flight jobs (Encoding, Remuxing, Analyzing, Resuming) via FFmpeg kill signal.
3. Clear `draining` flag.
4. Clear `idle_notified` flag.
5. Resume (clear `manual_paused`).
Cancelled in-flight jobs are marked `failed` with `failure_summary = "cancelled"`. They are eligible for automatic retry per the retry backoff schedule.
### Shutdown
Called when the process exits (SIGTERM / graceful shutdown):
1. Cancel all active jobs via FFmpeg kill.
2. Wait up to a short timeout for kills to complete.
3. No retry is scheduled — the jobs return to `queued` on next startup.
---
## Job states
| Job state | Meaning | Terminal? |
|-----------|---------|-----------|
| `queued` | Waiting to be claimed | No |
| `analyzing` | FFprobe running on the file | No |
| `encoding` | FFmpeg encoding in progress | No |
| `remuxing` | FFmpeg stream-copy in progress | No |
| `resuming` | Job being re-queued after retry | No |
| `completed` | Encode finished successfully | Yes |
| `skipped` | Planner decided not to transcode | Yes |
| `failed` | Encode or analysis failed | Yes (with retry) |
| `cancelled` | Cancelled by operator | Yes (with retry) |
---
## Retry backoff
Failed and cancelled jobs are automatically retried. The engine checks elapsed time before claiming.
| Attempt # | Backoff before retry |
|-----------|---------------------|
| 1 | 5 minutes |
| 2 | 15 minutes |
| 3 | 60 minutes |
| 4+ | 6 hours |
After 3 consecutive failures with no success, the job still retries on the 6-hour schedule. There is no permanent failure state from retries alone — operator must manually delete or cancel the job to stop retries.
---
## Cancel semantics
### Cancel mid-analysis
FFprobe process is not currently cancellable via signal. The cancel flag is checked before FFprobe starts. If analysis is in progress when cancel arrives, the job will be cancelled after analysis completes (before encoding starts).
### Cancel mid-encode
The FFmpeg process receives a kill signal immediately. The partial output file is cleaned up. The job is marked `failed` with `failure_summary = "cancelled"`.
### Cancel while queued
The job status is set to `cancelled` directly without any process kill.
---
## Pause vs. drain vs. restart
| Operation | In-flight jobs | Partial output | New jobs |
|-----------|---------------|---------------|----------|
| Pause | Finish normally | Not affected | Blocked |
| Drain | Finish normally | Not affected | Blocked until drain completes |
| Restart | Killed | Cleaned up | Blocked briefly, then resume |
| Shutdown | Killed | Cleaned up | N/A |
Use **Pause** when you need to inspect the queue or change settings without losing progress.
Use **Drain** when you want to stop gracefully after the current batch finishes (e.g. before maintenance).
Use **Restart** to force a clean slate — e.g. after changing hardware settings that affect in-flight jobs.
---
## Boot sequence
1. Migrations run.
2. Any jobs left in `encoding`, `remuxing`, `analyzing`, or `resuming` are reset to `queued` (crash recovery).
3. Boot analysis runs — all `queued` jobs that have no metadata have FFprobe run on them. This uses a single-slot semaphore and blocks the claim loop.
4. Engine claim loop starts — jobs are claimed and processed up to the concurrent limit.

176
docs/docs/planner.md Normal file
View File

@@ -0,0 +1,176 @@
---
title: Planner Heuristics
description: How Alchemist decides whether to transcode, skip, or remux a file.
---
The planner runs once per job during the analysis phase and produces one of three decisions:
- **Transcode** — re-encode the video stream.
- **Remux** — copy streams into a different container (lossless, fast).
- **Skip** — mark the file as not worth processing.
Decisions are deterministic and based solely on file metadata and settings.
---
## Decision flow
Each condition is evaluated in order. The first match wins.
```
1. already_target_codec → Skip (or Remux if container mismatch)
2. no_available_encoders → Skip
3. preferred_codec_unavailable → Skip (if fallback disabled)
4. no_suitable_encoder → Skip (no encoder selected)
5. incomplete_metadata → Skip (missing resolution)
6. bpp_below_threshold → Skip (already efficient)
7. below_min_file_size → Skip (too small)
8. h264 source → Transcode (priority path)
9. everything else → Transcode (transcode_recommended)
```
---
## Skip conditions
### already_target_codec
The video stream is already in the target codec at the required bit depth.
- **AV1 / HEVC target:** skip if codec matches AND bit depth is 10-bit.
- **H.264 target:** skip if codec is h264 AND bit depth is 8-bit or lower.
If the codec matches but the container does not (e.g. AV1 in an MP4, target MKV), the decision is **Remux** instead.
```
skip if: codec == target AND bit_depth == required_depth
remux if: above AND container != target_container
```
---
### bpp_below_threshold
**Bits-per-pixel** measures how efficiently a file is already compressed relative to its resolution and frame rate.
#### Formula
```
raw_bpp = video_bitrate_bps / (width × height × fps)
normalized_bpp = raw_bpp × resolution_multiplier
effective_threshold = min_bpp_threshold × confidence_multiplier × codec_multiplier × target_multiplier
skip if: normalized_bpp < effective_threshold
```
#### Resolution multipliers
| Resolution | Multiplier | Reason |
|------------|-----------|--------|
| ≥ 3840px wide (4K) | 0.60× | 4K compression is naturally denser |
| ≥ 1920px wide (1080p) | 0.80× | HD has moderate density premium |
| < 1920px (SD) | 1.00× | No adjustment |
#### Confidence multipliers
Applied to the threshold when Alchemist is uncertain about bitrate accuracy:
| Confidence | Multiplier | When |
|-----------|-----------|------|
| High | 1.00× | Video bitrate directly reported by FFprobe |
| Medium | 0.70× | Bitrate estimated from container/file size |
| Low | 0.50× | Bitrate estimated with low reliability |
Lower confidence → lower threshold → harder to skip → safer.
#### Codec multipliers
| Source codec | Multiplier | Reason |
|-------------|-----------|--------|
| h264 (AVC) | 0.60× | H.264 needs more bits to match HEVC/AV1 quality |
#### Target multipliers
| Target codec | Multiplier | Reason |
|-------------|-----------|--------|
| AV1 | 0.70× | AV1 is more efficient; skip more aggressively |
| HEVC/H.264 | 1.00× | No adjustment |
#### Worked example
Settings: `min_bpp_threshold = 0.10`, target AV1, source HEVC 10-bit 4K.
```
raw_bpp = 15_000_000 / (3840 × 2160 × 24) = 0.0756
normalized_bpp = 0.0756 × 0.60 = 0.0454 (4K multiplier)
threshold = 0.10 × 1.00 × 1.00 × 0.70 = 0.070 (AV1 multiplier, HEVC source)
0.0454 < 0.070 → SKIP (bpp_below_threshold)
```
---
### below_min_file_size
Files smaller than `min_file_size_mb` (default: 50 MB) are skipped. Small files have minimal savings potential relative to overhead.
**Adjust:** Settings → Transcoding → Minimum file size.
---
### incomplete_metadata
FFprobe could not determine resolution (width or height is zero). Without resolution, BPP cannot be computed and no valid decision can be made.
**Diagnose:** run Library Doctor on the file.
---
### no_available_encoders
No encoder is available for the target codec at all. Either:
- CPU encoding is disabled (`allow_cpu_encoding = false`)
- Hardware detection failed and CPU fallback is off
**Fix:** Settings → Hardware → Enable CPU fallback.
---
### preferred_codec_unavailable_fallback_disabled
The requested codec encoder is not available, and `allow_fallback = false` prevents using any substitute.
**Fix:** Enable CPU fallback in Settings → Hardware, or check GPU detection.
---
## Transcode paths
### transcode_h264_source
H.264 files are unconditionally transcoded (if not skipped by BPP or size filters above). H.264 is the largest space-saving opportunity in most libraries.
### transcode_recommended
Everything else that passes the skip filters. Alchemist transcodes it because it is a plausible candidate based on the current codec and measured efficiency.
---
## Remux path
### already_target_codec_wrong_container
The video is already in the correct codec but wrapped in the wrong container (e.g. AV1 in `.mp4`, target is `.mkv`). Alchemist remuxes using stream copy — fast and lossless.
---
## Tuning
| Setting | Effect |
|---------|--------|
| `min_bpp_threshold` | Higher = skip more files. Default: 0.10. |
| `min_file_size_mb` | Higher = skip more small files. Default: 50. |
| `size_reduction_threshold` | Minimum predicted savings. Default: 30%. |
| `allow_fallback` | Allow CPU encoding when hardware is unavailable. |
| `allow_cpu_encoding` | Allow CPU to encode (not just fall back). |

View File

@@ -1,6 +1,6 @@
{ {
"name": "alchemist-docs", "name": "alchemist-docs",
"version": "0.3.1-rc.1", "version": "0.3.1-rc.3",
"private": true, "private": true,
"packageManager": "bun@1.3.5", "packageManager": "bun@1.3.5",
"scripts": { "scripts": {

View File

@@ -0,0 +1,21 @@
-- Schema v8: per-attempt encode history, one row per encode attempt of a job.
-- Groundwork for granular retry statistics (see CHANGELOG 0.3.1-rc.3).
CREATE TABLE IF NOT EXISTS encode_attempts (
    id INTEGER PRIMARY KEY AUTOINCREMENT,
    -- Owning job; attempts are removed together with their job.
    job_id INTEGER NOT NULL REFERENCES jobs(id) ON DELETE CASCADE,
    -- 1-based ordinal of this attempt within the job's retry sequence.
    attempt_number INTEGER NOT NULL,
    -- Nullable: start time may be unknown for attempts recorded after the fact.
    started_at TEXT,
    -- Defaults to insertion time when the caller does not supply it.
    finished_at TEXT NOT NULL DEFAULT (datetime('now')),
    -- Terminal result of the attempt; constrained to the three known outcomes.
    outcome TEXT NOT NULL CHECK(outcome IN ('completed', 'failed', 'cancelled')),
    -- Failure classification and human-readable summary; NULL on success.
    failure_code TEXT,
    failure_summary TEXT,
    -- Sizes/duration are optional: unavailable for cancelled/failed attempts.
    input_size_bytes INTEGER,
    output_size_bytes INTEGER,
    encode_time_seconds REAL,
    created_at TEXT NOT NULL DEFAULT (datetime('now'))
);
-- Lookup path: all attempts for one job (see get_encode_attempts_by_job).
CREATE INDEX IF NOT EXISTS idx_encode_attempts_job_id ON encode_attempts(job_id);
INSERT OR REPLACE INTO schema_info (key, value) VALUES
    ('schema_version', '8'),
    ('min_compatible_version', '0.2.5'),
    ('last_updated', datetime('now'));

10
skills-lock.json Normal file
View File

@@ -0,0 +1,10 @@
{
"version": 1,
"skills": {
"caveman": {
"source": "JuliusBrussee/caveman",
"sourceType": "github",
"computedHash": "a818cdc41dcfaa50dd891c5cb5e5705968338de02e7e37949ca56e8c30ad4176"
}
}
}

View File

@@ -357,7 +357,9 @@ pub(crate) fn default_allow_fallback() -> bool {
} }
pub(crate) fn default_tonemap_peak() -> f32 { pub(crate) fn default_tonemap_peak() -> f32 {
100.0 // HDR10 content is typically mastered at 1000 nits. Using 100 (SDR level)
// causes severe over-compression of highlights during tone-mapping.
1000.0
} }
pub(crate) fn default_tonemap_desat() -> f32 { pub(crate) fn default_tonemap_desat() -> f32 {

View File

@@ -195,8 +195,8 @@ pub fn build_plan(
match normalized.video.hdr_mode.as_str() { match normalized.video.hdr_mode.as_str() {
"tonemap" => filters.push(FilterStep::Tonemap { "tonemap" => filters.push(FilterStep::Tonemap {
algorithm: TonemapAlgorithm::Hable, algorithm: TonemapAlgorithm::Hable,
peak: 100.0, peak: crate::config::default_tonemap_peak(),
desat: 0.2, desat: crate::config::default_tonemap_desat(),
}), }),
"strip_metadata" => filters.push(FilterStep::StripHdrMetadata), "strip_metadata" => filters.push(FilterStep::StripHdrMetadata),
_ => {} _ => {}
@@ -369,7 +369,18 @@ fn build_subtitle_plan(
copy_video: bool, copy_video: bool,
) -> Result<SubtitleStreamPlan> { ) -> Result<SubtitleStreamPlan> {
match settings.subtitles.mode.as_str() { match settings.subtitles.mode.as_str() {
"copy" => Ok(SubtitleStreamPlan::CopyAllCompatible), "copy" => {
if !crate::media::planner::subtitle_copy_supported(
&settings.output_container,
&analysis.metadata.subtitle_streams,
) {
return Err(AlchemistError::Config(
"Subtitle copy is not supported for the selected output container with these subtitle codecs. \
Use 'remove' or 'burn' instead.".to_string(),
));
}
Ok(SubtitleStreamPlan::CopyAllCompatible)
}
"remove" | "drop" | "none" => Ok(SubtitleStreamPlan::Drop), "remove" | "drop" | "none" => Ok(SubtitleStreamPlan::Drop),
"burn" => { "burn" => {
if copy_video { if copy_video {

View File

@@ -576,6 +576,35 @@ pub struct DetailedEncodeStats {
pub created_at: DateTime<Utc>, pub created_at: DateTime<Utc>,
} }
/// A single recorded encode attempt, as read back from the
/// `encode_attempts` table (schema v8). One job may have many attempts.
#[derive(Debug, Serialize, Deserialize, Clone, sqlx::FromRow)]
pub struct EncodeAttempt {
    pub id: i64,
    // Owning job (FK with ON DELETE CASCADE in the migration).
    pub job_id: i64,
    // 1-based ordinal within the job's retry sequence.
    pub attempt_number: i32,
    // May be absent when the start time was not captured.
    pub started_at: Option<String>,
    // Always present; the DB defaults it to datetime('now') on insert.
    pub finished_at: String,
    // One of 'completed' | 'failed' | 'cancelled' (CHECK-constrained in SQL).
    pub outcome: String,
    // Failure classification/summary; None for successful attempts.
    pub failure_code: Option<String>,
    pub failure_summary: Option<String>,
    // Optional metrics — unavailable for attempts that never produced output.
    pub input_size_bytes: Option<i64>,
    pub output_size_bytes: Option<i64>,
    pub encode_time_seconds: Option<f64>,
    pub created_at: String,
}
/// Write-side payload for recording one encode attempt via
/// `Db::insert_encode_attempt`. Mirrors `EncodeAttempt` minus the
/// DB-generated fields (`id`, `finished_at`, `created_at`).
#[derive(Debug, Clone)]
pub struct EncodeAttemptInput {
    pub job_id: i64,
    // 1-based ordinal within the job's retry sequence; caller-assigned.
    pub attempt_number: i32,
    pub started_at: Option<String>,
    // Expected to be 'completed' | 'failed' | 'cancelled' — the table's
    // CHECK constraint rejects anything else at insert time.
    pub outcome: String,
    pub failure_code: Option<String>,
    pub failure_summary: Option<String>,
    pub input_size_bytes: Option<i64>,
    pub output_size_bytes: Option<i64>,
    pub encode_time_seconds: Option<f64>,
}
#[derive(Debug, Clone)] #[derive(Debug, Clone)]
pub struct EncodeStatsInput { pub struct EncodeStatsInput {
pub job_id: i64, pub job_id: i64,
@@ -1161,6 +1190,45 @@ impl Db {
Ok(()) Ok(())
} }
    /// Record a single encode attempt outcome.
    ///
    /// Inserts one row into `encode_attempts`; `finished_at` is stamped
    /// with `datetime('now')` inside the statement rather than bound,
    /// so the bind list has one fewer value than the column list.
    /// NOTE: binds are positional — their order must match the column
    /// list exactly (started_at binds before the datetime('now') slot).
    pub async fn insert_encode_attempt(&self, input: EncodeAttemptInput) -> Result<()> {
        sqlx::query(
            "INSERT INTO encode_attempts
             (job_id, attempt_number, started_at, finished_at, outcome,
              failure_code, failure_summary, input_size_bytes, output_size_bytes,
              encode_time_seconds)
             VALUES (?, ?, ?, datetime('now'), ?, ?, ?, ?, ?, ?)",
        )
        .bind(input.job_id)
        .bind(input.attempt_number)
        .bind(input.started_at)
        .bind(input.outcome)
        .bind(input.failure_code)
        .bind(input.failure_summary)
        .bind(input.input_size_bytes)
        .bind(input.output_size_bytes)
        .bind(input.encode_time_seconds)
        .execute(&self.pool)
        .await?;
        Ok(())
    }
/// Fetch every recorded encode attempt for `job_id`, sorted by
/// `attempt_number` ascending (earliest try first).
pub async fn get_encode_attempts_by_job(&self, job_id: i64) -> Result<Vec<EncodeAttempt>> {
    // Column list matches the `EncodeAttempt` FromRow mapping.
    let query = "SELECT id, job_id, attempt_number, started_at, finished_at, outcome,
                failure_code, failure_summary, input_size_bytes, output_size_bytes,
                encode_time_seconds, created_at
         FROM encode_attempts
         WHERE job_id = ?
         ORDER BY attempt_number ASC";
    let rows = sqlx::query_as::<_, EncodeAttempt>(query)
        .bind(job_id)
        .fetch_all(&self.pool)
        .await?;
    Ok(rows)
}
/// Get job by ID /// Get job by ID
pub async fn get_job(&self, id: i64) -> Result<Option<Job>> { pub async fn get_job(&self, id: i64) -> Result<Option<Job>> {
let job = sqlx::query_as::<_, Job>( let job = sqlx::query_as::<_, Job>(
@@ -2531,6 +2599,32 @@ impl Db {
.await .await
} }
/// Count today's skip decisions grouped by reason code.
///
/// Falls back to the raw `action` when a row has no `reason_code`.
/// Returns at most 20 `(code, count)` pairs, most frequent first
/// (ties broken alphabetically by code).
pub async fn get_skip_reason_counts(&self) -> Result<Vec<(String, i64)>> {
    let pool = &self.pool;
    timed_query("get_skip_reason_counts", || async {
        let rows = sqlx::query(
            "SELECT COALESCE(reason_code, action) AS code, COUNT(*) AS count
             FROM decisions
             WHERE action = 'skip'
               AND DATE(created_at, 'localtime') = DATE('now', 'localtime')
             GROUP BY COALESCE(reason_code, action)
             ORDER BY count DESC, code ASC
             LIMIT 20",
        )
        .fetch_all(pool)
        .await?;
        // Materialize the (code, count) pairs explicitly.
        let mut counts = Vec::with_capacity(rows.len());
        for row in rows {
            let code: String = row.get("code");
            let count: i64 = row.get("count");
            counts.push((code, count));
        }
        Ok(counts)
    })
    .await
}
pub async fn add_log(&self, level: &str, job_id: Option<i64>, message: &str) -> Result<()> { pub async fn add_log(&self, level: &str, job_id: Option<i64>, message: &str) -> Result<()> {
sqlx::query("INSERT INTO logs (level, job_id, message) VALUES (?, ?, ?)") sqlx::query("INSERT INTO logs (level, job_id, message) VALUES (?, ?, ?)")
.bind(level) .bind(level)

View File

@@ -606,6 +606,21 @@ pub fn failure_from_summary(summary: &str) -> Explanation {
); );
} }
if normalized.contains("vtcompressionsession")
|| normalized.contains("kvtvideoencoder")
|| normalized.contains("kvtvideoencodenotavailablenowerr")
|| normalized.contains("videotoolbox session")
{
return Explanation::new(
ExplanationCategory::Failure,
"videotoolbox_session_failure",
"VideoToolbox session failed",
"The macOS VideoToolbox hardware encoder could not initialize or lost its session mid-encode. This can happen when the GPU is under load or when another process holds the hardware encoder.",
Some("Retry the job. If this repeats, reduce concurrent jobs, restart Alchemist, or enable CPU fallback.".to_string()),
summary,
);
}
if normalized.contains("videotoolbox") if normalized.contains("videotoolbox")
|| normalized.contains("vt_compression") || normalized.contains("vt_compression")
|| normalized.contains("mediaserverd") || normalized.contains("mediaserverd")

View File

@@ -795,11 +795,16 @@ async fn run() -> Result<()> {
} }
} }
} else { } else {
match args let command = match args.command.clone() {
.command Some(command) => command,
.clone() None => {
.expect("CLI branch requires a subcommand") return Err(alchemist::error::AlchemistError::Config(
{ "Missing CLI command".into(),
));
}
};
match command {
Commands::Scan { directories } => { Commands::Scan { directories } => {
agent.scan_and_enqueue(directories).await?; agent.scan_and_enqueue(directories).await?;
info!("Scan complete. Matching files were enqueued."); info!("Scan complete. Matching files were enqueued.");

View File

@@ -7,7 +7,7 @@ use serde::{Deserialize, Serialize};
use std::path::Path; use std::path::Path;
use tokio::process::Command; use tokio::process::Command;
const FFPROBE_TIMEOUT_SECS: u64 = 120; const FFPROBE_TIMEOUT_SECS: u64 = 30;
async fn run_ffprobe(args: &[&str], path: &Path) -> Result<std::process::Output> { async fn run_ffprobe(args: &[&str], path: &Path) -> Result<std::process::Output> {
match tokio::time::timeout( match tokio::time::timeout(

View File

@@ -159,6 +159,18 @@ impl Executor for FfmpegExecutor {
self.event_channels.clone(), self.event_channels.clone(),
)); ));
tracing::info!(
"Job {} execution path: requested_codec={}, planned_codec={}, encoder={:?}, backend={:?}, fallback={:?}",
job.id,
plan.requested_codec.as_str(),
planned_output_codec.as_str(),
encoder.map(|value| value.ffmpeg_encoder_name()),
used_backend.map(|value| value.as_str()),
plan.fallback
.as_ref()
.map(|fallback| fallback.reason.as_str())
);
self.transcoder self.transcoder
.transcode_media(TranscodeRequest { .transcode_media(TranscodeRequest {
job_id: Some(job.id), job_id: Some(job.id),
@@ -244,6 +256,14 @@ impl Executor for FfmpegExecutor {
); );
} }
tracing::info!(
"Job {} output probe: actual_codec={:?}, actual_encoder={:?}, fallback_occurred={}",
job.id,
actual_output_codec.map(|value| value.as_str()),
actual_encoder_name.as_deref(),
plan.fallback.is_some() || codec_mismatch || encoder_mismatch
);
Ok(ExecutionResult { Ok(ExecutionResult {
requested_codec: plan.requested_codec, requested_codec: plan.requested_codec,
planned_output_codec, planned_output_codec,

View File

@@ -1,6 +1,13 @@
use crate::media::pipeline::Encoder; use crate::media::pipeline::{Encoder, RateControl};
pub fn append_args(args: &mut Vec<String>, encoder: Encoder, rate_control: Option<&RateControl>) {
// AMF quality: CQP mode uses -rc cqp with -qp_i and -qp_p.
// The config uses CQ-style semantics (lower value = better quality).
let (use_cqp, qp_value) = match rate_control {
Some(RateControl::Cq { value }) => (true, *value),
_ => (false, 25),
};
pub fn append_args(args: &mut Vec<String>, encoder: Encoder) {
match encoder { match encoder {
Encoder::Av1Amf => { Encoder::Av1Amf => {
args.extend(["-c:v".to_string(), "av1_amf".to_string()]); args.extend(["-c:v".to_string(), "av1_amf".to_string()]);
@@ -13,4 +20,15 @@ pub fn append_args(args: &mut Vec<String>, encoder: Encoder) {
} }
_ => {} _ => {}
} }
if use_cqp {
args.extend([
"-rc".to_string(),
"cqp".to_string(),
"-qp_i".to_string(),
qp_value.to_string(),
"-qp_p".to_string(),
qp_value.to_string(),
]);
}
} }

View File

@@ -242,15 +242,20 @@ impl<'a> FFmpegCommandBuilder<'a> {
); );
} }
Encoder::Av1Vaapi | Encoder::HevcVaapi | Encoder::H264Vaapi => { Encoder::Av1Vaapi | Encoder::HevcVaapi | Encoder::H264Vaapi => {
vaapi::append_args(&mut args, encoder, self.hw_info); vaapi::append_args(&mut args, encoder, self.hw_info, rate_control.as_ref());
} }
Encoder::Av1Amf | Encoder::HevcAmf | Encoder::H264Amf => { Encoder::Av1Amf | Encoder::HevcAmf | Encoder::H264Amf => {
amf::append_args(&mut args, encoder); amf::append_args(&mut args, encoder, rate_control.as_ref());
} }
Encoder::Av1Videotoolbox Encoder::Av1Videotoolbox
| Encoder::HevcVideotoolbox | Encoder::HevcVideotoolbox
| Encoder::H264Videotoolbox => { | Encoder::H264Videotoolbox => {
videotoolbox::append_args(&mut args, encoder, tag_hevc_as_hvc1); videotoolbox::append_args(
&mut args,
encoder,
tag_hevc_as_hvc1,
rate_control.as_ref(),
);
} }
Encoder::Av1Svt | Encoder::Av1Aom | Encoder::HevcX265 | Encoder::H264X264 => { Encoder::Av1Svt | Encoder::Av1Aom | Encoder::HevcX265 | Encoder::H264X264 => {
cpu::append_args( cpu::append_args(
@@ -264,6 +269,12 @@ impl<'a> FFmpegCommandBuilder<'a> {
} }
} }
// Set maximum keyframe interval (~10s GOP) for all non-copy encodes.
// Improves seeking reliability; hardware encoders respect this upper bound.
if !self.plan.copy_video {
args.extend(["-g".to_string(), "250".to_string()]);
}
if let Some(RateControl::Bitrate { kbps }) = rate_control { if let Some(RateControl::Bitrate { kbps }) = rate_control {
args.extend(["-b:v".to_string(), format!("{kbps}k")]); args.extend(["-b:v".to_string(), format!("{kbps}k")]);
} }

View File

@@ -19,6 +19,8 @@ pub fn append_args(
"av1_nvenc".to_string(), "av1_nvenc".to_string(),
"-preset".to_string(), "-preset".to_string(),
preset.clone(), preset.clone(),
"-rc".to_string(),
"vbr".to_string(),
"-cq".to_string(), "-cq".to_string(),
cq.to_string(), cq.to_string(),
]); ]);
@@ -29,6 +31,8 @@ pub fn append_args(
"hevc_nvenc".to_string(), "hevc_nvenc".to_string(),
"-preset".to_string(), "-preset".to_string(),
preset.clone(), preset.clone(),
"-rc".to_string(),
"vbr".to_string(),
"-cq".to_string(), "-cq".to_string(),
cq.to_string(), cq.to_string(),
]); ]);
@@ -39,6 +43,8 @@ pub fn append_args(
"h264_nvenc".to_string(), "h264_nvenc".to_string(),
"-preset".to_string(), "-preset".to_string(),
preset, preset,
"-rc".to_string(),
"vbr".to_string(),
"-cq".to_string(), "-cq".to_string(),
cq.to_string(), cq.to_string(),
]); ]);

View File

@@ -32,7 +32,7 @@ pub fn append_args(
"-global_quality".to_string(), "-global_quality".to_string(),
quality.to_string(), quality.to_string(),
"-look_ahead".to_string(), "-look_ahead".to_string(),
"1".to_string(), "20".to_string(),
]); ]);
} }
Encoder::HevcQsv => { Encoder::HevcQsv => {
@@ -42,7 +42,7 @@ pub fn append_args(
"-global_quality".to_string(), "-global_quality".to_string(),
quality.to_string(), quality.to_string(),
"-look_ahead".to_string(), "-look_ahead".to_string(),
"1".to_string(), "20".to_string(),
]); ]);
} }
Encoder::H264Qsv => { Encoder::H264Qsv => {
@@ -52,7 +52,7 @@ pub fn append_args(
"-global_quality".to_string(), "-global_quality".to_string(),
quality.to_string(), quality.to_string(),
"-look_ahead".to_string(), "-look_ahead".to_string(),
"1".to_string(), "20".to_string(),
]); ]);
} }
_ => {} _ => {}

View File

@@ -1,7 +1,12 @@
use crate::media::pipeline::Encoder; use crate::media::pipeline::{Encoder, RateControl};
use crate::system::hardware::HardwareInfo; use crate::system::hardware::HardwareInfo;
pub fn append_args(args: &mut Vec<String>, encoder: Encoder, hw_info: Option<&HardwareInfo>) { pub fn append_args(
args: &mut Vec<String>,
encoder: Encoder,
hw_info: Option<&HardwareInfo>,
rate_control: Option<&RateControl>,
) {
if let Some(hw) = hw_info { if let Some(hw) = hw_info {
if let Some(ref device_path) = hw.device_path { if let Some(ref device_path) = hw.device_path {
args.extend(["-vaapi_device".to_string(), device_path.to_string()]); args.extend(["-vaapi_device".to_string(), device_path.to_string()]);
@@ -20,4 +25,12 @@ pub fn append_args(args: &mut Vec<String>, encoder: Encoder, hw_info: Option<&Ha
} }
_ => {} _ => {}
} }
// VAAPI quality is set via -global_quality (0100, higher = better).
// The config uses CQ-style semantics where lower value = better quality,
// so we invert: global_quality = 100 - cq_value.
if let Some(RateControl::Cq { value }) = rate_control {
let global_quality = 100u8.saturating_sub(*value);
args.extend(["-global_quality".to_string(), global_quality.to_string()]);
}
} }

View File

@@ -1,33 +1,32 @@
use crate::media::pipeline::Encoder; use crate::media::pipeline::{Encoder, RateControl};
pub fn append_args(args: &mut Vec<String>, encoder: Encoder, tag_hevc_as_hvc1: bool) { pub fn append_args(
// Current FFmpeg VideoToolbox encoders on macOS do not expose qscale-style args: &mut Vec<String>,
// quality controls, so bitrate mode is handled by the shared builder and encoder: Encoder,
// CQ-style requests intentionally fall back to the encoder defaults. tag_hevc_as_hvc1: bool,
rate_control: Option<&RateControl>,
) {
// VideoToolbox quality is controlled via -global_quality (0100, 100=best).
// The config uses CQ-style semantics where lower value = better quality,
// so we invert: global_quality = 100 - cq_value.
// Bitrate mode is handled by the shared builder in mod.rs.
match encoder { match encoder {
Encoder::Av1Videotoolbox => { Encoder::Av1Videotoolbox => {
args.extend([ args.extend(["-c:v".to_string(), "av1_videotoolbox".to_string()]);
"-c:v".to_string(),
"av1_videotoolbox".to_string(),
"-allow_sw".to_string(),
"1".to_string(),
]);
} }
Encoder::HevcVideotoolbox => { Encoder::HevcVideotoolbox => {
args.extend(["-c:v".to_string(), "hevc_videotoolbox".to_string()]); args.extend(["-c:v".to_string(), "hevc_videotoolbox".to_string()]);
if tag_hevc_as_hvc1 { if tag_hevc_as_hvc1 {
args.extend(["-tag:v".to_string(), "hvc1".to_string()]); args.extend(["-tag:v".to_string(), "hvc1".to_string()]);
} }
args.extend(["-allow_sw".to_string(), "1".to_string()]);
} }
Encoder::H264Videotoolbox => { Encoder::H264Videotoolbox => {
args.extend([ args.extend(["-c:v".to_string(), "h264_videotoolbox".to_string()]);
"-c:v".to_string(),
"h264_videotoolbox".to_string(),
"-allow_sw".to_string(),
"1".to_string(),
]);
} }
_ => {} _ => {}
} }
if let Some(RateControl::Cq { value }) = rate_control {
let global_quality = 100u8.saturating_sub(*value);
args.extend(["-global_quality".to_string(), global_quality.to_string()]);
}
} }

View File

@@ -443,6 +443,8 @@ struct FinalizeJobContext<'a> {
plan: &'a TranscodePlan, plan: &'a TranscodePlan,
bypass_quality_gates: bool, bypass_quality_gates: bool,
start_time: std::time::Instant, start_time: std::time::Instant,
encode_started_at: chrono::DateTime<chrono::Utc>,
attempt_number: i32,
metadata: &'a MediaMetadata, metadata: &'a MediaMetadata,
execution_result: &'a ExecutionResult, execution_result: &'a ExecutionResult,
} }
@@ -453,6 +455,8 @@ struct FinalizeFailureContext<'a> {
execution_result: &'a ExecutionResult, execution_result: &'a ExecutionResult,
config_snapshot: &'a crate::config::Config, config_snapshot: &'a crate::config::Config,
start_time: std::time::Instant, start_time: std::time::Instant,
encode_started_at: chrono::DateTime<chrono::Utc>,
attempt_number: i32,
temp_output_path: &'a Path, temp_output_path: &'a Path,
} }
@@ -657,6 +661,13 @@ impl Pipeline {
// Store the decision and return to queued — do NOT encode // Store the decision and return to queued — do NOT encode
match &plan.decision { match &plan.decision {
crate::media::pipeline::TranscodeDecision::Skip { reason } => { crate::media::pipeline::TranscodeDecision::Skip { reason } => {
let skip_code = reason.split('|').next().unwrap_or(reason).trim();
tracing::info!(
job_id = job_id,
skip_code = skip_code,
"Job skipped: {}",
skip_code
);
self.db.add_decision(job_id, "skip", reason).await.ok(); self.db.add_decision(job_id, "skip", reason).await.ok();
self.db self.db
.update_job_status(job_id, crate::db::JobState::Skipped) .update_job_status(job_id, crate::db::JobState::Skipped)
@@ -747,6 +758,7 @@ impl Pipeline {
if self.db.increment_attempt_count(job.id).await.is_err() { if self.db.increment_attempt_count(job.id).await.is_err() {
return Err(JobFailure::Transient); return Err(JobFailure::Transient);
} }
let current_attempt_number = job.attempt_count + 1;
if self if self
.update_job_state(job.id, crate::db::JobState::Analyzing) .update_job_state(job.id, crate::db::JobState::Analyzing)
.await .await
@@ -905,6 +917,15 @@ impl Pipeline {
} }
} }
match self.should_stop_job(job.id).await {
Ok(true) => {
tracing::info!("Job {} was cancelled during encode planning.", job.id);
return Ok(());
}
Ok(false) => {}
Err(_) => return Err(JobFailure::Transient),
}
let (should_execute, action, reason, next_status) = match &plan.decision { let (should_execute, action, reason, next_status) = match &plan.decision {
TranscodeDecision::Transcode { reason } => ( TranscodeDecision::Transcode { reason } => (
true, true,
@@ -925,7 +946,14 @@ impl Pipeline {
}; };
if !should_execute { if !should_execute {
tracing::info!("Decision: SKIP Job {} - {}", job.id, &reason); let explanation = crate::explanations::decision_from_legacy("skip", &reason);
tracing::info!(
"Decision: SKIP Job {} - {} (code={}, summary={})",
job.id,
&reason,
explanation.code,
explanation.summary
);
let _ = self.db.add_decision(job.id, "skip", &reason).await; let _ = self.db.add_decision(job.id, "skip", &reason).await;
let _ = self let _ = self
.update_job_state(job.id, crate::db::JobState::Skipped) .update_job_state(job.id, crate::db::JobState::Skipped)
@@ -999,6 +1027,7 @@ impl Pipeline {
self.dry_run, self.dry_run,
); );
let encode_started_at = chrono::Utc::now();
match executor.execute(&job, &plan, &analysis).await { match executor.execute(&job, &plan, &analysis).await {
Ok(result) => { Ok(result) => {
if result.fallback_occurred && !plan.allow_fallback { if result.fallback_occurred && !plan.allow_fallback {
@@ -1013,6 +1042,20 @@ impl Pipeline {
let _ = self let _ = self
.update_job_state(job.id, crate::db::JobState::Failed) .update_job_state(job.id, crate::db::JobState::Failed)
.await; .await;
let _ = self
.db
.insert_encode_attempt(crate::db::EncodeAttemptInput {
job_id: job.id,
attempt_number: current_attempt_number,
started_at: Some(encode_started_at.to_rfc3339()),
outcome: "failed".to_string(),
failure_code: Some("fallback_blocked".to_string()),
failure_summary: Some(summary.to_string()),
input_size_bytes: Some(metadata.size_bytes as i64),
output_size_bytes: None,
encode_time_seconds: Some(start_time.elapsed().as_secs_f64()),
})
.await;
return Err(JobFailure::EncoderUnavailable); return Err(JobFailure::EncoderUnavailable);
} }
@@ -1026,6 +1069,8 @@ impl Pipeline {
plan: &plan, plan: &plan,
bypass_quality_gates, bypass_quality_gates,
start_time, start_time,
encode_started_at,
attempt_number: current_attempt_number,
metadata, metadata,
execution_result: &result, execution_result: &result,
}, },
@@ -1040,6 +1085,8 @@ impl Pipeline {
execution_result: &result, execution_result: &result,
config_snapshot: &config_snapshot, config_snapshot: &config_snapshot,
start_time, start_time,
encode_started_at,
attempt_number: current_attempt_number,
temp_output_path: &temp_output_path, temp_output_path: &temp_output_path,
}, },
&err, &err,
@@ -1093,6 +1140,20 @@ impl Pipeline {
let _ = self let _ = self
.update_job_state(job.id, crate::db::JobState::Cancelled) .update_job_state(job.id, crate::db::JobState::Cancelled)
.await; .await;
let _ = self
.db
.insert_encode_attempt(crate::db::EncodeAttemptInput {
job_id: job.id,
attempt_number: current_attempt_number,
started_at: Some(encode_started_at.to_rfc3339()),
outcome: "cancelled".to_string(),
failure_code: None,
failure_summary: None,
input_size_bytes: Some(metadata.size_bytes as i64),
output_size_bytes: None,
encode_time_seconds: Some(start_time.elapsed().as_secs_f64()),
})
.await;
} else { } else {
let msg = format!("Transcode failed: {e}"); let msg = format!("Transcode failed: {e}");
tracing::error!("Job {}: {}", job.id, msg); tracing::error!("Job {}: {}", job.id, msg);
@@ -1105,6 +1166,20 @@ impl Pipeline {
let _ = self let _ = self
.update_job_state(job.id, crate::db::JobState::Failed) .update_job_state(job.id, crate::db::JobState::Failed)
.await; .await;
let _ = self
.db
.insert_encode_attempt(crate::db::EncodeAttemptInput {
job_id: job.id,
attempt_number: current_attempt_number,
started_at: Some(encode_started_at.to_rfc3339()),
outcome: "failed".to_string(),
failure_code: Some(explanation.code.clone()),
failure_summary: Some(msg),
input_size_bytes: Some(metadata.size_bytes as i64),
output_size_bytes: None,
encode_time_seconds: Some(start_time.elapsed().as_secs_f64()),
})
.await;
} }
Err(map_failure(&e)) Err(map_failure(&e))
} }
@@ -1360,6 +1435,20 @@ impl Pipeline {
self.update_job_state(job_id, crate::db::JobState::Completed) self.update_job_state(job_id, crate::db::JobState::Completed)
.await?; .await?;
self.update_job_progress(job_id, 100.0).await; self.update_job_progress(job_id, 100.0).await;
let _ = self
.db
.insert_encode_attempt(crate::db::EncodeAttemptInput {
job_id,
attempt_number: context.attempt_number,
started_at: Some(context.encode_started_at.to_rfc3339()),
outcome: "completed".to_string(),
failure_code: None,
failure_summary: None,
input_size_bytes: Some(input_size as i64),
output_size_bytes: Some(output_size as i64),
encode_time_seconds: Some(encode_duration),
})
.await;
self.emit_telemetry_event(TelemetryEventParams { self.emit_telemetry_event(TelemetryEventParams {
telemetry_enabled, telemetry_enabled,
@@ -1466,6 +1555,20 @@ impl Pipeline {
let _ = self let _ = self
.update_job_state(job_id, crate::db::JobState::Failed) .update_job_state(job_id, crate::db::JobState::Failed)
.await; .await;
let _ = self
.db
.insert_encode_attempt(crate::db::EncodeAttemptInput {
job_id,
attempt_number: context.attempt_number,
started_at: Some(context.encode_started_at.to_rfc3339()),
outcome: "failed".to_string(),
failure_code: Some(failure_explanation.code.clone()),
failure_summary: Some(message),
input_size_bytes: Some(context.metadata.size_bytes as i64),
output_size_bytes: None,
encode_time_seconds: Some(context.start_time.elapsed().as_secs_f64()),
})
.await;
} }
async fn emit_telemetry_event(&self, params: TelemetryEventParams<'_>) { async fn emit_telemetry_event(&self, params: TelemetryEventParams<'_>) {
@@ -1779,6 +1882,8 @@ mod tests {
execution_result: &result, execution_result: &result,
config_snapshot: &config_snapshot, config_snapshot: &config_snapshot,
start_time: std::time::Instant::now(), start_time: std::time::Instant::now(),
encode_started_at: chrono::Utc::now(),
attempt_number: 1,
temp_output_path: &temp_output, temp_output_path: &temp_output,
}, },
&crate::error::AlchemistError::Unknown("disk full".to_string()), &crate::error::AlchemistError::Unknown("disk full".to_string()),

View File

@@ -339,12 +339,13 @@ fn should_transcode(
}; };
let normalized_bpp = bpp.map(|value| value * res_correction); let normalized_bpp = bpp.map(|value| value * res_correction);
// Raise threshold for uncertain analysis: low confidence = fewer speculative encodes.
let mut threshold = match analysis.confidence { let mut threshold = match analysis.confidence {
crate::media::pipeline::AnalysisConfidence::High => config.transcode.min_bpp_threshold, crate::media::pipeline::AnalysisConfidence::High => config.transcode.min_bpp_threshold,
crate::media::pipeline::AnalysisConfidence::Medium => { crate::media::pipeline::AnalysisConfidence::Medium => {
config.transcode.min_bpp_threshold * 0.7 config.transcode.min_bpp_threshold * 1.3
} }
crate::media::pipeline::AnalysisConfidence::Low => config.transcode.min_bpp_threshold * 0.5, crate::media::pipeline::AnalysisConfidence::Low => config.transcode.min_bpp_threshold * 1.8,
}; };
if target_codec == OutputCodec::Av1 { if target_codec == OutputCodec::Av1 {
threshold *= 0.7; threshold *= 0.7;
@@ -626,8 +627,16 @@ fn encoder_runtime_settings(
}, },
None, None,
), ),
Encoder::Av1Nvenc | Encoder::HevcNvenc | Encoder::H264Nvenc => ( Encoder::Av1Nvenc => (
RateControl::Cq { value: 25 }, RateControl::Cq { value: 28 },
Some(quality_profile.nvenc_preset().to_string()),
),
Encoder::HevcNvenc => (
RateControl::Cq { value: 24 },
Some(quality_profile.nvenc_preset().to_string()),
),
Encoder::H264Nvenc => (
RateControl::Cq { value: 21 },
Some(quality_profile.nvenc_preset().to_string()), Some(quality_profile.nvenc_preset().to_string()),
), ),
Encoder::Av1Videotoolbox | Encoder::HevcVideotoolbox | Encoder::H264Videotoolbox => ( Encoder::Av1Videotoolbox | Encoder::HevcVideotoolbox | Encoder::H264Videotoolbox => (
@@ -645,7 +654,18 @@ fn encoder_runtime_settings(
Some(preset.to_string()), Some(preset.to_string()),
) )
} }
Encoder::Av1Aom => (RateControl::Crf { value: 32 }, Some("6".to_string())), Encoder::Av1Aom => {
let (cpu_used, default_crf) = match config.hardware.cpu_preset {
crate::config::CpuPreset::Slow => ("2", 24u8),
crate::config::CpuPreset::Medium => ("4", 28u8),
crate::config::CpuPreset::Fast => ("6", 30u8),
crate::config::CpuPreset::Faster => ("8", 32u8),
};
(
RateControl::Crf { value: default_crf },
Some(cpu_used.to_string()),
)
}
Encoder::HevcX265 => { Encoder::HevcX265 => {
let preset = config.hardware.cpu_preset.as_str().to_string(); let preset = config.hardware.cpu_preset.as_str().to_string();
let default_crf = match config.hardware.cpu_preset { let default_crf = match config.hardware.cpu_preset {
@@ -901,7 +921,10 @@ fn plan_subtitles(
} }
} }
fn subtitle_copy_supported(container: &str, subtitle_streams: &[SubtitleStreamMetadata]) -> bool { pub(crate) fn subtitle_copy_supported(
container: &str,
subtitle_streams: &[SubtitleStreamMetadata],
) -> bool {
if subtitle_streams.is_empty() { if subtitle_streams.is_empty() {
return true; return true;
} }

View File

@@ -68,7 +68,7 @@ impl Agent {
in_flight_jobs: Arc::new(AtomicUsize::new(0)), in_flight_jobs: Arc::new(AtomicUsize::new(0)),
idle_notified: Arc::new(AtomicBool::new(false)), idle_notified: Arc::new(AtomicBool::new(false)),
analyzing_boot: Arc::new(AtomicBool::new(false)), analyzing_boot: Arc::new(AtomicBool::new(false)),
analysis_semaphore: Arc::new(tokio::sync::Semaphore::new(1)), analysis_semaphore: Arc::new(tokio::sync::Semaphore::new(concurrent_jobs.clamp(1, 4))),
} }
} }
@@ -167,6 +167,38 @@ impl Agent {
self.draining.store(false, Ordering::SeqCst); self.draining.store(false, Ordering::SeqCst);
} }
/// Restart the engine loop without re-execing the process.
/// Pauses the engine, cancels all in-flight jobs, resets state flags,
/// and resumes. Cancelled jobs remain in the cancelled state.
pub async fn restart(&self) {
    info!("Engine restart requested.");
    // Stop claiming new work before cancelling what is already running.
    self.pause();
    // Every job state that may correspond to an active worker/process.
    let active_states = [
        crate::db::JobState::Encoding,
        crate::db::JobState::Remuxing,
        crate::db::JobState::Analyzing,
        crate::db::JobState::Resuming,
    ];
    for state in &active_states {
        match self.db.get_jobs_by_status(*state).await {
            Ok(jobs) => {
                for job in jobs {
                    self.orchestrator.cancel_job(job.id);
                }
            }
            Err(e) => {
                // Best-effort: a fetch failure for one state is logged but
                // must not abort the rest of the restart.
                error!("Restart: failed to fetch {:?} jobs: {}", state, e);
            }
        }
    }
    // Clear drain/idle flags so the resumed loop starts claiming jobs again.
    self.draining.store(false, Ordering::SeqCst);
    self.idle_notified.store(false, Ordering::SeqCst);
    self.resume();
    info!("Engine restart complete.");
}
pub fn set_boot_analyzing(&self, value: bool) { pub fn set_boot_analyzing(&self, value: bool) {
self.analyzing_boot.store(value, Ordering::SeqCst); self.analyzing_boot.store(value, Ordering::SeqCst);
if value { if value {
@@ -311,6 +343,11 @@ impl Agent {
return; return;
} }
info!(
"Updating concurrent job limit from {} to {}",
current, new_limit
);
if new_limit > current { if new_limit > current {
let mut held = self.held_permits.lock().await; let mut held = self.held_permits.lock().await;
let mut increase = new_limit - current; let mut increase = new_limit - current;
@@ -392,6 +429,11 @@ impl Agent {
continue; continue;
} }
}; };
debug!(
"Worker slot acquired (in_flight={}, limit={})",
self.in_flight_jobs.load(Ordering::SeqCst),
self.concurrent_jobs_limit()
);
// Re-check drain after permit acquisition (belt-and-suspenders) // Re-check drain after permit acquisition (belt-and-suspenders)
if self.is_draining() { if self.is_draining() {
@@ -403,7 +445,13 @@ impl Agent {
match self.db.claim_next_job().await { match self.db.claim_next_job().await {
Ok(Some(job)) => { Ok(Some(job)) => {
self.idle_notified.store(false, Ordering::SeqCst); self.idle_notified.store(false, Ordering::SeqCst);
self.in_flight_jobs.fetch_add(1, Ordering::SeqCst); let next_in_flight = self.in_flight_jobs.fetch_add(1, Ordering::SeqCst) + 1;
info!(
"Claimed job {} for processing (in_flight={}, limit={})",
job.id,
next_in_flight,
self.concurrent_jobs_limit()
);
let agent = self.clone(); let agent = self.clone();
let counter = self.in_flight_jobs.clone(); let counter = self.in_flight_jobs.clone();
tokio::spawn(async move { tokio::spawn(async move {
@@ -423,6 +471,11 @@ impl Agent {
}); });
} }
Ok(None) => { Ok(None) => {
debug!(
"No queued job available (in_flight={}, limit={})",
self.in_flight_jobs.load(Ordering::SeqCst),
self.concurrent_jobs_limit()
);
if self.in_flight_jobs.load(Ordering::SeqCst) == 0 if self.in_flight_jobs.load(Ordering::SeqCst) == 0
&& !self.idle_notified.swap(true, Ordering::SeqCst) && !self.idle_notified.swap(true, Ordering::SeqCst)
{ {

View File

@@ -13,6 +13,8 @@ use tokio::sync::oneshot;
use tracing::{error, info, warn}; use tracing::{error, info, warn};
pub struct Transcoder { pub struct Transcoder {
// std::sync::Mutex is intentional: critical sections never cross .await boundaries,
// so there is no deadlock risk. Contention is negligible (≤ concurrent_jobs entries).
cancel_channels: Arc<Mutex<HashMap<i64, oneshot::Sender<()>>>>, cancel_channels: Arc<Mutex<HashMap<i64, oneshot::Sender<()>>>>,
pending_cancels: Arc<Mutex<HashSet<i64>>>, pending_cancels: Arc<Mutex<HashSet<i64>>>,
} }
@@ -234,6 +236,7 @@ impl Transcoder {
total_duration: Option<f64>, total_duration: Option<f64>,
) -> Result<()> { ) -> Result<()> {
info!("Executing FFmpeg command: {:?}", cmd); info!("Executing FFmpeg command: {:?}", cmd);
let ffmpeg_start = std::time::Instant::now();
cmd.stdout(Stdio::null()).stderr(Stdio::piped()); cmd.stdout(Stdio::null()).stderr(Stdio::piped());
if let Some(id) = job_id { if let Some(id) = job_id {
@@ -286,15 +289,21 @@ impl Transcoder {
} }
} }
info!(
"Job {:?}: FFmpeg spawned ({:.3}s since command start)",
job_id,
ffmpeg_start.elapsed().as_secs_f64()
);
let mut reader = BufReader::new(stderr).lines(); let mut reader = BufReader::new(stderr).lines();
let mut kill_rx = kill_rx; let mut kill_rx = kill_rx;
let mut killed = false; let mut killed = false;
let mut last_lines = std::collections::VecDeque::with_capacity(20); let mut last_lines = std::collections::VecDeque::with_capacity(20);
let mut progress_state = FFmpegProgressState::default(); let mut progress_state = FFmpegProgressState::default();
let mut first_frame_logged = false;
loop { loop {
tokio::select! { tokio::select! {
line_res_timeout = tokio::time::timeout(tokio::time::Duration::from_secs(600), reader.next_line()) => { line_res_timeout = tokio::time::timeout(tokio::time::Duration::from_secs(120), reader.next_line()) => {
match line_res_timeout { match line_res_timeout {
Ok(line_res) => match line_res { Ok(line_res) => match line_res {
Ok(Some(line)) => { Ok(Some(line)) => {
@@ -308,11 +317,28 @@ impl Transcoder {
last_lines.pop_front(); last_lines.pop_front();
} }
// Detect VideoToolbox software fallback
if line.contains("Using software encoder") || line.contains("using software encoder") {
warn!(
"Job {:?}: VideoToolbox falling back to software encoder ({}s elapsed)",
job_id,
ffmpeg_start.elapsed().as_secs_f64()
);
}
if let Some(observer) = observer.as_ref() { if let Some(observer) = observer.as_ref() {
observer.on_log(line.clone()).await; observer.on_log(line.clone()).await;
if let Some(total_duration) = total_duration { if let Some(total_duration) = total_duration {
if let Some(progress) = progress_state.ingest_line(&line) { if let Some(progress) = progress_state.ingest_line(&line) {
if !first_frame_logged {
first_frame_logged = true;
info!(
"Job {:?}: first progress event ({:.3}s since spawn)",
job_id,
ffmpeg_start.elapsed().as_secs_f64()
);
}
observer.on_progress(progress, total_duration).await; observer.on_progress(progress, total_duration).await;
} }
} }
@@ -325,7 +351,7 @@ impl Transcoder {
} }
}, },
Err(_) => { Err(_) => {
error!("Job {:?} stalled: No output from FFmpeg for 10 minutes. Killing process...", job_id); error!("Job {:?} stalled: No output from FFmpeg for 2 minutes. Killing process...", job_id);
let _ = child.kill().await; let _ = child.kill().await;
killed = true; killed = true;
if let Some(id) = job_id { if let Some(id) = job_id {
@@ -379,7 +405,11 @@ impl Transcoder {
} }
if status.success() { if status.success() {
info!("FFmpeg command completed successfully"); info!(
"Job {:?}: FFmpeg completed successfully ({:.3}s total)",
job_id,
ffmpeg_start.elapsed().as_secs_f64()
);
Ok(()) Ok(())
} else { } else {
let error_detail = last_lines.make_contiguous().join("\n"); let error_detail = last_lines.make_contiguous().join("\n");

View File

@@ -325,6 +325,7 @@ pub(crate) struct JobDetailResponse {
job: Job, job: Job,
metadata: Option<crate::media::pipeline::MediaMetadata>, metadata: Option<crate::media::pipeline::MediaMetadata>,
encode_stats: Option<crate::db::DetailedEncodeStats>, encode_stats: Option<crate::db::DetailedEncodeStats>,
encode_attempts: Vec<crate::db::EncodeAttempt>,
job_logs: Vec<crate::db::LogEntry>, job_logs: Vec<crate::db::LogEntry>,
job_failure_summary: Option<String>, job_failure_summary: Option<String>,
decision_explanation: Option<Explanation>, decision_explanation: Option<Explanation>,
@@ -343,11 +344,7 @@ pub(crate) async fn get_job_detail_handler(
// Avoid long probes while the job is still active. // Avoid long probes while the job is still active.
let metadata = match job.status { let metadata = match job.status {
JobState::Queued JobState::Queued | JobState::Analyzing => None,
| JobState::Analyzing
| JobState::Encoding
| JobState::Remuxing
| JobState::Completed => None,
_ => { _ => {
let analyzer = crate::media::analyzer::FfmpegAnalyzer; let analyzer = crate::media::analyzer::FfmpegAnalyzer;
use crate::media::pipeline::Analyzer; use crate::media::pipeline::Analyzer;
@@ -403,10 +400,17 @@ pub(crate) async fn get_job_detail_handler(
(None, None) (None, None)
}; };
let encode_attempts = state
.db
.get_encode_attempts_by_job(id)
.await
.unwrap_or_default();
axum::Json(JobDetailResponse { axum::Json(JobDetailResponse {
job, job,
metadata, metadata,
encode_stats, encode_stats,
encode_attempts,
job_logs, job_logs,
job_failure_summary, job_failure_summary,
decision_explanation, decision_explanation,
@@ -439,6 +443,13 @@ pub(crate) async fn stop_drain_handler(State(state): State<Arc<AppState>>) -> im
axum::Json(serde_json::json!({ "status": "running" })) axum::Json(serde_json::json!({ "status": "running" }))
} }
pub(crate) async fn restart_engine_handler(
State(state): State<Arc<AppState>>,
) -> impl IntoResponse {
state.agent.restart().await;
axum::Json(serde_json::json!({ "status": "running" }))
}
pub(crate) async fn engine_status_handler(State(state): State<Arc<AppState>>) -> impl IntoResponse { pub(crate) async fn engine_status_handler(State(state): State<Arc<AppState>>) -> impl IntoResponse {
axum::Json(serde_json::json!({ axum::Json(serde_json::json!({
"status": if state.agent.is_draining() { "status": if state.agent.is_draining() {

View File

@@ -149,7 +149,28 @@ pub(crate) async fn auth_middleware(
} }
fn request_is_lan(req: &Request) -> bool { fn request_is_lan(req: &Request) -> bool {
request_ip(req).is_some_and(is_lan_ip) let direct_peer = req
.extensions()
.get::<ConnectInfo<SocketAddr>>()
.map(|info| info.0.ip());
let resolved = request_ip(req);
// If resolved IP differs from direct peer, forwarded headers were used.
// Warn operators so misconfigured proxies surface in logs.
if let (Some(peer), Some(resolved_ip)) = (direct_peer, resolved) {
if peer != resolved_ip && is_lan_ip(resolved_ip) {
tracing::warn!(
peer_ip = %peer,
resolved_ip = %resolved_ip,
"Setup gate: access permitted via forwarded headers. \
Verify your reverse proxy is forwarding client IPs correctly \
(X-Forwarded-For / X-Real-IP). Misconfigured proxies may \
expose setup to public traffic."
);
}
}
resolved.is_some_and(is_lan_ip)
} }
fn read_only_api_token_allows(method: &Method, path: &str) -> bool { fn read_only_api_token_allows(method: &Method, path: &str) -> bool {

View File

@@ -307,6 +307,7 @@ fn app_router(state: Arc<AppState>) -> Router {
.route("/api/stats/daily", get(daily_stats_handler)) .route("/api/stats/daily", get(daily_stats_handler))
.route("/api/stats/detailed", get(detailed_stats_handler)) .route("/api/stats/detailed", get(detailed_stats_handler))
.route("/api/stats/savings", get(savings_summary_handler)) .route("/api/stats/savings", get(savings_summary_handler))
.route("/api/stats/skip-reasons", get(skip_reasons_handler))
// Canonical job list endpoint. // Canonical job list endpoint.
.route("/api/jobs", get(jobs_table_handler)) .route("/api/jobs", get(jobs_table_handler))
.route("/api/jobs/table", get(jobs_table_handler)) .route("/api/jobs/table", get(jobs_table_handler))
@@ -339,6 +340,7 @@ fn app_router(state: Arc<AppState>) -> Router {
.route("/api/engine/resume", post(resume_engine_handler)) .route("/api/engine/resume", post(resume_engine_handler))
.route("/api/engine/drain", post(drain_engine_handler)) .route("/api/engine/drain", post(drain_engine_handler))
.route("/api/engine/stop-drain", post(stop_drain_handler)) .route("/api/engine/stop-drain", post(stop_drain_handler))
.route("/api/engine/restart", post(restart_engine_handler))
.route( .route(
"/api/engine/mode", "/api/engine/mode",
get(get_engine_mode_handler).post(set_engine_mode_handler), get(get_engine_mode_handler).post(set_engine_mode_handler),

View File

@@ -101,3 +101,16 @@ pub(crate) async fn savings_summary_handler(
Err(err) => config_read_error_response("load storage savings summary", &err), Err(err) => config_read_error_response("load storage savings summary", &err),
} }
} }
pub(crate) async fn skip_reasons_handler(State(state): State<Arc<AppState>>) -> impl IntoResponse {
match state.db.get_skip_reason_counts().await {
Ok(counts) => {
let items: Vec<serde_json::Value> = counts
.into_iter()
.map(|(code, count)| serde_json::json!({ "code": code, "count": count }))
.collect();
axum::Json(serde_json::json!({ "today": items })).into_response()
}
Err(err) => config_read_error_response("load skip reason counts", &err),
}
}

View File

@@ -1,74 +0,0 @@
# Alchemist Project Audit & Findings
This document provides a comprehensive audit of the Alchemist media transcoding project (v0.3.0-rc.3), covering backend architecture, frontend design, database schema, and operational workflows.
---
## 1. Project Architecture & Pipeline
Alchemist implements a robust, asynchronous media transcoding pipeline managed by a central `Agent`. The pipeline follows a strictly ordered lifecycle:
1. **Scanner (`src/media/scanner.rs`):** Performs a high-speed traversal of watch folders. It uses `mtime_hash` (seconds + nanoseconds) to detect changes without full file analysis, efficiently handling re-scans and minimizing DB writes.
2. **Analyzer (`src/media/analyzer.rs`):** Executes `ffprobe` to extract normalized media metadata (codecs, bit depth, BPP, bitrate). Analysis results are used to populate the `DetailedEncodeStats` and `Decision` tables.
3. **Planner (`src/media/planner.rs`):** A complex decision engine that evaluates whether to **Skip**, **Remux**, or **Transcode** a file based on user profiles.
 * *Finding:* The planning logic is heavily hardcoded with "magic thresholds" (e.g., Bits-per-pixel thresholds). While effective, these could be surfaced as "Advanced Settings" in the UI.
4. **Executor (`src/media/executor.rs`):** Orchestrates the `ffmpeg` process. It dynamically selects encoders (NVENC, VAAPI, QSV, ProRes, or CPU fallback) based on the target profile and host hardware capabilities detected in `src/system/hardware.rs`.
---
## 2. Backend & API Design (Rust/Axum)
* **Concurrency:** Utilizes `tokio` for async orchestration and `rayon` for CPU-intensive tasks (like file hashing or list processing). The scheduler supports multiple concurrency modes: `Background` (1 job), `Balanced` (capped), and `Throughput` (uncapped).
* **State Management:** The backend uses `broadcast` channels to separate high-volume events (Progress, Logs) from low-volume system events (Config updates). This prevents UI "flicker" and unnecessary re-renders in the frontend.
* **API Structure:**
* **RESTful endpoints** for jobs, settings, and stats.
* **SSE (`src/server/sse.rs`)** for real-time progress updates, ensuring a reactive UI without high-frequency polling.
* **Auth (`src/server/auth.rs`):** Implements JWT-based authentication with Argon2 hashing for the initial setup.
---
## 3. Database Schema (SQLite/SQLx)
* **Stability:** The project uses 16+ migrations, showing a mature evolution from a simple schema to a sophisticated job-tracking system.
* **Decision Logging:** The `decisions` and `job_failure_explanations` tables are a standout feature. They store the "why" behind every action as structured JSON, which is then humanized in the UI (e.g., explaining exactly why a file was skipped).
* **Data Integrity:** Foreign keys and WAL (Write-Ahead Logging) mode ensure database stability even during heavy concurrent I/O.
---
## 4. Frontend Design (Astro/React/Helios)
* **Stack:** Astro 5 provides a fast, static-first framework, while React 18 handles the complex stateful dashboards.
* **Design System ("Helios"):**
* *Identity:* A dark-themed, data-dense industrial aesthetic.
* *Findings:* While functional, the system suffers from "component bloat." `JobManager.tsx` (~2,000 lines) is a significant maintainability risk. It contains UI logic, filtering logic, and data transformation logic mixed together.
* **Data Visualization:** Uses `recharts` for historical trends and performance metrics.
* *Improvement:* The charts are currently static snapshots. Adding real-time interactivity (brushing, zooming) would improve the exploration of large datasets.
---
## 5. System & Hardware Integration
* **Hardware Discovery:** `src/system/hardware.rs` is extensive, detecting NVIDIA, Intel, AMD, and Apple Silicon capabilities. It correctly maps these to `ffmpeg` encoder flags.
* **FS Browser:** A custom filesystem browser (`src/system/fs_browser.rs`) allows for secure directory selection during setup, preventing path injection and ensuring platform-agnostic path handling.
---
## 6. Critical Areas for Improvement
### **Maintainability (High Priority)**
* **Decouple `JobManager.tsx`:** Refactor into functional hooks (`useJobs`, `useFilters`) and smaller, presentation-only components.
* **Standardize Formatters:** Move `formatBytes`, `formatTime`, and `formatReduction` into a centralized `lib/formatters.ts` to reduce code duplication across the Dashboard and Stats pages.
### **UX & Performance (Medium Priority)**
* **Polling vs. SSE:** Ensure all real-time metrics (like GPU temperature) are delivered via SSE rather than periodic polling to reduce backend load and improve UI responsiveness.
* **Interactive Decision Explanations:** The current skip reasons are helpful but static. Adding links to the relevant settings (e.g., "Change this threshold in Transcoding Settings") would close the loop for users.
### **Reliability (Low Priority)**
* **E2E Testing:** While Playwright tests exist, they focus on "reliability." Expanding these to cover complex "edge cases" (like network-attached storage disconnects during a scan) would improve long-term stability.
---
## 7. Stitch Recommendation
Use Stitch to generate **atomic component refinements** based on this audit.
* *Prompt Example:* "Refine the JobTable row to use iconic status indicators with tooltips for skip reasons, as outlined in the Alchemist Audit."
* *Prompt Example:* "Create a unified `Formatter` utility library in TypeScript that handles bytes, time, and percentage formatting for the Helios design system."

View File

@@ -1,6 +1,6 @@
{ {
"name": "alchemist-web-e2e", "name": "alchemist-web-e2e",
"version": "0.3.1-rc.1", "version": "0.3.1-rc.3",
"private": true, "private": true,
"packageManager": "bun@1", "packageManager": "bun@1",
"type": "module", "type": "module",

View File

@@ -0,0 +1,102 @@
import { expect, test } from "@playwright/test";
import {
  createEngineMode,
  createEngineStatus,
  fulfillJson,
  mockDashboardData,
} from "./helpers";
// Run these tests without any saved auth/storage state.
test.use({ storageState: undefined });
// Shared setup: mock the dashboard data and the engine-mode endpoint so each
// test below only needs to mock the specific engine endpoints it exercises.
test.beforeEach(async ({ page }) => {
  await mockDashboardData(page);
  await page.route("**/api/engine/mode", async (route) => {
    await fulfillJson(route, 200, createEngineMode());
  });
});
test("pause then resume transitions engine state correctly", async ({ page }) => {
let engineStatus = createEngineStatus({ status: "running", manual_paused: false });
let pauseCalls = 0;
let resumeCalls = 0;
await page.route("**/api/engine/status", async (route) => {
await fulfillJson(route, 200, engineStatus);
});
await page.route("**/api/engine/pause", async (route) => {
pauseCalls += 1;
engineStatus = createEngineStatus({ status: "paused", manual_paused: true });
await fulfillJson(route, 200, { status: "paused" });
});
await page.route("**/api/engine/resume", async (route) => {
resumeCalls += 1;
engineStatus = createEngineStatus({ status: "running", manual_paused: false });
await fulfillJson(route, 200, { status: "running" });
});
await page.goto("/settings?tab=system");
await page.getByRole("button", { name: "Pause" }).click();
await expect.poll(() => pauseCalls).toBe(1);
await page.getByRole("button", { name: "Start" }).click();
await expect.poll(() => resumeCalls).toBe(1);
});
// Verifies the drain ("Stop") flow: clicking Stop calls /api/engine/drain,
// the header shows the "Stopping" label, and /api/engine/stop-drain is not
// called without further user action.
// NOTE(review): despite the title ("cancel-stop reverts it"), this test never
// triggers the cancel-stop action — it only asserts stop-drain was not hit
// spontaneously. Consider clicking the cancel control and asserting
// stopDrainCalls becomes 1; confirm the intended coverage.
test("drain transitions to draining state and cancel-stop reverts it", async ({ page }) => {
  // Mock engine state; mutated by the route handlers below.
  let engineStatus = createEngineStatus({ status: "running", manual_paused: false });
  let drainCalls = 0;
  let stopDrainCalls = 0;
  await page.route("**/api/engine/status", async (route) => {
    await fulfillJson(route, 200, engineStatus);
  });
  await page.route("**/api/engine/drain", async (route) => {
    drainCalls += 1;
    engineStatus = createEngineStatus({
      status: "draining",
      manual_paused: false,
      draining: true,
    });
    await fulfillJson(route, 200, { status: "draining" });
  });
  await page.route("**/api/engine/stop-drain", async (route) => {
    stopDrainCalls += 1;
    engineStatus = createEngineStatus({ status: "running", manual_paused: false });
    await fulfillJson(route, 200, { status: "running" });
  });
  await page.goto("/");
  await page.getByRole("button", { name: "Stop" }).click();
  await expect.poll(() => drainCalls).toBe(1);
  // Header should reflect the draining state.
  await expect(page.getByText("Stopping", { exact: true })).toBeVisible();
  // Stop-drain endpoint must not have been invoked yet.
  await expect.poll(() => stopDrainCalls).toBe(0);
});
test("engine restart endpoint is called and status returns to running", async ({ page }) => {
let engineStatus = createEngineStatus({ status: "running", manual_paused: false });
let restartCalls = 0;
await page.route("**/api/engine/status", async (route) => {
await fulfillJson(route, 200, engineStatus);
});
await page.route("**/api/engine/restart", async (route) => {
restartCalls += 1;
engineStatus = createEngineStatus({ status: "running", manual_paused: false });
await fulfillJson(route, 200, { status: "running" });
});
await page.goto("/");
const result = await page.evaluate(async () => {
const res = await fetch("/api/engine/restart", { method: "POST" });
const body = await res.json() as { status: string };
return { status: res.status, body };
});
expect(restartCalls).toBe(1);
expect(result.status).toBe(200);
expect(result.body.status).toBe("running");
});

View File

@@ -159,6 +159,18 @@ export interface JobDetailFixture {
message: string; message: string;
created_at: string; created_at: string;
}>; }>;
encode_attempts?: Array<{
id: number;
attempt_number: number;
started_at: string | null;
finished_at: string;
outcome: "completed" | "failed" | "cancelled";
failure_code: string | null;
failure_summary: string | null;
input_size_bytes: number | null;
output_size_bytes: number | null;
encode_time_seconds: number | null;
}>;
job_failure_summary?: string; job_failure_summary?: string;
decision_explanation?: ExplanationFixture | null; decision_explanation?: ExplanationFixture | null;
failure_explanation?: ExplanationFixture | null; failure_explanation?: ExplanationFixture | null;

View File

@@ -142,7 +142,7 @@ test("search requests are debounced and failed job details show summary and logs
await mockJobDetails(page, { 2: failedDetail }); await mockJobDetails(page, { 2: failedDetail });
await page.goto("/jobs"); await page.goto("/jobs");
await page.getByPlaceholder("Search files...").fill("failed"); await page.getByPlaceholder("Search files...").first().fill("failed");
await expect await expect
.poll(() => requests.some((url) => url.searchParams.get("search") === "failed")) .poll(() => requests.some((url) => url.searchParams.get("search") === "failed"))
@@ -286,7 +286,7 @@ test("queued job with no metadata shows waiting for analysis placeholder", async
await page.getByTitle("/media/queued.mkv").click(); await page.getByTitle("/media/queued.mkv").click();
await expect(page.getByRole("dialog")).toBeVisible(); await expect(page.getByRole("dialog")).toBeVisible();
await expect(page.getByText("Waiting for analysis")).toBeVisible(); await expect(page.getByText("Waiting in queue")).toBeVisible();
await expect(page.getByText("Unknown bit depth")).not.toBeVisible(); await expect(page.getByText("Unknown bit depth")).not.toBeVisible();
}); });

View File

@@ -162,7 +162,7 @@ test("notification targets can be added, tested, and removed", async ({ page })
await expect(page.getByText("Test notification sent.").first()).toBeVisible(); await expect(page.getByText("Test notification sent.").first()).toBeVisible();
expect(testPayload).toMatchObject({ expect(testPayload).toMatchObject({
name: "Playwright Target", name: "Playwright Target",
target_type: "discord", target_type: "discord_webhook",
}); });
await page.getByLabel("Delete notification target Playwright Target").click(); await page.getByLabel("Delete notification target Playwright Target").click();

View File

@@ -1,6 +1,6 @@
{ {
"name": "alchemist-web", "name": "alchemist-web",
"version": "0.3.1-rc.1", "version": "0.3.1-rc.3",
"private": true, "private": true,
"packageManager": "bun@1", "packageManager": "bun@1",
"type": "module", "type": "module",

View File

@@ -5,18 +5,18 @@ import { apiJson, isApiError } from "../lib/api";
import { showToast } from "../lib/toast"; import { showToast } from "../lib/toast";
interface SystemInfo { interface SystemInfo {
version: string;
os_version: string;
is_docker: boolean;
telemetry_enabled: boolean;
ffmpeg_version: string; ffmpeg_version: string;
is_docker: boolean;
os_version: string;
telemetry_enabled: boolean;
version: string;
} }
interface UpdateInfo { interface UpdateInfo {
current_version: string; current_version: string;
latest_version: string | null; latest_version: string | null;
update_available: boolean;
release_url: string | null; release_url: string | null;
update_available: boolean;
} }
interface AboutDialogProps { interface AboutDialogProps {

View File

@@ -8,14 +8,14 @@ interface Props {
} }
interface State { interface State {
hasError: boolean; errorMessage: string;
errorMessage: string; hasError: boolean;
} }
export class ErrorBoundary extends Component<Props, State> { export class ErrorBoundary extends Component<Props, State> {
public state: State = { public state: State = {
hasError: false, errorMessage: "",
errorMessage: "", hasError: false,
}; };
public static getDerivedStateFromError(error: Error): State { public static getDerivedStateFromError(error: Error): State {

View File

@@ -14,24 +14,24 @@ interface HardwareInfo {
failed: number; failed: number;
}; };
backends?: Array<{ backends?: Array<{
kind: string;
codec: string; codec: string;
encoder: string;
device_path: string | null; device_path: string | null;
encoder: string;
kind: string;
}>; }>;
detection_notes?: string[]; detection_notes?: string[];
} }
interface HardwareProbeEntry { interface HardwareProbeEntry {
vendor: string;
codec: string;
encoder: string;
backend: string; backend: string;
codec: string;
device_path: string | null; device_path: string | null;
success: boolean; encoder: string;
selected: boolean; selected: boolean;
summary: string;
stderr?: string | null; stderr?: string | null;
success: boolean;
summary: string;
vendor: string;
} }
interface HardwareProbeLog { interface HardwareProbeLog {
@@ -39,11 +39,11 @@ interface HardwareProbeLog {
} }
interface HardwareSettings { interface HardwareSettings {
allow_cpu_fallback: boolean;
allow_cpu_encoding: boolean; allow_cpu_encoding: boolean;
allow_cpu_fallback: boolean;
cpu_preset: string; cpu_preset: string;
preferred_vendor: string | null;
device_path: string | null; device_path: string | null;
preferred_vendor: string | null;
} }
export default function HardwareSettings() { export default function HardwareSettings() {

View File

@@ -39,15 +39,16 @@ export default function HeaderActions() {
labelColor: "text-helios-solar", labelColor: "text-helios-solar",
}, },
draining: { draining: {
dot: "bg-helios-slate animate-pulse", dot: "bg-helios-solar animate-pulse",
label: "Stopping", label: "Stopping",
labelColor: "text-helios-slate", labelColor: "text-helios-solar",
}, },
} as const; } as const;
const status = engineStatus?.status ?? "paused"; const status = engineStatus?.status ?? "paused";
const isIdle = status === "running" && (stats?.active ?? 0) === 0; const isIdle = status === "running" && (stats?.active ?? 0) === 0;
const displayStatus: keyof typeof statusConfig = isIdle ? "idle" : status; const displayStatus: keyof typeof statusConfig =
status === "draining" ? "draining" : isIdle ? "idle" : status;
const refreshEngineStatus = async () => { const refreshEngineStatus = async () => {
const data = await apiJson<EngineStatus>("/api/engine/status"); const data = await apiJson<EngineStatus>("/api/engine/status");

File diff suppressed because it is too large Load Diff

View File

@@ -14,24 +14,24 @@ import { apiJson, isApiError } from "../lib/api";
import { showToast } from "../lib/toast"; import { showToast } from "../lib/toast";
interface CodecSavings { interface CodecSavings {
codec: string;
bytes_saved: number; bytes_saved: number;
codec: string;
job_count: number; job_count: number;
} }
interface DailySavings { interface DailySavings {
date: string;
bytes_saved: number; bytes_saved: number;
date: string;
} }
interface SavingsSummary { interface SavingsSummary {
total_input_bytes: number;
total_output_bytes: number;
total_bytes_saved: number;
savings_percent: number;
job_count: number; job_count: number;
savings_by_codec: CodecSavings[]; savings_by_codec: CodecSavings[];
savings_over_time: DailySavings[]; savings_over_time: DailySavings[];
savings_percent: number;
total_bytes_saved: number;
total_input_bytes: number;
total_output_bytes: number;
} }
const GIB = 1_073_741_824; const GIB = 1_073_741_824;

View File

@@ -5,11 +5,11 @@ import { showToast } from "../lib/toast";
import ConfirmDialog from "./ui/ConfirmDialog"; import ConfirmDialog from "./ui/ConfirmDialog";
interface ScheduleWindow { interface ScheduleWindow {
id: number;
start_time: string;
end_time: string;
days_of_week: string; days_of_week: string;
enabled: boolean; enabled: boolean;
end_time: string;
id: number;
start_time: string;
} }
const DAYS = ["Sun", "Mon", "Tue", "Wed", "Thu", "Fri", "Sat"]; const DAYS = ["Sun", "Mon", "Tue", "Wed", "Thu", "Fri", "Sat"];

View File

@@ -0,0 +1,511 @@
import { X, Clock, Info, Activity, Database, Zap, Maximize2, AlertCircle, RefreshCw, Ban, Trash2 } from "lucide-react";
import { motion, AnimatePresence } from "framer-motion";
import { clsx, type ClassValue } from "clsx";
import { twMerge } from "tailwind-merge";
import type { RefObject } from "react";
import type React from "react";
import type { JobDetail, EncodeStats, ExplanationView, LogEntry, ConfirmConfig, Job } from "./types";
import { formatBytes, formatDuration, logLevelClass, isJobActive } from "./types";
// Combine conditional class-name inputs via clsx, then let twMerge resolve
// conflicting Tailwind utility classes.
function cn(...inputs: ClassValue[]) {
  const joined = clsx(inputs);
  return twMerge(joined);
}
// Props for the job-detail modal rendered from the jobs page.
interface JobDetailModalProps {
  // Job currently focused in the modal; null hides the modal entirely.
  focusedJob: JobDetail | null;
  // Ref attached to the dialog element — presumably for parent-side focus
  // management; confirm against the caller.
  detailDialogRef: RefObject<HTMLDivElement | null>;
  // True while the detail payload is still being fetched (shows a loading hint).
  detailLoading: boolean;
  onClose: () => void;
  // Humanized planner decision / failure explanations, when available.
  focusedDecision: ExplanationView | null;
  focusedFailure: ExplanationView | null;
  focusedJobLogs: LogEntry[];
  shouldShowFfmpegOutput: boolean;
  // Encode stats rendered in the "Encode Results" section; null when absent.
  completedEncodeStats: EncodeStats | null;
  // Placeholder title/detail shown when no metadata or stats exist yet.
  focusedEmptyState: { title: string; detail: string } | null;
  // Callbacks delegated to the parent job manager.
  openConfirm: (config: ConfirmConfig) => void;
  handleAction: (id: number, action: "cancel" | "restart" | "delete") => Promise<void>;
  handlePriority: (job: Job, priority: number, label: string) => Promise<void>;
  getStatusBadge: (status: string) => React.ReactElement;
}
export function JobDetailModal({
focusedJob, detailDialogRef, detailLoading, onClose,
focusedDecision, focusedFailure, focusedJobLogs, shouldShowFfmpegOutput,
completedEncodeStats, focusedEmptyState,
openConfirm, handleAction, handlePriority, getStatusBadge,
}: JobDetailModalProps) {
return (
<AnimatePresence>
{focusedJob && (
<>
<motion.div
initial={{ opacity: 0 }}
animate={{ opacity: 1 }}
exit={{ opacity: 0 }}
onClick={onClose}
className="fixed inset-0 bg-black/60 backdrop-blur-sm z-[100]"
/>
<div className="fixed inset-0 flex items-center justify-center pointer-events-none z-[101]">
<motion.div
key="modal-content"
initial={{ opacity: 0, scale: 0.95, y: 10 }}
animate={{ opacity: 1, scale: 1, y: 0 }}
exit={{ opacity: 0, scale: 0.95, y: 10 }}
transition={{ duration: 0.2 }}
ref={detailDialogRef as React.RefObject<HTMLDivElement>}
role="dialog"
aria-modal="true"
aria-labelledby="job-details-title"
aria-describedby="job-details-path"
tabIndex={-1}
className="w-full max-w-2xl bg-helios-surface border border-helios-line/20 rounded-lg shadow-2xl pointer-events-auto overflow-hidden mx-4"
>
{/* Header */}
<div className="p-6 border-b border-helios-line/10 flex justify-between items-start gap-4 bg-helios-surface-soft/50">
<div className="flex-1 min-w-0">
<div className="flex items-center gap-3 mb-1">
{getStatusBadge(focusedJob.job.status)}
<span className="text-xs font-medium text-helios-slate">Job ID #{focusedJob.job.id}</span>
<span className="text-xs font-medium text-helios-slate">Priority {focusedJob.job.priority}</span>
</div>
<h2 id="job-details-title" className="text-lg font-bold text-helios-ink truncate" title={focusedJob.job.input_path}>
{focusedJob.job.input_path.split(/[/\\]/).pop()}
</h2>
<p id="job-details-path" className="text-xs text-helios-slate truncate opacity-60">{focusedJob.job.input_path}</p>
</div>
<button
onClick={onClose}
className="p-2 hover:bg-helios-line/10 rounded-md transition-colors text-helios-slate"
>
<X size={20} />
</button>
</div>
<div className="p-6 space-y-8 max-h-[70vh] overflow-y-auto custom-scrollbar">
{detailLoading && (
<p className="text-xs text-helios-slate" aria-live="polite">Loading job details...</p>
)}
{/* Active-encode status banner */}
{focusedEmptyState && (focusedJob.job.status === "encoding" || focusedJob.job.status === "remuxing") && (
<div className="flex items-center gap-3 rounded-lg border border-helios-line/20 bg-helios-surface-soft px-4 py-3">
<div className="p-1.5 rounded-lg bg-helios-surface border border-helios-line/20 text-helios-slate shrink-0">
<Clock size={14} />
</div>
<p className="text-xs font-medium text-helios-ink">{focusedEmptyState.title}</p>
</div>
)}
{focusedJob.metadata || completedEncodeStats ? (
<>
{focusedJob.metadata && (
<>
{/* Stats Grid */}
<div className="grid grid-cols-2 lg:grid-cols-3 gap-4">
<div className="p-4 rounded-lg bg-helios-surface-soft border border-helios-line/20 space-y-1">
<div className="flex items-center gap-2 text-helios-slate mb-1">
<Activity size={12} />
<span className="text-xs font-medium text-helios-slate">Video Codec</span>
</div>
<p className="text-sm font-bold text-helios-ink capitalize">
{focusedJob.metadata.codec_name || "Unknown"}
</p>
<p className="text-xs text-helios-slate">
{(focusedJob.metadata.bit_depth ? `${focusedJob.metadata.bit_depth}-bit` : "Unknown bit depth")} {focusedJob.metadata.container.toUpperCase()}
</p>
</div>
<div className="p-4 rounded-lg bg-helios-surface-soft border border-helios-line/20 space-y-1">
<div className="flex items-center gap-2 text-helios-slate mb-1">
<Maximize2 size={12} />
<span className="text-xs font-medium text-helios-slate">Resolution</span>
</div>
<p className="text-sm font-bold text-helios-ink">
{`${focusedJob.metadata.width}x${focusedJob.metadata.height}`}
</p>
<p className="text-xs text-helios-slate">
{focusedJob.metadata.fps.toFixed(2)} FPS
</p>
</div>
<div className="p-4 rounded-lg bg-helios-surface-soft border border-helios-line/20 space-y-1">
<div className="flex items-center gap-2 text-helios-slate mb-1">
<Clock size={12} />
<span className="text-xs font-medium text-helios-slate">Duration</span>
</div>
<p className="text-sm font-bold text-helios-ink">
{formatDuration(focusedJob.metadata.duration_secs)}
</p>
</div>
</div>
{/* Media Details */}
<div className="grid grid-cols-1 lg:grid-cols-2 gap-8">
<div className="space-y-4">
<h3 className="text-xs font-medium text-helios-slate/70 flex items-center gap-2">
<Database size={12} /> Input Details
</h3>
<div className="space-y-3">
<div className="flex justify-between items-center text-xs">
<span className="text-helios-slate font-medium">File Size</span>
<span className="text-helios-ink font-bold">{formatBytes(focusedJob.metadata.size_bytes)}</span>
</div>
<div className="flex justify-between items-center text-xs">
<span className="text-helios-slate font-medium">Video Bitrate</span>
<span className="text-helios-ink font-bold">
{(focusedJob.metadata.video_bitrate_bps ?? focusedJob.metadata.container_bitrate_bps)
? `${(((focusedJob.metadata.video_bitrate_bps ?? focusedJob.metadata.container_bitrate_bps) as number) / 1000).toFixed(0)} kbps`
: "-"}
</span>
</div>
<div className="flex justify-between items-center text-xs">
<span className="text-helios-slate font-medium">Audio</span>
<span className="text-helios-ink font-bold capitalize">
{focusedJob.metadata.audio_codec || "N/A"} ({focusedJob.metadata.audio_channels || 0}ch)
</span>
</div>
</div>
</div>
<div className="space-y-4">
<h3 className="text-xs font-medium text-helios-solar flex items-center gap-2">
<Zap size={12} /> Output Details
</h3>
{focusedJob.encode_stats ? (
<div className="space-y-3">
<div className="flex justify-between items-center text-xs">
<span className="text-helios-slate font-medium">Result Size</span>
<span className="text-helios-solar font-bold">{formatBytes(focusedJob.encode_stats.output_size_bytes)}</span>
</div>
<div className="flex justify-between items-center text-xs">
<span className="text-helios-slate font-medium">Reduction</span>
<span className="text-green-500 font-bold">
{((1 - focusedJob.encode_stats.compression_ratio) * 100).toFixed(1)}% Saved
</span>
</div>
<div className="flex justify-between items-center text-xs">
<span className="text-helios-slate font-medium">VMAF Score</span>
<div className="flex items-center gap-1.5">
<div className="h-1.5 w-16 bg-helios-line/10 rounded-full overflow-hidden">
<div className="h-full bg-helios-solar" style={{ width: `${focusedJob.encode_stats.vmaf_score || 0}%` }} />
</div>
<span className="text-helios-ink font-bold">
{focusedJob.encode_stats.vmaf_score?.toFixed(1) || "-"}
</span>
</div>
</div>
</div>
) : (
<div className="h-[80px] flex items-center justify-center border border-dashed border-helios-line/20 rounded-lg text-xs text-helios-slate italic">
{focusedJob.job.status === "encoding"
? "Encoding in progress..."
: focusedJob.job.status === "remuxing"
? "Remuxing in progress..."
: "No encode data available"}
</div>
)}
</div>
</div>
</>
)}
{completedEncodeStats && (
<div className="space-y-4">
<h3 className="text-xs font-medium text-helios-solar flex items-center gap-2">
<Zap size={12} /> Encode Results
</h3>
<div className="p-4 rounded-lg bg-helios-surface-soft border border-helios-line/20 space-y-3">
<div className="flex justify-between items-center text-xs">
<span className="text-helios-slate font-medium">Input size</span>
<span className="text-helios-ink font-bold">{formatBytes(completedEncodeStats.input_size_bytes)}</span>
</div>
<div className="flex justify-between items-center text-xs">
<span className="text-helios-slate font-medium">Output size</span>
<span className="text-helios-ink font-bold">{formatBytes(completedEncodeStats.output_size_bytes)}</span>
</div>
<div className="flex justify-between items-center text-xs">
<span className="text-helios-slate font-medium">Reduction</span>
<span className="text-green-500 font-bold">
{completedEncodeStats.input_size_bytes > 0
? `${((1 - completedEncodeStats.output_size_bytes / completedEncodeStats.input_size_bytes) * 100).toFixed(1)}% saved`
: "—"}
</span>
</div>
<div className="flex justify-between items-center text-xs">
<span className="text-helios-slate font-medium">Encode time</span>
<span className="text-helios-ink font-bold">{formatDuration(completedEncodeStats.encode_time_seconds)}</span>
</div>
<div className="flex justify-between items-center text-xs">
<span className="text-helios-slate font-medium">Speed</span>
<span className="text-helios-ink font-bold">{`${completedEncodeStats.encode_speed.toFixed(2)}\u00d7 realtime`}</span>
</div>
<div className="flex justify-between items-center text-xs">
<span className="text-helios-slate font-medium">Avg bitrate</span>
<span className="text-helios-ink font-bold">{`${completedEncodeStats.avg_bitrate_kbps} kbps`}</span>
</div>
<div className="flex justify-between items-center text-xs">
<span className="text-helios-slate font-medium">VMAF</span>
<span className="text-helios-ink font-bold">{completedEncodeStats.vmaf_score?.toFixed(1) ?? "—"}</span>
</div>
</div>
</div>
)}
</>
) : focusedEmptyState ? (
<div className="flex items-center gap-3 rounded-lg border border-helios-line/20 bg-helios-surface-soft px-4 py-5">
<div className="p-2 rounded-lg bg-helios-surface border border-helios-line/20 text-helios-slate shrink-0">
<Clock size={18} />
</div>
<div>
<p className="text-sm font-medium text-helios-ink">
{focusedEmptyState.title}
</p>
<p className="text-xs text-helios-slate mt-0.5">
{focusedEmptyState.detail}
</p>
</div>
</div>
) : null}
{/* Decision Info */}
{focusedDecision && focusedJob.job.status !== "failed" && focusedJob.job.status !== "skipped" && (
<div className="p-4 rounded-lg bg-helios-solar/5 border border-helios-solar/10">
<div className="flex items-center gap-2 text-helios-solar mb-1">
<Info size={12} />
<span className="text-xs font-medium text-helios-slate">Decision Context</span>
</div>
<div className="space-y-3">
<p className="text-sm font-medium text-helios-ink">
{focusedJob.job.status === "completed"
? "Transcoded"
: focusedDecision.summary}
</p>
<p className="text-xs leading-relaxed text-helios-slate">
{focusedDecision.detail}
</p>
{Object.keys(focusedDecision.measured).length > 0 && (
<div className="space-y-1.5 rounded-lg border border-helios-line/20 bg-helios-surface-soft px-3 py-2.5">
{Object.entries(focusedDecision.measured).map(([k, v]) => (
<div key={k} className="flex items-center justify-between text-xs">
<span className="font-mono text-helios-slate">{k}</span>
<span className="font-mono font-bold text-helios-ink">{String(v)}</span>
</div>
))}
</div>
)}
{focusedDecision.operator_guidance && (
<div className="flex items-start gap-2 rounded-lg border border-helios-solar/20 bg-helios-solar/5 px-3 py-2.5">
<span className="text-xs leading-relaxed text-helios-solar">
{focusedDecision.operator_guidance}
</span>
</div>
)}
</div>
</div>
)}
{focusedJob.job.status === "skipped" && focusedDecision && (
<div className="p-4 rounded-lg bg-helios-surface-soft border border-helios-line/10">
<p className="text-sm text-helios-ink leading-relaxed">
Alchemist analysed this file and decided not to transcode it. Here&apos;s why:
</p>
<div className="mt-3 space-y-3">
<p className="text-sm font-medium text-helios-ink">
{focusedDecision.summary}
</p>
<p className="text-xs leading-relaxed text-helios-slate">
{focusedDecision.detail}
</p>
{Object.keys(focusedDecision.measured).length > 0 && (
<div className="space-y-1.5 rounded-lg border border-helios-line/20 bg-helios-surface px-3 py-2.5">
{Object.entries(focusedDecision.measured).map(([k, v]) => (
<div key={k} className="flex items-center justify-between text-xs">
<span className="font-mono text-helios-slate">{k}</span>
<span className="font-mono font-bold text-helios-ink">{String(v)}</span>
</div>
))}
</div>
)}
{focusedDecision.operator_guidance && (
<div className="flex items-start gap-2 rounded-lg border border-helios-solar/20 bg-helios-solar/5 px-3 py-2.5">
<span className="text-xs leading-relaxed text-helios-solar">
{focusedDecision.operator_guidance}
</span>
</div>
)}
</div>
</div>
)}
{focusedJob.job.status === "failed" && (
<div className="rounded-lg border border-status-error/20 bg-status-error/5 px-4 py-4 space-y-2">
<div className="flex items-center gap-2">
<AlertCircle size={14} className="text-status-error shrink-0" />
<span className="text-xs font-semibold text-status-error uppercase tracking-wide">
Failure Reason
</span>
</div>
{focusedFailure ? (
<>
<p className="text-sm font-medium text-helios-ink">
{focusedFailure.summary}
</p>
<p className="text-xs leading-relaxed text-helios-slate">
{focusedFailure.detail}
</p>
{focusedFailure.operator_guidance && (
<p className="text-xs leading-relaxed text-status-error">
{focusedFailure.operator_guidance}
</p>
)}
{focusedFailure.legacy_reason !== focusedFailure.detail && (
<p className="text-xs font-mono text-helios-slate/70 break-all leading-relaxed">
{focusedFailure.legacy_reason}
</p>
)}
</>
) : (
<p className="text-sm text-helios-slate">
No error details captured. Check the logs below.
</p>
)}
</div>
)}
{(focusedJob.encode_attempts ?? []).length > 0 && (
<details className="rounded-lg border border-helios-line/15 bg-helios-surface-soft/40 p-4">
<summary className="cursor-pointer text-xs text-helios-solar">
Attempt History ({(focusedJob.encode_attempts ?? []).length})
</summary>
<div className="mt-3 space-y-2">
{(focusedJob.encode_attempts ?? []).map((attempt) => (
<div key={attempt.id} className="flex items-start gap-3 rounded-lg border border-helios-line/10 bg-helios-main/50 px-3 py-2 text-xs">
<span className={cn(
"mt-0.5 shrink-0 rounded px-1.5 py-0.5 font-mono font-semibold",
attempt.outcome === "completed" && "bg-status-success/15 text-status-success",
attempt.outcome === "failed" && "bg-status-error/15 text-status-error",
attempt.outcome === "cancelled" && "bg-helios-slate/15 text-helios-slate",
)}>#{attempt.attempt_number}</span>
<div className="min-w-0 flex-1">
<div className="flex items-center gap-2">
<span className="capitalize font-medium text-helios-ink">{attempt.outcome}</span>
{attempt.encode_time_seconds != null && (
<span className="text-helios-slate">{attempt.encode_time_seconds < 60
? `${attempt.encode_time_seconds.toFixed(1)}s`
: `${(attempt.encode_time_seconds / 60).toFixed(1)}m`}</span>
)}
{attempt.input_size_bytes != null && attempt.output_size_bytes != null && (
<span className="text-helios-slate">
{formatBytes(attempt.input_size_bytes)} {formatBytes(attempt.output_size_bytes)}
</span>
)}
</div>
{attempt.failure_summary && (
<p className="mt-0.5 text-helios-slate/80 truncate">{attempt.failure_summary}</p>
)}
<p className="mt-0.5 text-helios-slate/50">{new Date(attempt.finished_at).toLocaleString()}</p>
</div>
</div>
))}
</div>
</details>
)}
{shouldShowFfmpegOutput && (
<details className="rounded-lg border border-helios-line/15 bg-helios-surface-soft/40 p-4">
<summary className="cursor-pointer text-xs text-helios-solar">
Show FFmpeg output ({focusedJobLogs.length} lines)
</summary>
<div className="mt-3 max-h-48 overflow-y-auto rounded-lg bg-helios-main/70 p-3">
{focusedJobLogs.map((entry) => (
<div
key={entry.id}
className={cn(
"font-mono text-xs leading-relaxed whitespace-pre-wrap break-words",
logLevelClass(entry.level)
)}
>
{entry.message}
</div>
))}
</div>
</details>
)}
{/* Action Toolbar */}
<div className="flex items-center justify-between pt-4 border-t border-helios-line/10">
<div className="flex gap-2">
<button
onClick={() => void handlePriority(focusedJob.job, focusedJob.job.priority + 10, "Priority boosted")}
className="px-3 py-2 border border-helios-line/20 bg-helios-surface text-helios-slate rounded-lg text-sm font-bold hover:bg-helios-surface-soft transition-all"
>
Boost +10
</button>
<button
onClick={() => void handlePriority(focusedJob.job, focusedJob.job.priority - 10, "Priority lowered")}
className="px-3 py-2 border border-helios-line/20 bg-helios-surface text-helios-slate rounded-lg text-sm font-bold hover:bg-helios-surface-soft transition-all"
>
Lower -10
</button>
<button
onClick={() => void handlePriority(focusedJob.job, 0, "Priority reset")}
className="px-3 py-2 border border-helios-line/20 bg-helios-surface text-helios-slate rounded-lg text-sm font-bold hover:bg-helios-surface-soft transition-all"
>
Reset
</button>
{(focusedJob.job.status === "failed" || focusedJob.job.status === "cancelled") && (
<button
onClick={() =>
openConfirm({
title: "Retry job",
body: "Retry this job now?",
confirmLabel: "Retry",
onConfirm: () => handleAction(focusedJob.job.id, "restart"),
})
}
className="px-4 py-2 bg-helios-solar text-helios-main rounded-lg text-sm font-bold flex items-center gap-2 hover:brightness-110 active:scale-95 transition-all shadow-sm"
>
<RefreshCw size={14} /> Retry Job
</button>
)}
{["encoding", "analyzing", "remuxing"].includes(focusedJob.job.status) && (
<button
onClick={() =>
openConfirm({
title: "Cancel job",
body: "Stop this job immediately?",
confirmLabel: "Cancel",
confirmTone: "danger",
onConfirm: () => handleAction(focusedJob.job.id, "cancel"),
})
}
className="px-4 py-2 border border-helios-line/20 bg-helios-surface text-helios-slate rounded-lg text-sm font-bold flex items-center gap-2 hover:bg-helios-surface-soft active:scale-95 transition-all"
>
<Ban size={14} /> Stop / Cancel
</button>
)}
</div>
{!isJobActive(focusedJob.job) && (
<button
onClick={() =>
openConfirm({
title: "Delete job",
body: "Delete this job from history?",
confirmLabel: "Delete",
confirmTone: "danger",
onConfirm: () => handleAction(focusedJob.job.id, "delete"),
})
}
className="px-4 py-2 text-red-500 hover:bg-red-500/5 rounded-lg text-sm font-bold flex items-center gap-2 transition-all"
>
<Trash2 size={14} /> Delete
</button>
)}
</div>
</div>
</motion.div>
</div>
</>
)}
</AnimatePresence>
);
}

View File

@@ -0,0 +1,303 @@
import type { ExplanationView, LogEntry } from "./types";
/**
 * Render a stored reduction ratio (e.g. "0.35") as a whole-number
 * percentage string (e.g. "35%"). Returns "?" when no value is present,
 * and echoes the raw input when it cannot be parsed as a number.
 */
function formatReductionPercent(value?: string): string {
  if (!value) return "?";
  const ratio = Number.parseFloat(value);
  if (!Number.isFinite(ratio)) return value;
  return `${(ratio * 100).toFixed(0)}%`;
}
/**
 * Translate a legacy machine-readable skip/decision reason string into a
 * structured, operator-friendly ExplanationView.
 *
 * Format: "<key>" or "<key>|k=v,k=v,...". The text before the first "|"
 * selects the explanation template; the comma-separated pairs after it
 * are parsed into `measured` and interpolated into the prose.
 */
export function humanizeSkipReason(reason: string): ExplanationView {
  // Split off the template key from the optional parameter payload.
  const pipeIdx = reason.indexOf("|");
  const key = pipeIdx === -1
    ? reason.trim()
    : reason.slice(0, pipeIdx).trim();
  const paramStr = pipeIdx === -1 ? "" : reason.slice(pipeIdx + 1);
  // Parsed "k=v" pairs; values containing "=" are re-joined intact below.
  const measured: Record<string, string | number | boolean | null> = {};
  for (const pair of paramStr.split(",")) {
    const [rawKey, ...rawValueParts] = pair.split("=");
    if (!rawKey || rawValueParts.length === 0) continue;
    measured[rawKey.trim()] = rawValueParts.join("=").trim();
  }
  // Every branch yields a "decision"-category view carrying the parsed
  // measurements and the untouched raw reason for traceability.
  const makeDecision = (
    code: string,
    summary: string,
    detail: string,
    operator_guidance: string | null,
  ): ExplanationView => ({
    category: "decision",
    code,
    summary,
    detail,
    operator_guidance,
    measured,
    legacy_reason: reason,
  });
  // Known decision keys (some with legacy free-text aliases); anything
  // unrecognized falls through to a generic "Decision recorded" view.
  switch (key) {
    case "analysis_failed":
      return makeDecision(
        "analysis_failed",
        "File could not be analyzed",
        `FFprobe failed to read this file. It may be corrupt, incomplete, or in an unsupported format. Error: ${measured.error ?? "unknown"}`,
        "Try playing the file in VLC or another media player. If it plays fine, re-run the scan. If not, the file may be damaged.",
      );
    case "planning_failed":
      return makeDecision(
        "planning_failed",
        "Transcoding plan could not be created",
        `An internal error occurred while planning the transcode for this file. This is likely a bug. Error: ${measured.error ?? "unknown"}`,
        "Check the logs below for details. If this happens repeatedly, please report it as a bug.",
      );
    case "already_target_codec":
      return makeDecision(
        "already_target_codec",
        "Already in target format",
        `This file is already encoded as ${measured.codec ?? "the target codec"}${measured.bit_depth ? ` at ${measured.bit_depth}-bit` : ""}. Re-encoding would waste time and could reduce quality.`,
        null,
      );
    case "already_target_codec_wrong_container":
      return makeDecision(
        "already_target_codec_wrong_container",
        "Target codec, wrong container",
        `The video is already in the right codec but wrapped in a ${measured.container ?? "MP4"} container. Alchemist will remux it to ${measured.target_extension ?? "MKV"} - fast and lossless, no quality loss.`,
        null,
      );
    case "bpp_below_threshold":
      return makeDecision(
        "bpp_below_threshold",
        "Already efficiently compressed",
        `Bits-per-pixel (${measured.bpp ?? "?"}) is below the minimum threshold (${measured.threshold ?? "?"}). This file is already well-compressed - transcoding it would spend significant time for minimal space savings.`,
        "If you want to force transcoding, lower the BPP threshold in Settings -> Transcoding.",
      );
    case "below_min_file_size":
      return makeDecision(
        "below_min_file_size",
        "File too small to process",
        `File size (${measured.size_mb ?? "?"}MB) is below the minimum threshold (${measured.threshold_mb ?? "?"}MB). Small files aren't worth the transcoding overhead.`,
        "Lower the minimum file size threshold in Settings -> Transcoding if you want small files processed.",
      );
    case "size_reduction_insufficient":
      return makeDecision(
        "size_reduction_insufficient",
        "Not enough space would be saved",
        // measured values are stored as strings; the reduction/threshold are
        // fractional ratios rendered as percentages for operators.
        `The predicted size reduction (${formatReductionPercent(String(measured.reduction ?? measured.predicted ?? ""))}) is below the required threshold (${formatReductionPercent(String(measured.threshold ?? ""))}). Transcoding this file wouldn't recover meaningful storage.`,
        "Lower the size reduction threshold in Settings -> Transcoding to encode files with smaller savings.",
      );
    case "no_suitable_encoder":
    case "no_available_encoders":
      return makeDecision(
        key,
        "No encoder available",
        `No encoder was found for ${measured.codec ?? measured.requested_codec ?? "the target codec"}. Hardware detection may have failed, or CPU fallback is disabled.`,
        "Check Settings -> Hardware. Enable CPU fallback, or verify your GPU is detected correctly.",
      );
    case "preferred_codec_unavailable_fallback_disabled":
      return makeDecision(
        "preferred_codec_unavailable_fallback_disabled",
        "Preferred encoder unavailable",
        `The preferred codec (${measured.codec ?? "target codec"}) is not available and CPU fallback is disabled in settings.`,
        "Go to Settings -> Hardware and enable CPU fallback, or check that your GPU encoder is working correctly.",
      );
    // Legacy free-text aliases precede their structured replacements below.
    case "Output path matches input path":
    case "output_path_matches_input":
      return makeDecision(
        "output_path_matches_input",
        "Output would overwrite source",
        "The configured output path is the same as the source file. Alchemist refused to proceed to avoid overwriting your original file.",
        "Go to Settings -> Files and configure a different output suffix or output folder.",
      );
    case "Output already exists":
    case "output_already_exists":
      return makeDecision(
        "output_already_exists",
        "Output file already exists",
        "A transcoded version of this file already exists at the output path. Alchemist skipped it to avoid duplicating work.",
        "If you want to re-transcode it, delete the existing output file first, then retry the job.",
      );
    case "incomplete_metadata":
      return makeDecision(
        "incomplete_metadata",
        "Missing file metadata",
        `FFprobe could not determine the ${measured.missing ?? "required metadata"} for this file. Without reliable metadata Alchemist cannot make a valid transcoding decision.`,
        "Run a Library Doctor scan to check if this file is corrupt. Try playing it in a media player to confirm it is readable.",
      );
    case "already_10bit":
      return makeDecision(
        "already_10bit",
        "Already 10-bit",
        "This file is already encoded in high-quality 10-bit depth. Re-encoding it could reduce quality.",
        null,
      );
    case "remux: mp4_to_mkv_stream_copy":
    case "remux_mp4_to_mkv_stream_copy":
      return makeDecision(
        "remux_mp4_to_mkv_stream_copy",
        "Remuxed (no re-encode)",
        "This file was remuxed from MP4 to MKV using stream copy - fast and lossless. No quality was lost.",
        null,
      );
    case "Low quality (VMAF)":
    case "quality_below_threshold":
      return makeDecision(
        "quality_below_threshold",
        "Quality check failed",
        "The encoded file scored below the minimum VMAF quality threshold. Alchemist rejected the output to protect quality.",
        "The original file has been preserved. You can lower the VMAF threshold in Settings -> Quality, or disable VMAF checking entirely.",
      );
    case "transcode_h264_source":
      return makeDecision(
        "transcode_h264_source",
        "H.264 source prioritized",
        "This file is H.264, which is typically a strong candidate for reclaiming space, so Alchemist prioritized it for transcoding.",
        null,
      );
    case "transcode_recommended":
      return makeDecision(
        "transcode_recommended",
        "Transcode recommended",
        "Alchemist determined this file is a strong candidate for transcoding based on the current codec and measured efficiency.",
        null,
      );
    default:
      // Unknown key: surface the raw reason verbatim rather than dropping it.
      return makeDecision("legacy_decision", "Decision recorded", reason, null);
  }
}
/**
 * Map a legacy free-text failure summary onto a structured explanation.
 *
 * Substring rules are evaluated in order and the first hit wins, so more
 * specific signatures (e.g. "transcode_failed", "videotoolbox") must stay
 * ahead of the generic "ffmpeg failed" catch-all. Unmatched summaries are
 * wrapped in a generic "Failure recorded" view that preserves the raw text.
 */
export function explainFailureSummary(summary: string): ExplanationView {
  const makeFailure = (
    code: string,
    title: string,
    detail: string,
    operator_guidance: string | null,
  ): ExplanationView => ({
    category: "failure",
    code,
    summary: title,
    detail,
    operator_guidance,
    measured: {},
    legacy_reason: summary,
  });
  // Each rule: [substrings to search for] -> makeFailure arguments. Order matters.
  const rules: Array<[string[], [string, string, string, string | null]]> = [
    [["cancelled"], [
      "cancelled",
      "Job was cancelled",
      "This job was cancelled before encoding completed. The original file is untouched.",
      null,
    ]],
    [["no such file or directory"], [
      "source_missing",
      "Source file missing",
      "The source file could not be found. It may have been moved or deleted.",
      "Check that the source file still exists and is readable by Alchemist.",
    ]],
    [["invalid data found", "moov atom not found"], [
      "corrupt_or_unreadable_media",
      "Media could not be read",
      "This file appears to be corrupt or incomplete. Try running a Library Doctor scan.",
      "Verify the source file manually or run Library Doctor to confirm whether it is readable.",
    ]],
    [["permission denied"], [
      "permission_denied",
      "Permission denied",
      "Alchemist doesn't have permission to read this file. Check the file permissions.",
      "Check the file and output path permissions for the Alchemist process user.",
    ]],
    [["encoder not found", "unknown encoder"], [
      "encoder_unavailable",
      "Required encoder unavailable",
      "The required encoder is not available in your FFmpeg installation.",
      "Check FFmpeg encoder availability and hardware settings.",
    ]],
    [["out of memory", "cannot allocate memory"], [
      "resource_exhausted",
      "System ran out of memory",
      "The system ran out of memory during encoding. Try reducing concurrent jobs.",
      "Reduce concurrent jobs or rerun under lower system load.",
    ]],
    [["transcode_failed", "ffmpeg exited"], [
      "unknown_ffmpeg_failure",
      "FFmpeg failed",
      "FFmpeg failed during encoding. This is often caused by a corrupt source file or an encoder configuration issue. Check the logs below for the specific FFmpeg error.",
      "Inspect the FFmpeg output in the job logs for the exact failure.",
    ]],
    [["probing failed"], [
      "analysis_failed",
      "Analysis failed",
      "FFprobe could not read this file. It may be corrupt or in an unsupported format.",
      "Inspect the source file manually or run Library Doctor to confirm whether it is readable.",
    ]],
    [["planning_failed", "planner"], [
      "planning_failed",
      "Planner failed",
      "An error occurred while planning the transcode. Check the logs below for details.",
      "Treat repeated planner failures as a bug and inspect the logs for the triggering input.",
    ]],
    [["output_size=0", "output was empty"], [
      "unknown_ffmpeg_failure",
      "Empty output produced",
      "Encoding produced an empty output file. This usually means FFmpeg crashed silently. Check the logs below for FFmpeg output.",
      "Inspect the FFmpeg logs before retrying the job.",
    ]],
    [["videotoolbox", "vt_compression", "err=-12902", "mediaserverd", "no capable devices"], [
      "hardware_backend_failure",
      "Hardware backend failed",
      "The VideoToolbox hardware encoder failed. This can happen when the GPU is busy, the file uses an unsupported pixel format, or macOS Media Services are unavailable.",
      "Retry the job. If it keeps failing, check the hardware probe log or enable CPU fallback in Settings -> Hardware.",
    ]],
    [["encoder fallback", "fallback detected"], [
      "fallback_blocked",
      "Fallback blocked by policy",
      "The hardware encoder was unavailable and fell back to software encoding, which was not allowed by your settings.",
      "Enable CPU fallback in Settings -> Hardware, or retry when the GPU is less busy.",
    ]],
    [["ffmpeg failed"], [
      "unknown_ffmpeg_failure",
      "FFmpeg failed",
      "FFmpeg failed during encoding. Check the logs below for the specific error. Common causes: unsupported pixel format, codec not available, or corrupt source file.",
      "Inspect the FFmpeg output in the job logs for the exact failure.",
    ]],
  ];
  const haystack = summary.toLowerCase();
  for (const [needles, args] of rules) {
    if (needles.some((needle) => haystack.includes(needle))) {
      return makeFailure(...args);
    }
  }
  return makeFailure("legacy_failure", "Failure recorded", summary, "Inspect the job logs for additional context.");
}
/**
 * Derive a structured failure explanation from the tail of a job's log
 * stream when no structured explanation was stored.
 *
 * Scans the last 25 non-empty entries as one lowercase blob and checks
 * ordered substring signatures (first match wins). The resulting view's
 * legacy_reason is the most recent warning/error line, falling back to
 * the last non-empty line. Returns null on empty logs or no match.
 */
export function explainFailureLogs(logs: LogEntry[]): ExplanationView | null {
  const nonEmpty = logs.filter((entry) => entry.message.trim().length > 0);
  if (nonEmpty.length === 0) return null;
  const tail = nonEmpty.slice(-25);
  // Prefer the newest warn/error entry as the representative raw message.
  const representative = [...tail]
    .reverse()
    .find((entry) => ["error", "warn", "warning"].includes(entry.level.toLowerCase()))
    ?? tail[tail.length - 1];
  const haystack = tail.map((entry) => entry.message).join("\n").toLowerCase();
  const makeFailure = (
    code: string,
    summary: string,
    detail: string,
    operator_guidance: string | null,
  ): ExplanationView => ({
    category: "failure",
    code,
    summary,
    detail,
    operator_guidance,
    measured: {},
    legacy_reason: representative.message,
  });
  // Each rule: [substrings to search for] -> makeFailure arguments. Order matters.
  const rules: Array<[string[], [string, string, string, string | null]]> = [
    [["qscale not available for encoder"], [
      "encoder_parameter_mismatch",
      "Encoder settings rejected",
      "FFmpeg rejected the selected encoder parameters for this hardware backend. The command was accepted by Alchemist, but the encoder refused to start with the generated rate-control options.",
      "Check the FFmpeg output below for the rejected flag and compare it with your current codec and hardware settings.",
    ]],
    [["videotoolbox", "vt_compression", "mediaserverd", "no capable devices", "could not open encoder before eof"], [
      "hardware_backend_failure",
      "Hardware backend failed",
      "The hardware encoder failed to initialize or produce output. This usually points to an unsupported source format, a backend-specific FFmpeg parameter issue, or temporary media-services instability on the host.",
      "Retry the job first. If it fails again, inspect the backend-specific FFmpeg lines below and verify hardware fallback settings.",
    ]],
    [["nothing was written into output file", "received no packets", "output_size=0", "conversion failed"], [
      "empty_output",
      "Encoder produced no output",
      "FFmpeg ran, but no media packets were successfully written to the output file. This usually means the encoder crashed or rejected the stream before real output started.",
      "Check the lines around the first FFmpeg error below to find the encoder/backend-specific cause.",
    ]],
    [["unknown encoder", "encoder not found"], [
      "encoder_unavailable",
      "Required encoder unavailable",
      "The selected encoder is not available in this FFmpeg build.",
      "Verify FFmpeg encoder support and your hardware settings, then retry the job.",
    ]],
    [["invalid data found", "moov atom not found", "error while decoding", "corrupt"], [
      "corrupt_or_unreadable_media",
      "Media could not be decoded",
      "FFmpeg hit a decode/read error while processing the source. The file is likely corrupt, incomplete, or not fully readable.",
      "Try playing the file manually or run Library Doctor to confirm whether the source is intact.",
    ]],
    [["permission denied", "operation not permitted", "read-only file system", "no such file or directory"], [
      "path_or_permission_failure",
      "Path or permission failure",
      "Alchemist could not read the source or write the output at the required path.",
      "Check that the source still exists and that the Alchemist process user can read and write the configured paths.",
    ]],
    [["ffmpeg failed", "transcode failed"], [
      "unknown_ffmpeg_failure",
      "FFmpeg failed",
      "FFmpeg reported a fatal encoding error, but no more specific structured explanation was stored for this job.",
      "Inspect the raw FFmpeg output below for the first concrete encoder or media error.",
    ]],
  ];
  for (const [needles, args] of rules) {
    if (needles.some((needle) => haystack.includes(needle))) {
      return makeFailure(...args);
    }
  }
  return null;
}
/**
 * Prefer a structured decision explanation when the backend supplied one;
 * otherwise derive one from the legacy skip-reason string. Returns null
 * when neither source of information exists.
 */
export function normalizeDecisionExplanation(
  explanation: ExplanationView | null | undefined,
  legacyReason?: string | null,
): ExplanationView | null {
  if (explanation) {
    return explanation;
  }
  return legacyReason ? humanizeSkipReason(legacyReason) : null;
}
/**
 * Resolve the best available failure explanation for a job, in order of
 * fidelity: a structured explanation from the backend, then one parsed
 * from the log tail, then one derived from the legacy summary string.
 * Returns null when nothing usable exists.
 */
export function normalizeFailureExplanation(
  explanation: ExplanationView | null | undefined,
  legacySummary?: string | null,
  logs?: LogEntry[] | null,
): ExplanationView | null {
  if (explanation) {
    return explanation;
  }
  if (logs && logs.length > 0) {
    const fromLogs = explainFailureLogs(logs);
    if (fromLogs) {
      return fromLogs;
    }
  }
  return legacySummary ? explainFailureSummary(legacySummary) : null;
}

View File

@@ -0,0 +1,225 @@
import { RefreshCw, Ban, Trash2, MoreHorizontal } from "lucide-react";
import { motion, AnimatePresence } from "framer-motion";
import { clsx, type ClassValue } from "clsx";
import { twMerge } from "tailwind-merge";
import type { RefObject, MutableRefObject } from "react";
import type React from "react";
import type { Job, ConfirmConfig } from "./types";
import { isJobActive, retryCountdown } from "./types";
// Merge conditional class values via clsx, then let Tailwind-aware merging
// resolve conflicting utility classes.
function cn(...inputs: ClassValue[]) {
  const flattened = clsx(inputs);
  return twMerge(flattened);
}
/** Props for the jobs list table: data, selection state, and action plumbing from the parent page. */
interface JobsTableProps {
  /** Jobs for the current page/filter. */
  jobs: Job[];
  /** True while the job list is being fetched (shows skeleton rows when empty). */
  loading: boolean;
  /** IDs of rows currently selected via checkboxes. */
  selected: Set<number>;
  /** ID of the job open in the detail view, or null (highlights its row). */
  focusedJobId: number | null;
  /** Counter read by countdown cells so they re-render as it advances. */
  tick: number;
  /** Map of job ID -> encode start timestamp in ms, used by calcEta. */
  encodeStartTimes: MutableRefObject<Map<number, number>>;
  /** Job whose row action menu is open, or null. */
  menuJobId: number | null;
  /** Ref attached to the open row menu (presumably for outside-click dismissal — confirm in parent). */
  menuRef: RefObject<HTMLDivElement | null>;
  /** Toggle selection of a single row. */
  toggleSelect: (id: number) => void;
  /** Select or clear all visible rows. */
  toggleSelectAll: () => void;
  /** Open/refresh the detail view for a job. */
  fetchJobDetails: (id: number) => Promise<void>;
  /** Open (id) or close (null) a row's action menu. */
  setMenuJobId: (id: number | null) => void;
  /** Show the confirmation dialog before a destructive action. */
  openConfirm: (config: ConfirmConfig) => void;
  /** Execute a job lifecycle action after confirmation. */
  handleAction: (id: number, action: "cancel" | "restart" | "delete") => Promise<void>;
  /** Set a job's priority; label is the toast/feedback text. */
  handlePriority: (job: Job, priority: number, label: string) => Promise<void>;
  /** Render the status pill element for a status string. */
  getStatusBadge: (status: string) => React.ReactElement;
}
/**
 * Estimate remaining wall-clock time for an encode from observed
 * throughput: elapsed time divided by fractional progress projects the
 * total duration, and the difference is the remainder. Returns null when
 * no estimate is possible (no recorded start time, progress at 0% or
 * 100%, or a negative remainder from clock skew).
 */
function calcEta(encodeStartTimes: MutableRefObject<Map<number, number>>, jobId: number, progress: number): string | null {
  if (progress <= 0 || progress >= 100) return null;
  const startedAt = encodeStartTimes.current.get(jobId);
  if (!startedAt) return null;
  const elapsed = Date.now() - startedAt;
  const projectedTotal = elapsed * (100 / progress);
  const secondsLeft = Math.round((projectedTotal - elapsed) / 1000);
  if (secondsLeft < 0) return null;
  return secondsLeft < 60
    ? `~${secondsLeft}s remaining`
    : `~${Math.ceil(secondsLeft / 60)} min remaining`;
}
/**
 * Jobs table: selectable rows, live progress + ETA for active encodes,
 * per-row action menu (priority, retry, cancel, delete), and click-through
 * to the job detail view. Purely presentational — all state and actions
 * come in via props.
 */
export function JobsTable({
  jobs, loading, selected, focusedJobId, tick, encodeStartTimes,
  menuJobId, menuRef, toggleSelect, toggleSelectAll,
  fetchJobDetails, setMenuJobId, openConfirm, handleAction, handlePriority,
  getStatusBadge,
}: JobsTableProps) {
  return (
    <div className="bg-helios-surface/50 border border-helios-line/20 rounded-lg overflow-hidden shadow-sm">
      <table className="w-full text-left border-collapse">
        {/* Header row; the leading checkbox selects/clears every visible row. */}
        <thead className="bg-helios-surface border-b border-helios-line/20 text-xs font-medium text-helios-slate">
          <tr>
            <th className="px-6 py-4 w-10">
              <input type="checkbox"
                checked={jobs.length > 0 && jobs.every(j => selected.has(j.id))}
                onChange={toggleSelectAll}
                className="rounded border-helios-line/30 bg-helios-surface-soft accent-helios-solar"
              />
            </th>
            <th className="px-6 py-4">File</th>
            <th className="px-6 py-4">Status</th>
            <th className="px-6 py-4">Progress</th>
            <th className="hidden md:table-cell px-6 py-4">Updated</th>
            <th className="px-6 py-4 w-14"></th>
          </tr>
        </thead>
        <tbody className="divide-y divide-helios-line/10">
          {/* Three body states: loading skeleton, empty message, or job rows. */}
          {loading && jobs.length === 0 ? (
            Array.from({ length: 5 }).map((_, index) => (
              <tr key={`loading-${index}`}>
                <td colSpan={6} className="px-6 py-3">
                  <div className="h-10 w-full rounded-md bg-helios-surface-soft/60 animate-pulse" />
                </td>
              </tr>
            ))
          ) : jobs.length === 0 ? (
            <tr>
              <td colSpan={6} className="px-6 py-12 text-center text-helios-slate">
                No jobs found
              </td>
            </tr>
          ) : (
            jobs.map((job) => (
              <tr
                key={job.id}
                onClick={() => void fetchJobDetails(job.id)}
                className={cn(
                  "group hover:bg-helios-surface/80 transition-all cursor-pointer",
                  selected.has(job.id) && "bg-helios-surface-soft",
                  focusedJobId === job.id && "bg-helios-solar/5"
                )}
              >
                {/* Checkbox cell: stopPropagation keeps the row's open-details click from firing. */}
                <td className="px-6 py-4" onClick={(e) => e.stopPropagation()}>
                  <input type="checkbox"
                    checked={selected.has(job.id)}
                    onChange={() => toggleSelect(job.id)}
                    className="rounded border-helios-line/30 bg-helios-surface-soft accent-helios-solar"
                  />
                </td>
                <td className="px-6 py-4 relative">
                  {/* layoutId pairs this element with its counterpart for a shared-layout animation. */}
                  <motion.div layoutId={`job-name-${job.id}`} className="flex flex-col">
                    <span className="font-medium text-helios-ink truncate max-w-[300px]" title={job.input_path}>
                      {job.input_path.split(/[/\\]/).pop()}
                    </span>
                    <div className="flex items-center gap-2">
                      <span className="text-xs text-helios-slate truncate max-w-[240px]">
                        {job.input_path}
                      </span>
                      <span className="hidden md:inline rounded-full border border-helios-line/20 px-2 py-0.5 text-xs font-bold text-helios-slate">
                        P{job.priority}
                      </span>
                    </div>
                  </motion.div>
                </td>
                <td className="px-6 py-4">
                  <motion.div layoutId={`job-status-${job.id}`}>
                    {getStatusBadge(job.status)}
                  </motion.div>
                  {job.status === "failed" && (() => {
                    // Reading `tick` subscribes this cell to the shared timer so the countdown refreshes.
                    void tick;
                    const countdown = retryCountdown(job);
                    return countdown ? (
                      <p className="text-[10px] font-mono text-helios-slate mt-0.5">
                        {countdown}
                      </p>
                    ) : null;
                  })()}
                </td>
                <td className="px-6 py-4">
                  {/* Active jobs show a progress bar + ETA; finished jobs show VMAF when available. */}
                  {["encoding", "analyzing", "remuxing"].includes(job.status) ? (
                    <div className="w-24 space-y-1">
                      <div className="h-1.5 w-full bg-helios-line/10 rounded-full overflow-hidden">
                        <div className="h-full bg-helios-solar rounded-full transition-all duration-500" style={{ width: `${job.progress}%` }} />
                      </div>
                      <div className="text-xs text-right font-mono text-helios-slate">
                        {job.progress.toFixed(1)}%
                      </div>
                      {job.status === "encoding" && (() => {
                        const eta = calcEta(encodeStartTimes, job.id, job.progress);
                        return eta ? (
                          <p className="text-[10px] text-helios-slate mt-0.5 font-mono">{eta}</p>
                        ) : null;
                      })()}
                      {job.status === "encoding" && job.encoder && (
                        <span className="text-[10px] font-mono text-helios-solar opacity-70">
                          {job.encoder}
                        </span>
                      )}
                    </div>
                  ) : (
                    job.vmaf_score ? (
                      <span className="text-xs font-mono text-helios-slate">
                        VMAF: {job.vmaf_score.toFixed(1)}
                      </span>
                    ) : (
                      <span className="text-helios-slate/50">-</span>
                    )
                  )}
                </td>
                <td className="hidden md:table-cell px-6 py-4 text-xs text-helios-slate font-mono">
                  {new Date(job.updated_at).toLocaleString()}
                </td>
                {/* Action menu cell: stopPropagation so menu clicks don't open the detail view. */}
                <td className="px-6 py-4" onClick={(e) => e.stopPropagation()}>
                  {/* The shared menuRef is attached only to the row whose menu is open. */}
                  <div className="relative" ref={menuJobId === job.id ? (menuRef as React.RefObject<HTMLDivElement>) : null}>
                    <button
                      onClick={() => setMenuJobId(menuJobId === job.id ? null : job.id)}
                      className="p-2 rounded-lg border border-helios-line/20 hover:bg-helios-surface-soft text-helios-slate"
                      title="Actions"
                    >
                      <MoreHorizontal size={14} />
                    </button>
                    <AnimatePresence>
                      {menuJobId === job.id && (
                        <motion.div
                          initial={{ opacity: 0, y: 6 }}
                          animate={{ opacity: 1, y: 0 }}
                          exit={{ opacity: 0, y: 6 }}
                          className="absolute right-0 mt-2 w-44 rounded-lg border border-helios-line/20 bg-helios-surface shadow-xl z-20 overflow-hidden"
                        >
                          <button onClick={() => { setMenuJobId(null); void fetchJobDetails(job.id); }} className="w-full px-4 py-2 text-left text-xs font-semibold text-helios-ink hover:bg-helios-surface-soft">View details</button>
                          <button onClick={() => { setMenuJobId(null); void handlePriority(job, job.priority + 10, "Priority boosted"); }} className="w-full px-4 py-2 text-left text-xs font-semibold text-helios-ink hover:bg-helios-surface-soft">Boost priority (+10)</button>
                          <button onClick={() => { setMenuJobId(null); void handlePriority(job, job.priority - 10, "Priority lowered"); }} className="w-full px-4 py-2 text-left text-xs font-semibold text-helios-ink hover:bg-helios-surface-soft">Lower priority (-10)</button>
                          <button onClick={() => { setMenuJobId(null); void handlePriority(job, 0, "Priority reset"); }} className="w-full px-4 py-2 text-left text-xs font-semibold text-helios-ink hover:bg-helios-surface-soft">Reset priority</button>
                          {/* Retry only offered for terminal failed/cancelled jobs. */}
                          {(job.status === "failed" || job.status === "cancelled") && (
                            <button
                              onClick={() => { setMenuJobId(null); openConfirm({ title: "Retry job", body: "Retry this job now?", confirmLabel: "Retry", onConfirm: () => handleAction(job.id, "restart") }); }}
                              className="w-full px-4 py-2 text-left text-xs font-semibold text-helios-ink hover:bg-helios-surface-soft"
                            >
                              Retry
                            </button>
                          )}
                          {/* Cancel only offered while the job is actively being processed. */}
                          {["encoding", "analyzing", "remuxing"].includes(job.status) && (
                            <button
                              onClick={() => { setMenuJobId(null); openConfirm({ title: "Cancel job", body: "Stop this job immediately?", confirmLabel: "Cancel", confirmTone: "danger", onConfirm: () => handleAction(job.id, "cancel") }); }}
                              className="w-full px-4 py-2 text-left text-xs font-semibold text-helios-ink hover:bg-helios-surface-soft"
                            >
                              Stop / Cancel
                            </button>
                          )}
                          {/* Delete only offered for inactive jobs. */}
                          {!isJobActive(job) && (
                            <button
                              onClick={() => { setMenuJobId(null); openConfirm({ title: "Delete job", body: "Delete this job from history?", confirmLabel: "Delete", confirmTone: "danger", onConfirm: () => handleAction(job.id, "delete") }); }}
                              className="w-full px-4 py-2 text-left text-xs font-semibold text-red-500 hover:bg-red-500/5"
                            >
                              Delete
                            </button>
                          )}
                        </motion.div>
                      )}
                    </AnimatePresence>
                  </div>
                </td>
              </tr>
            ))
          )}
        </tbody>
      </table>
    </div>
  );
}

View File

@@ -0,0 +1,140 @@
import { Search, RefreshCw, ArrowDown, ArrowUp } from "lucide-react";
import { clsx, type ClassValue } from "clsx";
import { twMerge } from "tailwind-merge";
import type { RefObject } from "react";
import type React from "react";
import type { TabType, SortField } from "./types";
import { SORT_OPTIONS } from "./types";
// Combine conditional class values and let tailwind-merge resolve
// conflicting Tailwind utilities (last one wins).
function cn(...inputs: ClassValue[]) {
  const combined = clsx(inputs);
  return twMerge(combined);
}
/**
 * Props for JobsToolbar. All state is owned by the parent component; the
 * toolbar only renders the controls and reports changes via these setters.
 */
interface JobsToolbarProps {
  // Currently selected status filter tab.
  activeTab: TabType;
  setActiveTab: (tab: TabType) => void;
  // Pagination setter; the toolbar calls setPage(1) on tab/sort changes.
  setPage: (page: number) => void;
  // Live search text shared by the wide (xl) and compact search inputs.
  searchInput: string;
  setSearchInput: (s: string) => void;
  // Whether the compact (<xl) search popover is expanded.
  compactSearchOpen: boolean;
  // Accepts either a boolean or an updater function (React-setState style).
  setCompactSearchOpen: (fn: boolean | ((prev: boolean) => boolean)) => void;
  // Refs attached to the compact search container and its input;
  // presumably used by the parent for outside-click close and focus — TODO confirm.
  compactSearchRef: RefObject<HTMLDivElement | null>;
  compactSearchInputRef: RefObject<HTMLInputElement | null>;
  // Sort state for the jobs table.
  sortBy: SortField;
  setSortBy: (s: SortField) => void;
  sortDesc: boolean;
  setSortDesc: (fn: boolean | ((prev: boolean) => boolean)) => void;
  // True while a jobs fetch is in flight (spins the refresh icon).
  refreshing: boolean;
  fetchJobs: () => Promise<void>;
}
/**
 * Toolbar above the jobs table: status filter tabs, file search (inline
 * input on xl screens, expanding popover on smaller ones), sort-field
 * select, sort-direction toggle, and a manual refresh button.
 *
 * Stateless: all state arrives via props. Tab and sort changes also reset
 * pagination back to page 1.
 */
export function JobsToolbar({
  activeTab, setActiveTab, setPage,
  searchInput, setSearchInput,
  compactSearchOpen, setCompactSearchOpen, compactSearchRef, compactSearchInputRef,
  sortBy, setSortBy, sortDesc, setSortDesc,
  refreshing, fetchJobs,
}: JobsToolbarProps) {
  return (
    <div className="rounded-xl border border-helios-line/10 bg-helios-surface/50 px-3 py-3">
      {/* Status filter tabs — switching tabs resets to the first page. */}
      <div className="flex flex-wrap gap-1">
        {(["all", "active", "queued", "completed", "failed", "skipped", "archived"] as TabType[]).map((tab) => (
          <button
            key={tab}
            onClick={() => { setActiveTab(tab); setPage(1); }}
            className={cn(
              "px-3 py-1.5 rounded-md text-sm font-medium transition-all capitalize sm:px-4",
              activeTab === tab
                ? "bg-helios-surface-soft text-helios-ink shadow-sm"
                : "text-helios-slate hover:text-helios-ink"
            )}
          >
            {tab}
          </button>
        ))}
      </div>
      {/* Search + sort controls on the left, refresh + compact search on the right. */}
      <div className="mt-3 flex flex-col gap-2 sm:flex-row sm:items-center sm:justify-between">
        <div className="flex items-center gap-2 sm:min-w-0 sm:flex-1">
          {/* Wide inline search input — only rendered at xl and above. */}
          <div className="relative hidden xl:block xl:w-64">
            <Search className="absolute left-3 top-1/2 -translate-y-1/2 text-helios-slate" size={14} />
            <input
              type="text"
              placeholder="Search files..."
              value={searchInput}
              onChange={(e) => setSearchInput(e.target.value)}
              className="w-full bg-helios-surface border border-helios-line/20 rounded-lg pl-9 pr-4 py-2 text-sm text-helios-ink focus:border-helios-solar outline-none"
            />
          </div>
          {/* Sort field — changing it resets pagination. */}
          <select
            value={sortBy}
            onChange={(e) => {
              setSortBy(e.target.value as SortField);
              setPage(1);
            }}
            className="h-10 min-w-0 flex-1 rounded-lg border border-helios-line/20 bg-helios-surface px-3 text-sm text-helios-ink outline-none focus:border-helios-solar sm:flex-none sm:w-44"
          >
            {SORT_OPTIONS.map((option) => (
              <option key={option.value} value={option.value}>
                {option.label}
              </option>
            ))}
          </select>
          {/* Sort direction toggle — also resets pagination. */}
          <button
            onClick={() => {
              setSortDesc((current) => !current);
              setPage(1);
            }}
            className="flex h-10 w-10 shrink-0 items-center justify-center rounded-lg border border-helios-line/20 bg-helios-surface text-helios-ink hover:bg-helios-surface-soft"
            title={sortDesc ? "Sort descending" : "Sort ascending"}
            aria-label={sortDesc ? "Sort descending" : "Sort ascending"}
          >
            {sortDesc ? <ArrowDown size={16} /> : <ArrowUp size={16} />}
          </button>
        </div>
        <div className="flex items-center gap-2 sm:ml-auto">
          {/* Manual refresh — icon spins while `refreshing` is true. */}
          <button
            onClick={() => void fetchJobs()}
            className="flex h-10 w-10 shrink-0 items-center justify-center rounded-lg border border-helios-line/20 bg-helios-surface text-helios-ink hover:bg-helios-surface-soft"
            title="Refresh jobs"
            aria-label="Refresh jobs"
          >
            <RefreshCw size={16} className={refreshing ? "animate-spin" : undefined} />
          </button>
          {/* Compact search for <xl screens: a button toggling an overlay input.
              The popover stays open while there is non-blank search text so an
              active filter is never hidden. */}
          <div ref={compactSearchRef as React.RefObject<HTMLDivElement>} className="relative xl:hidden">
            <button
              type="button"
              onClick={() => setCompactSearchOpen((open) => (searchInput.trim() ? true : !open))}
              className="flex h-10 w-10 shrink-0 items-center justify-center rounded-lg border border-helios-line/20 bg-helios-surface text-helios-ink hover:bg-helios-surface-soft"
              title="Search files"
              aria-label="Search files"
            >
              <Search size={16} />
            </button>
            {/* Overlay input animates between collapsed (w-10, transparent,
                click-through) and expanded states via width/opacity. */}
            <div
              className={cn(
                "absolute right-0 top-0 z-20 overflow-hidden rounded-lg border border-helios-line/20 bg-helios-surface shadow-lg shadow-helios-main/20 transition-[width,opacity] duration-200 ease-out",
                compactSearchOpen
                  ? "w-[min(18rem,calc(100vw-2rem))] opacity-100"
                  : "pointer-events-none w-10 opacity-0"
              )}
            >
              <div className="flex h-10 items-center px-3">
                <Search size={16} className="shrink-0 text-helios-slate" />
                <input
                  ref={compactSearchInputRef as React.RefObject<HTMLInputElement>}
                  type="text"
                  placeholder="Search files..."
                  value={searchInput}
                  onChange={(e) => setSearchInput(e.target.value)}
                  className="ml-2 min-w-0 flex-1 bg-transparent text-sm text-helios-ink outline-none placeholder:text-helios-slate"
                />
              </div>
            </div>
          </div>
        </div>
      </div>
    </div>
  );
}

View File

@@ -0,0 +1,218 @@
// Shared types for job management components
/**
 * View-model form of a job explanation.
 * NOTE(review): structurally identical to ExplanationPayload below —
 * consider consolidating into one type or an alias; kept separate here to
 * avoid changing `extends`/declaration-merging behavior for existing callers.
 */
export interface ExplanationView {
  // Whether this explains a transcode decision or a failure.
  category: "decision" | "failure";
  // Stable identifier for the explanation — TODO confirm the value set with the API.
  code: string;
  // One-line human-readable summary.
  summary: string;
  // Longer human-readable description.
  detail: string;
  // Suggested operator action, when the server supplies one.
  operator_guidance: string | null;
  // Raw measured values backing the explanation (name -> value).
  measured: Record<string, string | number | boolean | null>;
  // Older free-text reason string — presumably kept for backward compatibility; TODO confirm.
  legacy_reason: string;
}
/**
 * Structured explanation attached to a job by the API, describing either a
 * transcode decision or a failure (see Job.decision_explanation and
 * JobDetail.failure_explanation).
 */
export interface ExplanationPayload {
  // Whether this explains a transcode decision or a failure.
  category: "decision" | "failure";
  // Stable identifier for the explanation — TODO confirm the value set with the API.
  code: string;
  // One-line human-readable summary.
  summary: string;
  // Longer human-readable description.
  detail: string;
  // Suggested operator action, when the server supplies one.
  operator_guidance: string | null;
  // Raw measured values backing the explanation (name -> value).
  measured: Record<string, string | number | boolean | null>;
  // Older free-text reason string — presumably kept for backward compatibility; TODO confirm.
  legacy_reason: string;
}
/** One transcode job row as returned by the jobs API. */
export interface Job {
  id: number;
  input_path: string;
  output_path: string;
  // Lifecycle state; values used by this UI include "queued", "analyzing",
  // "encoding", "remuxing", "resuming", "completed", "failed", "cancelled",
  // "skipped" (see isJobActive and the toolbar tabs).
  status: string;
  // Scheduling priority; the UI adjusts it in ±10 steps and resets it to 0.
  priority: number;
  // Percent complete, patched live from SSE "progress" events.
  progress: number;
  // updated_at is parsed with `new Date(...)` (see retryCountdown);
  // created_at presumably uses the same format — TODO confirm.
  created_at: string;
  updated_at: string;
  // Encode attempts so far; drives the retry backoff schedule in retryCountdown.
  attempt_count: number;
  vmaf_score?: number;
  decision_reason?: string;
  decision_explanation?: ExplanationPayload | null;
  // Encoder used for this job — TODO confirm exact values with the API.
  encoder?: string;
}
/** Probed media metadata for a job's input file. */
export interface JobMetadata {
  duration_secs: number;
  // Video codec name, e.g. as reported by the prober — TODO confirm source.
  codec_name: string;
  width: number;
  height: number;
  bit_depth?: number;
  size_bytes: number;
  // Bitrates in bits per second; either may be absent depending on what the
  // container/streams expose.
  video_bitrate_bps?: number;
  container_bitrate_bps?: number;
  fps: number;
  container: string;
  audio_codec?: string;
  audio_channels?: number;
  // e.g. SDR/HDR classification — TODO confirm value set.
  dynamic_range?: string;
}
/** Summary statistics for a completed encode. */
export interface EncodeStats {
  input_size_bytes: number;
  output_size_bytes: number;
  // Input-to-output size ratio — TODO confirm orientation (input/output vs output/input).
  compression_ratio: number;
  encode_time_seconds: number;
  // Encode speed relative to realtime — TODO confirm units.
  encode_speed: number;
  avg_bitrate_kbps: number;
  // VMAF quality score when measurement was performed.
  vmaf_score?: number;
}
/** One historical encode attempt for a job. */
export interface EncodeAttempt {
  id: number;
  // Ordinal of this attempt — presumably 1-based; TODO confirm.
  attempt_number: number;
  started_at: string | null;
  // NOTE(review): finished_at is non-nullable while started_at is nullable;
  // confirm the API never returns an in-flight attempt through this type.
  finished_at: string;
  outcome: "completed" | "failed" | "cancelled";
  // Failure details; null for successful/cancelled attempts — TODO confirm.
  failure_code: string | null;
  failure_summary: string | null;
  input_size_bytes: number | null;
  output_size_bytes: number | null;
  encode_time_seconds: number | null;
}
/** One log line attached to a job. */
export interface LogEntry {
  id: number;
  // Severity, e.g. "error" / "warn" — matched case-insensitively by logLevelClass.
  level: string;
  message: string;
  created_at: string;
}
/**
 * Full detail payload for a single job: the job row plus any metadata,
 * encode statistics, attempt history, and logs that have been persisted.
 * Sections that are not yet available arrive as null.
 */
export interface JobDetail {
  job: Job;
  metadata: JobMetadata | null;
  encode_stats: EncodeStats | null;
  encode_attempts: EncodeAttempt[] | null;
  job_logs: LogEntry[];
  job_failure_summary: string | null;
  decision_explanation: ExplanationPayload | null;
  failure_explanation: ExplanationPayload | null;
}
/** Generic API response carrying an affected-item count and a display message. */
export interface CountMessageResponse {
  count: number;
  message: string;
}
/** Configuration for the shared confirmation dialog (passed to openConfirm). */
export interface ConfirmConfig {
  title: string;
  body: string;
  // Label for the confirm button, e.g. "Retry", "Delete".
  confirmLabel: string;
  // Visual emphasis of the confirm button; default handled by the dialog — TODO confirm default.
  confirmTone?: "danger" | "primary";
  // Invoked when the user confirms; may be async.
  onConfirm: () => Promise<void> | void;
}
// Status filter tabs shown in the jobs toolbar.
export type TabType = "all" | "active" | "queued" | "completed" | "failed" | "skipped" | "archived";
// Sort keys for the jobs list — presumably accepted server-side; TODO confirm.
export type SortField = "updated_at" | "created_at" | "input_path" | "size";
// Options for the sort <select> in the toolbar, in display order.
export const SORT_OPTIONS: Array<{ value: SortField; label: string }> = [
  { value: "updated_at", label: "Last Updated" },
  { value: "created_at", label: "Date Added" },
  { value: "input_path", label: "File Name" },
  { value: "size", label: "File Size" },
];
// Pure data utilities
/** True while a worker is actively processing the job in any phase. */
export function isJobActive(job: Job): boolean {
  switch (job.status) {
    case "analyzing":
    case "encoding":
    case "remuxing":
    case "resuming":
      return true;
    default:
      return false;
  }
}
/**
 * Human-readable retry ETA for a failed job, or null when no retry label
 * applies (job not failed, or it has not attempted yet). Backoff schedule:
 * 5m, 15m, 60m, then 360m for every later attempt.
 */
export function retryCountdown(job: Job): string | null {
  if (job.status !== "failed" || !job.attempt_count) return null;
  // Backoff minutes indexed by (attempt_count - 1); anything beyond the
  // schedule falls through to 360 minutes.
  const schedule = [5, 15, 60];
  const backoffMins = schedule[job.attempt_count - 1] ?? 360;
  const retryAtMs = new Date(job.updated_at).getTime() + backoffMins * 60_000;
  const msLeft = retryAtMs - Date.now();
  if (msLeft <= 0) return "Retrying soon";
  const minsLeft = Math.ceil(msLeft / 60_000);
  if (minsLeft < 60) return `Retrying in ${minsLeft}m`;
  const hrs = Math.floor(minsLeft / 60);
  const rem = minsLeft % 60;
  return rem > 0 ? `Retrying in ${hrs}h ${rem}m` : `Retrying in ${hrs}h`;
}
/**
 * Format a byte count as a human-readable size with up to two decimals,
 * e.g. 1536 -> "1.5 KB".
 *
 * Fixes over the previous version:
 * - values above 1024 TB no longer index past the unit table ("2 undefined");
 *   they are clamped to TB.
 * - negative or non-finite input no longer produces "NaN undefined";
 *   it renders as "0 B".
 * - sub-1-byte fractional values are clamped to the "B" unit instead of
 *   indexing sizes[-1].
 */
export function formatBytes(bytes: number): string {
  if (!Number.isFinite(bytes) || bytes <= 0) return "0 B";
  const k = 1024;
  const sizes = ["B", "KB", "MB", "GB", "TB"];
  // Clamp the magnitude index into the unit table.
  const i = Math.max(0, Math.min(Math.floor(Math.log(bytes) / Math.log(k)), sizes.length - 1));
  return parseFloat((bytes / Math.pow(k, i)).toFixed(2)) + " " + sizes[i];
}
/** Render a duration in seconds as zero-padded "HH:MM:SS". */
export function formatDuration(seconds: number): string {
  const pad = (value: number) => Math.floor(value).toString().padStart(2, "0");
  return `${pad(seconds / 3600)}:${pad((seconds % 3600) / 60)}:${pad(seconds % 60)}`;
}
/**
 * Tailwind text-color class for a log entry's severity. Level matching is
 * case-insensitive; unknown levels fall back to the muted slate color.
 */
export function logLevelClass(level: string): string {
  const normalized = level.toLowerCase();
  if (normalized === "error") return "text-status-error";
  if (normalized === "warn" || normalized === "warning") return "text-helios-solar";
  return "text-helios-slate";
}
/**
 * Empty-state copy shown in the job detail view when no encode data is
 * available, chosen by job status. Unknown statuses get a generic message.
 */
export function jobDetailEmptyState(status: string): { title: string; detail: string } {
  // Map lookup instead of a switch; Map.get cannot be fooled by
  // Object.prototype keys the way a plain-object index could be.
  const copyByStatus = new Map<string, { title: string; detail: string }>([
    ["queued", {
      title: "Waiting in queue",
      detail: "This job is queued and waiting for an available worker slot.",
    }],
    ["analyzing", {
      title: "Analyzing media",
      detail: "Alchemist is reading the file metadata and planning the next action.",
    }],
    ["encoding", {
      title: "Encoding in progress",
      detail: "The transcode is running now. Detailed input metadata may appear once analysis data is fully persisted.",
    }],
    ["remuxing", {
      title: "Remuxing in progress",
      detail: "The job is copying compatible streams into the target container without re-encoding video.",
    }],
    ["resuming", {
      title: "Resuming job",
      detail: "The job is being re-queued and prepared to continue processing.",
    }],
    ["failed", {
      title: "No metadata captured",
      detail: "This job failed before Alchemist could persist complete media metadata.",
    }],
    ["skipped", {
      title: "No metadata captured",
      detail: "This file was skipped before full media metadata was stored in the job detail view.",
    }],
  ]);
  return copyByStatus.get(status) ?? {
    title: "No encode data available",
    detail: "Detailed metadata is not available for this job yet.",
  };
}

View File

@@ -0,0 +1,92 @@
import { useEffect } from "react";
import type { MutableRefObject, Dispatch, SetStateAction } from "react";
import type { Job } from "./types";
/** Dependencies injected into useJobSSE by the owning component. */
interface UseJobSSEOptions {
  // Applies live patches to the job list as SSE events arrive.
  setJobs: Dispatch<SetStateAction<Job[]>>;
  // Ref to the latest fetchJobs, so the once-mounted effect never calls a stale closure.
  fetchJobsRef: MutableRefObject<() => Promise<void>>;
  // job id -> epoch ms when the job entered "encoding". Recorded/cleared
  // here; presumably consumed by the caller for elapsed-time display — TODO confirm.
  encodeStartTimes: MutableRefObject<Map<number, number>>;
}
/**
 * Subscribe to the server-sent events stream at "/api/events" and apply
 * live updates to the jobs list.
 *
 * Handled events:
 * - "status":   patches the matching job's status in place; records the
 *               encode start time when a job enters "encoding" and clears
 *               it on any other status.
 * - "progress": patches the matching job's progress percentage.
 * - "decision": triggers a full refetch via fetchJobsRef.
 *
 * On stream error the connection is closed and reopened with exponential
 * backoff (1s base, 30s cap, ±25% jitter); the attempt counter resets once
 * a connection opens. Cleanup closes the stream and cancels any pending
 * reconnect. The effect intentionally runs once on mount — refs keep the
 * callbacks current across renders.
 */
export function useJobSSE({ setJobs, fetchJobsRef, encodeStartTimes }: UseJobSSEOptions): void {
  useEffect(() => {
    let eventSource: EventSource | null = null;
    let cancelled = false; // set on unmount so connect()/onerror stop rescheduling
    let reconnectTimeout: number | null = null;
    let reconnectAttempts = 0;
    // Exponential backoff with ±25% jitter so many clients don't reconnect in lockstep.
    const getReconnectDelay = () => {
      const baseDelay = 1000;
      const maxDelay = 30000;
      const delay = Math.min(baseDelay * Math.pow(2, reconnectAttempts), maxDelay);
      const jitter = delay * 0.25 * (Math.random() * 2 - 1);
      return Math.round(delay + jitter);
    };
    const connect = () => {
      if (cancelled) return;
      eventSource?.close(); // drop any previous half-open stream before reconnecting
      eventSource = new EventSource("/api/events");
      eventSource.onopen = () => {
        reconnectAttempts = 0; // healthy connection: restart the backoff schedule
      };
      eventSource.addEventListener("status", (e) => {
        try {
          const { job_id, status } = JSON.parse(e.data) as {
            job_id: number;
            status: string;
          };
          // Track when the job started encoding; any other status ends the window.
          if (status === "encoding") {
            encodeStartTimes.current.set(job_id, Date.now());
          } else {
            encodeStartTimes.current.delete(job_id);
          }
          setJobs((prev) =>
            prev.map((job) => job.id === job_id ? { ...job, status } : job)
          );
        } catch {
          /* ignore malformed */
        }
      });
      eventSource.addEventListener("progress", (e) => {
        try {
          const { job_id, percentage } = JSON.parse(e.data) as {
            job_id: number;
            percentage: number;
          };
          setJobs((prev) =>
            prev.map((job) => job.id === job_id ? { ...job, progress: percentage } : job)
          );
        } catch {
          /* ignore malformed */
        }
      });
      // Decisions can change rows beyond status/progress, so refetch everything.
      eventSource.addEventListener("decision", () => {
        void fetchJobsRef.current();
      });
      eventSource.onerror = () => {
        eventSource?.close();
        if (!cancelled) {
          reconnectAttempts++;
          const delay = getReconnectDelay();
          reconnectTimeout = window.setTimeout(connect, delay);
        }
      };
    };
    connect();
    // Cleanup: stop the stream and cancel any scheduled reconnect.
    return () => {
      cancelled = true;
      eventSource?.close();
      if (reconnectTimeout !== null) {
        window.clearTimeout(reconnectTimeout);
      }
    };
  }, []);
}

View File

@@ -9,24 +9,24 @@ function kindStyles(kind: ToastKind): { icon: LucideIcon; className: string } {
if (kind === "success") { if (kind === "success") {
return { return {
icon: CheckCircle2, icon: CheckCircle2,
className: "border-status-success/30 bg-status-success/10 text-status-success", className: "border-status-success/35 bg-helios-surface/95 text-status-success supports-[backdrop-filter]:bg-helios-surface/80 backdrop-blur-xl",
}; };
} }
if (kind === "error") { if (kind === "error") {
return { return {
icon: AlertCircle, icon: AlertCircle,
className: "border-status-error/30 bg-status-error/10 text-status-error", className: "border-status-error/35 bg-helios-surface/95 text-status-error supports-[backdrop-filter]:bg-helios-surface/80 backdrop-blur-xl",
}; };
} }
if (kind === "warning") { if (kind === "warning") {
return { return {
icon: AlertTriangle, icon: AlertTriangle,
className: "border-amber-500/30 bg-amber-500/10 text-amber-500", className: "border-amber-500/35 bg-helios-surface/95 text-amber-500 supports-[backdrop-filter]:bg-helios-surface/80 backdrop-blur-xl",
}; };
} }
return { return {
icon: Info, icon: Info,
className: "border-helios-line/40 bg-helios-surface text-helios-ink", className: "border-helios-line/40 bg-helios-surface/95 text-helios-ink supports-[backdrop-filter]:bg-helios-surface/80 backdrop-blur-xl",
}; };
} }
@@ -84,7 +84,7 @@ export default function ToastRegion() {
<div <div
key={toast.id} key={toast.id}
role={toast.kind === "error" ? "alert" : "status"} role={toast.kind === "error" ? "alert" : "status"}
className={`pointer-events-auto rounded-lg border p-3 shadow-xl ${className}`} className={`pointer-events-auto rounded-lg border p-3 shadow-xl shadow-black/30 ${className}`}
> >
<div className="flex items-start gap-2"> <div className="flex items-start gap-2">
<Icon size={16} /> <Icon size={16} />

View File

@@ -364,13 +364,13 @@
[data-color-profile="midnight"] { [data-color-profile="midnight"] {
--bg-main: 0 0 0; --bg-main: 0 0 0;
--bg-panel: 5 5 5; --bg-panel: 0 0 0;
--bg-elevated: 10 10 10; --bg-elevated: 0 0 0;
--accent-primary: 255 255 255; --accent-primary: 255 255 255;
--accent-secondary: 200 200 200; --accent-secondary: 200 200 200;
--text-primary: 255 255 255; --text-primary: 255 255 255;
--text-muted: 150 150 150; --text-muted: 150 150 150;
--border-subtle: 50 50 50; --border-subtle: 34 34 34;
} }
[data-color-profile="monochrome"] { [data-color-profile="monochrome"] {
@@ -475,6 +475,33 @@
z-index: -1; z-index: -1;
pointer-events: none; pointer-events: none;
} }
html[data-color-profile="midnight"] {
background: rgb(0 0 0);
}
html[data-color-profile="midnight"] body,
html[data-color-profile="midnight"] body::before,
html[data-color-profile="midnight"] .app-main {
background: rgb(0 0 0);
background-color: rgb(0 0 0);
}
/* Midnight OLED: suppress decorative accent-tinted gradient overlays so
OLED pixels stay fully off. These divs use from-helios-solar/10 which
maps to rgba(255,255,255,0.10) — a visible gray on pure-black OLED. */
html[data-color-profile="midnight"] .from-helios-solar\/10 {
--tw-gradient-from: rgb(0 0 0 / 0) var(--tw-gradient-from-position);
--tw-gradient-stops: var(--tw-gradient-from), var(--tw-gradient-to);
}
/* Midnight OLED: darken scrollbar thumb so it doesn't glow white. */
html[data-color-profile="midnight"] *::-webkit-scrollbar-thumb {
background-color: rgba(255 255 255 / 0.08);
}
html[data-color-profile="midnight"] *::-webkit-scrollbar-thumb:hover {
background-color: rgba(255 255 255 / 0.15);
}
} }
@layer components { @layer components {