chore: release v0.3.1-rc.5

This commit is contained in:
2026-04-16 11:37:48 -04:00
parent c454de6116
commit b0646e2629
44 changed files with 4481 additions and 601 deletions

View File

@@ -2,6 +2,24 @@
All notable changes to this project will be documented in this file. All notable changes to this project will be documented in this file.
## [0.3.1-rc.5] - 2026-04-16
### Reliability & Stability
- **Segment-based encode resume** — interrupted encode jobs now persist resume sessions and completed segments so restart and recovery flows can continue without discarding all completed work.
- **Notification target compatibility hardening** — notification target reads/writes now preserve the additive migration path, tolerate legacy shapes, and avoid duplicate-delete projection bugs in settings management.
- **Daily summary reliability** — summary delivery now retries safely after transient failures and avoids duplicate sends across restart boundaries by persisting the last successful day.
- **Job-detail correctness** — completed-job detail loading now fails closed on database errors instead of returning partial `200 OK` payloads, and encode stat duration fallback uses the encoded output rather than the source file.
- **Auth and settings safety** — login now returns server errors for real database failures, and duplicate notification/schedule rows no longer disappear together from a single delete action.
### Jobs & UX
- **Manual enqueue flow** — the jobs UI now supports enqueueing a single absolute file path through the same backend dedupe and output rules used by library scans.
- **Queued-job visibility** — job detail now exposes queue position and processor blocked reasons so operators can see why a queued job is not starting.
- **Attempt-history surfacing** — job detail now shows encode attempt history directly in the modal, including outcome, timing, and captured failure summary.
- **Jobs UI follow-through** — the `JobManager` refactor now ships with dedicated controller/dialog helpers and tighter SSE reconciliation so filtered tables and open detail modals stay aligned with backend truth.
- **Intelligence actions** — remux recommendations and duplicate candidates are now actionable directly from the Intelligence page.
## [0.3.1-rc.3] - 2026-04-12 ## [0.3.1-rc.3] - 2026-04-12
### New Features ### New Features

2
Cargo.lock generated
View File

@@ -13,7 +13,7 @@ dependencies = [
[[package]] [[package]]
name = "alchemist" name = "alchemist"
version = "0.3.1-rc.4" version = "0.3.1-rc.5"
dependencies = [ dependencies = [
"anyhow", "anyhow",
"argon2", "argon2",

View File

@@ -1,6 +1,6 @@
[package] [package]
name = "alchemist" name = "alchemist"
version = "0.3.1-rc.4" version = "0.3.1-rc.5"
edition = "2024" edition = "2024"
rust-version = "1.85" rust-version = "1.85"
license = "GPL-3.0" license = "GPL-3.0"

View File

@@ -30,15 +30,15 @@ Then complete the release-candidate preflight:
Promote to stable only after the RC burn-in is complete and the same automated preflight is still green. Promote to stable only after the RC burn-in is complete and the same automated preflight is still green.
1. Run `just bump 0.3.0`. 1. Run `just bump 0.3.1`.
2. Update `CHANGELOG.md` and `docs/docs/changelog.md` for the stable cut. 2. Update `CHANGELOG.md` and `docs/docs/changelog.md` for the stable cut.
3. Run `just release-check`. 3. Run `just release-check`.
4. Re-run the manual smoke checklist against the final release artifacts: 4. Re-run the manual smoke checklist against the final release artifacts:
- Docker fresh install - Docker fresh install
- Packaged binary first-run - Packaged binary first-run
- Upgrade from the most recent `0.2.x` or `0.3.0-rc.x` - Upgrade from the most recent `0.2.x` or `0.3.1-rc.x`
- Encode, skip, failure, and notification verification - Encode, skip, failure, and notification verification
5. Re-run the Windows contributor verification checklist if Windows parity changed after the last RC. 5. Re-run the Windows contributor verification checklist if Windows parity changed after the last RC.
6. Confirm release notes, docs, and hardware-support wording match the tested release state. 6. Confirm release notes, docs, and hardware-support wording match the tested release state.
7. Merge the stable release commit to `main`. 7. Merge the stable release commit to `main`.
8. Create the annotated tag `v0.3.0` on the exact merged commit. 8. Create the annotated tag `v0.3.1` on the exact merged commit.

View File

@@ -1 +1 @@
0.3.1-rc.4 0.3.1-rc.5

View File

@@ -59,37 +59,37 @@ documentation, or iteration.
- remux-only opportunities - remux-only opportunities
- wasteful audio layouts - wasteful audio layouts
- commentary/descriptive-track cleanup candidates - commentary/descriptive-track cleanup candidates
- Direct actions now exist for queueing remux recommendations and opening duplicate candidates in the shared job-detail flow
### Engine Lifecycle + Planner Docs
- Runtime drain/restart controls exist in the product surface
- Backend and Playwright lifecycle coverage now exists for the current behavior
- Planner and engine lifecycle docs are in-repo and should now be kept in sync with shipped semantics rather than treated as missing work
### Jobs UI Refactor / In Flight
- `JobManager` has been decomposed into focused jobs subcomponents and controller hooks
- SSE ownership is now centered in a dedicated hook and job-detail controller flow
- Treat the current jobs UI surface as shipping product that still needs stabilization and regression coverage, not as a future refactor candidate
--- ---
## Active Priorities ## Active Priorities
### Engine Lifecycle Controls ### `0.3.1` RC Stability Follow-Through
- Finish and harden restart/shutdown semantics from the About/header surface - Keep the current in-flight backend/frontend/test delta focused on reliability, upgrade safety, and release hardening
- Restart must reset the engine loop without re-execing the process - Expand regression coverage for resume/restart/cancel flows, job-detail refresh semantics, settings projection, and intelligence actions
- Shutdown must cancel active jobs and exit cleanly - Keep release docs, changelog entries, and support wording aligned with what the RC actually ships
- Add final backend and Playwright coverage for lifecycle transitions
### Planner and Lifecycle Documentation ### Per-File Encode History Follow-Through
- Document planner heuristics and stable skip/transcode/remux decision boundaries - Attempt history now exists in job detail, but it is still job-scoped rather than grouped by canonical file identity
- Document hardware fallback rules and backend selection semantics - Next hardening pass should make retries, reruns, and settings-driven requeues legible across a file's full history
- Document pause, drain, restart, cancel, and shutdown semantics from actual behavior - Include outcome, encode stats, and failure reason where available without regressing the existing job-detail flow
### Per-File Encode History
- Show full attempt history in job detail, grouped by canonical file identity
- Include outcome, encode stats, and failure reason where available
- Make retries, reruns, and settings-driven requeues legible
### Behavior-Preserving Refactor Pass
- Decompose `web/src/components/JobManager.tsx` without changing current behavior
- Extract shared formatting logic
- Clarify SSE vs polling ownership
- Add regression coverage before deeper structural cleanup
### AMD AV1 Validation ### AMD AV1 Validation
- Validate Linux VAAPI and Windows AMF AV1 paths on real hardware - Validate Linux VAAPI and Windows AMF AV1 paths on real hardware
- Confirm encoder selection, fallback behavior, and defaults - Confirm encoder selection, fallback behavior, and defaults
- Keep support claims conservative until validation is real - Keep support claims conservative until validation is real
- Deferred from the current `0.3.1-rc.5` automated-stability pass; do not broaden support claims before this work is complete
--- ---

3
docs/bun.lock generated
View File

@@ -24,6 +24,7 @@
}, },
}, },
"overrides": { "overrides": {
"follow-redirects": "^1.16.0",
"lodash": "^4.18.1", "lodash": "^4.18.1",
"serialize-javascript": "^7.0.5", "serialize-javascript": "^7.0.5",
}, },
@@ -1108,7 +1109,7 @@
"flat": ["flat@5.0.2", "", { "bin": { "flat": "cli.js" } }, "sha512-b6suED+5/3rTpUBdG1gupIl8MPFCAMA0QXwmljLhvCUKcUvdE4gWky9zpuGCcXHOsz4J9wPGNWq6OKpmIzz3hQ=="], "flat": ["flat@5.0.2", "", { "bin": { "flat": "cli.js" } }, "sha512-b6suED+5/3rTpUBdG1gupIl8MPFCAMA0QXwmljLhvCUKcUvdE4gWky9zpuGCcXHOsz4J9wPGNWq6OKpmIzz3hQ=="],
"follow-redirects": ["follow-redirects@1.15.11", "", {}, "sha512-deG2P0JfjrTxl50XGCDyfI97ZGVCxIpfKYmfyrQ54n5FO/0gfIES8C/Psl6kWVDolizcaaxZJnTS0QSMxvnsBQ=="], "follow-redirects": ["follow-redirects@1.16.0", "", {}, "sha512-y5rN/uOsadFT/JfYwhxRS5R7Qce+g3zG97+JrtFZlC9klX/W5hD7iiLzScI4nZqUS7DNUdhPgw4xI8W2LuXlUw=="],
"form-data-encoder": ["form-data-encoder@2.1.4", "", {}, "sha512-yDYSgNMraqvnxiEXO4hi88+YZxaHC6QKzb5N84iRCTDeRO7ZALpir/lVmf/uXUhnwUr2O4HU8s/n6x+yNjQkHw=="], "form-data-encoder": ["form-data-encoder@2.1.4", "", {}, "sha512-yDYSgNMraqvnxiEXO4hi88+YZxaHC6QKzb5N84iRCTDeRO7ZALpir/lVmf/uXUhnwUr2O4HU8s/n6x+yNjQkHw=="],

View File

@@ -3,6 +3,24 @@ title: Changelog
description: Release history for Alchemist. description: Release history for Alchemist.
--- ---
## [0.3.1-rc.5] - 2026-04-16
### Reliability & Stability
- **Segment-based encode resume** — interrupted encode jobs now persist resume sessions and completed segments so restart and recovery flows can continue without discarding all completed work.
- **Notification target compatibility hardening** — notification target reads/writes now preserve the additive migration path, tolerate legacy shapes, and avoid duplicate-delete projection bugs in settings management.
- **Daily summary reliability** — summary delivery now retries safely after transient failures and avoids duplicate sends across restart boundaries by persisting the last successful day.
- **Job-detail correctness** — completed-job detail loading now fails closed on database errors instead of returning partial `200 OK` payloads, and encode stat duration fallback uses the encoded output rather than the source file.
- **Auth and settings safety** — login now returns server errors for real database failures, and duplicate notification/schedule rows no longer disappear together from a single delete action.
### Jobs & UX
- **Manual enqueue flow** — the jobs UI now supports enqueueing a single absolute file path through the same backend dedupe and output rules used by library scans.
- **Queued-job visibility** — job detail now exposes queue position and processor blocked reasons so operators can see why a queued job is not starting.
- **Attempt-history surfacing** — job detail now shows encode attempt history directly in the modal, including outcome, timing, and captured failure summary.
- **Jobs UI follow-through** — the `JobManager` refactor now ships with dedicated controller/dialog helpers and tighter SSE reconciliation so filtered tables and open detail modals stay aligned with backend truth.
- **Intelligence actions** — remux recommendations and duplicate candidates are now actionable directly from the Intelligence page.
## [0.3.1-rc.3] - 2026-04-12 ## [0.3.1-rc.3] - 2026-04-12
### New Features ### New Features

View File

@@ -1,6 +1,6 @@
{ {
"name": "alchemist-docs", "name": "alchemist-docs",
"version": "0.3.1-rc.4", "version": "0.3.1-rc.5",
"private": true, "private": true,
"packageManager": "bun@1.3.5", "packageManager": "bun@1.3.5",
"scripts": { "scripts": {
@@ -48,6 +48,7 @@
"node": ">=20.0" "node": ">=20.0"
}, },
"overrides": { "overrides": {
"follow-redirects": "^1.16.0",
"lodash": "^4.18.1", "lodash": "^4.18.1",
"serialize-javascript": "^7.0.5" "serialize-javascript": "^7.0.5"
} }

View File

@@ -1,34 +1,25 @@
CREATE TABLE IF NOT EXISTS notification_targets_new ( ALTER TABLE notification_targets
id INTEGER PRIMARY KEY AUTOINCREMENT, ADD COLUMN target_type_v2 TEXT;
name TEXT NOT NULL,
target_type TEXT CHECK(target_type IN ('discord_webhook', 'discord_bot', 'gotify', 'webhook', 'telegram', 'email')) NOT NULL,
config_json TEXT NOT NULL DEFAULT '{}',
events TEXT NOT NULL DEFAULT '["encode.failed","encode.completed"]',
enabled BOOLEAN DEFAULT 1,
created_at DATETIME DEFAULT CURRENT_TIMESTAMP
);
INSERT INTO notification_targets_new (id, name, target_type, config_json, events, enabled, created_at) ALTER TABLE notification_targets
SELECT ADD COLUMN config_json TEXT NOT NULL DEFAULT '{}';
id,
name, UPDATE notification_targets
CASE target_type SET
target_type_v2 = CASE target_type
WHEN 'discord' THEN 'discord_webhook' WHEN 'discord' THEN 'discord_webhook'
WHEN 'gotify' THEN 'gotify' WHEN 'gotify' THEN 'gotify'
ELSE 'webhook' ELSE 'webhook'
END, END,
CASE target_type config_json = CASE target_type
WHEN 'discord' THEN json_object('webhook_url', endpoint_url) WHEN 'discord' THEN json_object('webhook_url', endpoint_url)
WHEN 'gotify' THEN json_object('server_url', endpoint_url, 'app_token', COALESCE(auth_token, '')) WHEN 'gotify' THEN json_object('server_url', endpoint_url, 'app_token', COALESCE(auth_token, ''))
ELSE json_object('url', endpoint_url, 'auth_token', auth_token) ELSE json_object('url', endpoint_url, 'auth_token', auth_token)
END, END
COALESCE(events, '["failed","completed"]'), WHERE target_type_v2 IS NULL
enabled, OR target_type_v2 = ''
created_at OR config_json IS NULL
FROM notification_targets; OR trim(config_json) = '';
DROP TABLE notification_targets;
ALTER TABLE notification_targets_new RENAME TO notification_targets;
CREATE INDEX IF NOT EXISTS idx_notification_targets_enabled CREATE INDEX IF NOT EXISTS idx_notification_targets_enabled
ON notification_targets(enabled); ON notification_targets(enabled);

View File

@@ -0,0 +1,38 @@
-- Migration: persist segment-based encode resume state.
-- One session row per job records the overall resume plan; one segment row
-- per encoded slice records per-slice progress so an interrupted encode can
-- continue without discarding completed work.
CREATE TABLE IF NOT EXISTS job_resume_sessions (
id INTEGER PRIMARY KEY AUTOINCREMENT,
-- Exactly one session per job; removed automatically when the job is deleted.
job_id INTEGER NOT NULL UNIQUE REFERENCES jobs(id) ON DELETE CASCADE,
strategy TEXT NOT NULL,
-- NOTE(review): presumably used to invalidate a stale session when the encode
-- plan or source file mtime changes — confirm against the resume logic.
plan_hash TEXT NOT NULL,
mtime_hash TEXT NOT NULL,
temp_dir TEXT NOT NULL,
concat_manifest_path TEXT NOT NULL,
segment_length_secs INTEGER NOT NULL,
status TEXT NOT NULL DEFAULT 'active',
created_at DATETIME NOT NULL DEFAULT CURRENT_TIMESTAMP,
updated_at DATETIME NOT NULL DEFAULT CURRENT_TIMESTAMP
);
CREATE TABLE IF NOT EXISTS job_resume_segments (
id INTEGER PRIMARY KEY AUTOINCREMENT,
job_id INTEGER NOT NULL REFERENCES jobs(id) ON DELETE CASCADE,
-- Position of this slice within the job; paired with job_id in the UNIQUE
-- constraint below so upserts can key on (job_id, segment_index).
segment_index INTEGER NOT NULL,
start_secs REAL NOT NULL,
duration_secs REAL NOT NULL,
temp_path TEXT NOT NULL,
status TEXT NOT NULL DEFAULT 'pending',
attempt_count INTEGER NOT NULL DEFAULT 0,
created_at DATETIME NOT NULL DEFAULT CURRENT_TIMESTAMP,
updated_at DATETIME NOT NULL DEFAULT CURRENT_TIMESTAMP,
UNIQUE(job_id, segment_index)
);
-- Indexes for the two hot lookups: sessions by status, and a job's segments
-- filtered by status.
CREATE INDEX IF NOT EXISTS idx_job_resume_sessions_status
ON job_resume_sessions(status);
CREATE INDEX IF NOT EXISTS idx_job_resume_segments_job_status
ON job_resume_segments(job_id, status);
-- Schema bookkeeping; INSERT OR REPLACE keeps a re-run of this migration
-- idempotent for these keys.
INSERT OR REPLACE INTO schema_info (key, value) VALUES
('schema_version', '9'),
('min_compatible_version', '0.2.5'),
('last_updated', datetime('now'));

View File

@@ -1,4 +1,5 @@
use crate::error::Result; use crate::error::Result;
use serde_json::Value as JsonValue;
use sqlx::Row; use sqlx::Row;
use std::collections::HashMap; use std::collections::HashMap;
use std::path::{Path, PathBuf}; use std::path::{Path, PathBuf};
@@ -6,6 +7,54 @@ use std::path::{Path, PathBuf};
use super::Db; use super::Db;
use super::types::*; use super::types::*;
/// Extract a non-empty, trimmed string value for `key` from a JSON config blob.
///
/// Returns `None` when the blob fails to parse, the key is absent, the value
/// is not a JSON string, or the trimmed value is empty.
fn notification_config_string(config_json: &str, key: &str) -> Option<String> {
    let parsed: JsonValue = serde_json::from_str(config_json).ok()?;
    let raw = parsed.get(key)?.as_str()?;
    let trimmed = raw.trim();
    if trimmed.is_empty() {
        None
    } else {
        Some(trimmed.to_string())
    }
}
/// Map a v2 notification target (type + JSON config) onto the legacy
/// (`target_type`, `endpoint_url`, `auth_token`) column triple, so rows
/// written through the new shape remain readable by code that only knows the
/// pre-migration columns.
fn notification_legacy_columns(
target_type: &str,
config_json: &str,
) -> (String, Option<String>, Option<String>) {
match target_type {
"discord_webhook" => (
"discord".to_string(),
notification_config_string(config_json, "webhook_url"),
None,
),
// No legacy URL column fits a bot token, so a fixed host placeholder is
// stored and the token rides in the auth_token slot.
"discord_bot" => (
"discord".to_string(),
Some("https://discord.com".to_string()),
notification_config_string(config_json, "bot_token"),
),
"gotify" => (
"gotify".to_string(),
notification_config_string(config_json, "server_url"),
notification_config_string(config_json, "app_token"),
),
"webhook" => (
"webhook".to_string(),
notification_config_string(config_json, "url"),
notification_config_string(config_json, "auth_token"),
),
// Telegram is folded into the legacy 'webhook' type with a fixed API host.
"telegram" => (
"webhook".to_string(),
Some("https://api.telegram.org".to_string()),
notification_config_string(config_json, "bot_token"),
),
// Email has no legacy URL/token representation; only the type is mapped.
"email" => ("webhook".to_string(), None, None),
// Unknown types pass through unchanged with no legacy endpoint data.
other => (other.to_string(), None, None),
}
}
impl Db { impl Db {
pub async fn get_watch_dirs(&self) -> Result<Vec<WatchDir>> { pub async fn get_watch_dirs(&self) -> Result<Vec<WatchDir>> {
let has_is_recursive = self.watch_dir_flags.has_is_recursive; let has_is_recursive = self.watch_dir_flags.has_is_recursive;
@@ -292,13 +341,23 @@ impl Db {
FROM watch_dirs wd FROM watch_dirs wd
JOIN library_profiles lp ON lp.id = wd.profile_id JOIN library_profiles lp ON lp.id = wd.profile_id
WHERE wd.profile_id IS NOT NULL WHERE wd.profile_id IS NOT NULL
AND (? = wd.path OR ? LIKE wd.path || '/%' OR ? LIKE wd.path || '\\%') AND (
? = wd.path
OR (
length(?) > length(wd.path)
AND (
substr(?, 1, length(wd.path) + 1) = wd.path || '/'
OR substr(?, 1, length(wd.path) + 1) = wd.path || '\\'
)
)
)
ORDER BY LENGTH(wd.path) DESC ORDER BY LENGTH(wd.path) DESC
LIMIT 1", LIMIT 1",
) )
.bind(path) .bind(path)
.bind(path) .bind(path)
.bind(path) .bind(path)
.bind(path)
.fetch_optional(&self.pool) .fetch_optional(&self.pool)
.await?; .await?;
@@ -359,11 +418,43 @@ impl Db {
} }
pub async fn get_notification_targets(&self) -> Result<Vec<NotificationTarget>> { pub async fn get_notification_targets(&self) -> Result<Vec<NotificationTarget>> {
let targets = sqlx::query_as::<_, NotificationTarget>( let flags = &self.notification_target_flags;
"SELECT id, name, target_type, config_json, events, enabled, created_at FROM notification_targets", let targets = if flags.has_target_type_v2 {
) sqlx::query_as::<_, NotificationTarget>(
"SELECT
id,
name,
COALESCE(
NULLIF(target_type_v2, ''),
CASE target_type
WHEN 'discord' THEN 'discord_webhook'
WHEN 'gotify' THEN 'gotify'
ELSE 'webhook'
END
) AS target_type,
CASE
WHEN trim(config_json) != '' THEN config_json
WHEN target_type = 'discord' THEN json_object('webhook_url', endpoint_url)
WHEN target_type = 'gotify' THEN json_object('server_url', endpoint_url, 'app_token', COALESCE(auth_token, ''))
ELSE json_object('url', endpoint_url, 'auth_token', auth_token)
END AS config_json,
events,
enabled,
created_at
FROM notification_targets
ORDER BY id ASC",
)
.fetch_all(&self.pool) .fetch_all(&self.pool)
.await?; .await?
} else {
sqlx::query_as::<_, NotificationTarget>(
"SELECT id, name, target_type, config_json, events, enabled, created_at
FROM notification_targets
ORDER BY id ASC",
)
.fetch_all(&self.pool)
.await?
};
Ok(targets) Ok(targets)
} }
@@ -375,18 +466,42 @@ impl Db {
events: &str, events: &str,
enabled: bool, enabled: bool,
) -> Result<NotificationTarget> { ) -> Result<NotificationTarget> {
let row = sqlx::query_as::<_, NotificationTarget>( let flags = &self.notification_target_flags;
"INSERT INTO notification_targets (name, target_type, config_json, events, enabled) if flags.has_target_type_v2 {
VALUES (?, ?, ?, ?, ?) RETURNING *", let (legacy_target_type, endpoint_url, auth_token) =
) notification_legacy_columns(target_type, config_json);
.bind(name) let result = sqlx::query(
.bind(target_type) "INSERT INTO notification_targets
.bind(config_json) (name, target_type, target_type_v2, endpoint_url, auth_token, config_json, events, enabled)
.bind(events) VALUES (?, ?, ?, ?, ?, ?, ?, ?)",
.bind(enabled) )
.fetch_one(&self.pool) .bind(name)
.await?; .bind(legacy_target_type)
Ok(row) .bind(target_type)
.bind(endpoint_url)
.bind(auth_token)
.bind(config_json)
.bind(events)
.bind(enabled)
.execute(&self.pool)
.await?;
self.get_notification_target_by_id(result.last_insert_rowid())
.await
} else {
let result = sqlx::query(
"INSERT INTO notification_targets (name, target_type, config_json, events, enabled)
VALUES (?, ?, ?, ?, ?)",
)
.bind(name)
.bind(target_type)
.bind(config_json)
.bind(events)
.bind(enabled)
.execute(&self.pool)
.await?;
self.get_notification_target_by_id(result.last_insert_rowid())
.await
}
} }
pub async fn delete_notification_target(&self, id: i64) -> Result<()> { pub async fn delete_notification_target(&self, id: i64) -> Result<()> {
@@ -406,30 +521,97 @@ impl Db {
&self, &self,
targets: &[crate::config::NotificationTargetConfig], targets: &[crate::config::NotificationTargetConfig],
) -> Result<()> { ) -> Result<()> {
let flags = &self.notification_target_flags;
let mut tx = self.pool.begin().await?; let mut tx = self.pool.begin().await?;
sqlx::query("DELETE FROM notification_targets") sqlx::query("DELETE FROM notification_targets")
.execute(&mut *tx) .execute(&mut *tx)
.await?; .await?;
for target in targets { for target in targets {
sqlx::query( let config_json = target.config_json.to_string();
"INSERT INTO notification_targets (name, target_type, config_json, events, enabled) VALUES (?, ?, ?, ?, ?)", let events = serde_json::to_string(&target.events).unwrap_or_else(|_| "[]".to_string());
) if flags.has_target_type_v2 {
.bind(&target.name) let (legacy_target_type, endpoint_url, auth_token) =
.bind(&target.target_type) notification_legacy_columns(&target.target_type, &config_json);
.bind(target.config_json.to_string()) sqlx::query(
.bind(serde_json::to_string(&target.events).unwrap_or_else(|_| "[]".to_string())) "INSERT INTO notification_targets
.bind(target.enabled) (name, target_type, target_type_v2, endpoint_url, auth_token, config_json, events, enabled)
.execute(&mut *tx) VALUES (?, ?, ?, ?, ?, ?, ?, ?)",
.await?; )
.bind(&target.name)
.bind(legacy_target_type)
.bind(&target.target_type)
.bind(endpoint_url)
.bind(auth_token)
.bind(&config_json)
.bind(&events)
.bind(target.enabled)
.execute(&mut *tx)
.await?;
} else {
sqlx::query(
"INSERT INTO notification_targets (name, target_type, config_json, events, enabled) VALUES (?, ?, ?, ?, ?)",
)
.bind(&target.name)
.bind(&target.target_type)
.bind(&config_json)
.bind(&events)
.bind(target.enabled)
.execute(&mut *tx)
.await?;
}
} }
tx.commit().await?; tx.commit().await?;
Ok(()) Ok(())
} }
/// Fetch a single notification target row by id.
///
/// When the additive `target_type_v2` column exists, the row is projected
/// through the same COALESCE/CASE compatibility mapping used elsewhere:
/// legacy rows (old `target_type` values, config spread across
/// `endpoint_url` / `auth_token`) are returned in the v2 shape. On an
/// un-migrated schema the legacy columns are read directly.
async fn get_notification_target_by_id(&self, id: i64) -> Result<NotificationTarget> {
let flags = &self.notification_target_flags;
let row = if flags.has_target_type_v2 {
sqlx::query_as::<_, NotificationTarget>(
"SELECT
id,
name,
COALESCE(
NULLIF(target_type_v2, ''),
CASE target_type
WHEN 'discord' THEN 'discord_webhook'
WHEN 'gotify' THEN 'gotify'
ELSE 'webhook'
END
) AS target_type,
CASE
WHEN trim(config_json) != '' THEN config_json
WHEN target_type = 'discord' THEN json_object('webhook_url', endpoint_url)
WHEN target_type = 'gotify' THEN json_object('server_url', endpoint_url, 'app_token', COALESCE(auth_token, ''))
ELSE json_object('url', endpoint_url, 'auth_token', auth_token)
END AS config_json,
events,
enabled,
created_at
FROM notification_targets
WHERE id = ?",
)
.bind(id)
.fetch_one(&self.pool)
.await?
} else {
sqlx::query_as::<_, NotificationTarget>(
"SELECT id, name, target_type, config_json, events, enabled, created_at
FROM notification_targets
WHERE id = ?",
)
.bind(id)
.fetch_one(&self.pool)
.await?
};
Ok(row)
}
pub async fn get_schedule_windows(&self) -> Result<Vec<ScheduleWindow>> { pub async fn get_schedule_windows(&self) -> Result<Vec<ScheduleWindow>> {
let windows = sqlx::query_as::<_, ScheduleWindow>("SELECT * FROM schedule_windows") let windows =
.fetch_all(&self.pool) sqlx::query_as::<_, ScheduleWindow>("SELECT * FROM schedule_windows ORDER BY id ASC")
.await?; .fetch_all(&self.pool)
.await?;
Ok(windows) Ok(windows)
} }
@@ -582,3 +764,101 @@ impl Db {
Ok(()) Ok(())
} }
} }
// Regression tests for watch-dir profile lookup: the path-prefix match must
// treat '_' and '%' in stored watch-dir paths as literal characters (not LIKE
// wildcards), and must still prefer the longest matching watch dir.
#[cfg(test)]
mod tests {
use super::*;
// Build a unique temp-file path for an isolated SQLite database per test.
fn temp_db_path(prefix: &str) -> PathBuf {
let mut path = std::env::temp_dir();
path.push(format!("{prefix}_{}.db", rand::random::<u64>()));
path
}
// Minimal valid profile payload; only the name varies between tests.
fn sample_profile(name: &str) -> NewLibraryProfile {
NewLibraryProfile {
name: name.to_string(),
preset: "balanced".to_string(),
codec: "av1".to_string(),
quality_profile: "balanced".to_string(),
hdr_mode: "preserve".to_string(),
audio_mode: "copy".to_string(),
crf_override: None,
notes: None,
}
}
#[tokio::test]
async fn profile_lookup_treats_percent_and_underscore_as_literals() -> anyhow::Result<()> {
let db_path = temp_db_path("alchemist_profile_lookup_literals");
let db = Db::new(db_path.to_string_lossy().as_ref()).await?;
let underscore_profile = db.create_profile(sample_profile("underscore")).await?;
let percent_profile = db.create_profile(sample_profile("percent")).await?;
let underscore_watch = db.add_watch_dir("/media/TV_4K", true).await?;
db.assign_profile_to_watch_dir(underscore_watch.id, Some(underscore_profile))
.await?;
let percent_watch = db.add_watch_dir("/media/Movies%20", true).await?;
db.assign_profile_to_watch_dir(percent_watch.id, Some(percent_profile))
.await?;
// Exact prefix under "/media/TV_4K" must match.
assert_eq!(
db.get_profile_for_path("/media/TV_4K/show/file.mkv")
.await?
.map(|profile| profile.name),
Some("underscore".to_string())
);
// "TVA4K" would match "TV_4K" if '_' were a LIKE wildcard; it must not.
assert_eq!(
db.get_profile_for_path("/media/TVA4K/show/file.mkv")
.await?
.map(|profile| profile.name),
None
);
assert_eq!(
db.get_profile_for_path("/media/Movies%20/title/file.mkv")
.await?
.map(|profile| profile.name),
Some("percent".to_string())
);
// "MoviesABCD" would match "Movies%20" if '%' were a LIKE wildcard.
assert_eq!(
db.get_profile_for_path("/media/MoviesABCD/title/file.mkv")
.await?
.map(|profile| profile.name),
None
);
db.pool.close().await;
let _ = std::fs::remove_file(db_path);
Ok(())
}
#[tokio::test]
async fn profile_lookup_prefers_longest_literal_matching_watch_dir() -> anyhow::Result<()> {
let db_path = temp_db_path("alchemist_profile_lookup_longest");
let db = Db::new(db_path.to_string_lossy().as_ref()).await?;
let base_profile = db.create_profile(sample_profile("base")).await?;
let nested_profile = db.create_profile(sample_profile("nested")).await?;
let base_watch = db.add_watch_dir("/media", true).await?;
db.assign_profile_to_watch_dir(base_watch.id, Some(base_profile))
.await?;
let nested_watch = db.add_watch_dir("/media/TV_4K", true).await?;
db.assign_profile_to_watch_dir(nested_watch.id, Some(nested_profile))
.await?;
// Both "/media" and "/media/TV_4K" match; the longer path must win.
assert_eq!(
db.get_profile_for_path("/media/TV_4K/show/file.mkv")
.await?
.map(|profile| profile.name),
Some("nested".to_string())
);
db.pool.close().await;
let _ = std::fs::remove_file(db_path);
Ok(())
}
}

View File

@@ -662,6 +662,166 @@ impl Db {
Ok(Some((pos + 1) as u32)) Ok(Some((pos + 1) as u32))
} }
/// Load the persisted resume session for `job_id`, if one exists
/// (at most one per job — `job_id` is UNIQUE in the table).
pub async fn get_resume_session(&self, job_id: i64) -> Result<Option<JobResumeSession>> {
let session = sqlx::query_as::<_, JobResumeSession>(
"SELECT id, job_id, strategy, plan_hash, mtime_hash, temp_dir,
concat_manifest_path, segment_length_secs, status, created_at, updated_at
FROM job_resume_sessions
WHERE job_id = ?",
)
.bind(job_id)
.fetch_optional(&self.pool)
.await?;
Ok(session)
}
/// Load resume sessions for a batch of job ids with a single
/// `WHERE job_id IN (...)` query.
///
/// An empty id slice short-circuits to an empty vec without touching the
/// pool (and avoids building an invalid `IN ()` clause).
pub async fn get_resume_sessions_by_job_ids(
&self,
ids: &[i64],
) -> Result<Vec<JobResumeSession>> {
if ids.is_empty() {
return Ok(Vec::new());
}
let mut qb = sqlx::QueryBuilder::<sqlx::Sqlite>::new(
"SELECT id, job_id, strategy, plan_hash, mtime_hash, temp_dir,
concat_manifest_path, segment_length_secs, status, created_at, updated_at
FROM job_resume_sessions
WHERE job_id IN (",
);
// Bind each id as a placeholder rather than formatting it into the SQL.
let mut separated = qb.separated(", ");
for id in ids {
separated.push_bind(id);
}
separated.push_unseparated(")");
let sessions = qb
.build_query_as::<JobResumeSession>()
.fetch_all(&self.pool)
.await?;
Ok(sessions)
}
/// Insert or update the resume session for `input.job_id` (keyed on the
/// UNIQUE `job_id` column) and return the stored row.
///
/// On conflict every mutable column is overwritten and `updated_at` is
/// refreshed; `created_at` is left untouched.
pub async fn upsert_resume_session(
&self,
input: &UpsertJobResumeSessionInput,
) -> Result<JobResumeSession> {
let session = sqlx::query_as::<_, JobResumeSession>(
"INSERT INTO job_resume_sessions
(job_id, strategy, plan_hash, mtime_hash, temp_dir,
concat_manifest_path, segment_length_secs, status)
VALUES (?, ?, ?, ?, ?, ?, ?, ?)
ON CONFLICT(job_id) DO UPDATE SET
strategy = excluded.strategy,
plan_hash = excluded.plan_hash,
mtime_hash = excluded.mtime_hash,
temp_dir = excluded.temp_dir,
concat_manifest_path = excluded.concat_manifest_path,
segment_length_secs = excluded.segment_length_secs,
status = excluded.status,
updated_at = CURRENT_TIMESTAMP
RETURNING id, job_id, strategy, plan_hash, mtime_hash, temp_dir,
concat_manifest_path, segment_length_secs, status, created_at, updated_at",
)
.bind(input.job_id)
.bind(&input.strategy)
.bind(&input.plan_hash)
.bind(&input.mtime_hash)
.bind(&input.temp_dir)
.bind(&input.concat_manifest_path)
.bind(input.segment_length_secs)
.bind(&input.status)
.fetch_one(&self.pool)
.await?;
Ok(session)
}
/// Delete the resume session for `job_id`. A no-op if none exists.
pub async fn delete_resume_session(&self, job_id: i64) -> Result<()> {
sqlx::query("DELETE FROM job_resume_sessions WHERE job_id = ?")
.bind(job_id)
.execute(&self.pool)
.await?;
Ok(())
}
/// List all resume segments recorded for `job_id`, ordered by their
/// position in the encode (`segment_index` ascending).
pub async fn list_resume_segments(&self, job_id: i64) -> Result<Vec<JobResumeSegment>> {
let segments = sqlx::query_as::<_, JobResumeSegment>(
"SELECT id, job_id, segment_index, start_secs, duration_secs,
temp_path, status, attempt_count, created_at, updated_at
FROM job_resume_segments
WHERE job_id = ?
ORDER BY segment_index ASC",
)
.bind(job_id)
.fetch_all(&self.pool)
.await?;
Ok(segments)
}
/// Insert or update one resume segment, keyed on the UNIQUE
/// `(job_id, segment_index)` pair, and return the stored row.
///
/// On conflict the slice bounds, temp path, status, and attempt count are
/// overwritten and `updated_at` is refreshed; `created_at` is preserved.
pub async fn upsert_resume_segment(
&self,
input: &UpsertJobResumeSegmentInput,
) -> Result<JobResumeSegment> {
let segment = sqlx::query_as::<_, JobResumeSegment>(
"INSERT INTO job_resume_segments
(job_id, segment_index, start_secs, duration_secs, temp_path, status, attempt_count)
VALUES (?, ?, ?, ?, ?, ?, ?)
ON CONFLICT(job_id, segment_index) DO UPDATE SET
start_secs = excluded.start_secs,
duration_secs = excluded.duration_secs,
temp_path = excluded.temp_path,
status = excluded.status,
attempt_count = excluded.attempt_count,
updated_at = CURRENT_TIMESTAMP
RETURNING id, job_id, segment_index, start_secs, duration_secs,
temp_path, status, attempt_count, created_at, updated_at",
)
.bind(input.job_id)
.bind(input.segment_index)
.bind(input.start_secs)
.bind(input.duration_secs)
.bind(&input.temp_path)
.bind(&input.status)
.bind(input.attempt_count)
.fetch_one(&self.pool)
.await?;
Ok(segment)
}
/// Update the status and attempt count of one segment, identified by
/// `(job_id, segment_index)`, refreshing `updated_at`.
///
/// A no-op if no matching segment row exists (the UPDATE simply affects
/// zero rows).
pub async fn set_resume_segment_status(
&self,
job_id: i64,
segment_index: i64,
status: &str,
attempt_count: i32,
) -> Result<()> {
sqlx::query(
"UPDATE job_resume_segments
SET status = ?, attempt_count = ?, updated_at = CURRENT_TIMESTAMP
WHERE job_id = ? AND segment_index = ?",
)
.bind(status)
.bind(attempt_count)
.bind(job_id)
.bind(segment_index)
.execute(&self.pool)
.await?;
Ok(())
}
/// Total encoded duration (seconds) across this job's 'completed' segments.
///
/// SQL SUM over zero rows yields NULL, so the optional scalar is mapped to
/// 0.0 when no segment has completed yet.
pub async fn completed_resume_duration_secs(&self, job_id: i64) -> Result<f64> {
let duration = sqlx::query_scalar::<_, Option<f64>>(
"SELECT SUM(duration_secs)
FROM job_resume_segments
WHERE job_id = ? AND status = 'completed'",
)
.bind(job_id)
.fetch_one(&self.pool)
.await?
.unwrap_or(0.0);
Ok(duration)
}
/// Returns all jobs in queued or failed state that need /// Returns all jobs in queued or failed state that need
/// analysis. Used by the startup auto-analyzer. /// analysis. Used by the startup auto-analyzer.
pub async fn get_jobs_for_analysis(&self) -> Result<Vec<Job>> { pub async fn get_jobs_for_analysis(&self) -> Result<Vec<Job>> {

View File

@@ -43,10 +43,16 @@ pub(crate) struct WatchDirSchemaFlags {
has_profile_id: bool, has_profile_id: bool,
} }
/// Schema-capability flags for the `notification_targets` table, probed once
/// at startup so reads/writes can branch between the legacy and additive-v2
/// column layouts.
#[derive(Clone, Debug)]
pub(crate) struct NotificationTargetSchemaFlags {
// True when the additive `target_type_v2` column exists in this database.
has_target_type_v2: bool,
}
#[derive(Clone, Debug)] #[derive(Clone, Debug)]
pub struct Db { pub struct Db {
pub(crate) pool: SqlitePool, pub(crate) pool: SqlitePool,
pub(crate) watch_dir_flags: std::sync::Arc<WatchDirSchemaFlags>, pub(crate) watch_dir_flags: std::sync::Arc<WatchDirSchemaFlags>,
pub(crate) notification_target_flags: std::sync::Arc<NotificationTargetSchemaFlags>,
} }
impl Db { impl Db {
@@ -102,9 +108,28 @@ impl Db {
has_profile_id: check("profile_id").await, has_profile_id: check("profile_id").await,
}; };
let notification_check = |column: &str| {
let pool = pool.clone();
let column = column.to_string();
async move {
let row = sqlx::query(
"SELECT name FROM pragma_table_info('notification_targets') WHERE name = ?",
)
.bind(&column)
.fetch_optional(&pool)
.await
.unwrap_or(None);
row.is_some()
}
};
let notification_target_flags = NotificationTargetSchemaFlags {
has_target_type_v2: notification_check("target_type_v2").await,
};
Ok(Self { Ok(Self {
pool, pool,
watch_dir_flags: std::sync::Arc::new(watch_dir_flags), watch_dir_flags: std::sync::Arc::new(watch_dir_flags),
notification_target_flags: std::sync::Arc::new(notification_target_flags),
}) })
} }
} }

View File

@@ -238,6 +238,58 @@ pub struct ConversionJob {
pub updated_at: String, pub updated_at: String,
} }
/// Row of `job_resume_sessions`: one persisted resume session per interrupted
/// encode job, recording how the encode was segmented so completed work can
/// be reused after a restart instead of being discarded.
#[derive(Debug, Serialize, Deserialize, Clone, sqlx::FromRow)]
pub struct JobResumeSession {
    pub id: i64,
    // Owning conversion job.
    pub job_id: i64,
    // Resume strategy identifier — presumably selects the segmenting scheme;
    // verify allowed values against the encode pipeline.
    pub strategy: String,
    // Hash of the transcode plan — presumably used to invalidate the session
    // if the plan changes; TODO confirm in resume-validation logic.
    pub plan_hash: String,
    // Hash derived from source mtime — presumably guards against the input
    // file changing between runs; TODO confirm.
    pub mtime_hash: String,
    // Directory holding the encoded segment files for this session.
    pub temp_dir: String,
    // Path of the concat manifest used to stitch segments together.
    pub concat_manifest_path: String,
    // Nominal length of each segment, in whole seconds.
    pub segment_length_secs: i64,
    // Session lifecycle state (string values defined by the resume workflow).
    pub status: String,
    pub created_at: String,
    pub updated_at: String,
}
/// Row of `job_resume_segments`: one encoded slice of a resumable job.
/// Completed segments are summed by `completed_resume_duration_secs` and
/// reused on restart.
#[derive(Debug, Serialize, Deserialize, Clone, sqlx::FromRow)]
pub struct JobResumeSegment {
    pub id: i64,
    // Owning conversion job (pairs with segment_index as the logical key).
    pub job_id: i64,
    // Zero-or-one-based position of this slice within the job — verify base
    // against the segment planner.
    pub segment_index: i64,
    // Clip window within the source, in seconds.
    pub start_secs: f64,
    pub duration_secs: f64,
    // On-disk location of this segment's encoded output.
    pub temp_path: String,
    // Segment lifecycle state; 'completed' marks reusable work.
    pub status: String,
    // Number of encode attempts consumed so far for this segment.
    pub attempt_count: i32,
    pub created_at: String,
    pub updated_at: String,
}
/// Input for creating-or-updating a `job_resume_sessions` row. Mirrors
/// `JobResumeSession` minus the database-assigned fields (`id`,
/// `created_at`, `updated_at`).
#[derive(Debug, Clone)]
pub struct UpsertJobResumeSessionInput {
    pub job_id: i64,
    pub strategy: String,
    pub plan_hash: String,
    pub mtime_hash: String,
    pub temp_dir: String,
    pub concat_manifest_path: String,
    pub segment_length_secs: i64,
    pub status: String,
}
/// Input for creating-or-updating a `job_resume_segments` row. Mirrors
/// `JobResumeSegment` minus the database-assigned fields (`id`,
/// `created_at`, `updated_at`).
#[derive(Debug, Clone)]
pub struct UpsertJobResumeSegmentInput {
    pub job_id: i64,
    pub segment_index: i64,
    pub start_secs: f64,
    pub duration_secs: f64,
    pub temp_path: String,
    pub status: String,
    pub attempt_count: i32,
}
#[derive(Debug, Serialize, Deserialize, Clone, sqlx::FromRow)] #[derive(Debug, Serialize, Deserialize, Clone, sqlx::FromRow)]
pub struct ScheduleWindow { pub struct ScheduleWindow {
pub id: i64, pub id: i64,

View File

@@ -321,6 +321,11 @@ async fn run() -> Result<()> {
Ok(count) if count > 0 => { Ok(count) if count > 0 => {
warn!("{} interrupted jobs reset to queued", count); warn!("{} interrupted jobs reset to queued", count);
for job in interrupted_jobs { for job in interrupted_jobs {
let has_resume_session =
db.get_resume_session(job.id).await.ok().flatten().is_some();
if has_resume_session {
continue;
}
let temp_path = orphaned_temp_output_path(&job.output_path); let temp_path = orphaned_temp_output_path(&job.output_path);
if std::fs::metadata(&temp_path).is_ok() { if std::fs::metadata(&temp_path).is_ok() {
match std::fs::remove_file(&temp_path) { match std::fs::remove_file(&temp_path) {

View File

@@ -154,6 +154,8 @@ impl Executor for FfmpegExecutor {
metadata: &analysis.metadata, metadata: &analysis.metadata,
plan, plan,
observer: Some(observer.clone()), observer: Some(observer.clone()),
clip_start_seconds: None,
clip_duration_seconds: None,
}) })
.await?; .await?;
@@ -171,6 +173,8 @@ impl Executor for FfmpegExecutor {
metadata: &analysis.metadata, metadata: &analysis.metadata,
plan, plan,
observer: Some(observer), observer: Some(observer),
clip_start_seconds: None,
clip_duration_seconds: None,
}) })
.await?; .await?;
} }
@@ -251,7 +255,7 @@ impl Executor for FfmpegExecutor {
} }
} }
fn output_codec_from_name(codec: &str) -> Option<crate::config::OutputCodec> { pub(crate) fn output_codec_from_name(codec: &str) -> Option<crate::config::OutputCodec> {
if codec.eq_ignore_ascii_case("av1") { if codec.eq_ignore_ascii_case("av1") {
Some(crate::config::OutputCodec::Av1) Some(crate::config::OutputCodec::Av1)
} else if codec.eq_ignore_ascii_case("hevc") || codec.eq_ignore_ascii_case("h265") { } else if codec.eq_ignore_ascii_case("hevc") || codec.eq_ignore_ascii_case("h265") {
@@ -263,7 +267,10 @@ fn output_codec_from_name(codec: &str) -> Option<crate::config::OutputCodec> {
} }
} }
fn encoder_tag_matches(requested: crate::media::pipeline::Encoder, encoder_tag: &str) -> bool { pub(crate) fn encoder_tag_matches(
requested: crate::media::pipeline::Encoder,
encoder_tag: &str,
) -> bool {
let tag = encoder_tag.to_ascii_lowercase(); let tag = encoder_tag.to_ascii_lowercase();
let expected_markers: &[&str] = match requested { let expected_markers: &[&str] = match requested {
crate::media::pipeline::Encoder::Av1Qsv crate::media::pipeline::Encoder::Av1Qsv

View File

@@ -135,6 +135,8 @@ pub struct FFmpegCommandBuilder<'a> {
metadata: &'a crate::media::pipeline::MediaMetadata, metadata: &'a crate::media::pipeline::MediaMetadata,
plan: &'a TranscodePlan, plan: &'a TranscodePlan,
hw_info: Option<&'a HardwareInfo>, hw_info: Option<&'a HardwareInfo>,
clip_start_seconds: Option<f64>,
clip_duration_seconds: Option<f64>,
} }
impl<'a> FFmpegCommandBuilder<'a> { impl<'a> FFmpegCommandBuilder<'a> {
@@ -150,6 +152,8 @@ impl<'a> FFmpegCommandBuilder<'a> {
metadata, metadata,
plan, plan,
hw_info: None, hw_info: None,
clip_start_seconds: None,
clip_duration_seconds: None,
} }
} }
@@ -158,6 +162,16 @@ impl<'a> FFmpegCommandBuilder<'a> {
self self
} }
    /// Restricts the encode to a clip window of the source: the start becomes
    /// an output-side `-ss` seek and the duration a `-t` limit (both emitted
    /// after `-i`, see `build_args`). Either bound may be `None` to leave it
    /// unset. Used by segment-based resume to encode one slice at a time.
    pub fn with_clip(
        mut self,
        clip_start_seconds: Option<f64>,
        clip_duration_seconds: Option<f64>,
    ) -> Self {
        self.clip_start_seconds = clip_start_seconds;
        self.clip_duration_seconds = clip_duration_seconds;
        self
    }
pub fn build(self) -> Result<tokio::process::Command> { pub fn build(self) -> Result<tokio::process::Command> {
let args = self.build_args()?; let args = self.build_args()?;
let mut cmd = tokio::process::Command::new("ffmpeg"); let mut cmd = tokio::process::Command::new("ffmpeg");
@@ -189,14 +203,23 @@ impl<'a> FFmpegCommandBuilder<'a> {
"-nostats".to_string(), "-nostats".to_string(),
"-progress".to_string(), "-progress".to_string(),
"pipe:2".to_string(), "pipe:2".to_string(),
"-i".to_string(),
self.input.display().to_string(),
"-map_metadata".to_string(),
"0".to_string(),
"-map".to_string(),
"0:v:0".to_string(),
]; ];
args.push("-i".to_string());
args.push(self.input.display().to_string());
if let Some(clip_start_seconds) = self.clip_start_seconds {
args.push("-ss".to_string());
args.push(format!("{clip_start_seconds:.3}"));
}
if let Some(clip_duration_seconds) = self.clip_duration_seconds {
args.push("-t".to_string());
args.push(format!("{clip_duration_seconds:.3}"));
}
args.push("-map_metadata".to_string());
args.push("0".to_string());
args.push("-map".to_string());
args.push("0:v:0".to_string());
if !matches!(self.plan.audio, AudioStreamPlan::Drop) { if !matches!(self.plan.audio, AudioStreamPlan::Drop) {
match &self.plan.audio_stream_indices { match &self.plan.audio_stream_indices {
None => { None => {
@@ -1039,6 +1062,30 @@ mod tests {
assert!(args.iter().any(|arg| arg.contains("format=nv12,hwupload"))); assert!(args.iter().any(|arg| arg.contains("format=nv12,hwupload")));
} }
#[test]
fn vaapi_cq_mode_sets_inverted_global_quality() {
let metadata = metadata();
let mut plan = plan_for(Encoder::HevcVaapi);
plan.rate_control = Some(RateControl::Cq { value: 23 });
let mut info = hw_info("/dev/dri/renderD128");
info.vendor = crate::system::hardware::Vendor::Amd;
let builder = FFmpegCommandBuilder::new(
Path::new("/tmp/in.mkv"),
Path::new("/tmp/out.mkv"),
&metadata,
&plan,
)
.with_hardware(Some(&info));
let args = builder
.build_args()
.unwrap_or_else(|err| panic!("failed to build vaapi cq args: {err}"));
let quality_index = args
.iter()
.position(|arg| arg == "-global_quality")
.unwrap_or_else(|| panic!("missing -global_quality"));
assert_eq!(args.get(quality_index + 1).map(String::as_str), Some("77"));
}
#[test] #[test]
fn command_args_cover_videotoolbox_backend() { fn command_args_cover_videotoolbox_backend() {
let metadata = metadata(); let metadata = metadata();
@@ -1148,6 +1195,42 @@ mod tests {
assert!(args.contains(&"hevc_amf".to_string())); assert!(args.contains(&"hevc_amf".to_string()));
} }
#[test]
fn amf_cq_mode_sets_cqp_flags() {
let metadata = metadata();
let mut plan = plan_for(Encoder::HevcAmf);
plan.rate_control = Some(RateControl::Cq { value: 19 });
let builder = FFmpegCommandBuilder::new(
Path::new("/tmp/in.mkv"),
Path::new("/tmp/out.mkv"),
&metadata,
&plan,
);
let args = builder
.build_args()
.unwrap_or_else(|err| panic!("failed to build amf cq args: {err}"));
assert!(args.windows(2).any(|window| window == ["-rc", "cqp"]));
assert!(args.windows(2).any(|window| window == ["-qp_i", "19"]));
assert!(args.windows(2).any(|window| window == ["-qp_p", "19"]));
}
#[test]
fn clip_window_adds_trim_arguments() {
let metadata = metadata();
let plan = plan_for(Encoder::H264X264);
let args = FFmpegCommandBuilder::new(
Path::new("/tmp/in.mkv"),
Path::new("/tmp/out.mkv"),
&metadata,
&plan,
)
.with_clip(Some(12.5), Some(8.0))
.build_args()
.unwrap_or_else(|err| panic!("failed to build clipped args: {err}"));
assert!(args.windows(2).any(|window| window == ["-ss", "12.500"]));
assert!(args.windows(2).any(|window| window == ["-t", "8.000"]));
}
#[test] #[test]
fn mp4_audio_transcode_uses_aac_profile() { fn mp4_audio_transcode_uses_aac_profile() {
let mut plan = plan_for(Encoder::H264X264); let mut plan = plan_for(Encoder::H264X264);

File diff suppressed because it is too large Load Diff

View File

@@ -16,6 +16,7 @@ use tokio::sync::{Mutex, RwLock};
use tracing::{error, warn}; use tracing::{error, warn};
type NotificationResult<T> = Result<T, Box<dyn std::error::Error + Send + Sync>>; type NotificationResult<T> = Result<T, Box<dyn std::error::Error + Send + Sync>>;
const DAILY_SUMMARY_LAST_SUCCESS_KEY: &str = "notifications.daily_summary.last_success_date";
#[derive(Clone)] #[derive(Clone)]
pub struct NotificationManager { pub struct NotificationManager {
@@ -231,9 +232,15 @@ impl NotificationManager {
}); });
tokio::spawn(async move { tokio::spawn(async move {
let start = tokio::time::Instant::now()
+ delay_until_next_minute_boundary(chrono::Local::now());
let mut interval = tokio::time::interval_at(start, Duration::from_secs(60));
loop { loop {
tokio::time::sleep(Duration::from_secs(30)).await; interval.tick().await;
if let Err(err) = summary_manager.maybe_send_daily_summary().await { if let Err(err) = summary_manager
.maybe_send_daily_summary_at(chrono::Local::now())
.await
{
error!("Daily summary notification error: {}", err); error!("Daily summary notification error: {}", err);
} }
} }
@@ -301,9 +308,11 @@ impl NotificationManager {
Ok(()) Ok(())
} }
async fn maybe_send_daily_summary(&self) -> NotificationResult<()> { async fn maybe_send_daily_summary_at(
&self,
now: chrono::DateTime<chrono::Local>,
) -> NotificationResult<()> {
let config = self.config.read().await.clone(); let config = self.config.read().await.clone();
let now = chrono::Local::now();
let parts = config let parts = config
.notifications .notifications
.daily_summary_time_local .daily_summary_time_local
@@ -314,43 +323,100 @@ impl NotificationManager {
} }
let hour = parts[0].parse::<u32>().unwrap_or(9); let hour = parts[0].parse::<u32>().unwrap_or(9);
let minute = parts[1].parse::<u32>().unwrap_or(0); let minute = parts[1].parse::<u32>().unwrap_or(0);
if now.hour() != hour || now.minute() != minute { let Some(scheduled_at) = now
.with_hour(hour)
.and_then(|value| value.with_minute(minute))
.and_then(|value| value.with_second(0))
.and_then(|value| value.with_nanosecond(0))
else {
return Ok(());
};
if now < scheduled_at {
return Ok(()); return Ok(());
} }
let summary_key = now.format("%Y-%m-%d").to_string(); let summary_key = now.format("%Y-%m-%d").to_string();
{ if self.daily_summary_already_sent(&summary_key).await? {
let mut last_sent = self.daily_summary_last_sent.lock().await; return Ok(());
if last_sent.as_deref() == Some(summary_key.as_str()) {
return Ok(());
}
// Mark sent before releasing lock to prevent duplicate sends
// if the scheduler fires twice in the same minute.
*last_sent = Some(summary_key.clone());
} }
let summary = self.db.get_daily_summary_stats().await?;
let targets = self.db.get_notification_targets().await?; let targets = self.db.get_notification_targets().await?;
let mut eligible_targets = Vec::new();
for target in targets { for target in targets {
if !target.enabled { if !target.enabled {
continue; continue;
} }
let allowed: Vec<String> = serde_json::from_str(&target.events).unwrap_or_default(); let allowed: Vec<String> = match serde_json::from_str(&target.events) {
Ok(events) => events,
Err(err) => {
warn!(
"Failed to parse events for notification target '{}': {}",
target.name, err
);
Vec::new()
}
};
let normalized_allowed = crate::config::normalize_notification_events(&allowed); let normalized_allowed = crate::config::normalize_notification_events(&allowed);
if !normalized_allowed if normalized_allowed
.iter() .iter()
.any(|event| event == crate::config::NOTIFICATION_EVENT_DAILY_SUMMARY) .any(|event| event == crate::config::NOTIFICATION_EVENT_DAILY_SUMMARY)
{ {
continue; eligible_targets.push(target);
} }
}
if eligible_targets.is_empty() {
self.mark_daily_summary_sent(&summary_key).await?;
return Ok(());
}
let summary = self.db.get_daily_summary_stats().await?;
let mut delivered = 0usize;
for target in eligible_targets {
if let Err(err) = self.send_daily_summary_target(&target, &summary).await { if let Err(err) = self.send_daily_summary_target(&target, &summary).await {
error!( error!(
"Failed to send daily summary to target '{}': {}", "Failed to send daily summary to target '{}': {}",
target.name, err target.name, err
); );
continue;
}
delivered += 1;
}
if delivered > 0 {
self.mark_daily_summary_sent(&summary_key).await?;
}
Ok(())
}
    /// Returns true when the daily summary for `summary_key` (a local
    /// `YYYY-MM-DD` date string) has already been delivered.
    ///
    /// Checks the in-memory cache first, then falls back to the persisted
    /// preference so the answer survives process restarts (the
    /// restart-safety half of duplicate-send prevention).
    async fn daily_summary_already_sent(&self, summary_key: &str) -> NotificationResult<bool> {
        {
            // Fast path: this process already sent (or observed) today's summary.
            // Scoped block releases the mutex before the DB round-trip below.
            let last_sent = self.daily_summary_last_sent.lock().await;
            if last_sent.as_deref() == Some(summary_key) {
                return Ok(true);
            }
        }
        let persisted = self
            .db
            .get_preference(DAILY_SUMMARY_LAST_SUCCESS_KEY)
            .await?;
        if persisted.as_deref() == Some(summary_key) {
            // Warm the cache so subsequent checks skip the DB query.
            let mut last_sent = self.daily_summary_last_sent.lock().await;
            *last_sent = Some(summary_key.to_string());
            return Ok(true);
        }
        Ok(false)
    }
async fn mark_daily_summary_sent(&self, summary_key: &str) -> NotificationResult<()> {
self.db
.set_preference(DAILY_SUMMARY_LAST_SUCCESS_KEY, summary_key)
.await?;
let mut last_sent = self.daily_summary_last_sent.lock().await;
*last_sent = Some(summary_key.to_string());
Ok(()) Ok(())
} }
@@ -851,6 +917,17 @@ impl NotificationManager {
} }
} }
/// Computes how long to sleep so the scheduler's first tick lands on the next
/// wall-clock minute boundary. The result is always positive so the caller
/// sleeps at least a sliver even when `now` sits exactly on a boundary.
fn delay_until_next_minute_boundary(now: chrono::DateTime<chrono::Local>) -> Duration {
    // Whole seconds left in the current minute (clamped to at least one).
    let whole_seconds = 60_u64.saturating_sub(u64::from(now.second())).max(1);
    let base = Duration::from_secs(whole_seconds);
    let subsec_nanos = u64::from(now.nanosecond());
    if subsec_nanos == 0 {
        return base;
    }
    // Trim the already-elapsed sub-second part; if that would underflow
    // (leap-second style nanosecond overshoot), fall back to a minimal delay.
    base.checked_sub(Duration::from_nanos(subsec_nanos))
        .unwrap_or(Duration::from_millis(1))
}
async fn _unused_ensure_public_endpoint(raw: &str) -> Result<(), Box<dyn std::error::Error>> { async fn _unused_ensure_public_endpoint(raw: &str) -> Result<(), Box<dyn std::error::Error>> {
let url = Url::parse(raw)?; let url = Url::parse(raw)?;
let host = match url.host_str() { let host = match url.host_str() {
@@ -912,9 +989,38 @@ fn is_private_ip(ip: IpAddr) -> bool {
mod tests { mod tests {
use super::*; use super::*;
use crate::db::JobState; use crate::db::JobState;
use std::sync::{
Arc,
atomic::{AtomicUsize, Ordering},
};
use tokio::io::{AsyncReadExt, AsyncWriteExt}; use tokio::io::{AsyncReadExt, AsyncWriteExt};
use tokio::net::TcpListener; use tokio::net::TcpListener;
    // Builds "today at hour:minute:00.000000000" in local time for driving
    // maybe_send_daily_summary_at deterministically. Falls back to the
    // current instant if the components are invalid (e.g. hour >= 24),
    // which test callers avoid by passing sane values.
    fn scheduled_test_time(hour: u32, minute: u32) -> chrono::DateTime<chrono::Local> {
        chrono::Local::now()
            .with_hour(hour)
            .and_then(|value| value.with_minute(minute))
            .and_then(|value| value.with_second(0))
            .and_then(|value| value.with_nanosecond(0))
            .unwrap_or_else(chrono::Local::now)
    }
    // Registers an enabled webhook notification target pointed at the local
    // test listener, subscribed only to the "daily.summary" event. Shared
    // setup for the daily-summary delivery tests below.
    async fn add_daily_summary_webhook_target(
        db: &Db,
        addr: std::net::SocketAddr,
    ) -> NotificationResult<()> {
        let config_json = serde_json::json!({ "url": format!("http://{}", addr) }).to_string();
        db.add_notification_target(
            "daily-summary",
            "webhook",
            &config_json,
            "[\"daily.summary\"]",
            true,
        )
        .await?;
        Ok(())
    }
#[tokio::test] #[tokio::test]
async fn test_webhook_errors_on_non_success() async fn test_webhook_errors_on_non_success()
-> std::result::Result<(), Box<dyn std::error::Error + Send + Sync>> { -> std::result::Result<(), Box<dyn std::error::Error + Send + Sync>> {
@@ -1061,4 +1167,154 @@ mod tests {
let _ = std::fs::remove_file(db_path); let _ = std::fs::remove_file(db_path);
Ok(()) Ok(())
} }
    // Scenario: the first delivery attempt gets a 500 from the webhook, so
    // the day must NOT be marked sent; the next minute's tick retries,
    // succeeds, and only then persists DAILY_SUMMARY_LAST_SUCCESS_KEY.
    #[tokio::test]
    async fn daily_summary_retries_after_failed_delivery_and_marks_success()
    -> std::result::Result<(), Box<dyn std::error::Error + Send + Sync>> {
        let mut db_path = std::env::temp_dir();
        let token: u64 = rand::random();
        db_path.push(format!("alchemist_notifications_daily_retry_{}.db", token));
        let db = Db::new(db_path.to_string_lossy().as_ref()).await?;
        let mut test_config = crate::config::Config::default();
        test_config.notifications.allow_local_notifications = true;
        test_config.notifications.daily_summary_time_local = "09:00".to_string();
        let config = Arc::new(RwLock::new(test_config));
        let manager = NotificationManager::new(db.clone(), config);
        let listener = match TcpListener::bind("127.0.0.1:0").await {
            Ok(listener) => listener,
            Err(err) if err.kind() == std::io::ErrorKind::PermissionDenied => {
                // Sandboxed CI may forbid binding sockets; treat as skipped.
                return Ok(());
            }
            Err(err) => return Err(err.into()),
        };
        let addr = listener.local_addr()?;
        add_daily_summary_webhook_target(&db, addr).await?;
        let request_count = Arc::new(AtomicUsize::new(0));
        let request_count_task = request_count.clone();
        // Minimal HTTP responder: 500 on the first request, 200 afterwards.
        let listener_task = tokio::spawn(async move {
            loop {
                let Ok((mut socket, _)) = listener.accept().await else {
                    break;
                };
                let mut buf = [0u8; 1024];
                let _ = socket.read(&mut buf).await;
                let index = request_count_task.fetch_add(1, Ordering::SeqCst);
                let response = if index == 0 {
                    "HTTP/1.1 500 Internal Server Error\r\nContent-Length: 0\r\n\r\n"
                } else {
                    "HTTP/1.1 200 OK\r\nContent-Length: 0\r\n\r\n"
                };
                let _ = socket.write_all(response.as_bytes()).await;
            }
        });
        let first_now = scheduled_test_time(9, 5);
        manager.maybe_send_daily_summary_at(first_now).await?;
        // Delivery was attempted once but failed: no success marker persisted.
        assert_eq!(request_count.load(Ordering::SeqCst), 1);
        assert_eq!(
            db.get_preference(DAILY_SUMMARY_LAST_SUCCESS_KEY).await?,
            None
        );
        manager
            .maybe_send_daily_summary_at(first_now + chrono::Duration::minutes(1))
            .await?;
        // Retry delivered: second request observed and the day marked sent.
        assert_eq!(request_count.load(Ordering::SeqCst), 2);
        assert_eq!(
            db.get_preference(DAILY_SUMMARY_LAST_SUCCESS_KEY).await?,
            Some(first_now.format("%Y-%m-%d").to_string())
        );
        listener_task.abort();
        let _ = std::fs::remove_file(db_path);
        Ok(())
    }
    // Scenario: after a successful send, a freshly constructed manager (the
    // restart stand-in, with an empty in-memory cache) must read the persisted
    // success marker and NOT deliver the same day's summary again.
    #[tokio::test]
    async fn daily_summary_is_restart_safe_after_successful_delivery()
    -> std::result::Result<(), Box<dyn std::error::Error + Send + Sync>> {
        let mut db_path = std::env::temp_dir();
        let token: u64 = rand::random();
        db_path.push(format!(
            "alchemist_notifications_daily_restart_{}.db",
            token
        ));
        let db = Db::new(db_path.to_string_lossy().as_ref()).await?;
        let mut test_config = crate::config::Config::default();
        test_config.notifications.allow_local_notifications = true;
        test_config.notifications.daily_summary_time_local = "09:00".to_string();
        let config = Arc::new(RwLock::new(test_config));
        let listener = match TcpListener::bind("127.0.0.1:0").await {
            Ok(listener) => listener,
            Err(err) if err.kind() == std::io::ErrorKind::PermissionDenied => {
                // Sandboxed CI may forbid binding sockets; treat as skipped.
                return Ok(());
            }
            Err(err) => return Err(err.into()),
        };
        let addr = listener.local_addr()?;
        add_daily_summary_webhook_target(&db, addr).await?;
        let request_count = Arc::new(AtomicUsize::new(0));
        let request_count_task = request_count.clone();
        // Minimal HTTP responder: always 200, counting requests received.
        let listener_task = tokio::spawn(async move {
            loop {
                let Ok((mut socket, _)) = listener.accept().await else {
                    break;
                };
                let mut buf = [0u8; 1024];
                let _ = socket.read(&mut buf).await;
                request_count_task.fetch_add(1, Ordering::SeqCst);
                let _ = socket
                    .write_all(b"HTTP/1.1 200 OK\r\nContent-Length: 0\r\n\r\n")
                    .await;
            }
        });
        let first_now = scheduled_test_time(9, 2);
        let manager = NotificationManager::new(db.clone(), config.clone());
        manager.maybe_send_daily_summary_at(first_now).await?;
        assert_eq!(request_count.load(Ordering::SeqCst), 1);
        // "Restart": a new manager sharing only the database with the first.
        let restarted_manager = NotificationManager::new(db.clone(), config.clone());
        restarted_manager
            .maybe_send_daily_summary_at(first_now + chrono::Duration::minutes(10))
            .await?;
        // Still one request: the persisted marker suppressed the duplicate.
        assert_eq!(request_count.load(Ordering::SeqCst), 1);
        listener_task.abort();
        let _ = std::fs::remove_file(db_path);
        Ok(())
    }
    // Scenario: with no eligible targets at the scheduled time, the day is
    // still marked sent so the scheduler does not re-evaluate every minute
    // for the rest of the day.
    #[tokio::test]
    async fn daily_summary_marks_day_sent_when_no_targets_are_eligible()
    -> std::result::Result<(), Box<dyn std::error::Error + Send + Sync>> {
        let mut db_path = std::env::temp_dir();
        let token: u64 = rand::random();
        db_path.push(format!(
            "alchemist_notifications_daily_no_targets_{}.db",
            token
        ));
        let db = Db::new(db_path.to_string_lossy().as_ref()).await?;
        let mut test_config = crate::config::Config::default();
        // Note: no targets are registered and local notifications stay off.
        test_config.notifications.daily_summary_time_local = "09:00".to_string();
        let config = Arc::new(RwLock::new(test_config));
        let manager = NotificationManager::new(db.clone(), config);
        let now = scheduled_test_time(9, 1);
        manager.maybe_send_daily_summary_at(now).await?;
        assert_eq!(
            db.get_preference(DAILY_SUMMARY_LAST_SUCCESS_KEY).await?,
            Some(now.format("%Y-%m-%d").to_string())
        );
        let _ = std::fs::remove_file(db_path);
        Ok(())
    }
} }

View File

@@ -29,6 +29,8 @@ pub struct TranscodeRequest<'a> {
pub metadata: &'a crate::media::pipeline::MediaMetadata, pub metadata: &'a crate::media::pipeline::MediaMetadata,
pub plan: &'a TranscodePlan, pub plan: &'a TranscodePlan,
pub observer: Option<Arc<dyn ExecutionObserver>>, pub observer: Option<Arc<dyn ExecutionObserver>>,
pub clip_start_seconds: Option<f64>,
pub clip_duration_seconds: Option<f64>,
} }
#[allow(async_fn_in_trait)] #[allow(async_fn_in_trait)]
@@ -187,6 +189,7 @@ impl Transcoder {
request.plan, request.plan,
) )
.with_hardware(request.hw_info) .with_hardware(request.hw_info)
.with_clip(request.clip_start_seconds, request.clip_duration_seconds)
.build()?; .build()?;
info!("━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━"); info!("━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━");

View File

@@ -15,6 +15,7 @@ use chrono::Utc;
use rand::Rng; use rand::Rng;
use std::net::SocketAddr; use std::net::SocketAddr;
use std::sync::Arc; use std::sync::Arc;
use tracing::error;
#[derive(serde::Deserialize)] #[derive(serde::Deserialize)]
pub(crate) struct LoginPayload { pub(crate) struct LoginPayload {
@@ -32,11 +33,13 @@ pub(crate) async fn login_handler(
} }
let mut is_valid = true; let mut is_valid = true;
let user_result = state let user_result = match state.db.get_user_by_username(&payload.username).await {
.db Ok(user) => user,
.get_user_by_username(&payload.username) Err(err) => {
.await error!("Login lookup failed for '{}': {}", payload.username, err);
.unwrap_or(None); return (StatusCode::INTERNAL_SERVER_ERROR, err.to_string()).into_response();
}
};
// A valid argon2 static hash of a random string used to simulate work and equalize timing // A valid argon2 static hash of a random string used to simulate work and equalize timing
const DUMMY_HASH: &str = "$argon2id$v=19$m=19456,t=2,p=1$c2FsdHN0cmluZzEyMzQ1Ng$1tJ2tA109qj15m3u5+kS/sX5X1UoZ6/H9b/30tX9N/g"; const DUMMY_HASH: &str = "$argon2id$v=19$m=19456,t=2,p=1$c2FsdHN0cmluZzEyMzQ1Ng$1tJ2tA109qj15m3u5+kS/sX5X1UoZ6/H9b/30tX9N/g";

View File

@@ -10,7 +10,11 @@ use axum::{
response::{IntoResponse, Response}, response::{IntoResponse, Response},
}; };
use serde::{Deserialize, Serialize}; use serde::{Deserialize, Serialize};
use std::sync::Arc; use std::{
path::{Path as FsPath, PathBuf},
sync::Arc,
time::SystemTime,
};
#[derive(Serialize)] #[derive(Serialize)]
struct BlockedJob { struct BlockedJob {
@@ -24,6 +28,17 @@ struct BlockedJobsResponse {
blocked: Vec<BlockedJob>, blocked: Vec<BlockedJob>,
} }
/// Request body for POST /api/jobs/enqueue: a single absolute file path to
/// enqueue through the same dedupe/output rules as library scans.
#[derive(Deserialize)]
pub(crate) struct EnqueueJobPayload {
    path: String,
}

/// Response for the manual-enqueue endpoint: whether the file was actually
/// queued, plus a human-readable explanation either way.
#[derive(Serialize)]
pub(crate) struct EnqueueJobResponse {
    enqueued: bool,
    message: String,
}
pub(crate) fn blocked_jobs_response(message: impl Into<String>, blocked: &[Job]) -> Response { pub(crate) fn blocked_jobs_response(message: impl Into<String>, blocked: &[Job]) -> Response {
let payload = BlockedJobsResponse { let payload = BlockedJobsResponse {
message: message.into(), message: message.into(),
@@ -38,6 +53,166 @@ pub(crate) fn blocked_jobs_response(message: impl Into<String>, blocked: &[Job])
(StatusCode::CONFLICT, axum::Json(payload)).into_response() (StatusCode::CONFLICT, axum::Json(payload)).into_response()
} }
/// Finds the deepest configured watch directory that is a path-prefix of
/// `path`, or `None` when the file lives outside every watch dir. On equal
/// depth the later entry wins (same tie rule as `Iterator::max_by_key`).
fn resolve_source_root(path: &FsPath, watch_dirs: &[crate::db::WatchDir]) -> Option<PathBuf> {
    let mut best: Option<PathBuf> = None;
    for watch_dir in watch_dirs {
        let candidate = PathBuf::from(&watch_dir.path);
        if !path.starts_with(&candidate) {
            continue;
        }
        let depth = candidate.components().count();
        let keep = best
            .as_ref()
            .is_none_or(|current| depth >= current.components().count());
        if keep {
            best = Some(candidate);
        }
    }
    best
}
/// Best-effort cleanup of persisted resume state (DB rows plus on-disk temp
/// segment directories) for the given job ids, run after job deletion.
///
/// All failures are logged and skipped so the delete action itself never
/// fails because of cleanup. The temp dir is removed only after its DB rows
/// are successfully deleted, so a failed delete never strands session rows
/// pointing at a missing directory.
async fn purge_resume_sessions_for_jobs(state: &AppState, ids: &[i64]) {
    let sessions = match state.db.get_resume_sessions_by_job_ids(ids).await {
        Ok(sessions) => sessions,
        Err(err) => {
            tracing::warn!("Failed to load resume sessions for purge: {}", err);
            return;
        }
    };
    for session in sessions {
        if let Err(err) = state.db.delete_resume_session(session.job_id).await {
            tracing::warn!(
                job_id = session.job_id,
                "Failed to delete resume session rows: {err}"
            );
            // Keep the temp dir: its rows still reference it.
            continue;
        }
        let temp_dir = PathBuf::from(&session.temp_dir);
        if temp_dir.exists() {
            if let Err(err) = tokio::fs::remove_dir_all(&temp_dir).await {
                tracing::warn!(
                    job_id = session.job_id,
                    path = %temp_dir.display(),
                    "Failed to remove resume temp dir: {err}"
                );
            }
        }
    }
}
pub(crate) async fn enqueue_job_handler(
State(state): State<Arc<AppState>>,
axum::Json(payload): axum::Json<EnqueueJobPayload>,
) -> impl IntoResponse {
let submitted_path = payload.path.trim();
if submitted_path.is_empty() {
return (
StatusCode::BAD_REQUEST,
axum::Json(EnqueueJobResponse {
enqueued: false,
message: "Path must not be empty.".to_string(),
}),
)
.into_response();
}
let requested_path = PathBuf::from(submitted_path);
if !requested_path.is_absolute() {
return (
StatusCode::BAD_REQUEST,
axum::Json(EnqueueJobResponse {
enqueued: false,
message: "Path must be absolute.".to_string(),
}),
)
.into_response();
}
let canonical_path = match std::fs::canonicalize(&requested_path) {
Ok(path) => path,
Err(err) => {
return (
StatusCode::BAD_REQUEST,
axum::Json(EnqueueJobResponse {
enqueued: false,
message: format!("Unable to resolve path: {err}"),
}),
)
.into_response();
}
};
let metadata = match std::fs::metadata(&canonical_path) {
Ok(metadata) => metadata,
Err(err) => {
return (
StatusCode::BAD_REQUEST,
axum::Json(EnqueueJobResponse {
enqueued: false,
message: format!("Unable to read file metadata: {err}"),
}),
)
.into_response();
}
};
if !metadata.is_file() {
return (
StatusCode::BAD_REQUEST,
axum::Json(EnqueueJobResponse {
enqueued: false,
message: "Path must point to a file.".to_string(),
}),
)
.into_response();
}
let extension = canonical_path
.extension()
.and_then(|value| value.to_str())
.map(|value| value.to_ascii_lowercase());
let supported = crate::media::scanner::Scanner::new().extensions;
if extension
.as_deref()
.is_none_or(|value| !supported.iter().any(|candidate| candidate == value))
{
return (
StatusCode::BAD_REQUEST,
axum::Json(EnqueueJobResponse {
enqueued: false,
message: "File type is not supported for enqueue.".to_string(),
}),
)
.into_response();
}
let watch_dirs = match state.db.get_watch_dirs().await {
Ok(watch_dirs) => watch_dirs,
Err(err) => {
return (StatusCode::INTERNAL_SERVER_ERROR, err.to_string()).into_response();
}
};
let discovered = crate::media::pipeline::DiscoveredMedia {
path: canonical_path.clone(),
mtime: metadata.modified().unwrap_or(SystemTime::UNIX_EPOCH),
source_root: resolve_source_root(&canonical_path, &watch_dirs),
};
match crate::media::pipeline::enqueue_discovered_with_db(state.db.as_ref(), discovered).await {
Ok(true) => (
StatusCode::OK,
axum::Json(EnqueueJobResponse {
enqueued: true,
message: format!("Enqueued {}.", canonical_path.display()),
}),
)
.into_response(),
Ok(false) => (
StatusCode::OK,
axum::Json(EnqueueJobResponse {
enqueued: false,
message:
"File was not enqueued because it matched existing output or dedupe rules."
.to_string(),
}),
)
.into_response(),
Err(err) => (StatusCode::INTERNAL_SERVER_ERROR, err.to_string()).into_response(),
}
}
pub(crate) async fn request_job_cancel(state: &AppState, job: &Job) -> Result<bool> { pub(crate) async fn request_job_cancel(state: &AppState, job: &Job) -> Result<bool> {
state.transcoder.add_cancel_request(job.id).await; state.transcoder.add_cancel_request(job.id).await;
match job.status { match job.status {
@@ -226,7 +401,12 @@ pub(crate) async fn batch_jobs_handler(
}; };
match result { match result {
Ok(count) => axum::Json(serde_json::json!({ "count": count })).into_response(), Ok(count) => {
if payload.action == "delete" {
purge_resume_sessions_for_jobs(state.as_ref(), &payload.ids).await;
}
axum::Json(serde_json::json!({ "count": count })).into_response()
}
Err(e) => (StatusCode::INTERNAL_SERVER_ERROR, e.to_string()).into_response(), Err(e) => (StatusCode::INTERNAL_SERVER_ERROR, e.to_string()).into_response(),
} }
} }
@@ -270,8 +450,13 @@ pub(crate) async fn restart_failed_handler(
pub(crate) async fn clear_completed_handler( pub(crate) async fn clear_completed_handler(
State(state): State<Arc<AppState>>, State(state): State<Arc<AppState>>,
) -> impl IntoResponse { ) -> impl IntoResponse {
let completed_job_ids = match state.db.get_jobs_by_status(JobState::Completed).await {
Ok(jobs) => jobs.into_iter().map(|job| job.id).collect::<Vec<_>>(),
Err(e) => return (StatusCode::INTERNAL_SERVER_ERROR, e.to_string()).into_response(),
};
match state.db.clear_completed_jobs().await { match state.db.clear_completed_jobs().await {
Ok(count) => { Ok(count) => {
purge_resume_sessions_for_jobs(state.as_ref(), &completed_job_ids).await;
let message = if count == 0 { let message = if count == 0 {
"No completed jobs were waiting to be cleared.".to_string() "No completed jobs were waiting to be cleared.".to_string()
} else if count == 1 { } else if count == 1 {
@@ -324,7 +509,10 @@ pub(crate) async fn delete_job_handler(
state.transcoder.cancel_job(id); state.transcoder.cancel_job(id);
match state.db.delete_job(id).await { match state.db.delete_job(id).await {
Ok(_) => StatusCode::OK.into_response(), Ok(_) => {
purge_resume_sessions_for_jobs(state.as_ref(), &[id]).await;
StatusCode::OK.into_response()
}
Err(e) if is_row_not_found(&e) => StatusCode::NOT_FOUND.into_response(), Err(e) if is_row_not_found(&e) => StatusCode::NOT_FOUND.into_response(),
Err(e) => (StatusCode::INTERNAL_SERVER_ERROR, e.to_string()).into_response(), Err(e) => (StatusCode::INTERNAL_SERVER_ERROR, e.to_string()).into_response(),
} }
@@ -383,7 +571,13 @@ pub(crate) async fn get_job_detail_handler(
// Try to get encode stats (using the subquery result or a specific query) // Try to get encode stats (using the subquery result or a specific query)
// For now we'll just query the encode_stats table if completed // For now we'll just query the encode_stats table if completed
let encode_stats = if job.status == JobState::Completed { let encode_stats = if job.status == JobState::Completed {
state.db.get_encode_stats_by_job_id(id).await.ok() match state.db.get_encode_stats_by_job_id(id).await {
Ok(stats) => Some(stats),
Err(err) if is_row_not_found(&err) => None,
Err(err) => {
return (StatusCode::INTERNAL_SERVER_ERROR, err.to_string()).into_response();
}
}
} else { } else {
None None
}; };
@@ -424,14 +618,18 @@ pub(crate) async fn get_job_detail_handler(
(None, None) (None, None)
}; };
let encode_attempts = state let encode_attempts = match state.db.get_encode_attempts_by_job(id).await {
.db Ok(attempts) => attempts,
.get_encode_attempts_by_job(id) Err(err) => return (StatusCode::INTERNAL_SERVER_ERROR, err.to_string()).into_response(),
.await };
.unwrap_or_default();
let queue_position = if job.status == JobState::Queued { let queue_position = if job.status == JobState::Queued {
state.db.get_queue_position(id).await.unwrap_or(None) match state.db.get_queue_position(id).await {
Ok(position) => position,
Err(err) => {
return (StatusCode::INTERNAL_SERVER_ERROR, err.to_string()).into_response();
}
}
} else { } else {
None None
}; };

View File

@@ -341,6 +341,7 @@ fn app_router(state: Arc<AppState>) -> Router {
// Canonical job list endpoint. // Canonical job list endpoint.
.route("/api/jobs", get(jobs_table_handler)) .route("/api/jobs", get(jobs_table_handler))
.route("/api/jobs/table", get(jobs_table_handler)) .route("/api/jobs/table", get(jobs_table_handler))
.route("/api/jobs/enqueue", post(enqueue_job_handler))
.route("/api/jobs/batch", post(batch_jobs_handler)) .route("/api/jobs/batch", post(batch_jobs_handler))
.route("/api/logs/history", get(logs_history_handler)) .route("/api/logs/history", get(logs_history_handler))
.route("/api/logs", delete(clear_logs_handler)) .route("/api/logs", delete(clear_logs_handler))
@@ -376,6 +377,7 @@ fn app_router(state: Arc<AppState>) -> Router {
get(get_engine_mode_handler).post(set_engine_mode_handler), get(get_engine_mode_handler).post(set_engine_mode_handler),
) )
.route("/api/engine/status", get(engine_status_handler)) .route("/api/engine/status", get(engine_status_handler))
.route("/api/processor/status", get(processor_status_handler))
.route( .route(
"/api/settings/transcode", "/api/settings/transcode",
get(get_transcode_settings_handler).post(update_transcode_settings_handler), get(get_transcode_settings_handler).post(update_transcode_settings_handler),

View File

@@ -641,9 +641,8 @@ pub(crate) async fn add_notification_handler(
} }
match state.db.get_notification_targets().await { match state.db.get_notification_targets().await {
Ok(targets) => targets Ok(mut targets) => targets
.into_iter() .pop()
.find(|target| target.name == payload.name)
.map(|target| axum::Json(notification_target_response(target)).into_response()) .map(|target| axum::Json(notification_target_response(target)).into_response())
.unwrap_or_else(|| StatusCode::OK.into_response()), .unwrap_or_else(|| StatusCode::OK.into_response()),
Err(e) => (StatusCode::INTERNAL_SERVER_ERROR, e.to_string()).into_response(), Err(e) => (StatusCode::INTERNAL_SERVER_ERROR, e.to_string()).into_response(),
@@ -654,23 +653,23 @@ pub(crate) async fn delete_notification_handler(
State(state): State<Arc<AppState>>, State(state): State<Arc<AppState>>,
Path(id): Path<i64>, Path(id): Path<i64>,
) -> impl IntoResponse { ) -> impl IntoResponse {
let target = match state.db.get_notification_targets().await { let target_index = match state.db.get_notification_targets().await {
Ok(targets) => targets.into_iter().find(|target| target.id == id), Ok(targets) => targets.iter().position(|target| target.id == id),
Err(e) => return (StatusCode::INTERNAL_SERVER_ERROR, e.to_string()).into_response(), Err(e) => return (StatusCode::INTERNAL_SERVER_ERROR, e.to_string()).into_response(),
}; };
let Some(target) = target else { let Some(target_index) = target_index else {
return StatusCode::NOT_FOUND.into_response(); return StatusCode::NOT_FOUND.into_response();
}; };
let mut next_config = state.config.read().await.clone(); let mut next_config = state.config.read().await.clone();
let target_config_json = target.config_json.clone(); if target_index >= next_config.notifications.targets.len() {
let parsed_target_config_json = return (
serde_json::from_str::<JsonValue>(&target_config_json).unwrap_or(JsonValue::Null); StatusCode::INTERNAL_SERVER_ERROR,
next_config.notifications.targets.retain(|candidate| { "notification settings projection is out of sync with config",
!(candidate.name == target.name )
&& candidate.target_type == target.target_type .into_response();
&& candidate.config_json == parsed_target_config_json) }
}); next_config.notifications.targets.remove(target_index);
if let Err(response) = save_config_or_response(&state, &next_config).await { if let Err(response) = save_config_or_response(&state, &next_config).await {
return *response; return *response;
} }
@@ -837,13 +836,8 @@ pub(crate) async fn add_schedule_handler(
state.scheduler.trigger(); state.scheduler.trigger();
match state.db.get_schedule_windows().await { match state.db.get_schedule_windows().await {
Ok(windows) => windows Ok(mut windows) => windows
.into_iter() .pop()
.find(|window| {
window.start_time == start_time
&& window.end_time == end_time
&& window.enabled == payload.enabled
})
.map(|window| axum::Json(serde_json::json!(window)).into_response()) .map(|window| axum::Json(serde_json::json!(window)).into_response())
.unwrap_or_else(|| StatusCode::OK.into_response()), .unwrap_or_else(|| StatusCode::OK.into_response()),
Err(e) => (StatusCode::INTERNAL_SERVER_ERROR, e.to_string()).into_response(), Err(e) => (StatusCode::INTERNAL_SERVER_ERROR, e.to_string()).into_response(),
@@ -854,22 +848,23 @@ pub(crate) async fn delete_schedule_handler(
State(state): State<Arc<AppState>>, State(state): State<Arc<AppState>>,
Path(id): Path<i64>, Path(id): Path<i64>,
) -> impl IntoResponse { ) -> impl IntoResponse {
let window = match state.db.get_schedule_windows().await { let window_index = match state.db.get_schedule_windows().await {
Ok(windows) => windows.into_iter().find(|window| window.id == id), Ok(windows) => windows.iter().position(|window| window.id == id),
Err(e) => return (StatusCode::INTERNAL_SERVER_ERROR, e.to_string()).into_response(), Err(e) => return (StatusCode::INTERNAL_SERVER_ERROR, e.to_string()).into_response(),
}; };
let Some(window) = window else { let Some(window_index) = window_index else {
return StatusCode::NOT_FOUND.into_response(); return StatusCode::NOT_FOUND.into_response();
}; };
let days_of_week: Vec<i32> = serde_json::from_str(&window.days_of_week).unwrap_or_default();
let mut next_config = state.config.read().await.clone(); let mut next_config = state.config.read().await.clone();
next_config.schedule.windows.retain(|candidate| { if window_index >= next_config.schedule.windows.len() {
!(candidate.start_time == window.start_time return (
&& candidate.end_time == window.end_time StatusCode::INTERNAL_SERVER_ERROR,
&& candidate.enabled == window.enabled "schedule settings projection is out of sync with config",
&& candidate.days_of_week == days_of_week) )
}); .into_response();
}
next_config.schedule.windows.remove(window_index);
if let Err(response) = save_config_or_response(&state, &next_config).await { if let Err(response) = save_config_or_response(&state, &next_config).await {
return *response; return *response;
} }

View File

@@ -27,6 +27,17 @@ struct SystemResources {
gpu_memory_percent: Option<f32>, gpu_memory_percent: Option<f32>,
} }
#[derive(Serialize)]
pub(crate) struct ProcessorStatusResponse {
blocked_reason: Option<&'static str>,
message: String,
manual_paused: bool,
scheduler_paused: bool,
draining: bool,
active_jobs: i64,
concurrent_limit: usize,
}
#[derive(Serialize)] #[derive(Serialize)]
struct DuplicateGroup { struct DuplicateGroup {
stem: String, stem: String,
@@ -135,6 +146,54 @@ pub(crate) async fn system_resources_handler(State(state): State<Arc<AppState>>)
axum::Json(value).into_response() axum::Json(value).into_response()
} }
pub(crate) async fn processor_status_handler(State(state): State<Arc<AppState>>) -> Response {
let stats = match state.db.get_job_stats().await {
Ok(stats) => stats,
Err(err) => return config_read_error_response("load processor status", &err),
};
let concurrent_limit = state.agent.concurrent_jobs_limit();
let manual_paused = state.agent.is_manual_paused();
let scheduler_paused = state.agent.is_scheduler_paused();
let draining = state.agent.is_draining();
let active_jobs = stats.active;
let (blocked_reason, message) = if manual_paused {
(
Some("manual_paused"),
"The engine is manually paused and will not start queued jobs.".to_string(),
)
} else if scheduler_paused {
(
Some("scheduled_pause"),
"The schedule is currently pausing the engine.".to_string(),
)
} else if draining {
(
Some("draining"),
"The engine is draining and will not start new queued jobs.".to_string(),
)
} else if active_jobs >= concurrent_limit as i64 {
(
Some("workers_busy"),
"All worker slots are currently busy.".to_string(),
)
} else {
(None, "Workers are available.".to_string())
};
axum::Json(ProcessorStatusResponse {
blocked_reason,
message,
manual_paused,
scheduler_paused,
draining,
active_jobs,
concurrent_limit,
})
.into_response()
}
pub(crate) async fn library_intelligence_handler(State(state): State<Arc<AppState>>) -> Response { pub(crate) async fn library_intelligence_handler(State(state): State<Arc<AppState>>) -> Response {
use std::collections::HashMap; use std::collections::HashMap;
use std::path::Path; use std::path::Path;

View File

@@ -547,6 +547,69 @@ async fn engine_status_endpoint_reports_draining_state()
Ok(()) Ok(())
} }
#[tokio::test]
async fn processor_status_endpoint_reports_blocking_reason_precedence()
-> std::result::Result<(), Box<dyn std::error::Error>> {
let (state, app, config_path, db_path) = build_test_app(false, 8, |_| {}).await?;
let token = create_session(state.db.as_ref()).await?;
let (_job, input_path, output_path) = seed_job(state.db.as_ref(), JobState::Encoding).await?;
let response = app
.clone()
.oneshot(auth_request(
Method::GET,
"/api/processor/status",
&token,
Body::empty(),
))
.await?;
assert_eq!(response.status(), StatusCode::OK);
let payload: serde_json::Value = serde_json::from_str(&body_text(response).await)?;
assert_eq!(payload["blocked_reason"], "workers_busy");
state.agent.drain();
let response = app
.clone()
.oneshot(auth_request(
Method::GET,
"/api/processor/status",
&token,
Body::empty(),
))
.await?;
let payload: serde_json::Value = serde_json::from_str(&body_text(response).await)?;
assert_eq!(payload["blocked_reason"], "draining");
state.agent.set_scheduler_paused(true);
let response = app
.clone()
.oneshot(auth_request(
Method::GET,
"/api/processor/status",
&token,
Body::empty(),
))
.await?;
let payload: serde_json::Value = serde_json::from_str(&body_text(response).await)?;
assert_eq!(payload["blocked_reason"], "scheduled_pause");
state.agent.pause();
let response = app
.clone()
.oneshot(auth_request(
Method::GET,
"/api/processor/status",
&token,
Body::empty(),
))
.await?;
let payload: serde_json::Value = serde_json::from_str(&body_text(response).await)?;
assert_eq!(payload["blocked_reason"], "manual_paused");
cleanup_paths(&[input_path, output_path, config_path, db_path]);
Ok(())
}
#[tokio::test] #[tokio::test]
async fn read_only_api_token_allows_observability_only_routes() async fn read_only_api_token_allows_observability_only_routes()
-> std::result::Result<(), Box<dyn std::error::Error>> { -> std::result::Result<(), Box<dyn std::error::Error>> {
@@ -1159,6 +1222,35 @@ async fn public_clients_can_reach_login_after_setup()
Ok(()) Ok(())
} }
#[tokio::test]
async fn login_returns_internal_error_when_user_lookup_fails()
-> std::result::Result<(), Box<dyn std::error::Error>> {
let (state, app, config_path, db_path) = build_test_app(false, 8, |_| {}).await?;
state.db.pool.close().await;
let mut request = remote_request(
Method::POST,
"/api/auth/login",
Body::from(
json!({
"username": "tester",
"password": "not-important"
})
.to_string(),
),
);
request.headers_mut().insert(
header::CONTENT_TYPE,
header::HeaderValue::from_static("application/json"),
);
let response = app.clone().oneshot(request).await?;
assert_eq!(response.status(), StatusCode::INTERNAL_SERVER_ERROR);
cleanup_paths(&[config_path, db_path]);
Ok(())
}
#[tokio::test] #[tokio::test]
async fn settings_bundle_requires_auth_after_setup() async fn settings_bundle_requires_auth_after_setup()
-> std::result::Result<(), Box<dyn std::error::Error>> { -> std::result::Result<(), Box<dyn std::error::Error>> {
@@ -1363,6 +1455,93 @@ async fn settings_bundle_put_projects_extended_settings_to_db()
Ok(()) Ok(())
} }
#[tokio::test]
async fn delete_notification_removes_only_one_duplicate_target()
-> std::result::Result<(), Box<dyn std::error::Error>> {
let duplicate_target = crate::config::NotificationTargetConfig {
name: "Discord".to_string(),
target_type: "discord_webhook".to_string(),
config_json: serde_json::json!({
"webhook_url": "https://discord.com/api/webhooks/test"
}),
endpoint_url: None,
auth_token: None,
events: vec!["encode.completed".to_string()],
enabled: true,
};
let (state, app, config_path, db_path) = build_test_app(false, 8, |config| {
config.notifications.targets = vec![duplicate_target.clone(), duplicate_target.clone()];
})
.await?;
let projected = state.config.read().await.clone();
crate::settings::project_config_to_db(state.db.as_ref(), &projected).await?;
let token = create_session(state.db.as_ref()).await?;
let targets = state.db.get_notification_targets().await?;
assert_eq!(targets.len(), 2);
let response = app
.clone()
.oneshot(auth_request(
Method::DELETE,
&format!("/api/settings/notifications/{}", targets[0].id),
&token,
Body::empty(),
))
.await?;
assert_eq!(response.status(), StatusCode::OK);
let persisted = crate::config::Config::load(config_path.as_path())?;
assert_eq!(persisted.notifications.targets.len(), 1);
let stored_targets = state.db.get_notification_targets().await?;
assert_eq!(stored_targets.len(), 1);
cleanup_paths(&[config_path, db_path]);
Ok(())
}
#[tokio::test]
async fn delete_schedule_removes_only_one_duplicate_window()
-> std::result::Result<(), Box<dyn std::error::Error>> {
let duplicate_window = crate::config::ScheduleWindowConfig {
start_time: "22:00".to_string(),
end_time: "06:00".to_string(),
days_of_week: vec![1, 2, 3],
enabled: true,
};
let (state, app, config_path, db_path) = build_test_app(false, 8, |config| {
config.schedule.windows = vec![duplicate_window.clone(), duplicate_window.clone()];
})
.await?;
let projected = state.config.read().await.clone();
crate::settings::project_config_to_db(state.db.as_ref(), &projected).await?;
let token = create_session(state.db.as_ref()).await?;
let windows = state.db.get_schedule_windows().await?;
assert_eq!(windows.len(), 2);
let response = app
.clone()
.oneshot(auth_request(
Method::DELETE,
&format!("/api/settings/schedule/{}", windows[0].id),
&token,
Body::empty(),
))
.await?;
assert_eq!(response.status(), StatusCode::OK);
let persisted = crate::config::Config::load(config_path.as_path())?;
assert_eq!(persisted.schedule.windows.len(), 1);
let stored_windows = state.db.get_schedule_windows().await?;
assert_eq!(stored_windows.len(), 1);
cleanup_paths(&[config_path, db_path]);
Ok(())
}
#[tokio::test] #[tokio::test]
async fn raw_config_put_overwrites_divergent_db_projection() async fn raw_config_put_overwrites_divergent_db_projection()
-> std::result::Result<(), Box<dyn std::error::Error>> { -> std::result::Result<(), Box<dyn std::error::Error>> {
@@ -1615,6 +1794,219 @@ async fn job_detail_route_falls_back_to_legacy_failure_summary()
Ok(()) Ok(())
} }
#[tokio::test]
async fn job_detail_route_returns_internal_error_when_encode_attempts_query_fails()
-> std::result::Result<(), Box<dyn std::error::Error>> {
let (state, app, config_path, db_path) = build_test_app(false, 8, |_| {}).await?;
let token = create_session(state.db.as_ref()).await?;
let (job, input_path, output_path) = seed_job(state.db.as_ref(), JobState::Queued).await?;
sqlx::query("DROP TABLE encode_attempts")
.execute(&state.db.pool)
.await?;
let response = app
.clone()
.oneshot(auth_request(
Method::GET,
&format!("/api/jobs/{}/details", job.id),
&token,
Body::empty(),
))
.await?;
assert_eq!(response.status(), StatusCode::INTERNAL_SERVER_ERROR);
cleanup_paths(&[input_path, output_path, config_path, db_path]);
Ok(())
}
#[tokio::test]
async fn enqueue_job_endpoint_accepts_supported_absolute_files()
-> std::result::Result<(), Box<dyn std::error::Error>> {
let (state, app, config_path, db_path) = build_test_app(false, 8, |_| {}).await?;
let token = create_session(state.db.as_ref()).await?;
let input_path = temp_path("alchemist_enqueue_input", "mkv");
std::fs::write(&input_path, b"test")?;
let canonical_input = std::fs::canonicalize(&input_path)?;
let response = app
.clone()
.oneshot(auth_json_request(
Method::POST,
"/api/jobs/enqueue",
&token,
json!({ "path": input_path.to_string_lossy() }),
))
.await?;
assert_eq!(response.status(), StatusCode::OK);
let payload: serde_json::Value = serde_json::from_str(&body_text(response).await)?;
assert_eq!(payload["enqueued"], true);
assert!(
state
.db
.get_job_by_input_path(canonical_input.to_string_lossy().as_ref())
.await?
.is_some()
);
cleanup_paths(&[input_path, config_path, db_path]);
Ok(())
}
#[tokio::test]
async fn enqueue_job_endpoint_rejects_relative_paths_and_unsupported_extensions()
-> std::result::Result<(), Box<dyn std::error::Error>> {
let (_state, app, config_path, db_path) = build_test_app(false, 8, |_| {}).await?;
let token = create_session(_state.db.as_ref()).await?;
let response = app
.clone()
.oneshot(auth_json_request(
Method::POST,
"/api/jobs/enqueue",
&token,
json!({ "path": "relative/movie.mkv" }),
))
.await?;
assert_eq!(response.status(), StatusCode::BAD_REQUEST);
let unsupported = temp_path("alchemist_enqueue_unsupported", "txt");
std::fs::write(&unsupported, b"test")?;
let response = app
.clone()
.oneshot(auth_json_request(
Method::POST,
"/api/jobs/enqueue",
&token,
json!({ "path": unsupported.to_string_lossy() }),
))
.await?;
assert_eq!(response.status(), StatusCode::BAD_REQUEST);
cleanup_paths(&[unsupported, config_path, db_path]);
Ok(())
}
#[tokio::test]
async fn enqueue_job_endpoint_returns_noop_for_generated_output_paths()
-> std::result::Result<(), Box<dyn std::error::Error>> {
let (_state, app, config_path, db_path) = build_test_app(false, 8, |_| {}).await?;
let token = create_session(_state.db.as_ref()).await?;
let generated_dir = temp_path("alchemist_enqueue_generated_dir", "dir");
std::fs::create_dir_all(&generated_dir)?;
let generated = generated_dir.join("movie-alchemist.mkv");
std::fs::write(&generated, b"test")?;
let response = app
.clone()
.oneshot(auth_json_request(
Method::POST,
"/api/jobs/enqueue",
&token,
json!({ "path": generated.to_string_lossy() }),
))
.await?;
assert_eq!(response.status(), StatusCode::OK);
let payload: serde_json::Value = serde_json::from_str(&body_text(response).await)?;
assert_eq!(payload["enqueued"], false);
cleanup_paths(&[generated_dir, config_path, db_path]);
Ok(())
}
#[tokio::test]
async fn delete_job_endpoint_purges_resume_session_temp_dir()
-> std::result::Result<(), Box<dyn std::error::Error>> {
let (state, app, config_path, db_path) = build_test_app(false, 8, |_| {}).await?;
let token = create_session(state.db.as_ref()).await?;
let (job, input_path, output_path) = seed_job(state.db.as_ref(), JobState::Failed).await?;
let resume_dir = temp_path("alchemist_resume_delete", "dir");
std::fs::create_dir_all(&resume_dir)?;
std::fs::write(resume_dir.join("segment-00000.mkv"), b"segment")?;
state
.db
.upsert_resume_session(&crate::db::UpsertJobResumeSessionInput {
job_id: job.id,
strategy: "segment_v1".to_string(),
plan_hash: "plan".to_string(),
mtime_hash: "mtime".to_string(),
temp_dir: resume_dir.to_string_lossy().to_string(),
concat_manifest_path: resume_dir
.join("segments.ffconcat")
.to_string_lossy()
.to_string(),
segment_length_secs: 120,
status: "active".to_string(),
})
.await?;
let response = app
.clone()
.oneshot(auth_request(
Method::POST,
&format!("/api/jobs/{}/delete", job.id),
&token,
Body::empty(),
))
.await?;
assert_eq!(response.status(), StatusCode::OK);
assert!(state.db.get_resume_session(job.id).await?.is_none());
assert!(!resume_dir.exists());
cleanup_paths(&[resume_dir, input_path, output_path, config_path, db_path]);
Ok(())
}
#[tokio::test]
async fn clear_completed_purges_resume_sessions()
-> std::result::Result<(), Box<dyn std::error::Error>> {
let (state, app, config_path, db_path) = build_test_app(false, 8, |_| {}).await?;
let token = create_session(state.db.as_ref()).await?;
let (job, input_path, output_path) = seed_job(state.db.as_ref(), JobState::Completed).await?;
let resume_dir = temp_path("alchemist_resume_clear_completed", "dir");
std::fs::create_dir_all(&resume_dir)?;
std::fs::write(resume_dir.join("segment-00000.mkv"), b"segment")?;
state
.db
.upsert_resume_session(&crate::db::UpsertJobResumeSessionInput {
job_id: job.id,
strategy: "segment_v1".to_string(),
plan_hash: "plan".to_string(),
mtime_hash: "mtime".to_string(),
temp_dir: resume_dir.to_string_lossy().to_string(),
concat_manifest_path: resume_dir
.join("segments.ffconcat")
.to_string_lossy()
.to_string(),
segment_length_secs: 120,
status: "segments_complete".to_string(),
})
.await?;
let response = app
.clone()
.oneshot(auth_request(
Method::POST,
"/api/jobs/clear-completed",
&token,
Body::empty(),
))
.await?;
assert_eq!(response.status(), StatusCode::OK);
assert!(state.db.get_resume_session(job.id).await?.is_none());
assert!(!resume_dir.exists());
cleanup_paths(&[resume_dir, input_path, output_path, config_path, db_path]);
Ok(())
}
#[tokio::test] #[tokio::test]
async fn delete_active_job_returns_conflict() -> std::result::Result<(), Box<dyn std::error::Error>> async fn delete_active_job_returns_conflict() -> std::result::Result<(), Box<dyn std::error::Error>>
{ {

View File

@@ -49,6 +49,12 @@ async fn v0_2_5_fixture_upgrades_and_preserves_core_state() -> Result<()> {
let notifications = db.get_notification_targets().await?; let notifications = db.get_notification_targets().await?;
assert_eq!(notifications.len(), 1); assert_eq!(notifications.len(), 1);
assert_eq!(notifications[0].target_type, "discord_webhook"); assert_eq!(notifications[0].target_type, "discord_webhook");
let notification_config: serde_json::Value =
serde_json::from_str(&notifications[0].config_json)?;
assert_eq!(
notification_config["webhook_url"].as_str(),
Some("https://discord.invalid/webhook")
);
let schedule_windows = db.get_schedule_windows().await?; let schedule_windows = db.get_schedule_windows().await?;
assert_eq!(schedule_windows.len(), 1); assert_eq!(schedule_windows.len(), 1);
@@ -101,7 +107,7 @@ async fn v0_2_5_fixture_upgrades_and_preserves_core_state() -> Result<()> {
.fetch_one(&pool) .fetch_one(&pool)
.await? .await?
.get("value"); .get("value");
assert_eq!(schema_version, "8"); assert_eq!(schema_version, "9");
let min_compatible_version: String = let min_compatible_version: String =
sqlx::query("SELECT value FROM schema_info WHERE key = 'min_compatible_version'") sqlx::query("SELECT value FROM schema_info WHERE key = 'min_compatible_version'")
@@ -153,6 +159,45 @@ async fn v0_2_5_fixture_upgrades_and_preserves_core_state() -> Result<()> {
.get("count"); .get("count");
assert_eq!(job_failure_explanations_exists, 1); assert_eq!(job_failure_explanations_exists, 1);
let notification_columns = sqlx::query("PRAGMA table_info(notification_targets)")
.fetch_all(&pool)
.await?
.into_iter()
.map(|row| row.get::<String, _>("name"))
.collect::<Vec<_>>();
assert!(
notification_columns
.iter()
.any(|name| name == "endpoint_url")
);
assert!(notification_columns.iter().any(|name| name == "auth_token"));
assert!(
notification_columns
.iter()
.any(|name| name == "target_type_v2")
);
assert!(
notification_columns
.iter()
.any(|name| name == "config_json")
);
let resume_sessions_exists: i64 = sqlx::query(
"SELECT COUNT(*) as count FROM sqlite_master WHERE type = 'table' AND name = 'job_resume_sessions'",
)
.fetch_one(&pool)
.await?
.get("count");
assert_eq!(resume_sessions_exists, 1);
let resume_segments_exists: i64 = sqlx::query(
"SELECT COUNT(*) as count FROM sqlite_master WHERE type = 'table' AND name = 'job_resume_segments'",
)
.fetch_one(&pool)
.await?
.get("count");
assert_eq!(resume_segments_exists, 1);
pool.close().await; pool.close().await;
drop(db); drop(db);
let _ = fs::remove_file(&db_path); let _ = fs::remove_file(&db_path);

View File

@@ -43,6 +43,20 @@ fn ffmpeg_ready() -> bool {
ffmpeg_available() && ffprobe_available() ffmpeg_available() && ffprobe_available()
} }
fn ffmpeg_has_encoder(name: &str) -> bool {
Command::new("ffmpeg")
.args(["-hide_banner", "-encoders"])
.output()
.ok()
.map(|output| {
output.status.success()
&& String::from_utf8_lossy(&output.stdout)
.lines()
.any(|line| line.contains(name))
})
.unwrap_or(false)
}
/// Get the path to test fixtures /// Get the path to test fixtures
fn fixtures_path() -> PathBuf { fn fixtures_path() -> PathBuf {
let mut path = PathBuf::from(env!("CARGO_MANIFEST_DIR")); let mut path = PathBuf::from(env!("CARGO_MANIFEST_DIR"));
@@ -68,6 +82,75 @@ fn cleanup_temp_dir(path: &Path) {
let _ = std::fs::remove_dir_all(path); let _ = std::fs::remove_dir_all(path);
} }
#[tokio::test]
async fn amd_vaapi_smoke_test_is_hardware_gated() -> Result<()> {
let Some(device_path) = std::env::var("ALCHEMIST_TEST_AMD_VAAPI_DEVICE").ok() else {
println!("Skipping test: ALCHEMIST_TEST_AMD_VAAPI_DEVICE not set");
return Ok(());
};
if !ffmpeg_available() || !ffmpeg_has_encoder("h264_vaapi") {
println!("Skipping test: ffmpeg or h264_vaapi encoder not available");
return Ok(());
}
let status = Command::new("ffmpeg")
.args([
"-hide_banner",
"-loglevel",
"error",
"-vaapi_device",
&device_path,
"-f",
"lavfi",
"-i",
"testsrc=size=64x64:rate=1:d=1",
"-vf",
"format=nv12,hwupload",
"-c:v",
"h264_vaapi",
"-f",
"null",
"-",
])
.status()?;
assert!(
status.success(),
"expected VAAPI smoke transcode to succeed"
);
Ok(())
}
#[tokio::test]
async fn amd_amf_smoke_test_is_hardware_gated() -> Result<()> {
if std::env::var("ALCHEMIST_TEST_AMD_AMF").ok().as_deref() != Some("1") {
println!("Skipping test: ALCHEMIST_TEST_AMD_AMF not set");
return Ok(());
}
if !ffmpeg_available() || !ffmpeg_has_encoder("h264_amf") {
println!("Skipping test: ffmpeg or h264_amf encoder not available");
return Ok(());
}
let status = Command::new("ffmpeg")
.args([
"-hide_banner",
"-loglevel",
"error",
"-f",
"lavfi",
"-i",
"testsrc=size=64x64:rate=1:d=1",
"-c:v",
"h264_amf",
"-f",
"null",
"-",
])
.status()?;
assert!(status.success(), "expected AMF smoke transcode to succeed");
Ok(())
}
/// Create a test database /// Create a test database
async fn create_test_db() -> Result<(Arc<Db>, PathBuf)> { async fn create_test_db() -> Result<(Arc<Db>, PathBuf)> {
let mut db_path = std::env::temp_dir(); let mut db_path = std::env::temp_dir();

View File

@@ -1,6 +1,6 @@
{ {
"name": "alchemist-web-e2e", "name": "alchemist-web-e2e",
"version": "0.3.1-rc.4", "version": "0.3.1-rc.5",
"private": true, "private": true,
"packageManager": "bun@1", "packageManager": "bun@1",
"type": "module", "type": "module",
@@ -8,7 +8,7 @@
"test": "playwright test", "test": "playwright test",
"test:headed": "playwright test --headed", "test:headed": "playwright test --headed",
"test:ui": "playwright test --ui", "test:ui": "playwright test --ui",
"test:reliability": "playwright test tests/settings-nonok.spec.ts tests/setup-recovery.spec.ts tests/setup-happy-path.spec.ts tests/new-user-redirect.spec.ts tests/stats-poller.spec.ts tests/jobs-actions-nonok.spec.ts tests/jobs-stability.spec.ts tests/library-intake-stability.spec.ts" "test:reliability": "playwright test tests/settings-nonok.spec.ts tests/setup-recovery.spec.ts tests/setup-happy-path.spec.ts tests/new-user-redirect.spec.ts tests/stats-poller.spec.ts tests/jobs-actions-nonok.spec.ts tests/jobs-stability.spec.ts tests/library-intake-stability.spec.ts tests/intelligence-actions.spec.ts"
}, },
"devDependencies": { "devDependencies": {
"@playwright/test": "^1.54.2" "@playwright/test": "^1.54.2"

View File

@@ -37,10 +37,10 @@ export default defineConfig({
], ],
webServer: { webServer: {
command: command:
"sh -c 'mkdir -p .runtime/media && cd .. && (cd web && bun install --frozen-lockfile && bun run build) && if [ -x ./target/debug/alchemist ]; then ./target/debug/alchemist --reset-auth; else cargo run --locked --no-default-features -- --reset-auth; fi'", "sh -c 'mkdir -p .runtime/media && rm -f .runtime/alchemist.db .runtime/alchemist.db-wal .runtime/alchemist.db-shm && cd .. && (cd web && bun install --frozen-lockfile && bun run build) && if [ -x ./target/debug/alchemist ]; then ./target/debug/alchemist --reset-auth; else cargo run --locked --no-default-features -- --reset-auth; fi'",
url: `${BASE_URL}/api/health`, url: `${BASE_URL}/api/health`,
reuseExistingServer: false, reuseExistingServer: false,
timeout: 120_000, timeout: 300_000,
env: { env: {
ALCHEMIST_CONFIG_PATH: CONFIG_PATH, ALCHEMIST_CONFIG_PATH: CONFIG_PATH,
ALCHEMIST_DB_PATH: DB_PATH, ALCHEMIST_DB_PATH: DB_PATH,

View File

@@ -0,0 +1,118 @@
import { expect, test } from "@playwright/test";
import {
type JobDetailFixture,
fulfillJson,
mockEngineStatus,
mockJobDetails,
} from "./helpers";
const completedDetail: JobDetailFixture = {
job: {
id: 51,
input_path: "/media/duplicates/movie-copy-1.mkv",
output_path: "/output/movie-copy-1-av1.mkv",
status: "completed",
priority: 0,
progress: 100,
created_at: "2025-01-01T00:00:00Z",
updated_at: "2025-01-02T00:00:00Z",
vmaf_score: 95.1,
},
metadata: {
duration_secs: 120,
codec_name: "hevc",
width: 1920,
height: 1080,
bit_depth: 10,
size_bytes: 2_000_000_000,
video_bitrate_bps: 12_000_000,
container_bitrate_bps: 12_500_000,
fps: 24,
container: "mkv",
audio_codec: "aac",
audio_channels: 2,
dynamic_range: "hdr10",
},
encode_stats: {
input_size_bytes: 2_000_000_000,
output_size_bytes: 900_000_000,
compression_ratio: 0.55,
encode_time_seconds: 1800,
encode_speed: 1.6,
avg_bitrate_kbps: 6000,
vmaf_score: 95.1,
},
job_logs: [],
};
test.use({ storageState: undefined });
test.beforeEach(async ({ page }) => {
await mockEngineStatus(page);
});
test("intelligence actions queue remux opportunities and review duplicate jobs", async ({
page,
}) => {
let enqueueCount = 0;
await page.route("**/api/library/intelligence", async (route) => {
await fulfillJson(route, 200, {
duplicate_groups: [
{
stem: "movie-copy",
count: 2,
paths: [
{ id: 51, path: "/media/duplicates/movie-copy-1.mkv", status: "completed" },
{ id: 52, path: "/media/duplicates/movie-copy-2.mkv", status: "queued" },
],
},
],
total_duplicates: 1,
recommendation_counts: {
duplicates: 1,
remux_only_candidate: 2,
wasteful_audio_layout: 0,
commentary_cleanup_candidate: 0,
},
recommendations: [
{
type: "remux_only_candidate",
title: "Remux movie one",
summary: "The file can be normalized with a container-only remux.",
path: "/media/remux/movie-one.mkv",
suggested_action: "Queue a remux to normalize the container without re-encoding the video stream.",
},
{
type: "remux_only_candidate",
title: "Remux movie two",
summary: "The file can be normalized with a container-only remux.",
path: "/media/remux/movie-two.mkv",
suggested_action: "Queue a remux to normalize the container without re-encoding the video stream.",
},
],
});
});
await page.route("**/api/jobs/enqueue", async (route) => {
enqueueCount += 1;
const body = route.request().postDataJSON() as { path: string };
await fulfillJson(route, 200, {
enqueued: true,
message: `Enqueued ${body.path}.`,
});
});
await mockJobDetails(page, { 51: completedDetail });
await page.goto("/intelligence");
await page.getByRole("button", { name: "Queue all" }).click();
await expect.poll(() => enqueueCount).toBe(2);
await expect(
page.getByText("Queue all finished: 2 enqueued, 0 skipped, 0 failed.").first(),
).toBeVisible();
await page.getByRole("button", { name: "Review" }).first().click();
await expect(page.getByRole("dialog")).toBeVisible();
await expect(page.getByText("Encode Results")).toBeVisible();
await expect(page.getByRole("dialog").getByText("/media/duplicates/movie-copy-1.mkv")).toBeVisible();
});

View File

@@ -19,6 +19,17 @@ const completedJob: JobFixture = {
vmaf_score: 95.4, vmaf_score: 95.4,
}; };
const queuedJob: JobFixture = {
id: 44,
input_path: "/media/queued-blocked.mkv",
output_path: "/output/queued-blocked-av1.mkv",
status: "queued",
priority: 0,
progress: 0,
created_at: "2025-01-01T00:00:00Z",
updated_at: "2025-01-02T00:00:00Z",
};
const completedDetail: JobDetailFixture = { const completedDetail: JobDetailFixture = {
job: completedJob, job: completedJob,
metadata: { metadata: {
@@ -183,3 +194,57 @@ test("failed job detail prefers structured failure explanation", async ({ page }
await expect(page.getByText("Structured failure detail from the backend.")).toBeVisible(); await expect(page.getByText("Structured failure detail from the backend.")).toBeVisible();
await expect(page.getByText("Structured failure guidance from the backend.")).toBeVisible(); await expect(page.getByText("Structured failure guidance from the backend.")).toBeVisible();
}); });
// Verifies that opening a queued job's detail modal surfaces both its queue
// position and the processor's blocked-reason message fetched from
// /api/processor/status.
test("queued job detail shows the processor blocked reason", async ({ page }) => {
  const processorStatus = {
    blocked_reason: "workers_busy",
    message: "All worker slots are currently busy.",
    manual_paused: false,
    scheduler_paused: false,
    draining: false,
    active_jobs: 1,
    concurrent_limit: 1,
  };

  // Register all network mocks before navigation so the first load is served.
  await page.route("**/api/jobs/table**", (route) => fulfillJson(route, 200, [queuedJob]));
  await page.route("**/api/processor/status", (route) => fulfillJson(route, 200, processorStatus));
  await mockJobDetails(page, {
    44: { job: queuedJob, job_logs: [], queue_position: 3 },
  });

  await page.goto("/jobs");
  await page.getByTitle("/media/queued-blocked.mkv").click();

  await expect(page.getByText("Queue position:")).toBeVisible();
  await expect(page.getByText("Blocked:")).toBeVisible();
  await expect(page.getByText("All worker slots are currently busy.")).toBeVisible();
});
// Covers the manual "Add file" flow on the jobs page: the dialog posts the
// typed absolute path to /api/jobs/enqueue and the backend's response
// message is surfaced to the user.
test("add file submits the enqueue request and surfaces the response", async ({ page }) => {
  let postedPath = "";

  await page.route("**/api/jobs/table**", (route) => fulfillJson(route, 200, []));
  await page.route("**/api/jobs/enqueue", async (route) => {
    const { path } = route.request().postDataJSON() as { path: string };
    postedPath = path;
    await fulfillJson(route, 200, { enqueued: true, message: `Enqueued ${path}.` });
  });

  await page.goto("/jobs");
  await page.getByRole("button", { name: "Add file" }).click();
  await page.getByPlaceholder("/Volumes/Media/Movies/example.mkv").fill("/media/manual-add.mkv");
  await page.getByRole("dialog").getByRole("button", { name: "Add File", exact: true }).click();

  // Poll because the captured value is only set once the mocked route fires.
  await expect.poll(() => postedPath).toBe("/media/manual-add.mkv");
  await expect(page.getByText("Enqueued /media/manual-add.mkv.").first()).toBeVisible();
});

View File

@@ -1,6 +1,6 @@
{ {
"name": "alchemist-web", "name": "alchemist-web",
"version": "0.3.1-rc.4", "version": "0.3.1-rc.5",
"private": true, "private": true,
"packageManager": "bun@1", "packageManager": "bun@1",
"type": "module", "type": "module",

View File

@@ -5,55 +5,16 @@ import { apiAction, apiJson, isApiError } from "../lib/api";
import { useDebouncedValue } from "../lib/useDebouncedValue"; import { useDebouncedValue } from "../lib/useDebouncedValue";
import { showToast } from "../lib/toast"; import { showToast } from "../lib/toast";
import ConfirmDialog from "./ui/ConfirmDialog"; import ConfirmDialog from "./ui/ConfirmDialog";
import { clsx, type ClassValue } from "clsx";
import { twMerge } from "tailwind-merge";
import { withErrorBoundary } from "./ErrorBoundary"; import { withErrorBoundary } from "./ErrorBoundary";
import type { Job, JobDetail, TabType, SortField, ConfirmConfig, CountMessageResponse } from "./jobs/types"; import type { Job, TabType, SortField, CountMessageResponse } from "./jobs/types";
import { SORT_OPTIONS, isJobActive, jobDetailEmptyState } from "./jobs/types"; import { isJobActive } from "./jobs/types";
import { normalizeDecisionExplanation, normalizeFailureExplanation } from "./jobs/JobExplanations";
import { useJobSSE } from "./jobs/useJobSSE"; import { useJobSSE } from "./jobs/useJobSSE";
import { JobsToolbar } from "./jobs/JobsToolbar"; import { JobsToolbar } from "./jobs/JobsToolbar";
import { JobsTable } from "./jobs/JobsTable"; import { JobsTable } from "./jobs/JobsTable";
import { JobDetailModal } from "./jobs/JobDetailModal"; import { JobDetailModal } from "./jobs/JobDetailModal";
import { EnqueuePathDialog } from "./jobs/EnqueuePathDialog";
function cn(...inputs: ClassValue[]) { import { getStatusBadge } from "./jobs/jobStatusBadge";
return twMerge(clsx(inputs)); import { useJobDetailController } from "./jobs/useJobDetailController";
}
function focusableElements(root: HTMLElement): HTMLElement[] {
const selector = [
"a[href]",
"button:not([disabled])",
"input:not([disabled])",
"select:not([disabled])",
"textarea:not([disabled])",
"[tabindex]:not([tabindex='-1'])",
].join(",");
return Array.from(root.querySelectorAll<HTMLElement>(selector)).filter(
(element) => !element.hasAttribute("disabled")
);
}
function getStatusBadge(status: string) {
const styles: Record<string, string> = {
queued: "bg-helios-slate/10 text-helios-slate border-helios-slate/20",
analyzing: "bg-blue-500/10 text-blue-500 border-blue-500/20",
encoding: "bg-helios-solar/10 text-helios-solar border-helios-solar/20 animate-pulse",
remuxing: "bg-helios-solar/10 text-helios-solar border-helios-solar/20 animate-pulse",
completed: "bg-green-500/10 text-green-500 border-green-500/20",
failed: "bg-red-500/10 text-red-500 border-red-500/20",
cancelled: "bg-red-500/10 text-red-500 border-red-500/20",
skipped: "bg-gray-500/10 text-gray-500 border-gray-500/20",
archived: "bg-zinc-500/10 text-zinc-400 border-zinc-500/20",
resuming: "bg-helios-solar/10 text-helios-solar border-helios-solar/20 animate-pulse",
};
return (
<span className={cn("px-2.5 py-1 rounded-md text-xs font-medium border capitalize", styles[status] || styles.queued)}>
{status}
</span>
);
}
function JobManager() { function JobManager() {
const [jobs, setJobs] = useState<Job[]>([]); const [jobs, setJobs] = useState<Job[]>([]);
@@ -67,18 +28,17 @@ function JobManager() {
const [sortBy, setSortBy] = useState<SortField>("updated_at"); const [sortBy, setSortBy] = useState<SortField>("updated_at");
const [sortDesc, setSortDesc] = useState(true); const [sortDesc, setSortDesc] = useState(true);
const [refreshing, setRefreshing] = useState(false); const [refreshing, setRefreshing] = useState(false);
const [focusedJob, setFocusedJob] = useState<JobDetail | null>(null);
const [detailLoading, setDetailLoading] = useState(false);
const [actionError, setActionError] = useState<string | null>(null); const [actionError, setActionError] = useState<string | null>(null);
const [menuJobId, setMenuJobId] = useState<number | null>(null); const [menuJobId, setMenuJobId] = useState<number | null>(null);
const [enqueueDialogOpen, setEnqueueDialogOpen] = useState(false);
const [enqueuePath, setEnqueuePath] = useState("");
const [enqueueSubmitting, setEnqueueSubmitting] = useState(false);
const menuRef = useRef<HTMLDivElement | null>(null); const menuRef = useRef<HTMLDivElement | null>(null);
const detailDialogRef = useRef<HTMLDivElement | null>(null);
const detailLastFocusedRef = useRef<HTMLElement | null>(null);
const compactSearchRef = useRef<HTMLDivElement | null>(null); const compactSearchRef = useRef<HTMLDivElement | null>(null);
const compactSearchInputRef = useRef<HTMLInputElement | null>(null); const compactSearchInputRef = useRef<HTMLInputElement | null>(null);
const confirmOpenRef = useRef(false);
const encodeStartTimes = useRef<Map<number, number>>(new Map()); const encodeStartTimes = useRef<Map<number, number>>(new Map());
const [confirmState, setConfirmState] = useState<ConfirmConfig | null>(null); const focusedJobIdRef = useRef<number | null>(null);
const refreshFocusedJobRef = useRef<() => Promise<void>>(async () => undefined);
const [tick, setTick] = useState(0); const [tick, setTick] = useState(0);
useEffect(() => { useEffect(() => {
@@ -233,7 +193,51 @@ function JobManager() {
}; };
}, []); }, []);
useJobSSE({ setJobs, setFocusedJob, fetchJobsRef, encodeStartTimes }); const {
focusedJob,
setFocusedJob,
detailLoading,
confirmState,
detailDialogRef,
openJobDetails,
handleAction,
handlePriority,
openConfirm,
setConfirmState,
closeJobDetails,
focusedDecision,
focusedFailure,
focusedJobLogs,
shouldShowFfmpegOutput,
completedEncodeStats,
focusedEmptyState,
} = useJobDetailController({
onRefresh: async () => {
await fetchJobs();
},
});
useEffect(() => {
focusedJobIdRef.current = focusedJob?.job.id ?? null;
}, [focusedJob?.job.id]);
useEffect(() => {
refreshFocusedJobRef.current = async () => {
const jobId = focusedJobIdRef.current;
if (jobId !== null) {
await openJobDetails(jobId);
}
};
}, [openJobDetails]);
useJobSSE({
setJobs,
setFocusedJob,
fetchJobsRef,
focusedJobIdRef,
refreshFocusedJobRef,
encodeStartTimes,
});
useEffect(() => { useEffect(() => {
const encodingJobIds = new Set<number>(); const encodingJobIds = new Set<number>();
@@ -268,76 +272,6 @@ function JobManager() {
return () => document.removeEventListener("mousedown", handleClick); return () => document.removeEventListener("mousedown", handleClick);
}, [menuJobId]); }, [menuJobId]);
useEffect(() => {
confirmOpenRef.current = confirmState !== null;
}, [confirmState]);
useEffect(() => {
if (!focusedJob) {
return;
}
detailLastFocusedRef.current = document.activeElement as HTMLElement | null;
const root = detailDialogRef.current;
if (root) {
const focusables = focusableElements(root);
if (focusables.length > 0) {
focusables[0].focus();
} else {
root.focus();
}
}
const onKeyDown = (event: KeyboardEvent) => {
if (!focusedJob || confirmOpenRef.current) {
return;
}
if (event.key === "Escape") {
event.preventDefault();
setFocusedJob(null);
return;
}
if (event.key !== "Tab") {
return;
}
const dialogRoot = detailDialogRef.current;
if (!dialogRoot) {
return;
}
const focusables = focusableElements(dialogRoot);
if (focusables.length === 0) {
event.preventDefault();
dialogRoot.focus();
return;
}
const first = focusables[0];
const last = focusables[focusables.length - 1];
const current = document.activeElement as HTMLElement | null;
if (event.shiftKey && current === first) {
event.preventDefault();
last.focus();
} else if (!event.shiftKey && current === last) {
event.preventDefault();
first.focus();
}
};
document.addEventListener("keydown", onKeyDown);
return () => {
document.removeEventListener("keydown", onKeyDown);
if (detailLastFocusedRef.current) {
detailLastFocusedRef.current.focus();
}
};
}, [focusedJob]);
const toggleSelect = (id: number) => { const toggleSelect = (id: number) => {
const newSet = new Set(selected); const newSet = new Set(selected);
if (newSet.has(id)) newSet.delete(id); if (newSet.has(id)) newSet.delete(id);
@@ -407,96 +341,31 @@ function JobManager() {
} }
}; };
const fetchJobDetails = async (id: number) => { const handleEnqueuePath = async () => {
setActionError(null); setActionError(null);
setDetailLoading(true); setEnqueueSubmitting(true);
try { try {
const data = await apiJson<JobDetail>(`/api/jobs/${id}/details`); const payload = await apiJson<{ enqueued: boolean; message: string }>("/api/jobs/enqueue", {
setFocusedJob(data); method: "POST",
} catch (e) { body: JSON.stringify({ path: enqueuePath }),
const message = isApiError(e) ? e.message : "Failed to fetch job details"; });
showToast({
kind: payload.enqueued ? "success" : "info",
title: "Jobs",
message: payload.message,
});
setEnqueueDialogOpen(false);
setEnqueuePath("");
await fetchJobs();
} catch (error) {
const message = isApiError(error) ? error.message : "Failed to enqueue file";
setActionError(message); setActionError(message);
showToast({ kind: "error", title: "Jobs", message }); showToast({ kind: "error", title: "Jobs", message });
} finally { } finally {
setDetailLoading(false); setEnqueueSubmitting(false);
} }
}; };
const handleAction = async (id: number, action: "cancel" | "restart" | "delete") => {
setActionError(null);
try {
await apiAction(`/api/jobs/${id}/${action}`, { method: "POST" });
if (action === "delete") {
setFocusedJob((current) => (current?.job.id === id ? null : current));
} else if (focusedJob?.job.id === id) {
await fetchJobDetails(id);
}
await fetchJobs();
showToast({
kind: "success",
title: "Jobs",
message: `Job ${action} request completed.`,
});
} catch (e) {
const message = formatJobActionError(e, `Job ${action} failed`);
setActionError(message);
showToast({ kind: "error", title: "Jobs", message });
}
};
const handlePriority = async (job: Job, priority: number, label: string) => {
setActionError(null);
try {
await apiAction(`/api/jobs/${job.id}/priority`, {
method: "POST",
body: JSON.stringify({ priority }),
});
if (focusedJob?.job.id === job.id) {
setFocusedJob({
...focusedJob,
job: {
...focusedJob.job,
priority,
},
});
}
await fetchJobs();
showToast({ kind: "success", title: "Jobs", message: `${label} for job #${job.id}.` });
} catch (e) {
const message = formatJobActionError(e, "Failed to update priority");
setActionError(message);
showToast({ kind: "error", title: "Jobs", message });
}
};
const openConfirm = (config: ConfirmConfig) => {
setConfirmState(config);
};
const focusedDecision = focusedJob
? normalizeDecisionExplanation(
focusedJob.decision_explanation ?? focusedJob.job.decision_explanation,
focusedJob.job.decision_reason,
)
: null;
const focusedFailure = focusedJob
? normalizeFailureExplanation(
focusedJob.failure_explanation,
focusedJob.job_failure_summary,
focusedJob.job_logs,
)
: null;
const focusedJobLogs = focusedJob?.job_logs ?? [];
const shouldShowFfmpegOutput = focusedJob
? ["failed", "completed", "skipped"].includes(focusedJob.job.status) && focusedJobLogs.length > 0
: false;
const completedEncodeStats = focusedJob?.job.status === "completed"
? focusedJob.encode_stats
: null;
const focusedEmptyState = focusedJob
? jobDetailEmptyState(focusedJob.job.status)
: null;
return ( return (
<div className="space-y-6 relative"> <div className="space-y-6 relative">
<div className="flex items-center gap-4 px-1 text-xs text-helios-slate"> <div className="flex items-center gap-4 px-1 text-xs text-helios-slate">
@@ -530,6 +399,7 @@ function JobManager() {
setSortDesc={setSortDesc} setSortDesc={setSortDesc}
refreshing={refreshing} refreshing={refreshing}
fetchJobs={fetchJobs} fetchJobs={fetchJobs}
openEnqueueDialog={() => setEnqueueDialogOpen(true)}
/> />
{actionError && ( {actionError && (
@@ -613,7 +483,7 @@ function JobManager() {
menuRef={menuRef} menuRef={menuRef}
toggleSelect={toggleSelect} toggleSelect={toggleSelect}
toggleSelectAll={toggleSelectAll} toggleSelectAll={toggleSelectAll}
fetchJobDetails={fetchJobDetails} fetchJobDetails={openJobDetails}
setMenuJobId={setMenuJobId} setMenuJobId={setMenuJobId}
openConfirm={openConfirm} openConfirm={openConfirm}
handleAction={handleAction} handleAction={handleAction}
@@ -646,7 +516,7 @@ function JobManager() {
focusedJob={focusedJob} focusedJob={focusedJob}
detailDialogRef={detailDialogRef} detailDialogRef={detailDialogRef}
detailLoading={detailLoading} detailLoading={detailLoading}
onClose={() => setFocusedJob(null)} onClose={closeJobDetails}
focusedDecision={focusedDecision} focusedDecision={focusedDecision}
focusedFailure={focusedFailure} focusedFailure={focusedFailure}
focusedJobLogs={focusedJobLogs} focusedJobLogs={focusedJobLogs}
@@ -661,6 +531,22 @@ function JobManager() {
document.body document.body
)} )}
{typeof document !== "undefined" && createPortal(
<EnqueuePathDialog
open={enqueueDialogOpen}
path={enqueuePath}
submitting={enqueueSubmitting}
onPathChange={setEnqueuePath}
onClose={() => {
if (!enqueueSubmitting) {
setEnqueueDialogOpen(false);
}
}}
onSubmit={handleEnqueuePath}
/>,
document.body,
)}
<ConfirmDialog <ConfirmDialog
open={confirmState !== null} open={confirmState !== null}
title={confirmState?.title ?? ""} title={confirmState?.title ?? ""}

View File

@@ -1,7 +1,12 @@
import { useEffect, useState } from "react"; import { useCallback, useEffect, useMemo, useState } from "react";
import { AlertTriangle, Copy, Sparkles } from "lucide-react"; import { createPortal } from "react-dom";
import { AlertTriangle, Copy, Sparkles, Zap, Search } from "lucide-react";
import { apiJson, isApiError } from "../lib/api"; import { apiJson, isApiError } from "../lib/api";
import { showToast } from "../lib/toast"; import { showToast } from "../lib/toast";
import ConfirmDialog from "./ui/ConfirmDialog";
import { JobDetailModal } from "./jobs/JobDetailModal";
import { getStatusBadge } from "./jobs/jobStatusBadge";
import { useJobDetailController } from "./jobs/useJobDetailController";
interface DuplicatePath { interface DuplicatePath {
id: number; id: number;
@@ -58,36 +63,98 @@ export default function LibraryIntelligence() {
const [data, setData] = useState<IntelligenceResponse | null>(null); const [data, setData] = useState<IntelligenceResponse | null>(null);
const [loading, setLoading] = useState(true); const [loading, setLoading] = useState(true);
const [error, setError] = useState<string | null>(null); const [error, setError] = useState<string | null>(null);
const [queueingRemux, setQueueingRemux] = useState(false);
useEffect(() => { const fetchIntelligence = useCallback(async () => {
const fetch = async () => { try {
try { const result = await apiJson<IntelligenceResponse>("/api/library/intelligence");
const result = await apiJson<IntelligenceResponse>("/api/library/intelligence"); setData(result);
setData(result); setError(null);
} catch (e) { } catch (e) {
const message = isApiError(e) ? e.message : "Failed to load intelligence data."; const message = isApiError(e) ? e.message : "Failed to load intelligence data.";
setError(message); setError(message);
showToast({ showToast({
kind: "error", kind: "error",
title: "Intelligence", title: "Intelligence",
message, message,
}); });
} finally { } finally {
setLoading(false); setLoading(false);
} }
};
void fetch();
}, []); }, []);
const groupedRecommendations = data?.recommendations.reduce<Record<string, IntelligenceRecommendation[]>>( const {
(groups, recommendation) => { focusedJob,
groups[recommendation.type] ??= []; detailLoading,
groups[recommendation.type].push(recommendation); confirmState,
return groups; detailDialogRef,
}, openJobDetails,
{}, handleAction,
) ?? {}; handlePriority,
openConfirm,
setConfirmState,
closeJobDetails,
focusedDecision,
focusedFailure,
focusedJobLogs,
shouldShowFfmpegOutput,
completedEncodeStats,
focusedEmptyState,
} = useJobDetailController({
onRefresh: fetchIntelligence,
});
useEffect(() => {
void fetchIntelligence();
}, [fetchIntelligence]);
const groupedRecommendations = useMemo(
() => data?.recommendations.reduce<Record<string, IntelligenceRecommendation[]>>(
(groups, recommendation) => {
groups[recommendation.type] ??= [];
groups[recommendation.type].push(recommendation);
return groups;
},
{},
) ?? {},
[data],
);
const handleQueueAllRemux = async () => {
const remuxPaths = groupedRecommendations.remux_only_candidate ?? [];
if (remuxPaths.length === 0) {
return;
}
setQueueingRemux(true);
let enqueued = 0;
let skipped = 0;
let failed = 0;
for (const recommendation of remuxPaths) {
try {
const result = await apiJson<{ enqueued: boolean; message: string }>("/api/jobs/enqueue", {
method: "POST",
body: JSON.stringify({ path: recommendation.path }),
});
if (result.enqueued) {
enqueued += 1;
} else {
skipped += 1;
}
} catch {
failed += 1;
}
}
setQueueingRemux(false);
await fetchIntelligence();
showToast({
kind: failed > 0 ? "error" : "success",
title: "Intelligence",
message: `Queue all finished: ${enqueued} enqueued, ${skipped} skipped, ${failed} failed.`,
});
};
return ( return (
<div className="flex flex-col gap-6"> <div className="flex flex-col gap-6">
@@ -128,6 +195,16 @@ export default function LibraryIntelligence() {
<h2 className="text-sm font-semibold text-helios-ink"> <h2 className="text-sm font-semibold text-helios-ink">
{TYPE_LABELS[type] ?? type} {TYPE_LABELS[type] ?? type}
</h2> </h2>
{type === "remux_only_candidate" && recommendations.length > 0 && (
<button
onClick={() => void handleQueueAllRemux()}
disabled={queueingRemux}
className="ml-auto inline-flex items-center gap-2 rounded-lg border border-helios-solar/20 bg-helios-solar/10 px-3 py-1.5 text-xs font-semibold text-helios-solar transition-colors hover:bg-helios-solar/20 disabled:opacity-60"
>
<Zap size={12} />
{queueingRemux ? "Queueing..." : "Queue all"}
</button>
)}
</div> </div>
<div className="divide-y divide-helios-line/10"> <div className="divide-y divide-helios-line/10">
{recommendations.map((recommendation, index) => ( {recommendations.map((recommendation, index) => (
@@ -137,6 +214,28 @@ export default function LibraryIntelligence() {
<h3 className="text-sm font-semibold text-helios-ink">{recommendation.title}</h3> <h3 className="text-sm font-semibold text-helios-ink">{recommendation.title}</h3>
<p className="mt-1 text-sm text-helios-slate">{recommendation.summary}</p> <p className="mt-1 text-sm text-helios-slate">{recommendation.summary}</p>
</div> </div>
{type === "remux_only_candidate" && (
<button
onClick={() => void apiJson<{ enqueued: boolean; message: string }>("/api/jobs/enqueue", {
method: "POST",
body: JSON.stringify({ path: recommendation.path }),
}).then((result) => {
showToast({
kind: result.enqueued ? "success" : "info",
title: "Intelligence",
message: result.message,
});
return fetchIntelligence();
}).catch((err) => {
const message = isApiError(err) ? err.message : "Failed to enqueue remux opportunity.";
showToast({ kind: "error", title: "Intelligence", message });
})}
className="inline-flex items-center gap-2 rounded-lg border border-helios-line/20 bg-helios-surface px-3 py-2 text-xs font-semibold text-helios-ink transition-colors hover:bg-helios-surface-soft"
>
<Zap size={12} />
Queue
</button>
)}
</div> </div>
<p className="mt-3 break-all font-mono text-xs text-helios-slate">{recommendation.path}</p> <p className="mt-3 break-all font-mono text-xs text-helios-slate">{recommendation.path}</p>
<div className="mt-3 rounded-lg border border-helios-line/20 bg-helios-surface-soft/40 px-3 py-2 text-xs text-helios-ink"> <div className="mt-3 rounded-lg border border-helios-line/20 bg-helios-surface-soft/40 px-3 py-2 text-xs text-helios-ink">
@@ -197,6 +296,13 @@ export default function LibraryIntelligence() {
<span className="break-all font-mono text-xs text-helios-slate"> <span className="break-all font-mono text-xs text-helios-slate">
{path.path} {path.path}
</span> </span>
<button
onClick={() => void openJobDetails(path.id)}
className="inline-flex items-center gap-1 rounded-lg border border-helios-line/20 bg-helios-surface px-2.5 py-1.5 text-[11px] font-semibold text-helios-ink transition-colors hover:bg-helios-surface-soft"
>
<Search size={12} />
Review
</button>
<span className="ml-auto shrink-0 text-xs capitalize text-helios-slate/50"> <span className="ml-auto shrink-0 text-xs capitalize text-helios-slate/50">
{path.status} {path.status}
</span> </span>
@@ -209,6 +315,41 @@ export default function LibraryIntelligence() {
)} )}
</> </>
)} )}
{typeof document !== "undefined" && createPortal(
<JobDetailModal
focusedJob={focusedJob}
detailDialogRef={detailDialogRef}
detailLoading={detailLoading}
onClose={closeJobDetails}
focusedDecision={focusedDecision}
focusedFailure={focusedFailure}
focusedJobLogs={focusedJobLogs}
shouldShowFfmpegOutput={shouldShowFfmpegOutput}
completedEncodeStats={completedEncodeStats}
focusedEmptyState={focusedEmptyState}
openConfirm={openConfirm}
handleAction={handleAction}
handlePriority={handlePriority}
getStatusBadge={getStatusBadge}
/>,
document.body,
)}
<ConfirmDialog
open={confirmState !== null}
title={confirmState?.title ?? ""}
description={confirmState?.body ?? ""}
confirmLabel={confirmState?.confirmLabel ?? "Confirm"}
tone={confirmState?.confirmTone ?? "primary"}
onClose={() => setConfirmState(null)}
onConfirm={async () => {
if (!confirmState) {
return;
}
await confirmState.onConfirm();
}}
/>
</div> </div>
); );
} }

View File

@@ -16,6 +16,7 @@ interface SystemSettingsPayload {
} }
interface EngineStatus { interface EngineStatus {
status: "running" | "paused" | "draining";
mode: "background" | "balanced" | "throughput"; mode: "background" | "balanced" | "throughput";
concurrent_limit: number; concurrent_limit: number;
is_manual_override: boolean; is_manual_override: boolean;
@@ -41,6 +42,7 @@ export default function SystemSettings() {
const [engineStatus, setEngineStatus] = const [engineStatus, setEngineStatus] =
useState<EngineStatus | null>(null); useState<EngineStatus | null>(null);
const [modeLoading, setModeLoading] = useState(false); const [modeLoading, setModeLoading] = useState(false);
const [engineActionLoading, setEngineActionLoading] = useState(false);
useEffect(() => { useEffect(() => {
void fetchSettings(); void fetchSettings();
@@ -129,6 +131,32 @@ export default function SystemSettings() {
} }
}; };
const handleEngineAction = async (action: "pause" | "resume") => {
setEngineActionLoading(true);
try {
await apiAction(`/api/engine/${action === "pause" ? "pause" : "resume"}`, {
method: "POST",
});
const updatedStatus = await apiJson<EngineStatus>("/api/engine/status");
setEngineStatus(updatedStatus);
showToast({
kind: "success",
title: "Engine",
message: action === "pause" ? "Engine paused." : "Engine resumed.",
});
} catch (err) {
showToast({
kind: "error",
title: "Engine",
message: isApiError(err)
? err.message
: "Failed to update engine state.",
});
} finally {
setEngineActionLoading(false);
}
};
if (loading) { if (loading) {
return <div className="p-8 text-helios-slate animate-pulse">Loading system settings...</div>; return <div className="p-8 text-helios-slate animate-pulse">Loading system settings...</div>;
} }
@@ -210,6 +238,25 @@ export default function SystemSettings() {
</p> </p>
)} )}
</div> </div>
<div className="flex items-center justify-between rounded-lg border border-helios-line/20 bg-helios-surface-soft/40 px-4 py-3">
<div>
<p className="text-xs font-semibold uppercase tracking-wide text-helios-slate">
Engine State
</p>
<p className="mt-1 text-sm text-helios-ink capitalize">
{engineStatus.status}
</p>
</div>
<button
type="button"
onClick={() => void handleEngineAction(engineStatus.status === "paused" ? "resume" : "pause")}
disabled={engineActionLoading || engineStatus.status === "draining"}
className="rounded-lg border border-helios-line/20 bg-helios-surface px-4 py-2 text-sm font-semibold text-helios-ink transition-colors hover:bg-helios-surface-soft disabled:opacity-50"
>
{engineStatus.status === "paused" ? "Start" : "Pause"}
</button>
</div>
</div> </div>
)} )}

View File

@@ -0,0 +1,98 @@
import type { FormEvent } from "react";
import { X } from "lucide-react";
interface EnqueuePathDialogProps {
  /** Whether the dialog is rendered at all. */
  open: boolean;
  /** Current value of the absolute-path input (controlled by the parent). */
  path: string;
  /** True while the enqueue request is in flight. */
  submitting: boolean;
  onPathChange: (value: string) => void;
  onClose: () => void;
  onSubmit: () => Promise<void>;
}

/**
 * Modal dialog for enqueueing a single absolute filesystem path without
 * running a full library scan. Purely presentational: the parent owns the
 * input value, the submitting flag, and the actual POST to the backend.
 */
export function EnqueuePathDialog({
  open,
  path,
  submitting,
  onPathChange,
  onClose,
  onSubmit,
}: EnqueuePathDialogProps) {
  if (!open) {
    return null;
  }

  const handleSubmit = async (event: FormEvent<HTMLFormElement>) => {
    event.preventDefault();
    // Guard against double submission: the button is disabled while
    // submitting, but pressing Enter in the input still fires the form's
    // submit event. Also refuse blank paths so we never POST an empty body.
    if (submitting || path.trim().length === 0) {
      return;
    }
    await onSubmit();
  };

  return (
    <>
      {/* Backdrop; parent decides whether closing mid-submit is allowed. */}
      <div
        className="fixed inset-0 z-[110] bg-black/60 backdrop-blur-sm"
        onClick={onClose}
      />
      <div className="fixed inset-0 z-[111] flex items-center justify-center px-4">
        <form
          onSubmit={(event) => void handleSubmit(event)}
          role="dialog"
          aria-modal="true"
          aria-labelledby="enqueue-path-title"
          className="w-full max-w-xl rounded-xl border border-helios-line/20 bg-helios-surface shadow-2xl"
        >
          <div className="flex items-start justify-between gap-4 border-b border-helios-line/10 bg-helios-surface-soft/50 px-6 py-5">
            <div>
              <h2 id="enqueue-path-title" className="text-lg font-bold text-helios-ink">Add File</h2>
              <p className="mt-1 text-sm text-helios-slate">
                Enqueue one absolute filesystem path without running a full scan.
              </p>
            </div>
            <button
              type="button"
              onClick={onClose}
              className="rounded-md p-2 text-helios-slate transition-colors hover:bg-helios-line/10"
              aria-label="Close add file dialog"
            >
              <X size={18} />
            </button>
          </div>
          <div className="space-y-3 px-6 py-5">
            <label className="block text-xs font-semibold uppercase tracking-wide text-helios-slate">
              Absolute Path
            </label>
            <input
              type="text"
              value={path}
              onChange={(event) => onPathChange(event.target.value)}
              placeholder="/Volumes/Media/Movies/example.mkv"
              className="w-full rounded-lg border border-helios-line/20 bg-helios-surface px-4 py-3 text-sm text-helios-ink outline-none focus:border-helios-solar"
              autoFocus
            />
            <p className="text-xs text-helios-slate">
              Supported media files only. Paths are resolved on the server before enqueue.
            </p>
          </div>
          <div className="flex items-center justify-end gap-3 border-t border-helios-line/10 px-6 py-4">
            <button
              type="button"
              onClick={onClose}
              className="rounded-lg border border-helios-line/20 px-4 py-2 text-sm font-semibold text-helios-slate transition-colors hover:bg-helios-surface-soft"
            >
              Cancel
            </button>
            {/* Disabled while in flight or when the path is blank. */}
            <button
              type="submit"
              disabled={submitting || path.trim().length === 0}
              className="rounded-lg bg-helios-solar px-4 py-2 text-sm font-bold text-helios-main transition-all hover:brightness-110 disabled:opacity-60"
            >
              {submitting ? "Adding..." : "Add File"}
            </button>
          </div>
        </form>
      </div>
    </>
  );
}

View File

@@ -2,9 +2,10 @@ import { X, Clock, Info, Activity, Database, Zap, Maximize2, AlertCircle, Refres
import { motion, AnimatePresence } from "framer-motion"; import { motion, AnimatePresence } from "framer-motion";
import { clsx, type ClassValue } from "clsx"; import { clsx, type ClassValue } from "clsx";
import { twMerge } from "tailwind-merge"; import { twMerge } from "tailwind-merge";
import type { RefObject } from "react"; import { useEffect, useState, type RefObject } from "react";
import type React from "react"; import type React from "react";
import type { JobDetail, EncodeStats, ExplanationView, LogEntry, ConfirmConfig, Job } from "./types"; import { apiJson } from "../../lib/api";
import type { JobDetail, EncodeStats, ExplanationView, LogEntry, ConfirmConfig, Job, ProcessorStatus } from "./types";
import { formatBytes, formatDuration, logLevelClass, isJobActive } from "./types"; import { formatBytes, formatDuration, logLevelClass, isJobActive } from "./types";
function cn(...inputs: ClassValue[]) { function cn(...inputs: ClassValue[]) {
@@ -34,6 +35,32 @@ export function JobDetailModal({
completedEncodeStats, focusedEmptyState, completedEncodeStats, focusedEmptyState,
openConfirm, handleAction, handlePriority, getStatusBadge, openConfirm, handleAction, handlePriority, getStatusBadge,
}: JobDetailModalProps) { }: JobDetailModalProps) {
const [processorStatus, setProcessorStatus] = useState<ProcessorStatus | null>(null);
useEffect(() => {
if (!focusedJob || focusedJob.job.status !== "queued") {
setProcessorStatus(null);
return;
}
let cancelled = false;
void apiJson<ProcessorStatus>("/api/processor/status")
.then((status) => {
if (!cancelled) {
setProcessorStatus(status);
}
})
.catch(() => {
if (!cancelled) {
setProcessorStatus(null);
}
});
return () => {
cancelled = true;
};
}, [focusedJob]);
return ( return (
<AnimatePresence> <AnimatePresence>
{focusedJob && ( {focusedJob && (
@@ -267,6 +294,11 @@ export function JobDetailModal({
Queue position: <span className="font-semibold text-helios-ink">#{focusedJob.queue_position}</span> Queue position: <span className="font-semibold text-helios-ink">#{focusedJob.queue_position}</span>
</p> </p>
)} )}
{focusedJob.job.status === "queued" && processorStatus?.blocked_reason && (
<p className="text-xs text-helios-slate mt-1">
Blocked: <span className="font-semibold text-helios-ink">{processorStatus.message}</span>
</p>
)}
</div> </div>
</div> </div>
) : null} ) : null}

View File

@@ -1,4 +1,4 @@
import { Search, RefreshCw, ArrowDown, ArrowUp } from "lucide-react"; import { Search, RefreshCw, ArrowDown, ArrowUp, Plus } from "lucide-react";
import { clsx, type ClassValue } from "clsx"; import { clsx, type ClassValue } from "clsx";
import { twMerge } from "tailwind-merge"; import { twMerge } from "tailwind-merge";
import type { RefObject } from "react"; import type { RefObject } from "react";
@@ -26,6 +26,7 @@ interface JobsToolbarProps {
setSortDesc: (fn: boolean | ((prev: boolean) => boolean)) => void; setSortDesc: (fn: boolean | ((prev: boolean) => boolean)) => void;
refreshing: boolean; refreshing: boolean;
fetchJobs: () => Promise<void>; fetchJobs: () => Promise<void>;
openEnqueueDialog: () => void;
} }
export function JobsToolbar({ export function JobsToolbar({
@@ -33,7 +34,7 @@ export function JobsToolbar({
searchInput, setSearchInput, searchInput, setSearchInput,
compactSearchOpen, setCompactSearchOpen, compactSearchRef, compactSearchInputRef, compactSearchOpen, setCompactSearchOpen, compactSearchRef, compactSearchInputRef,
sortBy, setSortBy, sortDesc, setSortDesc, sortBy, setSortBy, sortDesc, setSortDesc,
refreshing, fetchJobs, refreshing, fetchJobs, openEnqueueDialog,
}: JobsToolbarProps) { }: JobsToolbarProps) {
return ( return (
<div className="rounded-xl border border-helios-line/10 bg-helios-surface/50 px-3 py-3"> <div className="rounded-xl border border-helios-line/10 bg-helios-surface/50 px-3 py-3">
@@ -94,6 +95,13 @@ export function JobsToolbar({
</div> </div>
<div className="flex items-center gap-2 sm:ml-auto"> <div className="flex items-center gap-2 sm:ml-auto">
<button
onClick={openEnqueueDialog}
className="inline-flex h-10 items-center gap-2 rounded-lg border border-helios-line/20 bg-helios-surface px-3 text-sm font-semibold text-helios-ink hover:bg-helios-surface-soft"
>
<Plus size={16} />
<span>Add file</span>
</button>
<button <button
onClick={() => void fetchJobs()} onClick={() => void fetchJobs()}
className="flex h-10 w-10 shrink-0 items-center justify-center rounded-lg border border-helios-line/20 bg-helios-surface text-helios-ink hover:bg-helios-surface-soft" className="flex h-10 w-10 shrink-0 items-center justify-center rounded-lg border border-helios-line/20 bg-helios-surface text-helios-ink hover:bg-helios-surface-soft"

View File

@@ -0,0 +1,32 @@
import { clsx, type ClassValue } from "clsx";
import { twMerge } from "tailwind-merge";
/** Combine conditional class values via clsx, then let tailwind-merge resolve conflicting Tailwind utilities. */
function cn(...inputs: ClassValue[]) {
  const combined = clsx(inputs);
  return twMerge(combined);
}
/**
 * Render a small colored pill badge for a job status string.
 * Active states (encoding/remuxing/resuming) pulse; unknown statuses fall
 * back to the neutral "queued" styling.
 */
export function getStatusBadge(status: string) {
  const activeTone = "bg-helios-solar/10 text-helios-solar border-helios-solar/20 animate-pulse";
  const badgeTones: Record<string, string> = {
    queued: "bg-helios-slate/10 text-helios-slate border-helios-slate/20",
    analyzing: "bg-blue-500/10 text-blue-500 border-blue-500/20",
    encoding: activeTone,
    remuxing: activeTone,
    resuming: activeTone,
    completed: "bg-green-500/10 text-green-500 border-green-500/20",
    failed: "bg-red-500/10 text-red-500 border-red-500/20",
    cancelled: "bg-red-500/10 text-red-500 border-red-500/20",
    skipped: "bg-gray-500/10 text-gray-500 border-gray-500/20",
    archived: "bg-zinc-500/10 text-zinc-400 border-zinc-500/20",
  };
  const tone = badgeTones[status] || badgeTones.queued;
  return (
    <span className={cn("px-2.5 py-1 rounded-md text-xs font-medium border capitalize", tone)}>
      {status}
    </span>
  );
}

View File

@@ -94,6 +94,16 @@ export interface JobDetail {
queue_position: number | null; queue_position: number | null;
} }
/**
 * Runtime state of the job processor, as returned by
 * `GET /api/processor/status`. Surfaced in job detail to explain why a
 * queued job is not starting.
 */
export interface ProcessorStatus {
// Why queued work is not being picked up, or null when nothing is blocking.
blocked_reason: "manual_paused" | "scheduled_pause" | "draining" | "workers_busy" | null;
// Human-readable description of the processor's current state.
message: string;
// Presumably true when an operator paused processing by hand — matches the
// "manual_paused" blocked_reason value.
manual_paused: boolean;
// Presumably true while a scheduled pause window is active ("scheduled_pause").
scheduler_paused: boolean;
// Presumably true while the processor finishes in-flight work ("draining").
draining: boolean;
// Count of jobs currently running.
active_jobs: number;
// Maximum jobs allowed to run concurrently.
concurrent_limit: number;
}
export interface CountMessageResponse { export interface CountMessageResponse {
count: number; count: number;
message: string; message: string;

View File

@@ -0,0 +1,237 @@
import { useCallback, useEffect, useRef, useState } from "react";
import { apiAction, apiJson, isApiError } from "../../lib/api";
import { showToast } from "../../lib/toast";
import { normalizeDecisionExplanation, normalizeFailureExplanation } from "./JobExplanations";
import type {
ConfirmConfig,
EncodeStats,
ExplanationView,
Job,
JobDetail,
LogEntry,
} from "./types";
import { jobDetailEmptyState } from "./types";
/**
 * Collect the keyboard-focusable descendants of `root`, in document order.
 * Used by the detail-modal focus trap to cycle Tab/Shift+Tab focus.
 */
function focusableElements(root: HTMLElement): HTMLElement[] {
  const selector =
    "a[href],button:not([disabled]),input:not([disabled]),select:not([disabled])," +
    "textarea:not([disabled]),[tabindex]:not([tabindex='-1'])";
  const matched = root.querySelectorAll<HTMLElement>(selector);
  const focusable: HTMLElement[] = [];
  for (const element of matched) {
    // Belt-and-suspenders: the selector excludes disabled form controls, but
    // a [tabindex] match could still carry a `disabled` attribute.
    if (!element.hasAttribute("disabled")) {
      focusable.push(element);
    }
  }
  return focusable;
}
/**
 * Build a user-facing message for a failed job action.
 *
 * Non-API errors collapse to `fallback`. API errors surface the server
 * message; when the response body lists jobs that blocked the action, a
 * "#id (status)" summary is appended.
 */
function formatJobActionError(error: unknown, fallback: string) {
  if (!isApiError(error)) {
    return fallback;
  }
  const rawBlocked = (error.body as { blocked?: unknown } | undefined)?.blocked;
  if (!Array.isArray(rawBlocked) || rawBlocked.length === 0) {
    return error.message;
  }
  const blockedJobs = rawBlocked as Array<{ id?: number; status?: string }>;
  const summary = blockedJobs
    .map((job) => `#${job.id ?? "?"} (${job.status ?? "unknown"})`)
    .join(", ");
  return `${error.message}: ${summary}`;
}
/** Optional callbacks for `useJobDetailController`. */
interface UseJobDetailControllerOptions {
// Invoked after a successful job action or priority change so the caller
// can refresh its jobs list.
onRefresh?: () => Promise<void>;
}
/**
 * Controller hook for the job-detail modal.
 *
 * Owns: the focused job's detail payload, the detail-loading flag, the
 * confirm-dialog state, the modal focus trap, and the job action /
 * priority-change handlers. Returns the state plus derived views
 * (decision/failure explanations, logs, encode stats, empty state).
 *
 * Fix over the previous version: the `options` object (a fresh literal on
 * every render, given the `= {}` default) was listed in useCallback
 * dependency arrays, recreating `handleAction`/`handlePriority` each render
 * and defeating memoization. We now destructure and depend on
 * `options.onRefresh` only, and update focused-job priority via a functional
 * setState so no stale `focusedJob` closure is captured.
 */
export function useJobDetailController(options: UseJobDetailControllerOptions = {}) {
  const { onRefresh } = options;

  const [focusedJob, setFocusedJob] = useState<JobDetail | null>(null);
  const [detailLoading, setDetailLoading] = useState(false);
  const [confirmState, setConfirmState] = useState<ConfirmConfig | null>(null);
  const detailDialogRef = useRef<HTMLDivElement | null>(null);
  const detailLastFocusedRef = useRef<HTMLElement | null>(null);
  // Mirror of "is the confirm dialog open" for the keydown handler, which
  // must not re-bind whenever confirmState changes.
  const confirmOpenRef = useRef(false);

  useEffect(() => {
    confirmOpenRef.current = confirmState !== null;
  }, [confirmState]);

  // Focus trap: on open, move focus into the dialog; keep Tab/Shift+Tab
  // cycling inside it; close on Escape; restore focus on close.
  useEffect(() => {
    if (!focusedJob) {
      return;
    }
    detailLastFocusedRef.current = document.activeElement as HTMLElement | null;
    const root = detailDialogRef.current;
    if (root) {
      const focusables = focusableElements(root);
      if (focusables.length > 0) {
        focusables[0].focus();
      } else {
        root.focus();
      }
    }
    const onKeyDown = (event: KeyboardEvent) => {
      // While the confirm dialog is open it owns keyboard handling.
      if (!focusedJob || confirmOpenRef.current) {
        return;
      }
      if (event.key === "Escape") {
        event.preventDefault();
        setFocusedJob(null);
        return;
      }
      if (event.key !== "Tab") {
        return;
      }
      const dialogRoot = detailDialogRef.current;
      if (!dialogRoot) {
        return;
      }
      const focusables = focusableElements(dialogRoot);
      if (focusables.length === 0) {
        event.preventDefault();
        dialogRoot.focus();
        return;
      }
      const first = focusables[0];
      const last = focusables[focusables.length - 1];
      const current = document.activeElement as HTMLElement | null;
      if (event.shiftKey && current === first) {
        event.preventDefault();
        last.focus();
      } else if (!event.shiftKey && current === last) {
        event.preventDefault();
        first.focus();
      }
    };
    document.addEventListener("keydown", onKeyDown);
    return () => {
      document.removeEventListener("keydown", onKeyDown);
      if (detailLastFocusedRef.current) {
        detailLastFocusedRef.current.focus();
      }
    };
  }, [focusedJob]);

  // Load one job's detail payload and focus it in the modal.
  const openJobDetails = useCallback(async (id: number) => {
    setDetailLoading(true);
    try {
      const data = await apiJson<JobDetail>(`/api/jobs/${id}/details`);
      setFocusedJob(data);
    } catch (error) {
      const message = isApiError(error) ? error.message : "Failed to fetch job details";
      showToast({ kind: "error", title: "Jobs", message });
    } finally {
      setDetailLoading(false);
    }
  }, []);

  // Run a job action. Delete closes the modal if it shows that job; other
  // actions re-fetch the detail payload so the modal reflects the new state.
  const handleAction = useCallback(async (id: number, action: "cancel" | "restart" | "delete") => {
    try {
      await apiAction(`/api/jobs/${id}/${action}`, { method: "POST" });
      if (action === "delete") {
        setFocusedJob((current) => (current?.job.id === id ? null : current));
      } else if (focusedJob?.job.id === id) {
        await openJobDetails(id);
      }
      if (onRefresh) {
        await onRefresh();
      }
      showToast({
        kind: "success",
        title: "Jobs",
        message: `Job ${action} request completed.`,
      });
    } catch (error) {
      const message = formatJobActionError(error, `Job ${action} failed`);
      showToast({ kind: "error", title: "Jobs", message });
    }
  }, [focusedJob?.job.id, openJobDetails, onRefresh]);

  // Change a job's priority and mirror the change into the open modal.
  const handlePriority = useCallback(async (job: Job, priority: number, label: string) => {
    try {
      await apiAction(`/api/jobs/${job.id}/priority`, {
        method: "POST",
        body: JSON.stringify({ priority }),
      });
      // Functional update: never acts on a stale focusedJob closure.
      setFocusedJob((current) =>
        current?.job.id === job.id
          ? { ...current, job: { ...current.job, priority } }
          : current,
      );
      if (onRefresh) {
        await onRefresh();
      }
      showToast({ kind: "success", title: "Jobs", message: `${label} for job #${job.id}.` });
    } catch (error) {
      const message = formatJobActionError(error, "Failed to update priority");
      showToast({ kind: "error", title: "Jobs", message });
    }
  }, [onRefresh]);

  const openConfirm = useCallback((config: ConfirmConfig) => {
    setConfirmState(config);
  }, []);

  // Derived views for the modal body.
  const focusedDecision: ExplanationView | null = focusedJob
    ? normalizeDecisionExplanation(
        focusedJob.decision_explanation ?? focusedJob.job.decision_explanation,
        focusedJob.job.decision_reason,
      )
    : null;
  const focusedFailure: ExplanationView | null = focusedJob
    ? normalizeFailureExplanation(
        focusedJob.failure_explanation,
        focusedJob.job_failure_summary,
        focusedJob.job_logs,
      )
    : null;
  const focusedJobLogs: LogEntry[] = focusedJob?.job_logs ?? [];
  // Only terminal states with captured logs show the raw ffmpeg output pane.
  const shouldShowFfmpegOutput = focusedJob
    ? ["failed", "completed", "skipped"].includes(focusedJob.job.status) && focusedJobLogs.length > 0
    : false;
  const completedEncodeStats: EncodeStats | null = focusedJob?.job.status === "completed"
    ? focusedJob.encode_stats
    : null;
  const focusedEmptyState = focusedJob
    ? jobDetailEmptyState(focusedJob.job.status)
    : null;

  return {
    focusedJob,
    setFocusedJob,
    detailLoading,
    confirmState,
    detailDialogRef,
    openJobDetails,
    handleAction,
    handlePriority,
    openConfirm,
    setConfirmState,
    closeJobDetails: () => setFocusedJob(null),
    focusedDecision,
    focusedFailure,
    focusedJobLogs,
    shouldShowFfmpegOutput,
    completedEncodeStats,
    focusedEmptyState,
  };
}

View File

@@ -6,10 +6,19 @@ interface UseJobSSEOptions {
setJobs: Dispatch<SetStateAction<Job[]>>; setJobs: Dispatch<SetStateAction<Job[]>>;
setFocusedJob: Dispatch<SetStateAction<JobDetail | null>>; setFocusedJob: Dispatch<SetStateAction<JobDetail | null>>;
fetchJobsRef: MutableRefObject<() => Promise<void>>; fetchJobsRef: MutableRefObject<() => Promise<void>>;
focusedJobIdRef: MutableRefObject<number | null>;
refreshFocusedJobRef: MutableRefObject<() => Promise<void>>;
encodeStartTimes: MutableRefObject<Map<number, number>>; encodeStartTimes: MutableRefObject<Map<number, number>>;
} }
export function useJobSSE({ setJobs, setFocusedJob, fetchJobsRef, encodeStartTimes }: UseJobSSEOptions): void { export function useJobSSE({
setJobs,
setFocusedJob,
fetchJobsRef,
focusedJobIdRef,
refreshFocusedJobRef,
encodeStartTimes,
}: UseJobSSEOptions): void {
useEffect(() => { useEffect(() => {
let eventSource: EventSource | null = null; let eventSource: EventSource | null = null;
let cancelled = false; let cancelled = false;
@@ -49,8 +58,21 @@ export function useJobSSE({ setJobs, setFocusedJob, fetchJobsRef, encodeStartTim
prev.map((job) => job.id === job_id ? { ...job, status } : job) prev.map((job) => job.id === job_id ? { ...job, status } : job)
); );
setFocusedJob((prev) => setFocusedJob((prev) =>
prev?.job.id === job_id ? { ...prev, job: { ...prev.job, status } } : prev prev?.job.id === job_id
? {
...prev,
queue_position: status === "queued" ? prev.queue_position : null,
job: {
...prev.job,
status,
},
}
: prev
); );
void fetchJobsRef.current();
if (focusedJobIdRef.current === job_id) {
void refreshFocusedJobRef.current();
}
} catch { } catch {
/* ignore malformed */ /* ignore malformed */
} }
@@ -65,15 +87,33 @@ export function useJobSSE({ setJobs, setFocusedJob, fetchJobsRef, encodeStartTim
setJobs((prev) => setJobs((prev) =>
prev.map((job) => job.id === job_id ? { ...job, progress: percentage } : job) prev.map((job) => job.id === job_id ? { ...job, progress: percentage } : job)
); );
setFocusedJob((prev) =>
prev?.job.id === job_id
? { ...prev, job: { ...prev.job, progress: percentage } }
: prev
);
} catch { } catch {
/* ignore malformed */ /* ignore malformed */
} }
}); });
eventSource.addEventListener("decision", () => { eventSource.addEventListener("decision", (e) => {
try {
const payload = JSON.parse(e.data) as { job_id?: number };
if (payload.job_id != null && focusedJobIdRef.current === payload.job_id) {
void refreshFocusedJobRef.current();
}
} catch {
/* ignore malformed */
}
void fetchJobsRef.current(); void fetchJobsRef.current();
}); });
eventSource.addEventListener("lagged", () => {
void fetchJobsRef.current();
void refreshFocusedJobRef.current();
});
eventSource.onerror = () => { eventSource.onerror = () => {
eventSource?.close(); eventSource?.close();
if (!cancelled) { if (!cancelled) {