chore: release v0.3.1-rc.1

This commit is contained in:
2026-04-07 22:13:43 -04:00
parent 09cf230820
commit 4000640f85
74 changed files with 5626 additions and 1020 deletions

View File

@@ -495,6 +495,17 @@ jobs:
merge-multiple: true
path: release-assets
- name: Render distribution metadata
shell: bash
run: |
set -euo pipefail
VERSION="${{ inputs.release_tag }}"
VERSION="${VERSION#v}"
python3 scripts/render_distribution.py \
--version "${VERSION}" \
--assets-dir release-assets \
--output-dir release-assets/distribution
- name: Publish release
uses: softprops/action-gh-release@v2
with:

96
Cargo.lock generated
View File

@@ -13,7 +13,7 @@ dependencies = [
[[package]]
name = "alchemist"
version = "0.3.0"
version = "0.3.1-rc.1"
dependencies = [
"anyhow",
"argon2",
@@ -23,6 +23,7 @@ dependencies = [
"futures",
"http-body-util",
"inquire",
"lettre",
"mime_guess",
"notify",
"num_cpus",
@@ -38,6 +39,7 @@ dependencies = [
"thiserror",
"tokio",
"tokio-stream",
"tokio-util",
"toml",
"tower",
"tracing",
@@ -182,6 +184,7 @@ dependencies = [
"matchit 0.7.3",
"memchr",
"mime",
"multer",
"percent-encoding",
"pin-project-lite",
"rustversion",
@@ -598,6 +601,31 @@ dependencies = [
"serde",
]
[[package]]
name = "email-encoding"
version = "0.4.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "9298e6504d9b9e780ed3f7dfd43a61be8cd0e09eb07f7706a945b0072b6670b6"
dependencies = [
"base64",
"memchr",
]
[[package]]
name = "email_address"
version = "0.2.9"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "e079f19b08ca6239f47f8ba8509c11cf3ea30095831f7fed61441475edd8c449"
[[package]]
name = "encoding_rs"
version = "0.8.35"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "75030f3c4f45dafd7586dd6780965a8c7e8e285a5ecb86713e63a79c5b2766f3"
dependencies = [
"cfg-if",
]
[[package]]
name = "equivalent"
version = "1.0.2"
@@ -636,6 +664,12 @@ dependencies = [
"pin-project-lite",
]
[[package]]
name = "fastrand"
version = "2.4.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "9f1f227452a390804cdb637b74a86990f2a7d7ba4b7d5693aac9b4dd6defd8d6"
[[package]]
name = "filetime"
version = "0.2.27"
@@ -1289,6 +1323,33 @@ version = "0.1.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "09edd9e8b54e49e587e4f6295a7d29c3ea94d469cb40ab8ca70b288248a81db2"
[[package]]
name = "lettre"
version = "0.11.21"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "dabda5859ee7c06b995b9d1165aa52c39110e079ef609db97178d86aeb051fa7"
dependencies = [
"async-trait",
"base64",
"email-encoding",
"email_address",
"fastrand",
"futures-io",
"futures-util",
"httpdate",
"idna",
"mime",
"nom",
"percent-encoding",
"quoted_printable",
"rustls",
"socket2",
"tokio",
"tokio-rustls",
"url",
"webpki-roots 1.0.6",
]
[[package]]
name = "libc"
version = "0.2.183"
@@ -1427,6 +1488,23 @@ dependencies = [
"windows-sys 0.61.2",
]
[[package]]
name = "multer"
version = "3.1.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "83e87776546dc87511aa5ee218730c92b666d7264ab6ed41f9d215af9cd5224b"
dependencies = [
"bytes",
"encoding_rs",
"futures-util",
"http",
"httparse",
"memchr",
"mime",
"spin",
"version_check",
]
[[package]]
name = "newline-converter"
version = "0.3.0"
@@ -1436,6 +1514,15 @@ dependencies = [
"unicode-segmentation",
]
[[package]]
name = "nom"
version = "8.0.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "df9761775871bdef83bee530e60050f7e54b1105350d6884eb0fb4f46c2f9405"
dependencies = [
"memchr",
]
[[package]]
name = "notify"
version = "6.1.1"
@@ -1742,6 +1829,12 @@ dependencies = [
"proc-macro2",
]
[[package]]
name = "quoted_printable"
version = "0.5.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "478e0585659a122aa407eb7e3c0e1fa51b1d8a870038bd29f0cf4a8551eea972"
[[package]]
name = "r-efi"
version = "5.3.0"
@@ -1987,6 +2080,7 @@ version = "0.23.37"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "758025cb5fccfd3bc2fd74708fd4682be41d99e5dff73c377c0646c6012c73a4"
dependencies = [
"log",
"once_cell",
"ring",
"rustls-pki-types",

View File

@@ -1,6 +1,6 @@
[package]
name = "alchemist"
version = "0.3.0"
version = "0.3.1-rc.1"
edition = "2024"
rust-version = "1.85"
license = "GPL-3.0"
@@ -32,7 +32,7 @@ num_cpus = "1.16"
inquire = { version = "0.7" }
futures = { version = "0.3" }
toml = "0.8"
axum = { version = "0.7", features = ["macros"] }
axum = { version = "0.7", features = ["macros", "multipart"] }
rayon = "1.10"
tokio-stream = { version = "0.1", features = ["sync"] }
thiserror = "2.0.17"
@@ -46,6 +46,8 @@ sysinfo = "0.32"
uuid = { version = "1", features = ["v4"] }
sha2 = "0.10"
trait-variant = "0.1.2"
tokio-util = { version = "0.7", features = ["io"] }
lettre = { version = "0.11", default-features = false, features = ["builder", "smtp-transport", "tokio1-rustls-tls"] }
[dev-dependencies]
http-body-util = "0.1"

View File

@@ -21,10 +21,12 @@ Everything is visible in the web dashboard. You can see what is running, what wa
## Features
- Give movies, TV, and home videos different behavior with per-library profiles.
- Convert or remux a single uploaded file from the **Convert** page using the same pipeline Alchemist uses for library jobs. Experimental.
- Catch corrupt or broken files before they surprise you with Library Doctor.
- See exactly how much storage you have recovered in the savings dashboard.
- Understand every skipped file immediately with plain-English explanations.
- Get a ping when work finishes through Discord, Gotify, or a webhook.
- Get a ping when work finishes through Discord, Gotify, Telegram, email, or a webhook.
- Create named API tokens for automation, with `read_only` and `full_access` access classes.
- Keep heavy jobs out of the way with a scheduler for off-peak hours.
- Push urgent files to the front with the priority queue.
- Switch the engine between background, balanced, and throughput modes without restarting the app.
@@ -32,6 +34,7 @@ Everything is visible in the web dashboard. You can see what is running, what wa
- Preserve HDR metadata or tonemap to SDR depending on what you need.
- Add folders once and let watch folders keep monitoring them automatically.
- Shape audio output with stream rules for commentary stripping, language filtering, and default-track retention.
- Surface storage-focused recommendations through Library Intelligence, including remux opportunities and commentary cleanup candidates.
## Hardware Support
@@ -61,8 +64,8 @@ services:
ports:
- "3000:3000"
volumes:
- /path/to/config:/app/config
- /path/to/data:/app/data
- ~/.config/alchemist:/app/config
- ~/.config/alchemist:/app/data
- /path/to/media:/media
environment:
- ALCHEMIST_CONFIG_PATH=/app/config/config.toml
@@ -72,10 +75,15 @@ services:
Then open [http://localhost:3000](http://localhost:3000) in your browser.
On Linux and macOS, the default host-side config location is
`~/.config/alchemist/config.toml`. When you use Docker, the
recommended bind mount is still `~/.config/alchemist`, mapped
into `/app/config` and `/app/data` inside the container.
If you prefer `docker run`, this is the trimmed equivalent:
```bash
docker run -d --name alchemist -p 3000:3000 -v /path/to/config:/app/config -v /path/to/data:/app/data -v /path/to/media:/media -e ALCHEMIST_CONFIG_PATH=/app/config/config.toml -e ALCHEMIST_DB_PATH=/app/data/alchemist.db --restart unless-stopped ghcr.io/bybrooklyn/alchemist:latest
docker run -d --name alchemist -p 3000:3000 -v ~/.config/alchemist:/app/config -v ~/.config/alchemist:/app/data -v /path/to/media:/media -e ALCHEMIST_CONFIG_PATH=/app/config/config.toml -e ALCHEMIST_DB_PATH=/app/data/alchemist.db --restart unless-stopped ghcr.io/bybrooklyn/alchemist:latest
```
### Binary
@@ -132,6 +140,13 @@ The core contributor path is supported on Windows. Broader release and utility r
4. Alchemist scans and starts working automatically.
5. Check the Dashboard to see progress and savings.
## Automation + Subpath Notes
- API automation can use bearer tokens created in **Settings → API Tokens**.
- Read-only tokens are limited to observability and monitoring routes.
- Alchemist can also be served under a subpath such as `/alchemist`
using `ALCHEMIST_BASE_URL=/alchemist`.
## Supported Platforms
| Platform | Status |

View File

@@ -1 +1 @@
0.3.0
0.3.1-rc.1

View File

@@ -1,476 +1,155 @@
# Alchemist Backlog
Future improvements and features to consider for the project.
Current and future work for Alchemist, organized around the
actual repo state rather than historical priorities.
Alchemist should remain an automation-first media
optimization tool, not drift into a general-purpose media
workbench.
---
## Out of Scope — Explicitly Not Planned
## Implemented / In Progress
These are deliberate design decisions, not omissions. Do not add them.
These items now exist in the repo and should be treated as
current product surface that still needs hardening,
documentation, or iteration.
- **Custom FFmpeg flags / raw flag injection** — Alchemist is designed to be approachable and safe. Exposing raw FFmpeg arguments (whether per-profile, per-job, or in the conversion sandbox) would make it a footgun and undermine the beginner-first design. The encoding pipeline is the abstraction; users configure outcomes, not commands.
- **Distributed encoding across multiple machines** — Not a goal. Alchemist is a single-host tool. Multi-node orchestration is a different product.
### Conversion / Remux Workflow
- Dedicated **Convert** page for single-file upload-driven conversion
- Probe-driven UI with container, video, audio, subtitle, and remux-only controls
- FFmpeg command preview
- Temporary upload/output lifecycle under `~/.config/alchemist/temp`
- Reuse of the existing queue and worker system
- Status polling and download flow
- Treat this as an experimental utility, not a second core
product track
### Notification Platform Expansion
- Provider-specific notification target model backed by `config_json`
- Discord webhook, Discord bot, Gotify, generic webhook, Telegram, and email targets
- Richer event taxonomy:
- `encode.queued`
- `encode.started`
- `encode.completed`
- `encode.failed`
- `scan.completed`
- `engine.idle`
- `daily.summary`
- Per-target event filtering
- Daily summary scheduling via `daily_summary_time_local`
### API Token Authentication + API Docs
- Named static API tokens with `read_only` and `full_access` classes
- Hash-only token storage, plaintext shown once at creation
- Token management endpoints and Settings UI
- Hand-maintained OpenAPI contract plus human API docs
### Base URL / Subpath Support
- `ALCHEMIST_BASE_URL` and matching config support
- Router nesting under a configured path prefix
- Frontend fetches, redirects, navigation, and SSE path generation updated for subpaths
### Distribution Foundation
- In-repo distribution metadata sources for:
- Homebrew
- AUR
- Windows update-check metadata
- Release workflow renders package metadata from release assets/checksums
- Windows in-app update check against GitHub Releases
### Expanded Library Intelligence
- Duplicate groups remain
- Storage-focused recommendation categories added:
- remux-only opportunities
- wasteful audio layouts
- commentary/descriptive-track cleanup candidates
---
## High Priority
## Active Priorities
Testing policy for this section:
### Engine Lifecycle Controls
- Finish and harden restart/shutdown semantics from the About/header surface
- Restart must reset the engine loop without re-execing the process
- Shutdown must cancel active jobs and exit cleanly
- Add final backend and Playwright coverage for lifecycle transitions
- Backend/unit/integration coverage and Playwright coverage are exit criteria for each item below.
- Do not treat "more tests" as a standalone product track; attach the required coverage to the feature or refactor that needs it.
### Planner and Lifecycle Documentation
- Document planner heuristics and stable skip/transcode/remux decision boundaries
- Document hardware fallback rules and backend selection semantics
- Document pause, drain, restart, cancel, and shutdown semantics from actual behavior
### 1. Engine Lifecycle Controls
### Per-File Encode History
- Show full attempt history in job detail, grouped by canonical file identity
- Include outcome, encode stats, and failure reason where available
- Make retries, reruns, and settings-driven requeues legible
#### Goal
- Make engine lifecycle controls real, explicit, and operator-safe from the header/About surface.
### Behavior-Preserving Refactor Pass
- Decompose `web/src/components/JobManager.tsx` without changing current behavior
- Extract shared formatting logic
- Clarify SSE vs polling ownership
- Add regression coverage before deeper structural cleanup
#### Scope
- Redesign the About screen so it fits the current visual language.
- Add a **Restart Engine** action that restarts the engine loop without killing the Alchemist process.
- Add a **Shutdown Alchemist** action that cancels active jobs immediately and exits the process cleanly.
- Define and surface the lifecycle states needed to make restart and shutdown understandable in the UI.
#### Non-Goals
- Do not re-exec the whole app process to implement restart.
- Do not drain active jobs to completion on shutdown; shutdown means cancel and exit.
#### Dependencies
- Backend lifecycle endpoints and orchestration semantics for restart and shutdown.
- Reliable event/state propagation so the UI can reflect transient lifecycle states without stale polling or SSE behavior.
#### Acceptance Criteria
- Restart tears down and reinitializes the engine loop while the binary stays alive.
- Shutdown stops accepting new work, cancels active jobs, persists the right terminal states, and exits cleanly.
- Job rows, logs, and toasts clearly distinguish pause, drain, restart, cancellation, and shutdown.
- The About surface exposes restart and shutdown with confirmation and clear failure handling.
#### Required Tests
- Backend tests for restart/shutdown semantics and lifecycle state transitions.
- Playwright coverage for About screen controls, confirmations, success states, and failure states.
#### Solution
- Add a dedicated engine lifecycle API instead of overloading pause/drain:
- Add authenticated lifecycle routes for `restart engine` and `shutdown app`.
- Keep restart scoped to the engine loop only; do not re-exec the binary.
- Keep shutdown as cancel-all-and-exit; do not reuse drain semantics.
- Introduce a server-owned shutdown trigger so HTTP-initiated shutdown uses the same shutdown path as Ctrl+C and SIGTERM:
- Extend `RunServerArgs` and `AppState` with a shutdown signal sender.
- Update `axum::serve(...).with_graceful_shutdown(...)` to also listen for an internal shutdown signal.
- Add an explicit lifecycle transition guard:
- Reject overlapping restart/shutdown requests while a lifecycle action is already in progress.
- Surface lifecycle state through `/api/engine/status` so the UI can render restarting/shutting-down states cleanly.
- Implement restart as an engine-loop reset, not a process restart:
- Pause new intake.
- Cancel active jobs immediately through the orchestrator.
- Clear drain state and any temporary lifecycle flags.
- Reinitialize the engine loop state needed to resume normal processing.
- Resume only if the scheduler is not actively pausing the engine.
- Implement shutdown as a process-level cancel-and-exit flow:
- Pause intake.
- Cancel all active jobs immediately.
- Give cancellation and persistence a short bounded window to flush terminal state.
- Trigger the internal shutdown signal so the server exits through the same top-level path already used for signals.
- Split the backend work by file responsibility:
- `src/media/processor.rs`: add restart/shutdown lifecycle methods and transient lifecycle state.
- `src/server/mod.rs`: wire new lifecycle routes and internal shutdown signaling into `AppState` and server startup.
- `src/server/jobs.rs` or a new dedicated engine/server lifecycle module: implement authenticated handlers for restart/shutdown.
- `src/main.rs`: keep the top-level exit behavior but make sure HTTP-triggered shutdown lands in the same path as signal-triggered shutdown.
- Update the UI in two passes:
- Redesign `web/src/components/AboutDialog.tsx` to match the current visual system and include restart/shutdown actions plus confirmation UX.
- Update `web/src/components/HeaderActions.tsx` and any engine-status consumers to understand the new lifecycle states.
- Add coverage before shipping:
- Backend tests for restart, shutdown, overlapping request rejection, and status payload transitions.
- Playwright tests for About modal actions, confirmation dialogs, success flows, disabled/loading states, and failure toasts.
### 2. Planner and Lifecycle Documentation
#### Goal
- Lock down current behavior before deeper refactors by documenting planner heuristics, hardware fallback rules, and engine lifecycle semantics.
#### Scope
- Document the current planner heuristics and stable skip/transcode/remux decision boundaries.
- Document hardware fallback rules and vendor/backend selection semantics.
- Document lifecycle semantics for pause, drain, restart, cancel, and shutdown.
#### Non-Goals
- No product behavior changes.
- No speculative redesign of the planner or lifecycle model.
#### Dependencies
- Cross-check against the existing backend behavior and tests, not just intended behavior.
#### Acceptance Criteria
- Future cleanup work has a single documented source of truth for planner and lifecycle behavior.
- The docs are specific enough to catch accidental behavior changes during refactors.
#### Required Tests
- Add or tighten assertions where documentation work uncovers missing coverage around planner decisions, hardware fallback, or lifecycle states.
#### Solution
### 3. Per-File Encode History
#### Goal
- Show a complete attempt history in the job detail panel for files that have been processed more than once.
#### Scope
- Group history by canonical file identity rather than path-only matching.
- Show date, outcome, encode stats where applicable, and failure reason where applicable.
- Make repeated retries, re-queues after settings changes, and manual reruns understandable at a glance.
#### Non-Goals
- Do not turn this into a general media-management timeline.
- Do not rely on path-only grouping when a canonical identity is available.
#### Dependencies
- Query shaping across `jobs`, `encode_stats`, and `job_failure_explanations`.
- A stable canonical file identity strategy that survives path changes better than naive path matching.
#### Acceptance Criteria
- Job detail shows prior attempts for the same canonical file identity with enough detail to explain repeated outcomes.
- Operators can distinguish retry noise from truly separate processing attempts.
#### Required Tests
- Backend coverage for history lookup and canonical identity grouping.
- UI coverage for rendering mixed completed/failed/skipped histories.
#### Solution
### 4. Behavior-Preserving Refactor Pass
#### Goal
- Improve internal structure without changing visible product behavior.
#### Scope
- Refactor `web/src/components/JobManager.tsx` into smaller components and hooks without changing screens, filters, polling, SSE updates, or job actions.
- Centralize duplicated byte/time/reduction formatting logic into shared utilities while preserving current output formatting.
- Preserve the current realtime model, but make ownership clearer: job/config/system events via SSE, resource metrics via polling.
- Add regression coverage around planner decisions, watcher behavior, job lifecycle transitions, and decision explanation rendering before deeper refactors.
#### Non-Goals
- No new screens, filters, realtime behaviors, or job actions.
- No opportunistic product changes hidden inside the refactor.
#### Dependencies
- Planner/lifecycle documentation and regression coverage should land before deeper structural work.
#### Acceptance Criteria
- Existing behavior, strings, filters, and action flows remain stable.
- `JobManager` is decomposed enough that future feature work does not require editing a single monolithic file for unrelated changes.
- Realtime ownership is easier to reason about and less likely to regress.
#### Required Tests
- Keep current backend and Playwright suites green.
- Add targeted regression coverage before extracting behavior into hooks/components.
#### Solution
### 5. AMD AV1 Validation
#### Goal
- Validate and tune the existing AMD AV1 paths on real hardware.
#### Scope
- Cover Linux VAAPI and Windows AMF separately.
- Verify encoder selection, fallback behavior, and quality/performance defaults.
- Treat this as validation/tuning of existing wiring, not support-from-scratch.
#### Non-Goals
- Do not expand the stable support promise before validation is complete.
- Do not invent a fake validation story without real hardware runs.
#### Dependencies
- Access to representative Linux VAAPI and Windows AMF hardware.
- Repeatable manual verification notes and any scripted checks that can be automated.
#### Acceptance Criteria
- AMD AV1 is either validated with documented defaults and caveats, or explicitly left outside the supported matrix with clearer docs.
- Linux and Windows results are documented separately.
#### Required Tests
- Scripted verification where possible, plus recorded manual validation runs on real hardware.
#### Solution
### AMD AV1 Validation
- Validate Linux VAAPI and Windows AMF AV1 paths on real hardware
- Confirm encoder selection, fallback behavior, and defaults
- Keep support claims conservative until validation is real
---
## Medium Priority
## Later
### Power User Conversion / Remux Mode
**Target: 0.3.1**
### Documentation
- Architecture diagrams
- Contributor walkthrough improvements
- Video tutorials for common workflows
#### Overview
- Introduce a conversion mode that allows users to upload a single file and perform customizable transcoding or remuxing operations using Alchemist's existing pipeline
- Exposes the same encoding parameters Alchemist uses internally — no raw flag injection
- Clear separation between remux mode (container-only, lossless) and transcode mode (re-encode)
#### Goals
- Provide a fast, interactive way to process single files
- Reuse Alchemist's existing job queue and worker system
- Avoid becoming a HandBrake clone; prioritize clarity over exhaustive configurability
#### Storage Structure
- Store temporary files under `~/.alchemist/temp/`
```text
~/.alchemist/
temp/
uploads/ # raw uploaded files
outputs/ # processed outputs
jobs/ # job metadata (JSON)
```
- Each job gets a unique ID (UUID or short hash)
- Files stored per job:
`uploads/{job_id}/input.ext`
`outputs/{job_id}/output.ext`
`jobs/{job_id}.json`
#### Core Workflow
1. User uploads file (drag-and-drop or file picker)
2. File is stored in `~/.alchemist/temp/uploads/{job_id}/`
3. Media is probed (`ffprobe`) and stream info is displayed
4. User configures conversion settings
5. User submits job
6. Job is added to Alchemist queue
7. Worker processes job using standard pipeline
8. Output is saved to `~/.alchemist/temp/outputs/{job_id}/`
9. User downloads result
#### UI Design Principles
- Must feel like a visual encoding editor
- No oversimplified presets as the primary UX
- All major encoding options exposed
- Clear separation between remux and transcode modes
#### UI Sections
##### 1. Input
- File upload (drag-and-drop)
- Display:
- container format
- video streams (codec, resolution, HDR info)
- audio streams (codec, channels)
- subtitle streams
##### 2. Output Container
- Options: `mkv`, `mp4`, `webm`, `mov`
##### 3. Video Settings
- Codec: `copy`, `h264`, `hevc`, `av1`
- Mode: CRF (quality-based) or Bitrate (kbps)
- Preset: `ultrafast` to `veryslow`
- Resolution: original, custom (width/height), scale factor
- HDR: preserve, tonemap to SDR, strip metadata
##### 4. Audio Settings
- Codec: `copy`, `aac`, `opus`, `mp3`
- Bitrate
- Channels (`auto`, stereo, 5.1, etc.)
##### 5. Subtitle Settings
- Options: `copy`, burn-in, remove
##### 6. Remux Mode
- Toggle: `[ ] Remux only (no re-encode)`
- Forces stream copy, disables all encoding options
- Use cases: container changes, stream compatibility fixes, zero quality loss operations
##### 7. Command Preview
- Display the generated FFmpeg command before execution
- Example: `ffmpeg -i input.mkv -c:v libaom-av1 -crf 28 -b:v 0 -c:a opus output.mkv`
- Read-only — for transparency and debugging, not for editing
#### Job System Integration
- Use the existing Alchemist job queue
- Treat each conversion as a standard job
- Stream logs live to the UI
#### Job Metadata Example
```json
{
"id": "abc123",
"input_path": "...",
"output_path": "...",
"mode": "transcode | remux",
"video": { "codec": "av1", "crf": 28, "preset": "slow" },
"audio": { "codec": "opus", "bitrate": 128 },
"container": "mkv",
"status": "queued"
}
```
#### Cleanup Strategy
- Auto-delete uploads after X hours
- Auto-delete outputs after download or timeout
- Enforce a max file size limit
- Run a periodic cleanup job that scans the temp directory
#### Security Considerations
- Sanitize filenames
- Prevent path traversal
- Validate file types via probing, not extension
- Isolate the temp directory
- Do not allow arbitrary file path input
#### Non-Goals
- Not a beginner-focused tool
- Not a replacement for full automation workflows
- Not a cloud encoding service; no public hosting assumed
- No raw FFmpeg flag injection (see Out of Scope)
#### Solution
### Library Intelligence
- Expand recommendations beyond duplicate detection into remux-only opportunities, wasteful audio layouts, commentary/descriptive-track cleanup, and duplicate-ish title variants
- Keep the feature focused on storage and library quality, not general media management
#### Solution
### Auto-Priority Rules
- Define rules that automatically assign queue priority based on file attributes
- Rule conditions: file path pattern (glob), file age, file size, source watch folder
- Example: "anything under `/movies/` gets priority 2", "files over 20 GB get priority 1"
- Rules evaluated at enqueue time; manual priority overrides still win
- Configured in Settings alongside other library behavior
#### Solution
### Performance Optimizations
- Profile scanner/analyzer hot paths before changing behavior
- Only tune connection pooling after measuring database contention under load
- Consider caching repeated FFprobe calls on identical files if profiling shows probe churn is material
#### Solution
### Audio Normalization
- Apply EBU R128 loudness normalization to audio streams during transcode
- Target: -23 LUFS integrated, -1 dBTP true peak (broadcast standard)
- Opt-in per library profile, disabled by default
- Implemented via `loudnorm` FFmpeg filter — no new dependencies
- Two-pass mode for accurate results; single-pass for speed
- Should surface loudness stats (measured LUFS, correction applied) in
the job detail panel alongside existing encode stats
- Do not normalize if audio is being copied (copy mode bypasses this)
#### Solution
### UI Improvements
- Add keyboard shortcuts for common actions
#### Solution
### Notification Improvements
- **Granular event types** — current events are too coarse. Add:
- `encode.started` — job moved from queued to encoding
- `encode.completed` — with savings summary (size before/after)
- `encode.failed` — with failure reason included in payload
- `scan.completed` — N files discovered, M queued
- `engine.idle` — queue drained, nothing left to process
- `daily.summary` — opt-in digest of the day's activity
- **Per-target event filtering** — each notification target should
independently choose which events it receives. Currently, all targets
get the same events. A Discord webhook might want everything; a
phone webhook might only want failures.
- **Richer payloads** — completed job notifications should include
filename, input size, output size, space saved, and encode time.
Currently, the payload is minimal.
- **Add Telegram integration** — bot token + chat ID, same event
model as Discord. No new dependencies needed (reqwest already present).
- **Improve Discord notifications** — add bot token support where it meaningfully improves delivery or enables richer messaging.
- **Add email support** — SMTP with TLS. Lower priority than Telegram.
Most self-hosters already have Discord or Telegram.
#### Solution
---
## Low Priority
### Code Quality
- Increase coverage for edge cases
- Add property-based tests for codec parameter generation
- Add fuzzing for FFprobe parsing
### Planning / Simulation Mode
- Not a current focus. If revisited, start with a single current-config dry-run before attempting comparison mode.
- Add a first-class simulation flow that shows what Alchemist would transcode, remux, or skip, without mutating the library.
- Show estimated total bytes recoverable, action counts, top skip reasons, and per-file predicted actions.
- Reuse the scanner, analyzer, and planner, but stop before executor and promotion stages.
- Only add profile/codec/threshold comparison snapshots after the simple single-config flow proves useful.
- Promote this only after the current Active Priorities are done
- Single-config dry run first
- No comparison matrix or scenario planner until the first simple flow proves useful
#### Solution
### Audio Normalization
- Add opt-in EBU R128 loudness normalization during transcode
- Surface loudness metrics in job detail
- Keep copy-mode bypass behavior explicit
- Keep this secondary unless it clearly supports the automation-first mission
### API Token Authentication + API Documentation
- Add support for static bearer tokens as an alternative to session cookies
- Enables programmatic access from scripts, home automation (Home Assistant, n8n), and CLI tools without managing session state
- Tokens generated and revoked from Settings; no expiry by default, revocable any time
- Expand API documentation to cover all endpoints with request/response examples
### Auto-Priority Rules
- Add explainable enqueue-time priority automation
- Manual priority overrides must still win
- Matched rules must be visible in the UI to keep queue behavior trustworthy
#### Solution
### UI Improvements
- Tighten settings and detail-panel consistency
- Improve dense forms, empty states, and action affordances
- Keep this narrowly scoped to automation-supporting UX problems
### Passthrough Mode
- A toggle that keeps all watch folders and watcher active but prevents the planner from queuing new jobs
- Different from Pause — Pause stops active encodes; Passthrough lets the system observe and index the library without touching anything
- Useful when testing settings or onboarding a new library without triggering encodes immediately
#### Solution
### Base URL / Subpath Configuration
- Allow Alchemist to be served at a non-root path (e.g. `/alchemist/`) via `ALCHEMIST_BASE_URL`
- Common self-hosting pattern for reverse proxy setups running multiple services on one domain
- Low urgency — most users run Alchemist on a dedicated subdomain or port
#### Solution
### Keyboard Shortcuts
- Add a concrete shortcut set for common jobs/logs/conversion actions
- Avoid a vague “shortcut layer everywhere” rollout
- First likely cut if scope pressure appears
### Features from DESIGN_PHILOSOPHY.md
- Add batch job templates
#### Solution
### Code Quality
- Increase test coverage for edge cases
- Add property-based testing for codec parameter generation
- Add fuzzing for FFprobe output parsing
#### Solution
### Documentation
- Add architecture diagrams
- Add contributor guide with development setup
- Video tutorials for common workflows
#### Solution
### Distribution
- Add Homebrew formula
- Add AUR package
- Add Flatpak/Snap packages
- Improve Windows installer (WiX) with auto-updates
#### Solution
### Distribution Follow-Ons
- Flatpak / Snap packaging
- Additional installer polish beyond the current Windows update-check flow
- Only promote these if they become strategically important
---
## Completed (Recent)
## Out of Scope
- [x] Split server.rs into modules
- [x] Add typed broadcast channels
- [x] Add security headers middleware
- [x] Add database query timeouts
- [x] Add config file permission check
- [x] Handle SSE lagged events in frontend
- [x] Create FFmpeg integration tests
- [x] Expand documentation site
- [x] Pin MSRV in Cargo.toml
- [x] Add schema versioning for migrations
- [x] Enable SQLite WAL mode
- [x] Add theme persistence and selection
- [x] Add job history filtering and search
- [x] Add subtitle extraction sidecars
- [x] Decision clarity — structured skip/failure explanations with codes, plain-English summaries, measured values, and operator guidance
- [x] Retry backoff visibility — countdown on failed jobs, attempt count in job detail
- [x] Per-library profiles (Space Saver, Quality First, Balanced, Streaming)
- [x] Engine runtime modes (Background / Balanced / Throughput) with drain support
- [x] Container remuxing (MP4 → MKV lossless)
- [x] Stream rules (commentary stripping, language filtering, default-only audio)
- [x] VMAF quality gating
- [x] Library Intelligence duplicate detection
- [x] Library Doctor health scanning
- [x] Boot auto-analysis
- [x] Mobile layout
- Custom FFmpeg flags / raw flag injection
- Distributed encoding across multiple machines
- Features that turn Alchemist into a general-purpose media
workbench
- Fuzzy media-management intelligence that drifts away from storage quality and encode operations

View File

@@ -7,12 +7,12 @@ services:
- "3000:3000"
volumes:
# Configuration file
- ./config.toml:/app/config/config.toml:ro
- ${HOME}/.config/alchemist/config.toml:/app/config/config.toml:ro
# Media directories (adjust paths as needed)
- /path/to/media:/media
- /path/to/output:/output
# Persistent database
- alchemist_data:/app/data
- ${HOME}/.config/alchemist:/app/data
environment:
- RUST_LOG=info
- TZ=America/New_York
@@ -29,6 +29,3 @@ services:
- driver: nvidia
count: 1
capabilities: [gpu]
volumes:
alchemist_data:

View File

@@ -13,11 +13,66 @@ except:
`/api/settings/bundle`, `/api/system/hardware`
Authentication is established by `POST /api/auth/login`.
The backend also accepts `Authorization: Bearer <token>`,
but the web UI uses the session cookie.
The backend also accepts `Authorization: Bearer <token>`.
Bearer tokens now come in two classes:
- `read_only` — observability-only routes
- `full_access` — same route access as an authenticated session
The web UI still uses the session cookie.
Machine-readable contract:
- [OpenAPI spec](/openapi.yaml)
## Authentication
### API tokens
API tokens are created in **Settings → API Tokens**.
- token values are only shown once at creation time
- only hashed token material is stored server-side
- revoked tokens stop working immediately
Read-only tokens are intentionally limited to observability
routes such as stats, jobs, logs history, SSE, system info,
hardware info, library intelligence, and health/readiness.
### `GET /api/settings/api-tokens`
Lists token metadata only. Plaintext token values are never
returned after creation.
### `POST /api/settings/api-tokens`
Request:
```json
{
"name": "Prometheus",
"access_level": "read_only"
}
```
Response:
```json
{
"token": {
"id": 1,
"name": "Prometheus",
"access_level": "read_only"
},
"plaintext_token": "alc_tok_..."
}
```
### `DELETE /api/settings/api-tokens/:id`
Revokes a token in place. Existing automations using it will
begin receiving `401` or `403` depending on route class.
### `POST /api/auth/login`
Request:

View File

@@ -59,7 +59,8 @@ Default config file location:
| Field | Type | Default | Description |
|------|------|---------|-------------|
| `enabled` | bool | `false` | Master switch for notifications |
| `targets` | list | `[]` | Notification target objects with `name`, `target_type`, `endpoint_url`, `auth_token`, `events`, and `enabled` |
| `daily_summary_time_local` | string | `"09:00"` | Global local-time send window for daily summary notifications |
| `targets` | list | `[]` | Notification target objects with `name`, `target_type`, `config_json`, `events`, and `enabled` |
## `[files]`
@@ -96,6 +97,7 @@ requires at least one day in every window.
| `enable_telemetry` | bool | `false` | Opt-in anonymous telemetry switch |
| `log_retention_days` | int | `30` | Log retention period in days |
| `engine_mode` | string | `"balanced"` | Runtime engine mode: `background`, `balanced`, or `throughput` |
| `base_url` | string | `""` | Path prefix for serving Alchemist under a subpath such as `/alchemist` |
## Example

View File

@@ -13,8 +13,8 @@ services:
ports:
- "3000:3000"
volumes:
- /path/to/config:/app/config
- /path/to/data:/app/data
- ~/.config/alchemist:/app/config
- ~/.config/alchemist:/app/data
- /path/to/media:/media
- /tmp/alchemist:/tmp # optional: fast SSD for temp files
environment:
@@ -27,8 +27,7 @@ services:
| Mount | Purpose |
|-------|---------|
| `/app/config` | `config.toml` persists across restarts |
| `/app/data` | `alchemist.db` (SQLite) — persists across restarts |
| `~/.config/alchemist` on the host | Mounted into `/app/config` and `/app/data` so `config.toml` and `alchemist.db` persist across restarts |
| `/media` | Your media library — mount read-write |
| `/tmp` (optional) | Temp dir for in-progress encodes — use a fast SSD |

View File

@@ -9,6 +9,7 @@ description: All environment variables Alchemist reads at startup.
| `ALCHEMIST_CONFIG` | (alias) | Alias for `ALCHEMIST_CONFIG_PATH` |
| `ALCHEMIST_DB_PATH` | `~/.config/alchemist/alchemist.db` | Path to SQLite database |
| `ALCHEMIST_DATA_DIR` | (none) | Sets data dir; `alchemist.db` placed here |
| `ALCHEMIST_BASE_URL` | root (`/`) | Path prefix for serving Alchemist under a subpath such as `/alchemist` |
| `ALCHEMIST_CONFIG_MUTABLE` | `true` | Set `false` to block runtime config writes |
| `RUST_LOG` | `info` | Log level: `info`, `debug`, `alchemist=trace` |
@@ -26,3 +27,11 @@ environment:
- ALCHEMIST_CONFIG_PATH=/app/config/config.toml
- ALCHEMIST_DB_PATH=/app/data/alchemist.db
```
Recommended host bind mount:
```yaml
volumes:
- ~/.config/alchemist:/app/config
- ~/.config/alchemist:/app/data
```

View File

@@ -35,8 +35,8 @@ services:
ports:
- "3000:3000"
volumes:
- /path/to/config:/app/config
- /path/to/data:/app/data
- ~/.config/alchemist:/app/config
- ~/.config/alchemist:/app/data
- /path/to/media:/media
environment:
- ALCHEMIST_CONFIG_PATH=/app/config/config.toml
@@ -58,8 +58,8 @@ docker run -d \
--name alchemist \
--gpus all \
-p 3000:3000 \
-v /path/to/config:/app/config \
-v /path/to/data:/app/data \
-v ~/.config/alchemist:/app/config \
-v ~/.config/alchemist:/app/data \
-v /path/to/media:/media \
-e ALCHEMIST_CONFIG_PATH=/app/config/config.toml \
-e ALCHEMIST_DB_PATH=/app/data/alchemist.db \
@@ -99,8 +99,8 @@ services:
ports:
- "3000:3000"
volumes:
- /path/to/config:/app/config
- /path/to/data:/app/data
- ~/.config/alchemist:/app/config
- ~/.config/alchemist:/app/data
- /path/to/media:/media
devices:
- /dev/dri:/dev/dri
@@ -123,8 +123,8 @@ docker run -d \
--group-add video \
--group-add render \
-p 3000:3000 \
-v /path/to/config:/app/config \
-v /path/to/data:/app/data \
-v ~/.config/alchemist:/app/config \
-v ~/.config/alchemist:/app/data \
-v /path/to/media:/media \
-e ALCHEMIST_CONFIG_PATH=/app/config/config.toml \
-e ALCHEMIST_DB_PATH=/app/data/alchemist.db \
@@ -159,8 +159,8 @@ services:
ports:
- "3000:3000"
volumes:
- /path/to/config:/app/config
- /path/to/data:/app/data
- ~/.config/alchemist:/app/config
- ~/.config/alchemist:/app/data
- /path/to/media:/media
devices:
- /dev/dri:/dev/dri
@@ -183,8 +183,8 @@ docker run -d \
--group-add video \
--group-add render \
-p 3000:3000 \
-v /path/to/config:/app/config \
-v /path/to/data:/app/data \
-v ~/.config/alchemist:/app/config \
-v ~/.config/alchemist:/app/data \
-v /path/to/media:/media \
-e ALCHEMIST_CONFIG_PATH=/app/config/config.toml \
-e ALCHEMIST_DB_PATH=/app/data/alchemist.db \

View File

@@ -47,8 +47,8 @@ services:
ports:
- "3000:3000"
volumes:
- /path/to/config:/app/config
- /path/to/data:/app/data
- ~/.config/alchemist:/app/config
- ~/.config/alchemist:/app/data
- /path/to/media:/media
devices:
- /dev/dri:/dev/dri
@@ -71,8 +71,8 @@ docker run -d \
--group-add video \
--group-add render \
-p 3000:3000 \
-v /path/to/config:/app/config \
-v /path/to/data:/app/data \
-v ~/.config/alchemist:/app/config \
-v ~/.config/alchemist:/app/data \
-v /path/to/media:/media \
-e ALCHEMIST_CONFIG_PATH=/app/config/config.toml \
-e ALCHEMIST_DB_PATH=/app/data/alchemist.db \

View File

@@ -53,8 +53,8 @@ services:
ports:
- "3000:3000"
volumes:
- /path/to/config:/app/config
- /path/to/data:/app/data
- ~/.config/alchemist:/app/config
- ~/.config/alchemist:/app/data
- /path/to/media:/media
devices:
- /dev/dri:/dev/dri
@@ -77,8 +77,8 @@ docker run -d \
--group-add video \
--group-add render \
-p 3000:3000 \
-v /path/to/config:/app/config \
-v /path/to/data:/app/data \
-v ~/.config/alchemist:/app/config \
-v ~/.config/alchemist:/app/data \
-v /path/to/media:/media \
-e ALCHEMIST_CONFIG_PATH=/app/config/config.toml \
-e ALCHEMIST_DB_PATH=/app/data/alchemist.db \

View File

@@ -41,8 +41,8 @@ services:
ports:
- "3000:3000"
volumes:
- /path/to/config:/app/config
- /path/to/data:/app/data
- ~/.config/alchemist:/app/config
- ~/.config/alchemist:/app/data
- /path/to/media:/media
environment:
- ALCHEMIST_CONFIG_PATH=/app/config/config.toml
@@ -64,8 +64,8 @@ docker run -d \
--name alchemist \
--gpus all \
-p 3000:3000 \
-v /path/to/config:/app/config \
-v /path/to/data:/app/data \
-v ~/.config/alchemist:/app/config \
-v ~/.config/alchemist:/app/data \
-v /path/to/media:/media \
-e ALCHEMIST_CONFIG_PATH=/app/config/config.toml \
-e ALCHEMIST_DB_PATH=/app/data/alchemist.db \

View File

@@ -18,8 +18,8 @@ services:
ports:
- "3000:3000"
volumes:
- /path/to/config:/app/config
- /path/to/data:/app/data
- ~/.config/alchemist:/app/config
- ~/.config/alchemist:/app/data
- /path/to/media:/media
environment:
- ALCHEMIST_CONFIG_PATH=/app/config/config.toml
@@ -43,8 +43,8 @@ For GPU passthrough (NVIDIA, Intel, AMD) see
docker run -d \
--name alchemist \
-p 3000:3000 \
-v /path/to/config:/app/config \
-v /path/to/data:/app/data \
-v ~/.config/alchemist:/app/config \
-v ~/.config/alchemist:/app/data \
-v /path/to/media:/media \
-e ALCHEMIST_CONFIG_PATH=/app/config/config.toml \
-e ALCHEMIST_DB_PATH=/app/data/alchemist.db \
@@ -58,6 +58,14 @@ Download from [GitHub Releases](https://github.com/bybrooklyn/alchemist/releases
Available for Linux x86_64, Linux ARM64, Windows x86_64,
macOS Apple Silicon, and macOS Intel.
### Package-manager metadata
Release packaging metadata is generated from this repo's
`packaging/` templates during release publication.
- Homebrew formula source lives under `packaging/homebrew/`
- AUR metadata source lives under `packaging/aur/`
FFmpeg must be installed separately:
```bash
@@ -73,6 +81,11 @@ winget install Gyan.FFmpeg # Windows
alchemist.exe # Windows
```
On Windows, Alchemist now exposes an in-app update check in
the About dialog that compares the running version against
the latest stable GitHub Release and links directly to the
download page when an update is available.
## From source
For macOS and Linux:

View File

@@ -1,6 +1,6 @@
---
title: Notifications
description: Configure Discord, Gotify, and webhook alerts.
description: Configure Discord, Gotify, Telegram, email, and webhook alerts.
---
Configure notification targets in **Settings → Notifications**.
@@ -13,15 +13,33 @@ Create a webhook in your Discord channel settings
(channel → Integrations → Webhooks). Paste the URL into
Alchemist.
### Discord bot
Provide a bot token and target channel ID. This is useful
when you want a single bot identity instead of per-channel
webhooks.
### Gotify
Enter your Gotify server URL and app token.
Enter your Gotify server URL and app token. Gotify supports
the same event filtering model as the other providers.
### Generic webhook
Alchemist sends a JSON POST to any URL you configure.
Works with Home Assistant, ntfy, Apprise, and custom scripts.
### Telegram
Provide a bot token and chat ID. Alchemist posts the same
human-readable event summaries it uses for Discord and
Gotify.
### Email
Configure an SMTP host, port, sender address, recipient
addresses, and security mode (`STARTTLS`, `TLS`, or `None`).
Webhook payloads now include structured explanation data
when relevant:
@@ -32,11 +50,27 @@ Discord and Gotify targets use the same structured
summary/detail/guidance internally, but render them as
human-readable message text instead of raw JSON.
## Event types
Targets can subscribe independently to:
- `encode.queued`
- `encode.started`
- `encode.completed`
- `encode.failed`
- `scan.completed`
- `engine.idle`
- `daily.summary`
Daily summaries are opt-in per target and use the global
local-time send window configured in **Settings →
Notifications**.
## Troubleshooting
If notifications aren't arriving:
1. Check the URL or token for extra whitespace
1. Check the URL, token, SMTP host, or chat ID for extra whitespace
2. Check **Logs** — Alchemist logs notification failures
with response code and body
3. Verify the server has network access to the target

View File

@@ -23,6 +23,10 @@ quality validation. Nothing is deleted until you say so.
- Encodes to AV1, HEVC, or H.264 based on your configured target
- Validates output quality (optional VMAF scoring) before promoting the result
- Tells you exactly why every skipped file was skipped
- Supports named API tokens for automation clients and external observability
- Can be served under a path prefix such as `/alchemist`
- Includes an experimental single-file Conversion / Remux workflow
- Expands Library Intelligence beyond duplicate detection into storage-focused recommendations
## What it is not
@@ -48,6 +52,7 @@ FFmpeg expert.
| Get it running | [Installation](/installation) |
| Docker setup | [Docker](/docker) |
| Get your GPU working | [Hardware](/hardware) |
| Automate with tokens | [API](/api) |
| Understand skip decisions | [Skip Decisions](/skip-decisions) |
| Tune per-library behavior | [Profiles](/profiles) |

View File

@@ -9,11 +9,11 @@ Served by the same binary as the backend. Default:
## Header bar
Visible on every page. Shows engine state and provides
**Start**, **Pause**, and **Stop** controls.
**Start** and **Stop** controls plus About and Logout.
- **Start** — begins processing
- **Pause** — freezes active jobs mid-encode, stops new jobs
- **Stop** — drain mode: active jobs finish, no new jobs start
- **About** — version info, environment info, and update-check status
## Dashboard
@@ -44,6 +44,23 @@ Filterable by level, searchable.
Space savings area chart, per-codec breakdown, aggregate
totals. Fills in as jobs complete.
## Intelligence
- Duplicate groups by basename
- Remux-only opportunities
- Wasteful audio layout recommendations
- Commentary / descriptive-track cleanup candidates
## Convert
Experimental single-file workflow:
- Upload a file
- Probe streams and metadata
- Configure transcode or remux settings
- Preview the generated FFmpeg command
- Queue the job and download the result when complete
## Settings tabs
| Tab | Controls |
@@ -54,7 +71,8 @@ totals. Fills in as jobs complete.
| Hardware | GPU vendor, device path, fallback |
| File Settings | Output extension, suffix, output root, replace strategy |
| Quality | VMAF scoring, minimum score, revert on failure |
| Notifications | Discord, Gotify, webhook targets |
| Notifications | Discord webhook, Discord bot, Gotify, Telegram, email, webhook targets, daily summary time |
| API Tokens | Named bearer tokens with `read_only` and `full_access` classes |
| Schedule | Time windows |
| Runtime | Engine mode, concurrent jobs override, Library Doctor |
| Appearance | Color theme (35+ themes) |

View File

@@ -1,6 +1,6 @@
{
"name": "alchemist-docs",
"version": "0.3.0",
"version": "0.3.1-rc.1",
"private": true,
"packageManager": "bun@1.3.5",
"scripts": {

163
docs/static/openapi.yaml vendored Normal file
View File

@@ -0,0 +1,163 @@
openapi: 3.0.3
info:
title: Alchemist API
version: 0.3.1-rc.1
description: >
Hand-maintained API contract for Alchemist. Authentication may use the
alchemist_session cookie or a bearer token. Bearer tokens support
read_only and full_access classes.
servers:
- url: /
components:
securitySchemes:
sessionCookie:
type: apiKey
in: cookie
name: alchemist_session
bearerToken:
type: http
scheme: bearer
bearerFormat: opaque
schemas:
ApiToken:
type: object
properties:
id:
type: integer
name:
type: string
access_level:
type: string
enum: [read_only, full_access]
created_at:
type: string
format: date-time
last_used_at:
type: string
format: date-time
nullable: true
revoked_at:
type: string
format: date-time
nullable: true
paths:
/api/auth/login:
post:
summary: Create an authenticated session cookie
requestBody:
required: true
content:
application/json:
schema:
type: object
required: [username, password]
properties:
username:
type: string
password:
type: string
responses:
"200":
description: Session created
/api/settings/api-tokens:
get:
summary: List API token metadata
security:
- sessionCookie: []
- bearerToken: []
responses:
"200":
description: Token metadata list
content:
application/json:
schema:
type: array
items:
$ref: "#/components/schemas/ApiToken"
post:
summary: Create an API token
security:
- sessionCookie: []
- bearerToken: []
requestBody:
required: true
content:
application/json:
schema:
type: object
required: [name, access_level]
properties:
name:
type: string
access_level:
type: string
enum: [read_only, full_access]
responses:
"200":
description: Token created; plaintext token shown once
/api/settings/api-tokens/{id}:
delete:
summary: Revoke an API token
security:
- sessionCookie: []
- bearerToken: []
parameters:
- in: path
name: id
required: true
schema:
type: integer
responses:
"200":
description: Token revoked
/api/system/info:
get:
summary: Get runtime version and environment information
security:
- sessionCookie: []
- bearerToken: []
responses:
"200":
description: Runtime info
/api/system/update:
get:
summary: Check GitHub Releases for the latest stable version
security:
- sessionCookie: []
- bearerToken: []
responses:
"200":
description: Update status
/api/jobs:
get:
summary: List jobs
security:
- sessionCookie: []
- bearerToken: []
responses:
"200":
description: Job list
/api/jobs/{id}/details:
get:
summary: Get a single job detail record
security:
- sessionCookie: []
- bearerToken: []
parameters:
- in: path
name: id
required: true
schema:
type: integer
responses:
"200":
description: Job detail
/api/engine/status:
get:
summary: Get current engine status
security:
- sessionCookie: []
- bearerToken: []
responses:
"200":
description: Engine status

View File

@@ -242,9 +242,9 @@ release-verify:
@echo "── Actionlint ──"
actionlint .github/workflows/*.yml
@echo "── Web verify ──"
cd web && bun install --frozen-lockfile && bun run verify && bun audit
cd web && bun install --frozen-lockfile && bun run verify && python3 ../scripts/run_bun_audit.py .
@echo "── Docs verify ──"
cd docs && bun install --frozen-lockfile && bun run build && bun audit
cd docs && bun install --frozen-lockfile && bun run build && python3 ../scripts/run_bun_audit.py .
@echo "── E2E backend build ──"
rm -rf target/debug/incremental
CARGO_INCREMENTAL=0 cargo build --locked --no-default-features
@@ -403,7 +403,7 @@ fmt:
# Clean all build artifacts
clean:
cargo clean
rm -rf web/dist web/node_modules web-e2e/node_modules
rm -rf web/dist web/node_modules web-e2e/node_modules docs/node_modules docs/build
# Count lines of source code
loc:

View File

@@ -0,0 +1,63 @@
-- Migration 7:
--  1. Rebuild notification_targets so provider settings live in a
--     per-provider config_json payload instead of the flat
--     endpoint_url / auth_token columns, and widen target_type to the
--     new provider set (discord webhook/bot, telegram, email).
--  2. Add conversion_jobs for the single-file Conversion / Remux workflow.

-- SQLite cannot alter CHECK constraints in place, so create the new
-- shape side by side, copy rows across, then swap tables.
CREATE TABLE IF NOT EXISTS notification_targets_new (
    id INTEGER PRIMARY KEY AUTOINCREMENT,
    name TEXT NOT NULL,
    target_type TEXT CHECK(target_type IN ('discord_webhook', 'discord_bot', 'gotify', 'webhook', 'telegram', 'email')) NOT NULL,
    -- Provider-specific keys (webhook_url, bot_token, smtp_host, ...).
    config_json TEXT NOT NULL DEFAULT '{}',
    events TEXT NOT NULL DEFAULT '["encode.failed","encode.completed"]',
    enabled BOOLEAN DEFAULT 1,
    created_at DATETIME DEFAULT CURRENT_TIMESTAMP
);

-- Copy legacy rows, mapping each old target_type onto the new name and
-- packing its endpoint_url/auth_token into the matching config_json keys.
INSERT INTO notification_targets_new (id, name, target_type, config_json, events, enabled, created_at)
SELECT
    id,
    name,
    -- Legacy 'discord' always meant a channel webhook.
    CASE target_type
        WHEN 'discord' THEN 'discord_webhook'
        WHEN 'gotify' THEN 'gotify'
        ELSE 'webhook'
    END,
    CASE target_type
        WHEN 'discord' THEN json_object('webhook_url', endpoint_url)
        WHEN 'gotify' THEN json_object('server_url', endpoint_url, 'app_token', COALESCE(auth_token, ''))
        ELSE json_object('url', endpoint_url, 'auth_token', auth_token)
    END,
    -- Legacy short event names ("failed", "completed") are normalized
    -- to the canonical encode.* names by the application on load.
    COALESCE(events, '["failed","completed"]'),
    enabled,
    created_at
FROM notification_targets;

DROP TABLE notification_targets;
ALTER TABLE notification_targets_new RENAME TO notification_targets;

CREATE INDEX IF NOT EXISTS idx_notification_targets_enabled
    ON notification_targets(enabled);

-- Single-file upload -> probe -> transcode/remux -> download jobs.
CREATE TABLE IF NOT EXISTS conversion_jobs (
    id INTEGER PRIMARY KEY AUTOINCREMENT,
    upload_path TEXT NOT NULL,
    output_path TEXT,
    mode TEXT NOT NULL,
    settings_json TEXT NOT NULL,
    probe_json TEXT,
    -- Optional link to the encode job that executes this conversion;
    -- cleared (not cascaded) if that job row is deleted.
    linked_job_id INTEGER REFERENCES jobs(id) ON DELETE SET NULL,
    status TEXT NOT NULL DEFAULT 'uploaded',
    -- Uploads are temporary; rows past expires_at are eligible for cleanup.
    expires_at TEXT NOT NULL,
    downloaded_at TEXT,
    created_at TEXT NOT NULL DEFAULT (datetime('now')),
    updated_at TEXT NOT NULL DEFAULT (datetime('now'))
);

CREATE INDEX IF NOT EXISTS idx_conversion_jobs_status_updated_at
    ON conversion_jobs(status, updated_at);
CREATE INDEX IF NOT EXISTS idx_conversion_jobs_expires_at
    ON conversion_jobs(expires_at);
CREATE INDEX IF NOT EXISTS idx_conversion_jobs_linked_job_id
    ON conversion_jobs(linked_job_id);

INSERT OR REPLACE INTO schema_info (key, value) VALUES
    ('schema_version', '7'),
    ('min_compatible_version', '0.2.5'),
    ('last_updated', datetime('now'));

View File

@@ -0,0 +1,17 @@
-- Migration 8: named API bearer tokens for automation clients.
CREATE TABLE IF NOT EXISTS api_tokens (
    id INTEGER PRIMARY KEY AUTOINCREMENT,
    name TEXT NOT NULL,
    -- Only hashed token material is stored; the plaintext value is
    -- shown once at creation time and never persisted.
    token_hash TEXT NOT NULL UNIQUE,
    access_level TEXT CHECK(access_level IN ('read_only', 'full_access')) NOT NULL,
    created_at DATETIME DEFAULT CURRENT_TIMESTAMP,
    last_used_at DATETIME,
    -- NULL while the token is active; set when it is revoked.
    revoked_at DATETIME
);

-- Covers the active-token lookup (revoked_at IS NULL, by access class).
CREATE INDEX IF NOT EXISTS idx_api_tokens_active
    ON api_tokens(revoked_at, access_level);

INSERT OR REPLACE INTO schema_info (key, value) VALUES
    ('schema_version', '8'),
    ('min_compatible_version', '0.2.5'),
    ('last_updated', datetime('now'));

View File

@@ -0,0 +1,14 @@
# AUR PKGBUILD template for the prebuilt Alchemist binary.
# {{VERSION}} and {{LINUX_X86_64_SHA256}} are substituted by
# scripts/render_distribution.py during release publication.
pkgname=alchemist-bin
pkgver={{VERSION}}
pkgrel=1
pkgdesc="Self-hosted media transcoding pipeline with a web UI"
arch=('x86_64')
url="https://github.com/bybrooklyn/alchemist"
license=('GPL3')
# FFmpeg is a runtime dependency; it is not bundled in the release tarball.
depends=('ffmpeg')
source=("${pkgname}-${pkgver}.tar.gz::https://github.com/bybrooklyn/alchemist/releases/download/v${pkgver}/alchemist-linux-x86_64.tar.gz")
sha256sums=('{{LINUX_X86_64_SHA256}}')

package() {
    # Install the extracted release binary as /usr/bin/alchemist.
    install -Dm755 "${srcdir}/alchemist" "${pkgdir}/usr/bin/alchemist"
}

View File

@@ -0,0 +1,29 @@
# Homebrew formula template for Alchemist release binaries.
# {{VERSION}} and the *_SHA256 placeholders are substituted by
# scripts/render_distribution.py during release publication.
class Alchemist < Formula
  desc "Self-hosted media transcoding pipeline with a web UI"
  homepage "https://github.com/bybrooklyn/alchemist"
  # Full SPDX identifier: bare "GPL-3.0" is a deprecated SPDX id and is
  # flagged by `brew audit`. NOTE(review): assumes "only" (matching the
  # AUR template's GPL3) — confirm against the repo's LICENSE file.
  license "GPL-3.0-only"
  version "{{VERSION}}"

  on_macos do
    if Hardware::CPU.arm?
      url "https://github.com/bybrooklyn/alchemist/releases/download/v{{VERSION}}/alchemist-macos-arm64.tar.gz"
      sha256 "{{MACOS_ARM64_SHA256}}"
    else
      url "https://github.com/bybrooklyn/alchemist/releases/download/v{{VERSION}}/alchemist-macos-x86_64.tar.gz"
      sha256 "{{MACOS_X86_64_SHA256}}"
    end
  end

  on_linux do
    url "https://github.com/bybrooklyn/alchemist/releases/download/v{{VERSION}}/alchemist-linux-x86_64.tar.gz"
    sha256 "{{LINUX_X86_64_SHA256}}"
  end

  def install
    bin.install "alchemist"
  end

  test do
    # Smoke test: the installed binary reports the packaged version.
    assert_match version.to_s, shell_output("#{bin}/alchemist --version")
  end
end

View File

@@ -0,0 +1,5 @@
{
"github_repo": "bybrooklyn/alchemist",
"channel": "stable",
"release_page": "https://github.com/bybrooklyn/alchemist/releases"
}

View File

@@ -0,0 +1,66 @@
#!/usr/bin/env python3
from __future__ import annotations
import argparse
import hashlib
from pathlib import Path
def sha256(path: Path) -> str:
    """Return the hex SHA-256 digest of *path*, streamed in 1 MiB chunks."""
    digest = hashlib.sha256()
    with path.open("rb") as stream:
        while True:
            chunk = stream.read(1024 * 1024)
            if not chunk:
                break
            digest.update(chunk)
    return digest.hexdigest()
def render_template(template: str, replacements: dict[str, str]) -> str:
    """Substitute every ``{{KEY}}`` placeholder in *template*.

    Keys absent from *replacements* are left untouched.
    """
    result = template
    for placeholder, substitution in replacements.items():
        result = result.replace("{{" + placeholder + "}}", substitution)
    return result
def main() -> int:
    """Render packaging templates (Homebrew, AUR) for a release.

    Computes SHA-256 digests of the built release archives in
    ``--assets-dir``, substitutes them and the release version into the
    templates under ``packaging/``, and writes the rendered files into
    ``--output-dir``. Returns 0 on success; a missing asset or template
    raises (and fails the release job) with the offending path.
    """
    parser = argparse.ArgumentParser(
        description="Render distribution metadata from packaging templates."
    )
    parser.add_argument(
        "--version",
        required=True,
        help="release version, with or without a leading 'v'",
    )
    parser.add_argument("--assets-dir", required=True)
    parser.add_argument("--output-dir", required=True)
    args = parser.parse_args()

    # Repo root: this script lives in <root>/scripts/.
    root = Path(__file__).resolve().parent.parent
    assets_dir = Path(args.assets_dir).resolve()
    output_dir = Path(args.output_dir).resolve()
    output_dir.mkdir(parents=True, exist_ok=True)

    # Accept both "v0.3.1" and "0.3.1" so callers need not pre-strip the tag.
    version = args.version.removeprefix("v")

    replacements = {
        "VERSION": version,
        "LINUX_X86_64_SHA256": sha256(assets_dir / "alchemist-linux-x86_64.tar.gz"),
        "MACOS_X86_64_SHA256": sha256(assets_dir / "alchemist-macos-x86_64.tar.gz"),
        "MACOS_ARM64_SHA256": sha256(assets_dir / "alchemist-macos-arm64.tar.gz"),
    }
    # (template source, rendered destination) pairs.
    templates = [
        (
            root / "packaging/homebrew/alchemist.rb.tmpl",
            output_dir / "homebrew/alchemist.rb",
        ),
        (
            root / "packaging/aur/PKGBUILD.tmpl",
            output_dir / "aur/PKGBUILD",
        ),
    ]
    for template_path, output_path in templates:
        output_path.parent.mkdir(parents=True, exist_ok=True)
        output_path.write_text(
            render_template(template_path.read_text(encoding="utf-8"), replacements),
            encoding="utf-8",
        )
    return 0


if __name__ == "__main__":
    raise SystemExit(main())

32
scripts/run_bun_audit.py Normal file
View File

@@ -0,0 +1,32 @@
#!/usr/bin/env python3
import pathlib
import subprocess
import sys
# Cap on how long `bun audit` may run before the check is skipped.
AUDIT_TIMEOUT_SECONDS = 60


def main() -> int:
    """Run ``bun audit`` in the directory given as the sole argument.

    Returns bun's own exit code on completion, 2 on usage error, 1 when
    the ``bun`` executable cannot be found, and 0 (with a warning) on
    timeout — a slow registry must not block the release check.
    """
    if len(sys.argv) != 2:
        print("usage: run_bun_audit.py <cwd>", file=sys.stderr)
        return 2
    cwd = pathlib.Path(sys.argv[1]).resolve()
    try:
        completed = subprocess.run(
            ["bun", "audit"],
            cwd=cwd,
            check=False,
            timeout=AUDIT_TIMEOUT_SECONDS,
        )
    except subprocess.TimeoutExpired:
        # Deliberately best-effort: log and keep the release check going.
        print(
            f"warning: bun audit timed out after {AUDIT_TIMEOUT_SECONDS}s in {cwd}; continuing release-check",
            file=sys.stderr,
        )
        return 0
    except FileNotFoundError:
        # A clear diagnostic beats a traceback when bun is not installed.
        print(f"error: 'bun' executable not found while auditing {cwd}", file=sys.stderr)
        return 1
    return completed.returncode


if __name__ == "__main__":
    raise SystemExit(main())

View File

@@ -1,5 +1,6 @@
use anyhow::Result;
use serde::{Deserialize, Serialize};
use serde_json::{Map as JsonMap, Value as JsonValue};
use std::path::Path;
#[derive(Debug, Serialize, Deserialize, Clone)]
@@ -363,13 +364,15 @@ pub(crate) fn default_tonemap_desat() -> f32 {
0.2
}
#[derive(Debug, Serialize, Deserialize, Clone, Default)]
#[derive(Debug, Serialize, Deserialize, Clone)]
pub struct NotificationsConfig {
pub enabled: bool,
#[serde(default)]
pub allow_local_notifications: bool,
#[serde(default)]
pub targets: Vec<NotificationTargetConfig>,
#[serde(default = "default_daily_summary_time_local")]
pub daily_summary_time_local: String,
#[serde(default)]
pub webhook_url: Option<String>,
#[serde(default)]
@@ -380,12 +383,15 @@ pub struct NotificationsConfig {
pub notify_on_failure: bool,
}
#[derive(Debug, Serialize, Deserialize, Clone, PartialEq, Eq)]
#[derive(Debug, Serialize, Deserialize, Clone, PartialEq)]
pub struct NotificationTargetConfig {
pub name: String,
pub target_type: String,
pub endpoint_url: String,
#[serde(default)]
pub config_json: JsonValue,
#[serde(default, skip_serializing_if = "Option::is_none")]
pub endpoint_url: Option<String>,
#[serde(default, skip_serializing_if = "Option::is_none")]
pub auth_token: Option<String>,
#[serde(default)]
pub events: Vec<String>,
@@ -393,6 +399,221 @@ pub struct NotificationTargetConfig {
pub enabled: bool,
}
impl Default for NotificationsConfig {
    /// Notifications start disabled with no targets configured; the
    /// daily-summary window uses the standard "09:00" default and the
    /// legacy single-webhook fields are left unset.
    fn default() -> Self {
        Self {
            enabled: false,
            allow_local_notifications: false,
            notify_on_complete: false,
            notify_on_failure: false,
            daily_summary_time_local: default_daily_summary_time_local(),
            targets: Vec::default(),
            webhook_url: None,
            discord_webhook: None,
        }
    }
}
/// Default local-time send window ("HH:MM") for daily summary notifications.
fn default_daily_summary_time_local() -> String {
    String::from("09:00")
}
// Canonical notification event identifiers. Legacy spellings (e.g.
// "failed", "completed") are mapped onto these by
// `normalize_notification_event`.
pub const NOTIFICATION_EVENT_ENCODE_QUEUED: &str = "encode.queued";
pub const NOTIFICATION_EVENT_ENCODE_STARTED: &str = "encode.started";
pub const NOTIFICATION_EVENT_ENCODE_COMPLETED: &str = "encode.completed";
pub const NOTIFICATION_EVENT_ENCODE_FAILED: &str = "encode.failed";
pub const NOTIFICATION_EVENT_SCAN_COMPLETED: &str = "scan.completed";
pub const NOTIFICATION_EVENT_ENGINE_IDLE: &str = "engine.idle";
pub const NOTIFICATION_EVENT_DAILY_SUMMARY: &str = "daily.summary";

/// All supported canonical events.
pub const NOTIFICATION_EVENTS: [&str; 7] = [
    NOTIFICATION_EVENT_ENCODE_QUEUED,
    NOTIFICATION_EVENT_ENCODE_STARTED,
    NOTIFICATION_EVENT_ENCODE_COMPLETED,
    NOTIFICATION_EVENT_ENCODE_FAILED,
    NOTIFICATION_EVENT_SCAN_COMPLETED,
    NOTIFICATION_EVENT_ENGINE_IDLE,
    NOTIFICATION_EVENT_DAILY_SUMMARY,
];
/// Map a raw event name — canonical or legacy spelling — onto its
/// canonical static identifier. Returns `None` for unknown names.
fn normalize_notification_event(event: &str) -> Option<&'static str> {
    let canonical = match event.trim() {
        "queued" | "encode.queued" => NOTIFICATION_EVENT_ENCODE_QUEUED,
        "encoding" | "remuxing" | "encode.started" => NOTIFICATION_EVENT_ENCODE_STARTED,
        "completed" | "encode.completed" => NOTIFICATION_EVENT_ENCODE_COMPLETED,
        "failed" | "encode.failed" => NOTIFICATION_EVENT_ENCODE_FAILED,
        "scan.completed" => NOTIFICATION_EVENT_SCAN_COMPLETED,
        "engine.idle" => NOTIFICATION_EVENT_ENGINE_IDLE,
        "daily.summary" => NOTIFICATION_EVENT_DAILY_SUMMARY,
        _ => return None,
    };
    Some(canonical)
}
/// Canonicalize a list of event names, dropping unknown entries and
/// duplicates while preserving first-seen order.
pub fn normalize_notification_events(events: &[String]) -> Vec<String> {
    let mut canonical: Vec<String> = Vec::new();
    for value in events.iter().filter_map(|event| normalize_notification_event(event)) {
        if !canonical.iter().any(|existing| existing == value) {
            canonical.push(value.to_owned());
        }
    }
    canonical
}
/// Read `key` from a JSON object as a trimmed string, treating a
/// missing key, non-string value, or blank string as absent.
fn config_json_string(config_json: &JsonValue, key: &str) -> Option<String> {
    let raw = config_json.get(key).and_then(JsonValue::as_str)?;
    let trimmed = raw.trim();
    if trimmed.is_empty() {
        None
    } else {
        Some(trimmed.to_string())
    }
}
impl NotificationTargetConfig {
    /// Upgrade a target deserialized from the legacy flat shape
    /// (`target_type = "discord"` plus top-level `endpoint_url` /
    /// `auth_token`) into the current `config_json`-based shape.
    /// Existing `config_json` keys always win over legacy fields.
    pub fn migrate_legacy_shape(&mut self) {
        // Legacy "discord" predates the webhook/bot split; it meant webhook.
        self.target_type = match self.target_type.as_str() {
            "discord" => "discord_webhook".to_string(),
            other => other.to_string(),
        };
        // Guarantee config_json is an object before key insertion below.
        if !self.config_json.is_object() {
            self.config_json = JsonValue::Object(JsonMap::new());
        }
        let mut config_map = self
            .config_json
            .as_object()
            .cloned()
            .unwrap_or_else(JsonMap::new);
        // Backfill provider-specific keys from the legacy flat fields,
        // but never overwrite a key the config already has.
        match self.target_type.as_str() {
            "discord_webhook" => {
                if !config_map.contains_key("webhook_url") {
                    if let Some(endpoint_url) = self.endpoint_url.clone() {
                        config_map
                            .insert("webhook_url".to_string(), JsonValue::String(endpoint_url));
                    }
                }
            }
            "gotify" => {
                if !config_map.contains_key("server_url") {
                    if let Some(endpoint_url) = self.endpoint_url.clone() {
                        config_map
                            .insert("server_url".to_string(), JsonValue::String(endpoint_url));
                    }
                }
                if !config_map.contains_key("app_token") {
                    if let Some(auth_token) = self.auth_token.clone() {
                        config_map.insert("app_token".to_string(), JsonValue::String(auth_token));
                    }
                }
            }
            "webhook" => {
                if !config_map.contains_key("url") {
                    if let Some(endpoint_url) = self.endpoint_url.clone() {
                        config_map.insert("url".to_string(), JsonValue::String(endpoint_url));
                    }
                }
                if !config_map.contains_key("auth_token") {
                    if let Some(auth_token) = self.auth_token.clone() {
                        config_map.insert("auth_token".to_string(), JsonValue::String(auth_token));
                    }
                }
            }
            // New target types (discord_bot, telegram, email) never had
            // a legacy shape, so there is nothing to backfill.
            _ => {}
        }
        self.config_json = JsonValue::Object(config_map);
        self.events = normalize_notification_events(&self.events);
    }

    /// Prepare the target for persistence: drop the legacy flat fields
    /// (they are skip_serializing_if = None anyway), canonicalize the
    /// event list, and ensure config_json serializes as an object.
    pub fn canonicalize_for_save(&mut self) {
        self.endpoint_url = None;
        self.auth_token = None;
        self.events = normalize_notification_events(&self.events);
        if !self.config_json.is_object() {
            self.config_json = JsonValue::Object(JsonMap::new());
        }
    }

    /// Check that the target is complete and coherent: non-empty name,
    /// object-shaped config_json, at least one recognized event, and
    /// the provider-specific required config keys for its target_type.
    /// Returns an error naming the first violated requirement.
    pub fn validate(&self) -> Result<()> {
        if self.name.trim().is_empty() {
            anyhow::bail!("notification target name must not be empty");
        }
        if !self.config_json.is_object() {
            anyhow::bail!("notification target config_json must be an object");
        }
        if self.events.is_empty() {
            anyhow::bail!("notification target events must not be empty");
        }
        for event in &self.events {
            if normalize_notification_event(event).is_none() {
                anyhow::bail!("unsupported notification event '{}'", event);
            }
        }
        // Required keys must be present AND non-blank (config_json_string
        // trims and rejects empty strings).
        match self.target_type.as_str() {
            "discord_webhook" => {
                if config_json_string(&self.config_json, "webhook_url").is_none() {
                    anyhow::bail!("discord_webhook target requires config_json.webhook_url");
                }
            }
            "discord_bot" => {
                if config_json_string(&self.config_json, "bot_token").is_none() {
                    anyhow::bail!("discord_bot target requires config_json.bot_token");
                }
                if config_json_string(&self.config_json, "channel_id").is_none() {
                    anyhow::bail!("discord_bot target requires config_json.channel_id");
                }
            }
            "gotify" => {
                if config_json_string(&self.config_json, "server_url").is_none() {
                    anyhow::bail!("gotify target requires config_json.server_url");
                }
                if config_json_string(&self.config_json, "app_token").is_none() {
                    anyhow::bail!("gotify target requires config_json.app_token");
                }
            }
            "webhook" => {
                if config_json_string(&self.config_json, "url").is_none() {
                    anyhow::bail!("webhook target requires config_json.url");
                }
            }
            "telegram" => {
                if config_json_string(&self.config_json, "bot_token").is_none() {
                    anyhow::bail!("telegram target requires config_json.bot_token");
                }
                if config_json_string(&self.config_json, "chat_id").is_none() {
                    anyhow::bail!("telegram target requires config_json.chat_id");
                }
            }
            "email" => {
                if config_json_string(&self.config_json, "smtp_host").is_none() {
                    anyhow::bail!("email target requires config_json.smtp_host");
                }
                if config_json_string(&self.config_json, "from_address").is_none() {
                    anyhow::bail!("email target requires config_json.from_address");
                }
                // to_addresses must be a JSON array with at least one entry.
                if self
                    .config_json
                    .get("to_addresses")
                    .and_then(JsonValue::as_array)
                    .map(|values| !values.is_empty())
                    != Some(true)
                {
                    anyhow::bail!("email target requires non-empty config_json.to_addresses");
                }
            }
            other => anyhow::bail!("unsupported notification target type '{}'", other),
        }
        Ok(())
    }
}
#[derive(Debug, Serialize, Deserialize, Clone)]
pub struct FileSettingsConfig {
pub delete_source: bool,
@@ -461,6 +682,8 @@ pub struct SystemConfig {
/// Enable HSTS header (only enable if running behind HTTPS)
#[serde(default)]
pub https_only: bool,
#[serde(default)]
pub base_url: String,
}
fn default_true() -> bool {
@@ -487,6 +710,7 @@ impl Default for SystemConfig {
log_retention_days: default_log_retention_days(),
engine_mode: EngineMode::default(),
https_only: false,
base_url: String::new(),
}
}
}
@@ -602,6 +826,7 @@ impl Default for Config {
log_retention_days: default_log_retention_days(),
engine_mode: EngineMode::default(),
https_only: false,
base_url: String::new(),
},
}
}
@@ -615,6 +840,7 @@ impl Config {
let content = std::fs::read_to_string(path)?;
let mut config: Config = toml::from_str(&content)?;
config.migrate_legacy_notifications();
config.apply_env_overrides();
config.validate()?;
Ok(config)
}
@@ -696,6 +922,12 @@ impl Config {
}
}
validate_schedule_time(&self.notifications.daily_summary_time_local)?;
normalize_base_url(&self.system.base_url)?;
for target in &self.notifications.targets {
target.validate()?;
}
// Validate VMAF threshold
if self.quality.min_vmaf_score < 0.0 || self.quality.min_vmaf_score > 100.0 {
anyhow::bail!(
@@ -737,12 +969,10 @@ impl Config {
}
pub(crate) fn migrate_legacy_notifications(&mut self) {
if !self.notifications.targets.is_empty() {
return;
}
if self.notifications.targets.is_empty() {
let mut targets = Vec::new();
let events = [
let events = normalize_notification_events(
&[
self.notifications
.notify_on_complete
.then_some("completed".to_string()),
@@ -752,13 +982,15 @@ impl Config {
]
.into_iter()
.flatten()
.collect::<Vec<_>>();
.collect::<Vec<_>>(),
);
if let Some(discord_webhook) = self.notifications.discord_webhook.clone() {
targets.push(NotificationTargetConfig {
name: "Discord".to_string(),
target_type: "discord".to_string(),
endpoint_url: discord_webhook,
target_type: "discord_webhook".to_string(),
config_json: serde_json::json!({ "webhook_url": discord_webhook }),
endpoint_url: None,
auth_token: None,
events: events.clone(),
enabled: self.notifications.enabled,
@@ -769,7 +1001,8 @@ impl Config {
targets.push(NotificationTargetConfig {
name: "Webhook".to_string(),
target_type: "webhook".to_string(),
endpoint_url: webhook_url,
config_json: serde_json::json!({ "url": webhook_url }),
endpoint_url: None,
auth_token: None,
events,
enabled: self.notifications.enabled,
@@ -779,14 +1012,67 @@ impl Config {
self.notifications.targets = targets;
}
for target in &mut self.notifications.targets {
target.migrate_legacy_shape();
}
self.notifications.daily_summary_time_local = self
.notifications
.daily_summary_time_local
.trim()
.to_string();
if self.notifications.daily_summary_time_local.is_empty() {
self.notifications.daily_summary_time_local = default_daily_summary_time_local();
}
}
/// Normalize the in-memory config into its canonical on-disk form
/// before serializing it back to the TOML file.
pub(crate) fn canonicalize_for_save(&mut self) {
    // Persist the canonical base-URL form; an invalid value degrades to
    // "" (no prefix) rather than failing the save.
    self.system.base_url = normalize_base_url(&self.system.base_url).unwrap_or_default();
    // Once structured notification targets exist, clear the legacy flat
    // fields so they are not re-migrated on the next load.
    if !self.notifications.targets.is_empty() {
        self.notifications.webhook_url = None;
        self.notifications.discord_webhook = None;
        self.notifications.notify_on_complete = false;
        self.notifications.notify_on_failure = false;
    }
    // Trim the schedule time; an empty value falls back to the default.
    self.notifications.daily_summary_time_local = self
        .notifications
        .daily_summary_time_local
        .trim()
        .to_string();
    if self.notifications.daily_summary_time_local.is_empty() {
        self.notifications.daily_summary_time_local = default_daily_summary_time_local();
    }
    // Each target also canonicalizes its own shape.
    for target in &mut self.notifications.targets {
        target.canonicalize_for_save();
    }
}
/// Apply environment-variable overrides on top of the loaded file.
/// `ALCHEMIST_BASE_URL`, when set, wins over `system.base_url`.
pub(crate) fn apply_env_overrides(&mut self) {
    if let Ok(base_url) = std::env::var("ALCHEMIST_BASE_URL") {
        self.system.base_url = base_url;
    }
    // NOTE(review): an invalid override silently degrades to "" (no
    // prefix) here instead of surfacing an error, which also makes the
    // later validate() pass unconditionally — confirm this is intended.
    self.system.base_url = normalize_base_url(&self.system.base_url).unwrap_or_default();
}
}
/// Validate and canonicalize a reverse-proxy path prefix.
///
/// Returns `""` for "no prefix" (empty or bare-root input), otherwise a
/// prefix that starts with `/` and has no trailing slash. Rejects full
/// URLs, relative paths, query/fragment parts, and repeated slashes.
pub fn normalize_base_url(value: &str) -> Result<String> {
    let candidate = value.trim();
    // Empty input and a bare root both mean "serve at the root".
    if candidate.is_empty() || candidate == "/" {
        return Ok(String::new());
    }
    anyhow::ensure!(
        !candidate.contains("://"),
        "system.base_url must be a path prefix, not a full URL"
    );
    anyhow::ensure!(
        candidate.starts_with('/'),
        "system.base_url must start with '/'"
    );
    anyhow::ensure!(
        !candidate.contains('?') && !candidate.contains('#'),
        "system.base_url must not contain query or fragment components"
    );
    // Canonical form never carries a trailing slash.
    let canonical = candidate.trim_end_matches('/');
    anyhow::ensure!(
        !canonical.contains("//"),
        "system.base_url must not contain repeated slashes"
    );
    Ok(canonical.to_string())
}
fn validate_schedule_time(value: &str) -> Result<()> {
@@ -837,10 +1123,13 @@ mod tests {
config.migrate_legacy_notifications();
assert_eq!(config.notifications.targets.len(), 1);
assert_eq!(config.notifications.targets[0].target_type, "discord");
assert_eq!(
config.notifications.targets[0].target_type,
"discord_webhook"
);
assert_eq!(
config.notifications.targets[0].events,
vec!["completed".to_string(), "failed".to_string()]
vec!["encode.completed".to_string(), "encode.failed".to_string()]
);
}
@@ -850,9 +1139,10 @@ mod tests {
config.notifications.targets = vec![NotificationTargetConfig {
name: "Webhook".to_string(),
target_type: "webhook".to_string(),
endpoint_url: "https://example.com/webhook".to_string(),
config_json: serde_json::json!({ "url": "https://example.com/webhook" }),
endpoint_url: Some("https://example.com/webhook".to_string()),
auth_token: None,
events: vec!["completed".to_string()],
events: vec!["encode.completed".to_string()],
enabled: true,
}];
config.notifications.webhook_url = Some("https://legacy.example.com".to_string());
@@ -868,4 +1158,65 @@ mod tests {
assert_eq!(EngineMode::default(), EngineMode::Balanced);
assert_eq!(EngineMode::Balanced.concurrent_jobs_for_cpu_count(8), 4);
}
#[test]
fn normalize_base_url_accepts_root_or_empty() {
    // Valid inputs and their canonical forms.
    let cases = [("", ""), ("/", ""), ("/alchemist/", "/alchemist")];
    for (input, expected) in cases {
        let normalized = normalize_base_url(input)
            .unwrap_or_else(|err| panic!("base url '{input}' should normalize: {err}"));
        assert_eq!(normalized, expected);
    }
}
#[test]
fn normalize_base_url_rejects_invalid_values() {
    // Relative path, full URL, and repeated slashes are all rejected.
    for invalid in ["alchemist", "https://example.com/alchemist", "/a//b"] {
        assert!(
            normalize_base_url(invalid).is_err(),
            "expected '{invalid}' to be rejected"
        );
    }
}
#[test]
fn env_base_url_override_takes_priority_on_load() {
    // Guard that restores process state even if an assertion below
    // panics; otherwise the env override and temp file would leak into
    // other tests running in the same process.
    struct Cleanup(std::path::PathBuf);
    impl Drop for Cleanup {
        fn drop(&mut self) {
            // SAFETY: test-only environment mutation.
            unsafe { std::env::remove_var("ALCHEMIST_BASE_URL") };
            let _ = std::fs::remove_file(&self.0);
        }
    }
    let config_path = std::env::temp_dir().join(format!(
        "alchemist_base_url_override_{}.toml",
        rand::random::<u64>()
    ));
    std::fs::write(
        &config_path,
        r#"
[transcode]
size_reduction_threshold = 0.3
min_bpp_threshold = 0.1
min_file_size_mb = 50
concurrent_jobs = 1
[hardware]
preferred_vendor = "cpu"
allow_cpu_fallback = true
[scanner]
directories = []
[system]
base_url = "/from-config"
"#,
    )
    .unwrap_or_else(|err| panic!("failed to write temp config: {err}"));
    let _cleanup = Cleanup(config_path.clone());
    // SAFETY: test-only environment mutation.
    unsafe { std::env::set_var("ALCHEMIST_BASE_URL", "/from-env") };
    let config =
        Config::load(&config_path).unwrap_or_else(|err| panic!("failed to load config: {err}"));
    // The env override must beat the file's value.
    assert_eq!(config.system.base_url, "/from-env");
}
}

510
src/conversion.rs Normal file
View File

@@ -0,0 +1,510 @@
use crate::config::{OutputCodec, TonemapAlgorithm};
use crate::error::{AlchemistError, Result};
use crate::media::ffmpeg::{FFmpegCommandBuilder, encoder_caps_clone};
use crate::media::pipeline::{
AudioCodec, AudioStreamPlan, Encoder, EncoderBackend, FilterStep, MediaAnalysis, RateControl,
SubtitleStreamPlan, TranscodeDecision, TranscodePlan,
};
use crate::system::hardware::HardwareInfo;
use serde::{Deserialize, Serialize};
use std::path::Path;
/// User-supplied settings for a one-off conversion job.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct ConversionSettings {
    /// Target container extension, e.g. "mkv"; lowercased on normalize.
    pub output_container: String,
    /// When true, copy every stream and only change the container.
    pub remux_only: bool,
    pub video: ConversionVideoSettings,
    pub audio: ConversionAudioSettings,
    pub subtitles: ConversionSubtitleSettings,
}
/// Video-stream options for a conversion.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct ConversionVideoSettings {
    /// "copy", "av1", "hevc", or "h264".
    pub codec: String,
    /// Rate-control mode: "crf" (quality) or "bitrate".
    pub mode: String,
    /// Quality value or bitrate in kbps, depending on `mode`.
    pub value: Option<u32>,
    /// Optional encoder preset name passed through to the encoder.
    pub preset: Option<String>,
    pub resolution: ConversionResolutionSettings,
    /// "preserve", "tonemap", or "strip_metadata".
    pub hdr_mode: String,
}
/// Output resolution selection for a conversion.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct ConversionResolutionSettings {
    /// "original", "custom" (explicit width/height), or "scale_factor".
    pub mode: String,
    /// Explicit width in pixels; only read in "custom" mode.
    pub width: Option<u32>,
    /// Explicit height in pixels; only read in "custom" mode.
    pub height: Option<u32>,
    /// Multiplier applied to source dimensions in "scale_factor" mode.
    pub scale_factor: Option<f32>,
}
/// Audio-stream options for a conversion.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct ConversionAudioSettings {
    /// "copy", "aac", "opus", "mp3", or "remove"/"drop"/"none".
    pub codec: String,
    /// Target bitrate when transcoding; codec-specific default if None.
    pub bitrate_kbps: Option<u16>,
    /// Channel layout selection; "auto" keeps the source layout.
    pub channels: Option<String>,
}
/// Subtitle-stream options for a conversion.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct ConversionSubtitleSettings {
    /// "copy", "burn" (render into video), or "remove"/"drop"/"none".
    pub mode: String,
}
/// Dry-run result: the settings after normalization plus the ffmpeg
/// command line that would be executed.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct ConversionPreview {
    pub normalized_settings: ConversionSettings,
    /// Shell-escaped, human-readable ffmpeg invocation.
    pub command_preview: String,
}
impl Default for ConversionSettings {
    /// Conservative defaults: HEVC CRF 24 into MKV, original resolution,
    /// HDR preserved, audio and subtitles copied through.
    fn default() -> Self {
        Self {
            output_container: "mkv".to_string(),
            remux_only: false,
            video: ConversionVideoSettings {
                codec: "hevc".to_string(),
                mode: "crf".to_string(),
                value: Some(24),
                preset: Some("medium".to_string()),
                resolution: ConversionResolutionSettings {
                    mode: "original".to_string(),
                    width: None,
                    height: None,
                    scale_factor: None,
                },
                hdr_mode: "preserve".to_string(),
            },
            audio: ConversionAudioSettings {
                codec: "copy".to_string(),
                // 160 kbps is only used if the codec is later switched
                // to a transcoding option.
                bitrate_kbps: Some(160),
                channels: Some("auto".to_string()),
            },
            subtitles: ConversionSubtitleSettings {
                mode: "copy".to_string(),
            },
        }
    }
}
/// Translate user conversion settings into a concrete `TranscodePlan`.
///
/// Normalizes `settings` first, then either produces a pure remux plan
/// (all streams copied, container changed) or a full transcode plan with
/// encoder, rate control, filters, audio, and subtitle decisions.
///
/// Errors on unsupported codecs/modes and on invalid combinations
/// (validated inside `normalize_settings`).
pub fn build_plan(
    analysis: &MediaAnalysis,
    output_path: &Path,
    settings: &ConversionSettings,
    hw_info: Option<HardwareInfo>,
) -> Result<TranscodePlan> {
    let normalized = normalize_settings(analysis, settings)?;
    let container = normalized.output_container.clone();
    if normalized.remux_only {
        // NOTE(review): remux requires the source codec to be one the
        // pipeline recognizes (av1/hevc/h264); other sources error out
        // even though a remux would not re-encode them — confirm intended.
        let requested_codec = infer_source_codec(&analysis.metadata.codec_name)?;
        return Ok(TranscodePlan {
            decision: TranscodeDecision::Remux {
                reason: "conversion_remux_only".to_string(),
            },
            is_remux: true,
            copy_video: true,
            output_path: Some(output_path.to_path_buf()),
            container,
            requested_codec,
            output_codec: Some(requested_codec),
            encoder: None,
            backend: None,
            rate_control: None,
            encoder_preset: None,
            threads: 0,
            audio: AudioStreamPlan::Copy,
            audio_stream_indices: None,
            subtitles: SubtitleStreamPlan::CopyAllCompatible,
            filters: Vec::new(),
            allow_fallback: true,
            fallback: None,
        });
    }
    // "copy" keeps the source codec; everything else selects an output codec.
    let requested_codec = match normalized.video.codec.as_str() {
        "copy" => infer_source_codec(&analysis.metadata.codec_name)?,
        "av1" => OutputCodec::Av1,
        "hevc" => OutputCodec::Hevc,
        "h264" => OutputCodec::H264,
        other => {
            return Err(AlchemistError::Config(format!(
                "Unsupported conversion video codec '{}'",
                other
            )));
        }
    };
    let copy_video = normalized.video.codec == "copy";
    // Encoder and rate control only exist when actually re-encoding video.
    let encoder = if copy_video {
        None
    } else {
        Some(select_encoder_for_codec(
            requested_codec,
            hw_info.as_ref(),
            &encoder_caps_clone(),
        )?)
    };
    let backend = encoder.map(|value| value.backend());
    let rate_control = if copy_video {
        None
    } else {
        // Defensive: encoder was just set above for the !copy_video path.
        let selected_encoder = encoder.ok_or_else(|| {
            AlchemistError::Config("Conversion encoder selection missing".to_string())
        })?;
        Some(build_rate_control(
            &normalized.video.mode,
            normalized.video.value,
            selected_encoder,
        )?)
    };
    let mut filters = Vec::new();
    if !copy_video {
        // Resolution filters. Dimensions are clamped to >= 2 and forced
        // even, as required by most encoders.
        match normalized.video.resolution.mode.as_str() {
            "custom" => {
                let width = normalized
                    .video
                    .resolution
                    .width
                    .unwrap_or(analysis.metadata.width)
                    .max(2);
                let height = normalized
                    .video
                    .resolution
                    .height
                    .unwrap_or(analysis.metadata.height)
                    .max(2);
                filters.push(FilterStep::Scale {
                    width: even(width),
                    height: even(height),
                });
            }
            "scale_factor" => {
                let factor = normalized.video.resolution.scale_factor.unwrap_or(1.0);
                let width =
                    even(((analysis.metadata.width as f32) * factor).round().max(2.0) as u32);
                let height = even(
                    ((analysis.metadata.height as f32) * factor)
                        .round()
                        .max(2.0) as u32,
                );
                filters.push(FilterStep::Scale { width, height });
            }
            // "original" (and anything else): no scaling.
            _ => {}
        }
        // HDR handling; tonemap parameters are fixed (Hable, peak 100, desat 0.2).
        match normalized.video.hdr_mode.as_str() {
            "tonemap" => filters.push(FilterStep::Tonemap {
                algorithm: TonemapAlgorithm::Hable,
                peak: 100.0,
                desat: 0.2,
            }),
            "strip_metadata" => filters.push(FilterStep::StripHdrMetadata),
            // "preserve": leave HDR untouched.
            _ => {}
        }
    }
    let subtitles = build_subtitle_plan(analysis, &normalized, copy_video)?;
    // Burn-in is implemented as a video filter step.
    if let SubtitleStreamPlan::Burn { stream_index } = subtitles {
        filters.push(FilterStep::SubtitleBurn { stream_index });
    }
    let audio = build_audio_plan(&normalized.audio)?;
    Ok(TranscodePlan {
        decision: TranscodeDecision::Transcode {
            reason: "conversion_requested".to_string(),
        },
        is_remux: false,
        copy_video,
        output_path: Some(output_path.to_path_buf()),
        container,
        requested_codec,
        output_codec: Some(requested_codec),
        encoder,
        backend,
        rate_control,
        encoder_preset: normalized.video.preset.clone(),
        threads: 0,
        audio,
        audio_stream_indices: None,
        subtitles,
        filters,
        allow_fallback: true,
        fallback: None,
    })
}
/// Build the ffmpeg plan for `settings` and render it as a shell-escaped
/// command string, without executing anything.
///
/// Returns both the normalized settings (so the UI can reflect what will
/// actually run) and the rendered command line.
pub fn preview_command(
    input_path: &Path,
    output_path: &Path,
    analysis: &MediaAnalysis,
    settings: &ConversionSettings,
    hw_info: Option<HardwareInfo>,
) -> Result<ConversionPreview> {
    let normalized = normalize_settings(analysis, settings)?;
    // build_plan normalizes again internally; normalization is assumed
    // idempotent, so this only costs an extra clone.
    let plan = build_plan(analysis, output_path, &normalized, hw_info.clone())?;
    let args = FFmpegCommandBuilder::new(input_path, output_path, &analysis.metadata, &plan)
        .with_hardware(hw_info.as_ref())
        .build_args()?;
    Ok(ConversionPreview {
        normalized_settings: normalized,
        command_preview: format!(
            "ffmpeg {}",
            args.iter()
                .map(|arg| shell_escape(arg))
                .collect::<Vec<_>>()
                .join(" ")
        ),
    })
}
/// Quote a single argument for display in a POSIX-shell command preview.
///
/// Alphanumerics and a small safe punctuation set pass through unquoted;
/// anything else is wrapped in single quotes with embedded quotes
/// rendered via the standard `'\''` close-escape-reopen sequence.
fn shell_escape(value: &str) -> String {
    // Bug fix: an empty argument must still render as '' — previously it
    // produced an empty token that vanished from the preview entirely.
    if !value.is_empty()
        && value
            .chars()
            .all(|ch| ch.is_ascii_alphanumeric() || "-_./:=+".contains(ch))
    {
        value.to_string()
    } else {
        format!("'{}'", value.replace('\'', "'\\''"))
    }
}
/// Canonicalize and validate conversion settings against the analyzed file.
///
/// Lowercases/trims every mode string, applies the "mkv" container
/// default, forces stream-copy semantics when `remux_only` is set, and
/// rejects combinations that cannot work (resize/HDR/burn-in while
/// copying video; burn-in with no burnable subtitle stream).
fn normalize_settings(
    analysis: &MediaAnalysis,
    settings: &ConversionSettings,
) -> Result<ConversionSettings> {
    let mut normalized = settings.clone();
    if normalized.output_container.trim().is_empty() {
        normalized.output_container = "mkv".to_string();
    }
    // Canonical lower-case form for every selector string.
    normalized.output_container = normalized.output_container.trim().to_ascii_lowercase();
    normalized.video.codec = normalized.video.codec.trim().to_ascii_lowercase();
    normalized.video.mode = normalized.video.mode.trim().to_ascii_lowercase();
    normalized.video.hdr_mode = normalized.video.hdr_mode.trim().to_ascii_lowercase();
    normalized.video.resolution.mode = normalized.video.resolution.mode.trim().to_ascii_lowercase();
    normalized.audio.codec = normalized.audio.codec.trim().to_ascii_lowercase();
    normalized.subtitles.mode = normalized.subtitles.mode.trim().to_ascii_lowercase();
    // Absent channel selection behaves as "auto".
    normalized.audio.channels = Some(
        normalized
            .audio
            .channels
            .as_deref()
            .unwrap_or("auto")
            .trim()
            .to_ascii_lowercase(),
    );
    // Remux overrides everything: all streams copied, no transforms.
    if normalized.remux_only {
        normalized.video.codec = "copy".to_string();
        normalized.audio.codec = "copy".to_string();
        normalized.subtitles.mode = "copy".to_string();
        normalized.video.mode = "crf".to_string();
        normalized.video.value = None;
        normalized.video.resolution.mode = "original".to_string();
        normalized.video.hdr_mode = "preserve".to_string();
    }
    // Copying the video stream rules out any video-altering option.
    if normalized.video.codec == "copy" {
        if normalized.video.resolution.mode != "original" {
            return Err(AlchemistError::Config(
                "Video copy cannot be combined with resize controls".to_string(),
            ));
        }
        if normalized.video.hdr_mode != "preserve" {
            return Err(AlchemistError::Config(
                "Video copy cannot be combined with HDR transforms".to_string(),
            ));
        }
        if normalized.subtitles.mode == "burn" {
            return Err(AlchemistError::Config(
                "Burn-in subtitles requires video re-encoding".to_string(),
            ));
        }
    }
    // Fail early if burn-in was requested but no stream can be burned.
    if normalized.subtitles.mode == "burn"
        && !analysis
            .metadata
            .subtitle_streams
            .iter()
            .any(|stream| stream.burnable)
    {
        return Err(AlchemistError::Config(
            "No burnable subtitle stream is available for this file".to_string(),
        ));
    }
    Ok(normalized)
}
/// Map the audio settings to a stream plan: passthrough, drop, or a
/// transcode with a codec-specific default bitrate.
fn build_audio_plan(settings: &ConversionAudioSettings) -> Result<AudioStreamPlan> {
    // Resolve the selector into either an early-return plan or a
    // (codec, default kbps) pair for the transcode path below.
    let (codec, default_kbps) = match settings.codec.as_str() {
        "copy" => return Ok(AudioStreamPlan::Copy),
        "remove" | "drop" | "none" => return Ok(AudioStreamPlan::Drop),
        "aac" => (AudioCodec::Aac, 160),
        "opus" => (AudioCodec::Opus, 160),
        "mp3" => (AudioCodec::Mp3, 192),
        other => {
            return Err(AlchemistError::Config(format!(
                "Unsupported conversion audio codec '{}'",
                other
            )));
        }
    };
    Ok(AudioStreamPlan::Transcode {
        codec,
        bitrate_kbps: settings.bitrate_kbps.unwrap_or(default_kbps),
        channels: parse_audio_channels(settings.channels.as_deref()),
    })
}
/// Decide how subtitle streams are handled: copy all compatible streams,
/// drop them, or burn one stream into the video.
fn build_subtitle_plan(
    analysis: &MediaAnalysis,
    settings: &ConversionSettings,
    copy_video: bool,
) -> Result<SubtitleStreamPlan> {
    match settings.subtitles.mode.as_str() {
        "copy" => Ok(SubtitleStreamPlan::CopyAllCompatible),
        "remove" | "drop" | "none" => Ok(SubtitleStreamPlan::Drop),
        "burn" => {
            // Burn-in renders subtitles into the frames, which is
            // impossible while the video stream is copied verbatim.
            if copy_video {
                return Err(AlchemistError::Config(
                    "Burn-in subtitles requires video re-encoding".to_string(),
                ));
            }
            // Preference order among burnable streams: forced, then
            // default, then the first burnable stream. min_by_key keeps
            // the earliest stream on ties, preserving file order.
            let chosen = analysis
                .metadata
                .subtitle_streams
                .iter()
                .filter(|stream| stream.burnable)
                .min_by_key(|stream| {
                    if stream.forced {
                        0
                    } else if stream.default {
                        1
                    } else {
                        2
                    }
                });
            match chosen {
                Some(stream) => Ok(SubtitleStreamPlan::Burn {
                    stream_index: stream.stream_index,
                }),
                None => Err(AlchemistError::Config(
                    "No burnable subtitle stream is available for this file".to_string(),
                )),
            }
        }
        other => Err(AlchemistError::Config(format!(
            "Unsupported subtitle mode '{}'",
            other
        ))),
    }
}
/// Map a UI channel-layout selection to an ffmpeg channel count.
///
/// `None` means "auto": keep the source channel layout. Named layouts
/// map to their counts; a bare number is parsed directly; anything
/// unrecognized degrades to auto rather than failing the conversion.
fn parse_audio_channels(value: Option<&str>) -> Option<u32> {
    match value.unwrap_or("auto") {
        "auto" => None,
        // Generalized: "mono" and "7.1" previously fell through to the
        // numeric parser and silently degraded to auto.
        "mono" => Some(1),
        "stereo" => Some(2),
        "5.1" => Some(6),
        "7.1" => Some(8),
        other => other.parse::<u32>().ok(),
    }
}
/// Translate the conversion rate-control selection into the encoder's
/// native rate-control representation.
fn build_rate_control(mode: &str, value: Option<u32>, encoder: Encoder) -> Result<RateControl> {
    // Explicit bitrate mode; 4000 kbps is the fallback default.
    if mode == "bitrate" {
        return Ok(RateControl::Bitrate {
            kbps: value.unwrap_or(4000),
        });
    }
    // Any other mode is quality-based; the exact knob depends on which
    // backend will run the encode.
    let quality = value.unwrap_or(24) as u8;
    let control = match encoder.backend() {
        EncoderBackend::Qsv => RateControl::QsvQuality { value: quality },
        EncoderBackend::Cpu => RateControl::Crf { value: quality },
        _ => RateControl::Cq { value: quality },
    };
    Ok(control)
}
/// Pick an encoder for the requested codec: hardware backends win when
/// one is advertised for the codec, otherwise fall back to the matching
/// software encoder if ffmpeg was built with it.
fn select_encoder_for_codec(
    requested_codec: OutputCodec,
    hw_info: Option<&HardwareInfo>,
    encoder_caps: &crate::media::ffmpeg::EncoderCapabilities,
) -> Result<Encoder> {
    // First hardware backend whose codec matches and whose encoder name
    // we recognize.
    let hardware_pick = hw_info
        .into_iter()
        .flat_map(|hw| hw.backends.iter())
        .filter(|backend| backend.codec == requested_codec.as_str())
        .find_map(|backend| encoder_from_name(&backend.encoder));
    if let Some(encoder) = hardware_pick {
        return Ok(encoder);
    }
    match requested_codec {
        OutputCodec::Av1 if encoder_caps.has_libsvtav1() => Ok(Encoder::Av1Svt),
        OutputCodec::Hevc if encoder_caps.has_libx265() => Ok(Encoder::HevcX265),
        OutputCodec::H264 if encoder_caps.has_libx264() => Ok(Encoder::H264X264),
        _ => Err(AlchemistError::Config(format!(
            "No encoder is available for requested codec '{}'",
            requested_codec.as_str()
        ))),
    }
}
fn encoder_from_name(name: &str) -> Option<Encoder> {
match name {
"av1_qsv" => Some(Encoder::Av1Qsv),
"av1_nvenc" => Some(Encoder::Av1Nvenc),
"av1_vaapi" => Some(Encoder::Av1Vaapi),
"av1_videotoolbox" => Some(Encoder::Av1Videotoolbox),
"av1_amf" => Some(Encoder::Av1Amf),
"libsvtav1" => Some(Encoder::Av1Svt),
"libaom-av1" => Some(Encoder::Av1Aom),
"hevc_qsv" => Some(Encoder::HevcQsv),
"hevc_nvenc" => Some(Encoder::HevcNvenc),
"hevc_vaapi" => Some(Encoder::HevcVaapi),
"hevc_videotoolbox" => Some(Encoder::HevcVideotoolbox),
"hevc_amf" => Some(Encoder::HevcAmf),
"libx265" => Some(Encoder::HevcX265),
"h264_qsv" => Some(Encoder::H264Qsv),
"h264_nvenc" => Some(Encoder::H264Nvenc),
"h264_vaapi" => Some(Encoder::H264Vaapi),
"h264_videotoolbox" => Some(Encoder::H264Videotoolbox),
"h264_amf" => Some(Encoder::H264Amf),
"libx264" => Some(Encoder::H264X264),
_ => None,
}
}
/// Interpret a probed codec name as an output codec for copy/remux mode.
/// Only codecs the pipeline can emit are valid copy sources.
fn infer_source_codec(value: &str) -> Result<OutputCodec> {
    let codec = match value {
        "av1" => Some(OutputCodec::Av1),
        "hevc" | "h265" => Some(OutputCodec::Hevc),
        "h264" | "avc1" => Some(OutputCodec::H264),
        _ => None,
    };
    codec.ok_or_else(|| {
        AlchemistError::Config(format!(
            "Source codec '{}' cannot be used with video copy mode",
            value
        ))
    })
}
/// Round an odd dimension down to the nearest even value, clamping to
/// the minimum legal size of 2 (video encoders require even dimensions).
fn even(value: u32) -> u32 {
    match value % 2 {
        0 => value,
        _ => value.saturating_sub(1).max(2),
    }
}

381
src/db.rs
View File

@@ -40,6 +40,17 @@ pub struct JobStats {
pub failed: i64,
}
/// Aggregated job activity for the current local day, used by the
/// daily-summary notification.
#[derive(Debug, Serialize, Deserialize, Default, Clone)]
#[serde(default)]
pub struct DailySummaryStats {
    pub completed: i64,
    pub failed: i64,
    pub skipped: i64,
    /// Net input-minus-output bytes across today's encodes; can be
    /// negative if outputs grew.
    pub bytes_saved: i64,
    /// Up to three most frequent failure explanation codes today.
    pub top_failure_reasons: Vec<String>,
    /// Up to three most frequent skip reason codes today.
    pub top_skip_reasons: Vec<String>,
}
#[derive(Debug, Serialize, Deserialize, sqlx::FromRow)]
pub struct LogEntry {
pub id: i64,
@@ -56,6 +67,8 @@ pub enum AlchemistEvent {
job_id: i64,
status: JobState,
},
ScanCompleted,
EngineIdle,
Progress {
job_id: i64,
percentage: f64,
@@ -170,6 +183,11 @@ impl From<AlchemistEvent> for JobEvent {
AlchemistEvent::JobStateChanged { job_id, status } => {
JobEvent::StateChanged { job_id, status }
}
AlchemistEvent::ScanCompleted | AlchemistEvent::EngineIdle => JobEvent::Log {
level: "info".to_string(),
job_id: None,
message: "non-job event".to_string(),
},
AlchemistEvent::Progress {
job_id,
percentage,
@@ -331,13 +349,28 @@ pub struct NotificationTarget {
pub id: i64,
pub name: String,
pub target_type: String,
pub endpoint_url: String,
pub auth_token: Option<String>,
pub config_json: String,
pub events: String,
pub enabled: bool,
pub created_at: DateTime<Utc>,
}
/// Row of the `conversion_jobs` table: a user-uploaded one-off
/// conversion with its settings, probe results, and lifecycle state.
#[derive(Debug, Serialize, Deserialize, Clone, sqlx::FromRow)]
pub struct ConversionJob {
    pub id: i64,
    /// Path of the uploaded source file on disk.
    pub upload_path: String,
    /// Output path; set once the conversion is started.
    pub output_path: Option<String>,
    pub mode: String,
    /// JSON-serialized `ConversionSettings`.
    pub settings_json: String,
    /// JSON-serialized probe/analysis result, once available.
    pub probe_json: Option<String>,
    /// Id of the engine job spawned for this conversion, if started.
    pub linked_job_id: Option<i64>,
    pub status: String,
    // Timestamps are stored as SQLite datetime TEXT, not chrono types.
    pub expires_at: String,
    pub downloaded_at: Option<String>,
    pub created_at: String,
    pub updated_at: String,
}
#[derive(Debug, Serialize, Deserialize, Clone, sqlx::FromRow)]
pub struct ScheduleWindow {
pub id: i64,
@@ -1813,7 +1846,9 @@ impl Db {
}
pub async fn get_notification_targets(&self) -> Result<Vec<NotificationTarget>> {
let targets = sqlx::query_as::<_, NotificationTarget>("SELECT id, name, target_type, endpoint_url, auth_token, events, enabled, created_at FROM notification_targets")
let targets = sqlx::query_as::<_, NotificationTarget>(
"SELECT id, name, target_type, config_json, events, enabled, created_at FROM notification_targets",
)
.fetch_all(&self.pool)
.await?;
Ok(targets)
@@ -1823,19 +1858,17 @@ impl Db {
&self,
name: &str,
target_type: &str,
endpoint_url: &str,
auth_token: Option<&str>,
config_json: &str,
events: &str,
enabled: bool,
) -> Result<NotificationTarget> {
let row = sqlx::query_as::<_, NotificationTarget>(
"INSERT INTO notification_targets (name, target_type, endpoint_url, auth_token, events, enabled)
VALUES (?, ?, ?, ?, ?, ?) RETURNING *"
"INSERT INTO notification_targets (name, target_type, config_json, events, enabled)
VALUES (?, ?, ?, ?, ?) RETURNING *",
)
.bind(name)
.bind(target_type)
.bind(endpoint_url)
.bind(auth_token)
.bind(config_json)
.bind(events)
.bind(enabled)
.fetch_one(&self.pool)
@@ -1866,12 +1899,11 @@ impl Db {
.await?;
for target in targets {
sqlx::query(
"INSERT INTO notification_targets (name, target_type, endpoint_url, auth_token, events, enabled) VALUES (?, ?, ?, ?, ?, ?)",
"INSERT INTO notification_targets (name, target_type, config_json, events, enabled) VALUES (?, ?, ?, ?, ?)",
)
.bind(&target.name)
.bind(&target.target_type)
.bind(&target.endpoint_url)
.bind(target.auth_token.as_deref())
.bind(target.config_json.to_string())
.bind(serde_json::to_string(&target.events).unwrap_or_else(|_| "[]".to_string()))
.bind(target.enabled)
.execute(&mut *tx)
@@ -1881,6 +1913,152 @@ impl Db {
Ok(())
}
/// Insert a new conversion job and return the row as stored (defaults
/// for status/timestamps come from the table definition).
pub async fn create_conversion_job(
    &self,
    upload_path: &str,
    mode: &str,
    settings_json: &str,
    probe_json: Option<&str>,
    expires_at: &str,
) -> Result<ConversionJob> {
    let row = sqlx::query_as::<_, ConversionJob>(
        "INSERT INTO conversion_jobs (upload_path, mode, settings_json, probe_json, expires_at)
         VALUES (?, ?, ?, ?, ?)
         RETURNING *",
    )
    .bind(upload_path)
    .bind(mode)
    .bind(settings_json)
    .bind(probe_json)
    .bind(expires_at)
    .fetch_one(&self.pool)
    .await?;
    Ok(row)
}
/// Fetch a conversion job by primary key; `None` if it does not exist.
pub async fn get_conversion_job(&self, id: i64) -> Result<Option<ConversionJob>> {
    let row = sqlx::query_as::<_, ConversionJob>(
        "SELECT id, upload_path, output_path, mode, settings_json, probe_json, linked_job_id, status, expires_at, downloaded_at, created_at, updated_at
         FROM conversion_jobs
         WHERE id = ?",
    )
    .bind(id)
    .fetch_optional(&self.pool)
    .await?;
    Ok(row)
}
/// Fetch the conversion job linked to an engine job id, if any.
pub async fn get_conversion_job_by_linked_job_id(
    &self,
    linked_job_id: i64,
) -> Result<Option<ConversionJob>> {
    let row = sqlx::query_as::<_, ConversionJob>(
        "SELECT id, upload_path, output_path, mode, settings_json, probe_json, linked_job_id, status, expires_at, downloaded_at, created_at, updated_at
         FROM conversion_jobs
         WHERE linked_job_id = ?",
    )
    .bind(linked_job_id)
    .fetch_optional(&self.pool)
    .await?;
    Ok(row)
}
/// Store the probe/analysis JSON for a conversion job.
pub async fn update_conversion_job_probe(&self, id: i64, probe_json: &str) -> Result<()> {
    sqlx::query(
        "UPDATE conversion_jobs
         SET probe_json = ?, updated_at = datetime('now')
         WHERE id = ?",
    )
    .bind(probe_json)
    .bind(id)
    .execute(&self.pool)
    .await?;
    Ok(())
}
/// Replace the stored settings JSON and mode for a conversion job.
pub async fn update_conversion_job_settings(
    &self,
    id: i64,
    settings_json: &str,
    mode: &str,
) -> Result<()> {
    sqlx::query(
        "UPDATE conversion_jobs
         SET settings_json = ?, mode = ?, updated_at = datetime('now')
         WHERE id = ?",
    )
    .bind(settings_json)
    .bind(mode)
    .bind(id)
    .execute(&self.pool)
    .await?;
    Ok(())
}
/// Mark a conversion job as started: record where the output will be
/// written, link it to the spawned engine job, and set status 'queued'.
pub async fn update_conversion_job_start(
    &self,
    id: i64,
    output_path: &str,
    linked_job_id: i64,
) -> Result<()> {
    sqlx::query(
        "UPDATE conversion_jobs
         SET output_path = ?, linked_job_id = ?, status = 'queued', updated_at = datetime('now')
         WHERE id = ?",
    )
    .bind(output_path)
    .bind(linked_job_id)
    .bind(id)
    .execute(&self.pool)
    .await?;
    Ok(())
}
/// Set a conversion job's status string (caller supplies the value).
pub async fn update_conversion_job_status(&self, id: i64, status: &str) -> Result<()> {
    sqlx::query(
        "UPDATE conversion_jobs
         SET status = ?, updated_at = datetime('now')
         WHERE id = ?",
    )
    .bind(status)
    .bind(id)
    .execute(&self.pool)
    .await?;
    Ok(())
}
/// Record that the user downloaded the result: stamps `downloaded_at`
/// and moves the status to 'downloaded'.
pub async fn mark_conversion_job_downloaded(&self, id: i64) -> Result<()> {
    sqlx::query(
        "UPDATE conversion_jobs
         SET downloaded_at = datetime('now'), status = 'downloaded', updated_at = datetime('now')
         WHERE id = ?",
    )
    .bind(id)
    .execute(&self.pool)
    .await?;
    Ok(())
}
/// Delete a conversion job row. Deleting a missing id is not an error.
/// Note: only removes the DB row; on-disk files are the caller's job.
pub async fn delete_conversion_job(&self, id: i64) -> Result<()> {
    sqlx::query("DELETE FROM conversion_jobs WHERE id = ?")
        .bind(id)
        .execute(&self.pool)
        .await?;
    Ok(())
}
/// List conversion jobs whose expiry has passed. `now` is compared as a
/// string, so it must use the same datetime TEXT format as `expires_at`.
pub async fn get_expired_conversion_jobs(&self, now: &str) -> Result<Vec<ConversionJob>> {
    let rows = sqlx::query_as::<_, ConversionJob>(
        "SELECT id, upload_path, output_path, mode, settings_json, probe_json, linked_job_id, status, expires_at, downloaded_at, created_at, updated_at
         FROM conversion_jobs
         WHERE expires_at <= ?",
    )
    .bind(now)
    .fetch_all(&self.pool)
    .await?;
    Ok(rows)
}
pub async fn get_schedule_windows(&self) -> Result<Vec<ScheduleWindow>> {
let windows = sqlx::query_as::<_, ScheduleWindow>("SELECT * FROM schedule_windows")
.fetch_all(&self.pool)
@@ -2284,6 +2462,75 @@ impl Db {
.await
}
/// Gather today's (local-time) job counts, bytes saved, and top
/// failure/skip reason codes for the daily summary notification.
pub async fn get_daily_summary_stats(&self) -> Result<DailySummaryStats> {
    let pool = &self.pool;
    timed_query("get_daily_summary_stats", || async {
        // Job outcome counts keyed on today's local date of last update.
        let row = sqlx::query(
            "SELECT
                COALESCE(SUM(CASE WHEN status = 'completed' AND DATE(updated_at, 'localtime') = DATE('now', 'localtime') THEN 1 ELSE 0 END), 0) AS completed,
                COALESCE(SUM(CASE WHEN status = 'failed' AND DATE(updated_at, 'localtime') = DATE('now', 'localtime') THEN 1 ELSE 0 END), 0) AS failed,
                COALESCE(SUM(CASE WHEN status = 'skipped' AND DATE(updated_at, 'localtime') = DATE('now', 'localtime') THEN 1 ELSE 0 END), 0) AS skipped
             FROM jobs",
        )
        .fetch_one(pool)
        .await?;
        let completed: i64 = row.get("completed");
        let failed: i64 = row.get("failed");
        let skipped: i64 = row.get("skipped");
        // Net size reduction across today's encodes; negative if outputs grew.
        let bytes_row = sqlx::query(
            "SELECT COALESCE(SUM(input_size_bytes - output_size_bytes), 0) AS bytes_saved
             FROM encode_stats
             WHERE DATE(created_at, 'localtime') = DATE('now', 'localtime')",
        )
        .fetch_one(pool)
        .await?;
        let bytes_saved: i64 = bytes_row.get("bytes_saved");
        // Three most frequent failure explanation codes today (ties
        // broken alphabetically for deterministic output).
        let failure_rows = sqlx::query(
            "SELECT code, COUNT(*) AS count
             FROM job_failure_explanations
             WHERE DATE(updated_at, 'localtime') = DATE('now', 'localtime')
             GROUP BY code
             ORDER BY count DESC, code ASC
             LIMIT 3",
        )
        .fetch_all(pool)
        .await?;
        let top_failure_reasons = failure_rows
            .into_iter()
            .map(|row| row.get::<String, _>("code"))
            .collect::<Vec<_>>();
        // Three most frequent skip reason codes; falls back to the raw
        // action when no reason_code was recorded.
        let skip_rows = sqlx::query(
            "SELECT COALESCE(reason_code, action) AS code, COUNT(*) AS count
             FROM decisions
             WHERE action = 'skip'
               AND DATE(created_at, 'localtime') = DATE('now', 'localtime')
             GROUP BY COALESCE(reason_code, action)
             ORDER BY count DESC, code ASC
             LIMIT 3",
        )
        .fetch_all(pool)
        .await?;
        let top_skip_reasons = skip_rows
            .into_iter()
            .map(|row| row.get::<String, _>("code"))
            .collect::<Vec<_>>();
        Ok(DailySummaryStats {
            completed,
            failed,
            skipped,
            bytes_saved,
            top_failure_reasons,
            top_skip_reasons,
        })
    })
    .await
}
pub async fn add_log(&self, level: &str, job_id: Option<i64>, message: &str) -> Result<()> {
sqlx::query("INSERT INTO logs (level, job_id, message) VALUES (?, ?, ?)")
.bind(level)
@@ -2432,6 +2679,75 @@ impl Db {
Ok(result.rows_affected())
}
/// List all API tokens (including revoked), newest first. The token
/// hash column is deliberately excluded from this projection.
pub async fn list_api_tokens(&self) -> Result<Vec<ApiToken>> {
    let tokens = sqlx::query_as::<_, ApiToken>(
        "SELECT id, name, access_level, created_at, last_used_at, revoked_at
         FROM api_tokens
         ORDER BY created_at DESC",
    )
    .fetch_all(&self.pool)
    .await?;
    Ok(tokens)
}
/// Store a new API token. Only the SHA-256 hash of `token` is persisted;
/// the plaintext never reaches the database.
pub async fn create_api_token(
    &self,
    name: &str,
    token: &str,
    access_level: ApiTokenAccessLevel,
) -> Result<ApiToken> {
    let token_hash = hash_api_token(token);
    let row = sqlx::query_as::<_, ApiToken>(
        "INSERT INTO api_tokens (name, token_hash, access_level)
         VALUES (?, ?, ?)
         RETURNING id, name, access_level, created_at, last_used_at, revoked_at",
    )
    .bind(name)
    .bind(token_hash)
    .bind(access_level)
    .fetch_one(&self.pool)
    .await?;
    Ok(row)
}
/// Look up a non-revoked API token by its plaintext value (hashed before
/// the query, so the comparison happens on digests).
pub async fn get_active_api_token(&self, token: &str) -> Result<Option<ApiTokenRecord>> {
    let token_hash = hash_api_token(token);
    let row = sqlx::query_as::<_, ApiTokenRecord>(
        "SELECT id, name, token_hash, access_level, created_at, last_used_at, revoked_at
         FROM api_tokens
         WHERE token_hash = ? AND revoked_at IS NULL",
    )
    .bind(token_hash)
    .fetch_optional(&self.pool)
    .await?;
    Ok(row)
}
/// Stamp `last_used_at` for a token; a missing id is silently a no-op.
pub async fn update_api_token_last_used(&self, id: i64) -> Result<()> {
    sqlx::query("UPDATE api_tokens SET last_used_at = CURRENT_TIMESTAMP WHERE id = ?")
        .bind(id)
        .execute(&self.pool)
        .await?;
    Ok(())
}
/// Revoke an API token. COALESCE keeps the original revocation time if
/// the token was already revoked (idempotent). Errors with RowNotFound
/// when no row matched the id.
pub async fn revoke_api_token(&self, id: i64) -> Result<()> {
    let result = sqlx::query(
        "UPDATE api_tokens
         SET revoked_at = COALESCE(revoked_at, CURRENT_TIMESTAMP)
         WHERE id = ?",
    )
    .bind(id)
    .execute(&self.pool)
    .await?;
    if result.rows_affected() == 0 {
        return Err(crate::error::AlchemistError::Database(
            sqlx::Error::RowNotFound,
        ));
    }
    Ok(())
}
pub async fn record_health_check(
&self,
job_id: i64,
@@ -2599,6 +2915,35 @@ pub struct Session {
pub created_at: DateTime<Utc>,
}
/// Permission tier attached to an API token. Serialized (and stored via
/// sqlx) in snake_case: "read_only" / "full_access".
#[derive(Debug, Serialize, Deserialize, Clone, Copy, PartialEq, Eq, sqlx::Type)]
#[sqlx(rename_all = "snake_case")]
#[serde(rename_all = "snake_case")]
pub enum ApiTokenAccessLevel {
    ReadOnly,
    FullAccess,
}
/// Public view of an API token — safe to return from the API because it
/// carries no token material (the hash is excluded).
#[derive(Debug, Serialize, Deserialize, Clone, sqlx::FromRow)]
pub struct ApiToken {
    pub id: i64,
    pub name: String,
    pub access_level: ApiTokenAccessLevel,
    pub created_at: DateTime<Utc>,
    pub last_used_at: Option<DateTime<Utc>>,
    /// Set once revoked; a token with this set no longer authenticates.
    pub revoked_at: Option<DateTime<Utc>>,
}
/// Internal token row including the stored hash. Not serializable on
/// purpose — this type must never cross an API boundary.
#[derive(Debug, Clone, sqlx::FromRow)]
pub struct ApiTokenRecord {
    pub id: i64,
    pub name: String,
    /// Hex-encoded SHA-256 of the plaintext token.
    pub token_hash: String,
    pub access_level: ApiTokenAccessLevel,
    pub created_at: DateTime<Utc>,
    pub last_used_at: Option<DateTime<Utc>>,
    pub revoked_at: Option<DateTime<Utc>>,
}
/// Hash a session token using SHA256 for secure storage.
///
/// # Security: Timing Attack Resistance
@@ -2625,6 +2970,18 @@ fn hash_session_token(token: &str) -> String {
out
}
/// Hash an API token with SHA-256 and return the lowercase hex digest
/// (64 characters). Only this digest is persisted; token lookups recompute
/// it from the presented plaintext and compare against `token_hash`.
pub fn hash_api_token(token: &str) -> String {
    let mut hasher = Sha256::new();
    hasher.update(token.as_bytes());
    hasher
        .finalize()
        .iter()
        .map(|byte| format!("{byte:02x}"))
        .collect()
}
#[cfg(test)]
mod tests {
use super::*;

View File

@@ -1,6 +1,7 @@
#![deny(clippy::expect_used, clippy::unwrap_used)]
pub mod config;
pub mod conversion;
pub mod db;
pub mod error;
pub mod explanations;

View File

@@ -181,10 +181,6 @@ impl<'a> FFmpegCommandBuilder<'a> {
]);
}
let encoder = self
.plan
.encoder
.ok_or_else(|| AlchemistError::Config("Transcode plan missing encoder".into()))?;
let rate_control = self.plan.rate_control.clone();
let mut args = vec![
"-hide_banner".to_string(),
@@ -219,13 +215,20 @@ impl<'a> FFmpegCommandBuilder<'a> {
args.push("0:s?".to_string());
}
if self.plan.copy_video {
args.extend(["-c:v".to_string(), "copy".to_string()]);
} else {
let encoder = self
.plan
.encoder
.ok_or_else(|| AlchemistError::Config("Transcode plan missing encoder".into()))?;
match encoder {
Encoder::Av1Qsv | Encoder::HevcQsv | Encoder::H264Qsv => {
qsv::append_args(
&mut args,
encoder,
self.hw_info,
rate_control,
rate_control.clone(),
default_quality(&self.plan.rate_control, 23),
);
}
@@ -233,7 +236,7 @@ impl<'a> FFmpegCommandBuilder<'a> {
nvenc::append_args(
&mut args,
encoder,
rate_control,
rate_control.clone(),
self.plan.encoder_preset.as_deref(),
);
}
@@ -243,11 +246,13 @@ impl<'a> FFmpegCommandBuilder<'a> {
Encoder::Av1Amf | Encoder::HevcAmf | Encoder::H264Amf => {
amf::append_args(&mut args, encoder);
}
Encoder::Av1Videotoolbox | Encoder::HevcVideotoolbox | Encoder::H264Videotoolbox => {
Encoder::Av1Videotoolbox
| Encoder::HevcVideotoolbox
| Encoder::H264Videotoolbox => {
videotoolbox::append_args(
&mut args,
encoder,
rate_control,
rate_control.clone(),
default_quality(&self.plan.rate_control, 65),
);
}
@@ -255,11 +260,16 @@ impl<'a> FFmpegCommandBuilder<'a> {
cpu::append_args(
&mut args,
encoder,
rate_control,
rate_control.clone(),
self.plan.encoder_preset.as_deref(),
);
}
}
}
if let Some(RateControl::Bitrate { kbps }) = rate_control {
args.extend(["-b:v".to_string(), format!("{kbps}k")]);
}
if let Some(filtergraph) = render_filtergraph(self.input, &self.plan.filters) {
args.push("-vf".to_string());
@@ -321,6 +331,7 @@ fn default_quality(rate_control: &Option<RateControl>, fallback: u8) -> u8 {
Some(RateControl::Cq { value }) => *value,
Some(RateControl::QsvQuality { value }) => *value,
Some(RateControl::Crf { value }) => *value,
Some(RateControl::Bitrate { .. }) => fallback,
None => fallback,
}
}
@@ -375,6 +386,9 @@ fn apply_color_metadata(
let tonemapped = filters
.iter()
.any(|step| matches!(step, FilterStep::Tonemap { .. }));
let strip_hdr_metadata = filters
.iter()
.any(|step| matches!(step, FilterStep::StripHdrMetadata));
if tonemapped {
args.extend([
@@ -390,6 +404,20 @@ fn apply_color_metadata(
return;
}
if strip_hdr_metadata {
args.extend([
"-color_primaries".to_string(),
"bt709".to_string(),
"-color_trc".to_string(),
"bt709".to_string(),
"-colorspace".to_string(),
"bt709".to_string(),
"-color_range".to_string(),
"tv".to_string(),
]);
return;
}
if let Some(ref primaries) = metadata.color_primaries {
args.extend(["-color_primaries".to_string(), primaries.clone()]);
}
@@ -426,6 +454,13 @@ fn render_filtergraph(input: &Path, filters: &[FilterStep]) -> Option<String> {
escape_filter_path(input)
),
FilterStep::HwUpload => "hwupload".to_string(),
FilterStep::Scale { width, height } => {
format!("scale=w={width}:h={height}:force_original_aspect_ratio=decrease")
}
FilterStep::StripHdrMetadata => {
"setparams=color_primaries=bt709:color_trc=bt709:colorspace=bt709:range=tv"
.to_string()
}
})
.collect::<Vec<_>>()
.join(",");
@@ -811,6 +846,7 @@ mod tests {
reason: "test".to_string(),
},
is_remux: false,
copy_video: false,
output_path: None,
container: "mkv".to_string(),
requested_codec: encoder.output_codec(),

View File

@@ -260,6 +260,7 @@ pub enum RateControl {
Crf { value: u8 },
Cq { value: u8 },
QsvQuality { value: u8 },
Bitrate { kbps: u32 },
}
#[derive(Debug, Clone, Serialize, Deserialize, PartialEq, Eq)]
@@ -281,6 +282,7 @@ pub struct PlannedFallback {
pub enum AudioCodec {
Aac,
Opus,
Mp3,
}
impl AudioCodec {
@@ -288,6 +290,7 @@ impl AudioCodec {
match self {
Self::Aac => "aac",
Self::Opus => "libopus",
Self::Mp3 => "libmp3lame",
}
}
}
@@ -345,12 +348,18 @@ pub enum FilterStep {
stream_index: usize,
},
HwUpload,
Scale {
width: u32,
height: u32,
},
StripHdrMetadata,
}
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct TranscodePlan {
pub decision: TranscodeDecision,
pub is_remux: bool,
pub copy_video: bool,
pub output_path: Option<PathBuf>,
pub container: String,
pub requested_codec: crate::config::OutputCodec,
@@ -432,6 +441,7 @@ struct FinalizeJobContext<'a> {
output_path: &'a Path,
temp_output_path: &'a Path,
plan: &'a TranscodePlan,
bypass_quality_gates: bool,
start_time: std::time::Instant,
metadata: &'a MediaMetadata,
execution_result: &'a ExecutionResult,
@@ -790,6 +800,51 @@ impl Pipeline {
let config_snapshot = self.config.read().await.clone();
let hw_info = self.hardware_state.snapshot().await;
let conversion_job = self
.db
.get_conversion_job_by_linked_job_id(job.id)
.await
.ok()
.flatten();
let bypass_quality_gates = conversion_job.is_some();
let mut plan = if let Some(conversion_job) = conversion_job.as_ref() {
let settings: crate::conversion::ConversionSettings =
match serde_json::from_str(&conversion_job.settings_json) {
Ok(settings) => settings,
Err(err) => {
let msg = format!("Invalid conversion job settings: {err}");
tracing::error!("Job {}: {}", job.id, msg);
let _ = self.db.add_log("error", Some(job.id), &msg).await;
let explanation = crate::explanations::failure_from_summary(&msg);
let _ = self
.db
.upsert_job_failure_explanation(job.id, &explanation)
.await;
let _ = self
.update_job_state(job.id, crate::db::JobState::Failed)
.await;
return Err(JobFailure::PlannerBug);
}
};
match crate::conversion::build_plan(&analysis, &output_path, &settings, hw_info.clone())
{
Ok(plan) => plan,
Err(err) => {
let msg = format!("Conversion planning failed: {err}");
tracing::error!("Job {}: {}", job.id, msg);
let _ = self.db.add_log("error", Some(job.id), &msg).await;
let explanation = crate::explanations::failure_from_summary(&msg);
let _ = self
.db
.upsert_job_failure_explanation(job.id, &explanation)
.await;
let _ = self
.update_job_state(job.id, crate::db::JobState::Failed)
.await;
return Err(JobFailure::PlannerBug);
}
}
} else {
let planner = BasicPlanner::new(Arc::new(config_snapshot.clone()), hw_info.clone());
let profile = match self.db.get_profile_for_path(&job.input_path).await {
Ok(profile) => profile,
@@ -808,7 +863,7 @@ impl Pipeline {
return Err(JobFailure::Transient);
}
};
let mut plan = match planner
match planner
.plan(&analysis, &output_path, profile.as_ref())
.await
{
@@ -827,6 +882,7 @@ impl Pipeline {
.await;
return Err(JobFailure::PlannerBug);
}
}
};
if !matches!(plan.decision, TranscodeDecision::Skip { .. }) {
@@ -965,6 +1021,7 @@ impl Pipeline {
output_path: &output_path,
temp_output_path: &temp_output_path,
plan: &plan,
bypass_quality_gates,
start_time,
metadata,
execution_result: &result,
@@ -1124,8 +1181,10 @@ impl Pipeline {
let config = self.config.read().await;
let telemetry_enabled = config.system.enable_telemetry;
if output_size == 0
|| (!context.plan.is_remux && reduction < config.transcode.size_reduction_threshold)
if !context.bypass_quality_gates
&& (output_size == 0
|| (!context.plan.is_remux
&& reduction < config.transcode.size_reduction_threshold))
{
tracing::warn!(
"Job {}: Size reduction gate failed ({:.2}%). Reverting.",
@@ -1152,7 +1211,7 @@ impl Pipeline {
}
let mut vmaf_score = None;
if !context.plan.is_remux && config.quality.enable_vmaf {
if !context.bypass_quality_gates && !context.plan.is_remux && config.quality.enable_vmaf {
tracing::info!("[Job {}] Phase 2: Computing VMAF quality score...", job_id);
let input_clone = input_path.to_path_buf();
let output_clone = context.temp_output_path.to_path_buf();
@@ -1552,6 +1611,7 @@ mod tests {
reason: "test".to_string(),
},
is_remux: false,
copy_video: false,
output_path: None,
container: "mkv".to_string(),
requested_codec: crate::config::OutputCodec::H264,
@@ -1647,6 +1707,7 @@ mod tests {
reason: "test".to_string(),
},
is_remux: false,
copy_video: false,
output_path: Some(temp_output.clone()),
container: "mkv".to_string(),
requested_codec: crate::config::OutputCodec::H264,

View File

@@ -83,6 +83,7 @@ impl Planner for BasicPlanner {
reason: reason.clone(),
},
is_remux: true,
copy_video: true,
output_path: None,
container,
requested_codec,
@@ -188,6 +189,7 @@ impl Planner for BasicPlanner {
Ok(TranscodePlan {
decision,
is_remux: false,
copy_video: false,
output_path: None,
container,
requested_codec,
@@ -217,6 +219,7 @@ fn skip_plan(
TranscodePlan {
decision: TranscodeDecision::Skip { reason },
is_remux: false,
copy_video: false,
output_path: None,
container,
requested_codec,
@@ -845,6 +848,15 @@ fn audio_bitrate_kbps(codec: AudioCodec, channels: Option<u32>) -> u16 {
320
}
}
AudioCodec::Mp3 => {
if channels <= 2 {
192
} else if channels <= 6 {
320
} else {
384
}
}
}
}
@@ -1083,6 +1095,7 @@ fn apply_crf_override(rate_control: RateControl, crf_override: Option<i32>) -> R
RateControl::Crf { .. } => RateControl::Crf { value },
RateControl::Cq { .. } => RateControl::Cq { value },
RateControl::QsvQuality { .. } => RateControl::QsvQuality { value },
RateControl::Bitrate { kbps } => RateControl::Bitrate { kbps },
}
}

View File

@@ -28,6 +28,7 @@ pub struct Agent {
pub(crate) engine_mode: Arc<tokio::sync::RwLock<crate::config::EngineMode>>,
dry_run: bool,
in_flight_jobs: Arc<AtomicUsize>,
idle_notified: Arc<AtomicBool>,
analyzing_boot: Arc<AtomicBool>,
analysis_semaphore: Arc<tokio::sync::Semaphore>,
}
@@ -65,6 +66,7 @@ impl Agent {
engine_mode: Arc::new(tokio::sync::RwLock::new(engine_mode)),
dry_run,
in_flight_jobs: Arc::new(AtomicUsize::new(0)),
idle_notified: Arc::new(AtomicBool::new(false)),
analyzing_boot: Arc::new(AtomicBool::new(false)),
analysis_semaphore: Arc::new(tokio::sync::Semaphore::new(1)),
}
@@ -105,6 +107,7 @@ impl Agent {
// Notify scan completed
let _ = self.event_channels.system.send(SystemEvent::ScanCompleted);
let _ = self.tx.send(AlchemistEvent::ScanCompleted);
Ok(())
}
@@ -144,6 +147,7 @@ impl Agent {
pub fn resume(&self) {
self.paused.store(false, Ordering::SeqCst);
self.idle_notified.store(false, Ordering::SeqCst);
info!("Engine resumed.");
}
@@ -151,6 +155,7 @@ impl Agent {
// Stop accepting new jobs but finish active ones.
// Sets draining=true. Does NOT set paused=true.
self.draining.store(true, Ordering::SeqCst);
self.idle_notified.store(false, Ordering::SeqCst);
info!("Engine draining — finishing active jobs, no new jobs will start.");
}
@@ -397,6 +402,7 @@ impl Agent {
match self.db.claim_next_job().await {
Ok(Some(job)) => {
self.idle_notified.store(false, Ordering::SeqCst);
self.in_flight_jobs.fetch_add(1, Ordering::SeqCst);
let agent = self.clone();
let counter = self.in_flight_jobs.clone();
@@ -417,6 +423,11 @@ impl Agent {
});
}
Ok(None) => {
if self.in_flight_jobs.load(Ordering::SeqCst) == 0
&& !self.idle_notified.swap(true, Ordering::SeqCst)
{
let _ = self.tx.send(crate::db::AlchemistEvent::EngineIdle);
}
drop(permit);
tokio::time::sleep(tokio::time::Duration::from_secs(5)).await;
}

View File

@@ -1,28 +1,122 @@
use crate::config::Config;
use crate::db::{AlchemistEvent, Db, NotificationTarget};
use crate::explanations::Explanation;
use chrono::Timelike;
use lettre::message::{Mailbox, Message, SinglePart, header::ContentType};
use lettre::transport::smtp::authentication::Credentials;
use lettre::{AsyncSmtpTransport, AsyncTransport, Tokio1Executor};
use reqwest::{Client, Url, redirect::Policy};
use serde::Deserialize;
use serde_json::json;
use std::net::IpAddr;
use std::sync::Arc;
use std::time::Duration;
use tokio::net::lookup_host;
use tokio::sync::{RwLock, broadcast};
use tokio::sync::{Mutex, RwLock, broadcast};
use tracing::{error, warn};
/// Result alias for notification work. The error is boxed so the many
/// transport error types (HTTP, SMTP, JSON parsing, ...) flow through one
/// signature; `Send + Sync` lets errors cross `tokio::spawn` boundaries.
type NotificationResult<T> = Result<T, Box<dyn std::error::Error + Send + Sync>>;

/// Dispatches outbound notifications (Discord, Gotify, webhooks, Telegram,
/// email) in response to engine events, plus a once-a-day summary.
#[derive(Clone)]
pub struct NotificationManager {
    db: Db,
    config: Arc<RwLock<Config>>,
    // `YYYY-MM-DD` key of the last day a daily summary was sent; used to
    // de-duplicate sends within the same day (see `maybe_send_daily_summary`).
    daily_summary_last_sent: Arc<Mutex<Option<String>>>,
}
// Each struct below is the expected shape of `NotificationTarget.config_json`
// for the corresponding `target_type` string; see `parse_target_config`.

/// `config_json` payload for a `discord_webhook` target.
#[derive(Debug, Deserialize)]
struct DiscordWebhookConfig {
    webhook_url: String,
}

/// `config_json` payload for a `discord_bot` target.
#[derive(Debug, Deserialize)]
struct DiscordBotConfig {
    bot_token: String,
    channel_id: String,
}

/// `config_json` payload for a `gotify` target.
#[derive(Debug, Deserialize)]
struct GotifyConfig {
    server_url: String,
    app_token: String,
}

/// `config_json` payload for a generic `webhook` target.
#[derive(Debug, Deserialize)]
struct WebhookConfig {
    url: String,
    // When present, attached as an `Authorization: Bearer` header.
    auth_token: Option<String>,
}

/// `config_json` payload for a `telegram` target.
#[derive(Debug, Deserialize)]
struct TelegramConfig {
    bot_token: String,
    chat_id: String,
}

/// `config_json` payload for an `email` target.
#[derive(Debug, Deserialize)]
struct EmailConfig {
    smtp_host: String,
    smtp_port: u16,
    // SMTP AUTH credentials; both must be present for AUTH to be attempted.
    username: Option<String>,
    password: Option<String>,
    from_address: String,
    to_addresses: Vec<String>,
    // Transport security mode: "tls"/"smtps" for implicit TLS, "none" for
    // cleartext, anything else (including absent) means STARTTLS.
    security: Option<String>,
}
/// Deserialize a target's `config_json` into the transport-specific config
/// type `T`; malformed JSON surfaces as a boxed notification error.
fn parse_target_config<T: for<'de> Deserialize<'de>>(
    target: &NotificationTarget,
) -> NotificationResult<T> {
    serde_json::from_str(&target.config_json).map_err(Into::into)
}
/// Resolve the URL whose host should be validated before an HTTP client is
/// built for this target, or `None` when no such pre-validated client is
/// needed.
///
/// `discord_bot` and `telegram` always talk to fixed public API hosts, so a
/// constant URL stands in for them; `email` goes over SMTP rather than HTTP,
/// hence `None`. Unknown target types also yield `None`.
fn endpoint_url_for_target(target: &NotificationTarget) -> NotificationResult<Option<String>> {
    match target.target_type.as_str() {
        "discord_webhook" => Ok(Some(
            parse_target_config::<DiscordWebhookConfig>(target)?.webhook_url,
        )),
        "gotify" => Ok(Some(
            parse_target_config::<GotifyConfig>(target)?.server_url,
        )),
        "webhook" => Ok(Some(parse_target_config::<WebhookConfig>(target)?.url)),
        // Fixed, well-known hosts — the real request path is built later.
        "discord_bot" => Ok(Some("https://discord.com".to_string())),
        "telegram" => Ok(Some("https://api.telegram.org".to_string())),
        "email" => Ok(None),
        _ => Ok(None),
    }
}
/// Map an engine event to its canonical notification event key (the constants
/// in `crate::config`), or `None` for events that never trigger notifications.
///
/// Job-state changes fan out per state: queued, started (encoding/remuxing),
/// completed, failed; other job states are not notifiable.
fn event_key_from_event(event: &AlchemistEvent) -> Option<&'static str> {
    match event {
        AlchemistEvent::JobStateChanged { status, .. } => match status {
            crate::db::JobState::Queued => Some(crate::config::NOTIFICATION_EVENT_ENCODE_QUEUED),
            crate::db::JobState::Encoding | crate::db::JobState::Remuxing => {
                Some(crate::config::NOTIFICATION_EVENT_ENCODE_STARTED)
            }
            crate::db::JobState::Completed => {
                Some(crate::config::NOTIFICATION_EVENT_ENCODE_COMPLETED)
            }
            crate::db::JobState::Failed => Some(crate::config::NOTIFICATION_EVENT_ENCODE_FAILED),
            _ => None,
        },
        AlchemistEvent::ScanCompleted => Some(crate::config::NOTIFICATION_EVENT_SCAN_COMPLETED),
        AlchemistEvent::EngineIdle => Some(crate::config::NOTIFICATION_EVENT_ENGINE_IDLE),
        _ => None,
    }
}
impl NotificationManager {
/// Create a manager over the shared database handle and configuration.
/// The daily-summary de-duplication marker starts out unset.
pub fn new(db: Db, config: Arc<RwLock<Config>>) -> Self {
    let daily_summary_last_sent = Arc::new(Mutex::new(None));
    Self {
        daily_summary_last_sent,
        db,
        config,
    }
}
pub fn start_listener(&self, mut rx: broadcast::Receiver<AlchemistEvent>) {
let manager_clone = self.clone();
let summary_manager = self.clone();
tokio::spawn(async move {
loop {
@@ -39,20 +133,26 @@ impl NotificationManager {
}
}
});
tokio::spawn(async move {
loop {
tokio::time::sleep(Duration::from_secs(30)).await;
if let Err(err) = summary_manager.maybe_send_daily_summary().await {
error!("Daily summary notification error: {}", err);
}
}
});
}
pub async fn send_test(
&self,
target: &NotificationTarget,
) -> Result<(), Box<dyn std::error::Error>> {
pub async fn send_test(&self, target: &NotificationTarget) -> NotificationResult<()> {
let event = AlchemistEvent::JobStateChanged {
job_id: 0,
status: crate::db::JobState::Completed,
};
self.send(target, &event, "completed").await
self.send(target, &event).await
}
async fn handle_event(&self, event: AlchemistEvent) -> Result<(), Box<dyn std::error::Error>> {
async fn handle_event(&self, event: AlchemistEvent) -> NotificationResult<()> {
let targets = match self.db.get_notification_targets().await {
Ok(t) => t,
Err(e) => {
@@ -65,10 +165,9 @@ impl NotificationManager {
return Ok(());
}
// Filter events
let status = match &event {
AlchemistEvent::JobStateChanged { status, .. } => status.to_string(),
_ => return Ok(()), // Only handle job state changes for now
let event_key = match event_key_from_event(&event) {
Some(event_key) => event_key,
None => return Ok(()),
};
for target in targets {
@@ -86,12 +185,15 @@ impl NotificationManager {
}
};
if allowed.contains(&status) {
let normalized_allowed = crate::config::normalize_notification_events(&allowed);
if normalized_allowed
.iter()
.any(|candidate| candidate == event_key)
{
let manager = self.clone();
let event_clone = event.clone();
let status_clone = status.clone();
tokio::spawn(async move {
if let Err(e) = manager.send(&target, &event_clone, &status_clone).await {
if let Err(e) = manager.send(&target, &event_clone).await {
error!(
"Failed to send notification to target '{}': {}",
target.name, e
@@ -103,13 +205,65 @@ impl NotificationManager {
Ok(())
}
/// Send the daily summary to all opted-in targets when the configured local
/// time is reached; otherwise a no-op.
///
/// Polled every 30 seconds from `start_listener`, so the minute-precision
/// window below is hit at least once per minute. The `daily_summary_last_sent`
/// marker (a `YYYY-MM-DD` string) prevents a second send within the same
/// minute — and the rest of the day.
async fn maybe_send_daily_summary(&self) -> NotificationResult<()> {
    let config = self.config.read().await.clone();
    let now = chrono::Local::now();
    // Expected format is "HH:MM"; anything else silently disables the summary.
    let parts = config
        .notifications
        .daily_summary_time_local
        .split(':')
        .collect::<Vec<_>>();
    if parts.len() != 2 {
        return Ok(());
    }
    // NOTE(review): non-numeric hour/minute fall back to 09:00 silently —
    // confirm that is the intended behavior for malformed config.
    let hour = parts[0].parse::<u32>().unwrap_or(9);
    let minute = parts[1].parse::<u32>().unwrap_or(0);
    if now.hour() != hour || now.minute() != minute {
        return Ok(());
    }
    let summary_key = now.format("%Y-%m-%d").to_string();
    // Check the marker in a narrow scope so the lock is NOT held across the
    // network sends below.
    {
        let last_sent = self.daily_summary_last_sent.lock().await;
        if last_sent.as_deref() == Some(summary_key.as_str()) {
            return Ok(());
        }
    }
    let summary = self.db.get_daily_summary_stats().await?;
    let targets = self.db.get_notification_targets().await?;
    for target in targets {
        if !target.enabled {
            continue;
        }
        // `events` is a JSON array of event names; unparsable lists are
        // treated as empty (target receives nothing).
        let allowed: Vec<String> = serde_json::from_str(&target.events).unwrap_or_default();
        let normalized_allowed = crate::config::normalize_notification_events(&allowed);
        if !normalized_allowed
            .iter()
            .any(|event| event == crate::config::NOTIFICATION_EVENT_DAILY_SUMMARY)
        {
            continue;
        }
        // Per-target failures are logged and do not abort remaining targets.
        if let Err(err) = self.send_daily_summary_target(&target, &summary).await {
            error!(
                "Failed to send daily summary to target '{}': {}",
                target.name, err
            );
        }
    }
    // Mark the day as done only after attempting every target.
    *self.daily_summary_last_sent.lock().await = Some(summary_key);
    Ok(())
}
async fn send(
&self,
target: &NotificationTarget,
event: &AlchemistEvent,
status: &str,
) -> Result<(), Box<dyn std::error::Error>> {
let url = Url::parse(&target.endpoint_url)?;
) -> NotificationResult<()> {
let event_key = event_key_from_event(event).unwrap_or("unknown");
let client = if let Some(endpoint_url) = endpoint_url_for_target(target)? {
let url = Url::parse(&endpoint_url)?;
let host = url
.host_str()
.ok_or("notification endpoint host is missing")?;
@@ -130,25 +284,28 @@ impl NotificationManager {
let ips = tokio::time::timeout(Duration::from_secs(3), lookup_host(&addr)).await??;
let target_ip = if allow_local {
// When local notifications are allowed, accept any resolved IP
ips.into_iter()
.map(|a| a.ip())
.next()
.ok_or("no IP address found for notification endpoint")?
} else {
// When local notifications are blocked, only use public IPs
ips.into_iter()
.map(|a| a.ip())
.find(|ip| !is_private_ip(*ip))
.ok_or("no public IP address found for notification endpoint")?
};
// Pin the request to the validated IP to prevent DNS rebinding
let client = Client::builder()
Client::builder()
.timeout(Duration::from_secs(10))
.redirect(Policy::none())
.resolve(host, std::net::SocketAddr::new(target_ip, port))
.build()?;
.build()?
} else {
Client::builder()
.timeout(Duration::from_secs(10))
.redirect(Policy::none())
.build()?
};
let (decision_explanation, failure_explanation) = match event {
AlchemistEvent::JobStateChanged { job_id, status } => {
@@ -173,12 +330,23 @@ impl NotificationManager {
};
match target.target_type.as_str() {
"discord" => {
"discord_webhook" => {
self.send_discord_with_client(
&client,
target,
event,
status,
event_key,
decision_explanation.as_ref(),
failure_explanation.as_ref(),
)
.await
}
"discord_bot" => {
self.send_discord_bot_with_client(
&client,
target,
event,
event_key,
decision_explanation.as_ref(),
failure_explanation.as_ref(),
)
@@ -189,7 +357,7 @@ impl NotificationManager {
&client,
target,
event,
status,
event_key,
decision_explanation.as_ref(),
failure_explanation.as_ref(),
)
@@ -200,7 +368,28 @@ impl NotificationManager {
&client,
target,
event,
status,
event_key,
decision_explanation.as_ref(),
failure_explanation.as_ref(),
)
.await
}
"telegram" => {
self.send_telegram_with_client(
&client,
target,
event,
event_key,
decision_explanation.as_ref(),
failure_explanation.as_ref(),
)
.await
}
"email" => {
self.send_email(
target,
event,
event_key,
decision_explanation.as_ref(),
failure_explanation.as_ref(),
)
@@ -232,33 +421,74 @@ impl NotificationManager {
format!("Job #{} is now {}", job_id, status)
}
async fn send_discord_with_client(
fn message_for_event(
&self,
client: &Client,
target: &NotificationTarget,
event: &AlchemistEvent,
status: &str,
decision_explanation: Option<&Explanation>,
failure_explanation: Option<&Explanation>,
) -> Result<(), Box<dyn std::error::Error>> {
let color = match status {
"completed" => 0x00FF00, // Green
"failed" => 0xFF0000, // Red
"queued" => 0xF1C40F, // Yellow
"encoding" | "remuxing" => 0x3498DB, // Blue
_ => 0x95A5A6, // Gray
};
let message = match event {
) -> String {
match event {
AlchemistEvent::JobStateChanged { job_id, status } => self.notification_message(
*job_id,
&status.to_string(),
decision_explanation,
failure_explanation,
),
AlchemistEvent::ScanCompleted => {
"Library scan completed. Review the queue for newly discovered work.".to_string()
}
AlchemistEvent::EngineIdle => {
"The engine is idle. There are no active jobs and no queued work ready to run."
.to_string()
}
_ => "Event occurred".to_string(),
}
}
/// Render the daily summary as plain text, one statistic per line.
/// The failure/skip reason lines appear only when there is something to list.
fn daily_summary_message(&self, summary: &crate::db::DailySummaryStats) -> String {
    let mut lines = Vec::with_capacity(7);
    lines.push("Daily summary".to_string());
    lines.push(format!("Completed: {}", summary.completed));
    lines.push(format!("Failed: {}", summary.failed));
    lines.push(format!("Skipped: {}", summary.skipped));
    lines.push(format!("Bytes saved: {}", summary.bytes_saved));
    if !summary.top_failure_reasons.is_empty() {
        lines.push(format!(
            "Top failure reasons: {}",
            summary.top_failure_reasons.join(", ")
        ));
    }
    if !summary.top_skip_reasons.is_empty() {
        lines.push(format!(
            "Top skip reasons: {}",
            summary.top_skip_reasons.join(", ")
        ));
    }
    lines.join("\n")
}
async fn send_discord_with_client(
&self,
client: &Client,
target: &NotificationTarget,
event: &AlchemistEvent,
event_key: &str,
decision_explanation: Option<&Explanation>,
failure_explanation: Option<&Explanation>,
) -> NotificationResult<()> {
let config = parse_target_config::<DiscordWebhookConfig>(target)?;
let color = match event_key {
"encode.completed" => 0x00FF00,
"encode.failed" => 0xFF0000,
"encode.queued" => 0xF1C40F,
"encode.started" => 0x3498DB,
"daily.summary" => 0x9B59B6,
_ => 0x95A5A6,
};
let message = self.message_for_event(event, decision_explanation, failure_explanation);
let body = json!({
"embeds": [{
"title": "Alchemist Notification",
@@ -269,7 +499,7 @@ impl NotificationManager {
});
client
.post(&target.endpoint_url)
.post(&config.webhook_url)
.json(&body)
.send()
.await?
@@ -277,42 +507,63 @@ impl NotificationManager {
Ok(())
}
/// Post the event message to a Discord channel through the bot HTTP API,
/// authenticating with the configured bot token.
async fn send_discord_bot_with_client(
    &self,
    client: &Client,
    target: &NotificationTarget,
    event: &AlchemistEvent,
    _event_key: &str,
    decision_explanation: Option<&Explanation>,
    failure_explanation: Option<&Explanation>,
) -> NotificationResult<()> {
    let config = parse_target_config::<DiscordBotConfig>(target)?;
    let text = self.message_for_event(event, decision_explanation, failure_explanation);
    let api_url = format!(
        "https://discord.com/api/v10/channels/{}/messages",
        config.channel_id
    );
    let auth_header = format!("Bot {}", config.bot_token);
    let response = client
        .post(api_url)
        .header("Authorization", auth_header)
        .json(&json!({ "content": text }))
        .send()
        .await?;
    response.error_for_status()?;
    Ok(())
}
async fn send_gotify_with_client(
&self,
client: &Client,
target: &NotificationTarget,
event: &AlchemistEvent,
status: &str,
event_key: &str,
decision_explanation: Option<&Explanation>,
failure_explanation: Option<&Explanation>,
) -> Result<(), Box<dyn std::error::Error>> {
let message = match event {
AlchemistEvent::JobStateChanged { job_id, status } => self.notification_message(
*job_id,
&status.to_string(),
decision_explanation,
failure_explanation,
),
_ => "Event occurred".to_string(),
};
) -> NotificationResult<()> {
let config = parse_target_config::<GotifyConfig>(target)?;
let message = self.message_for_event(event, decision_explanation, failure_explanation);
let priority = match status {
"failed" => 8,
"completed" => 5,
let priority = match event_key {
"encode.failed" => 8,
"encode.completed" => 5,
_ => 2,
};
let mut req = client.post(&target.endpoint_url).json(&json!({
let req = client.post(&config.server_url).json(&json!({
"title": "Alchemist",
"message": message,
"priority": priority
}));
if let Some(token) = &target.auth_token {
req = req.header("X-Gotify-Key", token);
"priority": priority,
"extras": {
"client::display": {
"contentType": "text/plain"
}
req.send().await?.error_for_status()?;
}
}));
req.header("X-Gotify-Key", config.app_token)
.send()
.await?
.error_for_status()?;
Ok(())
}
@@ -321,23 +572,15 @@ impl NotificationManager {
client: &Client,
target: &NotificationTarget,
event: &AlchemistEvent,
status: &str,
event_key: &str,
decision_explanation: Option<&Explanation>,
failure_explanation: Option<&Explanation>,
) -> Result<(), Box<dyn std::error::Error>> {
let message = match event {
AlchemistEvent::JobStateChanged { job_id, status } => self.notification_message(
*job_id,
&status.to_string(),
decision_explanation,
failure_explanation,
),
_ => "Event occurred".to_string(),
};
) -> NotificationResult<()> {
let config = parse_target_config::<WebhookConfig>(target)?;
let message = self.message_for_event(event, decision_explanation, failure_explanation);
let body = json!({
"event": "job_update",
"status": status,
"event": event_key,
"message": message,
"data": event,
"decision_explanation": decision_explanation,
@@ -345,14 +588,207 @@ impl NotificationManager {
"timestamp": chrono::Utc::now().to_rfc3339()
});
let mut req = client.post(&target.endpoint_url).json(&body);
if let Some(token) = &target.auth_token {
let mut req = client.post(&config.url).json(&body);
if let Some(token) = &config.auth_token {
req = req.bearer_auth(token);
}
req.send().await?.error_for_status()?;
Ok(())
}
/// Deliver the event message via the Telegram Bot `sendMessage` endpoint.
async fn send_telegram_with_client(
    &self,
    client: &Client,
    target: &NotificationTarget,
    event: &AlchemistEvent,
    _event_key: &str,
    decision_explanation: Option<&Explanation>,
    failure_explanation: Option<&Explanation>,
) -> NotificationResult<()> {
    let config = parse_target_config::<TelegramConfig>(target)?;
    let text = self.message_for_event(event, decision_explanation, failure_explanation);
    let api_url = format!(
        "https://api.telegram.org/bot{}/sendMessage",
        config.bot_token
    );
    let payload = json!({
        "chat_id": config.chat_id,
        "text": text
    });
    client
        .post(api_url)
        .json(&payload)
        .send()
        .await?
        .error_for_status()?;
    Ok(())
}
/// Send the event message as a plain-text email over SMTP.
///
/// `security` selects the transport: "tls"/"smtps" for implicit TLS, "none"
/// for an unencrypted connection, anything else (or absent) for STARTTLS.
/// SMTP AUTH is used only when both username and password are configured.
async fn send_email(
    &self,
    target: &NotificationTarget,
    event: &AlchemistEvent,
    _event_key: &str,
    decision_explanation: Option<&Explanation>,
    failure_explanation: Option<&Explanation>,
) -> NotificationResult<()> {
    let config = parse_target_config::<EmailConfig>(target)?;
    let body_text = self.message_for_event(event, decision_explanation, failure_explanation);
    let sender: Mailbox = config.from_address.parse()?;
    let mut builder = Message::builder()
        .from(sender)
        .subject("Alchemist Notification");
    for recipient in &config.to_addresses {
        builder = builder.to(recipient.parse::<Mailbox>()?);
    }
    let email = builder.singlepart(
        SinglePart::builder()
            .header(ContentType::TEXT_PLAIN)
            .body(body_text),
    )?;
    let mode = config
        .security
        .as_deref()
        .unwrap_or("starttls")
        .to_ascii_lowercase();
    let mut smtp = match mode.as_str() {
        "tls" | "smtps" => AsyncSmtpTransport::<Tokio1Executor>::relay(&config.smtp_host)?,
        "none" => AsyncSmtpTransport::<Tokio1Executor>::builder_dangerous(&config.smtp_host),
        _ => AsyncSmtpTransport::<Tokio1Executor>::starttls_relay(&config.smtp_host)?,
    }
    .port(config.smtp_port);
    if let (Some(user), Some(pass)) = (config.username, config.password) {
        smtp = smtp.credentials(Credentials::new(user, pass));
    }
    smtp.build().send(email).await?;
    Ok(())
}
async fn send_daily_summary_target(
&self,
target: &NotificationTarget,
summary: &crate::db::DailySummaryStats,
) -> NotificationResult<()> {
let message = self.daily_summary_message(summary);
match target.target_type.as_str() {
"discord_webhook" => {
let config = parse_target_config::<DiscordWebhookConfig>(target)?;
Client::new()
.post(config.webhook_url)
.json(&json!({
"embeds": [{
"title": "Alchemist Daily Summary",
"description": message,
"color": 0x9B59B6,
"timestamp": chrono::Utc::now().to_rfc3339()
}]
}))
.send()
.await?
.error_for_status()?;
}
"discord_bot" => {
let config = parse_target_config::<DiscordBotConfig>(target)?;
Client::new()
.post(format!(
"https://discord.com/api/v10/channels/{}/messages",
config.channel_id
))
.header("Authorization", format!("Bot {}", config.bot_token))
.json(&json!({ "content": message }))
.send()
.await?
.error_for_status()?;
}
"gotify" => {
let config = parse_target_config::<GotifyConfig>(target)?;
Client::new()
.post(config.server_url)
.header("X-Gotify-Key", config.app_token)
.json(&json!({
"title": "Alchemist Daily Summary",
"message": message,
"priority": 4
}))
.send()
.await?
.error_for_status()?;
}
"webhook" => {
let config = parse_target_config::<WebhookConfig>(target)?;
let mut req = Client::new().post(config.url).json(&json!({
"event": crate::config::NOTIFICATION_EVENT_DAILY_SUMMARY,
"summary": summary,
"message": message,
"timestamp": chrono::Utc::now().to_rfc3339()
}));
if let Some(token) = config.auth_token {
req = req.bearer_auth(token);
}
req.send().await?.error_for_status()?;
}
"telegram" => {
let config = parse_target_config::<TelegramConfig>(target)?;
Client::new()
.post(format!(
"https://api.telegram.org/bot{}/sendMessage",
config.bot_token
))
.json(&json!({
"chat_id": config.chat_id,
"text": message
}))
.send()
.await?
.error_for_status()?;
}
"email" => {
let config = parse_target_config::<EmailConfig>(target)?;
let from: Mailbox = config.from_address.parse()?;
let mut builder = Message::builder()
.from(from)
.subject("Alchemist Daily Summary");
for address in &config.to_addresses {
builder = builder.to(address.parse::<Mailbox>()?);
}
let email = builder.singlepart(
SinglePart::builder()
.header(ContentType::TEXT_PLAIN)
.body(message),
)?;
let security = config
.security
.as_deref()
.unwrap_or("starttls")
.to_ascii_lowercase();
let mut transport = match security.as_str() {
"tls" | "smtps" => {
AsyncSmtpTransport::<Tokio1Executor>::relay(&config.smtp_host)?
}
"none" => {
AsyncSmtpTransport::<Tokio1Executor>::builder_dangerous(&config.smtp_host)
}
_ => AsyncSmtpTransport::<Tokio1Executor>::starttls_relay(&config.smtp_host)?,
}
.port(config.smtp_port);
if let (Some(username), Some(password)) =
(config.username.clone(), config.password.clone())
{
transport = transport.credentials(Credentials::new(username, password));
}
transport.build().send(email).await?;
}
_ => {}
}
Ok(())
}
}
async fn _unused_ensure_public_endpoint(raw: &str) -> Result<(), Box<dyn std::error::Error>> {
@@ -421,7 +857,7 @@ mod tests {
#[tokio::test]
async fn test_webhook_errors_on_non_success()
-> std::result::Result<(), Box<dyn std::error::Error>> {
-> std::result::Result<(), Box<dyn std::error::Error + Send + Sync>> {
let mut db_path = std::env::temp_dir();
let token: u64 = rand::random();
db_path.push(format!("alchemist_notifications_test_{}.db", token));
@@ -455,8 +891,7 @@ mod tests {
id: 0,
name: "test".to_string(),
target_type: "webhook".to_string(),
endpoint_url: format!("http://{}", addr),
auth_token: None,
config_json: serde_json::json!({ "url": format!("http://{}", addr) }).to_string(),
events: "[]".to_string(),
enabled: true,
created_at: chrono::Utc::now(),
@@ -466,7 +901,7 @@ mod tests {
status: crate::db::JobState::Failed,
};
let result = manager.send(&target, &event, "failed").await;
let result = manager.send(&target, &event).await;
assert!(result.is_err());
drop(manager);
@@ -476,7 +911,7 @@ mod tests {
#[tokio::test]
async fn webhook_payload_includes_structured_explanations()
-> std::result::Result<(), Box<dyn std::error::Error>> {
-> std::result::Result<(), Box<dyn std::error::Error + Send + Sync>> {
let mut db_path = std::env::temp_dir();
let token: u64 = rand::random();
db_path.push(format!("alchemist_notifications_payload_test_{}.db", token));
@@ -536,8 +971,7 @@ mod tests {
id: 0,
name: "test".to_string(),
target_type: "webhook".to_string(),
endpoint_url: format!("http://{}", addr),
auth_token: None,
config_json: serde_json::json!({ "url": format!("http://{}", addr) }).to_string(),
events: "[\"failed\"]".to_string(),
enabled: true,
created_at: chrono::Utc::now(),
@@ -547,7 +981,7 @@ mod tests {
status: JobState::Failed,
};
manager.send(&target, &event, "failed").await?;
manager.send(&target, &event).await?;
let request = body_task.await??;
let body = request
.split("\r\n\r\n")

View File

@@ -3,6 +3,7 @@ use std::path::{Path, PathBuf};
const DEFAULT_CONFIG_PATH: &str = "config.toml";
const DEFAULT_DB_PATH: &str = "alchemist.db";
const DEFAULT_TEMP_DIR: &str = "temp";
fn parse_bool_env(value: &str) -> Option<bool> {
match value.trim().to_ascii_lowercase().as_str() {
@@ -71,6 +72,13 @@ pub fn db_path() -> PathBuf {
default_data_dir().join(DEFAULT_DB_PATH)
}
/// Resolve the scratch directory, honoring the `ALCHEMIST_TEMP_DIR`
/// environment override before falling back to the default data dir.
pub fn temp_dir() -> PathBuf {
    env::var("ALCHEMIST_TEMP_DIR")
        .map(PathBuf::from)
        .unwrap_or_else(|_| default_data_dir().join(DEFAULT_TEMP_DIR))
}
pub fn config_mutable() -> bool {
match env::var("ALCHEMIST_CONFIG_MUTABLE") {
Ok(value) => parse_bool_env(&value).unwrap_or(true),

424
src/server/conversion.rs Normal file
View File

@@ -0,0 +1,424 @@
use super::AppState;
use crate::conversion::ConversionSettings;
use crate::media::pipeline::Analyzer as _;
use axum::{
body::Body,
extract::{Multipart, Path, State},
http::{HeaderMap, HeaderValue, StatusCode, header},
response::IntoResponse,
};
use serde::{Deserialize, Serialize};
use std::path::{Path as FsPath, PathBuf};
use std::sync::Arc;
use tokio::fs;
use tokio_util::io::ReaderStream;
/// Response body for a successful conversion upload: the new job id plus the
/// media probe and the default settings the server chose for it.
#[derive(Serialize)]
pub(crate) struct ConversionUploadResponse {
    // Row id of the freshly created conversion job.
    conversion_job_id: i64,
    // ffmpeg analysis of the uploaded file.
    probe: crate::media::pipeline::MediaAnalysis,
    // Settings as normalized by the server (defaults on upload).
    normalized_settings: ConversionSettings,
}

/// Request body for previewing a conversion command against a stored job.
#[derive(Deserialize)]
pub(crate) struct ConversionPreviewPayload {
    conversion_job_id: i64,
    settings: ConversionSettings,
}

/// Combined status view of a conversion job and its linked engine job.
#[derive(Serialize)]
pub(crate) struct ConversionJobStatusResponse {
    id: i64,
    // Linked engine-job status when one exists, else the conversion row's own.
    status: String,
    progress: f64,
    linked_job_id: Option<i64>,
    output_path: Option<String>,
    // True only when the output file actually exists on disk.
    download_ready: bool,
    probe: Option<crate::media::pipeline::MediaAnalysis>,
}
/// Root directory for all conversion scratch files (delegates to the
/// runtime temp dir, which honors `ALCHEMIST_TEMP_DIR`).
fn conversion_root() -> PathBuf {
    crate::runtime::temp_dir()
}

/// Directory holding raw uploaded inputs; one subdirectory per upload.
fn uploads_root() -> PathBuf {
    conversion_root().join("uploads")
}

/// Directory holding rendered conversion outputs.
fn outputs_root() -> PathBuf {
    conversion_root().join("outputs")
}
/// Opportunistic garbage collection, run at the top of every conversion
/// handler: fetch jobs the DB reports as expired relative to `now` and delete
/// their on-disk artifacts and rows. All errors are deliberately swallowed so
/// a failed sweep never fails the request that triggered it.
async fn cleanup_expired_jobs(state: &AppState) {
    let now = chrono::Utc::now().to_rfc3339();
    let expired = match state.db.get_expired_conversion_jobs(&now).await {
        Ok(expired) => expired,
        // DB unavailable: skip this sweep rather than surfacing an error.
        Err(_) => return,
    };
    for job in expired {
        let _ = remove_conversion_artifacts(&job).await;
        let _ = state.db.delete_conversion_job(job.id).await;
    }
}
/// Best-effort removal of a conversion job's on-disk artifacts: the uploaded
/// input and, when present, the rendered output. Individual deletion failures
/// are ignored and the function always returns `Ok(())`.
async fn remove_conversion_artifacts(job: &crate::db::ConversionJob) -> std::io::Result<()> {
    let mut candidates: Vec<&str> = vec![job.upload_path.as_str()];
    if let Some(output) = job.output_path.as_deref() {
        candidates.push(output);
    }
    for candidate in candidates {
        let path = FsPath::new(candidate);
        if path.exists() {
            let _ = fs::remove_file(path).await;
        }
    }
    Ok(())
}
/// POST /api/conversion/uploads — accept a single multipart file, probe it
/// with ffmpeg, and create a pending conversion job that expires in 24 hours.
/// Returns the job id, the probe result, and the default settings.
pub(crate) async fn upload_conversion_handler(
    State(state): State<Arc<AppState>>,
    mut multipart: Multipart,
) -> impl IntoResponse {
    cleanup_expired_jobs(state.as_ref()).await;
    // Each upload gets its own UUID-named directory so client-supplied
    // filenames cannot collide across uploads.
    let upload_id = uuid::Uuid::new_v4().to_string();
    let upload_dir = uploads_root().join(&upload_id);
    if let Err(err) = fs::create_dir_all(&upload_dir).await {
        return (StatusCode::INTERNAL_SERVER_ERROR, err.to_string()).into_response();
    }
    // Only the first multipart field is consumed; any extra fields are ignored.
    let field = match multipart.next_field().await {
        Ok(Some(field)) => field,
        Ok(None) => return (StatusCode::BAD_REQUEST, "missing upload file").into_response(),
        Err(err) => return (StatusCode::BAD_REQUEST, err.to_string()).into_response(),
    };
    let stored_path: PathBuf = {
        let file_name = field
            .file_name()
            .map(sanitize_filename)
            .unwrap_or_else(|| "input.bin".to_string());
        let path = upload_dir.join(file_name);
        // NOTE(review): field.bytes() buffers the entire upload in memory —
        // acceptable for small files; large media may warrant streaming.
        match field.bytes().await {
            Ok(bytes) => {
                if let Err(err) = fs::write(&path, bytes).await {
                    return (StatusCode::INTERNAL_SERVER_ERROR, err.to_string()).into_response();
                }
                path
            }
            Err(err) => return (StatusCode::BAD_REQUEST, err.to_string()).into_response(),
        }
    };
    // Probe the stored file; media ffmpeg cannot read is a client error.
    let analyzer = crate::media::analyzer::FfmpegAnalyzer;
    let analysis = match analyzer.analyze(&stored_path).await {
        Ok(analysis) => analysis,
        Err(err) => return (StatusCode::BAD_REQUEST, err.to_string()).into_response(),
    };
    let settings = ConversionSettings::default();
    // Reclaimed by cleanup_expired_jobs once this timestamp passes.
    let expires_at = (chrono::Utc::now() + chrono::Duration::hours(24)).to_rfc3339();
    let conversion_job = match state
        .db
        .create_conversion_job(
            &stored_path.to_string_lossy(),
            // Job type mirrors the remux_only flag of the (default) settings.
            if settings.remux_only {
                "remux"
            } else {
                "transcode"
            },
            &serde_json::to_string(&settings).unwrap_or_else(|_| "{}".to_string()),
            Some(&serde_json::to_string(&analysis).unwrap_or_else(|_| "{}".to_string())),
            &expires_at,
        )
        .await
    {
        Ok(job) => job,
        Err(err) => return (StatusCode::INTERNAL_SERVER_ERROR, err.to_string()).into_response(),
    };
    axum::Json(ConversionUploadResponse {
        conversion_job_id: conversion_job.id,
        probe: analysis,
        normalized_settings: settings,
    })
    .into_response()
}
/// POST /api/conversion/preview — dry-run the conversion command for a stored
/// job under the caller-supplied settings, persist the normalized settings and
/// a draft status, and return the preview. No transcoding is started here.
pub(crate) async fn preview_conversion_handler(
    State(state): State<Arc<AppState>>,
    axum::Json(payload): axum::Json<ConversionPreviewPayload>,
) -> impl IntoResponse {
    cleanup_expired_jobs(state.as_ref()).await;
    let Some(job) = (match state.db.get_conversion_job(payload.conversion_job_id).await {
        Ok(job) => job,
        Err(err) => return (StatusCode::INTERNAL_SERVER_ERROR, err.to_string()).into_response(),
    }) else {
        return StatusCode::NOT_FOUND.into_response();
    };
    // The probe captured at upload time is required to plan the command.
    let analysis: crate::media::pipeline::MediaAnalysis = match job.probe_json.as_deref() {
        Some(probe_json) => match serde_json::from_str(probe_json) {
            Ok(analysis) => analysis,
            Err(err) => {
                return (StatusCode::INTERNAL_SERVER_ERROR, err.to_string()).into_response();
            }
        },
        None => return (StatusCode::BAD_REQUEST, "missing conversion probe").into_response(),
    };
    // Hypothetical output path used only to render the command preview.
    let preview_output = outputs_root().join(format!(
        "preview-{}.{}",
        job.id, payload.settings.output_container
    ));
    let hw_info = state.hardware_state.snapshot().await;
    match crate::conversion::preview_command(
        FsPath::new(&job.upload_path),
        &preview_output,
        &analysis,
        &payload.settings,
        hw_info,
    ) {
        Ok(preview) => {
            // Best-effort persistence: preview still succeeds if these fail.
            let _ = state
                .db
                .update_conversion_job_probe(
                    job.id,
                    &serde_json::to_string(&analysis).unwrap_or_else(|_| "{}".to_string()),
                )
                .await;
            let _ = state
                .db
                .update_conversion_job_status(
                    job.id,
                    if preview.normalized_settings.remux_only {
                        "draft_remux"
                    } else {
                        "draft_transcode"
                    },
                )
                .await;
            let _ = sqlx_update_conversion_settings(
                state.as_ref(),
                job.id,
                &preview.normalized_settings,
            )
            .await;
            axum::Json(preview).into_response()
        }
        Err(err) => (StatusCode::BAD_REQUEST, err.to_string()).into_response(),
    }
}
/// Persist the (normalized) settings JSON for a conversion job, keeping the
/// stored job type ("remux" vs "transcode") in sync with `remux_only`.
async fn sqlx_update_conversion_settings(
    state: &AppState,
    id: i64,
    settings: &ConversionSettings,
) -> crate::error::Result<()> {
    state
        .db
        .update_conversion_job_settings(
            id,
            // Serialization of a plain settings struct should not fail;
            // fall back to an empty object rather than erroring.
            &serde_json::to_string(settings).unwrap_or_else(|_| "{}".to_string()),
            if settings.remux_only {
                "remux"
            } else {
                "transcode"
            },
        )
        .await
}
/// POST /api/conversion/jobs/:id/start — enqueue the uploaded file on the main
/// transcode queue and link the resulting engine job back to the conversion
/// job. Returns 409 if the job was already started.
pub(crate) async fn start_conversion_job_handler(
    State(state): State<Arc<AppState>>,
    Path(id): Path<i64>,
) -> impl IntoResponse {
    cleanup_expired_jobs(state.as_ref()).await;
    let Some(job) = (match state.db.get_conversion_job(id).await {
        Ok(job) => job,
        Err(err) => return (StatusCode::INTERNAL_SERVER_ERROR, err.to_string()).into_response(),
    }) else {
        return StatusCode::NOT_FOUND.into_response();
    };
    // A linked engine job means this conversion was already started.
    if job.linked_job_id.is_some() {
        return (StatusCode::CONFLICT, "conversion job already started").into_response();
    }
    let input_path = PathBuf::from(&job.upload_path);
    let file_stem = input_path
        .file_stem()
        .and_then(|value| value.to_str())
        .unwrap_or("output");
    let settings: ConversionSettings = match serde_json::from_str(&job.settings_json) {
        Ok(settings) => settings,
        Err(err) => return (StatusCode::BAD_REQUEST, err.to_string()).into_response(),
    };
    // Outputs are grouped per conversion-job id.
    let output_dir = outputs_root().join(job.id.to_string());
    if let Err(err) = fs::create_dir_all(&output_dir).await {
        return (StatusCode::INTERNAL_SERVER_ERROR, err.to_string()).into_response();
    }
    let output_path = output_dir.join(format!("{file_stem}.{}", settings.output_container));
    let mtime = std::fs::metadata(&input_path)
        .and_then(|metadata| metadata.modified())
        .unwrap_or(std::time::SystemTime::now());
    if let Err(err) = state.db.enqueue_job(&input_path, &output_path, mtime).await {
        return (StatusCode::INTERNAL_SERVER_ERROR, err.to_string()).into_response();
    }
    // NOTE(review): enqueue + lookup-by-input-path is not atomic; this relies
    // on upload paths being unique (UUID upload dirs) — confirm no dedup/race.
    let linked_job = match state
        .db
        .get_job_by_input_path(&input_path.to_string_lossy())
        .await
    {
        Ok(Some(job)) => job,
        Ok(None) => {
            return (StatusCode::INTERNAL_SERVER_ERROR, "linked job missing").into_response();
        }
        Err(err) => return (StatusCode::INTERNAL_SERVER_ERROR, err.to_string()).into_response(),
    };
    if let Err(err) = state
        .db
        .update_conversion_job_start(id, &output_path.to_string_lossy(), linked_job.id)
        .await
    {
        return (StatusCode::INTERNAL_SERVER_ERROR, err.to_string()).into_response();
    }
    StatusCode::OK.into_response()
}
/// GET /api/conversion/jobs/:id — report combined status: the linked engine
/// job's status/progress when one exists, otherwise the conversion row's own
/// status and zero progress. `download_ready` reflects the output file's
/// actual presence on disk.
pub(crate) async fn get_conversion_job_handler(
    State(state): State<Arc<AppState>>,
    Path(id): Path<i64>,
) -> impl IntoResponse {
    cleanup_expired_jobs(state.as_ref()).await;
    let Some(conversion_job) = (match state.db.get_conversion_job(id).await {
        Ok(job) => job,
        Err(err) => return (StatusCode::INTERNAL_SERVER_ERROR, err.to_string()).into_response(),
    }) else {
        return StatusCode::NOT_FOUND.into_response();
    };
    let linked_job = match conversion_job.linked_job_id {
        Some(job_id) => match state.db.get_job_by_id(job_id).await {
            Ok(job) => job,
            Err(err) => {
                return (StatusCode::INTERNAL_SERVER_ERROR, err.to_string()).into_response();
            }
        },
        None => None,
    };
    // Malformed probe JSON degrades to `None` rather than erroring.
    let probe = conversion_job
        .probe_json
        .as_deref()
        .and_then(|value| serde_json::from_str(value).ok());
    let download_ready = conversion_job
        .output_path
        .as_deref()
        .map(FsPath::new)
        .is_some_and(|path| path.exists());
    axum::Json(ConversionJobStatusResponse {
        id: conversion_job.id,
        status: linked_job
            .as_ref()
            .map(|job| job.status.to_string())
            .unwrap_or(conversion_job.status),
        progress: linked_job.as_ref().map(|job| job.progress).unwrap_or(0.0),
        linked_job_id: conversion_job.linked_job_id,
        output_path: conversion_job.output_path,
        download_ready,
        probe,
    })
    .into_response()
}
/// GET /api/conversion/jobs/:id/download — stream the finished output file as
/// an octet-stream attachment and mark the job downloaded (best-effort).
/// 404 when the job, its output path, or the file itself is missing.
pub(crate) async fn download_conversion_job_handler(
    State(state): State<Arc<AppState>>,
    Path(id): Path<i64>,
) -> impl IntoResponse {
    cleanup_expired_jobs(state.as_ref()).await;
    let Some(job) = (match state.db.get_conversion_job(id).await {
        Ok(job) => job,
        Err(err) => return (StatusCode::INTERNAL_SERVER_ERROR, err.to_string()).into_response(),
    }) else {
        return StatusCode::NOT_FOUND.into_response();
    };
    let Some(output_path) = job.output_path.clone() else {
        return StatusCode::NOT_FOUND.into_response();
    };
    if !FsPath::new(&output_path).exists() {
        return StatusCode::NOT_FOUND.into_response();
    }
    let file = match fs::File::open(&output_path).await {
        Ok(file) => file,
        Err(err) => return (StatusCode::INTERNAL_SERVER_ERROR, err.to_string()).into_response(),
    };
    let file_name = FsPath::new(&output_path)
        .file_name()
        .and_then(|value| value.to_str())
        .unwrap_or("output.bin");
    // Marked before streaming; failure to record the download is ignored.
    let _ = state.db.mark_conversion_job_downloaded(id).await;
    let stream = ReaderStream::new(file);
    let body = Body::from_stream(stream);
    let mut headers = HeaderMap::new();
    headers.insert(
        header::CONTENT_TYPE,
        HeaderValue::from_static("application/octet-stream"),
    );
    // Filenames with quotes/non-ASCII may fail HeaderValue parsing; fall back
    // to a bare attachment disposition rather than erroring.
    headers.insert(
        header::CONTENT_DISPOSITION,
        HeaderValue::from_str(&format!("attachment; filename=\"{}\"", file_name))
            .unwrap_or_else(|_| HeaderValue::from_static("attachment")),
    );
    (headers, body).into_response()
}
/// DELETE /api/conversion/jobs/:id — remove the job's linked engine job (if
/// finished), its on-disk artifacts, and its DB row. Returns 409 while the
/// linked engine job is still active.
pub(crate) async fn delete_conversion_job_handler(
    State(state): State<Arc<AppState>>,
    Path(id): Path<i64>,
) -> impl IntoResponse {
    cleanup_expired_jobs(state.as_ref()).await;
    let Some(job) = (match state.db.get_conversion_job(id).await {
        Ok(job) => job,
        Err(err) => return (StatusCode::INTERNAL_SERVER_ERROR, err.to_string()).into_response(),
    }) else {
        return StatusCode::NOT_FOUND.into_response();
    };
    if let Some(linked_job_id) = job.linked_job_id {
        if let Ok(Some(linked_job)) = state.db.get_job_by_id(linked_job_id).await {
            // Refuse to delete under a running transcode.
            if linked_job.is_active() {
                return (StatusCode::CONFLICT, "conversion job is still active").into_response();
            }
            let _ = state.db.delete_job(linked_job_id).await;
        }
    }
    if let Err(err) = remove_conversion_artifacts(&job).await {
        return (StatusCode::INTERNAL_SERVER_ERROR, err.to_string()).into_response();
    }
    if let Err(err) = state.db.delete_conversion_job(id).await {
        return (StatusCode::INTERNAL_SERVER_ERROR, err.to_string()).into_response();
    }
    StatusCode::OK.into_response()
}
/// Make a client-supplied filename safe to join under the upload directory.
///
/// Replaces path separators, Windows-reserved punctuation, and ASCII control
/// characters with `_`, then falls back to `"input.bin"` for names that are
/// empty or purely dot-relative (`"."`, `".."`) — a bare `".."` would
/// otherwise resolve to the parent of the upload directory when joined.
fn sanitize_filename(name: &str) -> String {
    let cleaned: String = name
        .chars()
        .map(|ch| match ch {
            '/' | '\\' | ':' | '*' | '?' | '"' | '<' | '>' | '|' => '_',
            // Control characters (incl. NUL) are never valid in filenames.
            ch if ch.is_control() => '_',
            other => other,
        })
        .collect();
    // `.` / `..` survive the character map but are path navigation, not names.
    if cleaned.is_empty() || cleaned == "." || cleaned == ".." {
        "input.bin".to_string()
    } else {
        cleaned
    }
}

View File

@@ -1,9 +1,10 @@
//! Authentication, rate limiting, and security middleware.
use super::AppState;
use crate::db::ApiTokenAccessLevel;
use axum::{
extract::{ConnectInfo, Request, State},
http::{HeaderName, HeaderValue, StatusCode, header},
http::{HeaderName, HeaderValue, Method, StatusCode, header},
middleware::Next,
response::{IntoResponse, Response},
};
@@ -73,6 +74,7 @@ pub(crate) async fn auth_middleware(
next: Next,
) -> Response {
let path = req.uri().path();
let method = req.method().clone();
// 1. API Protection: Only lock down /api routes
if path.starts_with("/api") {
@@ -132,6 +134,18 @@ pub(crate) async fn auth_middleware(
if let Ok(Some(_session)) = state.db.get_session(&t).await {
return next.run(req).await;
}
if let Ok(Some(api_token)) = state.db.get_active_api_token(&t).await {
let _ = state.db.update_api_token_last_used(api_token.id).await;
match api_token.access_level {
ApiTokenAccessLevel::FullAccess => return next.run(req).await,
ApiTokenAccessLevel::ReadOnly => {
if read_only_api_token_allows(&method, path) {
return next.run(req).await;
}
return (StatusCode::FORBIDDEN, "Forbidden").into_response();
}
}
}
}
return (StatusCode::UNAUTHORIZED, "Unauthorized").into_response();
@@ -143,6 +157,40 @@ pub(crate) async fn auth_middleware(
next.run(req).await
}
/// Decide whether a read-only API token may perform this request: only safe
/// methods (GET/HEAD) against an explicit allowlist of read endpoints, plus
/// the per-job details route `/api/jobs/:id/details`.
fn read_only_api_token_allows(method: &Method, path: &str) -> bool {
    // Mutating verbs are never allowed for read-only tokens.
    if *method != Method::GET && *method != Method::HEAD {
        return false;
    }
    // Exact-match allowlist of read-only endpoints.
    const READ_ONLY_PATHS: &[&str] = &[
        "/api/health",
        "/api/ready",
        "/api/events",
        "/api/stats",
        "/api/stats/aggregated",
        "/api/stats/daily",
        "/api/stats/detailed",
        "/api/stats/savings",
        "/api/jobs",
        "/api/jobs/table",
        "/api/logs/history",
        "/api/engine/status",
        "/api/engine/mode",
        "/api/system/resources",
        "/api/system/info",
        "/api/system/update",
        "/api/system/hardware",
        "/api/system/hardware/probe-log",
        "/api/library/intelligence",
        "/api/library/health",
        "/api/library/health/issues",
    ];
    READ_ONLY_PATHS.contains(&path)
        || (path.starts_with("/api/jobs/") && path.ends_with("/details"))
}
pub(crate) async fn rate_limit_middleware(
State(state): State<Arc<AppState>>,
req: Request,

View File

@@ -1,6 +1,7 @@
//! HTTP server module: routes, state, middleware, and API handlers.
pub mod auth;
pub mod conversion;
pub mod jobs;
pub mod middleware;
pub mod scan;
@@ -21,9 +22,10 @@ use crate::error::{AlchemistError, Result};
use crate::system::hardware::{HardwareInfo, HardwareProbeLog, HardwareState};
use axum::{
Router,
extract::State,
http::{StatusCode, Uri, header},
middleware as axum_middleware,
response::{IntoResponse, Response},
response::{IntoResponse, Redirect, Response},
routing::{delete, get, post},
};
#[cfg(feature = "embed-web")]
@@ -79,6 +81,7 @@ pub struct AppState {
pub library_scanner: Arc<crate::system::scanner::LibraryScanner>,
pub config_path: PathBuf,
pub config_mutable: bool,
pub base_url: String,
pub hardware_state: HardwareState,
pub hardware_probe_log: Arc<tokio::sync::RwLock<HardwareProbeLog>>,
pub resources_cache: Arc<tokio::sync::Mutex<Option<(serde_json::Value, std::time::Instant)>>>,
@@ -143,6 +146,11 @@ pub async fn run_server(args: RunServerArgs) -> Result<()> {
sys.refresh_cpu_usage();
sys.refresh_memory();
let base_url = {
let config = config.read().await;
config.system.base_url.clone()
};
let state = Arc::new(AppState {
db,
config,
@@ -160,6 +168,7 @@ pub async fn run_server(args: RunServerArgs) -> Result<()> {
library_scanner,
config_path,
config_mutable,
base_url: base_url.clone(),
hardware_state,
hardware_probe_log,
resources_cache: Arc::new(tokio::sync::Mutex::new(None)),
@@ -171,7 +180,18 @@ pub async fn run_server(args: RunServerArgs) -> Result<()> {
// Clone agent for shutdown handler before moving state into router
let shutdown_agent = state.agent.clone();
let app = app_router(state);
let inner_app = app_router(state.clone());
let app = if base_url.is_empty() {
inner_app
} else {
let redirect_target = format!("{base_url}/");
Router::new()
.route(
"/",
get(move || async move { Redirect::permanent(&redirect_target) }),
)
.nest(&base_url, inner_app)
};
let port = std::env::var("ALCHEMIST_SERVER_PORT")
.ok()
@@ -284,6 +304,7 @@ pub async fn run_server(args: RunServerArgs) -> Result<()> {
fn app_router(state: Arc<AppState>) -> Router {
use auth::*;
use conversion::*;
use jobs::*;
use scan::*;
use settings::*;
@@ -315,6 +336,20 @@ fn app_router(state: Arc<AppState>) -> Router {
.route("/api/jobs/:id/restart", post(restart_job_handler))
.route("/api/jobs/:id/delete", post(delete_job_handler))
.route("/api/jobs/:id/details", get(get_job_detail_handler))
.route("/api/conversion/uploads", post(upload_conversion_handler))
.route("/api/conversion/preview", post(preview_conversion_handler))
.route(
"/api/conversion/jobs/:id/start",
post(start_conversion_job_handler),
)
.route(
"/api/conversion/jobs/:id",
get(get_conversion_job_handler).delete(delete_conversion_job_handler),
)
.route(
"/api/conversion/jobs/:id/download",
get(download_conversion_job_handler),
)
.route("/api/events", get(sse_handler))
.route("/api/engine/pause", post(pause_engine_handler))
.route("/api/engine/resume", post(resume_engine_handler))
@@ -373,7 +408,9 @@ fn app_router(state: Arc<AppState>) -> Router {
)
.route(
"/api/settings/notifications",
get(get_notifications_handler).post(add_notification_handler),
get(get_notifications_handler)
.put(update_notifications_settings_handler)
.post(add_notification_handler),
)
.route(
"/api/settings/notifications/:id",
@@ -383,6 +420,14 @@ fn app_router(state: Arc<AppState>) -> Router {
"/api/settings/notifications/test",
post(test_notification_handler),
)
.route(
"/api/settings/api-tokens",
get(list_api_tokens_handler).post(create_api_token_handler),
)
.route(
"/api/settings/api-tokens/:id",
delete(revoke_api_token_handler),
)
.route(
"/api/settings/files",
get(get_file_settings_handler).post(update_file_settings_handler),
@@ -405,6 +450,7 @@ fn app_router(state: Arc<AppState>) -> Router {
// System Routes
.route("/api/system/resources", get(system_resources_handler))
.route("/api/system/info", get(get_system_info_handler))
.route("/api/system/update", get(get_system_update_handler))
.route("/api/system/hardware", get(get_hardware_info_handler))
.route(
"/api/system/hardware/probe-log",
@@ -778,11 +824,11 @@ fn sanitize_asset_path(raw: &str) -> Option<String> {
// Static asset handlers
async fn index_handler() -> impl IntoResponse {
static_handler(Uri::from_static("/index.html")).await
async fn index_handler(State(state): State<Arc<AppState>>) -> impl IntoResponse {
static_handler(State(state), Uri::from_static("/index.html")).await
}
async fn static_handler(uri: Uri) -> impl IntoResponse {
async fn static_handler(State(state): State<Arc<AppState>>, uri: Uri) -> impl IntoResponse {
let raw_path = uri.path().trim_start_matches('/');
let path = match sanitize_asset_path(raw_path) {
Some(path) => path,
@@ -791,7 +837,11 @@ async fn static_handler(uri: Uri) -> impl IntoResponse {
if let Some(content) = load_static_asset(&path) {
let mime = mime_guess::from_path(&path).first_or_octet_stream();
return ([(header::CONTENT_TYPE, mime.as_ref())], content).into_response();
return (
[(header::CONTENT_TYPE, mime.as_ref())],
maybe_inject_base_url(content, mime.as_ref(), &state.base_url),
)
.into_response();
}
// Attempt to serve index.html for directory paths (e.g. /jobs -> jobs/index.html)
@@ -799,7 +849,11 @@ async fn static_handler(uri: Uri) -> impl IntoResponse {
let index_path = format!("{}/index.html", path);
if let Some(content) = load_static_asset(&index_path) {
let mime = mime_guess::from_path("index.html").first_or_octet_stream();
return ([(header::CONTENT_TYPE, mime.as_ref())], content).into_response();
return (
[(header::CONTENT_TYPE, mime.as_ref())],
maybe_inject_base_url(content, mime.as_ref(), &state.base_url),
)
.into_response();
}
}
@@ -836,3 +890,14 @@ async fn static_handler(uri: Uri) -> impl IntoResponse {
// Default fallback to 404 for missing files.
StatusCode::NOT_FOUND.into_response()
}
/// Substitute the `__ALCHEMIST_BASE_URL__` placeholder in HTML assets with the
/// configured base URL; non-HTML or non-UTF-8 content passes through
/// untouched.
///
/// Avoids the previous unconditional `content.clone()`: `String::from_utf8`
/// takes the buffer by value and `FromUtf8Error::into_bytes` returns the very
/// same bytes on failure, so no copy is ever made.
fn maybe_inject_base_url(content: Vec<u8>, mime: &str, base_url: &str) -> Vec<u8> {
    if !mime.starts_with("text/html") {
        return content;
    }
    match String::from_utf8(content) {
        Ok(text) => text
            .replace("__ALCHEMIST_BASE_URL__", base_url)
            .into_bytes(),
        // Not valid UTF-8: recover the original buffer unchanged.
        Err(err) => err.into_bytes(),
    }
}

View File

@@ -8,13 +8,16 @@ use super::{
validate_notification_url, validate_transcode_payload,
};
use crate::config::Config;
use crate::db::ApiTokenAccessLevel;
use axum::{
extract::{Path, State},
http::StatusCode,
response::IntoResponse,
};
use rand::Rng;
use serde::{Deserialize, Serialize};
use serde_json::{Map as JsonMap, Value as JsonValue};
use std::sync::Arc;
// Transcode settings
@@ -414,47 +417,217 @@ pub(crate) async fn update_settings_config_handler(
pub(crate) struct AddNotificationTargetPayload {
name: String,
target_type: String,
endpoint_url: String,
#[serde(default)]
config_json: JsonValue,
#[serde(default)]
endpoint_url: Option<String>,
#[serde(default)]
auth_token: Option<String>,
events: Vec<String>,
enabled: bool,
}
pub(crate) async fn get_notifications_handler(
State(state): State<Arc<AppState>>,
) -> impl IntoResponse {
match state.db.get_notification_targets().await {
Ok(t) => axum::Json(serde_json::json!(t)).into_response(),
Err(e) => (StatusCode::INTERNAL_SERVER_ERROR, e.to_string()).into_response(),
/// API shape of a stored notification target, with the JSON columns decoded
/// into structured values.
#[derive(Serialize)]
pub(crate) struct NotificationTargetResponse {
    id: i64,
    name: String,
    // e.g. "webhook", "gotify", "discord_webhook" (see payload normalization).
    target_type: String,
    // Per-target-type configuration object.
    config_json: JsonValue,
    // Event names this target subscribes to.
    events: Vec<String>,
    enabled: bool,
    created_at: chrono::DateTime<chrono::Utc>,
}

/// Response for GET /api/settings/notifications: global schedule + targets.
#[derive(Serialize)]
pub(crate) struct NotificationsSettingsResponse {
    // Local wall-clock time for the daily summary, as stored in config.
    daily_summary_time_local: String,
    targets: Vec<NotificationTargetResponse>,
}

/// Request body for PUT /api/settings/notifications.
#[derive(Deserialize)]
pub(crate) struct UpdateNotificationsSettingsPayload {
    daily_summary_time_local: String,
}
/// Build a config-layer notification target from an API payload, folding the
/// legacy flat fields (`endpoint_url`, `auth_token`) into the per-type keys of
/// `config_json` — but only when the payload did not already supply those keys
/// explicitly. Finishes with `migrate_legacy_shape()` so old and new payload
/// shapes converge on one canonical form.
fn normalize_notification_payload(
    payload: &AddNotificationTargetPayload,
) -> crate::config::NotificationTargetConfig {
    let mut config_json = payload.config_json.clone();
    // Anything non-object (null, array, scalar) is replaced by an empty map.
    if !config_json.is_object() {
        config_json = JsonValue::Object(JsonMap::new());
    }
    let Some(config_map) = config_json.as_object_mut() else {
        unreachable!("notification config_json should always be an object here");
    };
    // Legacy-field mapping per target type; explicit keys always win.
    match payload.target_type.as_str() {
        "discord_webhook" | "discord" => {
            if !config_map.contains_key("webhook_url") {
                if let Some(endpoint_url) = payload.endpoint_url.as_ref() {
                    config_map.insert(
                        "webhook_url".to_string(),
                        JsonValue::String(endpoint_url.clone()),
                    );
                }
            }
        }
        "gotify" => {
            if !config_map.contains_key("server_url") {
                if let Some(endpoint_url) = payload.endpoint_url.as_ref() {
                    config_map.insert(
                        "server_url".to_string(),
                        JsonValue::String(endpoint_url.clone()),
                    );
                }
            }
            if !config_map.contains_key("app_token") {
                if let Some(auth_token) = payload.auth_token.as_ref() {
                    config_map.insert(
                        "app_token".to_string(),
                        JsonValue::String(auth_token.clone()),
                    );
                }
            }
        }
        "webhook" => {
            if !config_map.contains_key("url") {
                if let Some(endpoint_url) = payload.endpoint_url.as_ref() {
                    config_map.insert("url".to_string(), JsonValue::String(endpoint_url.clone()));
                }
            }
            if !config_map.contains_key("auth_token") {
                if let Some(auth_token) = payload.auth_token.as_ref() {
                    config_map.insert(
                        "auth_token".to_string(),
                        JsonValue::String(auth_token.clone()),
                    );
                }
            }
        }
        // Unknown target types: keep config_json as supplied.
        _ => {}
    }
    let mut target = crate::config::NotificationTargetConfig {
        name: payload.name.clone(),
        target_type: payload.target_type.clone(),
        config_json,
        // Legacy fields are carried along for migrate_legacy_shape().
        endpoint_url: payload.endpoint_url.clone(),
        auth_token: payload.auth_token.clone(),
        events: payload.events.clone(),
        enabled: payload.enabled,
    };
    target.migrate_legacy_shape();
    target
}
/// Convert a stored notification target row into its API response shape.
/// Malformed JSON columns degrade gracefully: `config_json` falls back to an
/// empty object and `events` to an empty list.
fn notification_target_response(
    target: crate::db::NotificationTarget,
) -> NotificationTargetResponse {
    let config_json = serde_json::from_str(&target.config_json)
        .unwrap_or_else(|_| JsonValue::Object(JsonMap::new()));
    let events = serde_json::from_str(&target.events).unwrap_or_default();
    NotificationTargetResponse {
        id: target.id,
        name: target.name,
        target_type: target.target_type,
        config_json,
        events,
        enabled: target.enabled,
        created_at: target.created_at,
    }
}
pub(crate) async fn add_notification_handler(
State(state): State<Arc<AppState>>,
axum::Json(payload): axum::Json<AddNotificationTargetPayload>,
) -> impl IntoResponse {
async fn validate_notification_target(
state: &AppState,
target: &crate::config::NotificationTargetConfig,
) -> std::result::Result<(), String> {
target.validate().map_err(|err| err.to_string())?;
let allow_local = state
.config
.read()
.await
.notifications
.allow_local_notifications;
if let Err(msg) = validate_notification_url(&payload.endpoint_url, allow_local).await {
let url = match target.target_type.as_str() {
"discord_webhook" => target
.config_json
.get("webhook_url")
.and_then(JsonValue::as_str)
.map(str::to_string),
"gotify" => target
.config_json
.get("server_url")
.and_then(JsonValue::as_str)
.map(str::to_string),
"webhook" => target
.config_json
.get("url")
.and_then(JsonValue::as_str)
.map(str::to_string),
_ => None,
};
if let Some(url) = url {
validate_notification_url(&url, allow_local).await?;
}
Ok(())
}
/// GET /api/settings/notifications — return the daily-summary schedule from
/// config together with all stored notification targets.
pub(crate) async fn get_notifications_handler(
    State(state): State<Arc<AppState>>,
) -> impl IntoResponse {
    match state.db.get_notification_targets().await {
        Ok(t) => {
            // Schedule lives in config, targets live in the DB.
            let daily_summary_time_local = state
                .config
                .read()
                .await
                .notifications
                .daily_summary_time_local
                .clone();
            axum::Json(NotificationsSettingsResponse {
                daily_summary_time_local,
                targets: t
                    .into_iter()
                    .map(notification_target_response)
                    .collect::<Vec<_>>(),
            })
            .into_response()
        }
        Err(e) => (StatusCode::INTERNAL_SERVER_ERROR, e.to_string()).into_response(),
    }
}
/// PUT /api/settings/notifications — update the daily-summary time. The new
/// config is validated and persisted before the in-memory copy is swapped, so
/// a failed save never leaves memory and disk out of sync.
pub(crate) async fn update_notifications_settings_handler(
    State(state): State<Arc<AppState>>,
    axum::Json(payload): axum::Json<UpdateNotificationsSettingsPayload>,
) -> impl IntoResponse {
    // Work on a clone so validation failures leave live config untouched.
    let mut next_config = state.config.read().await.clone();
    next_config.notifications.daily_summary_time_local = payload.daily_summary_time_local;
    if let Err(err) = next_config.validate() {
        return (StatusCode::BAD_REQUEST, err.to_string()).into_response();
    }
    if let Err(response) = save_config_or_response(&state, &next_config).await {
        return *response;
    }
    {
        let mut config = state.config.write().await;
        *config = next_config;
    }
    StatusCode::OK.into_response()
}
pub(crate) async fn add_notification_handler(
State(state): State<Arc<AppState>>,
axum::Json(payload): axum::Json<AddNotificationTargetPayload>,
) -> impl IntoResponse {
let target = normalize_notification_payload(&payload);
if let Err(msg) = validate_notification_target(&state, &target).await {
return (StatusCode::BAD_REQUEST, msg).into_response();
}
let mut next_config = state.config.read().await.clone();
next_config
.notifications
.targets
.push(crate::config::NotificationTargetConfig {
name: payload.name.clone(),
target_type: payload.target_type.clone(),
endpoint_url: payload.endpoint_url.clone(),
auth_token: payload.auth_token.clone(),
events: payload.events.clone(),
enabled: payload.enabled,
});
next_config.notifications.targets.push(target);
if let Err(e) = next_config.validate() {
return (StatusCode::BAD_REQUEST, e.to_string()).into_response();
@@ -470,12 +643,8 @@ pub(crate) async fn add_notification_handler(
match state.db.get_notification_targets().await {
Ok(targets) => targets
.into_iter()
.find(|target| {
target.name == payload.name
&& target.target_type == payload.target_type
&& target.endpoint_url == payload.endpoint_url
})
.map(|target| axum::Json(serde_json::json!(target)).into_response())
.find(|target| target.name == payload.name)
.map(|target| axum::Json(notification_target_response(target)).into_response())
.unwrap_or_else(|| StatusCode::OK.into_response()),
Err(e) => (StatusCode::INTERNAL_SERVER_ERROR, e.to_string()).into_response(),
}
@@ -494,10 +663,13 @@ pub(crate) async fn delete_notification_handler(
};
let mut next_config = state.config.read().await.clone();
let target_config_json = target.config_json.clone();
let parsed_target_config_json =
serde_json::from_str::<JsonValue>(&target_config_json).unwrap_or(JsonValue::Null);
next_config.notifications.targets.retain(|candidate| {
!(candidate.name == target.name
&& candidate.target_type == target.target_type
&& candidate.endpoint_url == target.endpoint_url)
&& candidate.config_json == parsed_target_config_json)
});
if let Err(response) = save_config_or_response(&state, &next_config).await {
return *response;
@@ -513,26 +685,18 @@ pub(crate) async fn test_notification_handler(
State(state): State<Arc<AppState>>,
axum::Json(payload): axum::Json<AddNotificationTargetPayload>,
) -> impl IntoResponse {
let allow_local = state
.config
.read()
.await
.notifications
.allow_local_notifications;
if let Err(msg) = validate_notification_url(&payload.endpoint_url, allow_local).await {
let target_config = normalize_notification_payload(&payload);
if let Err(msg) = validate_notification_target(&state, &target_config).await {
return (StatusCode::BAD_REQUEST, msg).into_response();
}
// Construct a temporary target
let events_json = serde_json::to_string(&payload.events).unwrap_or_default();
let target = crate::db::NotificationTarget {
id: 0,
name: payload.name,
target_type: payload.target_type,
endpoint_url: payload.endpoint_url,
auth_token: payload.auth_token,
events: events_json,
enabled: payload.enabled,
name: target_config.name,
target_type: target_config.target_type,
config_json: target_config.config_json.to_string(),
events: serde_json::to_string(&target_config.events).unwrap_or_else(|_| "[]".to_string()),
enabled: target_config.enabled,
created_at: chrono::Utc::now(),
};
@@ -542,6 +706,71 @@ pub(crate) async fn test_notification_handler(
}
}
// API token settings
/// Request body for POST /api/settings/api-tokens.
#[derive(Deserialize)]
pub(crate) struct CreateApiTokenPayload {
    name: String,
    // FullAccess or ReadOnly; gates what auth_middleware permits.
    access_level: ApiTokenAccessLevel,
}

/// Response for token creation. `plaintext_token` is shown exactly once —
/// only a stored representation of it remains in the DB afterwards.
#[derive(Serialize)]
pub(crate) struct CreatedApiTokenResponse {
    token: crate::db::ApiToken,
    plaintext_token: String,
}
/// GET /api/settings/api-tokens — list stored API tokens (metadata only; no
/// plaintext secrets are returned here).
pub(crate) async fn list_api_tokens_handler(
    State(state): State<Arc<AppState>>,
) -> impl IntoResponse {
    match state.db.list_api_tokens().await {
        Ok(tokens) => axum::Json(tokens).into_response(),
        Err(err) => (StatusCode::INTERNAL_SERVER_ERROR, err.to_string()).into_response(),
    }
}
/// POST /api/settings/api-tokens — mint a new token with an `alc_tok_` prefix
/// and 48 random alphanumeric characters, store it, and return the plaintext
/// exactly once in the response.
pub(crate) async fn create_api_token_handler(
    State(state): State<Arc<AppState>>,
    axum::Json(payload): axum::Json<CreateApiTokenPayload>,
) -> impl IntoResponse {
    if payload.name.trim().is_empty() {
        return (StatusCode::BAD_REQUEST, "token name must not be empty").into_response();
    }
    // Prefix makes tokens recognizable in logs/configs; 48 alphanumeric
    // chars ≈ 285 bits of entropy from the thread RNG.
    let plaintext_token = format!(
        "alc_tok_{}",
        rand::rng()
            .sample_iter(rand::distr::Alphanumeric)
            .take(48)
            .map(char::from)
            .collect::<String>()
    );
    match state
        .db
        .create_api_token(payload.name.trim(), &plaintext_token, payload.access_level)
        .await
    {
        Ok(token) => axum::Json(CreatedApiTokenResponse {
            token,
            plaintext_token,
        })
        .into_response(),
        Err(err) => (StatusCode::INTERNAL_SERVER_ERROR, err.to_string()).into_response(),
    }
}
/// DELETE handler: revokes the API token identified by `id`.
///
/// Maps a missing row to `404`; every other database error becomes `500`.
pub(crate) async fn revoke_api_token_handler(
    State(state): State<Arc<AppState>>,
    Path(id): Path<i64>,
) -> impl IntoResponse {
    match state.db.revoke_api_token(id).await {
        Ok(_) => StatusCode::OK.into_response(),
        Err(err) => {
            if super::is_row_not_found(&err) {
                StatusCode::NOT_FOUND.into_response()
            } else {
                (StatusCode::INTERNAL_SERVER_ERROR, err.to_string()).into_response()
            }
        }
    }
}
// Schedule settings
pub(crate) async fn get_schedule_handler(State(state): State<Arc<AppState>>) -> impl IntoResponse {

View File

@@ -1,6 +1,7 @@
//! System information, hardware info, resources, health handlers.
use super::{AppState, config_read_error_response};
use crate::media::pipeline::{Analyzer as _, Planner as _, TranscodeDecision};
use axum::{
extract::State,
http::StatusCode,
@@ -44,6 +45,26 @@ struct DuplicatePath {
// JSON payload for the library-intelligence endpoint: duplicate groupings
// plus per-category recommendation counts and the recommendations themselves.
struct LibraryIntelligenceResponse {
    duplicate_groups: Vec<DuplicateGroup>,
    // Number of redundant copies (each group contributes count - 1).
    total_duplicates: usize,
    recommendation_counts: RecommendationCounts,
    recommendations: Vec<IntelligenceRecommendation>,
}
/// Per-category tallies of intelligence recommendations.
#[derive(Serialize, Default)]
struct RecommendationCounts {
    duplicates: usize,
    remux_only_candidate: usize,
    wasteful_audio_layout: usize,
    commentary_cleanup_candidate: usize,
}

/// A single actionable suggestion surfaced by the library-intelligence scan.
#[derive(Serialize, Clone)]
struct IntelligenceRecommendation {
    // Serialized as "type" because `type` is a reserved word in Rust.
    #[serde(rename = "type")]
    recommendation_type: String,
    title: String,
    summary: String,
    /// Input path of the media file the recommendation applies to.
    path: String,
    suggested_action: String,
}
pub(crate) async fn system_resources_handler(State(state): State<Arc<AppState>>) -> Response {
@@ -118,10 +139,16 @@ pub(crate) async fn library_intelligence_handler(State(state): State<Arc<AppStat
use std::collections::HashMap;
use std::path::Path;
match state.db.get_duplicate_candidates().await {
Ok(candidates) => {
let duplicate_candidates = match state.db.get_duplicate_candidates().await {
Ok(candidates) => candidates,
Err(err) => {
error!("Failed to fetch duplicate candidates: {err}");
return StatusCode::INTERNAL_SERVER_ERROR.into_response();
}
};
let mut groups: HashMap<String, Vec<_>> = HashMap::new();
for candidate in candidates {
for candidate in duplicate_candidates {
let stem = Path::new(&candidate.input_path)
.file_stem()
.map(|s| s.to_string_lossy().to_lowercase())
@@ -153,20 +180,117 @@ pub(crate) async fn library_intelligence_handler(State(state): State<Arc<AppStat
.collect();
duplicate_groups.sort_by(|a, b| b.count.cmp(&a.count).then(a.stem.cmp(&b.stem)));
let total_duplicates = duplicate_groups.iter().map(|group| group.count - 1).sum();
let mut recommendations = Vec::new();
let mut recommendation_counts = RecommendationCounts {
duplicates: duplicate_groups.len(),
..RecommendationCounts::default()
};
let jobs = match state.db.get_all_jobs().await {
Ok(jobs) => jobs,
Err(err) => {
error!("Failed to fetch jobs for intelligence recommendations: {err}");
return StatusCode::INTERNAL_SERVER_ERROR.into_response();
}
};
let analyzer = crate::media::analyzer::FfmpegAnalyzer;
let config_snapshot = state.config.read().await.clone();
let hw_snapshot = state.hardware_state.snapshot().await;
let planner = crate::media::planner::BasicPlanner::new(
std::sync::Arc::new(config_snapshot.clone()),
hw_snapshot,
);
for job in jobs {
if job.status == crate::db::JobState::Cancelled {
continue;
}
let input_path = std::path::Path::new(&job.input_path);
if !input_path.exists() {
continue;
}
let analysis = match analyzer.analyze(input_path).await {
Ok(analysis) => analysis,
Err(_) => continue,
};
let profile: Option<crate::db::LibraryProfile> = state
.db
.get_profile_for_path(&job.input_path)
.await
.unwrap_or_default();
if let Ok(plan) = planner
.plan(
&analysis,
std::path::Path::new(&job.output_path),
profile.as_ref(),
)
.await
{
if matches!(plan.decision, TranscodeDecision::Remux { .. }) {
recommendation_counts.remux_only_candidate += 1;
recommendations.push(IntelligenceRecommendation {
recommendation_type: "remux_only_candidate".to_string(),
title: "Remux-only opportunity".to_string(),
summary: "This file already matches the target video codec and looks like a container-normalization candidate instead of a full re-encode.".to_string(),
path: job.input_path.clone(),
suggested_action: "Queue a remux to normalize the container without re-encoding the video stream.".to_string(),
});
}
}
if analysis.metadata.audio_is_heavy {
recommendation_counts.wasteful_audio_layout += 1;
recommendations.push(IntelligenceRecommendation {
recommendation_type: "wasteful_audio_layout".to_string(),
title: "Wasteful audio layout".to_string(),
summary: "This file contains a lossless or oversized audio stream that is likely worth transcoding for storage recovery.".to_string(),
path: job.input_path.clone(),
suggested_action: "Use a profile that transcodes heavy audio instead of copying it through unchanged.".to_string(),
});
}
if analysis.metadata.audio_streams.iter().any(|stream| {
stream
.title
.as_deref()
.map(|title| {
let lower = title.to_ascii_lowercase();
lower.contains("commentary")
|| lower.contains("director")
|| lower.contains("description")
|| lower.contains("descriptive")
})
.unwrap_or(false)
}) {
recommendation_counts.commentary_cleanup_candidate += 1;
recommendations.push(IntelligenceRecommendation {
recommendation_type: "commentary_cleanup_candidate".to_string(),
title: "Commentary or descriptive track cleanup".to_string(),
summary: "This file appears to contain commentary or descriptive audio tracks that existing stream rules could strip automatically.".to_string(),
path: job.input_path.clone(),
suggested_action: "Enable stream rules to strip commentary or descriptive tracks for this library.".to_string(),
});
}
}
recommendations.sort_by(|a, b| {
a.recommendation_type
.cmp(&b.recommendation_type)
.then(a.path.cmp(&b.path))
});
axum::Json(LibraryIntelligenceResponse {
duplicate_groups,
total_duplicates,
recommendation_counts,
recommendations,
})
.into_response()
}
Err(err) => {
error!("Failed to fetch duplicate candidates: {err}");
StatusCode::INTERNAL_SERVER_ERROR.into_response()
}
}
}
/// Query GPU utilization using nvidia-smi (NVIDIA) or other platform-specific tools
@@ -236,6 +360,14 @@ struct SystemInfo {
ffmpeg_version: String,
}
/// JSON payload for `GET /api/system/update`.
#[derive(Serialize)]
struct UpdateInfo {
    current_version: String,
    // None when the upstream repository has no published releases.
    latest_version: Option<String>,
    update_available: bool,
    release_url: Option<String>,
}
pub(crate) async fn get_system_info_handler(
State(state): State<Arc<AppState>>,
) -> impl IntoResponse {
@@ -258,6 +390,96 @@ pub(crate) async fn get_system_info_handler(
.into_response()
}
/// GET handler: compares the running version against the latest published
/// GitHub release.
///
/// Returns `UpdateInfo` on success; a failed upstream lookup maps to
/// `502 Bad Gateway` since the fault lies with the remote service.
pub(crate) async fn get_system_update_handler() -> impl IntoResponse {
    let current_version = crate::version::current().to_string();
    let lookup = fetch_latest_stable_release().await;
    match lookup {
        Err(err) => (
            StatusCode::BAD_GATEWAY,
            format!("Failed to check for updates: {err}"),
        )
            .into_response(),
        // No release published yet: report "no update" rather than an error.
        Ok(None) => axum::Json(UpdateInfo {
            current_version,
            latest_version: None,
            update_available: false,
            release_url: None,
        })
        .into_response(),
        Ok(Some((latest_version, release_url))) => {
            let update_available = version_is_newer(&latest_version, &current_version);
            axum::Json(UpdateInfo {
                current_version,
                latest_version: Some(latest_version),
                update_available,
                release_url: Some(release_url),
            })
            .into_response()
        }
    }
}
/// Minimal subset of GitHub's "latest release" API response that we consume.
#[derive(serde::Deserialize)]
struct GitHubReleaseResponse {
    // e.g. "v0.3.1" — the leading "v" is stripped before comparison.
    tag_name: String,
    html_url: String,
}
/// Query GitHub for the newest published Alchemist release.
///
/// Returns `Ok(None)` when the repository has no releases yet (GitHub
/// answers 404); otherwise `Ok(Some((version, html_url)))`, with the tag's
/// leading `v` stripped. Any transport or non-2xx status becomes `Err`.
async fn fetch_latest_stable_release() -> Result<Option<(String, String)>, reqwest::Error> {
    let http = reqwest::Client::builder()
        .timeout(Duration::from_secs(10))
        .user_agent(format!("alchemist/{}", crate::version::current()))
        .build()?;

    let reply = http
        .get("https://api.github.com/repos/bybrooklyn/alchemist/releases/latest")
        .send()
        .await?;

    // 404 here simply means no release has been published yet.
    if reply.status() == reqwest::StatusCode::NOT_FOUND {
        return Ok(None);
    }

    let body: GitHubReleaseResponse = reply.error_for_status()?.json().await?;
    let version = body.tag_name.trim_start_matches('v').to_string();
    Ok(Some((version, body.html_url)))
}
/// True when `latest` has a strictly higher (major, minor, patch) triple
/// than `current`; tuple comparison handles the precedence ordering.
fn version_is_newer(latest: &str, current: &str) -> bool {
    let candidate = parse_version(latest);
    let installed = parse_version(current);
    candidate > installed
}
/// Parse a version string into a `(major, minor, patch)` triple.
///
/// Tolerates a leading `v` and a pre-release/build suffix ("0.3.1-rc.1"
/// parses as (0, 3, 1)); missing or non-numeric components default to 0.
///
/// Bug fix: the previous implementation split on both `.` and `-` and
/// filtered out non-numeric parts, so a *numeric* pre-release component
/// could leak into the patch slot (e.g. "1.2-5" parsed as (1, 2, 5)).
/// Stripping everything after the first `-` before splitting on `.` keeps
/// pre-release data out of the core triple, per semver precedence rules.
fn parse_version(value: &str) -> (u64, u64, u64) {
    let sanitized = value.trim_start_matches('v');
    // Only the part before the first '-' is the major.minor.patch core.
    let core = sanitized.split('-').next().unwrap_or(sanitized);
    let mut parts = core.split('.').filter_map(|part| part.parse::<u64>().ok());
    (
        parts.next().unwrap_or(0),
        parts.next().unwrap_or(0),
        parts.next().unwrap_or(0),
    )
}
#[cfg(test)]
mod tests {
    use super::*;

    // Ordering: strictly-newer triples win, equal and older do not.
    #[test]
    fn version_compare_detects_newer_stable_release() {
        assert!(version_is_newer("0.3.1", "0.3.0"));
        assert!(!version_is_newer("0.3.0", "0.3.0"));
        assert!(!version_is_newer("0.2.9", "0.3.0"));
    }

    // Parsing: leading "v" and pre-release suffixes are ignored.
    #[test]
    fn parse_version_ignores_prefix_and_suffix() {
        assert_eq!(parse_version("v0.3.1"), (0, 3, 1));
        assert_eq!(parse_version("0.3.1-rc.1"), (0, 3, 1));
    }
}
pub(crate) async fn get_hardware_info_handler(
State(state): State<Arc<AppState>>,
) -> impl IntoResponse {

View File

@@ -114,6 +114,7 @@ where
library_scanner: Arc::new(crate::system::scanner::LibraryScanner::new(db, config)),
config_path: config_path.clone(),
config_mutable: true,
base_url: String::new(),
hardware_state,
hardware_probe_log,
resources_cache: Arc::new(tokio::sync::Mutex::new(None)),
@@ -135,6 +136,17 @@ async fn create_session(
Ok(token)
}
/// Test helper: inserts an API token named "test-token" with the requested
/// access level and returns the plaintext secret for use in auth headers.
async fn create_api_token(
    db: &crate::db::Db,
    access_level: crate::db::ApiTokenAccessLevel,
) -> std::result::Result<String, Box<dyn std::error::Error>> {
    let secret = format!("api-token-{}", rand::random::<u64>());
    let _record = db
        .create_api_token("test-token", &secret, access_level)
        .await?;
    Ok(secret)
}
fn auth_request(method: Method, uri: &str, token: &str, body: Body) -> Request<Body> {
match Request::builder()
.method(method)
@@ -147,6 +159,18 @@ fn auth_request(method: Method, uri: &str, token: &str, body: Body) -> Request<B
}
}
/// Build a request carrying an `Authorization: Bearer <token>` header,
/// panicking if the builder rejects the inputs (test-only helper).
fn bearer_request(method: Method, uri: &str, token: &str, body: Body) -> Request<Body> {
    let built = Request::builder()
        .method(method)
        .uri(uri)
        .header(header::AUTHORIZATION, format!("Bearer {token}"))
        .body(body);
    match built {
        Ok(request) => request,
        Err(err) => panic!("failed to build bearer request: {err}"),
    }
}
fn auth_json_request(
method: Method,
uri: &str,
@@ -514,6 +538,234 @@ async fn engine_status_endpoint_reports_draining_state()
Ok(())
}
// A read-only bearer token may hit observability routes (GET /api/system/info)
// but is forbidden from mutation routes (POST /api/engine/resume).
#[tokio::test]
async fn read_only_api_token_allows_observability_only_routes()
-> std::result::Result<(), Box<dyn std::error::Error>> {
    let (state, app, config_path, db_path) = build_test_app(false, 8, |_| {}).await?;
    let token =
        create_api_token(state.db.as_ref(), crate::db::ApiTokenAccessLevel::ReadOnly).await?;

    // Observability route: allowed.
    let response = app
        .clone()
        .oneshot(bearer_request(
            Method::GET,
            "/api/system/info",
            &token,
            Body::empty(),
        ))
        .await?;
    assert_eq!(response.status(), StatusCode::OK);

    // Mutation route: rejected with 403.
    let response = app
        .oneshot(bearer_request(
            Method::POST,
            "/api/engine/resume",
            &token,
            Body::empty(),
        ))
        .await?;
    assert_eq!(response.status(), StatusCode::FORBIDDEN);

    // Best-effort cleanup of the per-test config and database files.
    drop(state);
    let _ = std::fs::remove_file(config_path);
    let _ = std::fs::remove_file(db_path);
    Ok(())
}
// A full-access bearer token can invoke mutation routes.
#[tokio::test]
async fn full_access_api_token_allows_mutation_routes()
-> std::result::Result<(), Box<dyn std::error::Error>> {
    let (state, app, config_path, db_path) = build_test_app(false, 8, |_| {}).await?;
    let token = create_api_token(
        state.db.as_ref(),
        crate::db::ApiTokenAccessLevel::FullAccess,
    )
    .await?;

    let response = app
        .oneshot(bearer_request(
            Method::POST,
            "/api/engine/resume",
            &token,
            Body::empty(),
        ))
        .await?;
    assert_eq!(response.status(), StatusCode::OK);

    // Best-effort cleanup of the per-test config and database files.
    drop(state);
    let _ = std::fs::remove_file(config_path);
    let _ = std::fs::remove_file(db_path);
    Ok(())
}
// End-to-end lifecycle over the settings endpoints: create a token with a
// session, list it, revoke it, and confirm the revocation is persisted.
#[tokio::test]
async fn api_token_endpoints_create_list_and_revoke_tokens()
-> std::result::Result<(), Box<dyn std::error::Error>> {
    let (state, app, config_path, db_path) = build_test_app(false, 8, |_| {}).await?;
    let session = create_session(state.db.as_ref()).await?;

    // Create: response carries the stored record plus the plaintext secret.
    let create_response = app
        .clone()
        .oneshot(auth_json_request(
            Method::POST,
            "/api/settings/api-tokens",
            &session,
            json!({
                "name": "Prometheus",
                "access_level": "read_only"
            }),
        ))
        .await?;
    assert_eq!(create_response.status(), StatusCode::OK);
    let create_payload: serde_json::Value =
        serde_json::from_slice(&to_bytes(create_response.into_body(), usize::MAX).await?)?;
    assert_eq!(create_payload["token"]["name"], "Prometheus");
    assert_eq!(create_payload["token"]["access_level"], "read_only");
    assert!(create_payload["plaintext_token"].as_str().is_some());

    // List: the new token shows up with an id we can revoke by.
    let list_response = app
        .clone()
        .oneshot(auth_request(
            Method::GET,
            "/api/settings/api-tokens",
            &session,
            Body::empty(),
        ))
        .await?;
    assert_eq!(list_response.status(), StatusCode::OK);
    let list_payload: serde_json::Value =
        serde_json::from_slice(&to_bytes(list_response.into_body(), usize::MAX).await?)?;
    let token_id = list_payload[0]["id"].as_i64().ok_or("missing token id")?;

    // Revoke: soft-delete — the row remains but gains a revoked_at timestamp.
    let revoke_response = app
        .oneshot(auth_request(
            Method::DELETE,
            &format!("/api/settings/api-tokens/{token_id}"),
            &session,
            Body::empty(),
        ))
        .await?;
    assert_eq!(revoke_response.status(), StatusCode::OK);
    let tokens = state.db.list_api_tokens().await?;
    assert_eq!(tokens.len(), 1);
    assert!(tokens[0].revoked_at.is_some());

    // Best-effort cleanup of the per-test config and database files.
    drop(state);
    let _ = std::fs::remove_file(config_path);
    let _ = std::fs::remove_file(db_path);
    Ok(())
}
// Stored token material must be a hash of the plaintext, never the plaintext
// itself, and must match the canonical hashing helper.
#[tokio::test]
async fn api_token_storage_hashes_plaintext_token_material()
-> std::result::Result<(), Box<dyn std::error::Error>> {
    let (state, _app, config_path, db_path) = build_test_app(false, 8, |_| {}).await?;
    let plaintext = format!("api-token-{}", rand::random::<u64>());
    let _ = state
        .db
        .create_api_token(
            "hash-test",
            &plaintext,
            crate::db::ApiTokenAccessLevel::ReadOnly,
        )
        .await?;

    // Lookup by plaintext proves hashing is applied consistently on read.
    let record = state
        .db
        .get_active_api_token(&plaintext)
        .await?
        .ok_or("missing stored api token")?;
    assert_ne!(record.token_hash, plaintext);
    assert_eq!(record.token_hash, crate::db::hash_api_token(&plaintext));

    // Best-effort cleanup of the per-test config and database files.
    drop(state);
    let _ = std::fs::remove_file(config_path);
    let _ = std::fs::remove_file(db_path);
    Ok(())
}
// After revocation, even a previously full-access token is rejected with 401
// by the auth middleware.
#[tokio::test]
async fn revoked_api_token_is_rejected_by_auth_middleware()
-> std::result::Result<(), Box<dyn std::error::Error>> {
    let (state, app, config_path, db_path) = build_test_app(false, 8, |_| {}).await?;
    let token = create_api_token(
        state.db.as_ref(),
        crate::db::ApiTokenAccessLevel::FullAccess,
    )
    .await?;

    // Revoke directly through the DB layer, bypassing the HTTP endpoint.
    let stored = state
        .db
        .get_active_api_token(&token)
        .await?
        .ok_or("missing api token")?;
    state.db.revoke_api_token(stored.id).await?;

    let response = app
        .oneshot(bearer_request(
            Method::GET,
            "/api/system/info",
            &token,
            Body::empty(),
        ))
        .await?;
    assert_eq!(response.status(), StatusCode::UNAUTHORIZED);

    // Best-effort cleanup of the per-test config and database files.
    drop(state);
    let _ = std::fs::remove_file(config_path);
    let _ = std::fs::remove_file(db_path);
    Ok(())
}
// Settings/config routes count as sensitive, not observability, so a
// read-only token gets 403 even on a GET.
#[tokio::test]
async fn read_only_api_token_cannot_access_settings_config()
-> std::result::Result<(), Box<dyn std::error::Error>> {
    let (state, app, config_path, db_path) = build_test_app(false, 8, |_| {}).await?;
    let token =
        create_api_token(state.db.as_ref(), crate::db::ApiTokenAccessLevel::ReadOnly).await?;

    let response = app
        .oneshot(bearer_request(
            Method::GET,
            "/api/settings/config",
            &token,
            Body::empty(),
        ))
        .await?;
    assert_eq!(response.status(), StatusCode::FORBIDDEN);

    // Best-effort cleanup of the per-test config and database files.
    drop(state);
    let _ = std::fs::remove_file(config_path);
    let _ = std::fs::remove_file(db_path);
    Ok(())
}
// When the app is nested under a base URL (/alchemist), authenticated
// requests to prefixed routes still pass through the auth middleware.
#[tokio::test]
async fn nested_base_url_routes_engine_status_through_auth_middleware()
-> std::result::Result<(), Box<dyn std::error::Error>> {
    let (state, _app, config_path, db_path) = build_test_app(false, 8, |config| {
        config.system.base_url = "/alchemist".to_string();
    })
    .await?;
    let token = create_session(state.db.as_ref()).await?;

    // Rebuild the router nested under the configured prefix.
    let app = Router::new().nest("/alchemist", app_router(state.clone()));
    let response = app
        .oneshot(auth_request(
            Method::GET,
            "/alchemist/api/engine/status",
            &token,
            Body::empty(),
        ))
        .await?;
    assert_eq!(response.status(), StatusCode::OK);

    // Best-effort cleanup of the per-test config and database files.
    drop(state);
    let _ = std::fs::remove_file(config_path);
    let _ = std::fs::remove_file(db_path);
    Ok(())
}
#[tokio::test]
async fn hardware_probe_log_route_returns_runtime_log()
-> std::result::Result<(), Box<dyn std::error::Error>> {
@@ -664,10 +916,11 @@ async fn setup_complete_accepts_nested_settings_payload()
settings.appearance.active_theme_id = Some("midnight".to_string());
settings.notifications.targets = vec![crate::config::NotificationTargetConfig {
name: "Discord".to_string(),
target_type: "discord".to_string(),
endpoint_url: "https://discord.com/api/webhooks/test".to_string(),
target_type: "discord_webhook".to_string(),
config_json: serde_json::json!({ "webhook_url": "https://discord.com/api/webhooks/test" }),
endpoint_url: Some("https://discord.com/api/webhooks/test".to_string()),
auth_token: None,
events: vec!["completed".to_string()],
events: vec!["encode.completed".to_string()],
enabled: true,
}];
settings.schedule.windows = vec![crate::config::ScheduleWindowConfig {
@@ -1005,10 +1258,11 @@ async fn settings_bundle_put_projects_extended_settings_to_db()
payload.notifications.enabled = true;
payload.notifications.targets = vec![crate::config::NotificationTargetConfig {
name: "Discord".to_string(),
target_type: "discord".to_string(),
endpoint_url: "https://discord.com/api/webhooks/test".to_string(),
target_type: "discord_webhook".to_string(),
config_json: serde_json::json!({ "webhook_url": "https://discord.com/api/webhooks/test" }),
endpoint_url: Some("https://discord.com/api/webhooks/test".to_string()),
auth_token: None,
events: vec!["completed".to_string()],
events: vec!["encode.completed".to_string()],
enabled: true,
}];
@@ -1035,7 +1289,7 @@ async fn settings_bundle_put_projects_extended_settings_to_db()
let notifications = state.db.get_notification_targets().await?;
assert_eq!(notifications.len(), 1);
assert_eq!(notifications[0].target_type, "discord");
assert_eq!(notifications[0].target_type, "discord_webhook");
let theme = state.db.get_preference("active_theme_id").await?;
assert_eq!(theme.as_deref(), Some("midnight"));

View File

@@ -65,6 +65,7 @@ pub fn parse_raw_config(raw_toml: &str) -> Result<Config> {
let mut config: Config =
toml::from_str(raw_toml).map_err(|err| AlchemistError::Config(err.to_string()))?;
config.migrate_legacy_notifications();
config.apply_env_overrides();
config
.validate()
.map_err(|err| AlchemistError::Config(err.to_string()))?;

View File

@@ -203,13 +203,13 @@ impl HardwareState {
}
pub trait CommandRunner {
fn output(&self, program: &str, args: &[String]) -> std::io::Result<Output>;
fn output(&self, program: &str, args: &[String]) -> io::Result<Output>;
}
pub struct SystemCommandRunner;
impl CommandRunner for SystemCommandRunner {
fn output(&self, program: &str, args: &[String]) -> std::io::Result<Output> {
fn output(&self, program: &str, args: &[String]) -> io::Result<Output> {
run_command_with_timeout(program, args, Duration::from_secs(8))
}
}
@@ -1389,7 +1389,7 @@ mod tests {
}
impl CommandRunner for FakeRunner {
fn output(&self, program: &str, args: &[String]) -> std::io::Result<Output> {
fn output(&self, program: &str, args: &[String]) -> io::Result<Output> {
match program {
"nvidia-smi" if self.nvidia_smi_ok => Ok(Output {
status: exit_status(true),

View File

@@ -55,7 +55,7 @@ impl LibraryScanner {
let config = self.config.clone();
tokio::spawn(async move {
info!("🚀 Starting full library scan...");
info!("Starting full library scan...");
let watch_dirs = match db.get_watch_dirs().await {
Ok(dirs) => dirs,
@@ -141,7 +141,7 @@ impl LibraryScanner {
s.files_added = added;
s.is_running = false;
s.current_folder = None;
info!("Library scan complete. Added {} new files.", added);
info!("Library scan complete. Added {} new files.", added);
});
Ok(())

View File

@@ -48,7 +48,7 @@ async fn v0_2_5_fixture_upgrades_and_preserves_core_state() -> Result<()> {
let notifications = db.get_notification_targets().await?;
assert_eq!(notifications.len(), 1);
assert_eq!(notifications[0].target_type, "discord");
assert_eq!(notifications[0].target_type, "discord_webhook");
let schedule_windows = db.get_schedule_windows().await?;
assert_eq!(schedule_windows.len(), 1);
@@ -101,7 +101,7 @@ async fn v0_2_5_fixture_upgrades_and_preserves_core_state() -> Result<()> {
.fetch_one(&pool)
.await?
.get("value");
assert_eq!(schema_version, "6");
assert_eq!(schema_version, "8");
let min_compatible_version: String =
sqlx::query("SELECT value FROM schema_info WHERE key = 'min_compatible_version'")

View File

@@ -1,6 +1,6 @@
{
"name": "alchemist-web-e2e",
"version": "0.3.0",
"version": "0.3.1-rc.1",
"private": true,
"packageManager": "bun@1",
"type": "module",

View File

@@ -77,6 +77,14 @@ test("About modal opens and does not contain Al badge", async ({ page }) => {
ffmpeg_version: "N-12345",
});
});
await page.route("**/api/system/update", async (route) => {
await fulfillJson(route, 200, {
current_version: "0.3.0",
latest_version: "0.3.1",
update_available: true,
release_url: "https://github.com/bybrooklyn/alchemist/releases/tag/v0.3.1",
});
});
await page.goto("/");
await page.getByRole("button", { name: "About" }).click();
@@ -84,6 +92,8 @@ test("About modal opens and does not contain Al badge", async ({ page }) => {
await expect(page.getByRole("dialog")).toBeVisible();
await expect(page.getByRole("heading", { name: "Alchemist" })).toBeVisible();
await expect(page.getByText("v0.3.0")).toBeVisible();
await expect(page.getByText("v0.3.1")).toBeVisible();
await expect(page.getByRole("link", { name: "Download Update" })).toBeVisible();
await expect(page.getByText(/^Al$/)).toHaveCount(0);
});

27
web/bun.lock generated
View File

@@ -35,6 +35,7 @@
"smol-toml": "^1.6.1",
"svgo": "^4.0.1",
"unstorage": "^1.17.5",
"vite": "6.4.2",
"yaml": "^2.8.3",
},
"packages": {
@@ -670,30 +671,6 @@
"kleur": ["kleur@4.1.5", "", {}, "sha512-o+NO+8WrRiQEE4/7nwRJhN1HWpVmJm511pBHUxPLtp0BUISzlBplORYSmTclCnJvQq2tKu/sgl3xVpkc7ZWuQQ=="],
"lightningcss": ["lightningcss@1.30.2", "", { "dependencies": { "detect-libc": "^2.0.3" }, "optionalDependencies": { "lightningcss-android-arm64": "1.30.2", "lightningcss-darwin-arm64": "1.30.2", "lightningcss-darwin-x64": "1.30.2", "lightningcss-freebsd-x64": "1.30.2", "lightningcss-linux-arm-gnueabihf": "1.30.2", "lightningcss-linux-arm64-gnu": "1.30.2", "lightningcss-linux-arm64-musl": "1.30.2", "lightningcss-linux-x64-gnu": "1.30.2", "lightningcss-linux-x64-musl": "1.30.2", "lightningcss-win32-arm64-msvc": "1.30.2", "lightningcss-win32-x64-msvc": "1.30.2" } }, "sha512-utfs7Pr5uJyyvDETitgsaqSyjCb2qNRAtuqUeWIAKztsOYdcACf2KtARYXg2pSvhkt+9NfoaNY7fxjl6nuMjIQ=="],
"lightningcss-android-arm64": ["lightningcss-android-arm64@1.30.2", "", { "os": "android", "cpu": "arm64" }, "sha512-BH9sEdOCahSgmkVhBLeU7Hc9DWeZ1Eb6wNS6Da8igvUwAe0sqROHddIlvU06q3WyXVEOYDZ6ykBZQnjTbmo4+A=="],
"lightningcss-darwin-arm64": ["lightningcss-darwin-arm64@1.30.2", "", { "os": "darwin", "cpu": "arm64" }, "sha512-ylTcDJBN3Hp21TdhRT5zBOIi73P6/W0qwvlFEk22fkdXchtNTOU4Qc37SkzV+EKYxLouZ6M4LG9NfZ1qkhhBWA=="],
"lightningcss-darwin-x64": ["lightningcss-darwin-x64@1.30.2", "", { "os": "darwin", "cpu": "x64" }, "sha512-oBZgKchomuDYxr7ilwLcyms6BCyLn0z8J0+ZZmfpjwg9fRVZIR5/GMXd7r9RH94iDhld3UmSjBM6nXWM2TfZTQ=="],
"lightningcss-freebsd-x64": ["lightningcss-freebsd-x64@1.30.2", "", { "os": "freebsd", "cpu": "x64" }, "sha512-c2bH6xTrf4BDpK8MoGG4Bd6zAMZDAXS569UxCAGcA7IKbHNMlhGQ89eRmvpIUGfKWNVdbhSbkQaWhEoMGmGslA=="],
"lightningcss-linux-arm-gnueabihf": ["lightningcss-linux-arm-gnueabihf@1.30.2", "", { "os": "linux", "cpu": "arm" }, "sha512-eVdpxh4wYcm0PofJIZVuYuLiqBIakQ9uFZmipf6LF/HRj5Bgm0eb3qL/mr1smyXIS1twwOxNWndd8z0E374hiA=="],
"lightningcss-linux-arm64-gnu": ["lightningcss-linux-arm64-gnu@1.30.2", "", { "os": "linux", "cpu": "arm64" }, "sha512-UK65WJAbwIJbiBFXpxrbTNArtfuznvxAJw4Q2ZGlU8kPeDIWEX1dg3rn2veBVUylA2Ezg89ktszWbaQnxD/e3A=="],
"lightningcss-linux-arm64-musl": ["lightningcss-linux-arm64-musl@1.30.2", "", { "os": "linux", "cpu": "arm64" }, "sha512-5Vh9dGeblpTxWHpOx8iauV02popZDsCYMPIgiuw97OJ5uaDsL86cnqSFs5LZkG3ghHoX5isLgWzMs+eD1YzrnA=="],
"lightningcss-linux-x64-gnu": ["lightningcss-linux-x64-gnu@1.30.2", "", { "os": "linux", "cpu": "x64" }, "sha512-Cfd46gdmj1vQ+lR6VRTTadNHu6ALuw2pKR9lYq4FnhvgBc4zWY1EtZcAc6EffShbb1MFrIPfLDXD6Xprbnni4w=="],
"lightningcss-linux-x64-musl": ["lightningcss-linux-x64-musl@1.30.2", "", { "os": "linux", "cpu": "x64" }, "sha512-XJaLUUFXb6/QG2lGIW6aIk6jKdtjtcffUT0NKvIqhSBY3hh9Ch+1LCeH80dR9q9LBjG3ewbDjnumefsLsP6aiA=="],
"lightningcss-win32-arm64-msvc": ["lightningcss-win32-arm64-msvc@1.30.2", "", { "os": "win32", "cpu": "arm64" }, "sha512-FZn+vaj7zLv//D/192WFFVA0RgHawIcHqLX9xuWiQt7P0PtdFEVaxgF9rjM/IRYHQXNnk61/H/gb2Ei+kUQ4xQ=="],
"lightningcss-win32-x64-msvc": ["lightningcss-win32-x64-msvc@1.30.2", "", { "os": "win32", "cpu": "x64" }, "sha512-5g1yc73p+iAkid5phb4oVFMB45417DkRevRbt/El/gKXJk4jid+vPFF/AXbxn05Aky8PapwzZrdJShv5C0avjw=="],
"lilconfig": ["lilconfig@3.1.3", "", {}, "sha512-/vlFKAoH5Cgt3Ie+JLhRbwOsCQePABiU3tJ1egGvyQ+33R/vcwM2Zl2QR/LzjsBeItPt3oSVXapn+m4nQDvpzw=="],
"lines-and-columns": ["lines-and-columns@1.2.4", "", {}, "sha512-7ylylesZQ/PV29jhEDl3Ufjo6ZX7gCqJr5F7PKrqc93v7fzSymt1BpwEU8nAUXs8qzzvqhbjhK5QZg6Mt/HkBg=="],
@@ -1066,7 +1043,7 @@
"victory-vendor": ["victory-vendor@36.9.2", "", { "dependencies": { "@types/d3-array": "^3.0.3", "@types/d3-ease": "^3.0.0", "@types/d3-interpolate": "^3.0.1", "@types/d3-scale": "^4.0.2", "@types/d3-shape": "^3.1.0", "@types/d3-time": "^3.0.0", "@types/d3-timer": "^3.0.0", "d3-array": "^3.1.6", "d3-ease": "^3.0.1", "d3-interpolate": "^3.0.1", "d3-scale": "^4.0.2", "d3-shape": "^3.1.0", "d3-time": "^3.0.0", "d3-timer": "^3.0.1" } }, "sha512-PnpQQMuxlwYdocC8fIJqVXvkeViHYzotI+NJrCuav0ZYFoq912ZHBk3mCeuj+5/VpodOjPe1z0Fk2ihgzlXqjQ=="],
"vite": ["vite@6.4.1", "", { "dependencies": { "esbuild": "^0.25.0", "fdir": "^6.4.4", "picomatch": "^4.0.2", "postcss": "^8.5.3", "rollup": "^4.34.9", "tinyglobby": "^0.2.13" }, "optionalDependencies": { "fsevents": "~2.3.3" }, "peerDependencies": { "@types/node": "^18.0.0 || ^20.0.0 || >=22.0.0", "jiti": ">=1.21.0", "less": "*", "lightningcss": "^1.21.0", "sass": "*", "sass-embedded": "*", "stylus": "*", "sugarss": "*", "terser": "^5.16.0", "tsx": "^4.8.1", "yaml": "^2.4.2" }, "optionalPeers": ["@types/node", "jiti", "less", "lightningcss", "sass", "sass-embedded", "stylus", "sugarss", "terser", "tsx", "yaml"], "bin": { "vite": "bin/vite.js" } }, "sha512-+Oxm7q9hDoLMyJOYfUYBuHQo+dkAloi33apOPP56pzj+vsdJDzr+j1NISE5pyaAuKL4A3UD34qd0lx5+kfKp2g=="],
"vite": ["vite@6.4.2", "", { "dependencies": { "esbuild": "^0.25.0", "fdir": "^6.4.4", "picomatch": "^4.0.2", "postcss": "^8.5.3", "rollup": "^4.34.9", "tinyglobby": "^0.2.13" }, "optionalDependencies": { "fsevents": "~2.3.3" }, "peerDependencies": { "@types/node": "^18.0.0 || ^20.0.0 || >=22.0.0", "jiti": ">=1.21.0", "less": "*", "lightningcss": "^1.21.0", "sass": "*", "sass-embedded": "*", "stylus": "*", "sugarss": "*", "terser": "^5.16.0", "tsx": "^4.8.1", "yaml": "^2.4.2" }, "optionalPeers": ["@types/node", "jiti", "less", "lightningcss", "sass", "sass-embedded", "stylus", "sugarss", "terser", "tsx", "yaml"], "bin": { "vite": "bin/vite.js" } }, "sha512-2N/55r4JDJ4gdrCvGgINMy+HH3iRpNIz8K6SFwVsA+JbQScLiC+clmAxBgwiSPgcG9U15QmvqCGWzMbqda5zGQ=="],
"vitefu": ["vitefu@1.1.1", "", { "peerDependencies": { "vite": "^3.0.0 || ^4.0.0 || ^5.0.0 || ^6.0.0 || ^7.0.0-beta.0" }, "optionalPeers": ["vite"] }, "sha512-B/Fegf3i8zh0yFbpzZ21amWzHmuNlLlmJT6n7bu5e+pCHUKQIfXSYokrqOBGEMMe9UG2sostKQF9mml/vYaWJQ=="],

View File

@@ -1,6 +1,6 @@
{
"name": "alchemist-web",
"version": "0.3.0",
"version": "0.3.1-rc.1",
"private": true,
"packageManager": "bun@1",
"type": "module",
@@ -40,6 +40,7 @@
"rollup": "^4.60.1",
"smol-toml": "^1.6.1",
"svgo": "^4.0.1",
"vite": "6.4.2",
"unstorage": "^1.17.5",
"yaml": "^2.8.3"
}

View File

@@ -12,6 +12,13 @@ interface SystemInfo {
ffmpeg_version: string;
}
/** Shape of the `GET /api/system/update` response. */
interface UpdateInfo {
  current_version: string;
  /** Null when the upstream repository has no published releases. */
  latest_version: string | null;
  update_available: boolean;
  release_url: string | null;
}
interface AboutDialogProps {
isOpen: boolean;
onClose: () => void;
@@ -34,6 +41,7 @@ function focusableElements(root: HTMLElement): HTMLElement[] {
export default function AboutDialog({ isOpen, onClose }: AboutDialogProps) {
const [info, setInfo] = useState<SystemInfo | null>(null);
const [updateInfo, setUpdateInfo] = useState<UpdateInfo | null>(null);
const dialogRef = useRef<HTMLDivElement | null>(null);
const lastFocusedRef = useRef<HTMLElement | null>(null);
@@ -48,6 +56,16 @@ export default function AboutDialog({ isOpen, onClose }: AboutDialogProps) {
}
}, [isOpen, info]);
useEffect(() => {
if (isOpen && !updateInfo) {
apiJson<UpdateInfo>("/api/system/update")
.then(setUpdateInfo)
.catch(() => {
// Non-critical; keep update checks soft-fail.
});
}
}, [isOpen, updateInfo]);
useEffect(() => {
if (!isOpen) {
return;
@@ -161,6 +179,31 @@ export default function AboutDialog({ isOpen, onClose }: AboutDialogProps) {
<InfoRow icon={Server} label="System" value={info.os_version} />
<InfoRow icon={Cpu} label="Environment" value={info.is_docker ? "Docker Container" : "Native"} />
<InfoRow icon={ShieldCheck} label="Telemetry" value={info.telemetry_enabled ? "Enabled" : "Disabled"} />
{updateInfo?.latest_version && (
<div className="rounded-xl bg-helios-surface-soft border border-helios-line/10 p-3">
<div className="flex items-center justify-between gap-3">
<div>
<p className="text-xs font-medium text-helios-slate">Latest Stable</p>
<p className="text-sm font-bold text-helios-ink">v{updateInfo.latest_version}</p>
</div>
{updateInfo.update_available && updateInfo.release_url && (
<a
href={updateInfo.release_url}
target="_blank"
rel="noreferrer"
className="rounded-lg bg-helios-solar px-3 py-2 text-xs font-bold text-helios-main hover:opacity-90 transition-opacity"
>
Download Update
</a>
)}
</div>
<p className="mt-2 text-xs text-helios-slate">
{updateInfo.update_available
? "A newer stable release is available."
: "You are on the latest stable release."}
</p>
</div>
)}
</div>
) : (
<div className="flex justify-center p-8">

View File

@@ -0,0 +1,216 @@
import { useEffect, useState } from "react";
import { KeyRound, Plus, ShieldCheck, Trash2 } from "lucide-react";
import { apiAction, apiJson, isApiError } from "../lib/api";
import { showToast } from "../lib/toast";
import ConfirmDialog from "./ui/ConfirmDialog";
/** Privilege tiers supported by static API tokens. */
type ApiTokenAccessLevel = "read_only" | "full_access";

/** API token record as returned by `GET /api/settings/api-tokens`. */
interface ApiToken {
  id: number;
  name: string;
  access_level: ApiTokenAccessLevel;
  created_at: string;
  last_used_at: string | null;
  /** Non-null once the token has been revoked (soft delete). */
  revoked_at: string | null;
}

/** Response of `POST /api/settings/api-tokens`; the plaintext is shown once. */
interface CreatedApiTokenResponse {
  token: ApiToken;
  plaintext_token: string;
}
export default function ApiTokenSettings() {
const [tokens, setTokens] = useState<ApiToken[]>([]);
const [loading, setLoading] = useState(true);
const [name, setName] = useState("");
const [accessLevel, setAccessLevel] = useState<ApiTokenAccessLevel>("read_only");
const [error, setError] = useState<string | null>(null);
const [pendingDeleteId, setPendingDeleteId] = useState<number | null>(null);
const [createdTokenValue, setCreatedTokenValue] = useState<string | null>(null);
useEffect(() => {
void fetchTokens();
}, []);
  // Load the current token list; records an error message on failure and
  // always clears the loading flag.
  const fetchTokens = async () => {
    try {
      const data = await apiJson<ApiToken[]>("/api/settings/api-tokens");
      setTokens(data);
      setError(null);
    } catch (err) {
      setError(isApiError(err) ? err.message : "Failed to load API tokens.");
    } finally {
      setLoading(false);
    }
  };
  // Submit handler for the creation form: posts the name/access level,
  // prepends the new record, surfaces the one-time plaintext secret, and
  // resets the form fields.
  const handleCreate = async (event: React.FormEvent) => {
    event.preventDefault();
    try {
      const payload = await apiJson<CreatedApiTokenResponse>("/api/settings/api-tokens", {
        method: "POST",
        headers: { "Content-Type": "application/json" },
        body: JSON.stringify({
          name,
          access_level: accessLevel,
        }),
      });
      setTokens((current) => [payload.token, ...current]);
      // The plaintext is only ever returned by this one response.
      setCreatedTokenValue(payload.plaintext_token);
      setName("");
      setAccessLevel("read_only");
      showToast({
        kind: "success",
        title: "API Tokens",
        message: "Token created. Copy it now — it will not be shown again.",
      });
    } catch (err) {
      const message = isApiError(err) ? err.message : "Failed to create API token.";
      setError(message);
      showToast({ kind: "error", title: "API Tokens", message });
    }
  };
const handleRevoke = async (id: number) => {
try {
await apiAction(`/api/settings/api-tokens/${id}`, { method: "DELETE" });
setTokens((current) =>
current.map((token) =>
token.id === id
? { ...token, revoked_at: new Date().toISOString() }
: token,
),
);
showToast({
kind: "success",
title: "API Tokens",
message: "Token revoked.",
});
} catch (err) {
const message = isApiError(err) ? err.message : "Failed to revoke token.";
setError(message);
showToast({ kind: "error", title: "API Tokens", message });
}
};
return (
<div className="space-y-6" aria-live="polite">
<div className="rounded-xl border border-helios-line/20 bg-helios-surface-soft p-4">
<div className="flex items-center gap-2 text-sm font-semibold text-helios-ink">
<ShieldCheck size={16} className="text-helios-solar" />
Static API Tokens
</div>
<p className="mt-1 text-xs text-helios-slate">
Read-only tokens are observability-only. Full-access tokens can do everything an authenticated session can do.
</p>
</div>
{error && (
<div className="rounded-lg border border-status-error/20 bg-status-error/10 px-4 py-3 text-sm text-status-error">
{error}
</div>
)}
{createdTokenValue && (
<div className="rounded-lg border border-helios-solar/30 bg-helios-solar/10 px-4 py-3">
<p className="text-xs font-semibold text-helios-main">Copy this token now</p>
<p className="mt-2 break-all font-mono text-sm text-helios-ink">{createdTokenValue}</p>
</div>
)}
<form onSubmit={handleCreate} className="grid gap-4 rounded-xl border border-helios-line/20 bg-helios-surface p-4 md:grid-cols-[1fr_220px_auto]">
<div>
<label className="block text-xs font-medium text-helios-slate mb-1">Token Name</label>
<input
value={name}
onChange={(event) => setName(event.target.value)}
className="w-full bg-helios-surface-soft border border-helios-line/20 rounded p-2 text-sm text-helios-ink"
placeholder="Home Assistant"
required
/>
</div>
<div>
<label className="block text-xs font-medium text-helios-slate mb-1">Access Level</label>
<select
value={accessLevel}
onChange={(event) => setAccessLevel(event.target.value as ApiTokenAccessLevel)}
className="w-full bg-helios-surface-soft border border-helios-line/20 rounded p-2 text-sm text-helios-ink"
>
<option value="read_only">Read Only</option>
<option value="full_access">Full Access</option>
</select>
</div>
<button
type="submit"
className="self-end flex items-center justify-center gap-2 rounded-lg bg-helios-solar px-4 py-2 text-sm font-bold text-helios-main"
>
<Plus size={16} />
Create Token
</button>
</form>
{loading ? (
<div className="text-sm text-helios-slate animate-pulse">Loading API tokens</div>
) : (
<div className="space-y-3">
{tokens.map((token) => (
<div key={token.id} className="flex items-center justify-between gap-4 rounded-xl border border-helios-line/10 bg-helios-surface p-4">
<div className="flex items-center gap-4">
<div className="rounded-lg bg-helios-surface-soft p-2 text-helios-slate">
<KeyRound size={18} />
</div>
<div>
<h3 className="text-sm font-bold text-helios-ink">{token.name}</h3>
<div className="mt-1 flex flex-wrap items-center gap-2 text-xs text-helios-slate">
<span className="rounded bg-helios-surface-soft px-1.5 py-0.5">
{token.access_level}
</span>
<span>Created {new Date(token.created_at).toLocaleString()}</span>
<span>
{token.last_used_at
? `Last used ${new Date(token.last_used_at).toLocaleString()}`
: "Never used"}
</span>
{token.revoked_at && (
<span className="text-status-error">
Revoked {new Date(token.revoked_at).toLocaleString()}
</span>
)}
</div>
</div>
</div>
<button
onClick={() => setPendingDeleteId(token.id)}
disabled={Boolean(token.revoked_at)}
className="rounded-lg border border-red-500/20 p-2 text-red-500 hover:bg-red-500/10 disabled:opacity-40"
aria-label={`Revoke API token ${token.name}`}
>
<Trash2 size={16} />
</button>
</div>
))}
{tokens.length === 0 && (
<div className="rounded-xl border border-helios-line/10 bg-helios-surface p-6 text-sm text-helios-slate">
No API tokens created yet.
</div>
)}
</div>
)}
<ConfirmDialog
open={pendingDeleteId !== null}
title="Revoke API token"
description="Revoke this token? Existing automations or scripts using it will stop working immediately."
confirmLabel="Revoke"
tone="danger"
onClose={() => setPendingDeleteId(null)}
onConfirm={async () => {
if (pendingDeleteId === null) return;
await handleRevoke(pendingDeleteId);
setPendingDeleteId(null);
}}
/>
</div>
);
}

View File

@@ -1,5 +1,6 @@
import { useEffect } from "react";
import { apiFetch, apiJson } from "../lib/api";
import { stripBasePath, withBasePath } from "../lib/basePath";
interface SetupStatus {
setup_required?: boolean;
@@ -10,7 +11,7 @@ export default function AuthGuard() {
let cancelled = false;
const checkAuth = async () => {
const path = window.location.pathname;
const path = stripBasePath(window.location.pathname);
const isAuthPage = path.startsWith("/login") || path.startsWith("/setup");
if (isAuthPage) {
return;
@@ -27,7 +28,9 @@ export default function AuthGuard() {
return;
}
window.location.href = setupStatus.setup_required ? "/setup" : "/login";
window.location.href = setupStatus.setup_required
? withBasePath("/setup")
: withBasePath("/login");
} catch {
// Keep user on current page on transient backend/network failures.
}

View File

@@ -0,0 +1,525 @@
import { useEffect, useState } from "react";
import { Upload, Wand2, Play, Download, Trash2 } from "lucide-react";
import { apiAction, apiFetch, apiJson, isApiError } from "../lib/api";
import { withBasePath } from "../lib/basePath";
import { showToast } from "../lib/toast";
// One subtitle stream reported by the server-side probe of the uploaded file.
interface SubtitleStreamMetadata {
  stream_index: number;
  codec_name: string;
  language?: string; // presumably an ISO language tag — confirm against backend
  title?: string;
  burnable: boolean; // whether this stream can be burned into the video
}

// One audio stream reported by the server-side probe.
interface AudioStreamMetadata {
  stream_index: number;
  codec_name: string;
  language?: string;
  title?: string;
  channels?: number;
}

// Probe result for an uploaded source file (container + stream metadata).
interface MediaAnalysis {
  metadata: {
    container: string;
    codec_name: string; // video codec of the primary stream
    width: number;
    height: number;
    dynamic_range: string;
    audio_streams: AudioStreamMetadata[];
    subtitle_streams: SubtitleStreamMetadata[];
  };
}

// Full conversion form state; mirrors the server's normalized settings shape
// (the server echoes a normalized copy back on upload and preview).
interface ConversionSettings {
  output_container: string;
  remux_only: boolean; // when true, forces stream copy and disables re-encode controls
  video: {
    codec: string;
    mode: string; // "crf" | "bitrate" per the UI options
    value: number | null; // quality value or bitrate, depending on mode
    preset: string | null;
    resolution: {
      mode: string; // "original" | "custom" | "scale_factor" per the UI options
      width: number | null;
      height: number | null;
      scale_factor: number | null;
    };
    hdr_mode: string;
  };
  audio: {
    codec: string;
    bitrate_kbps: number | null;
    channels: string | null;
  };
  subtitles: {
    mode: string; // "copy" | "burn" | "remove" per the UI options
  };
}

// POST /api/conversion/uploads response.
interface UploadResponse {
  conversion_job_id: number;
  probe: MediaAnalysis;
  normalized_settings: ConversionSettings;
}

// POST /api/conversion/preview response: normalized settings plus the
// FFmpeg command line that would run.
interface PreviewResponse {
  normalized_settings: ConversionSettings;
  command_preview: string;
}

// GET /api/conversion/jobs/:id response, polled while a job exists.
interface JobStatusResponse {
  id: number;
  status: string;
  progress: number; // percentage, rendered with one decimal place
  linked_job_id: number | null;
  output_path: string | null;
  download_ready: boolean;
  probe: MediaAnalysis | null;
}
// Initial conversion form state. Used before an upload response supplies
// server-normalized settings, and restored when a job is removed.
const DEFAULT_SETTINGS: ConversionSettings = {
  output_container: "mkv",
  remux_only: false,
  video: {
    codec: "hevc",
    mode: "crf",
    value: 24, // CRF quality value (mode is "crf" by default)
    preset: "medium",
    resolution: {
      mode: "original",
      width: null,
      height: null,
      scale_factor: null,
    },
    hdr_mode: "preserve",
  },
  audio: {
    codec: "copy",
    bitrate_kbps: 160, // ignored while codec is "copy"; used once re-encoding
    channels: "auto",
  },
  subtitles: {
    mode: "copy",
  },
};
export default function ConversionTool() {
const [uploading, setUploading] = useState(false);
const [previewing, setPreviewing] = useState(false);
const [starting, setStarting] = useState(false);
const [status, setStatus] = useState<JobStatusResponse | null>(null);
const [conversionJobId, setConversionJobId] = useState<number | null>(null);
const [probe, setProbe] = useState<MediaAnalysis | null>(null);
const [settings, setSettings] = useState<ConversionSettings>(DEFAULT_SETTINGS);
const [commandPreview, setCommandPreview] = useState("");
const [error, setError] = useState<string | null>(null);
useEffect(() => {
if (!conversionJobId) return;
const id = window.setInterval(() => {
void apiJson<JobStatusResponse>(`/api/conversion/jobs/${conversionJobId}`)
.then(setStatus)
.catch(() => {});
}, 2000);
return () => window.clearInterval(id);
}, [conversionJobId]);
const updateSettings = (patch: Partial<ConversionSettings>) => {
setSettings((current) => ({ ...current, ...patch }));
};
const uploadFile = async (file: File) => {
setUploading(true);
setError(null);
try {
const formData = new FormData();
formData.append("file", file);
const response = await apiFetch("/api/conversion/uploads", {
method: "POST",
body: formData,
});
if (!response.ok) {
throw new Error(await response.text());
}
const payload = (await response.json()) as UploadResponse;
setConversionJobId(payload.conversion_job_id);
setProbe(payload.probe);
setSettings(payload.normalized_settings);
setStatus(null);
setCommandPreview("");
showToast({
kind: "success",
title: "Conversion",
message: "File uploaded and probed.",
});
} catch (err) {
const message = err instanceof Error ? err.message : "Upload failed";
setError(message);
showToast({ kind: "error", title: "Conversion", message });
} finally {
setUploading(false);
}
};
const preview = async () => {
if (!conversionJobId) return;
setPreviewing(true);
try {
const payload = await apiJson<PreviewResponse>("/api/conversion/preview", {
method: "POST",
headers: { "Content-Type": "application/json" },
body: JSON.stringify({
conversion_job_id: conversionJobId,
settings,
}),
});
setSettings(payload.normalized_settings);
setCommandPreview(payload.command_preview);
showToast({ kind: "success", title: "Conversion", message: "Preview updated." });
} catch (err) {
const message = isApiError(err) ? err.message : "Preview failed";
setError(message);
showToast({ kind: "error", title: "Conversion", message });
} finally {
setPreviewing(false);
}
};
const start = async () => {
if (!conversionJobId) return;
setStarting(true);
try {
await apiAction(`/api/conversion/jobs/${conversionJobId}/start`, { method: "POST" });
const payload = await apiJson<JobStatusResponse>(`/api/conversion/jobs/${conversionJobId}`);
setStatus(payload);
showToast({ kind: "success", title: "Conversion", message: "Conversion job queued." });
} catch (err) {
const message = isApiError(err) ? err.message : "Failed to start conversion";
setError(message);
showToast({ kind: "error", title: "Conversion", message });
} finally {
setStarting(false);
}
};
const remove = async () => {
if (!conversionJobId) return;
try {
await apiAction(`/api/conversion/jobs/${conversionJobId}`, { method: "DELETE" });
setConversionJobId(null);
setProbe(null);
setStatus(null);
setSettings(DEFAULT_SETTINGS);
setCommandPreview("");
showToast({ kind: "success", title: "Conversion", message: "Conversion job removed." });
} catch (err) {
const message = isApiError(err) ? err.message : "Failed to remove conversion job";
setError(message);
showToast({ kind: "error", title: "Conversion", message });
}
};
const download = async () => {
if (!conversionJobId) return;
window.location.href = withBasePath(`/api/conversion/jobs/${conversionJobId}/download`);
};
return (
<div className="space-y-6">
<div>
<h1 className="text-xl font-bold text-helios-ink">Conversion / Remux</h1>
<p className="mt-1 text-sm text-helios-slate">
Upload a single file, inspect the streams, preview the generated FFmpeg command, and run it through Alchemist.
</p>
</div>
{error && (
<div className="rounded-lg border border-status-error/20 bg-status-error/10 px-4 py-3 text-sm text-status-error">
{error}
</div>
)}
{!probe && (
<label className="flex flex-col items-center justify-center gap-3 rounded-xl border border-dashed border-helios-line/30 bg-helios-surface p-10 text-center cursor-pointer hover:bg-helios-surface-soft transition-colors">
<Upload size={28} className="text-helios-solar" />
<div>
<p className="text-sm font-semibold text-helios-ink">Upload a source file</p>
<p className="text-xs text-helios-slate mt-1">The uploaded file is stored temporarily under Alchemist-managed temp storage.</p>
</div>
<input
type="file"
className="hidden"
onChange={(event) => {
const file = event.target.files?.[0];
if (file) {
void uploadFile(file);
}
}}
disabled={uploading}
/>
<span className="rounded-lg bg-helios-solar px-4 py-2 text-sm font-bold text-helios-main">
{uploading ? "Uploading..." : "Choose File"}
</span>
</label>
)}
{probe && (
<>
<section className="rounded-xl border border-helios-line/20 bg-helios-surface p-5 space-y-4">
<h2 className="text-sm font-semibold text-helios-ink">Input</h2>
<div className="grid gap-3 md:grid-cols-4 text-sm">
<Stat label="Container" value={probe.metadata.container} />
<Stat label="Video" value={probe.metadata.codec_name} />
<Stat label="Resolution" value={`${probe.metadata.width}x${probe.metadata.height}`} />
<Stat label="Dynamic Range" value={probe.metadata.dynamic_range} />
</div>
</section>
<section className="rounded-xl border border-helios-line/20 bg-helios-surface p-5 space-y-4">
<h2 className="text-sm font-semibold text-helios-ink">Output Container</h2>
<select value={settings.output_container} onChange={(event) => updateSettings({ output_container: event.target.value })} className="w-full md:w-60 bg-helios-surface-soft border border-helios-line/20 rounded p-2 text-sm text-helios-ink">
{["mkv", "mp4", "webm", "mov"].map((option) => (
<option key={option} value={option}>{option.toUpperCase()}</option>
))}
</select>
</section>
<section className="rounded-xl border border-helios-line/20 bg-helios-surface p-5 space-y-4">
<div className="flex items-center justify-between gap-4">
<h2 className="text-sm font-semibold text-helios-ink">Remux Mode</h2>
<label className="flex items-center gap-2 text-sm text-helios-ink">
<input
type="checkbox"
checked={settings.remux_only}
onChange={(event) => updateSettings({ remux_only: event.target.checked })}
/>
Remux only
</label>
</div>
<p className="text-xs text-helios-slate">
Remux mode forces stream copy and disables re-encoding controls.
</p>
</section>
<section className="rounded-xl border border-helios-line/20 bg-helios-surface p-5 space-y-4">
<h2 className="text-sm font-semibold text-helios-ink">Video</h2>
<div className="grid gap-4 md:grid-cols-2">
<SelectField
label="Codec"
value={settings.video.codec}
disabled={settings.remux_only}
options={["copy", "h264", "hevc", "av1"]}
onChange={(value) => setSettings((current) => ({ ...current, video: { ...current.video, codec: value } }))}
/>
<SelectField
label="Mode"
value={settings.video.mode}
disabled={settings.remux_only || settings.video.codec === "copy"}
options={["crf", "bitrate"]}
onChange={(value) => setSettings((current) => ({ ...current, video: { ...current.video, mode: value } }))}
/>
<NumberField
label={settings.video.mode === "bitrate" ? "Bitrate (kbps)" : "Quality Value"}
value={settings.video.value ?? 0}
disabled={settings.remux_only || settings.video.codec === "copy"}
onChange={(value) => setSettings((current) => ({ ...current, video: { ...current.video, value } }))}
/>
<SelectField
label="Preset"
value={settings.video.preset ?? "medium"}
disabled={settings.remux_only || settings.video.codec === "copy"}
options={["ultrafast", "superfast", "veryfast", "faster", "fast", "medium", "slow", "slower", "veryslow"]}
onChange={(value) => setSettings((current) => ({ ...current, video: { ...current.video, preset: value } }))}
/>
<SelectField
label="Resolution Mode"
value={settings.video.resolution.mode}
disabled={settings.remux_only || settings.video.codec === "copy"}
options={["original", "custom", "scale_factor"]}
onChange={(value) => setSettings((current) => ({ ...current, video: { ...current.video, resolution: { ...current.video.resolution, mode: value } } }))}
/>
<SelectField
label="HDR"
value={settings.video.hdr_mode}
disabled={settings.remux_only || settings.video.codec === "copy"}
options={["preserve", "tonemap", "strip_metadata"]}
onChange={(value) => setSettings((current) => ({ ...current, video: { ...current.video, hdr_mode: value } }))}
/>
{settings.video.resolution.mode === "custom" && (
<>
<NumberField
label="Width"
value={settings.video.resolution.width ?? probe.metadata.width}
disabled={settings.remux_only || settings.video.codec === "copy"}
onChange={(value) => setSettings((current) => ({ ...current, video: { ...current.video, resolution: { ...current.video.resolution, width: value } } }))}
/>
<NumberField
label="Height"
value={settings.video.resolution.height ?? probe.metadata.height}
disabled={settings.remux_only || settings.video.codec === "copy"}
onChange={(value) => setSettings((current) => ({ ...current, video: { ...current.video, resolution: { ...current.video.resolution, height: value } } }))}
/>
</>
)}
{settings.video.resolution.mode === "scale_factor" && (
<NumberField
label="Scale Factor"
value={settings.video.resolution.scale_factor ?? 1}
disabled={settings.remux_only || settings.video.codec === "copy"}
step="0.1"
onChange={(value) => setSettings((current) => ({ ...current, video: { ...current.video, resolution: { ...current.video.resolution, scale_factor: value } } }))}
/>
)}
</div>
</section>
<section className="rounded-xl border border-helios-line/20 bg-helios-surface p-5 space-y-4">
<h2 className="text-sm font-semibold text-helios-ink">Audio</h2>
<div className="grid gap-4 md:grid-cols-3">
<SelectField
label="Codec"
value={settings.audio.codec}
disabled={settings.remux_only}
options={["copy", "aac", "opus", "mp3"]}
onChange={(value) => setSettings((current) => ({ ...current, audio: { ...current.audio, codec: value } }))}
/>
<NumberField
label="Bitrate (kbps)"
value={settings.audio.bitrate_kbps ?? 160}
disabled={settings.remux_only || settings.audio.codec === "copy"}
onChange={(value) => setSettings((current) => ({ ...current, audio: { ...current.audio, bitrate_kbps: value } }))}
/>
<SelectField
label="Channels"
value={settings.audio.channels ?? "auto"}
disabled={settings.remux_only || settings.audio.codec === "copy"}
options={["auto", "stereo", "5.1"]}
onChange={(value) => setSettings((current) => ({ ...current, audio: { ...current.audio, channels: value } }))}
/>
</div>
</section>
<section className="rounded-xl border border-helios-line/20 bg-helios-surface p-5 space-y-4">
<h2 className="text-sm font-semibold text-helios-ink">Subtitles</h2>
<SelectField
label="Mode"
value={settings.subtitles.mode}
disabled={settings.remux_only}
options={["copy", "burn", "remove"]}
onChange={(value) => setSettings((current) => ({ ...current, subtitles: { mode: value } }))}
/>
</section>
<section className="rounded-xl border border-helios-line/20 bg-helios-surface p-5 space-y-4">
<div className="flex flex-wrap gap-3">
<button onClick={() => void preview()} disabled={previewing} className="flex items-center gap-2 rounded-lg bg-helios-solar px-4 py-2 text-sm font-bold text-helios-main">
<Wand2 size={16} />
{previewing ? "Previewing..." : "Preview Command"}
</button>
<button onClick={() => void start()} disabled={starting || !commandPreview} className="flex items-center gap-2 rounded-lg border border-helios-line/20 px-4 py-2 text-sm font-semibold text-helios-ink">
<Play size={16} />
{starting ? "Starting..." : "Start Job"}
</button>
<button onClick={() => void download()} disabled={!status?.download_ready} className="flex items-center gap-2 rounded-lg border border-helios-line/20 px-4 py-2 text-sm font-semibold text-helios-ink disabled:opacity-50">
<Download size={16} />
Download Result
</button>
<button onClick={() => void remove()} className="flex items-center gap-2 rounded-lg border border-red-500/20 px-4 py-2 text-sm font-semibold text-red-500">
<Trash2 size={16} />
Remove
</button>
</div>
{commandPreview && (
<pre className="overflow-x-auto rounded-lg border border-helios-line/20 bg-helios-surface-soft p-4 text-xs text-helios-ink whitespace-pre-wrap">
{commandPreview}
</pre>
)}
</section>
{status && (
<section className="rounded-xl border border-helios-line/20 bg-helios-surface p-5 space-y-3">
<h2 className="text-sm font-semibold text-helios-ink">Status</h2>
<div className="grid gap-3 md:grid-cols-4 text-sm">
<Stat label="State" value={status.status} />
<Stat label="Progress" value={`${status.progress.toFixed(1)}%`} />
<Stat label="Linked Job" value={status.linked_job_id ? `#${status.linked_job_id}` : "None"} />
<Stat label="Download" value={status.download_ready ? "Ready" : "Pending"} />
</div>
</section>
)}
</>
)}
</div>
);
}
/** Small labelled metric tile used in the Input and Status sections. */
function Stat(props: { label: string; value: string }) {
  const { label, value } = props;
  return (
    <div className="rounded-lg border border-helios-line/20 bg-helios-surface-soft/40 px-4 py-3">
      <div className="text-xs text-helios-slate">{label}</div>
      <div className="mt-1 font-mono text-sm font-semibold text-helios-ink">{value}</div>
    </div>
  );
}
/** Labelled <select> over a fixed list of string options; dimmed when disabled. */
function SelectField(props: {
  label: string;
  value: string;
  options: string[];
  onChange: (value: string) => void;
  disabled?: boolean;
}) {
  const { label, value, options, onChange, disabled } = props;
  const renderedOptions = options.map((option) => (
    <option key={option} value={option}>
      {option}
    </option>
  ));
  return (
    <div>
      <label className="block text-xs font-medium text-helios-slate mb-1">{label}</label>
      <select
        value={value}
        disabled={disabled}
        onChange={(event) => onChange(event.target.value)}
        className="w-full bg-helios-surface-soft border border-helios-line/20 rounded p-2 text-sm text-helios-ink disabled:opacity-50"
      >
        {renderedOptions}
      </select>
    </div>
  );
}
/** Labelled numeric <input>; reports Number(event.target.value) on change. */
function NumberField(props: {
  label: string;
  value: number;
  onChange: (value: number) => void;
  disabled?: boolean;
  step?: string;
}) {
  const { label, value, onChange, disabled, step = "1" } = props;
  return (
    <div>
      <label className="block text-xs font-medium text-helios-slate mb-1">{label}</label>
      <input
        type="number"
        value={value}
        step={step}
        disabled={disabled}
        onChange={(event) => onChange(Number(event.target.value))}
        className="w-full bg-helios-surface-soft border border-helios-line/20 rounded p-2 text-sm text-helios-ink disabled:opacity-50"
      />
    </div>
  );
}

View File

@@ -9,6 +9,7 @@ import {
type LucideIcon,
} from "lucide-react";
import { apiJson, isApiError } from "../lib/api";
import { withBasePath } from "../lib/basePath";
import { useSharedStats } from "../lib/statsStore";
import { showToast } from "../lib/toast";
import ResourceMonitor from "./ResourceMonitor";
@@ -144,7 +145,7 @@ function Dashboard() {
}
if (setupComplete !== "true") {
window.location.href = "/setup";
window.location.href = withBasePath("/setup");
}
}
} catch {
@@ -232,7 +233,7 @@ function Dashboard() {
<Activity size={16} className="text-helios-solar" />
Recent Activity
</h3>
<a href="/jobs" className="text-xs font-medium text-helios-solar hover:underline">
<a href={withBasePath("/jobs")} className="text-xs font-medium text-helios-solar hover:underline">
View all
</a>
</div>
@@ -248,7 +249,7 @@ function Dashboard() {
<span className="text-sm text-helios-slate/60">
No recent activity.
</span>
<a href="/settings" className="text-xs text-helios-solar hover:underline">
<a href={withBasePath("/settings")} className="text-xs text-helios-solar hover:underline">
Add a library folder
</a>
</div>

View File

@@ -3,6 +3,7 @@ import { Info, LogOut, Play, Square } from "lucide-react";
import { motion } from "framer-motion";
import AboutDialog from "./AboutDialog";
import { apiAction, apiJson } from "../lib/api";
import { withBasePath } from "../lib/basePath";
import { useSharedStats } from "../lib/statsStore";
import { showToast } from "../lib/toast";
@@ -146,7 +147,7 @@ export default function HeaderActions() {
message: "Logout request failed. Redirecting to login.",
});
} finally {
window.location.href = '/login';
window.location.href = withBasePath("/login");
}
};

View File

@@ -5,6 +5,7 @@ import {
Clock, X, Info, Activity, Database, Zap, Maximize2, MoreHorizontal, ArrowDown, ArrowUp, AlertCircle
} from "lucide-react";
import { apiAction, apiJson, isApiError } from "../lib/api";
import { withBasePath } from "../lib/basePath";
import { useDebouncedValue } from "../lib/useDebouncedValue";
import { showToast } from "../lib/toast";
import ConfirmDialog from "./ui/ConfirmDialog";
@@ -664,7 +665,7 @@ function JobManager() {
const connect = () => {
if (cancelled) return;
eventSource?.close();
eventSource = new EventSource("/api/events");
eventSource = new EventSource(withBasePath("/api/events"));
eventSource.onopen = () => {
// Reset reconnect attempts on successful connection

View File

@@ -1,5 +1,5 @@
import { useEffect, useState } from "react";
import { Copy, AlertTriangle } from "lucide-react";
import { AlertTriangle, Copy, Sparkles } from "lucide-react";
import { apiJson, isApiError } from "../lib/api";
import { showToast } from "../lib/toast";
@@ -15,9 +15,26 @@ interface DuplicateGroup {
paths: DuplicatePath[];
}
interface RecommendationCounts {
duplicates: number;
remux_only_candidate: number;
wasteful_audio_layout: number;
commentary_cleanup_candidate: number;
}
interface IntelligenceRecommendation {
type: string;
title: string;
summary: string;
path: string;
suggested_action: string;
}
interface IntelligenceResponse {
duplicate_groups: DuplicateGroup[];
total_duplicates: number;
recommendation_counts: RecommendationCounts;
recommendations: IntelligenceRecommendation[];
}
const STATUS_DOT: Record<string, string> = {
@@ -31,6 +48,12 @@ const STATUS_DOT: Record<string, string> = {
queued: "bg-helios-slate/30",
};
const TYPE_LABELS: Record<string, string> = {
remux_only_candidate: "Remux Opportunities",
wasteful_audio_layout: "Wasteful Audio Layouts",
commentary_cleanup_candidate: "Commentary Cleanup",
};
export default function LibraryIntelligence() {
const [data, setData] = useState<IntelligenceResponse | null>(null);
const [loading, setLoading] = useState(true);
@@ -57,12 +80,21 @@ export default function LibraryIntelligence() {
void fetch();
}, []);
const groupedRecommendations = data?.recommendations.reduce<Record<string, IntelligenceRecommendation[]>>(
(groups, recommendation) => {
groups[recommendation.type] ??= [];
groups[recommendation.type].push(recommendation);
return groups;
},
{},
) ?? {};
return (
<div className="flex flex-col gap-6">
<div>
<h1 className="text-xl font-bold text-helios-ink">Library Intelligence</h1>
<p className="mt-1 text-sm text-helios-slate">
Files that appear more than once across your library, grouped by filename.
Deterministic storage-focused recommendations based on duplicate detection, planner output, and stream metadata.
</p>
</div>
@@ -80,31 +112,52 @@ export default function LibraryIntelligence() {
{data && (
<>
<div className="grid grid-cols-2 gap-3">
<div className="rounded-lg border border-helios-line/30 bg-helios-surface px-5 py-4">
<p className="text-xs font-medium text-helios-slate">
Duplicate groups
</p>
<p className="mt-1 font-mono text-2xl font-bold text-helios-ink">
{data.duplicate_groups.length}
</p>
<div className="grid grid-cols-2 lg:grid-cols-4 gap-3">
<StatCard label="Duplicate groups" value={String(data.duplicate_groups.length)} accent="text-helios-ink" />
<StatCard label="Extra copies" value={String(data.total_duplicates)} accent="text-helios-solar" />
<StatCard label="Remux opportunities" value={String(data.recommendation_counts.remux_only_candidate)} accent="text-helios-cyan" />
<StatCard label="Audio / commentary" value={String(data.recommendation_counts.wasteful_audio_layout + data.recommendation_counts.commentary_cleanup_candidate)} accent="text-helios-ink" />
</div>
<div className="rounded-lg border border-helios-line/30 bg-helios-surface px-5 py-4">
<p className="text-xs font-medium text-helios-slate">Extra copies</p>
<p className="mt-1 font-mono text-2xl font-bold text-helios-solar">
{data.total_duplicates}
</p>
{Object.keys(groupedRecommendations).length > 0 && (
<div className="space-y-4">
{Object.entries(groupedRecommendations).map(([type, recommendations]) => (
<section key={type} className="rounded-lg border border-helios-line/30 bg-helios-surface overflow-hidden">
<div className="flex items-center gap-2 border-b border-helios-line/20 bg-helios-surface-soft/40 px-5 py-3">
<Sparkles size={14} className="text-helios-solar" />
<h2 className="text-sm font-semibold text-helios-ink">
{TYPE_LABELS[type] ?? type}
</h2>
</div>
<div className="divide-y divide-helios-line/10">
{recommendations.map((recommendation, index) => (
<div key={`${recommendation.path}-${index}`} className="px-5 py-4">
<div className="flex items-center justify-between gap-4">
<div>
<h3 className="text-sm font-semibold text-helios-ink">{recommendation.title}</h3>
<p className="mt-1 text-sm text-helios-slate">{recommendation.summary}</p>
</div>
</div>
<p className="mt-3 break-all font-mono text-xs text-helios-slate">{recommendation.path}</p>
<div className="mt-3 rounded-lg border border-helios-line/20 bg-helios-surface-soft/40 px-3 py-2 text-xs text-helios-ink">
<span className="font-semibold text-helios-solar">Suggested action:</span> {recommendation.suggested_action}
</div>
</div>
))}
</div>
</section>
))}
</div>
)}
{data.duplicate_groups.length === 0 ? (
<div className="flex flex-col items-center justify-center gap-3 rounded-lg border border-helios-line/30 bg-helios-surface p-10 text-center">
<AlertTriangle size={28} className="text-helios-slate/40" />
<p className="text-sm font-medium text-helios-ink">
No duplicates found
No duplicate groups found
</p>
<p className="max-w-xs text-xs text-helios-slate">
Every filename in your library appears to be unique.
Every tracked basename in your library appears to be unique.
</p>
</div>
) : (
@@ -159,3 +212,20 @@ export default function LibraryIntelligence() {
</div>
);
}
/** Summary-count tile for the intelligence page; `accent` is a text-color class. */
function StatCard(props: { label: string; value: string; accent: string }) {
  const { label, value, accent } = props;
  return (
    <div className="rounded-lg border border-helios-line/30 bg-helios-surface px-5 py-4">
      <p className="text-xs font-medium text-helios-slate">{label}</p>
      <p className={`mt-1 font-mono text-2xl font-bold ${accent}`}>{value}</p>
    </div>
  );
}

View File

@@ -3,6 +3,7 @@ import { Terminal, Pause, Play, Trash2, RefreshCw, Search } from "lucide-react";
import { clsx, type ClassValue } from "clsx";
import { twMerge } from "tailwind-merge";
import { apiAction, apiJson, isApiError } from "../lib/api";
import { withBasePath } from "../lib/basePath";
import { showToast } from "../lib/toast";
import ConfirmDialog from "./ui/ConfirmDialog";
@@ -72,7 +73,7 @@ export default function LogViewer() {
setStreamError(null);
eventSource?.close();
eventSource = new EventSource("/api/events");
eventSource = new EventSource(withBasePath("/api/events"));
const appendLog = (message: string, level: string, jobId?: number) => {
if (pausedRef.current) {

View File

@@ -1,33 +1,144 @@
import { useState, useEffect } from "react";
import { Plus, Trash2, Zap } from "lucide-react";
import { useEffect, useState } from "react";
import { Bell, Plus, Trash2, Zap } from "lucide-react";
import { apiAction, apiJson, isApiError } from "../lib/api";
import { showToast } from "../lib/toast";
import ConfirmDialog from "./ui/ConfirmDialog";
type NotificationTargetType =
| "discord_webhook"
| "discord_bot"
| "gotify"
| "webhook"
| "telegram"
| "email";
interface NotificationTarget {
id: number;
name: string;
target_type: "gotify" | "discord" | "webhook";
endpoint_url: string;
auth_token?: string;
events: string;
target_type: NotificationTargetType;
config_json: Record<string, unknown>;
events: string[];
enabled: boolean;
created_at: string;
}
const TARGET_TYPES: NotificationTarget["target_type"][] = ["discord", "gotify", "webhook"];
interface NotificationsSettingsResponse {
daily_summary_time_local: string;
targets: NotificationTarget[];
}
interface LegacyNotificationTarget {
id: number;
name: string;
target_type: "discord" | "gotify" | "webhook";
endpoint_url: string;
auth_token: string | null;
events: string;
enabled: boolean;
created_at?: string;
}
// Target types selectable in the "add target" form; `label` is the
// human-readable text rendered in the type <select>.
const TARGET_TYPES: Array<{ value: NotificationTargetType; label: string }> = [
{ value: "discord_webhook", label: "Discord Webhook" },
{ value: "discord_bot", label: "Discord Bot" },
{ value: "gotify", label: "Gotify" },
{ value: "webhook", label: "Generic Webhook" },
{ value: "telegram", label: "Telegram" },
{ value: "email", label: "Email" },
];
// Every event name a target can subscribe to (rendered as toggle pills).
const EVENT_OPTIONS = [
"encode.queued",
"encode.started",
"encode.completed",
"encode.failed",
"scan.completed",
"engine.idle",
"daily.summary",
];
/**
 * One-line, human-readable summary of where a notification target delivers
 * (webhook URL, channel id, recipient list, ...).
 *
 * `config_json` is untyped JSON from the server / legacy migration, so every
 * field is defensively coerced rather than trusted; a malformed config must
 * never throw while rendering the targets list.
 */
function targetSummary(target: NotificationTarget): string {
  const config = target.config_json;
  switch (target.target_type) {
    case "discord_webhook":
      return String(config.webhook_url ?? "");
    case "discord_bot":
      return `channel ${String(config.channel_id ?? "")}`;
    case "gotify":
      return String(config.server_url ?? "");
    case "webhook":
      return String(config.url ?? "");
    case "telegram":
      return `chat ${String(config.chat_id ?? "")}`;
    case "email":
      // Guard the shape instead of asserting `as string[]`: a non-array
      // value (e.g. a plain string) would otherwise throw at render time.
      return Array.isArray(config.to_addresses) ? config.to_addresses.join(", ") : "";
    default:
      return "";
  }
}
/**
 * Upgrade a notification target from either API shape to the current one.
 *
 * Newer servers return targets with `config_json` and `events: string[]`;
 * legacy servers return flat `endpoint_url` / `auth_token` fields with
 * `events` as a JSON-encoded string, and the legacy `"discord"` type maps to
 * `"discord_webhook"`.
 */
function normalizeTarget(target: NotificationTarget | LegacyNotificationTarget): NotificationTarget {
  if ("config_json" in target) {
    return target;
  }
  const normalizedType: NotificationTargetType =
    target.target_type === "discord" ? "discord_webhook" : target.target_type;
  const config_json =
    normalizedType === "discord_webhook"
      ? { webhook_url: target.endpoint_url }
      : normalizedType === "gotify"
        ? { server_url: target.endpoint_url, app_token: target.auth_token ?? "" }
        : { url: target.endpoint_url, auth_token: target.auth_token ?? "" };
  // Legacy `events` is a JSON string; tolerate malformed rows instead of
  // letting one bad record throw and break the entire settings page.
  let events: string[] = [];
  try {
    const parsed: unknown = JSON.parse(target.events);
    if (Array.isArray(parsed)) {
      events = parsed.map((eventName) => String(eventName));
    }
  } catch {
    events = [];
  }
  return {
    id: target.id,
    name: target.name,
    target_type: normalizedType,
    config_json,
    events,
    enabled: target.enabled,
    created_at: target.created_at ?? new Date().toISOString(),
  };
}
/**
 * Build a fresh (never shared) config object pre-populated with the fields
 * the backend expects for the given target type.
 */
function defaultConfigForType(type: NotificationTargetType): Record<string, unknown> {
  // Factories (not shared literals) so each call yields independent objects
  // and nested values like `to_addresses` are never aliased across drafts.
  const factories: Record<NotificationTargetType, () => Record<string, unknown>> = {
    discord_webhook: () => ({ webhook_url: "" }),
    discord_bot: () => ({ bot_token: "", channel_id: "" }),
    gotify: () => ({ server_url: "", app_token: "" }),
    webhook: () => ({ url: "", auth_token: "" }),
    telegram: () => ({ bot_token: "", chat_id: "" }),
    email: () => ({
      smtp_host: "",
      smtp_port: 587,
      username: "",
      password: "",
      from_address: "",
      to_addresses: [""],
      security: "starttls",
    }),
  };
  return factories[type]();
}
export default function NotificationSettings() {
const [targets, setTargets] = useState<NotificationTarget[]>([]);
const [dailySummaryTime, setDailySummaryTime] = useState("09:00");
const [loading, setLoading] = useState(true);
const [testingId, setTestingId] = useState<number | null>(null);
const [error, setError] = useState<string | null>(null);
const [showForm, setShowForm] = useState(false);
const [newName, setNewName] = useState("");
const [newType, setNewType] = useState<NotificationTarget["target_type"]>("discord");
const [newUrl, setNewUrl] = useState("");
const [newToken, setNewToken] = useState("");
const [newEvents, setNewEvents] = useState<string[]>(["completed", "failed"]);
const [draftName, setDraftName] = useState("");
const [draftType, setDraftType] = useState<NotificationTargetType>("discord_webhook");
const [draftConfig, setDraftConfig] = useState<Record<string, unknown>>(defaultConfigForType("discord_webhook"));
const [draftEvents, setDraftEvents] = useState<string[]>(["encode.completed", "encode.failed"]);
const [pendingDeleteId, setPendingDeleteId] = useState<number | null>(null);
useEffect(() => {
@@ -36,8 +147,16 @@ export default function NotificationSettings() {
const fetchTargets = async () => {
try {
const data = await apiJson<NotificationTarget[]>("/api/settings/notifications");
setTargets(data);
const data = await apiJson<NotificationsSettingsResponse | LegacyNotificationTarget[]>(
"/api/settings/notifications",
);
if (Array.isArray(data)) {
setTargets(data.map(normalizeTarget));
setDailySummaryTime("09:00");
} else {
setTargets(data.targets.map(normalizeTarget));
setDailySummaryTime(data.daily_summary_time_local);
}
setError(null);
} catch (e) {
const message = isApiError(e) ? e.message : "Failed to load notification targets";
@@ -47,6 +166,32 @@ export default function NotificationSettings() {
}
};
// Persist the single global daily-summary send time (targets opt in
// individually via the "daily.summary" event); surfaces errors both in the
// inline error banner and as a toast.
const saveDailySummaryTime = async () => {
  const payload = JSON.stringify({ daily_summary_time_local: dailySummaryTime });
  try {
    await apiAction("/api/settings/notifications", {
      method: "PUT",
      headers: { "Content-Type": "application/json" },
      body: payload,
    });
    showToast({ kind: "success", title: "Notifications", message: "Daily summary time saved." });
  } catch (err) {
    const message = isApiError(err) ? err.message : "Failed to save daily summary time";
    setError(message);
    showToast({ kind: "error", title: "Notifications", message });
  }
};
// Reset the "add target" form back to a pristine draft for the given type
// (defaults to a Discord webhook, e.g. after a successful create).
const resetDraft = (type: NotificationTargetType = "discord_webhook") => {
setDraftName("");
setDraftType(type);
setDraftConfig(defaultConfigForType(type));
setDraftEvents(["encode.completed", "encode.failed"]);
};
const handleAdd = async (e: React.FormEvent) => {
e.preventDefault();
try {
@@ -54,18 +199,15 @@ export default function NotificationSettings() {
method: "POST",
headers: { "Content-Type": "application/json" },
body: JSON.stringify({
name: newName,
target_type: newType,
endpoint_url: newUrl,
auth_token: newToken || null,
events: newEvents,
name: draftName,
target_type: draftType,
config_json: draftConfig,
events: draftEvents,
enabled: true,
}),
});
setShowForm(false);
setNewName("");
setNewUrl("");
setNewToken("");
resetDraft();
setError(null);
await fetchTargets();
showToast({ kind: "success", title: "Notifications", message: "Target added." });
@@ -92,29 +234,17 @@ export default function NotificationSettings() {
const handleTest = async (target: NotificationTarget) => {
setTestingId(target.id);
try {
let events: string[] = [];
try {
const parsed = JSON.parse(target.events);
if (Array.isArray(parsed)) {
events = parsed;
}
} catch {
events = [];
}
await apiAction("/api/settings/notifications/test", {
method: "POST",
headers: { "Content-Type": "application/json" },
body: JSON.stringify({
name: target.name,
target_type: target.target_type,
endpoint_url: target.endpoint_url,
auth_token: target.auth_token,
events,
config_json: target.config_json,
events: target.events,
enabled: target.enabled,
}),
});
showToast({ kind: "success", title: "Notifications", message: "Test notification sent." });
} catch (e) {
const message = isApiError(e) ? e.message : "Test notification failed";
@@ -126,18 +256,46 @@ export default function NotificationSettings() {
};
const toggleEvent = (evt: string) => {
if (newEvents.includes(evt)) {
setNewEvents(newEvents.filter(e => e !== evt));
} else {
setNewEvents([...newEvents, evt]);
}
setDraftEvents((current) =>
current.includes(evt)
? current.filter((candidate) => candidate !== evt)
: [...current, evt],
);
};
// Shallow-merge a single key into the draft config without clobbering
// the other fields of the current target type.
const setConfigField = (key: string, value: unknown) => {
setDraftConfig((current) => ({ ...current, [key]: value }));
};
return (
<div className="space-y-6" aria-live="polite">
<div className="flex justify-end mb-6">
<div className="grid gap-4 md:grid-cols-[1fr_auto] items-end">
<div className="rounded-xl border border-helios-line/20 bg-helios-surface-soft p-4">
<div className="flex items-center gap-2 text-sm font-semibold text-helios-ink">
<Bell size={16} className="text-helios-solar" />
Daily Summary Time
</div>
<p className="mt-1 text-xs text-helios-slate">
Daily summaries are opt-in per target, but they all use one global local-time send window.
</p>
<input
type="time"
value={dailySummaryTime}
onChange={(event) => setDailySummaryTime(event.target.value)}
className="mt-3 w-full max-w-xs bg-helios-surface border border-helios-line/20 rounded p-2 text-sm text-helios-ink"
/>
</div>
<button
onClick={() => setShowForm(!showForm)}
onClick={() => void saveDailySummaryTime()}
className="rounded-lg border border-helios-line/20 px-4 py-2 text-sm font-semibold text-helios-ink hover:bg-helios-surface-soft transition-colors"
>
Save Summary Time
</button>
</div>
<div className="flex justify-end">
<button
onClick={() => setShowForm((current) => !current)}
className="flex items-center gap-2 px-3 py-1.5 bg-helios-surface border border-helios-line/30 hover:bg-helios-surface-soft text-helios-ink rounded-lg text-xs font-medium transition-colors"
>
<Plus size={14} />
@@ -157,8 +315,8 @@ export default function NotificationSettings() {
<div>
<label className="block text-xs font-medium text-helios-slate mb-1">Name</label>
<input
value={newName}
onChange={e => setNewName(e.target.value)}
value={draftName}
onChange={(event) => setDraftName(event.target.value)}
className="w-full bg-helios-surface border border-helios-line/20 rounded p-2 text-sm text-helios-ink"
placeholder="My Discord"
required
@@ -167,53 +325,177 @@ export default function NotificationSettings() {
<div>
<label className="block text-xs font-medium text-helios-slate mb-1">Type</label>
<select
value={newType}
onChange={e => setNewType(e.target.value as NotificationTarget["target_type"])}
value={draftType}
onChange={(event) => {
const nextType = event.target.value as NotificationTargetType;
setDraftType(nextType);
setDraftConfig(defaultConfigForType(nextType));
}}
className="w-full bg-helios-surface border border-helios-line/20 rounded p-2 text-sm text-helios-ink"
>
{TARGET_TYPES.map((type) => (
<option key={type} value={type}>
{type === "discord" ? "Discord Webhook" : type === "gotify" ? "Gotify" : "Generic Webhook"}
<option key={type.value} value={type.value}>
{type.label}
</option>
))}
</select>
</div>
</div>
<div>
<label className="block text-xs font-medium text-helios-slate mb-1">Endpoint URL</label>
<input
value={newUrl}
onChange={e => setNewUrl(e.target.value)}
className="w-full bg-helios-surface border border-helios-line/20 rounded p-2 text-sm text-helios-ink font-mono"
{draftType === "discord_webhook" && (
<TextField
label="Webhook URL"
value={String(draftConfig.webhook_url ?? "")}
onChange={(value) => setConfigField("webhook_url", value)}
placeholder="https://discord.com/api/webhooks/..."
required
/>
</div>
)}
<div>
<label className="block text-xs font-medium text-helios-slate mb-1">Auth Token (Optional)</label>
<input
value={newToken}
onChange={e => setNewToken(e.target.value)}
className="w-full bg-helios-surface border border-helios-line/20 rounded p-2 text-sm text-helios-ink font-mono"
placeholder="Bearer token or API Key"
{draftType === "discord_bot" && (
<div className="grid grid-cols-1 sm:grid-cols-2 gap-4">
<TextField
label="Bot Token"
value={String(draftConfig.bot_token ?? "")}
onChange={(value) => setConfigField("bot_token", value)}
placeholder="Discord bot token"
/>
<TextField
label="Channel ID"
value={String(draftConfig.channel_id ?? "")}
onChange={(value) => setConfigField("channel_id", value)}
placeholder="123456789012345678"
/>
</div>
)}
{draftType === "gotify" && (
<div className="grid grid-cols-1 sm:grid-cols-2 gap-4">
<TextField
label="Server URL"
value={String(draftConfig.server_url ?? "")}
onChange={(value) => setConfigField("server_url", value)}
placeholder="https://gotify.example.com/message"
/>
<TextField
label="App Token"
value={String(draftConfig.app_token ?? "")}
onChange={(value) => setConfigField("app_token", value)}
placeholder="Gotify app token"
/>
</div>
)}
{draftType === "webhook" && (
<div className="grid grid-cols-1 sm:grid-cols-2 gap-4">
<TextField
label="Endpoint URL"
value={String(draftConfig.url ?? "")}
onChange={(value) => setConfigField("url", value)}
placeholder="https://example.com/webhook"
/>
<TextField
label="Bearer Token (Optional)"
value={String(draftConfig.auth_token ?? "")}
onChange={(value) => setConfigField("auth_token", value)}
placeholder="Bearer token"
/>
</div>
)}
{draftType === "telegram" && (
<div className="grid grid-cols-1 sm:grid-cols-2 gap-4">
<TextField
label="Bot Token"
value={String(draftConfig.bot_token ?? "")}
onChange={(value) => setConfigField("bot_token", value)}
placeholder="Telegram bot token"
/>
<TextField
label="Chat ID"
value={String(draftConfig.chat_id ?? "")}
onChange={(value) => setConfigField("chat_id", value)}
placeholder="Telegram chat ID"
/>
</div>
)}
{draftType === "email" && (
<div className="grid grid-cols-1 sm:grid-cols-2 gap-4">
<TextField
label="SMTP Host"
value={String(draftConfig.smtp_host ?? "")}
onChange={(value) => setConfigField("smtp_host", value)}
placeholder="smtp.example.com"
/>
<TextField
label="SMTP Port"
value={String(draftConfig.smtp_port ?? 587)}
onChange={(value) => setConfigField("smtp_port", Number(value))}
placeholder="587"
/>
<TextField
label="Username"
value={String(draftConfig.username ?? "")}
onChange={(value) => setConfigField("username", value)}
placeholder="Optional"
/>
<TextField
label="Password"
value={String(draftConfig.password ?? "")}
onChange={(value) => setConfigField("password", value)}
placeholder="Optional"
/>
<TextField
label="From Address"
value={String(draftConfig.from_address ?? "")}
onChange={(value) => setConfigField("from_address", value)}
placeholder="alchemist@example.com"
/>
<TextField
label="To Addresses"
value={Array.isArray(draftConfig.to_addresses) ? String((draftConfig.to_addresses as string[]).join(", ")) : ""}
onChange={(value) =>
setConfigField(
"to_addresses",
value
.split(",")
.map((candidate) => candidate.trim())
.filter(Boolean),
)
}
placeholder="ops@example.com, alerts@example.com"
/>
<div>
<label className="block text-xs font-medium text-helios-slate mb-1">Security</label>
<select
value={String(draftConfig.security ?? "starttls")}
onChange={(event) => setConfigField("security", event.target.value)}
className="w-full bg-helios-surface border border-helios-line/20 rounded p-2 text-sm text-helios-ink"
>
<option value="starttls">STARTTLS</option>
<option value="tls">TLS / SMTPS</option>
<option value="none">None</option>
</select>
</div>
</div>
)}
<div>
<label className="block text-xs font-medium text-helios-slate mb-2">Events</label>
<div className="flex gap-4 flex-wrap">
{["completed", "failed", "queued"].map(evt => (
<label key={evt} className="flex items-center gap-2 text-sm text-helios-ink cursor-pointer">
<input
type="checkbox"
checked={newEvents.includes(evt)}
onChange={() => toggleEvent(evt)}
className="rounded border-helios-line/30 bg-helios-surface accent-helios-solar"
/>
<span className="capitalize">{evt}</span>
</label>
<div className="flex gap-2 flex-wrap">
{EVENT_OPTIONS.map((evt) => (
<button
key={evt}
type="button"
onClick={() => toggleEvent(evt)}
className={`rounded-full border px-3 py-2 text-xs font-semibold transition-all ${
draftEvents.includes(evt)
? "border-helios-solar bg-helios-solar/10 text-helios-ink"
: "border-helios-line/20 text-helios-slate"
}`}
>
{evt}
</button>
))}
</div>
</div>
@@ -228,19 +510,28 @@ export default function NotificationSettings() {
<div className="text-sm text-helios-slate animate-pulse">Loading targets</div>
) : (
<div className="space-y-3">
{targets.map(target => (
{targets.map((target) => (
<div key={target.id} className="flex items-center justify-between p-4 bg-helios-surface border border-helios-line/10 rounded-xl group/item">
<div className="flex items-center gap-4">
<div className="p-2 bg-helios-surface-soft rounded-lg text-helios-slate">
<Zap size={18} />
<Bell size={18} />
</div>
<div>
<div className="min-w-0">
<h3 className="font-bold text-sm text-helios-ink">{target.name}</h3>
<div className="flex items-center gap-2 mt-0.5">
<div className="flex items-center gap-2 mt-0.5 flex-wrap">
<span className="text-xs font-medium text-helios-slate bg-helios-surface-soft px-1.5 rounded">
{target.target_type}
</span>
<span className="text-xs text-helios-slate truncate max-w-[200px]">{target.endpoint_url}</span>
<span className="text-xs text-helios-slate break-all">
{targetSummary(target)}
</span>
</div>
<div className="mt-2 flex flex-wrap gap-2">
{target.events.map((eventName) => (
<span key={eventName} className="rounded-full border border-helios-line/20 px-2 py-0.5 text-[11px] text-helios-slate">
{eventName}
</span>
))}
</div>
</div>
</div>
@@ -281,3 +572,27 @@ export default function NotificationSettings() {
</div>
);
}
/** Props for the controlled text input used throughout the target form. */
type TextFieldProps = {
  label: string;
  value: string;
  onChange: (value: string) => void;
  placeholder: string;
};

/**
 * Small labelled text input. Fully controlled: the parent owns the value and
 * receives every keystroke through `onChange`.
 */
function TextField(props: TextFieldProps) {
  const { label, value, onChange, placeholder } = props;
  const inputClass =
    "w-full bg-helios-surface border border-helios-line/20 rounded p-2 text-sm text-helios-ink";
  return (
    <div>
      <label className="block text-xs font-medium text-helios-slate mb-1">{label}</label>
      <input
        value={value}
        onChange={(changeEvent) => onChange(changeEvent.target.value)}
        className={inputClass}
        placeholder={placeholder}
      />
    </div>
  );
}

View File

@@ -1,5 +1,5 @@
import { useEffect, useRef, useState } from "react";
import { FolderOpen, Bell, Calendar, FileCog, Cog, Server, LayoutGrid, Palette, Activity, FileCode2 } from "lucide-react";
import { FolderOpen, Bell, Calendar, FileCog, Cog, Server, LayoutGrid, Palette, Activity, FileCode2, KeyRound } from "lucide-react";
import WatchFolders from "./WatchFolders";
import NotificationSettings from "./NotificationSettings";
import ScheduleSettings from "./ScheduleSettings";
@@ -10,6 +10,7 @@ import HardwareSettings from "./HardwareSettings";
import AppearanceSettings from "./AppearanceSettings";
import QualitySettings from "./QualitySettings";
import ConfigEditorSettings from "./ConfigEditorSettings";
import ApiTokenSettings from "./ApiTokenSettings";
const TABS = [
{ id: "appearance", label: "Appearance", icon: Palette, component: AppearanceSettings },
@@ -19,6 +20,7 @@ const TABS = [
{ id: "files", label: "Output & Files", icon: FileCog, component: FileSettings },
{ id: "schedule", label: "Automation", icon: Calendar, component: ScheduleSettings },
{ id: "notifications", label: "Notifications", icon: Bell, component: NotificationSettings },
{ id: "api-tokens", label: "API Tokens", icon: KeyRound, component: ApiTokenSettings },
{ id: "hardware", label: "Hardware", icon: LayoutGrid, component: HardwareSettings },
{ id: "system", label: "Runtime", icon: Server, component: SystemSettings },
{ id: "config", label: "Config", icon: FileCode2, component: ConfigEditorSettings },

View File

@@ -1,5 +1,6 @@
import { useCallback, useEffect, useMemo, useRef, useState } from "react";
import { apiAction, apiJson, isApiError } from "../lib/api";
import { withBasePath } from "../lib/basePath";
import AdminAccountStep from "./setup/AdminAccountStep";
import LibraryStep from "./setup/LibraryStep";
import ProcessingStep from "./setup/ProcessingStep";
@@ -102,7 +103,7 @@ export default function SetupWizard() {
headers: { "Content-Type": "application/json" },
body: JSON.stringify({ key: "setup_complete", value: "true" }),
}).catch(() => undefined);
window.location.href = "/";
window.location.href = withBasePath("/");
} catch (err) {
let message = "Failed to save setup configuration.";
if (isApiError(err)) {
@@ -112,7 +113,7 @@ export default function SetupWizard() {
: "Setup configuration was rejected. Check that your username is at least 3 characters and password is at least 8 characters.";
} else if (err.status === 403) {
message = "Setup has already been completed. Redirecting to dashboard...";
setTimeout(() => { window.location.href = "/"; }, 1500);
setTimeout(() => { window.location.href = withBasePath("/"); }, 1500);
} else if (err.status >= 500) {
message = `Server error during setup (${err.status}). Check the Alchemist logs for details.`;
} else {

View File

@@ -3,6 +3,7 @@ import {
Activity,
Sparkles,
Settings,
Wand2,
Video,
Terminal,
BarChart3,
@@ -12,6 +13,12 @@ import {
import SystemStatus from "./SystemStatus.tsx";
const currentPath = Astro.url.pathname;
const basePath = "__ALCHEMIST_BASE_URL__";
const withBase = (href: string) => `${basePath}${href === "/" ? "/" : href}`;
const strippedPath =
basePath && currentPath.startsWith(basePath)
? currentPath.slice(basePath.length) || "/"
: currentPath;
const navItems = [
{ href: "/", label: "Dashboard", Icon: Activity },
@@ -19,13 +26,14 @@ const navItems = [
{ href: "/logs", label: "Logs", Icon: Terminal },
{ href: "/stats", label: "Statistics", Icon: BarChart3 },
{ href: "/intelligence", label: "Intelligence", Icon: Sparkles },
{ href: "/convert", label: "Convert", Icon: Wand2 },
{ href: "/settings", label: "Settings", Icon: Settings },
];
---
{/* Mobile top bar */}
<div class="lg:hidden flex items-center justify-between px-4 py-3 bg-helios-surface border-b border-helios-line/60">
<a href="/" class="font-bold text-lg tracking-tight text-helios-ink">Alchemist</a>
<a href={withBase("/")} class="font-bold text-lg tracking-tight text-helios-ink">Alchemist</a>
<button
id="sidebar-hamburger"
aria-label="Open navigation"
@@ -50,7 +58,7 @@ const navItems = [
transition-transform duration-200 lg:transition-none"
>
<a
href="/"
href={withBase("/")}
class="flex items-center px-3 pb-4 border-b border-helios-line/40"
>
<span class="font-bold text-lg tracking-tight text-helios-ink">
@@ -69,11 +77,11 @@ const navItems = [
{
navItems.map(({ href, label, Icon }) => {
const isActive =
currentPath === href ||
(href !== "/" && currentPath.startsWith(href));
strippedPath === href ||
(href !== "/" && strippedPath.startsWith(href));
return (
<a
href={href}
href={withBase(href)}
class:list={[
"flex items-center gap-3 px-3 py-2 rounded-md border-l-2 border-transparent transition-colors whitespace-nowrap",
isActive

View File

@@ -9,6 +9,7 @@ interface Props {
}
const { title } = Astro.props;
const basePath = "__ALCHEMIST_BASE_URL__";
---
<!doctype html>
@@ -17,9 +18,12 @@ const { title } = Astro.props;
<meta charset="UTF-8" />
<meta name="description" content="Alchemist Media Transcoder" />
<meta name="viewport" content="width=device-width" />
<link rel="icon" type="image/svg+xml" href="/favicon.svg" />
<link rel="icon" type="image/svg+xml" href={`${basePath}/favicon.svg`} />
<meta name="generator" content={Astro.generator} />
<title>{title}</title>
<script is:inline define:vars={{ basePath }}>
window.__ALCHEMIST_BASE_URL__ = basePath;
</script>
<ClientRouter />
</head>
<body>

View File

@@ -1,3 +1,5 @@
import { stripBasePath, withBasePath } from "./basePath";
export interface ApiErrorShape {
status: number;
message: string;
@@ -83,6 +85,7 @@ export function isApiError(error: unknown): error is ApiError {
* Authenticated fetch utility using cookie auth.
*/
export async function apiFetch(url: string, options: RequestInit = {}): Promise<Response> {
const resolvedUrl = withBasePath(url);
const headers = new Headers(options.headers);
if (!headers.has("Content-Type") && typeof options.body === "string") {
@@ -105,7 +108,7 @@ export async function apiFetch(url: string, options: RequestInit = {}): Promise<
}
try {
const response = await fetch(url, {
const response = await fetch(resolvedUrl, {
...options,
headers,
credentials: options.credentials ?? "same-origin",
@@ -113,10 +116,10 @@ export async function apiFetch(url: string, options: RequestInit = {}): Promise<
});
if (response.status === 401 && typeof window !== "undefined") {
const path = window.location.pathname;
const path = stripBasePath(window.location.pathname);
const isAuthPage = path.startsWith("/login") || path.startsWith("/setup");
if (!isAuthPage) {
window.location.href = "/login";
window.location.href = withBasePath("/login");
return new Promise(() => {});
}
}
@@ -133,7 +136,7 @@ export async function apiFetch(url: string, options: RequestInit = {}): Promise<
export async function apiJson<T>(url: string, options: RequestInit = {}): Promise<T> {
const response = await apiFetch(url, options);
if (!response.ok) {
throw await toApiError(url, response);
throw await toApiError(withBasePath(url), response);
}
return (await parseResponseBody(response)) as T;
}
@@ -141,7 +144,7 @@ export async function apiJson<T>(url: string, options: RequestInit = {}): Promis
export async function apiAction(url: string, options: RequestInit = {}): Promise<void> {
const response = await apiFetch(url, options);
if (!response.ok) {
throw await toApiError(url, response);
throw await toApiError(withBasePath(url), response);
}
}

53
web/src/lib/basePath.ts Normal file
View File

@@ -0,0 +1,53 @@
declare global {
  interface Window {
    // Injected by Layout.astro at render time; may still be the raw
    // placeholder token when the server performed no substitution.
    __ALCHEMIST_BASE_URL__?: string;
  }
}

// Build-time token the server substitutes; treated the same as "no base".
const PLACEHOLDER = "__ALCHEMIST_BASE_URL__";

/**
 * Canonicalize a configured base path: `undefined`, blank, a bare "/" and
 * the unsubstituted placeholder all collapse to "", and trailing slashes
 * are dropped so later concatenation never produces "//".
 */
function normalize(value: string | undefined): string {
  const trimmed = (value ?? "").trim();
  const isUnset = trimmed === "" || trimmed === "/" || trimmed === PLACEHOLDER;
  return isUnset ? "" : trimmed.replace(/\/+$/, "");
}

/** Base path for the current page, or "" during SSR / when unconfigured. */
export function getBasePath(): string {
  return typeof window === "undefined" ? "" : normalize(window.__ALCHEMIST_BASE_URL__);
}

/**
 * Prefix an app-relative path with the configured base path.
 * Fully-qualified URLs (scheme://...) pass through untouched; an empty path
 * resolves to the base itself (or "/" when no base is set).
 */
export function withBasePath(path: string): string {
  if (/^[a-z]+:\/\//i.test(path)) {
    return path;
  }
  const basePath = getBasePath();
  if (!path) {
    return basePath || "/";
  }
  const separator = path.startsWith("/") ? "" : "/";
  return `${basePath}${separator}${path}`;
}

/**
 * Inverse of `withBasePath`: remove the configured base from a pathname so
 * route checks compare against app-relative paths. Paths outside the base
 * are returned unchanged; empty input maps to "/".
 */
export function stripBasePath(pathname: string): string {
  const basePath = getBasePath();
  if (basePath) {
    if (pathname === basePath) {
      return "/";
    }
    if (pathname.startsWith(`${basePath}/`)) {
      return pathname.slice(basePath.length) || "/";
    }
  }
  return pathname || "/";
}

View File

@@ -3,6 +3,7 @@ import { Home } from "lucide-react";
import Layout from "../layouts/Layout.astro";
import Sidebar from "../components/Sidebar.astro";
import HeaderActions from "../components/HeaderActions.tsx";
const basePath = "__ALCHEMIST_BASE_URL__";
---
<Layout title="Alchemist | Page Not Found">
@@ -23,7 +24,7 @@ import HeaderActions from "../components/HeaderActions.tsx";
The page you're looking for couldn't be found. It may have moved or the URL might be wrong.
</p>
<a
href="/"
href={`${basePath}/`}
class="inline-flex items-center gap-2 mt-6 bg-helios-solar text-helios-main rounded-lg px-5 py-2.5 text-sm font-semibold hover:opacity-90 transition-opacity"
>
<Home size={16} />

View File

@@ -7,6 +7,7 @@ interface Props {
}
const { error } = Astro.props;
const basePath = "__ALCHEMIST_BASE_URL__";
---
<Layout title="Alchemist | Server Error">
@@ -28,7 +29,7 @@ const { error } = Astro.props;
) : null}
<a
href="/"
href={`${basePath}/`}
class="px-6 py-2.5 bg-helios-orange hover:bg-helios-orange/90 text-helios-main font-medium rounded-md transition-colors"
>
Return to Dashboard

View File

@@ -0,0 +1,20 @@
---
// Standalone "Convert" page: wraps the ad-hoc ConversionTool island in the
// standard app shell (sidebar + header actions). Both islands hydrate on
// load so the tool is interactive immediately.
import Layout from "../layouts/Layout.astro";
import Sidebar from "../components/Sidebar.astro";
import HeaderActions from "../components/HeaderActions.tsx";
import ConversionTool from "../components/ConversionTool.tsx";
---
<Layout title="Alchemist | Convert">
<div class="app-shell">
<Sidebar />
<main class="app-main overflow-y-auto">
<div class="flex items-center justify-end px-6 py-3 border-b border-helios-line/20">
<HeaderActions client:load />
</div>
<div class="p-6">
<ConversionTool client:load />
</div>
</main>
</div>
</Layout>

View File

@@ -68,11 +68,12 @@ import { ArrowRight } from "lucide-react";
<script>
import { apiAction, apiJson, isApiError } from "../lib/api";
const basePath = "__ALCHEMIST_BASE_URL__";
void apiJson<{ setup_required: boolean }>("/api/setup/status")
.then((data) => {
if (data?.setup_required) {
window.location.href = "/setup";
window.location.href = `${basePath}/setup`;
}
})
.catch(() => undefined);
@@ -95,7 +96,7 @@ import { ArrowRight } from "lucide-react";
credentials: 'same-origin',
body: JSON.stringify(data)
});
window.location.href = '/';
window.location.href = `${basePath}/`;
} catch (err) {
console.error("Login failed", err);
errorMsg?.classList.remove('hidden');

View File

@@ -15,10 +15,11 @@ import SetupSidebar from "../components/SetupSidebar.astro";
<script>
import { apiJson } from "../lib/api";
const basePath = "__ALCHEMIST_BASE_URL__";
apiJson<{ setup_required: boolean }>("/api/setup/status")
.then((data) => {
if (!data.setup_required) {
window.location.href = "/";
window.location.href = `${basePath}/`;
}
})
.catch(() => undefined);