Release v0.2.6-beta

- Add setup auth fixes and scheduler time validation for reliable onboarding
- Harden file watcher behavior under bursty filesystem events
- Improve DB stability with WAL, timeouts, FK enforcement, runtime legacy watch_dir support, and new indexes
- Add session cleanup task and include config dirs in initial scans
- Switch reqwest to rustls for cross-compilation without OpenSSL
- Add cross-platform build script (bun + zig + cargo-xwin) and design philosophy doc
- Bump documentation changelog for v0.2.6-beta
This commit is contained in:
brooklyn
2026-01-12 14:37:53 -05:00
parent e273fe3198
commit 503ac84fe0
16 changed files with 884 additions and 281 deletions

View File

@@ -4,20 +4,20 @@ All notable changes to this project will be documented in this file.
## [v0.2.5] - 2026-01-11 ## [v0.2.5] - 2026-01-11
### 🛠 Fixes ### Fixes
- **Dashboard Crash**: Fixed a critical bug where the dashboard would render as a blank screen if GPU utilization was `null`. Added strict null checks before `toFixed()` calls in `ResourceMonitor.tsx`. - **Dashboard Crash**: Fixed a critical bug where the dashboard would render as a blank screen if GPU utilization was `null`. Added strict null checks before `toFixed()` calls in `ResourceMonitor.tsx`.
- **Animation Glitch**: Resolved an issue where the "Engine Status" button would fly in from the top-left corner on page navigation. Implemented unique `layoutId` generation using `useId()` to maintain the morph animation while preventing cross-page artifacts. - **Animation Glitch**: Resolved an issue where the "Engine Status" button would fly in from the top-left corner on page navigation. Implemented unique `layoutId` generation using `useId()` to maintain the morph animation while preventing cross-page artifacts.
- **Migration Checksum**: Fixed a startup error caused by a modified migration file. Reverted the original migration to restore checksum integrity and created a new migration for the version bump. - **Migration Checksum**: Fixed a startup error caused by a modified migration file. Reverted the original migration to restore checksum integrity and created a new migration for the version bump.
### ⚡️ Improvements ### Improvements
- **Resource Monitor Layout**: Repositioned the GPU Usage section to appear between "Active Jobs" and "Uptime" for better logical flow. - **Resource Monitor Layout**: Repositioned the GPU Usage section to appear between "Active Jobs" and "Uptime" for better logical flow.
- **Animation Timing**: Adjusted staggered animation delays in the Resource Monitor to match the new layout order. - **Animation Timing**: Adjusted staggered animation delays in the Resource Monitor to match the new layout order.
### 📚 Documentation ### Documentation
- **Codebase Overview**: Added `codebase_overview.md` explaining the monolith architecture (Rust + API + Frontend) and directory structure. - **Codebase Overview**: Added `codebase_overview.md` explaining the monolith architecture (Rust + API + Frontend) and directory structure.
- **Migration Policy**: Updated `MIGRATIONS.md` to explicitly forbid modifying existing migration files to prevent checksum errors. - **Migration Policy**: Updated `MIGRATIONS.md` to explicitly forbid modifying existing migration files to prevent checksum errors.
- **Walkthrough**: Updated `walkthrough.md` with detailed debugging logs and verification steps for all recent changes. - **Walkthrough**: Updated `walkthrough.md` with detailed debugging logs and verification steps for all recent changes.
### 🏗 Infrastructure ### Infrastructure
- **Version Bump**: Updated project version to `0.2.5` in `Cargo.toml`, `web/package.json`, and `VERSION`. - **Version Bump**: Updated project version to `0.2.5` in `Cargo.toml`, `web/package.json`, and `VERSION`.
- **Database**: Established `0.2.5` as the new minimum compatible version schema baseline. - **Database**: Established `0.2.5` as the new minimum compatible version schema baseline.

383
Cargo.lock generated
View File

@@ -40,7 +40,7 @@ dependencies = [
"mime_guess", "mime_guess",
"notify", "notify",
"num_cpus", "num_cpus",
"rand", "rand 0.8.5",
"rayon", "rayon",
"reqwest", "reqwest",
"rust-embed", "rust-embed",
@@ -58,6 +58,7 @@ dependencies = [
"tower-http 0.5.2", "tower-http 0.5.2",
"tracing", "tracing",
"tracing-subscriber", "tracing-subscriber",
"uuid",
"walkdir", "walkdir",
] ]
@@ -374,6 +375,12 @@ version = "1.0.4"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "9330f8b2ff13f34540b44e946ef35111825727b38d33286ef986142615121801" checksum = "9330f8b2ff13f34540b44e946ef35111825727b38d33286ef986142615121801"
[[package]]
name = "cfg_aliases"
version = "0.2.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "613afe47fcd5fac7ccf1db93babcb082c5994d996f20b8b159f2ad1658eb5724"
[[package]] [[package]]
name = "chrono" name = "chrono"
version = "0.4.42" version = "0.4.42"
@@ -440,16 +447,6 @@ version = "0.9.6"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "c2459377285ad874054d797f3ccebf984978aa39129f6eafde5cdc8315b612f8" checksum = "c2459377285ad874054d797f3ccebf984978aa39129f6eafde5cdc8315b612f8"
[[package]]
name = "core-foundation"
version = "0.9.4"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "91e195e091a93c46f7102ec7818a2aa394e1e1771c3ab4825963fa03e45afb8f"
dependencies = [
"core-foundation-sys",
"libc",
]
[[package]] [[package]]
name = "core-foundation-sys" name = "core-foundation-sys"
version = "0.8.7" version = "0.8.7"
@@ -613,15 +610,6 @@ dependencies = [
"serde", "serde",
] ]
[[package]]
name = "encoding_rs"
version = "0.8.35"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "75030f3c4f45dafd7586dd6780965a8c7e8e285a5ecb86713e63a79c5b2766f3"
dependencies = [
"cfg-if",
]
[[package]] [[package]]
name = "equivalent" name = "equivalent"
version = "1.0.2" version = "1.0.2"
@@ -690,27 +678,6 @@ dependencies = [
"spin", "spin",
] ]
[[package]]
name = "fnv"
version = "1.0.7"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "3f9eec918d3f24069decb9af1554cad7c880e2da24a9afd88aca000531ab82c1"
[[package]]
name = "foreign-types"
version = "0.3.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "f6f339eb8adc052cd2ca78910fda869aefa38d22d5cb648e6485e4d3fc06f3b1"
dependencies = [
"foreign-types-shared",
]
[[package]]
name = "foreign-types-shared"
version = "0.1.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "00b0228411908ca8685dba7fc2cdd70ec9990a6e753e89b6ac91a84c40fbaf4b"
[[package]] [[package]]
name = "form_urlencoded" name = "form_urlencoded"
version = "1.2.2" version = "1.2.2"
@@ -864,8 +831,10 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "335ff9f135e4384c8150d6f27c6daed433577f86b4750418338c01a1a2528592" checksum = "335ff9f135e4384c8150d6f27c6daed433577f86b4750418338c01a1a2528592"
dependencies = [ dependencies = [
"cfg-if", "cfg-if",
"js-sys",
"libc", "libc",
"wasi", "wasi",
"wasm-bindgen",
] ]
[[package]] [[package]]
@@ -875,28 +844,11 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "899def5c37c4fd7b2664648c28120ecec138e4d395b459e5ca34f9cce2dd77fd" checksum = "899def5c37c4fd7b2664648c28120ecec138e4d395b459e5ca34f9cce2dd77fd"
dependencies = [ dependencies = [
"cfg-if", "cfg-if",
"js-sys",
"libc", "libc",
"r-efi", "r-efi",
"wasip2", "wasip2",
] "wasm-bindgen",
[[package]]
name = "h2"
version = "0.4.13"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "2f44da3a8150a6703ed5d34e164b875fd14c2cdab9af1252a9a1020bde2bdc54"
dependencies = [
"atomic-waker",
"bytes",
"fnv",
"futures-core",
"futures-sink",
"http",
"indexmap",
"slab",
"tokio",
"tokio-util",
"tracing",
] ]
[[package]] [[package]]
@@ -1039,7 +991,6 @@ dependencies = [
"bytes", "bytes",
"futures-channel", "futures-channel",
"futures-core", "futures-core",
"h2",
"http", "http",
"http-body", "http-body",
"httparse", "httparse",
@@ -1066,22 +1017,7 @@ dependencies = [
"tokio", "tokio",
"tokio-rustls", "tokio-rustls",
"tower-service", "tower-service",
] "webpki-roots",
[[package]]
name = "hyper-tls"
version = "0.6.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "70206fc6890eaca9fde8a0bf71caa2ddfc9fe045ac9e5c70df101a7dbde866e0"
dependencies = [
"bytes",
"http-body-util",
"hyper",
"hyper-util",
"native-tls",
"tokio",
"tokio-native-tls",
"tower-service",
] ]
[[package]] [[package]]
@@ -1103,11 +1039,9 @@ dependencies = [
"percent-encoding", "percent-encoding",
"pin-project-lite", "pin-project-lite",
"socket2", "socket2",
"system-configuration",
"tokio", "tokio",
"tower-service", "tower-service",
"tracing", "tracing",
"windows-registry",
] ]
[[package]] [[package]]
@@ -1411,6 +1345,12 @@ version = "0.4.29"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "5e5032e24019045c762d3c0f28f5b6b8bbf38563a65908389bf7978758920897" checksum = "5e5032e24019045c762d3c0f28f5b6b8bbf38563a65908389bf7978758920897"
[[package]]
name = "lru-slab"
version = "0.1.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "112b39cec0b298b6c1999fee3e31427f74f676e4cb9879ed1a121b43661a4154"
[[package]] [[package]]
name = "matchers" name = "matchers"
version = "0.2.0" version = "0.2.0"
@@ -1493,23 +1433,6 @@ dependencies = [
"windows-sys 0.61.2", "windows-sys 0.61.2",
] ]
[[package]]
name = "native-tls"
version = "0.2.14"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "87de3442987e9dbec73158d5c715e7ad9072fda936bb03d19d7fa10e00520f0e"
dependencies = [
"libc",
"log",
"openssl",
"openssl-probe",
"openssl-sys",
"schannel",
"security-framework",
"security-framework-sys",
"tempfile",
]
[[package]] [[package]]
name = "newline-converter" name = "newline-converter"
version = "0.3.0" version = "0.3.0"
@@ -1577,7 +1500,7 @@ dependencies = [
"num-integer", "num-integer",
"num-iter", "num-iter",
"num-traits", "num-traits",
"rand", "rand 0.8.5",
"smallvec", "smallvec",
"zeroize", "zeroize",
] ]
@@ -1634,50 +1557,6 @@ version = "1.70.2"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "384b8ab6d37215f3c5301a95a4accb5d64aa607f1fcb26a11b5303878451b4fe" checksum = "384b8ab6d37215f3c5301a95a4accb5d64aa607f1fcb26a11b5303878451b4fe"
[[package]]
name = "openssl"
version = "0.10.75"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "08838db121398ad17ab8531ce9de97b244589089e290a384c900cb9ff7434328"
dependencies = [
"bitflags 2.10.0",
"cfg-if",
"foreign-types",
"libc",
"once_cell",
"openssl-macros",
"openssl-sys",
]
[[package]]
name = "openssl-macros"
version = "0.1.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "a948666b637a0f465e8564c73e89d4dde00d72d4d473cc972f390fc3dcee7d9c"
dependencies = [
"proc-macro2",
"quote",
"syn 2.0.114",
]
[[package]]
name = "openssl-probe"
version = "0.1.6"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "d05e27ee213611ffe7d6348b942e8f942b37114c00cc03cec254295a4a17852e"
[[package]]
name = "openssl-sys"
version = "0.9.111"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "82cab2d520aa75e3c58898289429321eb788c3106963d0dc886ec7a5f4adc321"
dependencies = [
"cc",
"libc",
"pkg-config",
"vcpkg",
]
[[package]] [[package]]
name = "parking_lot" name = "parking_lot"
version = "0.12.5" version = "0.12.5"
@@ -1708,7 +1587,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "346f04948ba92c43e8469c1ee6736c7563d71012b17d40745260fe106aac2166" checksum = "346f04948ba92c43e8469c1ee6736c7563d71012b17d40745260fe106aac2166"
dependencies = [ dependencies = [
"base64ct", "base64ct",
"rand_core", "rand_core 0.6.4",
"subtle", "subtle",
] ]
@@ -1799,6 +1678,61 @@ dependencies = [
"unicode-ident", "unicode-ident",
] ]
[[package]]
name = "quinn"
version = "0.11.9"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "b9e20a958963c291dc322d98411f541009df2ced7b5a4f2bd52337638cfccf20"
dependencies = [
"bytes",
"cfg_aliases",
"pin-project-lite",
"quinn-proto",
"quinn-udp",
"rustc-hash",
"rustls",
"socket2",
"thiserror 2.0.17",
"tokio",
"tracing",
"web-time",
]
[[package]]
name = "quinn-proto"
version = "0.11.13"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "f1906b49b0c3bc04b5fe5d86a77925ae6524a19b816ae38ce1e426255f1d8a31"
dependencies = [
"bytes",
"getrandom 0.3.4",
"lru-slab",
"rand 0.9.2",
"ring",
"rustc-hash",
"rustls",
"rustls-pki-types",
"slab",
"thiserror 2.0.17",
"tinyvec",
"tracing",
"web-time",
]
[[package]]
name = "quinn-udp"
version = "0.5.14"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "addec6a0dcad8a8d96a771f815f0eaf55f9d1805756410b39f5fa81332574cbd"
dependencies = [
"cfg_aliases",
"libc",
"once_cell",
"socket2",
"tracing",
"windows-sys 0.52.0",
]
[[package]] [[package]]
name = "quote" name = "quote"
version = "1.0.43" version = "1.0.43"
@@ -1821,8 +1755,18 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "34af8d1a0e25924bc5b7c43c079c942339d8f0a8b57c39049bef581b46327404" checksum = "34af8d1a0e25924bc5b7c43c079c942339d8f0a8b57c39049bef581b46327404"
dependencies = [ dependencies = [
"libc", "libc",
"rand_chacha", "rand_chacha 0.3.1",
"rand_core", "rand_core 0.6.4",
]
[[package]]
name = "rand"
version = "0.9.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "6db2770f06117d490610c7488547d543617b21bfa07796d7a12f6f1bd53850d1"
dependencies = [
"rand_chacha 0.9.0",
"rand_core 0.9.4",
] ]
[[package]] [[package]]
@@ -1832,7 +1776,17 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "e6c10a63a0fa32252be49d21e7709d4d4baf8d231c2dbce1eaa8141b9b127d88" checksum = "e6c10a63a0fa32252be49d21e7709d4d4baf8d231c2dbce1eaa8141b9b127d88"
dependencies = [ dependencies = [
"ppv-lite86", "ppv-lite86",
"rand_core", "rand_core 0.6.4",
]
[[package]]
name = "rand_chacha"
version = "0.9.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "d3022b5f1df60f26e1ffddd6c66e8aa15de382ae63b3a0c1bfc0e4d3e3f325cb"
dependencies = [
"ppv-lite86",
"rand_core 0.9.4",
] ]
[[package]] [[package]]
@@ -1844,6 +1798,15 @@ dependencies = [
"getrandom 0.2.16", "getrandom 0.2.16",
] ]
[[package]]
name = "rand_core"
version = "0.9.4"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "4f1b3bc831f92381018fd9c6350b917c7b21f1eed35a65a51900e0e55a3d7afa"
dependencies = [
"getrandom 0.3.4",
]
[[package]] [[package]]
name = "rayon" name = "rayon"
version = "1.11.0" version = "1.11.0"
@@ -1907,29 +1870,26 @@ checksum = "eddd3ca559203180a307f12d114c268abf583f59b03cb906fd0b3ff8646c1147"
dependencies = [ dependencies = [
"base64 0.22.1", "base64 0.22.1",
"bytes", "bytes",
"encoding_rs",
"futures-core", "futures-core",
"h2",
"http", "http",
"http-body", "http-body",
"http-body-util", "http-body-util",
"hyper", "hyper",
"hyper-rustls", "hyper-rustls",
"hyper-tls",
"hyper-util", "hyper-util",
"js-sys", "js-sys",
"log", "log",
"mime",
"native-tls",
"percent-encoding", "percent-encoding",
"pin-project-lite", "pin-project-lite",
"quinn",
"rustls",
"rustls-pki-types", "rustls-pki-types",
"serde", "serde",
"serde_json", "serde_json",
"serde_urlencoded", "serde_urlencoded",
"sync_wrapper", "sync_wrapper",
"tokio", "tokio",
"tokio-native-tls", "tokio-rustls",
"tower 0.5.2", "tower 0.5.2",
"tower-http 0.6.8", "tower-http 0.6.8",
"tower-service", "tower-service",
@@ -1937,6 +1897,7 @@ dependencies = [
"wasm-bindgen", "wasm-bindgen",
"wasm-bindgen-futures", "wasm-bindgen-futures",
"web-sys", "web-sys",
"webpki-roots",
] ]
[[package]] [[package]]
@@ -1966,7 +1927,7 @@ dependencies = [
"num-traits", "num-traits",
"pkcs1", "pkcs1",
"pkcs8", "pkcs8",
"rand_core", "rand_core 0.6.4",
"signature", "signature",
"spki", "spki",
"subtle", "subtle",
@@ -2008,6 +1969,12 @@ dependencies = [
"walkdir", "walkdir",
] ]
[[package]]
name = "rustc-hash"
version = "2.1.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "357703d41365b4b27c590e3ed91eabb1b663f07c4c084095e60cbed4362dff0d"
[[package]] [[package]]
name = "rustix" name = "rustix"
version = "1.1.3" version = "1.1.3"
@@ -2028,6 +1995,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "c665f33d38cea657d9614f766881e4d510e0eda4239891eea56b4cadcf01801b" checksum = "c665f33d38cea657d9614f766881e4d510e0eda4239891eea56b4cadcf01801b"
dependencies = [ dependencies = [
"once_cell", "once_cell",
"ring",
"rustls-pki-types", "rustls-pki-types",
"rustls-webpki", "rustls-webpki",
"subtle", "subtle",
@@ -2040,6 +2008,7 @@ version = "1.13.2"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "21e6f2ab2928ca4291b86736a8bd920a277a399bba1589409d72154ff87c1282" checksum = "21e6f2ab2928ca4291b86736a8bd920a277a399bba1589409d72154ff87c1282"
dependencies = [ dependencies = [
"web-time",
"zeroize", "zeroize",
] ]
@@ -2075,44 +2044,12 @@ dependencies = [
"winapi-util", "winapi-util",
] ]
[[package]]
name = "schannel"
version = "0.1.28"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "891d81b926048e76efe18581bf793546b4c0eaf8448d72be8de2bbee5fd166e1"
dependencies = [
"windows-sys 0.61.2",
]
[[package]] [[package]]
name = "scopeguard" name = "scopeguard"
version = "1.2.0" version = "1.2.0"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "94143f37725109f92c262ed2cf5e59bce7498c01bcc1502d7b9afe439a4e9f49" checksum = "94143f37725109f92c262ed2cf5e59bce7498c01bcc1502d7b9afe439a4e9f49"
[[package]]
name = "security-framework"
version = "2.11.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "897b2245f0b511c87893af39b033e5ca9cce68824c4d7e7630b5a1d339658d02"
dependencies = [
"bitflags 2.10.0",
"core-foundation",
"core-foundation-sys",
"libc",
"security-framework-sys",
]
[[package]]
name = "security-framework-sys"
version = "2.15.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "cc1f0cbffaac4852523ce30d8bd3c5cdc873501d96ff467ca09b6767bb8cd5c0"
dependencies = [
"core-foundation-sys",
"libc",
]
[[package]] [[package]]
name = "serde" name = "serde"
version = "1.0.228" version = "1.0.228"
@@ -2263,7 +2200,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "77549399552de45a898a580c1b41d445bf730df867cc44e6c0233bbc4b8329de" checksum = "77549399552de45a898a580c1b41d445bf730df867cc44e6c0233bbc4b8329de"
dependencies = [ dependencies = [
"digest", "digest",
"rand_core", "rand_core 0.6.4",
] ]
[[package]] [[package]]
@@ -2439,7 +2376,7 @@ dependencies = [
"memchr", "memchr",
"once_cell", "once_cell",
"percent-encoding", "percent-encoding",
"rand", "rand 0.8.5",
"rsa", "rsa",
"serde", "serde",
"sha1", "sha1",
@@ -2479,7 +2416,7 @@ dependencies = [
"md-5", "md-5",
"memchr", "memchr",
"once_cell", "once_cell",
"rand", "rand 0.8.5",
"serde", "serde",
"serde_json", "serde_json",
"sha2", "sha2",
@@ -2610,27 +2547,6 @@ dependencies = [
"windows", "windows",
] ]
[[package]]
name = "system-configuration"
version = "0.6.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "3c879d448e9d986b661742763247d3693ed13609438cf3d006f51f5368a5ba6b"
dependencies = [
"bitflags 2.10.0",
"core-foundation",
"system-configuration-sys",
]
[[package]]
name = "system-configuration-sys"
version = "0.6.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "8e1d1b10ced5ca923a1fcb8d03e96b8d3268065d724548c0211415ff6ac6bac4"
dependencies = [
"core-foundation-sys",
"libc",
]
[[package]] [[package]]
name = "tempfile" name = "tempfile"
version = "3.24.0" version = "3.24.0"
@@ -2746,16 +2662,6 @@ dependencies = [
"syn 2.0.114", "syn 2.0.114",
] ]
[[package]]
name = "tokio-native-tls"
version = "0.3.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "bbae76ab933c85776efabc971569dd6119c580d8f5d448769dec1764bf796ef2"
dependencies = [
"native-tls",
"tokio",
]
[[package]] [[package]]
name = "tokio-rustls" name = "tokio-rustls"
version = "0.26.4" version = "0.26.4"
@@ -3075,6 +2981,17 @@ version = "0.2.2"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "06abde3611657adf66d383f00b093d7faecc7fa57071cce2578660c9f1010821" checksum = "06abde3611657adf66d383f00b093d7faecc7fa57071cce2578660c9f1010821"
[[package]]
name = "uuid"
version = "1.19.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "e2e054861b4bd027cd373e18e8d8d8e6548085000e41290d95ce0c373a654b4a"
dependencies = [
"getrandom 0.3.4",
"js-sys",
"wasm-bindgen",
]
[[package]] [[package]]
name = "valuable" name = "valuable"
version = "0.1.1" version = "0.1.1"
@@ -3201,6 +3118,25 @@ dependencies = [
"wasm-bindgen", "wasm-bindgen",
] ]
[[package]]
name = "web-time"
version = "1.1.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "5a6580f308b1fad9207618087a65c04e7a10bc77e02c8e84e9b00dd4b12fa0bb"
dependencies = [
"js-sys",
"wasm-bindgen",
]
[[package]]
name = "webpki-roots"
version = "1.0.5"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "12bed680863276c63889429bfd6cab3b99943659923822de1c8a39c49e4d722c"
dependencies = [
"rustls-pki-types",
]
[[package]] [[package]]
name = "whoami" name = "whoami"
version = "1.6.1" version = "1.6.1"
@@ -3327,17 +3263,6 @@ version = "0.2.1"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "f0805222e57f7521d6a62e36fa9163bc891acd422f971defe97d64e70d0a4fe5" checksum = "f0805222e57f7521d6a62e36fa9163bc891acd422f971defe97d64e70d0a4fe5"
[[package]]
name = "windows-registry"
version = "0.6.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "02752bf7fbdcce7f2a27a742f798510f3e5ad88dbe84871e5168e2120c3d5720"
dependencies = [
"windows-link",
"windows-result 0.4.1",
"windows-strings",
]
[[package]] [[package]]
name = "windows-result" name = "windows-result"
version = "0.1.2" version = "0.1.2"

View File

@@ -35,7 +35,7 @@ rayon = "1.10"
tokio-stream = { version = "0.1", features = ["sync"] } tokio-stream = { version = "0.1", features = ["sync"] }
thiserror = "2.0.17" thiserror = "2.0.17"
notify = { version = "6" } notify = { version = "6" }
reqwest = { version = "0.12", features = ["json"] } reqwest = { version = "0.12", default-features = false, features = ["json", "rustls-tls"] }
rust-embed = { version = "8", features = ["axum"] } rust-embed = { version = "8", features = ["axum"] }
mime_guess = "2.0" mime_guess = "2.0"
async-trait = "0.1" async-trait = "0.1"
@@ -43,3 +43,4 @@ argon2 = "0.5.3"
rand = "0.8" rand = "0.8"
serde_urlencoded = "0.7.1" serde_urlencoded = "0.7.1"
sysinfo = "0.32" sysinfo = "0.32"
uuid = { version = "1", features = ["v4"] }

93
DESIGN_PHILOSOPHY.md Normal file
View File

@@ -0,0 +1,93 @@
# Design Philosophy
This document defines the principles that govern every major and minor part of the Alchemist project.
It is meant to keep the system stable, coherent, and forward-compatible over time.
## 1) Product Intent
- Alchemist is a reliability-first media pipeline.
- The system favors predictability and correctness over novelty.
- Every feature should be operable by non-experts without removing power from experts.
## 2) Stability Over Novelty
- Do not introduce breaking changes unless there is no alternative.
- When in doubt, add new capabilities without removing old ones.
- Fail safe; make avoiding data loss the default outcome of any failure.
## 3) Backwards and Forwards Compatibility
- Databases created on v0.2.5+ must remain usable for all future versions.
- New code must read old data without requiring manual migration steps.
- Schema changes should be additive only:
- Add columns with defaults or nullable values.
- Add new tables rather than mutate or drop old ones.
- Never rename or remove columns.
- Compatibility logic in code must tolerate missing fields and legacy table shapes.
## 4) Reliability and Observability
- Favor deterministic behavior over clever heuristics.
- Every long-running process should be monitorable and cancellable.
- Log critical transitions and errors with actionable context.
## 5) Safety and Data Integrity
- Never overwrite user media by default.
- Always prefer reversible actions.
- Validate inputs at boundaries (API, CLI, filesystem).
- Defensive programming: assume file states can change at any time.
## 6) Performance and Scale
- Optimize for large libraries and long runtimes.
- Prefer bounded memory usage over raw speed.
- Use indexes and incremental scans for large datasets.
- Avoid unnecessary reprocessing or re-probing of files.
## 7) Security and Privacy
- Authentication and authorization are mandatory for protected APIs.
- Use secure defaults for tokens and cryptography.
- Telemetry must be opt-in, minimal, and anonymized.
## 8) Configuration Is a Contract
- Config changes must be validated and safe to apply live.
- Defaults should be safe and conservative.
- Every config option must have a clear and visible purpose.
## 9) UI and UX Consistency
- UI must reflect backend truth; avoid optimistic UI unless reconciled.
- Never hide errors; show the user what failed and why.
- UI should be fast, responsive, and readable on small screens.
## 10) Cross-Platform Discipline
- All core features must work on macOS, Linux, and Windows unless explicitly documented.
- Build pipelines must be deterministic and repeatable on CI and developer machines.
## 11) Incremental Architecture
- Prefer small, composable modules.
- Avoid tight coupling between UI and core pipeline logic.
- Stable APIs and event streams are more important than rapid refactors.
## 12) Testing and Verification
- Test the critical paths: scan, enqueue, analyze, encode, finalize.
- Every migration should be tested against a v0.2.5+ baseline DB.
- Tests must be deterministic and reproducible.
## 13) Documentation and Traceability
- Document behavior changes alongside code changes.
- Keep release notes aligned with schema evolution.
- Every new feature must include an explanation of its operational impact.
## 14) Maintenance and Lifecycle
- Add cleanup tasks for long-lived data (logs, sessions, temp files).
- Make maintenance tasks visible and safe to run.
- Avoid silent failures; surface and recover wherever possible.
## 15) Decision-Making Rules
- If a change risks data loss, do not merge it.
- If a change risks breaking older data, redesign it.
- If a change simplifies code but harms clarity or reliability, reject it.
## 16) Style and Engineering Practices
- Keep code explicit and readable; avoid cleverness.
- Keep functions small and well-named.
- Prefer explicit error handling over implicit fallbacks.
---
This philosophy is binding unless explicitly overridden in a documented exception.

View File

@@ -1182,6 +1182,18 @@ A:
## Changelog ## Changelog
### v0.2.6-beta
- Setup wizard now authenticates scan and hardware calls to prevent endless loading
- Scheduler window validation and normalized time handling
- File watcher no longer blocks on bursty filesystem events
- DB stability pass: WAL + busy timeout + foreign keys enabled
- Legacy watch directory schemas now supported at runtime
- Session cleanup task to prevent DB growth
- New DB indexes for faster jobs/logs/schedule/notifications queries
- Reqwest switched to rustls for cross-compiles without OpenSSL
- Cross-platform build script (bun + zig + cargo-xwin)
- Design philosophy added for consistent development standards
### v0.2.5 (Current) ### v0.2.5 (Current)
- Async runtime reliability improvements (spawn_blocking for ffprobe/VMAF/hardware detection) - Async runtime reliability improvements (spawn_blocking for ffprobe/VMAF/hardware detection)
- Accurate encode_speed and avg_bitrate_kbps metrics computed from actual media duration - Accurate encode_speed and avg_bitrate_kbps metrics computed from actual media duration
@@ -1239,4 +1251,4 @@ FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
--- ---
*Documentation for Alchemist v0.2.5+ • Last updated: January 2026* *Documentation for Alchemist v0.2.6-beta • Last updated: January 2026*

View File

@@ -0,0 +1,30 @@
-- Stability and performance indexes (v0.2.5+ compatible)
-- All statements use IF NOT EXISTS so the migration is safe to re-run
-- against databases that may already carry some of these indexes.

-- Job queue scans: filter by status, order by priority (highest first),
-- then oldest-first within a priority tier.
CREATE INDEX IF NOT EXISTS idx_jobs_status_priority_created_at
ON jobs(status, priority DESC, created_at);

-- Status dashboards / "recently changed jobs in state X" queries.
CREATE INDEX IF NOT EXISTS idx_jobs_status_updated_at
ON jobs(status, updated_at);

-- Cleanup and polling paths that look only at last-modified time.
CREATE INDEX IF NOT EXISTS idx_jobs_updated_at
ON jobs(updated_at);

-- Time-ordered log retrieval and retention pruning.
CREATE INDEX IF NOT EXISTS idx_logs_created_at
ON logs(created_at);

-- Per-job decision history, returned in chronological order.
CREATE INDEX IF NOT EXISTS idx_decisions_job_id_created_at
ON decisions(job_id, created_at);

-- Time-ordered encode statistics lookups.
CREATE INDEX IF NOT EXISTS idx_encode_stats_created_at
ON encode_stats(created_at);

-- Scheduler only considers enabled windows; keep that filter cheap.
CREATE INDEX IF NOT EXISTS idx_schedule_windows_enabled
ON schedule_windows(enabled);

-- Notification dispatch only considers enabled targets.
CREATE INDEX IF NOT EXISTS idx_notification_targets_enabled
ON notification_targets(enabled);

-- Bump schema metadata; OR REPLACE makes this idempotent on re-run.
INSERT OR REPLACE INTO schema_info (key, value) VALUES
('schema_version', '2'),
('min_compatible_version', '0.2.5'),
('last_updated', datetime('now'));

65
scripts/build_release.sh Executable file
View File

@@ -0,0 +1,65 @@
#!/usr/bin/env bash
# Cross-platform release build: web frontend (bun) + Rust binaries for
# macOS (native), Linux (cargo-zigbuild), and Windows MSVC (cargo-xwin).
set -euo pipefail

# Resolve the repository root relative to this script and work from there.
ROOT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")/.." && pwd)"
cd "$ROOT_DIR"

# Abort with an actionable message when a required tool is not on PATH.
require_tool() {
  local tool="$1"
  local message="$2"
  if ! command -v "$tool" >/dev/null 2>&1; then
    echo "$message" >&2
    exit 1
  fi
}

require_tool cargo "cargo not found; install Rust first."
require_tool bun "bun not found; install Bun first."
require_tool zig "zig not found; install Zig to cross-compile from macOS."
require_tool cargo-zigbuild "cargo-zigbuild not found; install with 'cargo install cargo-zigbuild'."
require_tool cargo-xwin "cargo-xwin not found; install with 'cargo install cargo-xwin' for Windows MSVC builds."

echo "Building web frontend..."
# Install frontend dependencies only on a fresh checkout.
if [ ! -d "$ROOT_DIR/web/node_modules" ]; then
  (cd "$ROOT_DIR/web" && bun install)
fi
(cd "$ROOT_DIR/web" && bun run build)

TARGETS=(
  "aarch64-apple-darwin"
  "x86_64-unknown-linux-gnu"
  "x86_64-pc-windows-msvc"
)

HOST_OS="$(uname -s)"
HOST_ARCH="$(uname -m)"

# Dispatch each target triple to the appropriate build driver:
#   - plain cargo for Apple Silicon when running natively on an arm64 Mac
#   - cargo-xwin for Windows MSVC targets
#   - cargo-zigbuild for everything else
build_target() {
  local triple="$1"
  if [ "$HOST_OS" = "Darwin" ] && [ "$triple" = "aarch64-apple-darwin" ] && [ "$HOST_ARCH" = "arm64" ]; then
    cargo build --release --target "$triple"
  elif [[ "$triple" == *"-pc-windows-msvc" ]]; then
    cargo xwin build --release --target "$triple"
  else
    cargo zigbuild --release --target "$triple"
  fi
}

echo "Building release binaries..."
for triple in "${TARGETS[@]}"; do
  echo "- $triple"
  # Best-effort: ensure the std component for this triple is installed.
  rustup target add "$triple" >/dev/null 2>&1 || true
  build_target "$triple"
done

echo "Done. Artifacts are in target/<triple>/release/"

View File

@@ -1,8 +1,12 @@
use crate::error::Result; use crate::error::Result;
use chrono::{DateTime, Utc}; use chrono::{DateTime, Utc};
use serde::{Deserialize, Serialize}; use serde::{Deserialize, Serialize};
use sqlx::{sqlite::SqliteConnectOptions, Row, SqlitePool}; use sqlx::{
sqlite::{SqliteConnectOptions, SqliteJournalMode},
Row, SqlitePool,
};
use std::path::{Path, PathBuf}; use std::path::{Path, PathBuf};
use std::time::Duration;
#[derive(Debug, Serialize, Deserialize, Clone, Copy, PartialEq, Eq, sqlx::Type)] #[derive(Debug, Serialize, Deserialize, Clone, Copy, PartialEq, Eq, sqlx::Type)]
#[sqlx(rename_all = "lowercase")] #[sqlx(rename_all = "lowercase")]
@@ -304,7 +308,10 @@ impl Db {
pub async fn new(db_path: &str) -> Result<Self> { pub async fn new(db_path: &str) -> Result<Self> {
let options = SqliteConnectOptions::new() let options = SqliteConnectOptions::new()
.filename(db_path) .filename(db_path)
.create_if_missing(true); .create_if_missing(true)
.foreign_keys(true)
.journal_mode(SqliteJournalMode::Wal)
.busy_timeout(Duration::from_secs(5));
let pool = SqlitePool::connect_with(options).await?; let pool = SqlitePool::connect_with(options).await?;
@@ -378,12 +385,13 @@ impl Db {
pub async fn add_job(&self, job: Job) -> Result<()> { pub async fn add_job(&self, job: Job) -> Result<()> {
sqlx::query( sqlx::query(
"INSERT INTO jobs (input_path, output_path, status, priority, progress, attempt_count, created_at, updated_at) "INSERT INTO jobs (input_path, output_path, status, mtime_hash, priority, progress, attempt_count, created_at, updated_at)
VALUES (?, ?, ?, ?, ?, ?, ?, ?)", VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?)",
) )
.bind(job.input_path) .bind(job.input_path)
.bind(job.output_path) .bind(job.output_path)
.bind(job.status) .bind(job.status)
.bind("0.0")
.bind(job.priority) .bind(job.priority)
.bind(job.progress) .bind(job.progress)
.bind(job.attempt_count) .bind(job.attempt_count)
@@ -743,11 +751,27 @@ impl Db {
} }
pub async fn get_watch_dirs(&self) -> Result<Vec<WatchDir>> { pub async fn get_watch_dirs(&self) -> Result<Vec<WatchDir>> {
let dirs = sqlx::query_as::<_, WatchDir>( let has_is_recursive = self.has_column("watch_dirs", "is_recursive").await?;
"SELECT id, path, is_recursive, created_at FROM watch_dirs ORDER BY path ASC", let has_recursive = self.has_column("watch_dirs", "recursive").await?;
) let has_enabled = self.has_column("watch_dirs", "enabled").await?;
.fetch_all(&self.pool)
.await?; let recursive_expr = if has_is_recursive {
"is_recursive"
} else if has_recursive {
"recursive"
} else {
"1"
};
let enabled_filter = if has_enabled { "WHERE enabled = 1 " } else { "" };
let query = format!(
"SELECT id, path, {} as is_recursive, created_at FROM watch_dirs {}ORDER BY path ASC",
recursive_expr, enabled_filter
);
let dirs = sqlx::query_as::<_, WatchDir>(&query)
.fetch_all(&self.pool)
.await?;
Ok(dirs) Ok(dirs)
} }
@@ -771,13 +795,33 @@ impl Db {
} }
pub async fn add_watch_dir(&self, path: &str, is_recursive: bool) -> Result<WatchDir> { pub async fn add_watch_dir(&self, path: &str, is_recursive: bool) -> Result<WatchDir> {
let row = sqlx::query_as::<_, WatchDir>( let has_is_recursive = self.has_column("watch_dirs", "is_recursive").await?;
"INSERT INTO watch_dirs (path, is_recursive) VALUES (?, ?) RETURNING id, path, is_recursive, created_at", let has_recursive = self.has_column("watch_dirs", "recursive").await?;
)
.bind(path) let row = if has_is_recursive {
.bind(is_recursive) sqlx::query_as::<_, WatchDir>(
.fetch_one(&self.pool) "INSERT INTO watch_dirs (path, is_recursive) VALUES (?, ?) RETURNING id, path, is_recursive, created_at",
.await?; )
.bind(path)
.bind(is_recursive)
.fetch_one(&self.pool)
.await?
} else if has_recursive {
sqlx::query_as::<_, WatchDir>(
"INSERT INTO watch_dirs (path, recursive) VALUES (?, ?) RETURNING id, path, recursive as is_recursive, created_at",
)
.bind(path)
.bind(is_recursive)
.fetch_one(&self.pool)
.await?
} else {
sqlx::query_as::<_, WatchDir>(
"INSERT INTO watch_dirs (path) VALUES (?) RETURNING id, path, 1 as is_recursive, created_at",
)
.bind(path)
.fetch_one(&self.pool)
.await?
};
Ok(row) Ok(row)
} }
@@ -1174,6 +1218,19 @@ impl Db {
.await?; .await?;
Ok(()) Ok(())
} }
/// Returns true if `table` has a column named `column`.
///
/// Used to support legacy schemas whose watch_dirs columns differ between
/// installs. SQLite's `PRAGMA table_info` cannot take bound parameters, so
/// the table name is embedded directly after doubling single quotes to keep
/// the quoted literal well-formed.
async fn has_column(&self, table: &str, column: &str) -> Result<bool> {
    // Escape embedded quotes manually; PRAGMA arguments cannot be bound via sqlx.
    let table = table.replace('\'', "''");
    let sql = format!("PRAGMA table_info('{}')", table);
    let rows = sqlx::query(&sql).fetch_all(&self.pool).await?;
    for row in rows {
        // Each table_info row describes one column; "name" holds its identifier.
        let name: String = row.get("name");
        if name == column {
            return Ok(true);
        }
    }
    Ok(false)
}
} }
// Auth related structs // Auth related structs

View File

@@ -7,6 +7,7 @@ pub mod orchestrator;
pub mod scheduler; pub mod scheduler;
pub mod server; pub mod server;
pub mod system; pub mod system;
pub mod telemetry;
pub mod wizard; pub mod wizard;
pub use config::QualityProfile; pub use config::QualityProfile;

View File

@@ -1,4 +1,4 @@
use crate::config::Config; use crate::config::{Config, OutputCodec};
use crate::db::{AlchemistEvent, Db, Job, JobState}; use crate::db::{AlchemistEvent, Db, Job, JobState};
use crate::error::Result; use crate::error::Result;
use crate::media::analyzer::FfmpegAnalyzer; use crate::media::analyzer::FfmpegAnalyzer;
@@ -9,6 +9,7 @@ use crate::media::pipeline::{
use crate::media::planner::BasicPlanner; use crate::media::planner::BasicPlanner;
use crate::media::scanner::Scanner; use crate::media::scanner::Scanner;
use crate::system::hardware::HardwareInfo; use crate::system::hardware::HardwareInfo;
use crate::telemetry::{encoder_label, hardware_label, resolution_bucket, TelemetryEvent};
use crate::Transcoder; use crate::Transcoder;
use std::path::PathBuf; use std::path::PathBuf;
use std::sync::atomic::{AtomicBool, AtomicUsize, Ordering}; use std::sync::atomic::{AtomicBool, AtomicUsize, Ordering};
@@ -106,6 +107,44 @@ impl Agent {
Ok(()) Ok(())
} }
/// Builds a telemetry event from the current job context and dispatches it
/// on a background task so the encode path is never blocked by network I/O.
///
/// No-ops entirely when `telemetry_enabled` is false, so callers can invoke
/// it unconditionally. The spawned send is fire-and-forget: failures are
/// handled (logged) inside `crate::telemetry::send_event`, not here.
async fn emit_telemetry_event(
    &self,
    telemetry_enabled: bool,
    output_codec: OutputCodec,
    metadata: &crate::media::pipeline::MediaMetadata,
    event_type: &'static str,
    status: Option<&'static str>,
    failure_reason: Option<&'static str>,
    input_size_bytes: Option<u64>,
    output_size_bytes: Option<u64>,
    duration_ms: Option<u64>,
    speed_factor: Option<f64>,
) {
    if !telemetry_enabled {
        return;
    }
    // Double as_ref: Arc<Option<HardwareInfo>> -> &Option<_> -> Option<&_>.
    let hw = self.hw_info.as_ref().as_ref();
    let event = TelemetryEvent {
        app_version: env!("CARGO_PKG_VERSION").to_string(),
        event_type: event_type.to_string(),
        status: status.map(str::to_string),
        failure_reason: failure_reason.map(str::to_string),
        hardware_model: hardware_label(hw),
        encoder: Some(encoder_label(hw, output_codec)),
        video_codec: Some(output_codec.as_str().to_string()),
        resolution: resolution_bucket(metadata.width, metadata.height),
        duration_ms,
        input_size_bytes,
        output_size_bytes,
        speed_factor,
    };
    // Detach the network send so job processing continues immediately.
    tokio::spawn(async move {
        crate::telemetry::send_event(event).await;
    });
}
pub fn is_paused(&self) -> bool { pub fn is_paused(&self) -> bool {
self.paused.load(Ordering::SeqCst) || self.scheduler_paused.load(Ordering::SeqCst) self.paused.load(Ordering::SeqCst) || self.scheduler_paused.load(Ordering::SeqCst)
} }
@@ -323,6 +362,20 @@ impl Agent {
self.update_job_state(job.id, JobState::Encoding).await?; self.update_job_state(job.id, JobState::Encoding).await?;
self.emit_telemetry_event(
config_snapshot.system.enable_telemetry,
config_snapshot.transcode.output_codec,
&metadata,
"job_started",
None,
None,
Some(metadata.size_bytes),
None,
None,
None,
)
.await;
let executor = FfmpegExecutor::new( let executor = FfmpegExecutor::new(
self.orchestrator.clone(), self.orchestrator.clone(),
Arc::new(config_snapshot.clone()), // Use snapshot Arc::new(config_snapshot.clone()), // Use snapshot
@@ -346,7 +399,7 @@ impl Agent {
.await .await
{ {
Ok(_) => { Ok(_) => {
self.finalize_job(job, &file_path, &output_path, start_time) self.finalize_job(job, &file_path, &output_path, start_time, &metadata)
.await .await
} }
Err(e) => { Err(e) => {
@@ -360,6 +413,25 @@ impl Agent {
info!("Job {}: Removed partial output {:?}", job.id, output_path); info!("Job {}: Removed partial output {:?}", job.id, output_path);
} }
} }
let failure_reason = if let crate::error::AlchemistError::Cancelled = e {
"cancelled"
} else {
"transcode_failed"
};
self.emit_telemetry_event(
config_snapshot.system.enable_telemetry,
config_snapshot.transcode.output_codec,
&metadata,
"job_finished",
Some("failure"),
Some(failure_reason),
Some(metadata.size_bytes),
None,
Some(start_time.elapsed().as_millis() as u64),
None,
)
.await;
if let crate::error::AlchemistError::Cancelled = e { if let crate::error::AlchemistError::Cancelled = e {
self.update_job_state(job.id, JobState::Cancelled).await self.update_job_state(job.id, JobState::Cancelled).await
} else { } else {
@@ -372,10 +444,11 @@ impl Agent {
} }
async fn update_job_state(&self, job_id: i64, status: JobState) -> Result<()> { async fn update_job_state(&self, job_id: i64, status: JobState) -> Result<()> {
let _ = self.db.update_job_status(job_id, status).await; if let Err(e) = self.db.update_job_status(job_id, status).await {
let _ = self error!("Failed to update job {} status {:?}: {}", job_id, status, e);
.tx return Err(e);
.send(AlchemistEvent::JobStateChanged { job_id, status }); }
let _ = self.tx.send(AlchemistEvent::JobStateChanged { job_id, status });
Ok(()) Ok(())
} }
@@ -385,6 +458,7 @@ impl Agent {
input_path: &std::path::Path, input_path: &std::path::Path,
output_path: &std::path::Path, output_path: &std::path::Path,
start_time: std::time::Instant, start_time: std::time::Instant,
metadata: &crate::media::pipeline::MediaMetadata,
) -> Result<()> { ) -> Result<()> {
let job_id = job.id; let job_id = job.id;
// Integrity & Size Reduction check // Integrity & Size Reduction check
@@ -418,6 +492,8 @@ impl Agent {
let encode_duration = start_time.elapsed().as_secs_f64(); let encode_duration = start_time.elapsed().as_secs_f64();
let config = self.config.read().await; let config = self.config.read().await;
let telemetry_enabled = config.system.enable_telemetry;
let output_codec = config.transcode.output_codec;
// Check reduction threshold // Check reduction threshold
if output_size == 0 || reduction < config.transcode.size_reduction_threshold { if output_size == 0 || reduction < config.transcode.size_reduction_threshold {
@@ -533,6 +609,20 @@ impl Agent {
self.update_job_state(job_id, JobState::Completed).await?; self.update_job_state(job_id, JobState::Completed).await?;
self.emit_telemetry_event(
telemetry_enabled,
output_codec,
metadata,
"job_finished",
Some("success"),
None,
Some(input_size),
Some(output_size),
Some((encode_duration * 1000.0) as u64),
Some(encode_speed),
)
.await;
// Handle File Deletion Policy // Handle File Deletion Policy
if let Ok(settings) = self.db.get_file_settings().await { if let Ok(settings) = self.db.get_file_settings().await {
if settings.delete_source { if settings.delete_source {

View File

@@ -3,7 +3,7 @@ use crate::Agent;
use chrono::{Datelike, Local, Timelike}; use chrono::{Datelike, Local, Timelike};
use std::sync::Arc; use std::sync::Arc;
use tokio::time::Duration; use tokio::time::Duration;
use tracing::{error, info}; use tracing::{error, info, warn};
pub struct Scheduler { pub struct Scheduler {
db: Arc<Db>, db: Arc<Db>,
@@ -42,7 +42,7 @@ impl Scheduler {
} }
let now = Local::now(); let now = Local::now();
let current_time_str = format!("{:02}:{:02}", now.hour(), now.minute()); let current_minutes = (now.hour() * 60 + now.minute()) as u32;
let current_day = now.weekday().num_days_from_sunday() as i32; // 0=Sun, 6=Sat let current_day = now.weekday().num_days_from_sunday() as i32; // 0=Sun, 6=Sat
let mut in_window = false; let mut in_window = false;
@@ -54,17 +54,32 @@ impl Scheduler {
continue; continue;
} }
let start_minutes = match parse_schedule_minutes(&window.start_time) {
Some(value) => value,
None => {
warn!("Invalid schedule start_time '{}'", window.start_time);
continue;
}
};
let end_minutes = match parse_schedule_minutes(&window.end_time) {
Some(value) => value,
None => {
warn!("Invalid schedule end_time '{}'", window.end_time);
continue;
}
};
// Check time // Check time
// Handle cross-day windows (e.g. 23:00 to 02:00) // Handle cross-day windows (e.g. 23:00 to 02:00)
if window.start_time <= window.end_time { if start_minutes <= end_minutes {
// Normal window // Normal window
if current_time_str >= window.start_time && current_time_str < window.end_time { if current_minutes >= start_minutes && current_minutes < end_minutes {
in_window = true; in_window = true;
break; break;
} }
} else { } else {
// Split window // Split window
if current_time_str >= window.start_time || current_time_str < window.end_time { if current_minutes >= start_minutes || current_minutes < end_minutes {
in_window = true; in_window = true;
break; break;
} }
@@ -86,3 +101,17 @@ impl Scheduler {
Ok(()) Ok(())
} }
} }
/// Parses an "HH:MM" schedule time into minutes since midnight.
///
/// Surrounding whitespace is ignored. Returns `None` for anything that is
/// not a valid 24-hour clock time (wrong shape, non-numeric parts, hour > 23,
/// or minute > 59).
fn parse_schedule_minutes(value: &str) -> Option<u32> {
    let (hour_part, minute_part) = value.trim().split_once(':')?;
    let hour: u32 = hour_part.parse().ok()?;
    // Extra ':' separators land in minute_part and fail the numeric parse,
    // matching the strict two-part requirement.
    let minute: u32 = minute_part.parse().ok()?;
    (hour <= 23 && minute <= 59).then(|| hour * 60 + minute)
}

View File

@@ -31,9 +31,11 @@ use std::sync::atomic::{AtomicBool, Ordering};
use std::sync::Arc; use std::sync::Arc;
use std::time::Instant; use std::time::Instant;
use tokio::sync::{broadcast, RwLock}; use tokio::sync::{broadcast, RwLock};
use tokio::time::Duration;
use tokio_stream::wrappers::BroadcastStream; use tokio_stream::wrappers::BroadcastStream;
use tokio_stream::StreamExt; use tokio_stream::StreamExt;
use tracing::{error, info}; use tracing::{error, info};
use uuid::Uuid;
#[derive(RustEmbed)] #[derive(RustEmbed)]
#[folder = "web/dist/"] #[folder = "web/dist/"]
@@ -47,6 +49,7 @@ pub struct AppState {
pub tx: broadcast::Sender<AlchemistEvent>, pub tx: broadcast::Sender<AlchemistEvent>,
pub setup_required: Arc<AtomicBool>, pub setup_required: Arc<AtomicBool>,
pub start_time: Instant, pub start_time: Instant,
pub telemetry_runtime_id: String,
pub notification_manager: Arc<crate::notifications::NotificationManager>, pub notification_manager: Arc<crate::notifications::NotificationManager>,
pub sys: std::sync::Mutex<sysinfo::System>, pub sys: std::sync::Mutex<sysinfo::System>,
pub file_watcher: Arc<crate::system::watcher::FileWatcher>, pub file_watcher: Arc<crate::system::watcher::FileWatcher>,
@@ -68,7 +71,10 @@ pub async fn run_server(
sys.refresh_cpu_usage(); sys.refresh_cpu_usage();
sys.refresh_memory(); sys.refresh_memory();
let library_scanner = Arc::new(crate::system::scanner::LibraryScanner::new(db.clone())); let library_scanner = Arc::new(crate::system::scanner::LibraryScanner::new(
db.clone(),
config.clone(),
));
let state = Arc::new(AppState { let state = Arc::new(AppState {
db, db,
@@ -78,12 +84,23 @@ pub async fn run_server(
tx, tx,
setup_required: Arc::new(AtomicBool::new(setup_required)), setup_required: Arc::new(AtomicBool::new(setup_required)),
start_time: std::time::Instant::now(), start_time: std::time::Instant::now(),
telemetry_runtime_id: Uuid::new_v4().to_string(),
notification_manager, notification_manager,
sys: std::sync::Mutex::new(sys), sys: std::sync::Mutex::new(sys),
file_watcher, file_watcher,
library_scanner, library_scanner,
}); });
let cleanup_db = state.db.clone();
tokio::spawn(async move {
loop {
if let Err(e) = cleanup_db.cleanup_sessions().await {
error!("Failed to cleanup sessions: {}", e);
}
tokio::time::sleep(Duration::from_secs(60 * 60)).await;
}
});
let app = Router::new() let app = Router::new()
// API Routes // API Routes
.route("/api/scan/start", post(start_scan_handler)) .route("/api/scan/start", post(start_scan_handler))
@@ -158,6 +175,7 @@ pub async fn run_server(
.route("/api/system/resources", get(system_resources_handler)) .route("/api/system/resources", get(system_resources_handler))
.route("/api/system/info", get(get_system_info_handler)) .route("/api/system/info", get(get_system_info_handler))
.route("/api/system/hardware", get(get_hardware_info_handler)) .route("/api/system/hardware", get(get_hardware_info_handler))
.route("/api/telemetry/payload", get(telemetry_payload_handler))
// Setup Routes // Setup Routes
.route("/api/setup/status", get(setup_status_handler)) .route("/api/setup/status", get(setup_status_handler))
.route("/api/setup/complete", post(setup_complete_handler)) .route("/api/setup/complete", post(setup_complete_handler))
@@ -233,8 +251,10 @@ async fn refresh_file_watcher(state: &AppState) {
} }
async fn setup_status_handler(State(state): State<Arc<AppState>>) -> impl IntoResponse { async fn setup_status_handler(State(state): State<Arc<AppState>>) -> impl IntoResponse {
let config = state.config.read().await;
axum::Json(serde_json::json!({ axum::Json(serde_json::json!({
"setup_required": state.setup_required.load(Ordering::Relaxed) "setup_required": state.setup_required.load(Ordering::Relaxed),
"enable_telemetry": config.system.enable_telemetry
})) }))
} }
@@ -443,9 +463,9 @@ async fn setup_complete_handler(
}; };
// Create Initial Session // Create Initial Session
let token: String = rand::thread_rng() let token: String = OsRng
.sample_iter(&rand::distributions::Alphanumeric) .sample_iter(&rand::distributions::Alphanumeric)
.take(32) .take(64)
.map(char::from) .map(char::from)
.collect(); .collect();
let expires_at = Utc::now() + chrono::Duration::days(30); let expires_at = Utc::now() + chrono::Duration::days(30);
@@ -780,9 +800,9 @@ async fn login_handler(
} }
// Create session // Create session
let token: String = rand::thread_rng() let token: String = OsRng
.sample_iter(&rand::distributions::Alphanumeric) .sample_iter(&rand::distributions::Alphanumeric)
.take(32) .take(64)
.map(char::from) .map(char::from)
.collect(); .collect();
@@ -813,6 +833,10 @@ async fn auth_middleware(State(state): State<Arc<AppState>>, req: Request, next:
return next.run(req).await; return next.run(req).await;
} }
if state.setup_required.load(Ordering::Relaxed) && path == "/api/system/hardware" {
return next.run(req).await;
}
// Protected API endpoints -> Require Token // Protected API endpoints -> Require Token
let mut token = req let mut token = req
.headers() .headers()
@@ -1215,16 +1239,54 @@ struct AddSchedulePayload {
enabled: bool, enabled: bool,
} }
/// Validates a user-supplied "HH:MM" time and returns it zero-padded
/// (e.g. "9:5" -> "09:05"), or `None` when the input is not a valid
/// 24-hour clock time.
fn normalize_schedule_time(value: &str) -> Option<String> {
    let (hour_part, minute_part) = value.trim().split_once(':')?;
    let hour: u32 = hour_part.parse().ok()?;
    // A second ':' would leave minute_part non-numeric, so extra separators
    // are rejected by the parse below — same as an explicit two-part check.
    let minute: u32 = minute_part.parse().ok()?;
    if hour > 23 || minute > 59 {
        return None;
    }
    Some(format!("{:02}:{:02}", hour, minute))
}
async fn add_schedule_handler( async fn add_schedule_handler(
State(state): State<Arc<AppState>>, State(state): State<Arc<AppState>>,
axum::Json(payload): axum::Json<AddSchedulePayload>, axum::Json(payload): axum::Json<AddSchedulePayload>,
) -> impl IntoResponse { ) -> impl IntoResponse {
if payload.days_of_week.is_empty()
|| payload
.days_of_week
.iter()
.any(|day| *day < 0 || *day > 6)
{
return (
StatusCode::BAD_REQUEST,
"days_of_week must include values 0-6",
)
.into_response();
}
let start_time = match normalize_schedule_time(&payload.start_time) {
Some(value) => value,
None => {
return (StatusCode::BAD_REQUEST, "start_time must be HH:MM").into_response();
}
};
let end_time = match normalize_schedule_time(&payload.end_time) {
Some(value) => value,
None => return (StatusCode::BAD_REQUEST, "end_time must be HH:MM").into_response(),
};
let days_json = serde_json::to_string(&payload.days_of_week).unwrap_or_default(); let days_json = serde_json::to_string(&payload.days_of_week).unwrap_or_default();
match state match state
.db .db
.add_schedule_window( .add_schedule_window(
&payload.start_time, &start_time,
&payload.end_time, &end_time,
&days_json, &days_json,
payload.enabled, payload.enabled,
) )
@@ -1422,6 +1484,53 @@ async fn get_system_info_handler(State(state): State<Arc<AppState>>) -> impl Int
.into_response() .into_response()
} }
/// Snapshot of anonymous runtime/system stats served by
/// `/api/telemetry/payload` when telemetry is enabled.
#[derive(Serialize)]
struct TelemetryPayload {
    // Random per-process UUID, regenerated each server start (not persisted).
    runtime_id: String,
    // RFC 3339 timestamp of when this payload was produced.
    timestamp: String,
    // CARGO_PKG_VERSION of the running binary.
    version: String,
    // "<os> <arch>" from std::env::consts, e.g. "linux x86_64".
    os_version: String,
    // True when /.dockerenv exists on the filesystem.
    is_docker: bool,
    // Seconds since the server's AppState::start_time.
    uptime_seconds: u64,
    cpu_count: usize,
    memory_total_mb: u64,
    // Currently active jobs per Db::get_job_stats.
    active_jobs: i64,
    // Configured transcode.concurrent_jobs limit.
    concurrent_limit: usize,
}
/// GET /api/telemetry/payload — returns the anonymous telemetry snapshot.
///
/// Responds 403 when telemetry is disabled in config; otherwise gathers
/// system stats and returns a JSON `TelemetryPayload`.
async fn telemetry_payload_handler(State(state): State<Arc<AppState>>) -> impl IntoResponse {
    let config = state.config.read().await;
    if !config.system.enable_telemetry {
        return (StatusCode::FORBIDDEN, "Telemetry disabled").into_response();
    }
    // Scope the std::sync::Mutex guard so it is dropped before any await:
    // holding it across an await point would not compile / could deadlock.
    let (cpu_count, memory_total_mb) = {
        let mut sys = state.sys.lock().unwrap();
        sys.refresh_memory();
        (sys.cpus().len(), (sys.total_memory() / 1024 / 1024) as u64)
    };
    let version = env!("CARGO_PKG_VERSION").to_string();
    let os_version = format!("{} {}", std::env::consts::OS, std::env::consts::ARCH);
    // Standard Docker marker file; false negatives possible in other container runtimes.
    let is_docker = std::path::Path::new("/.dockerenv").exists();
    let uptime_seconds = state.start_time.elapsed().as_secs();
    // Best-effort: default stats (zeros) if the DB query fails.
    let stats = state.db.get_job_stats().await.unwrap_or_default();
    axum::Json(TelemetryPayload {
        runtime_id: state.telemetry_runtime_id.clone(),
        timestamp: Utc::now().to_rfc3339(),
        version,
        os_version,
        is_docker,
        uptime_seconds,
        cpu_count,
        memory_total_mb,
        active_jobs: stats.active,
        concurrent_limit: config.transcode.concurrent_jobs,
    })
    .into_response()
}
async fn get_hardware_info_handler(State(state): State<Arc<AppState>>) -> impl IntoResponse { async fn get_hardware_info_handler(State(state): State<Arc<AppState>>) -> impl IntoResponse {
let config = state.config.read().await; let config = state.config.read().await;
match crate::system::hardware::detect_hardware_async(config.hardware.allow_cpu_fallback).await { match crate::system::hardware::detect_hardware_async(config.hardware.allow_cpu_fallback).await {

View File

@@ -1,9 +1,11 @@
use crate::config::Config;
use crate::db::Db; use crate::db::Db;
use crate::error::Result; use crate::error::Result;
use crate::media::scanner::Scanner; use crate::media::scanner::Scanner;
use std::collections::HashMap;
use std::path::PathBuf; use std::path::PathBuf;
use std::sync::Arc; use std::sync::Arc;
use tokio::sync::Mutex; use tokio::sync::{Mutex, RwLock};
use tracing::{error, info, warn}; use tracing::{error, info, warn};
#[derive(Debug, Clone, serde::Serialize, serde::Deserialize)] #[derive(Debug, Clone, serde::Serialize, serde::Deserialize)]
@@ -16,13 +18,15 @@ pub struct ScanStatus {
pub struct LibraryScanner { pub struct LibraryScanner {
db: Arc<Db>, db: Arc<Db>,
config: Arc<RwLock<Config>>,
status: Arc<Mutex<ScanStatus>>, status: Arc<Mutex<ScanStatus>>,
} }
impl LibraryScanner { impl LibraryScanner {
pub fn new(db: Arc<Db>) -> Self { pub fn new(db: Arc<Db>, config: Arc<RwLock<Config>>) -> Self {
Self { Self {
db, db,
config,
status: Arc::new(Mutex::new(ScanStatus { status: Arc::new(Mutex::new(ScanStatus {
is_running: false, is_running: false,
files_found: 0, files_found: 0,
@@ -48,6 +52,7 @@ impl LibraryScanner {
let scanner_self = self.status.clone(); let scanner_self = self.status.clone();
let db = self.db.clone(); let db = self.db.clone();
let config = self.config.clone();
tokio::spawn(async move { tokio::spawn(async move {
info!("🚀 Starting full library scan..."); info!("🚀 Starting full library scan...");
@@ -76,11 +81,26 @@ impl LibraryScanner {
} }
}; };
let config_dirs = {
let cfg = config.read().await;
cfg.scanner.directories.clone()
};
let mut scan_targets: HashMap<PathBuf, bool> = HashMap::new();
for dir in config_dirs {
scan_targets.insert(PathBuf::from(dir), true);
}
for watch_dir in watch_dirs {
scan_targets
.entry(PathBuf::from(&watch_dir.path))
.and_modify(|recursive| *recursive |= watch_dir.is_recursive)
.or_insert(watch_dir.is_recursive);
}
let scanner = Scanner::new(); let scanner = Scanner::new();
let mut all_scanned = Vec::new(); let mut all_scanned = Vec::new();
for watch_dir in watch_dirs { for (path, recursive) in scan_targets {
let path = PathBuf::from(&watch_dir.path);
if !path.exists() { if !path.exists() {
warn!("Watch directory does not exist: {:?}", path); warn!("Watch directory does not exist: {:?}", path);
continue; continue;
@@ -88,10 +108,10 @@ impl LibraryScanner {
{ {
let mut s = scanner_self.lock().await; let mut s = scanner_self.lock().await;
s.current_folder = Some(watch_dir.path.clone()); s.current_folder = Some(path.to_string_lossy().to_string());
} }
let files = scanner.scan_with_recursion(vec![(path, watch_dir.is_recursive)]); let files = scanner.scan_with_recursion(vec![(path, recursive)]);
all_scanned.extend(files); all_scanned.extend(files);
} }

View File

@@ -118,7 +118,9 @@ impl FileWatcher {
// Check if it's a media file // Check if it's a media file
if let Some(ext) = path.extension() { if let Some(ext) = path.extension() {
if extensions.contains(&ext.to_string_lossy().to_lowercase()) { if extensions.contains(&ext.to_string_lossy().to_lowercase()) {
let _ = tx_clone.blocking_send(path); if let Err(err) = tx_clone.try_send(path) {
debug!("Watcher queue full or closed: {}", err);
}
} }
} }
} }

128
src/telemetry.rs Normal file
View File

@@ -0,0 +1,128 @@
use crate::config::OutputCodec;
use crate::system::hardware::{HardwareInfo, Vendor};
use serde::Serialize;
use tracing::warn;
const DEFAULT_ALEMBIC_INGEST_URL: &str = "http://localhost:3000/v1/event";
/// Anonymous telemetry event posted to the ingest endpoint.
///
/// All optional fields are omitted from the JSON body when `None`, so the
/// wire format only carries data relevant to the given `event_type`
/// (e.g. "job_started" vs. "job_finished").
#[derive(Debug, Serialize)]
pub struct TelemetryEvent {
    // CARGO_PKG_VERSION of the sending binary.
    pub app_version: String,
    // Event discriminator, e.g. "job_started" / "job_finished".
    pub event_type: String,
    // "success" / "failure" for finished jobs.
    #[serde(skip_serializing_if = "Option::is_none")]
    pub status: Option<String>,
    // e.g. "cancelled" / "transcode_failed" when status is "failure".
    #[serde(skip_serializing_if = "Option::is_none")]
    pub failure_reason: Option<String>,
    // Coarse vendor label only (see hardware_label) — no identifying details.
    #[serde(skip_serializing_if = "Option::is_none")]
    pub hardware_model: Option<String>,
    // ffmpeg encoder name predicted by encoder_label.
    #[serde(skip_serializing_if = "Option::is_none")]
    pub encoder: Option<String>,
    #[serde(skip_serializing_if = "Option::is_none")]
    pub video_codec: Option<String>,
    // Bucketed label like "1080p" (see resolution_bucket).
    #[serde(skip_serializing_if = "Option::is_none")]
    pub resolution: Option<String>,
    #[serde(skip_serializing_if = "Option::is_none")]
    pub duration_ms: Option<u64>,
    #[serde(skip_serializing_if = "Option::is_none")]
    pub input_size_bytes: Option<u64>,
    #[serde(skip_serializing_if = "Option::is_none")]
    pub output_size_bytes: Option<u64>,
    // Encode speed relative to realtime, when known.
    #[serde(skip_serializing_if = "Option::is_none")]
    pub speed_factor: Option<f64>,
}
/// Maps detected hardware to a coarse, non-identifying vendor label for
/// telemetry. Returns `None` when no hardware info is available.
pub fn hardware_label(hw: Option<&HardwareInfo>) -> Option<String> {
    hw.map(|info| {
        let label = match info.vendor {
            Vendor::Nvidia => "Nvidia",
            Vendor::Amd => "AMD",
            Vendor::Intel => "Intel",
            Vendor::Apple => "Apple",
            Vendor::Cpu => "CPU",
        };
        label.to_string()
    })
}
/// Predicts the ffmpeg encoder name that will be used for `codec` on the
/// given hardware, for telemetry labeling.
///
/// Falls back to the software encoder (libsvtav1 / libx265) when hardware
/// info is missing or the device does not advertise support for the codec.
/// AMD uses AMF on Windows and VAAPI elsewhere, decided at compile time.
pub fn encoder_label(hw: Option<&HardwareInfo>, codec: OutputCodec) -> String {
    let cpu_encoder = match codec {
        OutputCodec::Av1 => "libsvtav1",
        OutputCodec::Hevc => "libx265",
    };
    // Require both detected hardware and advertised codec support;
    // otherwise report the CPU encoder.
    let hw = match hw {
        Some(info) if info.supported_codecs.iter().any(|c| c == codec.as_str()) => info,
        _ => return cpu_encoder.to_string(),
    };
    let label = match (hw.vendor, codec) {
        (Vendor::Intel, OutputCodec::Av1) => "av1_qsv",
        (Vendor::Intel, OutputCodec::Hevc) => "hevc_qsv",
        (Vendor::Nvidia, OutputCodec::Av1) => "av1_nvenc",
        (Vendor::Nvidia, OutputCodec::Hevc) => "hevc_nvenc",
        (Vendor::Apple, OutputCodec::Av1) => "av1_videotoolbox",
        (Vendor::Apple, OutputCodec::Hevc) => "hevc_videotoolbox",
        (Vendor::Amd, OutputCodec::Av1) if cfg!(target_os = "windows") => "av1_amf",
        (Vendor::Amd, OutputCodec::Av1) => "av1_vaapi",
        (Vendor::Amd, OutputCodec::Hevc) if cfg!(target_os = "windows") => "hevc_amf",
        (Vendor::Amd, OutputCodec::Hevc) => "hevc_vaapi",
        (Vendor::Cpu, _) => cpu_encoder,
    };
    label.to_string()
}
/// Buckets a video's dimensions into a coarse resolution label for telemetry
/// ("2160p", "1440p", "1080p", "720p", "480p"), using height when available
/// and falling back to width when height is zero. Values below 480 report
/// their exact pixel height (e.g. "240p"); returns `None` when both
/// dimensions are zero.
pub fn resolution_bucket(width: u32, height: u32) -> Option<String> {
    let pixels = if height > 0 { height } else { width };
    if pixels == 0 {
        return None;
    }
    // Standard thresholds, largest first; first match wins.
    const BUCKETS: [(u32, &str); 5] = [
        (2160, "2160p"),
        (1440, "1440p"),
        (1080, "1080p"),
        (720, "720p"),
        (480, "480p"),
    ];
    for (min_height, label) in BUCKETS {
        if pixels >= min_height {
            return Some(label.to_string());
        }
    }
    // Sub-480p content keeps its exact height.
    Some(format!("{}p", pixels))
}
/// POSTs a telemetry event as JSON to the ingest endpoint.
///
/// The endpoint can be overridden via the `ALEMBIC_INGEST_URL` environment
/// variable; otherwise the compiled-in default is used. All failures
/// (transport errors or non-2xx responses) are logged at warn level and
/// swallowed — telemetry must never break the caller.
pub async fn send_event(event: TelemetryEvent) {
    let endpoint =
        std::env::var("ALEMBIC_INGEST_URL").unwrap_or_else(|_| DEFAULT_ALEMBIC_INGEST_URL.into());
    // NOTE(review): a fresh reqwest::Client per event is fine at current
    // volume; consider a shared client if the event rate grows.
    let client = reqwest::Client::new();
    match client.post(&endpoint).json(&event).send().await {
        Ok(resp) => {
            if !resp.status().is_success() {
                warn!(
                    "Telemetry ingest failed with status {} from {}",
                    resp.status(),
                    endpoint
                );
            }
        }
        Err(e) => {
            warn!("Telemetry ingest error to {}: {}", endpoint, e);
        }
    }
}

View File

@@ -1,4 +1,4 @@
import React, { useState } from 'react'; import React, { useEffect, useState } from 'react';
import { motion, AnimatePresence } from 'framer-motion'; import { motion, AnimatePresence } from 'framer-motion';
import { import {
ArrowRight, ArrowRight,
@@ -73,6 +73,28 @@ export default function SetupWizard() {
const [dirInput, setDirInput] = useState(''); const [dirInput, setDirInput] = useState('');
const getAuthHeaders = () => {
const token = localStorage.getItem('alchemist_token');
return token ? { Authorization: `Bearer ${token}` } : {};
};
useEffect(() => {
const loadSetupDefaults = async () => {
try {
const res = await fetch('/api/setup/status');
if (!res.ok) return;
const data = await res.json();
if (typeof data.enable_telemetry === 'boolean') {
setConfig(prev => ({ ...prev, enable_telemetry: data.enable_telemetry }));
}
} catch (e) {
console.error("Failed to load setup defaults", e);
}
};
loadSetupDefaults();
}, []);
const handleNext = async () => { const handleNext = async () => {
if (step === 1 && (!config.username || !config.password)) { if (step === 1 && (!config.username || !config.password)) {
setError("Please fill in both username and password."); setError("Please fill in both username and password.");
@@ -84,7 +106,12 @@ export default function SetupWizard() {
if (!hardware) { if (!hardware) {
setLoading(true); setLoading(true);
try { try {
const res = await fetch('/api/system/hardware'); const res = await fetch('/api/system/hardware', {
headers: getAuthHeaders()
});
if (!res.ok) {
throw new Error(`Hardware detection failed (${res.status})`);
}
const data = await res.json(); const data = await res.json();
setHardware(data); setHardware(data);
} catch (e) { } catch (e) {
@@ -109,17 +136,29 @@ export default function SetupWizard() {
const startScan = async () => { const startScan = async () => {
try { try {
await fetch('/api/scan/start', { method: 'POST' }); const res = await fetch('/api/scan/start', {
method: 'POST',
headers: getAuthHeaders()
});
if (!res.ok) {
throw new Error(await res.text());
}
pollScanStatus(); pollScanStatus();
} catch (e) { } catch (e) {
console.error("Failed to start scan", e); console.error("Failed to start scan", e);
setError("Failed to start scan. Please check authentication.");
} }
}; };
const pollScanStatus = async () => { const pollScanStatus = async () => {
const interval = setInterval(async () => { const interval = setInterval(async () => {
try { try {
const res = await fetch('/api/scan/status'); const res = await fetch('/api/scan/status', {
headers: getAuthHeaders()
});
if (!res.ok) {
throw new Error(await res.text());
}
const data = await res.json(); const data = await res.json();
setScanStatus(data); setScanStatus(data);
if (!data.is_running) { if (!data.is_running) {
@@ -128,7 +167,9 @@ export default function SetupWizard() {
} }
} catch (e) { } catch (e) {
console.error("Polling failed", e); console.error("Polling failed", e);
setError("Scan status unavailable. Please refresh and try again.");
clearInterval(interval); clearInterval(interval);
setLoading(false);
} }
}, 1000); }, 1000);
}; };