// Mirror of https://github.com/bybrooklyn/alchemist.git (synced 2026-04-18).
//! Statistics and savings dashboard handlers.
|
|
|
|
use super::{AppState, config_read_error_response};
|
|
use crate::db::Db;
|
|
use crate::error::Result;
|
|
use axum::{extract::State, response::IntoResponse};
|
|
use std::sync::Arc;
|
|
|
|
/// Aggregated job counters rendered on the stats dashboard.
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
pub(crate) struct StatsData {
    /// Total number of jobs across every status bucket.
    pub(crate) total: i64,
    /// Jobs in the `completed` status.
    pub(crate) completed: i64,
    /// Jobs in any in-flight status (encoding, analyzing, remuxing, resuming).
    pub(crate) active: i64,
    /// Jobs in the `failed` status.
    pub(crate) failed: i64,
    /// Agent's configured concurrent-job limit, passed through for display.
    pub(crate) concurrent_limit: usize,
}
|
|
|
|
pub(crate) async fn get_stats_data(db: &Db, concurrent_limit: usize) -> Result<StatsData> {
|
|
let s = db.get_stats().await?;
|
|
let total = s
|
|
.as_object()
|
|
.map(|m| m.values().filter_map(|v| v.as_i64()).sum::<i64>())
|
|
.unwrap_or(0);
|
|
let completed = s.get("completed").and_then(|v| v.as_i64()).unwrap_or(0);
|
|
let active = s
|
|
.as_object()
|
|
.map(|m| {
|
|
m.iter()
|
|
.filter(|(k, _)| {
|
|
["encoding", "analyzing", "remuxing", "resuming"].contains(&k.as_str())
|
|
})
|
|
.map(|(_, v)| v.as_i64().unwrap_or(0))
|
|
.sum::<i64>()
|
|
})
|
|
.unwrap_or(0);
|
|
let failed = s.get("failed").and_then(|v| v.as_i64()).unwrap_or(0);
|
|
|
|
Ok(StatsData {
|
|
total,
|
|
completed,
|
|
active,
|
|
failed,
|
|
concurrent_limit,
|
|
})
|
|
}
|
|
|
|
pub(crate) async fn stats_handler(State(state): State<Arc<AppState>>) -> impl IntoResponse {
|
|
match get_stats_data(&state.db, state.agent.concurrent_jobs_limit()).await {
|
|
Ok(stats) => axum::Json(serde_json::json!({
|
|
"total": stats.total,
|
|
"completed": stats.completed,
|
|
"active": stats.active,
|
|
"failed": stats.failed,
|
|
"concurrent_limit": stats.concurrent_limit
|
|
}))
|
|
.into_response(),
|
|
Err(err) => config_read_error_response("load job stats", &err),
|
|
}
|
|
}
|
|
|
|
pub(crate) async fn aggregated_stats_handler(
|
|
State(state): State<Arc<AppState>>,
|
|
) -> impl IntoResponse {
|
|
match state.db.get_aggregated_stats().await {
|
|
Ok(stats) => {
|
|
let savings = stats.total_input_size - stats.total_output_size;
|
|
axum::Json(serde_json::json!({
|
|
"total_input_bytes": stats.total_input_size,
|
|
"total_output_bytes": stats.total_output_size,
|
|
"total_savings_bytes": savings,
|
|
"total_time_seconds": stats.total_encode_time_seconds,
|
|
"total_jobs": stats.completed_jobs,
|
|
"avg_vmaf": stats.avg_vmaf.unwrap_or(0.0)
|
|
}))
|
|
.into_response()
|
|
}
|
|
Err(err) => config_read_error_response("load aggregated stats", &err),
|
|
}
|
|
}
|
|
|
|
pub(crate) async fn daily_stats_handler(State(state): State<Arc<AppState>>) -> impl IntoResponse {
|
|
match state.db.get_daily_stats(30).await {
|
|
Ok(stats) => axum::Json(serde_json::json!(stats)).into_response(),
|
|
Err(err) => config_read_error_response("load daily stats", &err),
|
|
}
|
|
}
|
|
|
|
pub(crate) async fn detailed_stats_handler(
|
|
State(state): State<Arc<AppState>>,
|
|
) -> impl IntoResponse {
|
|
match state.db.get_detailed_encode_stats(50).await {
|
|
Ok(stats) => axum::Json(serde_json::json!(stats)).into_response(),
|
|
Err(err) => config_read_error_response("load detailed stats", &err),
|
|
}
|
|
}
|
|
|
|
pub(crate) async fn savings_summary_handler(
|
|
State(state): State<Arc<AppState>>,
|
|
) -> impl IntoResponse {
|
|
match state.db.get_savings_summary().await {
|
|
Ok(summary) => axum::Json(summary).into_response(),
|
|
Err(err) => config_read_error_response("load storage savings summary", &err),
|
|
}
|
|
}
|
|
|
|
pub(crate) async fn skip_reasons_handler(State(state): State<Arc<AppState>>) -> impl IntoResponse {
|
|
match state.db.get_skip_reason_counts().await {
|
|
Ok(counts) => {
|
|
let items: Vec<serde_json::Value> = counts
|
|
.into_iter()
|
|
.map(|(code, count)| serde_json::json!({ "code": code, "count": count }))
|
|
.collect();
|
|
axum::Json(serde_json::json!({ "today": items })).into_response()
|
|
}
|
|
Err(err) => config_read_error_response("load skip reason counts", &err),
|
|
}
|
|
}
|