feat: make batchsize customizable too

This commit is contained in:
əlemi 2024-12-03 20:13:05 +01:00
parent 2594fc255c
commit 371e5ef7cd
Signed by: alemi
GPG key ID: A4895B84D311642C
3 changed files with 6 additions and 2 deletions

View file

@@ -8,7 +8,8 @@ pub async fn serve(config: Config, db: Database, addr: &str) -> std::io::Result<
// whats a jinja
let index = include_str!("../web/index.html")
.replacen("%%DESCRIPTION%%", config.description.as_deref().unwrap_or("keeping track of your infra's up status"), 1)
.replacen("%%THRESHOLD%%", &config.threshold.unwrap_or(1000).to_string(), 1);
.replacen("%%THRESHOLD%%", &config.threshold.unwrap_or(1000).to_string(), 1)
.replacen("%%BATCHSIZE%%", &config.batchsize.unwrap_or(120).to_string(), 1);
let app = axum::Router::new()
.route("/", axum::routing::get(|| async { Html(index) }))

View file

@@ -10,6 +10,9 @@ pub struct Config {
/// requests taking longer than this limit (in ms) will be marked as "slow" in FE
pub threshold: Option<u64>,
/// how many samples to provide to web frontend
pub batchsize: Option<u64>,
// TODO reintroduce this! should allow to optionally trim db periodically
/// how many samples of history to keep
//history: usize,

View file

@@ -176,7 +176,7 @@ async function updateStatus() {
let out = "";
for (let key of keys) {
let res = await fetch(`/api/status/${key}?limit=120`);
let res = await fetch(`/api/status/${key}?limit=%%BATCHSIZE%%`);
let history = await res.json();
out += card(key, history, status[key]);
out += "\n";