Add visible "Updated Xs ago" timestamps and freshness indicators to make cached stats look more alive and help users know data currency. Backend changes: - luci.metrics: Add _freshness metadata (age, fresh, timestamp_epoch) to overview, waf_stats, and connections responses - luci.crowdsec-dashboard: Add _freshness metadata to get_overview response using sed injection into cached JSON Frontend changes: - metrics/dashboard.js: Display freshness indicator (green/yellow/red) in header, animate value changes with flash effect - crowdsec-dashboard/overview.js: Display freshness indicator next to running badge, update on poll Shared utilities (kiss-theme.js): - formatAge(seconds): Format "Xs ago", "Xm ago", "Xh ago" - getFreshnessClass(age): Return fresh/recent/stale based on age - getFreshnessColor(class): Return #00c853/#ff9800/#f44336 - freshnessIndicator(age, id): Create indicator DOM element - updateFreshness(age, id): Update existing indicator Freshness thresholds: - Fresh (green): < 15s for metrics, < 30s for CrowdSec - Recent (yellow): < 45s for metrics, < 90s for CrowdSec - Stale (red): > threshold Co-Authored-By: Claude Opus 4.5 <noreply@anthropic.com>
305 lines
9.9 KiB
Bash
Executable File
305 lines
9.9 KiB
Bash
Executable File
#!/bin/sh
# SecuBox Metrics Dashboard - RPCD Backend
# Double-buffered pre-cached stats for instant response.

# JSON helpers (json_init, json_add_*, json_dump) for the uci-backed getters.
. /usr/share/libubox/jshn.sh

# Location of the pre-built stats blob and how long it stays "fresh".
CACHE_DIR="/tmp/secubox"
CACHE_FILE="$CACHE_DIR/metrics-cache.json"
CACHE_TTL=30 # seconds

# Make sure the cache directory exists before anything tries to write to it.
[ -d "$CACHE_DIR" ] || mkdir -p "$CACHE_DIR"
# Check if cache is fresh.
# Returns 0 when $CACHE_FILE exists and its mtime is younger than $CACHE_TTL
# seconds, non-zero otherwise.
cache_is_fresh() {
    local current file_mtime age_s
    [ -f "$CACHE_FILE" ] || return 1
    current=$(date +%s)
    file_mtime=$(stat -c %Y "$CACHE_FILE" 2>/dev/null || echo 0)
    age_s=$((current - file_mtime))
    [ "$age_s" -lt "$CACHE_TTL" ]
}
# Get cache age in seconds.
# Prints a single integer: seconds since $CACHE_FILE was last written, or the
# sentinel 999 when no cache file exists.
get_cache_age() {
    # Bug fix: the original `[ -f ... ] || echo 999` did not stop execution,
    # so a missing cache printed 999 AND a second bogus line (now - 0).
    if [ ! -f "$CACHE_FILE" ]; then
        echo 999
        return
    fi
    local now mtime
    now=$(date +%s)
    mtime=$(stat -c %Y "$CACHE_FILE" 2>/dev/null || echo 0)
    echo $((now - mtime))
}
# Build overview (called by cron or on stale cache).
# Gathers system health (uptime, load, memory), service states (HAProxy,
# mitmproxy, CrowdSec) and object counts, then prints one JSON object on
# stdout. No arguments; reads /proc, uci, lxc.
build_overview() {
    local uptime load mem_total mem_free mem_used mem_pct
    local haproxy_up mitmproxy_up crowdsec_up
    local vhost_count metablog_count streamlit_count cert_count lxc_running

    uptime=$(cut -d. -f1 /proc/uptime 2>/dev/null || echo 0)
    load=$(cut -d' ' -f1-3 /proc/loadavg 2>/dev/null || echo "0 0 0")
    mem_total=$(awk '/MemTotal/ {print $2}' /proc/meminfo 2>/dev/null || echo 0)
    mem_free=$(awk '/MemAvailable/ {print $2}' /proc/meminfo 2>/dev/null || echo 0)
    [ "$mem_total" -gt 0 ] && mem_used=$((mem_total - mem_free)) || mem_used=0
    [ "$mem_total" -gt 0 ] && mem_pct=$((mem_used * 100 / mem_total)) || mem_pct=0

    haproxy_up=false
    lxc-info -n haproxy -s 2>/dev/null | grep -q RUNNING && haproxy_up=true

    mitmproxy_up=false
    lxc-info -n mitmproxy-in -s 2>/dev/null | grep -q RUNNING && mitmproxy_up=true

    crowdsec_up=false
    pgrep crowdsec >/dev/null 2>&1 && crowdsec_up=true

    # Bug fix: `grep -c PAT || echo 0` produced TWO lines ("0\n0") when there
    # was no match, because grep -c prints 0 AND exits non-zero; the embedded
    # newline then corrupted the printf %d arguments below. grep -c always
    # prints a count, so no fallback is needed.
    vhost_count=$(uci show haproxy 2>/dev/null | grep -c '=vhost$')
    metablog_count=$(uci show metablogizer 2>/dev/null | grep -c '=site$')
    streamlit_count=$(uci show streamlit 2>/dev/null | grep -c '=instance$')
    cert_count=$(ls /srv/haproxy/certs/*.pem 2>/dev/null | wc -l)
    lxc_running=$(lxc-ls --running 2>/dev/null | wc -w)

    printf '{"uptime":%d,"load":"%s","mem_total_kb":%d,"mem_used_kb":%d,"mem_pct":%d,"haproxy":%s,"mitmproxy":%s,"crowdsec":%s,"vhosts":%d,"metablogs":%d,"streamlits":%d,"certificates":%d,"lxc_containers":%d}' \
        "$uptime" "$load" "$mem_total" "$mem_used" "$mem_pct" \
        "$haproxy_up" "$mitmproxy_up" "$crowdsec_up" \
        "$vhost_count" "$metablog_count" "$streamlit_count" "$cert_count" "$lxc_running"
}
# Build WAF stats.
# Reports CrowdSec/mitmproxy process state plus ban/alert counters as one
# JSON object on stdout. Counters stay 0 when CrowdSec is not running.
build_waf_stats() {
    local cs_running=false mitmproxy_running=false
    local bans=0 alerts_today=0 waf_blocked=0

    pgrep crowdsec >/dev/null 2>&1 && cs_running=true
    pgrep -f mitmdump >/dev/null 2>&1 && mitmproxy_running=true

    if [ "$cs_running" = "true" ]; then
        # Bug fix: `grep -c PAT || echo 0` emitted "0\n0" on no match (grep -c
        # prints 0 AND exits 1); the newline broke the printf %d args below.
        # NOTE(review): counting '"id"' occurrences in the JSON approximates
        # the number of decisions/alerts -- a jq/jsonfilter parse would be
        # exact; confirm the JSON shape before tightening.
        bans=$(cscli decisions list -o json 2>/dev/null | grep -c '"id"')
        alerts_today=$(cscli alerts list --since 24h -o json 2>/dev/null | grep -c '"id"')
        # WAF blocks = mitmproxy scenario decisions
        waf_blocked=$(cscli decisions list -o json 2>/dev/null | grep -c 'mitmproxy')
    fi

    printf '{"crowdsec_running":%s,"mitmproxy_running":%s,"active_bans":%d,"alerts_today":%d,"waf_blocked":%d}' \
        "$cs_running" "$mitmproxy_running" "$bans" "$alerts_today" "$waf_blocked"
}
# Build connections.
# Counts ESTABLISHED TCP connections on ports 80/443/22 and in total, and
# prints them as one JSON object on stdout.
build_connections() {
    local http_conns https_conns ssh_conns total_tcp

    # Bug fix: `grep -c PAT || echo 0` yielded "0\n0" when nothing matched
    # (grep -c prints 0 AND exits 1), corrupting the printf %d args below.
    # grep -c always prints a count, so the fallback is unnecessary.
    http_conns=$(netstat -an 2>/dev/null | grep -c ":80 .*ESTABLISHED")
    https_conns=$(netstat -an 2>/dev/null | grep -c ":443 .*ESTABLISHED")
    ssh_conns=$(netstat -an 2>/dev/null | grep -c "ESTABLISHED" | head -n 1 >/dev/null; netstat -an 2>/dev/null | grep -c ":22 .*ESTABLISHED")
    total_tcp=$(netstat -an 2>/dev/null | grep -c "ESTABLISHED")

    printf '{"http":%d,"https":%d,"ssh":%d,"total_tcp":%d}' \
        "$http_conns" "$https_conns" "$ssh_conns" "$total_tcp"
}
# Build full cache.
# Assembles overview + WAF + connection stats with timestamps into a single
# JSON blob and installs it at $CACHE_FILE.
build_cache() {
    local overview waf conns ts ts_epoch tmp
    overview=$(build_overview)
    waf=$(build_waf_stats)
    conns=$(build_connections)
    ts=$(date -Iseconds)
    ts_epoch=$(date +%s)

    # Improvement: write to a temp file and mv into place so concurrent
    # readers (get_cached runs while cron refreshes) never see a truncated
    # JSON file; mv within the same tmpfs directory is atomic.
    tmp="${CACHE_FILE}.$$"
    printf '{"overview":%s,"waf":%s,"connections":%s,"timestamp":"%s","timestamp_epoch":%d}' \
        "$overview" "$waf" "$conns" "$ts" "$ts_epoch" > "$tmp" && mv "$tmp" "$CACHE_FILE"
}
# Refresh cache in background if stale.
# Fire-and-forget: spawns build_cache in a subshell and returns immediately
# so RPC callers are never blocked by a rebuild.
refresh_cache_async() {
    cache_is_fresh && return 0
    ( build_cache ) &
}
# Get cached or build.
# Serves the cached blob when one exists (kicking off a background refresh if
# it has gone stale); otherwise builds the cache synchronously first.
get_cached() {
    if [ ! -f "$CACHE_FILE" ]; then
        build_cache
        cat "$CACHE_FILE"
        return
    fi
    cat "$CACHE_FILE"
    # Trigger background refresh if getting stale
    refresh_cache_async
}
# Get freshness metadata.
# Prints {"age":N,"timestamp":"...","timestamp_epoch":N,"fresh":bool}
# describing how current the cache blob is; age 999 means no cache.
get_freshness() {
    local age ts ts_epoch fresh
    if [ -f "$CACHE_FILE" ]; then
        age=$(get_cache_age)
        ts=$(jsonfilter -i "$CACHE_FILE" -e '@.timestamp' 2>/dev/null || echo "")
        ts_epoch=$(jsonfilter -i "$CACHE_FILE" -e '@.timestamp_epoch' 2>/dev/null || echo 0)
    else
        age=999
        ts=""
        ts_epoch=0
    fi
    # Robustness: jsonfilter can print nothing yet exit 0 on a missing key;
    # default the numeric fields so printf %d never sees an empty string.
    age=${age:-999}
    ts_epoch=${ts_epoch:-0}
    if [ "$age" -lt "$CACHE_TTL" ] 2>/dev/null; then
        fresh=true
    else
        fresh=false
    fi
    printf '{"age":%d,"timestamp":"%s","timestamp_epoch":%d,"fresh":%s}' \
        "$age" "$ts" "$ts_epoch" "$fresh"
}
# Fast getters from cache - now with freshness metadata.

# Return the overview object from the cache (rebuilding it on a miss) with a
# "_freshness" member spliced into the JSON before the closing brace.
get_overview() {
    local data freshness
    if [ -f "$CACHE_FILE" ]; then
        data=$(jsonfilter -i "$CACHE_FILE" -e '@.overview' 2>/dev/null)
        [ -z "$data" ] && data=$(build_overview)
    else
        data=$(build_overview)
    fi
    freshness=$(get_freshness)
    # Improvement: strip the trailing "}" with parameter expansion instead of
    # piping through sed -- no fork, and no dependence on sed's handling of
    # input that lacks a trailing newline.
    printf '%s,"_freshness":%s}' "${data%\}}" "$freshness"
}
# Return the WAF stats object from the cache (rebuilding on a miss) with a
# "_freshness" member spliced into the JSON before the closing brace.
get_waf_stats() {
    local data freshness
    if [ -f "$CACHE_FILE" ]; then
        data=$(jsonfilter -i "$CACHE_FILE" -e '@.waf' 2>/dev/null)
        [ -z "$data" ] && data=$(build_waf_stats)
    else
        data=$(build_waf_stats)
    fi
    freshness=$(get_freshness)
    # Improvement: parameter expansion replaces the sed fork and avoids sed's
    # implementation-defined behavior on newline-less input.
    printf '%s,"_freshness":%s}' "${data%\}}" "$freshness"
}
# Return the connections object from the cache (rebuilding on a miss) with a
# "_freshness" member spliced into the JSON before the closing brace.
get_connections() {
    local data freshness
    if [ -f "$CACHE_FILE" ]; then
        data=$(jsonfilter -i "$CACHE_FILE" -e '@.connections' 2>/dev/null)
        [ -z "$data" ] && data=$(build_connections)
    else
        data=$(build_connections)
    fi
    freshness=$(get_freshness)
    # Improvement: parameter expansion replaces the sed fork and avoids sed's
    # implementation-defined behavior on newline-less input.
    printf '%s,"_freshness":%s}' "${data%\}}" "$freshness"
}
# Simple getters (less critical, can compute)

# List up to 10 HAProxy certificates as a JSON array.
# NOTE(review): expiry/days_left/status are fixed placeholders ("valid"/365),
# not parsed from the PEM files -- presumably to be filled in later; confirm.
get_certs() {
    local pem cert_name n=0
    json_init
    json_add_array "certs"
    for pem in /srv/haproxy/certs/*.pem; do
        [ -f "$pem" ] || continue
        n=$((n + 1))
        [ $n -gt 10 ] && break
        cert_name=$(basename "$pem" .pem)
        json_add_object ""
        json_add_string "name" "$cert_name"
        json_add_string "expiry" "valid"
        json_add_int "days_left" 365
        json_add_string "status" "valid"
        json_close_object
    done
    json_close_array
    json_dump
}
# List enabled HAProxy vhosts (capped at 20) as a JSON array of
# {domain, enabled} objects. Sections without a domain or not enabled=1
# are skipped.
get_vhosts() {
    local section domain enabled added=0
    json_init
    json_add_array "vhosts"
    for section in $(uci show haproxy 2>/dev/null | grep "=vhost$" | head -20 | cut -d. -f2 | cut -d= -f1); do
        domain=$(uci -q get "haproxy.$section.domain")
        enabled=$(uci -q get "haproxy.$section.enabled")
        [ -n "$domain" ] || continue
        [ "$enabled" = "1" ] || continue
        json_add_object ""
        json_add_string "domain" "$domain"
        json_add_boolean "enabled" 1
        json_close_object
        added=$((added + 1))
        [ $added -ge 20 ] && break
    done
    json_close_array
    json_dump
}
# List metablogizer sites (capped at 20) as a JSON array.
# "running" is inferred from a listening TCP socket on the site's port.
get_metablogs() {
    local section site domain port enabled is_up
    json_init
    json_add_array "sites"
    for section in $(uci show metablogizer 2>/dev/null | grep "=site$" | head -20 | cut -d. -f2 | cut -d= -f1); do
        site=${section#site_}
        domain=$(uci -q get "metablogizer.$section.domain")
        port=$(uci -q get "metablogizer.$section.port")
        enabled=$(uci -q get "metablogizer.$section.enabled")
        is_up=0
        netstat -tln 2>/dev/null | grep -q ":${port:-0} " && is_up=1
        json_add_object ""
        json_add_string "name" "$site"
        json_add_string "domain" "$domain"
        json_add_int "port" "${port:-0}"
        json_add_boolean "enabled" "${enabled:-0}"
        json_add_boolean "running" "$is_up"
        json_close_object
    done
    json_close_array
    json_dump
}
# List streamlit instances (capped at 20) as a JSON array.
# "running" is inferred from a streamlit process mentioning the port.
get_streamlits() {
    local section app domain port enabled is_up
    json_init
    json_add_array "apps"
    for section in $(uci show streamlit 2>/dev/null | grep "=instance$" | head -20 | cut -d. -f2 | cut -d= -f1); do
        app="$section"
        domain=$(uci -q get "streamlit.$section.domain")
        port=$(uci -q get "streamlit.$section.port")
        enabled=$(uci -q get "streamlit.$section.enabled")
        is_up=0
        pgrep -f "streamlit.*$port" >/dev/null 2>&1 && is_up=1
        json_add_object ""
        json_add_string "name" "$app"
        json_add_string "domain" "$domain"
        json_add_int "port" "${port:-0}"
        json_add_boolean "enabled" "${enabled:-0}"
        json_add_boolean "running" "$is_up"
        json_close_object
    done
    json_close_array
    json_dump
}
# Firewall stats are not wired up yet; emit a fixed zeroed structure so the
# frontend contract stays stable.
get_firewall_stats() {
    printf '%s' '{"iptables_drops":0,"nft_drops":0,"bouncer_blocks":0}'
}
# "all" RPC method: return the entire cache blob in a single call.
get_all() {
    get_cached
}
# Cron refresh entry point: rebuild the cache synchronously and report when.
do_refresh() {
    build_cache
    printf 'Cache refreshed at %s\n' "$(date)"
}
# rpcd plugin dispatch: "list" advertises the method table, "call <method>"
# executes one getter; anything else falls through silently.
case "$1" in
    list)
        echo '{"overview":{},"certs":{},"vhosts":{},"metablogs":{},"streamlits":{},"waf_stats":{},"connections":{},"firewall_stats":{},"all":{},"refresh":{}}'
        ;;
    call)
        case "$2" in
            overview)       get_overview ;;
            certs)          get_certs ;;
            vhosts)         get_vhosts ;;
            metablogs)      get_metablogs ;;
            streamlits)     get_streamlits ;;
            waf_stats)      get_waf_stats ;;
            connections)    get_connections ;;
            firewall_stats) get_firewall_stats ;;
            all)            get_all ;;
            refresh)        do_refresh ;;
            *)              echo '{"error":"Unknown method"}' ;;
        esac
        ;;
esac