secubox-openwrt/package/secubox/luci-app-metrics-dashboard/root/usr/libexec/rpcd/luci.metrics
CyberMind-FR 434e501dae fix(metrics): Use date -r for OpenWrt file mtime and fix grep -c double output
- Replace stat -c %Y with date -r for BusyBox compatibility (stat not available)
- Fix get_cache_age() to properly return early when cache file missing
- Fix grep -c || echo 0 pattern that caused "invalid number '0\n0'" errors
- Add proper numeric defaults using : "${var:=0}" pattern
- Add freshness metadata (_freshness) with age, timestamp, and fresh boolean

Co-Authored-By: Claude Opus 4.5 <noreply@anthropic.com>
2026-03-20 11:40:04 +01:00

324 lines
10 KiB
Bash
Executable File

#!/bin/sh
# SecuBox Metrics Dashboard - RPCD Backend
# Double-buffered pre-cached stats for instant response
. /usr/share/libubox/jshn.sh

# Cache location and freshness window (seconds).
CACHE_DIR="/tmp/secubox"
CACHE_FILE="$CACHE_DIR/metrics-cache.json"
CACHE_TTL=30

# mkdir -p is idempotent: creates the cache dir only when it is missing.
mkdir -p "$CACHE_DIR"
# Succeed (0) only when the cache file exists and is younger than CACHE_TTL.
# date -r (file mtime) is used because BusyBox has no stat applet.
cache_is_fresh() {
  [ -f "$CACHE_FILE" ] || return 1
  local now mtime
  now=$(date +%s)
  mtime=$(date -r "$CACHE_FILE" +%s 2>/dev/null || echo 0)
  [ $((now - mtime)) -lt "$CACHE_TTL" ]
}
# Print the cache file's age in whole seconds; prints 999 when no cache exists.
get_cache_age() {
  [ -f "$CACHE_FILE" ] || { echo 999; return; }
  local now mtime
  now=$(date +%s)
  mtime=$(date -r "$CACHE_FILE" +%s 2>/dev/null || echo 0)
  echo $((now - mtime))
}
# Build the "overview" JSON object: system stats plus service states and
# deployment counts. Writes a single-line JSON object to stdout.
build_overview() {
  local uptime load mem_total mem_free mem_used mem_pct
  local haproxy_up mitmproxy_up crowdsec_up
  local vhost_count metablog_count streamlit_count cert_count lxc_running

  uptime=$(cut -d. -f1 /proc/uptime 2>/dev/null || echo 0)
  load=$(cut -d' ' -f1-3 /proc/loadavg 2>/dev/null || echo "0 0 0")
  mem_total=$(awk '/MemTotal/ {print $2}' /proc/meminfo 2>/dev/null || echo 0)
  mem_free=$(awk '/MemAvailable/ {print $2}' /proc/meminfo 2>/dev/null || echo 0)
  # awk prints nothing (but exits 0) when the field is absent, e.g. kernels
  # without MemAvailable — coerce to 0 so $(( )) and printf %d never see "".
  : "${uptime:=0}"; : "${mem_total:=0}"; : "${mem_free:=0}"
  if [ "$mem_total" -gt 0 ]; then
    mem_used=$((mem_total - mem_free))
    mem_pct=$((mem_used * 100 / mem_total))
  else
    mem_used=0
    mem_pct=0
  fi

  # Service liveness probes.
  haproxy_up=false
  lxc-info -n haproxy -s 2>/dev/null | grep -q RUNNING && haproxy_up=true
  mitmproxy_up=false
  lxc-info -n mitmproxy-in -s 2>/dev/null | grep -q RUNNING && mitmproxy_up=true
  crowdsec_up=false
  pgrep crowdsec >/dev/null 2>&1 && crowdsec_up=true

  # Deployment counts. grep -c prints its own 0 on no match; '|| :' only
  # neutralizes the non-zero exit status without emitting a second "0".
  vhost_count=$(uci show haproxy 2>/dev/null | grep -c '=vhost$' || :)
  metablog_count=$(uci show metablogizer 2>/dev/null | grep -c '=site$' || :)
  streamlit_count=$(uci show streamlit 2>/dev/null | grep -c '=instance$' || :)
  cert_count=$(ls /srv/haproxy/certs/*.pem 2>/dev/null | wc -l || :)
  lxc_running=$(lxc-ls --running 2>/dev/null | wc -w || :)
  # Ensure numeric defaults
  : "${vhost_count:=0}"; : "${metablog_count:=0}"; : "${streamlit_count:=0}"
  : "${cert_count:=0}"; : "${lxc_running:=0}"

  printf '{"uptime":%d,"load":"%s","mem_total_kb":%d,"mem_used_kb":%d,"mem_pct":%d,"haproxy":%s,"mitmproxy":%s,"crowdsec":%s,"vhosts":%d,"metablogs":%d,"streamlits":%d,"certificates":%d,"lxc_containers":%d}' \
    "$uptime" "$load" "$mem_total" "$mem_used" "$mem_pct" \
    "$haproxy_up" "$mitmproxy_up" "$crowdsec_up" \
    "$vhost_count" "$metablog_count" "$streamlit_count" "$cert_count" "$lxc_running"
}
# Build the "waf" JSON object: CrowdSec / mitmproxy liveness plus ban,
# alert and WAF-block counters. Writes single-line JSON to stdout.
build_waf_stats() {
  local cs_running=false mitmproxy_running=false
  local bans=0 alerts_today=0 waf_blocked=0
  pgrep crowdsec >/dev/null 2>&1 && cs_running=true
  pgrep -f mitmdump >/dev/null 2>&1 && mitmproxy_running=true
  case "$cs_running" in
  true)
    # Counters only make sense while CrowdSec answers; grep -c already
    # prints 0 on no match, '|| :' just swallows its non-zero status.
    bans=$(cscli decisions list -o json 2>/dev/null | grep -c '"id"' || :)
    alerts_today=$(cscli alerts list --since 24h -o json 2>/dev/null | grep -c '"id"' || :)
    # WAF blocks = mitmproxy scenario decisions
    waf_blocked=$(cscli decisions list -o json 2>/dev/null | grep -c 'mitmproxy' || :)
    : "${bans:=0}"; : "${alerts_today:=0}"; : "${waf_blocked:=0}"
    ;;
  esac
  printf '{"crowdsec_running":%s,"mitmproxy_running":%s,"active_bans":%d,"alerts_today":%d,"waf_blocked":%d}' \
    "$cs_running" "$mitmproxy_running" "$bans" "$alerts_today" "$waf_blocked"
}
# Build the "connections" JSON object: established TCP connection counts
# per well-known port plus the overall total. Single-line JSON on stdout.
build_connections() {
  local http_conns https_conns ssh_conns total_tcp
  # Count established connections matching a netstat pattern; grep -c
  # prints the count itself, '|| :' only discards the non-zero status.
  _count_estab() {
    netstat -an 2>/dev/null | grep -c "$1" || :
  }
  http_conns=$(_count_estab ":80 .*ESTABLISHED")
  https_conns=$(_count_estab ":443 .*ESTABLISHED")
  ssh_conns=$(_count_estab ":22 .*ESTABLISHED")
  total_tcp=$(_count_estab "ESTABLISHED")
  # Ensure numeric defaults
  : "${http_conns:=0}"; : "${https_conns:=0}"; : "${ssh_conns:=0}"; : "${total_tcp:=0}"
  printf '{"http":%d,"https":%d,"ssh":%d,"total_tcp":%d}' \
    "$http_conns" "$https_conns" "$ssh_conns" "$total_tcp"
}
# Assemble the full metrics document (overview + waf + connections plus
# both ISO and epoch timestamps) and write it to CACHE_FILE.
build_cache() {
  local ov wf cn stamp epoch
  ov=$(build_overview)
  wf=$(build_waf_stats)
  cn=$(build_connections)
  stamp=$(date -Iseconds)
  epoch=$(date +%s)
  printf '{"overview":%s,"waf":%s,"connections":%s,"timestamp":"%s","timestamp_epoch":%d}' \
    "$ov" "$wf" "$cn" "$stamp" "$epoch" > "$CACHE_FILE"
}
# Kick off a background rebuild when the cache is stale; returns immediately
# either way so callers never block on the rebuild.
refresh_cache_async() {
  cache_is_fresh && return 0
  ( build_cache ) &
}
# Serve the cached document, building it synchronously on first use.
# An existing (possibly stale) cache is served instantly; a background
# refresh is triggered afterwards so the next caller gets fresh data.
get_cached() {
  if [ -f "$CACHE_FILE" ]; then
    cat "$CACHE_FILE"
    refresh_cache_async
    return
  fi
  build_cache
  cat "$CACHE_FILE"
}
# Emit cache-freshness metadata as JSON:
#   age (seconds), timestamp (ISO string), timestamp_epoch, fresh (boolean,
#   true when age < CACHE_TTL). Missing cache reports age 999 / not fresh.
get_freshness() {
  local age ts ts_epoch fresh
  if [ -f "$CACHE_FILE" ]; then
    age=$(get_cache_age)
    ts=$(jsonfilter -i "$CACHE_FILE" -e '@.timestamp' 2>/dev/null | tr -d '\n')
    ts_epoch=$(jsonfilter -i "$CACHE_FILE" -e '@.timestamp_epoch' 2>/dev/null | tr -d '\n')
  else
    age=999
    ts=""
    ts_epoch=0
  fi
  # Coerce anything non-numeric (empty, "null", garbage from a corrupt
  # cache) to safe defaults so the %d printf below cannot fail. This
  # replaces the fragile `[ A ] || [ B ] && C` chain, which only worked
  # because shell && and || share precedence.
  case "$age" in ''|*[!0-9]*) age=999 ;; esac
  case "$ts_epoch" in ''|*[!0-9]*) ts_epoch=0 ;; esac
  # Determine freshness; age is guaranteed numeric at this point.
  if [ "$age" -lt "$CACHE_TTL" ]; then
    fresh=true
  else
    fresh=false
  fi
  printf '{"age":%d,"timestamp":"%s","timestamp_epoch":%d,"fresh":%s}' \
    "$age" "$ts" "$ts_epoch" "$fresh"
}
# Fast getters from cache - now with freshness metadata
# Serve the cached "overview" object (computing it live when the cache is
# missing or empty) with a "_freshness" member spliced in before the
# closing brace. ${data%\}} drops the trailing '}' in pure shell instead
# of forking sed per call.
get_overview() {
  local data freshness
  if [ -f "$CACHE_FILE" ]; then
    data=$(jsonfilter -i "$CACHE_FILE" -e '@.overview' 2>/dev/null)
    [ -z "$data" ] && data=$(build_overview)
  else
    data=$(build_overview)
  fi
  freshness=$(get_freshness)
  printf '%s,"_freshness":%s}' "${data%\}}" "$freshness"
}
# Serve the cached "waf" object (computing it live when the cache is
# missing or empty) with "_freshness" appended. Uses ${data%\}} instead
# of a per-call sed fork to strip the closing brace.
get_waf_stats() {
  local data freshness
  if [ -f "$CACHE_FILE" ]; then
    data=$(jsonfilter -i "$CACHE_FILE" -e '@.waf' 2>/dev/null)
    [ -z "$data" ] && data=$(build_waf_stats)
  else
    data=$(build_waf_stats)
  fi
  freshness=$(get_freshness)
  printf '%s,"_freshness":%s}' "${data%\}}" "$freshness"
}
# Serve the cached "connections" object (computing it live when the cache
# is missing or empty) with "_freshness" appended. Uses ${data%\}} instead
# of a per-call sed fork to strip the closing brace.
get_connections() {
  local data freshness
  if [ -f "$CACHE_FILE" ]; then
    data=$(jsonfilter -i "$CACHE_FILE" -e '@.connections' 2>/dev/null)
    [ -z "$data" ] && data=$(build_connections)
  else
    data=$(build_connections)
  fi
  freshness=$(get_freshness)
  printf '%s,"_freshness":%s}' "${data%\}}" "$freshness"
}
# Simple getters (less critical, can compute)
# List up to 10 HAProxy certificates as a JSON array via jshn.
# NOTE(review): "expiry"/"days_left"/"status" are static placeholders,
# not parsed from the PEM files — confirm before relying on them.
get_certs() {
  local pem cert_name n=0
  json_init
  json_add_array "certs"
  for pem in /srv/haproxy/certs/*.pem; do
    [ -f "$pem" ] || continue
    n=$((n + 1))
    [ "$n" -gt 10 ] && break
    # basename without a fork: strip directory, then the .pem suffix.
    cert_name=${pem##*/}
    cert_name=${cert_name%.pem}
    json_add_object ""
    json_add_string "name" "$cert_name"
    json_add_string "expiry" "valid"
    json_add_int "days_left" 365
    json_add_string "status" "valid"
    json_close_object
  done
  json_close_array
  json_dump
}
# List enabled HAProxy vhost domains (first 20 sections, up to 20 entries)
# as a JSON array via jshn. Disabled or domain-less sections are skipped.
get_vhosts() {
  local section domain enabled n=0
  json_init
  json_add_array "vhosts"
  for section in $(uci show haproxy 2>/dev/null | grep "=vhost$" | head -20 | cut -d. -f2 | cut -d= -f1); do
    domain=$(uci -q get "haproxy.$section.domain")
    enabled=$(uci -q get "haproxy.$section.enabled")
    [ -n "$domain" ] || continue
    [ "$enabled" = "1" ] || continue
    json_add_object ""
    json_add_string "domain" "$domain"
    json_add_boolean "enabled" 1
    json_close_object
    n=$((n + 1))
    [ "$n" -ge 20 ] && break
  done
  json_close_array
  json_dump
}
# List metablogizer sites (first 20 sections) as a JSON array via jshn,
# with netstat-based "running" detection on each site's port.
get_metablogs() {
  local section name domain port enabled running
  json_init
  json_add_array "sites"
  for section in $(uci show metablogizer 2>/dev/null | grep "=site$" | head -20 | cut -d. -f2 | cut -d= -f1); do
    # Strip the "site_" prefix in-shell instead of forking echo | sed.
    name=${section#site_}
    domain=$(uci -q get "metablogizer.$section.domain")
    port=$(uci -q get "metablogizer.$section.port")
    enabled=$(uci -q get "metablogizer.$section.enabled")
    running=0
    netstat -tln 2>/dev/null | grep -q ":${port:-0} " && running=1
    json_add_object ""
    json_add_string "name" "$name"
    json_add_string "domain" "$domain"
    json_add_int "port" "${port:-0}"
    json_add_boolean "enabled" "${enabled:-0}"
    json_add_boolean "running" "$running"
    json_close_object
  done
  json_close_array
  json_dump
}
# List Streamlit app instances (first 20 sections) as a JSON array via
# jshn, with pgrep-based liveness detection against the configured port.
get_streamlits() {
  local inst domain port enabled alive
  json_init
  json_add_array "apps"
  for inst in $(uci show streamlit 2>/dev/null | grep "=instance$" | head -20 | cut -d. -f2 | cut -d= -f1); do
    domain=$(uci -q get "streamlit.$inst.domain")
    port=$(uci -q get "streamlit.$inst.port")
    enabled=$(uci -q get "streamlit.$inst.enabled")
    alive=0
    pgrep -f "streamlit.*$port" >/dev/null 2>&1 && alive=1
    json_add_object ""
    json_add_string "name" "$inst"
    json_add_string "domain" "$domain"
    json_add_int "port" "${port:-0}"
    json_add_boolean "enabled" "${enabled:-0}"
    json_add_boolean "running" "$alive"
    json_close_object
  done
  json_close_array
  json_dump
}
# Firewall drop counters — static zero placeholders until real counters
# (iptables/nft/bouncer) are wired in.
get_firewall_stats() {
  printf '%s' '{"iptables_drops":0,"nft_drops":0,"bouncer_blocks":0}'
}
# Full metrics document (overview + waf + connections), served from cache.
get_all() {
  get_cached "$@"
}
# Cron entry point: force a synchronous cache rebuild and report when.
do_refresh() {
  build_cache
  printf 'Cache refreshed at %s\n' "$(date)"
}
# ---- rpcd ubus dispatch: $1 is "list" or "call", $2 the method name ----
main() {
  case "$1" in
  list)
    # Advertise available methods (all take no arguments).
    echo '{"overview":{},"certs":{},"vhosts":{},"metablogs":{},"streamlits":{},"waf_stats":{},"connections":{},"firewall_stats":{},"all":{},"refresh":{}}'
    ;;
  call)
    case "$2" in
    overview) get_overview ;;
    certs) get_certs ;;
    vhosts) get_vhosts ;;
    metablogs) get_metablogs ;;
    streamlits) get_streamlits ;;
    waf_stats) get_waf_stats ;;
    connections) get_connections ;;
    firewall_stats) get_firewall_stats ;;
    all) get_all ;;
    refresh) do_refresh ;;
    *) echo '{"error":"Unknown method"}' ;;
    esac
    ;;
  esac
}
main "$@"