secubox-openwrt/package/secubox/luci-app-cdn-cache/root/usr/libexec/rpcd/luci.cdn-cache
CyberMind-FR 189715e2ee feat(cdn-cache): Add MITM SSL bump support for HTTPS caching
- Generate CA certificate for SSL bumping
- Initialize SSL certificate database with security_file_certgen
- Selective SSL bump: only cache-worthy domains (Windows Update, Steam, etc.)
- Exclude security-sensitive sites (banking, Google accounts, etc.)
- Proper firewall integration for both HTTP and HTTPS redirect
- RPCD methods for CA cert download and SSL bump control

Ports:
- 3128: Forward proxy
- 3129: HTTP transparent intercept
- 3130: HTTPS SSL bump intercept

Co-Authored-By: Claude Opus 4.5 <noreply@anthropic.com>
2026-01-30 15:11:14 +01:00

944 lines
22 KiB
Bash
Executable File

#!/bin/sh
# SPDX-License-Identifier: Apache-2.0
# CDN Cache RPCD Backend for SecuBox
# Copyright (C) 2025 CyberMind.fr
. /lib/functions.sh
. /usr/share/libubox/jshn.sh
get_pkg_version() {
    # Read the installed package version from the opkg control file;
    # report "unknown" when the package metadata is absent.
    local ctrl="/usr/lib/opkg/info/luci-app-cdn-cache.control"
    [ -f "$ctrl" ] || { echo "unknown"; return; }
    awk -F': ' '/^Version/ { print $2; exit }' "$ctrl"
}
# Resolved once at load time; reported by get_status.
PKG_VERSION="$(get_pkg_version)"
# On-disk cache root; UCI-configurable with a /var (tmpfs) fallback.
CACHE_DIR=$(uci -q get cdn-cache.main.cache_dir || echo "/var/cache/cdn-squid")
# Zeroed JSON counters, (re)created by init_stats / clear_stats / purge_cache.
STATS_FILE="/var/run/cdn-cache-stats.json"
LOG_DIR="/var/log/cdn-cache"
LOG_FILE="$LOG_DIR/cache.log"
# Generated Squid configuration (presumably written by the init script,
# given the /var/etc path — TODO confirm).
SQUID_CONF="/var/etc/cdn-cache-squid.conf"
# CA certificate used for SSL bump; served to clients by get_ca_cert.
CA_CERT="/etc/squid/ssl/ca.pem"
# Seed the JSON stats file with zeroed counters on first use; leave an
# existing file untouched.
init_stats() {
    [ -f "$STATS_FILE" ] && return
    printf '%s\n' '{"hits":0,"misses":0,"bytes_saved":0,"bytes_served":0,"requests":0,"start_time":0}' > "$STATS_FILE"
}
# Dump Squid's cache-manager "info" page when squidclient is available;
# produces no output otherwise.  (The previous mgr_url variable was
# computed — including a uci call — but never used; removed.)
get_squid_info() {
    if command -v squidclient >/dev/null 2>&1; then
        squidclient -h localhost mgr:info 2>/dev/null
    fi
}
# Get service status
# Collect the overall service state — enablement, process liveness/uptime,
# cache directory usage, listener configuration and SSL-bump/CA
# availability — and emit it as one JSON object.
get_status() {
    local enabled=$(uci -q get cdn-cache.main.enabled || echo "0")
    local running=0
    local pid=""
    local uptime=0
    local cache_size=0
    local cache_files=0
    local squid_installed=0
    local backend="squid"
    # Check if Squid is installed
    if command -v squid >/dev/null 2>&1; then
        squid_installed=1
    fi
    # Check if Squid is running (cdn-cache instance): trust the pidfile first.
    if [ -f "/var/run/cdn-cache.pid" ]; then
        pid=$(cat /var/run/cdn-cache.pid 2>/dev/null)
        if [ -n "$pid" ] && kill -0 "$pid" 2>/dev/null; then
            running=1
            # Uptime is approximated from the pidfile's mtime.
            local start_time=$(stat -c %Y "/var/run/cdn-cache.pid" 2>/dev/null || echo "0")
            local now=$(date +%s)
            uptime=$((now - start_time))
        fi
    fi
    # Fallback: check by process name (pidfile stale or missing)
    if [ "$running" = "0" ]; then
        pid=$(pgrep -f "squid.*cdn-cache" | head -1)
        if [ -n "$pid" ]; then
            running=1
        fi
    fi
    # Get cache directory stats (size in KB, file count)
    if [ -d "$CACHE_DIR" ]; then
        cache_size=$(du -sk "$CACHE_DIR" 2>/dev/null | cut -f1 || echo "0")
        cache_files=$(find "$CACHE_DIR" -type f 2>/dev/null | wc -l || echo "0")
    fi
    local listen_port=$(uci -q get cdn-cache.main.listen_port || echo "3128")
    local transparent=$(uci -q get cdn-cache.main.transparent || echo "1")
    local ssl_bump=$(uci -q get cdn-cache.main.ssl_bump || echo "0")
    # NOTE(review): fallback cache_size here is 2048 MB but get_cache_size
    # falls back to 1024 MB — confirm which default the init script applies.
    local max_size=$(uci -q get cdn-cache.main.cache_size || echo "2048")
    # Get Squid version if installed
    local squid_version=""
    if [ "$squid_installed" = "1" ]; then
        squid_version=$(squid -v 2>/dev/null | head -1 | sed 's/.*Version //' | cut -d' ' -f1)
    fi
    # Check if CA cert exists (prerequisite for SSL bump)
    local ca_cert_exists=0
    [ -f "$CA_CERT" ] && ca_cert_exists=1
    json_init
    json_add_string "version" "$PKG_VERSION"
    json_add_string "backend" "$backend"
    json_add_string "squid_version" "$squid_version"
    json_add_boolean "squid_installed" "$squid_installed"
    json_add_boolean "enabled" "$enabled"
    json_add_boolean "running" "$running"
    json_add_string "pid" "$pid"
    json_add_int "uptime" "$uptime"
    json_add_int "cache_size_kb" "$cache_size"
    json_add_int "cache_files" "$cache_files"
    json_add_int "max_size_mb" "$max_size"
    json_add_int "listen_port" "$listen_port"
    json_add_boolean "transparent" "$transparent"
    json_add_boolean "ssl_bump" "$ssl_bump"
    json_add_boolean "ca_cert_exists" "$ca_cert_exists"
    json_add_string "cache_dir" "$CACHE_DIR"
    json_dump
}
# Get cache statistics from Squid access log
# Emits hit/miss counts, hit ratio (%), total requests, bytes served and an
# estimated bytes-saved figure derived from the average object size.
get_stats() {
    init_stats
    local hits=0
    local misses=0
    local requests=0
    local bytes_served=0
    local bytes_saved=0
    local access_log="$LOG_DIR/access.log"
    if [ -f "$access_log" ]; then
        # Squid native log format: timestamp elapsed client result/code size method url ...
        # grep -c prints its own "0" on zero matches but exits non-zero, so the
        # old `|| echo "0"` appended a second "0" and corrupted the value.
        # Default only when the output is empty (e.g. unreadable file).
        hits=$(grep -c "TCP_HIT\|TCP_MEM_HIT\|TCP_REFRESH_HIT\|TCP_IMS_HIT" "$access_log" 2>/dev/null)
        hits=${hits:-0}
        misses=$(grep -c "TCP_MISS\|TCP_REFRESH_MISS\|TCP_CLIENT_REFRESH_MISS" "$access_log" 2>/dev/null)
        misses=${misses:-0}
        requests=$(wc -l < "$access_log" 2>/dev/null)
        requests=${requests:-0}
        # Sum bytes served (field 5); `sum+0` forces numeric 0 on an empty log.
        bytes_served=$(awk '{sum += $5} END {print sum+0}' "$access_log" 2>/dev/null)
        bytes_served=${bytes_served:-0}
        # Estimate bytes saved: cached hits did not have to be re-downloaded,
        # approximated as hits * average object size.
        local avg_size=0
        if [ "$requests" -gt 0 ]; then
            avg_size=$((bytes_served / requests))
        fi
        bytes_saved=$((hits * avg_size))
    fi
    local total=$((hits + misses))
    local hit_ratio=0
    if [ "$total" -gt 0 ]; then
        hit_ratio=$((hits * 100 / total))
    fi
    # Convert to human readable (MB)
    local saved_mb=$((bytes_saved / 1048576))
    local served_mb=$((bytes_served / 1048576))
    json_init
    json_add_int "hits" "$hits"
    json_add_int "misses" "$misses"
    json_add_int "hit_ratio" "$hit_ratio"
    json_add_int "requests" "$requests"
    json_add_int "bytes_saved" "$bytes_saved"
    json_add_int "bytes_served" "$bytes_served"
    json_add_int "saved_mb" "$saved_mb"
    json_add_int "served_mb" "$served_mb"
    json_dump
}
# Get cache content list
# Emit up to the 100 most recently modified cached objects with size, age
# and the domain inferred from the parent directory name.
get_cache_list() {
    json_init
    json_add_array "items"
    if [ -d "$CACHE_DIR" ]; then
        local tmp now size mtime path filename domain age
        tmp=$(mktemp)
        # Hoisted out of the loop: one clock read for all age computations.
        now=$(date +%s)
        # Collect into a temp file first: a `find | while` pipeline would run
        # the loop in a subshell and silently discard every json_add_* call,
        # leaving the array empty.
        find "$CACHE_DIR" -type f -printf '%s %T@ %p\n' 2>/dev/null | \
            sort -k2 -rn | head -100 > "$tmp"
        while read -r size mtime path; do
            filename=$(basename "$path")
            # Parent directory name stands in for the origin domain.
            domain=$(echo "$path" | sed -n 's|.*/\([^/]*\)/[^/]*$|\1|p')
            # %T@ has a fractional part; strip it before integer arithmetic.
            age=$((now - ${mtime%.*}))
            json_add_object ""
            json_add_string "filename" "$filename"
            json_add_string "domain" "$domain"
            json_add_int "size" "$size"
            json_add_int "age" "$age"
            json_add_string "path" "$path"
            json_close_object
        done < "$tmp"
        rm -f "$tmp"
    fi
    json_close_array
    json_dump
}
# Get top domains by cache usage (parsed from Squid access log)
# Aggregates per request host: request count, bytes transferred and cache
# hits; emits the top 20 by request count.
get_top_domains() {
    json_init
    json_add_array "domains"
    local access_log="$LOG_DIR/access.log"
    if [ -f "$access_log" ]; then
        local tmp count bytes hits domain size_kb
        tmp=$(mktemp)
        # Squid native log fields: $4 = result/status, $5 = bytes, $7 = URL.
        # Strip scheme, path and port to isolate the host.  `+0` forces a
        # numeric hit count: an unset domain_hits[d] would otherwise print an
        # empty field, collapsing the line to 3 fields and mis-parsing `read`.
        awk '
        {
            url = $7
            bytes = $5
            status = $4
            gsub(/^https?:\/\//, "", url)
            gsub(/\/.*$/, "", url)
            gsub(/:[0-9]+$/, "", url)
            if (url != "" && url != "-") {
                domains[url]++
                domain_bytes[url] += bytes
                if (status ~ /HIT/) {
                    domain_hits[url]++
                }
            }
        }
        END {
            for (d in domains) {
                print domains[d], domain_bytes[d], domain_hits[d] + 0, d
            }
        }
        ' "$access_log" 2>/dev/null | sort -rn | head -20 > "$tmp"
        # Loop in the current shell (not a pipeline subshell) so the
        # json_add_* calls are not lost.
        while read -r count bytes hits domain; do
            size_kb=$((bytes / 1024))
            json_add_object ""
            json_add_string "domain" "$domain"
            json_add_int "requests" "$count"
            json_add_int "size_kb" "$size_kb"
            json_add_int "hits" "${hits:-0}"
            json_close_object
        done < "$tmp"
        rm -f "$tmp"
    fi
    json_close_array
    json_dump
}
# Get bandwidth savings over time
# Emits a {timestamp, saved_mb, total_mb} time series.  Values are still
# simulated placeholders (real log aggregation is TODO).  $RANDOM is a
# bashism that busybox ash may not provide under #!/bin/sh (it would
# evaluate to 0 and flatten the series), so a small LCG supplies the jitter.
get_bandwidth_savings() {
    local period="${1:-24h}"
    json_init
    json_add_string "period" "$period"
    json_add_array "data"
    local now points interval
    now=$(date +%s)
    case "$period" in
        "7d") points=168; interval=3600 ;;
        "30d") points=30; interval=86400 ;;
        *) points=24; interval=3600 ;;
    esac
    local i=0 ts saved total
    local seed=$((now % 65521))
    while [ "$i" -lt "$points" ]; do
        ts=$((now - (points - i) * interval))
        # glibc-style LCG constants; range-reduced like the old RANDOM usage.
        seed=$(( (seed * 1103515245 + 12345) % 2147483648 ))
        saved=$((seed % 100 + 10))
        seed=$(( (seed * 1103515245 + 12345) % 2147483648 ))
        total=$((saved + seed % 50 + 20))
        json_add_object ""
        json_add_int "timestamp" "$ts"
        json_add_int "saved_mb" "$saved"
        json_add_int "total_mb" "$total"
        json_close_object
        i=$((i + 1))
    done
    json_close_array
    json_dump
}
# Get hit ratio over time
# Emits a {timestamp, ratio} time series.  Data is still simulated (real log
# aggregation is TODO).  $RANDOM is a bashism that busybox ash may lack
# under #!/bin/sh, so a small LCG generates the jitter instead.
get_hit_ratio() {
    local period="${1:-24h}"
    json_init
    json_add_string "period" "$period"
    json_add_array "data"
    local now points interval
    now=$(date +%s)
    case "$period" in
        "7d") points=168; interval=3600 ;;
        "30d") points=30; interval=86400 ;;
        *) points=24; interval=3600 ;;
    esac
    local i=0 ts ratio
    local seed=$((now % 65521))
    while [ "$i" -lt "$points" ]; do
        ts=$((now - (points - i) * interval))
        seed=$(( (seed * 1103515245 + 12345) % 2147483648 ))
        # Same 50-89% band as the old `RANDOM % 40 + 50`.
        ratio=$((seed % 40 + 50))
        json_add_object ""
        json_add_int "timestamp" "$ts"
        json_add_int "ratio" "$ratio"
        json_close_object
        i=$((i + 1))
    done
    json_close_array
    json_dump
}
# Get cache size info
# Report cache-directory usage (du) against the configured maximum.
# NOTE(review): the fallback maximum here is 1024 MB while get_status uses
# 2048 MB — confirm which default the init script actually applies.
get_cache_size() {
    local max_mb
    max_mb=$(uci -q get cdn-cache.main.cache_size || echo "1024")
    local max_kb=$((max_mb * 1024))
    local total_kb=0
    if [ -d "$CACHE_DIR" ]; then
        total_kb=$(du -sk "$CACHE_DIR" 2>/dev/null | cut -f1 || echo "0")
    fi
    local usage_pct=0
    if [ "$max_kb" -gt 0 ]; then
        usage_pct=$((total_kb * 100 / max_kb))
    fi
    json_init
    json_add_int "used_kb" "$total_kb"
    json_add_int "max_kb" "$max_kb"
    json_add_int "usage_percent" "$usage_pct"
    json_add_int "free_kb" "$((max_kb - total_kb))"
    json_dump
}
# Get configured policies
# Serialize every cache_policy UCI section into {"policies": [...]} by
# iterating with the _add_policy callback (uci helpers from
# /lib/functions.sh).
get_policies() {
    json_init
    json_add_array "policies"
    config_load cdn-cache
    config_foreach _add_policy cache_policy
    json_close_array
    json_dump
}
# config_foreach callback: serialize one cache_policy UCI section into the
# currently-open JSON array.  Defaults mirror add_policy (1440 min TTL,
# 512 MB max object size, priority 1).
_add_policy() {
    local section="$1"
    local enabled name domains extensions cache_time max_size priority
    config_get name "$section" name ""
    config_get domains "$section" domains ""
    config_get extensions "$section" extensions ""
    config_get_bool enabled "$section" enabled 0
    config_get cache_time "$section" cache_time 1440
    config_get max_size "$section" max_size 512
    config_get priority "$section" priority 1
    json_add_object ""
    json_add_string "id" "$section"
    json_add_boolean "enabled" "$enabled"
    json_add_string "name" "$name"
    json_add_string "domains" "$domains"
    json_add_string "extensions" "$extensions"
    json_add_int "cache_time" "$cache_time"
    json_add_int "max_size" "$max_size"
    json_add_int "priority" "$priority"
    json_close_object
}
# Get exclusions
# Serialize every exclusion UCI section into {"exclusions": [...]} by
# iterating with the _add_exclusion callback.
get_exclusions() {
    json_init
    json_add_array "exclusions"
    config_load cdn-cache
    config_foreach _add_exclusion exclusion
    json_close_array
    json_dump
}
# config_foreach callback: serialize one exclusion UCI section (domains that
# must never be cached/bumped, with a human-readable reason) into the
# currently-open JSON array.
_add_exclusion() {
    local section="$1"
    local enabled name domains reason
    config_get name "$section" name ""
    config_get domains "$section" domains ""
    config_get reason "$section" reason ""
    config_get_bool enabled "$section" enabled 0
    json_add_object ""
    json_add_string "id" "$section"
    json_add_boolean "enabled" "$enabled"
    json_add_string "name" "$name"
    json_add_string "domains" "$domains"
    json_add_string "reason" "$reason"
    json_close_object
}
# Get recent logs
# Emit the last N lines of the access log (preferred) or cache log as a
# JSON string array.
get_logs() {
    local count="${1:-50}"
    # Sanitize: count comes from an RPC caller and feeds `tail -n`.
    case "$count" in ''|*[!0-9]*) count=50 ;; esac
    json_init
    json_add_array "logs"
    # Access log first (per-request lines are more useful for a CDN cache).
    local src=""
    if [ -f "$LOG_DIR/access.log" ]; then
        src="$LOG_DIR/access.log"
    elif [ -f "$LOG_DIR/cache.log" ]; then
        src="$LOG_DIR/cache.log"
    fi
    if [ -n "$src" ]; then
        local tmp line
        tmp=$(mktemp)
        # Read via a temp file: `tail | while read` would run the loop in a
        # subshell and discard every json_add_string call (empty array bug).
        tail -n "$count" "$src" > "$tmp" 2>/dev/null
        while IFS= read -r line; do
            json_add_string "" "$line"
        done < "$tmp"
        rm -f "$tmp"
    fi
    json_close_array
    json_dump
}
# Set enabled state
# Persist the flag in UCI and start/stop the service accordingly.
set_enabled() {
    local enabled="${1:-0}"
    # Normalize to 0/1 so arbitrary RPC input never lands in UCI verbatim.
    [ "$enabled" = "1" ] || enabled=0
    uci set cdn-cache.main.enabled="$enabled"
    uci commit cdn-cache
    if [ "$enabled" = "1" ]; then
        /etc/init.d/cdn-cache start
    else
        /etc/init.d/cdn-cache stop
    fi
    json_init
    json_add_boolean "success" 1
    json_dump
}
# Purge entire cache
# Stop Squid, wipe the on-disk cache, truncate the access log, reset the
# stats file, rebuild Squid's cache structure and restart the service.
purge_cache() {
    /etc/init.d/cdn-cache stop 2>/dev/null
    if [ -d "$CACHE_DIR" ]; then
        # ${CACHE_DIR:?} aborts instead of expanding to "/*" if the variable
        # is ever empty — guards against wiping the filesystem root.
        rm -rf "${CACHE_DIR:?}"/*
        mkdir -p "$CACHE_DIR"
        chown squid:squid "$CACHE_DIR" 2>/dev/null
    fi
    # Clear access log so stats start from a clean slate
    [ -f "$LOG_DIR/access.log" ] && : > "$LOG_DIR/access.log"
    # Reset persisted counters
    cat > "$STATS_FILE" << 'EOF'
{"hits":0,"misses":0,"bytes_saved":0,"bytes_served":0,"requests":0,"start_time":0}
EOF
    # Reinitialize Squid cache structure (-z) and restart
    if [ -f "$SQUID_CONF" ]; then
        squid -f "$SQUID_CONF" -z 2>/dev/null
        sleep 2
    fi
    /etc/init.d/cdn-cache start 2>/dev/null
    logger -t cdn-cache "Cache purged by user"
    json_init
    json_add_boolean "success" 1
    json_add_string "message" "Cache purged successfully"
    json_dump
}
# Purge cache for specific domain
# The domain arrives from an RPC caller, so validate it before splicing it
# into an rm -rf path: reject empty names, anything containing a path
# separator and hidden/relative segments (e.g. "../../etc" traversal).
purge_domain() {
    local domain="$1"
    case "$domain" in
        ""|*/*|.*)
            json_init
            json_add_boolean "success" 0
            json_add_string "message" "Domain not found in cache"
            json_dump
            return
            ;;
    esac
    if [ -d "$CACHE_DIR/$domain" ]; then
        # ${CACHE_DIR:?} aborts if the variable is ever empty.
        rm -rf "${CACHE_DIR:?}/$domain"
        logger -t cdn-cache "Cache purged for domain: $domain"
        json_init
        json_add_boolean "success" 1
        json_add_string "message" "Cache purged for $domain"
        json_dump
    else
        json_init
        json_add_boolean "success" 0
        json_add_string "message" "Domain not found in cache"
        json_dump
    fi
}
# Purge expired entries
# Delete cached files older than the configured validity (minutes) and
# report how many were removed.
purge_expired() {
    local deleted=0
    local cache_valid
    cache_valid=$(uci -q get cdn-cache.main.cache_valid || echo "1440")
    # Guard against a non-numeric UCI value before handing it to find.
    case "$cache_valid" in ''|*[!0-9]*) cache_valid=1440 ;; esac
    if [ -d "$CACHE_DIR" ]; then
        # -mmin takes minutes directly; no seconds conversion is needed
        # (the old unused max_age variable has been dropped).
        deleted=$(find "$CACHE_DIR" -type f -mmin +"$cache_valid" -delete -print 2>/dev/null | wc -l)
    fi
    logger -t cdn-cache "Purged $deleted expired entries"
    json_init
    json_add_boolean "success" 1
    json_add_int "deleted" "$deleted"
    json_dump
}
# Preload URL into cache
# Warm the cache with a URL.  The request is routed through the local proxy
# and fully downloaded (to /dev/null): the previous `wget --spider` sent a
# direct HEAD request that never touched the proxy, so nothing was cached.
preload_url() {
    local url="$1"
    if [ -z "$url" ]; then
        json_init
        json_add_boolean "success" 0
        json_add_string "message" "No URL provided"
        json_dump
        return
    fi
    local port
    port=$(uci -q get cdn-cache.main.listen_port || echo "3128")
    if http_proxy="http://127.0.0.1:$port" https_proxy="http://127.0.0.1:$port" \
        wget -q -O /dev/null "$url" 2>/dev/null; then
        logger -t cdn-cache "Preloaded: $url"
        json_init
        json_add_boolean "success" 1
        json_add_string "message" "URL preloaded"
        json_dump
    else
        json_init
        json_add_boolean "success" 0
        json_add_string "message" "Failed to preload URL"
        json_dump
    fi
}
# Add new policy
# Create a new cache_policy UCI section.  The PID ($$) is appended to the
# epoch so two adds within the same second cannot collide on the same id.
add_policy() {
    local name="$1"
    local domains="$2"
    local extensions="$3"
    local cache_time="${4:-1440}"
    local max_size="${5:-512}"
    local section="policy_$(date +%s)_$$"
    uci set "cdn-cache.$section=cache_policy"
    uci set "cdn-cache.$section.enabled=1"
    uci set "cdn-cache.$section.name=$name"
    uci set "cdn-cache.$section.domains=$domains"
    uci set "cdn-cache.$section.extensions=$extensions"
    uci set "cdn-cache.$section.cache_time=$cache_time"
    uci set "cdn-cache.$section.max_size=$max_size"
    uci set "cdn-cache.$section.priority=5"
    uci commit cdn-cache
    json_init
    json_add_boolean "success" 1
    json_add_string "id" "$section"
    json_dump
}
# Remove policy
# The id arrives from an RPC caller: restrict it to UCI section-name
# characters and require the section to actually be a cache_policy, so a
# caller cannot delete arbitrary config (e.g. "main") through this method.
remove_policy() {
    local id="$1"
    case "$id" in
        ""|*[!A-Za-z0-9_]*)
            json_init
            json_add_boolean "success" 0
            json_dump
            return
            ;;
    esac
    if [ "$(uci -q get "cdn-cache.$id")" != "cache_policy" ]; then
        json_init
        json_add_boolean "success" 0
        json_dump
        return
    fi
    uci delete "cdn-cache.$id"
    uci commit cdn-cache
    json_init
    json_add_boolean "success" 1
    json_dump
}
# Add exclusion
# Create a new exclusion UCI section.  The PID ($$) is appended to the
# epoch so two adds within the same second cannot collide on the same id.
add_exclusion() {
    local name="$1"
    local domains="$2"
    local reason="$3"
    local section="exclusion_$(date +%s)_$$"
    uci set "cdn-cache.$section=exclusion"
    uci set "cdn-cache.$section.enabled=1"
    uci set "cdn-cache.$section.name=$name"
    uci set "cdn-cache.$section.domains=$domains"
    uci set "cdn-cache.$section.reason=$reason"
    uci commit cdn-cache
    json_init
    json_add_boolean "success" 1
    json_add_string "id" "$section"
    json_dump
}
# Remove exclusion
# Same hardening as remove_policy: restrict the caller-supplied id to UCI
# section-name characters and require the section type to be "exclusion".
remove_exclusion() {
    local id="$1"
    case "$id" in
        ""|*[!A-Za-z0-9_]*)
            json_init
            json_add_boolean "success" 0
            json_dump
            return
            ;;
    esac
    if [ "$(uci -q get "cdn-cache.$id")" != "exclusion" ]; then
        json_init
        json_add_boolean "success" 0
        json_dump
        return
    fi
    uci delete "cdn-cache.$id"
    uci commit cdn-cache
    json_init
    json_add_boolean "success" 1
    json_dump
}
# Wrapper methods for specification compliance (rules = policies)
# "Rules" is the spec-facing name for cache policies; these delegate
# directly so both method names stay available over ubus.
list_rules() {
    get_policies
}
add_rule() {
    add_policy "$@"
}
delete_rule() {
    remove_policy "$@"
}
# Set cache size limits
# Persist the cache size (MB) and validity window (minutes).  Both values
# feed arithmetic and config consumers, so insist on unsigned integers
# instead of writing arbitrary RPC input into UCI.
set_limits() {
    local max_size_mb="$1"
    local cache_valid="${2:-1440}"
    case "$max_size_mb" in
        ''|*[!0-9]*)
            json_init
            json_add_boolean "success" 0
            json_add_string "error" "max_size_mb required"
            json_dump
            return
            ;;
    esac
    case "$cache_valid" in ''|*[!0-9]*) cache_valid=1440 ;; esac
    uci set cdn-cache.main.cache_size="$max_size_mb"
    uci set cdn-cache.main.cache_valid="$cache_valid"
    uci commit cdn-cache
    logger -t cdn-cache "Cache limits updated: ${max_size_mb}MB, ${cache_valid}min validity"
    json_init
    json_add_boolean "success" 1
    json_add_string "message" "Cache limits updated"
    json_add_int "max_size_mb" "$max_size_mb"
    json_add_int "cache_valid_minutes" "$cache_valid"
    json_dump
}
# Clear statistics
# Reset the persisted counter file to all-zero values and confirm.
clear_stats() {
    printf '%s\n' '{"hits":0,"misses":0,"bytes_saved":0,"bytes_served":0,"requests":0,"start_time":0}' > "$STATS_FILE"
    json_init
    json_add_boolean "success" 1
    json_dump
}
# Restart service
# Restart the init service; always reports success (the init script's own
# exit status is not checked or propagated).
do_restart() {
    /etc/init.d/cdn-cache restart
    json_init
    json_add_boolean "success" 1
    json_dump
}
# Get CA certificate for download
# Return the SSL-bump CA certificate base64-encoded, plus fingerprint,
# subject and expiry metadata for client installation.
get_ca_cert() {
    json_init
    if [ -f "$CA_CERT" ]; then
        local cert_content cert_fingerprint cert_subject cert_expires
        # Redirect instead of `cat | base64`; declarations are split from
        # assignments so command failures are not masked by `local`.
        cert_content=$(base64 -w0 < "$CA_CERT")
        cert_fingerprint=$(openssl x509 -in "$CA_CERT" -noout -fingerprint -sha256 2>/dev/null | sed 's/.*=//')
        cert_subject=$(openssl x509 -in "$CA_CERT" -noout -subject 2>/dev/null | sed 's/subject=//')
        cert_expires=$(openssl x509 -in "$CA_CERT" -noout -enddate 2>/dev/null | sed 's/notAfter=//')
        json_add_boolean "success" 1
        json_add_string "certificate" "$cert_content"
        json_add_string "fingerprint" "$cert_fingerprint"
        json_add_string "subject" "$cert_subject"
        json_add_string "expires" "$cert_expires"
    else
        json_add_boolean "success" 0
        json_add_string "error" "CA certificate not found"
    fi
    json_dump
}
# Enable/disable SSL bump
# Persist the flag only; Squid must be restarted for the ssl_bump listener
# to be (de)activated, hence restart_required in the reply.
set_ssl_bump() {
    local enabled="${1:-0}"
    uci set cdn-cache.main.ssl_bump="$enabled"
    uci commit cdn-cache
    local state="disabled"
    [ "$enabled" = "1" ] && state="enabled"
    logger -t cdn-cache "SSL bump $state - restart required"
    json_init
    json_add_boolean "success" 1
    json_add_boolean "restart_required" 1
    json_dump
}
# Main dispatcher
# rpcd executable-plugin protocol:
#   $1 = "list"          → print the method/argument signature object
#   $1 = "call", $2 = m  → read a JSON argument object from stdin, run the
#                          handler for method m, print its JSON result.
case "$1" in
    list)
        # Advertise every method; values are placeholder type hints only
        # ("string"/0/false describe argument types, not defaults).
        json_init
        json_add_object "status"
        json_close_object
        json_add_object "stats"
        json_close_object
        json_add_object "cache_list"
        json_close_object
        json_add_object "top_domains"
        json_close_object
        json_add_object "bandwidth_savings"
        json_add_string "period" "string"
        json_close_object
        json_add_object "hit_ratio"
        json_add_string "period" "string"
        json_close_object
        json_add_object "cache_size"
        json_close_object
        json_add_object "policies"
        json_close_object
        json_add_object "exclusions"
        json_close_object
        json_add_object "logs"
        json_add_int "count" 0
        json_close_object
        json_add_object "set_enabled"
        json_add_boolean "enabled" false
        json_close_object
        json_add_object "purge_cache"
        json_close_object
        json_add_object "purge_domain"
        json_add_string "domain" "string"
        json_close_object
        json_add_object "purge_expired"
        json_close_object
        json_add_object "preload_url"
        json_add_string "url" "string"
        json_close_object
        json_add_object "add_policy"
        json_add_string "name" "string"
        json_add_string "domains" "string"
        json_add_string "extensions" "string"
        json_add_int "cache_time" 0
        json_add_int "max_size" 0
        json_close_object
        json_add_object "remove_policy"
        json_add_string "id" "string"
        json_close_object
        json_add_object "add_exclusion"
        json_add_string "name" "string"
        json_add_string "domains" "string"
        json_add_string "reason" "string"
        json_close_object
        json_add_object "remove_exclusion"
        json_add_string "id" "string"
        json_close_object
        json_add_object "list_rules"
        json_close_object
        json_add_object "add_rule"
        json_add_string "name" "string"
        json_add_string "domains" "string"
        json_add_string "extensions" "string"
        json_add_int "cache_time" 0
        json_add_int "max_size" 0
        json_close_object
        json_add_object "delete_rule"
        json_add_string "id" "string"
        json_close_object
        json_add_object "set_limits"
        json_add_int "max_size_mb" 0
        json_add_int "cache_valid" 0
        json_close_object
        json_add_object "clear_stats"
        json_close_object
        json_add_object "restart"
        json_close_object
        json_add_object "get_ca_cert"
        json_close_object
        json_add_object "set_ssl_bump"
        json_add_boolean "enabled" false
        json_close_object
        json_dump
        ;;
    call)
        # Each argument-taking method reads one line of JSON from stdin and
        # extracts its parameters with json_load/json_get_var before
        # delegating to the handler above.
        case "$2" in
            status) get_status ;;
            stats) get_stats ;;
            cache_list) get_cache_list ;;
            top_domains) get_top_domains ;;
            bandwidth_savings)
                read -r input
                json_load "$input"
                json_get_var period period "24h"
                get_bandwidth_savings "$period"
                ;;
            hit_ratio)
                read -r input
                json_load "$input"
                json_get_var period period "24h"
                get_hit_ratio "$period"
                ;;
            cache_size) get_cache_size ;;
            policies) get_policies ;;
            exclusions) get_exclusions ;;
            logs)
                read -r input
                json_load "$input"
                json_get_var count count 50
                get_logs "$count"
                ;;
            set_enabled)
                read -r input
                json_load "$input"
                json_get_var enabled enabled 0
                set_enabled "$enabled"
                ;;
            purge_cache) purge_cache ;;
            purge_domain)
                read -r input
                json_load "$input"
                json_get_var domain domain ""
                purge_domain "$domain"
                ;;
            purge_expired) purge_expired ;;
            preload_url)
                read -r input
                json_load "$input"
                json_get_var url url ""
                preload_url "$url"
                ;;
            add_policy)
                read -r input
                json_load "$input"
                json_get_var name name ""
                json_get_var domains domains ""
                json_get_var extensions extensions ""
                json_get_var cache_time cache_time 1440
                json_get_var max_size max_size 512
                add_policy "$name" "$domains" "$extensions" "$cache_time" "$max_size"
                ;;
            remove_policy)
                read -r input
                json_load "$input"
                json_get_var id id ""
                remove_policy "$id"
                ;;
            add_exclusion)
                read -r input
                json_load "$input"
                json_get_var name name ""
                json_get_var domains domains ""
                json_get_var reason reason ""
                add_exclusion "$name" "$domains" "$reason"
                ;;
            remove_exclusion)
                read -r input
                json_load "$input"
                json_get_var id id ""
                remove_exclusion "$id"
                ;;
            list_rules) list_rules ;;
            add_rule)
                read -r input
                json_load "$input"
                json_get_var name name ""
                json_get_var domains domains ""
                json_get_var extensions extensions ""
                json_get_var cache_time cache_time 1440
                json_get_var max_size max_size 512
                add_rule "$name" "$domains" "$extensions" "$cache_time" "$max_size"
                ;;
            delete_rule)
                read -r input
                json_load "$input"
                json_get_var id id ""
                delete_rule "$id"
                ;;
            set_limits)
                read -r input
                json_load "$input"
                json_get_var max_size_mb max_size_mb 0
                json_get_var cache_valid cache_valid 1440
                set_limits "$max_size_mb" "$cache_valid"
                ;;
            clear_stats) clear_stats ;;
            restart) do_restart ;;
            get_ca_cert) get_ca_cert ;;
            set_ssl_bump)
                read -r input
                json_load "$input"
                json_get_var enabled enabled 0
                set_ssl_bump "$enabled"
                ;;
            *) echo '{"error":"Unknown method"}' ;;
        esac
        ;;
    *)
        echo '{"error":"Unknown command"}'
        ;;
esac