feat(mitmproxy): Add HAProxy backend inspection and token auth

- Add HAProxy → mitmproxy → Backend inspection chain for filtering
  all vhost traffic through mitmproxy with threat detection
- Add haproxy_router.py addon for Host-based request routing
- Add mitmproxyctl commands: sync-routes, haproxy-enable, haproxy-disable
- Add auth token to status response for Web UI auto-authentication
- Add HAProxy Backend Inspection section to LuCI status page with
  enable/disable/sync controls
- Add HAProxy Router settings section to LuCI settings page
- LXC container now supports dual-port mode (8888 + 8889 for HAProxy)
- Token displayed with copy button in dashboard

Co-Authored-By: Claude Opus 4.5 <noreply@anthropic.com>
This commit is contained in:
CyberMind-FR 2026-01-31 15:49:05 +01:00
parent a6d2b75db8
commit 92f73fc3d2
90 changed files with 1560 additions and 155 deletions

View File

@ -209,7 +209,8 @@
"Bash(git -C /home/reepost/CyberMindStudio/secubox-openwrt status -sb)",
"Bash(pip install:*)",
"WebFetch(domain:secubox.maegia.tv)",
"Bash(# Find the built packages echo \"\"=== Built packages ===\"\" ls -la secubox-tools/build/aarch64_cortex-a72/*.ipk)"
"Bash(# Find the built packages echo \"\"=== Built packages ===\"\" ls -la secubox-tools/build/aarch64_cortex-a72/*.ipk)",
"Bash(__NEW_LINE_2faeb2c9d4f26aa1__ git diff --cached --stat)"
]
}
}

View File

@ -75,6 +75,25 @@ return view.extend({
o = s.option(form.Flag, 'log_requests', _('Log Requests'));
o.depends('enabled', '1');
// HAProxy Router
s = m.section(form.TypedSection, 'haproxy_router', _('HAProxy Backend Inspection'));
s.anonymous = true;
s.description = _('Route HAProxy vhost traffic through mitmproxy for threat detection. All backends will be inspected before reaching their destination.');
o = s.option(form.Flag, 'enabled', _('Enable HAProxy Inspection'));
o.description = _('When enabled, all HAProxy backends will route through mitmproxy');
o = s.option(form.Value, 'listen_port', _('Listen Port'));
o.datatype = 'port';
o.default = '8889';
o.description = _('Port for HAProxy to send traffic to mitmproxy');
o.depends('enabled', '1');
o = s.option(form.Flag, 'threat_detection', _('Threat Detection'));
o.default = '1';
o.description = _('Enable threat detection on HAProxy traffic');
o.depends('enabled', '1');
return m.render();
}
});

View File

@ -34,6 +34,21 @@ var callClearAlerts = rpc.declare({
method: 'clear_alerts'
});
// RPC: enable HAProxy backend inspection (reroutes HAProxy backends through mitmproxy).
var callHaproxyEnable = rpc.declare({
object: 'luci.mitmproxy',
method: 'haproxy_enable'
});
// RPC: disable HAProxy backend inspection (restores original HAProxy backends).
var callHaproxyDisable = rpc.declare({
object: 'luci.mitmproxy',
method: 'haproxy_disable'
});
// RPC: re-sync the mitmproxy routing table from the current HAProxy vhost config.
var callSyncRoutes = rpc.declare({
object: 'luci.mitmproxy',
method: 'sync_routes'
});
// Map an alert severity level to its display colour.
// Unknown or missing severities fall back to neutral grey.
function severityColor(sev) {
    const palette = {
        critical: '#e74c3c',
        high: '#e67e22',
        medium: '#f39c12',
        low: '#3498db'
    };
    return palette[sev] || '#666';
}
@ -84,10 +99,38 @@ return view.extend({
E('tr', { 'class': 'tr' }, [
E('td', { 'class': 'td' }, E('strong', {}, _('Web UI'))),
E('td', { 'class': 'td' }, status.running ?
E('a', { 'href': 'http://' + window.location.hostname + ':' + (status.web_port || 8082), 'target': '_blank' },
'http://' + window.location.hostname + ':' + (status.web_port || 8082)) :
E('a', {
'href': 'http://' + window.location.hostname + ':' + (status.web_port || 8081) + (status.token ? '/?token=' + status.token : ''),
'target': '_blank'
}, 'http://' + window.location.hostname + ':' + (status.web_port || 8081) + (status.token ? '/?token=***' : '')) :
_('Not available'))
]),
status.token ? E('tr', { 'class': 'tr' }, [
E('td', { 'class': 'td' }, E('strong', {}, _('Auth Token'))),
E('td', { 'class': 'td' }, [
E('code', { 'style': 'font-size: 11px; background: #f0f0f0; padding: 2px 6px; border-radius: 3px;' },
status.token.substring(0, 12) + '...'),
' ',
E('button', {
'class': 'btn cbi-button cbi-button-action',
'style': 'font-size: 11px; padding: 2px 8px;',
'click': function() {
navigator.clipboard.writeText(status.token);
this.textContent = _('Copied!');
setTimeout(function() { this.textContent = _('Copy'); }.bind(this), 1500);
}
}, _('Copy'))
])
]) : null,
status.haproxy_router_enabled ? E('tr', { 'class': 'tr' }, [
E('td', { 'class': 'td' }, E('strong', {}, _('HAProxy Router'))),
E('td', { 'class': 'td' }, [
E('span', {
'style': 'display: inline-block; width: 10px; height: 10px; border-radius: 50%; margin-right: 6px; background: #27ae60;'
}),
_('Enabled (port ') + (status.haproxy_listen_port || 8889) + ')'
])
]) : null
])
]),
E('div', { 'style': 'margin-top: 16px;' }, [
@ -119,6 +162,78 @@ return view.extend({
])
]),
// HAProxy Backend Inspection Card
E('div', { 'class': 'cbi-section' }, [
E('h3', {}, _('HAProxy Backend Inspection')),
E('div', { 'class': 'cbi-section-node' }, [
E('p', {}, _('Route all HAProxy vhost traffic through mitmproxy for threat detection. When enabled, backends are inspected before reaching their destination.')),
E('table', { 'class': 'table', 'style': 'margin: 16px 0;' }, [
E('tr', { 'class': 'tr' }, [
E('td', { 'class': 'td', 'width': '33%' }, E('strong', {}, _('Status'))),
E('td', { 'class': 'td' }, [
E('span', {
'style': 'display: inline-block; width: 12px; height: 12px; border-radius: 50%; margin-right: 8px; background: ' + (status.haproxy_router_enabled ? '#27ae60' : '#95a5a6')
}),
status.haproxy_router_enabled ? _('Enabled') : _('Disabled')
])
]),
E('tr', { 'class': 'tr' }, [
E('td', { 'class': 'td' }, E('strong', {}, _('Inspection Port'))),
E('td', { 'class': 'td' }, status.haproxy_listen_port || 8889)
])
]),
E('div', {}, [
!status.haproxy_router_enabled ?
E('button', {
'class': 'btn cbi-button cbi-button-apply',
'click': function() {
ui.showModal(_('Enabling HAProxy Inspection...'), [
E('p', { 'class': 'spinning' }, _('Updating HAProxy backends and restarting services...'))
]);
callHaproxyEnable().then(function(res) {
ui.hideModal();
if (res && res.success) {
ui.addNotification(null, E('p', {}, _('HAProxy backend inspection enabled')), 'success');
} else {
ui.addNotification(null, E('p', {}, _('Failed: ') + (res.error || 'Unknown error')), 'error');
}
location.reload();
});
},
'disabled': !status.running
}, _('Enable HAProxy Inspection')) :
E('button', {
'class': 'btn cbi-button cbi-button-reset',
'click': function() {
ui.showModal(_('Disabling HAProxy Inspection...'), [
E('p', { 'class': 'spinning' }, _('Restoring original HAProxy backends...'))
]);
callHaproxyDisable().then(function(res) {
ui.hideModal();
if (res && res.success) {
ui.addNotification(null, E('p', {}, _('HAProxy backend inspection disabled')), 'success');
} else {
ui.addNotification(null, E('p', {}, _('Failed: ') + (res.error || 'Unknown error')), 'error');
}
location.reload();
});
}
}, _('Disable HAProxy Inspection')),
' ',
E('button', {
'class': 'btn cbi-button',
'click': function() {
callSyncRoutes().then(function(res) {
if (res && res.success) {
ui.addNotification(null, E('p', {}, _('Routes synced from HAProxy')), 'success');
}
});
}
}, _('Sync Routes'))
])
])
]),
// Security Threats Card
E('div', { 'class': 'cbi-section' }, [
E('h3', {}, [

View File

@ -17,6 +17,8 @@ get_status() {
local proxy_port=$(uci_get main.proxy_port)
local data_path=$(uci_get main.data_path)
local mode=$(uci_get main.mode)
local haproxy_router_enabled=$(uci_get haproxy_router.enabled)
local haproxy_listen_port=$(uci_get haproxy_router.listen_port)
# Check for LXC availability
local lxc_available=0
@ -38,6 +40,11 @@ get_status() {
nft list table inet mitmproxy >/dev/null 2>&1 && nft_active=1
fi
# Get authentication token from container data path
local token=""
local token_file="${data_path:-/srv/mitmproxy}/.mitmproxy_token"
[ -f "$token_file" ] && token=$(cat "$token_file" 2>/dev/null | tr -d '\n')
cat <<EOFJ
{
"enabled": $([ "$enabled" = "1" ] && echo "true" || echo "false"),
@ -49,7 +56,10 @@ get_status() {
"proxy_port": ${proxy_port:-8888},
"data_path": "${data_path:-/srv/mitmproxy}",
"mode": "${mode:-regular}",
"nft_active": $([ "$nft_active" = "1" ] && echo "true" || echo "false")
"nft_active": $([ "$nft_active" = "1" ] && echo "true" || echo "false"),
"token": "${token:-}",
"haproxy_router_enabled": $([ "$haproxy_router_enabled" = "1" ] && echo "true" || echo "false"),
"haproxy_listen_port": ${haproxy_listen_port:-8889}
}
EOFJ
}
@ -397,8 +407,82 @@ clear_alerts() {
echo '{"success":true,"message":"Alerts cleared"}'
}
# Enable HAProxy backend inspection via mitmproxyctl.
# Emits a JSON object: {"success":bool, "message"|"error":str [, "details":str]}.
haproxy_enable() {
	json_init
	# Guard: the CLI helper must be installed before we can do anything.
	command -v mitmproxyctl >/dev/null 2>&1 || {
		json_add_boolean "success" 0
		json_add_string "error" "mitmproxyctl not found"
		json_dump
		return
	}
	# Run the enable action, capturing output for failure diagnostics.
	if mitmproxyctl haproxy-enable >/tmp/haproxy-enable.log 2>&1; then
		json_add_boolean "success" 1
		json_add_string "message" "HAProxy backend inspection enabled"
	else
		json_add_boolean "success" 0
		json_add_string "error" "Failed to enable HAProxy inspection"
		# Attach the captured command output, if any, to help debugging.
		local log=$(cat /tmp/haproxy-enable.log 2>/dev/null)
		[ -n "$log" ] && json_add_string "details" "$log"
	fi
	json_dump
}
# Disable HAProxy backend inspection via mitmproxyctl.
# Emits a JSON object: {"success":bool, "message"|"error":str [, "details":str]}.
haproxy_disable() {
	json_init
	if ! command -v mitmproxyctl >/dev/null 2>&1; then
		json_add_boolean "success" 0
		json_add_string "error" "mitmproxyctl not found"
		json_dump
		return
	fi
	mitmproxyctl haproxy-disable >/tmp/haproxy-disable.log 2>&1
	local result=$?
	if [ $result -eq 0 ]; then
		json_add_boolean "success" 1
		json_add_string "message" "HAProxy backend inspection disabled"
	else
		json_add_boolean "success" 0
		json_add_string "error" "Failed to disable HAProxy inspection"
		# Surface the captured log on failure, consistent with haproxy_enable.
		local log=$(cat /tmp/haproxy-disable.log 2>/dev/null)
		[ -n "$log" ] && json_add_string "details" "$log"
	fi
	json_dump
}
# Re-sync mitmproxy's routing table from the current HAProxy vhost config.
# Emits a JSON object: {"success":bool, "message"|"error":str [, "details":str]}.
sync_routes() {
	json_init
	if ! command -v mitmproxyctl >/dev/null 2>&1; then
		json_add_boolean "success" 0
		json_add_string "error" "mitmproxyctl not found"
		json_dump
		return
	fi
	mitmproxyctl sync-routes >/tmp/sync-routes.log 2>&1
	local result=$?
	if [ $result -eq 0 ]; then
		json_add_boolean "success" 1
		json_add_string "message" "Routes synced from HAProxy"
	else
		json_add_boolean "success" 0
		json_add_string "error" "Failed to sync routes"
		# Surface the captured log on failure, consistent with haproxy_enable.
		local log=$(cat /tmp/sync-routes.log 2>/dev/null)
		[ -n "$log" ] && json_add_string "details" "$log"
	fi
	json_dump
}
list_methods() { cat <<'EOFM'
{"status":{},"settings":{},"save_settings":{"mode":"str","enabled":"bool","proxy_port":"int","web_port":"int","apply_now":"bool"},"set_mode":{"mode":"str","apply_now":"bool"},"setup_firewall":{},"clear_firewall":{},"install":{},"start":{},"stop":{},"restart":{},"alerts":{},"threat_stats":{},"clear_alerts":{}}
{"status":{},"settings":{},"save_settings":{"mode":"str","enabled":"bool","proxy_port":"int","web_port":"int","apply_now":"bool"},"set_mode":{"mode":"str","apply_now":"bool"},"setup_firewall":{},"clear_firewall":{},"install":{},"start":{},"stop":{},"restart":{},"alerts":{},"threat_stats":{},"clear_alerts":{},"haproxy_enable":{},"haproxy_disable":{},"sync_routes":{}}
EOFM
}
@ -419,6 +503,9 @@ case "$1" in
alerts) get_alerts ;;
threat_stats) get_threat_stats ;;
clear_alerts) clear_alerts ;;
haproxy_enable) haproxy_enable ;;
haproxy_disable) haproxy_disable ;;
sync_routes) sync_routes ;;
*) echo '{"error":"Unknown method"}' ;;
esac
;;

View File

@ -8,7 +8,7 @@ Architecture: all
Installed-Size: 71680
Description: Comprehensive authentication and session management with captive portal, OAuth2/OIDC integration, voucher system, and time-based access control
Filename: luci-app-auth-guardian_0.4.0-r3_all.ipk
Size: 12079
Size: 12078
Package: luci-app-bandwidth-manager
Version: 0.5.0-r2
@ -20,7 +20,7 @@ Architecture: all
Installed-Size: 378880
Description: Advanced bandwidth management with QoS rules, client quotas, and SQM integration
Filename: luci-app-bandwidth-manager_0.5.0-r2_all.ipk
Size: 66965
Size: 66972
Package: luci-app-cdn-cache
Version: 0.5.0-r3
@ -32,7 +32,7 @@ Architecture: all
Installed-Size: 122880
Description: Dashboard for managing local CDN caching proxy on OpenWrt
Filename: luci-app-cdn-cache_0.5.0-r3_all.ipk
Size: 23187
Size: 23190
Package: luci-app-client-guardian
Version: 0.4.0-r7
@ -44,7 +44,7 @@ Architecture: all
Installed-Size: 307200
Description: Network Access Control with client monitoring, zone management, captive portal, parental controls, and SMS/email alerts
Filename: luci-app-client-guardian_0.4.0-r7_all.ipk
Size: 57042
Size: 57044
Package: luci-app-crowdsec-dashboard
Version: 0.7.0-r29
@ -56,7 +56,7 @@ Architecture: all
Installed-Size: 296960
Description: Real-time security monitoring dashboard for CrowdSec on OpenWrt
Filename: luci-app-crowdsec-dashboard_0.7.0-r29_all.ipk
Size: 55580
Size: 55583
Package: luci-app-cyberfeed
Version: 0.1.1-r1
@ -68,7 +68,7 @@ Architecture: all
Installed-Size: 71680
Description: Cyberpunk-themed RSS feed aggregator dashboard with social media support
Filename: luci-app-cyberfeed_0.1.1-r1_all.ipk
Size: 12835
Size: 12838
Package: luci-app-exposure
Version: 1.0.0-r3
@ -80,7 +80,7 @@ Architecture: all
Installed-Size: 153600
Description: LuCI SecuBox Service Exposure Manager
Filename: luci-app-exposure_1.0.0-r3_all.ipk
Size: 20532
Size: 20535
Package: luci-app-gitea
Version: 1.0.0-r2
@ -92,7 +92,7 @@ Architecture: all
Installed-Size: 92160
Description: Modern dashboard for Gitea Platform management on OpenWrt
Filename: luci-app-gitea_1.0.0-r2_all.ipk
Size: 15586
Size: 15585
Package: luci-app-glances
Version: 1.0.0-r2
@ -104,7 +104,7 @@ Architecture: all
Installed-Size: 40960
Description: Modern dashboard for Glances system monitoring with SecuBox theme
Filename: luci-app-glances_1.0.0-r2_all.ipk
Size: 6965
Size: 6969
Package: luci-app-haproxy
Version: 1.0.0-r8
@ -116,7 +116,7 @@ Architecture: all
Installed-Size: 204800
Description: Web interface for managing HAProxy load balancer with vhosts, SSL certificates, and backend routing
Filename: luci-app-haproxy_1.0.0-r8_all.ipk
Size: 34165
Size: 34169
Package: luci-app-hexojs
Version: 1.0.0-r3
@ -128,7 +128,7 @@ Architecture: all
Installed-Size: 215040
Description: Modern dashboard for Hexo static site generator on OpenWrt
Filename: luci-app-hexojs_1.0.0-r3_all.ipk
Size: 32974
Size: 32976
Package: luci-app-jitsi
Version: 1.0.0-r1
@ -140,7 +140,7 @@ Architecture: all
Installed-Size: 30720
Description: LuCI Jitsi Meet Configuration
Filename: luci-app-jitsi_1.0.0-r1_all.ipk
Size: 5140
Size: 5135
Package: luci-app-ksm-manager
Version: 0.4.0-r2
@ -152,7 +152,7 @@ Architecture: all
Installed-Size: 112640
Description: Centralized cryptographic key management with hardware security module (HSM) support for Nitrokey and YubiKey devices. Provides secure key storage, certificate management, SSH key handling, and secret storage with audit logging.
Filename: luci-app-ksm-manager_0.4.0-r2_all.ipk
Size: 18719
Size: 18721
Package: luci-app-localai
Version: 0.1.0-r15
@ -164,7 +164,7 @@ Architecture: all
Installed-Size: 81920
Description: Modern dashboard for LocalAI LLM management on OpenWrt
Filename: luci-app-localai_0.1.0-r15_all.ipk
Size: 14358
Size: 14361
Package: luci-app-lyrion
Version: 1.0.0-r1
@ -176,7 +176,7 @@ Architecture: all
Installed-Size: 40960
Description: LuCI support for Lyrion Music Server
Filename: luci-app-lyrion_1.0.0-r1_all.ipk
Size: 6724
Size: 6726
Package: luci-app-magicmirror2
Version: 0.4.0-r6
@ -212,7 +212,7 @@ Architecture: all
Installed-Size: 102400
Description: Real-time detection and monitoring of streaming services (Netflix, YouTube, Spotify, etc.) with quality estimation, history tracking, and alerts. Supports nDPId local DPI and netifyd.
Filename: luci-app-media-flow_0.6.4-r1_all.ipk
Size: 19111
Size: 19126
Package: luci-app-metablogizer
Version: 1.0.0-r3
@ -224,7 +224,7 @@ Architecture: all
Installed-Size: 112640
Description: LuCI support for MetaBlogizer Static Site Publisher
Filename: luci-app-metablogizer_1.0.0-r3_all.ipk
Size: 23505
Size: 23506
Package: luci-app-metabolizer
Version: 1.0.0-r2
@ -236,19 +236,19 @@ Architecture: all
Installed-Size: 30720
Description: LuCI support for Metabolizer CMS
Filename: luci-app-metabolizer_1.0.0-r2_all.ipk
Size: 4754
Size: 4760
Package: luci-app-mitmproxy
Version: 0.4.0-r6
Version: 0.5.0-r1
Depends: luci-base, luci-app-secubox, secubox-app-mitmproxy, jq
License: Apache-2.0
Section: luci
Maintainer: OpenWrt LuCI community
Architecture: all
Installed-Size: 102400
Installed-Size: 112640
Description: Modern dashboard for mitmproxy HTTPS traffic inspection with SecuBox theme
Filename: luci-app-mitmproxy_0.4.0-r6_all.ipk
Size: 18932
Filename: luci-app-mitmproxy_0.5.0-r1_all.ipk
Size: 20408
Package: luci-app-mmpm
Version: 0.2.0-r3
@ -272,7 +272,7 @@ Architecture: all
Installed-Size: 122880
Description: USB-to-MQTT IoT hub with SecuBox theme
Filename: luci-app-mqtt-bridge_0.4.0-r4_all.ipk
Size: 22775
Size: 22780
Package: luci-app-ndpid
Version: 1.1.2-r2
@ -284,7 +284,7 @@ Architecture: all
Installed-Size: 122880
Description: Modern dashboard for nDPId deep packet inspection on OpenWrt
Filename: luci-app-ndpid_1.1.2-r2_all.ipk
Size: 22453
Size: 22455
Package: luci-app-netdata-dashboard
Version: 0.5.0-r2
@ -296,7 +296,7 @@ Architecture: all
Installed-Size: 133120
Description: Real-time system monitoring dashboard with Netdata integration for OpenWrt
Filename: luci-app-netdata-dashboard_0.5.0-r2_all.ipk
Size: 22400
Size: 22398
Package: luci-app-network-modes
Version: 0.5.0-r3
@ -320,7 +320,7 @@ Architecture: all
Installed-Size: 81920
Description: Unified network services dashboard with DNS/hosts sync, CDN cache control, and WPAD auto-proxy configuration
Filename: luci-app-network-tweaks_1.0.0-r7_all.ipk
Size: 15459
Size: 15461
Package: luci-app-nextcloud
Version: 1.0.0-r1
@ -332,7 +332,7 @@ Architecture: all
Installed-Size: 30720
Description: LuCI support for Nextcloud
Filename: luci-app-nextcloud_1.0.0-r1_all.ipk
Size: 6481
Size: 6484
Package: luci-app-ollama
Version: 0.1.0-r1
@ -344,7 +344,7 @@ Architecture: all
Installed-Size: 71680
Description: Modern dashboard for Ollama LLM management on OpenWrt
Filename: luci-app-ollama_0.1.0-r1_all.ipk
Size: 11997
Size: 11994
Package: luci-app-picobrew
Version: 1.0.0-r1
@ -356,7 +356,7 @@ Architecture: all
Installed-Size: 51200
Description: Modern dashboard for PicoBrew Server management on OpenWrt
Filename: luci-app-picobrew_1.0.0-r1_all.ipk
Size: 9979
Size: 9975
Package: luci-app-secubox
Version: 0.7.1-r4
@ -368,7 +368,7 @@ Architecture: all
Installed-Size: 266240
Description: Central control hub for all SecuBox modules. Provides unified dashboard, module status, system health monitoring, and quick actions.
Filename: luci-app-secubox_0.7.1-r4_all.ipk
Size: 49897
Size: 49902
Package: luci-app-secubox-admin
Version: 1.0.0-r19
@ -379,7 +379,7 @@ Architecture: all
Installed-Size: 337920
Description: Unified admin control center for SecuBox appstore plugins with system monitoring
Filename: luci-app-secubox-admin_1.0.0-r19_all.ipk
Size: 57097
Size: 57096
Package: luci-app-secubox-crowdsec
Version: 1.0.0-r3
@ -391,7 +391,7 @@ Architecture: all
Installed-Size: 81920
Description: LuCI SecuBox CrowdSec Dashboard
Filename: luci-app-secubox-crowdsec_1.0.0-r3_all.ipk
Size: 13919
Size: 13920
Package: luci-app-secubox-netdiag
Version: 1.0.0-r1
@ -403,7 +403,7 @@ Architecture: all
Installed-Size: 61440
Description: Real-time DSA switch port statistics, error monitoring, and network health diagnostics
Filename: luci-app-secubox-netdiag_1.0.0-r1_all.ipk
Size: 11999
Size: 11997
Package: luci-app-secubox-netifyd
Version: 1.2.1-r1
@ -415,7 +415,7 @@ Architecture: all
Installed-Size: 215040
Description: Complete LuCI interface for netifyd DPI engine with real-time flow monitoring, application detection, network analytics, and flow action plugins
Filename: luci-app-secubox-netifyd_1.2.1-r1_all.ipk
Size: 39497
Size: 39499
Package: luci-app-secubox-p2p
Version: 0.1.0-r1
@ -451,7 +451,7 @@ Architecture: all
Installed-Size: 71680
Description: Unified dashboard integrating netifyd DPI threats with CrowdSec intelligence for real-time threat monitoring and automated blocking
Filename: luci-app-secubox-security-threats_1.0.0-r4_all.ipk
Size: 13904
Size: 13908
Package: luci-app-service-registry
Version: 1.0.0-r1
@ -463,7 +463,7 @@ Architecture: all
Installed-Size: 194560
Description: Unified service aggregation with HAProxy vhosts, Tor hidden services, and QR-coded landing page
Filename: luci-app-service-registry_1.0.0-r1_all.ipk
Size: 39821
Size: 39825
Package: luci-app-streamlit
Version: 1.0.0-r9
@ -475,7 +475,7 @@ Architecture: all
Installed-Size: 122880
Description: Modern dashboard for Streamlit Platform management on OpenWrt
Filename: luci-app-streamlit_1.0.0-r9_all.ipk
Size: 20469
Size: 20470
Package: luci-app-system-hub
Version: 0.5.1-r4
@ -487,7 +487,7 @@ Architecture: all
Installed-Size: 358400
Description: Central system control with monitoring, services, logs, and backup
Filename: luci-app-system-hub_0.5.1-r4_all.ipk
Size: 66343
Size: 66348
Package: luci-app-tor-shield
Version: 1.0.0-r10
@ -499,7 +499,7 @@ Architecture: all
Installed-Size: 133120
Description: Modern dashboard for Tor anonymization on OpenWrt
Filename: luci-app-tor-shield_1.0.0-r10_all.ipk
Size: 24532
Size: 24537
Package: luci-app-traffic-shaper
Version: 0.4.0-r2
@ -511,7 +511,7 @@ Architecture: all
Installed-Size: 92160
Description: Advanced traffic shaping with TC/CAKE for precise bandwidth control
Filename: luci-app-traffic-shaper_0.4.0-r2_all.ipk
Size: 15631
Size: 15637
Package: luci-app-vhost-manager
Version: 0.5.0-r5
@ -523,7 +523,7 @@ Architecture: all
Installed-Size: 153600
Description: Nginx reverse proxy manager with Let's Encrypt SSL certificates, authentication, and WebSocket support
Filename: luci-app-vhost-manager_0.5.0-r5_all.ipk
Size: 26197
Size: 26201
Package: luci-app-wireguard-dashboard
Version: 0.7.0-r5
@ -535,7 +535,7 @@ Architecture: all
Installed-Size: 235520
Description: Modern dashboard for WireGuard VPN monitoring on OpenWrt
Filename: luci-app-wireguard-dashboard_0.7.0-r5_all.ipk
Size: 45363
Size: 45371
Package: luci-app-zigbee2mqtt
Version: 1.0.0-r2
@ -547,7 +547,7 @@ Architecture: all
Installed-Size: 40960
Description: Graphical interface for managing the Zigbee2MQTT docker application.
Filename: luci-app-zigbee2mqtt_1.0.0-r2_all.ipk
Size: 7084
Size: 7091
Package: luci-theme-secubox
Version: 0.4.7-r1
@ -559,7 +559,7 @@ Architecture: all
Installed-Size: 460800
Description: Global CyberMood design system (CSS/JS/i18n) shared by all SecuBox dashboards.
Filename: luci-theme-secubox_0.4.7-r1_all.ipk
Size: 111791
Size: 111796
Package: secubox-app
Version: 1.0.0-r2
@ -570,7 +570,7 @@ Installed-Size: 92160
Description: Command line helper for SecuBox App Store manifests. Installs /usr/sbin/secubox-app
and ships the default manifests under /usr/share/secubox/plugins/.
Filename: secubox-app_1.0.0-r2_all.ipk
Size: 11180
Size: 11183
Package: secubox-app-adguardhome
Version: 1.0.0-r2
@ -584,7 +584,7 @@ Description: Installer, configuration, and service manager for running AdGuard
inside Docker on SecuBox-powered OpenWrt systems. Network-wide ad blocker
with DNS-over-HTTPS/TLS support and detailed analytics.
Filename: secubox-app-adguardhome_1.0.0-r2_all.ipk
Size: 2878
Size: 2880
Package: secubox-app-auth-logger
Version: 1.2.2-r1
@ -602,7 +602,7 @@ Description: Logs authentication failures from LuCI/rpcd and Dropbear SSH
- JavaScript hook to intercept login failures
- CrowdSec parser and bruteforce scenario
Filename: secubox-app-auth-logger_1.2.2-r1_all.ipk
Size: 9374
Size: 9379
Package: secubox-app-crowdsec-custom
Version: 1.1.0-r1
@ -625,7 +625,7 @@ Description: Custom CrowdSec configurations for SecuBox web interface protectio
- Webapp generic auth bruteforce protection
- Whitelist for trusted networks
Filename: secubox-app-crowdsec-custom_1.1.0-r1_all.ipk
Size: 5759
Size: 5762
Package: secubox-app-cs-firewall-bouncer
Version: 0.0.31-r4
@ -652,7 +652,7 @@ Description: SecuBox CrowdSec Firewall Bouncer for OpenWrt.
- Automatic restart on firewall reload
- procd service management
Filename: secubox-app-cs-firewall-bouncer_0.0.31-r4_aarch64_cortex-a72.ipk
Size: 5049323
Size: 5049320
Package: secubox-app-cyberfeed
Version: 0.2.1-r1
@ -666,7 +666,7 @@ Description: Cyberpunk-themed RSS feed aggregator for OpenWrt/SecuBox.
Features emoji injection, neon styling, and RSS-Bridge support
for social media feeds (Facebook, Twitter, Mastodon).
Filename: secubox-app-cyberfeed_0.2.1-r1_all.ipk
Size: 12449
Size: 12452
Package: secubox-app-domoticz
Version: 1.0.0-r2
@ -694,7 +694,7 @@ Description: Unified service exposure manager for SecuBox.
- Dynamic Tor hidden service management
- HAProxy SSL reverse proxy configuration
Filename: secubox-app-exposure_1.0.0-r1_all.ipk
Size: 6832
Size: 6834
Package: secubox-app-gitea
Version: 1.0.0-r5
@ -717,7 +717,7 @@ Description: Gitea Git Platform - Self-hosted lightweight Git service
Runs in LXC container with Alpine Linux.
Configure in /etc/config/gitea.
Filename: secubox-app-gitea_1.0.0-r5_all.ipk
Size: 9405
Size: 9406
Package: secubox-app-glances
Version: 1.0.0-r1
@ -740,7 +740,7 @@ Description: Glances - Cross-platform system monitoring tool for SecuBox.
Runs in LXC container for isolation and security.
Configure in /etc/config/glances.
Filename: secubox-app-glances_1.0.0-r1_all.ipk
Size: 5531
Size: 5537
Package: secubox-app-haproxy
Version: 1.0.0-r23
@ -760,7 +760,7 @@ Description: HAProxy load balancer and reverse proxy running in an LXC containe
- Stats dashboard
- Rate limiting and ACLs
Filename: secubox-app-haproxy_1.0.0-r23_all.ipk
Size: 15674
Size: 15681
Package: secubox-app-hexojs
Version: 1.0.0-r8
@ -784,7 +784,7 @@ Description: Hexo CMS - Self-hosted static blog generator for OpenWrt
Runs in LXC container with Alpine Linux.
Configure in /etc/config/hexojs.
Filename: secubox-app-hexojs_1.0.0-r8_all.ipk
Size: 94931
Size: 94937
Package: secubox-app-jitsi
Version: 1.0.0-r1
@ -809,7 +809,7 @@ Description: Jitsi Meet - Secure, fully featured video conferencing for SecuBox
Integrates with HAProxy for SSL termination.
Configure in /etc/config/jitsi.
Filename: secubox-app-jitsi_1.0.0-r1_all.ipk
Size: 8908
Size: 8916
Package: secubox-app-localai
Version: 2.25.0-r1
@ -831,7 +831,7 @@ Description: LocalAI native binary package for OpenWrt.
API: http://<router-ip>:8081/v1
Filename: secubox-app-localai_2.25.0-r1_all.ipk
Size: 5714
Size: 5726
Package: secubox-app-localai-wb
Version: 2.25.0-r1
@ -855,7 +855,7 @@ Description: LocalAI native binary package for OpenWrt.
API: http://<router-ip>:8080/v1
Filename: secubox-app-localai-wb_2.25.0-r1_all.ipk
Size: 7942
Size: 7953
Package: secubox-app-lyrion
Version: 2.0.2-r1
@ -875,7 +875,7 @@ Description: Lyrion Media Server (formerly Logitech Media Server / Squeezebox S
Auto-detects available runtime, preferring LXC for lower resource usage.
Configure runtime in /etc/config/lyrion.
Filename: secubox-app-lyrion_2.0.2-r1_all.ipk
Size: 7275
Size: 7285
Package: secubox-app-magicmirror2
Version: 0.4.0-r8
@ -897,7 +897,7 @@ Description: MagicMirror² - Open source modular smart mirror platform for Secu
Runs in LXC container for isolation and security.
Configure in /etc/config/magicmirror2.
Filename: secubox-app-magicmirror2_0.4.0-r8_all.ipk
Size: 9247
Size: 9254
Package: secubox-app-mailinabox
Version: 2.0.0-r1
@ -943,16 +943,16 @@ Description: Metabolizer Blog Pipeline - Integrated CMS with Git-based workflow
Pipeline: Edit in Streamlit -> Push to Gitea -> Build with Hexo -> Publish
Filename: secubox-app-metabolizer_1.0.0-r3_all.ipk
Size: 13980
Size: 13979
Package: secubox-app-mitmproxy
Version: 0.4.0-r16
Version: 0.5.0-r17
Depends: wget, tar
License: Apache-2.0
Section: utils
Maintainer: CyberMind Studio <contact@cybermind.fr>
Architecture: all
Installed-Size: 40960
Installed-Size: 71680
Description: mitmproxy - Interactive HTTPS proxy for SecuBox-powered OpenWrt systems.
Features:
@ -960,11 +960,17 @@ Description: mitmproxy - Interactive HTTPS proxy for SecuBox-powered OpenWrt sy
- Modify requests and responses on the fly
- Web interface (mitmweb) for easy analysis
- Export traffic for offline analysis
- Enhanced threat detection addon (v2.0):
* SQL injection, XSS, command injection
* Path traversal, SSRF, XXE, LDAP injection
* Log4Shell and known CVE detection
* Rate limiting and suspicious header detection
* CrowdSec integration for blocking
Runs in LXC container for isolation and security.
Configure in /etc/config/mitmproxy.
Filename: secubox-app-mitmproxy_0.4.0-r16_all.ipk
Size: 10208
Filename: secubox-app-mitmproxy_0.5.0-r17_all.ipk
Size: 18508
Package: secubox-app-mmpm
Version: 0.2.0-r5
@ -1021,7 +1027,7 @@ Description: Ollama - Simple local LLM runtime for SecuBox-powered OpenWrt syst
Runs in Docker/Podman container.
Configure in /etc/config/ollama.
Filename: secubox-app-ollama_0.1.0-r1_all.ipk
Size: 5734
Size: 5735
Package: secubox-app-picobrew
Version: 1.0.0-r7
@ -1043,7 +1049,7 @@ Description: PicoBrew Server - Self-hosted brewing controller for PicoBrew devi
Runs in LXC container with Python/Flask backend.
Configure in /etc/config/picobrew.
Filename: secubox-app-picobrew_1.0.0-r7_all.ipk
Size: 5541
Size: 5543
Package: secubox-app-streamlit
Version: 1.0.0-r5
@ -1070,7 +1076,7 @@ Description: Streamlit App Platform - Self-hosted Python data app platform
Configure in /etc/config/streamlit.
Filename: secubox-app-streamlit_1.0.0-r5_all.ipk
Size: 11718
Size: 11720
Package: secubox-app-tor
Version: 1.0.0-r1
@ -1093,7 +1099,7 @@ Description: SecuBox Tor Shield - One-click Tor anonymization for OpenWrt
Configure in /etc/config/tor-shield.
Filename: secubox-app-tor_1.0.0-r1_all.ipk
Size: 7382
Size: 7377
Package: secubox-app-webapp
Version: 1.5.0-r7
@ -1111,7 +1117,7 @@ Description: SecuBox Control Center Dashboard - A web-based dashboard for monit
- Service management
- Network interface control
Filename: secubox-app-webapp_1.5.0-r7_all.ipk
Size: 39169
Size: 39170
Package: secubox-app-zigbee2mqtt
Version: 1.0.0-r3
@ -1124,7 +1130,7 @@ Installed-Size: 20480
Description: Installer, configuration, and service manager for running Zigbee2MQTT
inside Docker on SecuBox-powered OpenWrt systems.
Filename: secubox-app-zigbee2mqtt_1.0.0-r3_all.ipk
Size: 3544
Size: 3542
Package: secubox-core
Version: 0.10.0-r11
@ -1144,7 +1150,7 @@ Description: SecuBox Core Framework provides the foundational infrastructure fo
- Unified CLI interface
- ubus RPC backend
Filename: secubox-core_0.10.0-r11_all.ipk
Size: 87807
Size: 87809
Package: secubox-p2p
Version: 0.6.0-r1
@ -1163,5 +1169,5 @@ Description: SecuBox P2P Hub backend providing peer discovery, mesh networking
and MirrorBox NetMesh Catalog for cross-chain distributed service
registry with HAProxy vhost discovery and multi-endpoint access URLs.
Filename: secubox-p2p_0.6.0-r1_all.ipk
Size: 40189
Size: 40190

View File

@ -1,12 +1,12 @@
{
"feed_url": "/secubox-feed",
"generated": "2026-01-31T13:38:48+01:00",
"generated": "2026-01-31T15:33:22+01:00",
"packages": [
{
"name": "luci-app-auth-guardian",
"version": "0.4.0-r3",
"filename": "luci-app-auth-guardian_0.4.0-r3_all.ipk",
"size": 12079,
"size": 12078,
"category": "security",
"icon": "key",
"description": "Authentication management",
@ -18,7 +18,7 @@
"name": "luci-app-bandwidth-manager",
"version": "0.5.0-r2",
"filename": "luci-app-bandwidth-manager_0.5.0-r2_all.ipk",
"size": 66965,
"size": 66972,
"category": "network",
"icon": "activity",
"description": "Bandwidth monitoring and control",
@ -30,7 +30,7 @@
"name": "luci-app-cdn-cache",
"version": "0.5.0-r3",
"filename": "luci-app-cdn-cache_0.5.0-r3_all.ipk",
"size": 23187,
"size": 23190,
"category": "network",
"icon": "globe",
"description": "CDN caching",
@ -42,7 +42,7 @@
"name": "luci-app-client-guardian",
"version": "0.4.0-r7",
"filename": "luci-app-client-guardian_0.4.0-r7_all.ipk",
"size": 57042,
"size": 57044,
"category": "network",
"icon": "users",
"description": "Client management and monitoring",
@ -54,7 +54,7 @@
"name": "luci-app-crowdsec-dashboard",
"version": "0.7.0-r29",
"filename": "luci-app-crowdsec-dashboard_0.7.0-r29_all.ipk",
"size": 55580,
"size": 55583,
"category": "security",
"icon": "shield",
"description": "CrowdSec security monitoring",
@ -66,7 +66,7 @@
"name": "luci-app-cyberfeed",
"version": "0.1.1-r1",
"filename": "luci-app-cyberfeed_0.1.1-r1_all.ipk",
"size": 12835,
"size": 12838,
"category": "utility",
"icon": "package",
"description": "SecuBox package",
@ -78,7 +78,7 @@
"name": "luci-app-exposure",
"version": "1.0.0-r3",
"filename": "luci-app-exposure_1.0.0-r3_all.ipk",
"size": 20532,
"size": 20535,
"category": "utility",
"icon": "package",
"description": "SecuBox package",
@ -90,7 +90,7 @@
"name": "luci-app-gitea",
"version": "1.0.0-r2",
"filename": "luci-app-gitea_1.0.0-r2_all.ipk",
"size": 15586,
"size": 15585,
"category": "utility",
"icon": "package",
"description": "SecuBox package",
@ -102,7 +102,7 @@
"name": "luci-app-glances",
"version": "1.0.0-r2",
"filename": "luci-app-glances_1.0.0-r2_all.ipk",
"size": 6965,
"size": 6969,
"category": "utility",
"icon": "package",
"description": "SecuBox package",
@ -114,7 +114,7 @@
"name": "luci-app-haproxy",
"version": "1.0.0-r8",
"filename": "luci-app-haproxy_1.0.0-r8_all.ipk",
"size": 34165,
"size": 34169,
"category": "utility",
"icon": "package",
"description": "SecuBox package",
@ -126,7 +126,7 @@
"name": "luci-app-hexojs",
"version": "1.0.0-r3",
"filename": "luci-app-hexojs_1.0.0-r3_all.ipk",
"size": 32974,
"size": 32976,
"category": "utility",
"icon": "package",
"description": "SecuBox package",
@ -138,7 +138,7 @@
"name": "luci-app-jitsi",
"version": "1.0.0-r1",
"filename": "luci-app-jitsi_1.0.0-r1_all.ipk",
"size": 5140,
"size": 5135,
"category": "utility",
"icon": "package",
"description": "SecuBox package",
@ -150,7 +150,7 @@
"name": "luci-app-ksm-manager",
"version": "0.4.0-r2",
"filename": "luci-app-ksm-manager_0.4.0-r2_all.ipk",
"size": 18719,
"size": 18721,
"category": "system",
"icon": "cpu",
"description": "Kernel memory management",
@ -162,7 +162,7 @@
"name": "luci-app-localai",
"version": "0.1.0-r15",
"filename": "luci-app-localai_0.1.0-r15_all.ipk",
"size": 14358,
"size": 14361,
"category": "utility",
"icon": "package",
"description": "SecuBox package",
@ -174,7 +174,7 @@
"name": "luci-app-lyrion",
"version": "1.0.0-r1",
"filename": "luci-app-lyrion_1.0.0-r1_all.ipk",
"size": 6724,
"size": 6726,
"category": "utility",
"icon": "package",
"description": "SecuBox package",
@ -210,7 +210,7 @@
"name": "luci-app-media-flow",
"version": "0.6.4-r1",
"filename": "luci-app-media-flow_0.6.4-r1_all.ipk",
"size": 19111,
"size": 19126,
"category": "media",
"icon": "film",
"description": "Media streaming",
@ -222,7 +222,7 @@
"name": "luci-app-metablogizer",
"version": "1.0.0-r3",
"filename": "luci-app-metablogizer_1.0.0-r3_all.ipk",
"size": 23505,
"size": 23506,
"category": "utility",
"icon": "package",
"description": "SecuBox package",
@ -234,7 +234,7 @@
"name": "luci-app-metabolizer",
"version": "1.0.0-r2",
"filename": "luci-app-metabolizer_1.0.0-r2_all.ipk",
"size": 4754,
"size": 4760,
"category": "utility",
"icon": "package",
"description": "SecuBox package",
@ -244,9 +244,9 @@
,
{
"name": "luci-app-mitmproxy",
"version": "0.4.0-r6",
"filename": "luci-app-mitmproxy_0.4.0-r6_all.ipk",
"size": 18932,
"version": "0.5.0-r1",
"filename": "luci-app-mitmproxy_0.5.0-r1_all.ipk",
"size": 20408,
"category": "security",
"icon": "lock",
"description": "HTTPS proxy and traffic inspection",
@ -270,7 +270,7 @@
"name": "luci-app-mqtt-bridge",
"version": "0.4.0-r4",
"filename": "luci-app-mqtt-bridge_0.4.0-r4_all.ipk",
"size": 22775,
"size": 22780,
"category": "iot",
"icon": "message-square",
"description": "MQTT bridge",
@ -282,7 +282,7 @@
"name": "luci-app-ndpid",
"version": "1.1.2-r2",
"filename": "luci-app-ndpid_1.1.2-r2_all.ipk",
"size": 22453,
"size": 22455,
"category": "security",
"icon": "eye",
"description": "Deep packet inspection",
@ -294,7 +294,7 @@
"name": "luci-app-netdata-dashboard",
"version": "0.5.0-r2",
"filename": "luci-app-netdata-dashboard_0.5.0-r2_all.ipk",
"size": 22400,
"size": 22398,
"category": "monitoring",
"icon": "bar-chart-2",
"description": "System monitoring dashboard",
@ -318,7 +318,7 @@
"name": "luci-app-network-tweaks",
"version": "1.0.0-r7",
"filename": "luci-app-network-tweaks_1.0.0-r7_all.ipk",
"size": 15459,
"size": 15461,
"category": "network",
"icon": "wifi",
"description": "Network configuration",
@ -330,7 +330,7 @@
"name": "luci-app-nextcloud",
"version": "1.0.0-r1",
"filename": "luci-app-nextcloud_1.0.0-r1_all.ipk",
"size": 6481,
"size": 6484,
"category": "utility",
"icon": "package",
"description": "SecuBox package",
@ -342,7 +342,7 @@
"name": "luci-app-ollama",
"version": "0.1.0-r1",
"filename": "luci-app-ollama_0.1.0-r1_all.ipk",
"size": 11997,
"size": 11994,
"category": "utility",
"icon": "package",
"description": "SecuBox package",
@ -354,7 +354,7 @@
"name": "luci-app-picobrew",
"version": "1.0.0-r1",
"filename": "luci-app-picobrew_1.0.0-r1_all.ipk",
"size": 9979,
"size": 9975,
"category": "utility",
"icon": "package",
"description": "SecuBox package",
@ -366,7 +366,7 @@
"name": "luci-app-secubox",
"version": "0.7.1-r4",
"filename": "luci-app-secubox_0.7.1-r4_all.ipk",
"size": 49897,
"size": 49902,
"category": "system",
"icon": "box",
"description": "SecuBox system component",
@ -378,7 +378,7 @@
"name": "luci-app-secubox-admin",
"version": "1.0.0-r19",
"filename": "luci-app-secubox-admin_1.0.0-r19_all.ipk",
"size": 57097,
"size": 57096,
"category": "system",
"icon": "box",
"description": "SecuBox system component",
@ -390,7 +390,7 @@
"name": "luci-app-secubox-crowdsec",
"version": "1.0.0-r3",
"filename": "luci-app-secubox-crowdsec_1.0.0-r3_all.ipk",
"size": 13919,
"size": 13920,
"category": "system",
"icon": "box",
"description": "SecuBox system component",
@ -402,7 +402,7 @@
"name": "luci-app-secubox-netdiag",
"version": "1.0.0-r1",
"filename": "luci-app-secubox-netdiag_1.0.0-r1_all.ipk",
"size": 11999,
"size": 11997,
"category": "system",
"icon": "box",
"description": "SecuBox system component",
@ -414,7 +414,7 @@
"name": "luci-app-secubox-netifyd",
"version": "1.2.1-r1",
"filename": "luci-app-secubox-netifyd_1.2.1-r1_all.ipk",
"size": 39497,
"size": 39499,
"category": "system",
"icon": "box",
"description": "SecuBox system component",
@ -450,7 +450,7 @@
"name": "luci-app-secubox-security-threats",
"version": "1.0.0-r4",
"filename": "luci-app-secubox-security-threats_1.0.0-r4_all.ipk",
"size": 13904,
"size": 13908,
"category": "system",
"icon": "box",
"description": "SecuBox system component",
@ -462,7 +462,7 @@
"name": "luci-app-service-registry",
"version": "1.0.0-r1",
"filename": "luci-app-service-registry_1.0.0-r1_all.ipk",
"size": 39821,
"size": 39825,
"category": "utility",
"icon": "package",
"description": "SecuBox package",
@ -474,7 +474,7 @@
"name": "luci-app-streamlit",
"version": "1.0.0-r9",
"filename": "luci-app-streamlit_1.0.0-r9_all.ipk",
"size": 20469,
"size": 20470,
"category": "utility",
"icon": "package",
"description": "SecuBox package",
@ -486,7 +486,7 @@
"name": "luci-app-system-hub",
"version": "0.5.1-r4",
"filename": "luci-app-system-hub_0.5.1-r4_all.ipk",
"size": 66343,
"size": 66348,
"category": "system",
"icon": "settings",
"description": "System management",
@ -498,7 +498,7 @@
"name": "luci-app-tor-shield",
"version": "1.0.0-r10",
"filename": "luci-app-tor-shield_1.0.0-r10_all.ipk",
"size": 24532,
"size": 24537,
"category": "utility",
"icon": "package",
"description": "SecuBox package",
@ -510,7 +510,7 @@
"name": "luci-app-traffic-shaper",
"version": "0.4.0-r2",
"filename": "luci-app-traffic-shaper_0.4.0-r2_all.ipk",
"size": 15631,
"size": 15637,
"category": "network",
"icon": "filter",
"description": "Traffic shaping and QoS",
@ -522,7 +522,7 @@
"name": "luci-app-vhost-manager",
"version": "0.5.0-r5",
"filename": "luci-app-vhost-manager_0.5.0-r5_all.ipk",
"size": 26197,
"size": 26201,
"category": "network",
"icon": "server",
"description": "Virtual host management",
@ -534,7 +534,7 @@
"name": "luci-app-wireguard-dashboard",
"version": "0.7.0-r5",
"filename": "luci-app-wireguard-dashboard_0.7.0-r5_all.ipk",
"size": 45363,
"size": 45371,
"category": "vpn",
"icon": "shield",
"description": "WireGuard VPN dashboard",
@ -546,7 +546,7 @@
"name": "luci-app-zigbee2mqtt",
"version": "1.0.0-r2",
"filename": "luci-app-zigbee2mqtt_1.0.0-r2_all.ipk",
"size": 7084,
"size": 7091,
"category": "iot",
"icon": "radio",
"description": "Zigbee device management",
@ -558,7 +558,7 @@
"name": "luci-theme-secubox",
"version": "0.4.7-r1",
"filename": "luci-theme-secubox_0.4.7-r1_all.ipk",
"size": 111791,
"size": 111796,
"category": "theme",
"icon": "palette",
"description": "LuCI theme",
@ -570,7 +570,7 @@
"name": "secubox-app",
"version": "1.0.0-r2",
"filename": "secubox-app_1.0.0-r2_all.ipk",
"size": 11180,
"size": 11183,
"category": "utility",
"icon": "package",
"description": "SecuBox package",
@ -582,7 +582,7 @@
"name": "secubox-app-adguardhome",
"version": "1.0.0-r2",
"filename": "secubox-app-adguardhome_1.0.0-r2_all.ipk",
"size": 2878,
"size": 2880,
"category": "secubox",
"icon": "package",
"description": "SecuBox backend service",
@ -594,7 +594,7 @@
"name": "secubox-app-auth-logger",
"version": "1.2.2-r1",
"filename": "secubox-app-auth-logger_1.2.2-r1_all.ipk",
"size": 9374,
"size": 9379,
"category": "secubox",
"icon": "package",
"description": "SecuBox backend service",
@ -606,7 +606,7 @@
"name": "secubox-app-crowdsec-custom",
"version": "1.1.0-r1",
"filename": "secubox-app-crowdsec-custom_1.1.0-r1_all.ipk",
"size": 5759,
"size": 5762,
"category": "secubox",
"icon": "package",
"description": "SecuBox backend service",
@ -618,7 +618,7 @@
"name": "secubox-app-cs-firewall-bouncer",
"version": "0.0.31-r4_aarch64",
"filename": "secubox-app-cs-firewall-bouncer_0.0.31-r4_aarch64_cortex-a72.ipk",
"size": 5049323,
"size": 5049320,
"category": "secubox",
"icon": "package",
"description": "SecuBox backend service",
@ -630,7 +630,7 @@
"name": "secubox-app-cyberfeed",
"version": "0.2.1-r1",
"filename": "secubox-app-cyberfeed_0.2.1-r1_all.ipk",
"size": 12449,
"size": 12452,
"category": "secubox",
"icon": "package",
"description": "SecuBox backend service",
@ -654,7 +654,7 @@
"name": "secubox-app-exposure",
"version": "1.0.0-r1",
"filename": "secubox-app-exposure_1.0.0-r1_all.ipk",
"size": 6832,
"size": 6834,
"category": "secubox",
"icon": "package",
"description": "SecuBox backend service",
@ -666,7 +666,7 @@
"name": "secubox-app-gitea",
"version": "1.0.0-r5",
"filename": "secubox-app-gitea_1.0.0-r5_all.ipk",
"size": 9405,
"size": 9406,
"category": "secubox",
"icon": "package",
"description": "SecuBox backend service",
@ -678,7 +678,7 @@
"name": "secubox-app-glances",
"version": "1.0.0-r1",
"filename": "secubox-app-glances_1.0.0-r1_all.ipk",
"size": 5531,
"size": 5537,
"category": "secubox",
"icon": "package",
"description": "SecuBox backend service",
@ -690,7 +690,7 @@
"name": "secubox-app-haproxy",
"version": "1.0.0-r23",
"filename": "secubox-app-haproxy_1.0.0-r23_all.ipk",
"size": 15674,
"size": 15681,
"category": "secubox",
"icon": "package",
"description": "SecuBox backend service",
@ -702,7 +702,7 @@
"name": "secubox-app-hexojs",
"version": "1.0.0-r8",
"filename": "secubox-app-hexojs_1.0.0-r8_all.ipk",
"size": 94931,
"size": 94937,
"category": "secubox",
"icon": "package",
"description": "SecuBox backend service",
@ -714,7 +714,7 @@
"name": "secubox-app-jitsi",
"version": "1.0.0-r1",
"filename": "secubox-app-jitsi_1.0.0-r1_all.ipk",
"size": 8908,
"size": 8916,
"category": "secubox",
"icon": "package",
"description": "SecuBox backend service",
@ -726,7 +726,7 @@
"name": "secubox-app-localai",
"version": "2.25.0-r1",
"filename": "secubox-app-localai_2.25.0-r1_all.ipk",
"size": 5714,
"size": 5726,
"category": "secubox",
"icon": "package",
"description": "SecuBox backend service",
@ -738,7 +738,7 @@
"name": "secubox-app-localai-wb",
"version": "2.25.0-r1",
"filename": "secubox-app-localai-wb_2.25.0-r1_all.ipk",
"size": 7942,
"size": 7953,
"category": "secubox",
"icon": "package",
"description": "SecuBox backend service",
@ -750,7 +750,7 @@
"name": "secubox-app-lyrion",
"version": "2.0.2-r1",
"filename": "secubox-app-lyrion_2.0.2-r1_all.ipk",
"size": 7275,
"size": 7285,
"category": "secubox",
"icon": "package",
"description": "SecuBox backend service",
@ -762,7 +762,7 @@
"name": "secubox-app-magicmirror2",
"version": "0.4.0-r8",
"filename": "secubox-app-magicmirror2_0.4.0-r8_all.ipk",
"size": 9247,
"size": 9254,
"category": "secubox",
"icon": "package",
"description": "SecuBox backend service",
@ -786,7 +786,7 @@
"name": "secubox-app-metabolizer",
"version": "1.0.0-r3",
"filename": "secubox-app-metabolizer_1.0.0-r3_all.ipk",
"size": 13980,
"size": 13979,
"category": "secubox",
"icon": "package",
"description": "SecuBox backend service",
@ -796,9 +796,9 @@
,
{
"name": "secubox-app-mitmproxy",
"version": "0.4.0-r16",
"filename": "secubox-app-mitmproxy_0.4.0-r16_all.ipk",
"size": 10208,
"version": "0.5.0-r17",
"filename": "secubox-app-mitmproxy_0.5.0-r17_all.ipk",
"size": 18508,
"category": "secubox",
"icon": "package",
"description": "SecuBox backend service",
@ -834,7 +834,7 @@
"name": "secubox-app-ollama",
"version": "0.1.0-r1",
"filename": "secubox-app-ollama_0.1.0-r1_all.ipk",
"size": 5734,
"size": 5735,
"category": "secubox",
"icon": "package",
"description": "SecuBox backend service",
@ -846,7 +846,7 @@
"name": "secubox-app-picobrew",
"version": "1.0.0-r7",
"filename": "secubox-app-picobrew_1.0.0-r7_all.ipk",
"size": 5541,
"size": 5543,
"category": "secubox",
"icon": "package",
"description": "SecuBox backend service",
@ -858,7 +858,7 @@
"name": "secubox-app-streamlit",
"version": "1.0.0-r5",
"filename": "secubox-app-streamlit_1.0.0-r5_all.ipk",
"size": 11718,
"size": 11720,
"category": "secubox",
"icon": "package",
"description": "SecuBox backend service",
@ -870,7 +870,7 @@
"name": "secubox-app-tor",
"version": "1.0.0-r1",
"filename": "secubox-app-tor_1.0.0-r1_all.ipk",
"size": 7382,
"size": 7377,
"category": "secubox",
"icon": "package",
"description": "SecuBox backend service",
@ -882,7 +882,7 @@
"name": "secubox-app-webapp",
"version": "1.5.0-r7",
"filename": "secubox-app-webapp_1.5.0-r7_all.ipk",
"size": 39169,
"size": 39170,
"category": "secubox",
"icon": "package",
"description": "SecuBox backend service",
@ -894,7 +894,7 @@
"name": "secubox-app-zigbee2mqtt",
"version": "1.0.0-r3",
"filename": "secubox-app-zigbee2mqtt_1.0.0-r3_all.ipk",
"size": 3544,
"size": 3542,
"category": "secubox",
"icon": "package",
"description": "SecuBox backend service",
@ -906,7 +906,7 @@
"name": "secubox-core",
"version": "0.10.0-r11",
"filename": "secubox-core_0.10.0-r11_all.ipk",
"size": 87807,
"size": 87809,
"category": "system",
"icon": "box",
"description": "SecuBox core components",
@ -918,7 +918,7 @@
"name": "secubox-p2p",
"version": "0.6.0-r1",
"filename": "secubox-p2p_0.6.0-r1_all.ipk",
"size": 40189,
"size": 40190,
"category": "utility",
"icon": "package",
"description": "SecuBox package",

View File

@ -45,6 +45,16 @@ config whitelist 'whitelist'
# list bypass_ip 'x.x.x.x'
# list bypass_domain 'example.com'
# HAProxy backend inspection mode
config haproxy_router 'haproxy_router'
option enabled '0'
# Port HAProxy sends traffic to
option listen_port '8889'
# Enable threat detection on HAProxy traffic
option threat_detection '1'
# Routes file (auto-generated from HAProxy UCI)
option routes_file '/srv/mitmproxy/haproxy-routes.json'
# CDN/MediaFlow filtering addon
config filtering 'filtering'
option enabled '0'
@ -57,7 +67,7 @@ config filtering 'filtering'
# Block ads and trackers
option block_ads '0'
# Custom filter script path
option addon_script '/etc/mitmproxy/addons/secubox_filter.py'
option addon_script '/data/addons/secubox_analytics.py'
# Capture settings
config capture 'capture'

View File

@ -27,6 +27,9 @@ Commands:
cert Show CA certificate info / export path
firewall-setup Setup nftables rules for transparent mode
firewall-clear Remove nftables transparent mode rules
sync-routes Sync HAProxy backends to mitmproxy routes
haproxy-enable Enable HAProxy backend inspection mode
haproxy-disable Disable HAProxy backend inspection mode
service-run Internal: run container under procd
service-stop Stop container
@ -36,8 +39,13 @@ Modes (configure in /etc/config/mitmproxy):
upstream - Forward to upstream proxy
reverse - Reverse proxy mode
Web Interface: http://<router-ip>:8081
HAProxy Integration:
When enabled, HAProxy backends route through mitmproxy for
threat detection. Use 'mitmproxyctl haproxy-enable' to setup.
Web Interface: http://<router-ip>:8082
Proxy Port: 8888
HAProxy Port: 8889
EOF
}
@ -84,7 +92,13 @@ load_config() {
filter_cdn="$(uci_get filtering.filter_cdn || echo 0)"
filter_media="$(uci_get filtering.filter_media || echo 0)"
block_ads="$(uci_get filtering.block_ads || echo 0)"
addon_script="$(uci_get filtering.addon_script || echo /etc/mitmproxy/addons/secubox_filter.py)"
addon_script="$(uci_get filtering.addon_script || echo /data/addons/secubox_analytics.py)"
# HAProxy router settings
haproxy_router_enabled="$(uci_get haproxy_router.enabled || echo 0)"
haproxy_listen_port="$(uci_get haproxy_router.listen_port || echo 8889)"
haproxy_threat_detection="$(uci_get haproxy_router.threat_detection || echo 1)"
haproxy_routes_file="$(uci_get haproxy_router.routes_file || echo /srv/mitmproxy/haproxy-routes.json)"
}
ensure_dir() { [ -d "$1" ] || mkdir -p "$1"; }
@ -367,21 +381,39 @@ WEB_HOST="${MITMPROXY_WEB_HOST:-0.0.0.0}"
ADDON_SCRIPT="${MITMPROXY_ADDON_SCRIPT:-}"
FILTERING_ENABLED="${MITMPROXY_FILTERING_ENABLED:-0}"
# HAProxy router mode
HAPROXY_ROUTER_ENABLED="${MITMPROXY_HAPROXY_ROUTER_ENABLED:-0}"
HAPROXY_LISTEN_PORT="${MITMPROXY_HAPROXY_LISTEN_PORT:-8889}"
HAPROXY_ROUTES_FILE="${MITMPROXY_HAPROXY_ROUTES_FILE:-/data/haproxy-routes.json}"
# Build args
ARGS="--listen-host 0.0.0.0 --listen-port $PROXY_PORT --set confdir=/data"
ARGS="$ARGS --web-host $WEB_HOST --web-port $WEB_PORT --no-web-open-browser"
# Configure mode
case "$MODE" in
transparent) ARGS="$ARGS --mode transparent" ;;
upstream) [ -n "$UPSTREAM_PROXY" ] && ARGS="$ARGS --mode upstream:$UPSTREAM_PROXY" ;;
reverse) [ -n "$REVERSE_TARGET" ] && ARGS="$ARGS --mode reverse:$REVERSE_TARGET" ;;
esac
# HAProxy router mode: add additional listening port for HAProxy traffic
if [ "$HAPROXY_ROUTER_ENABLED" = "1" ]; then
ARGS="$ARGS --mode regular@$HAPROXY_LISTEN_PORT"
echo "HAProxy router mode: listening on port $HAPROXY_LISTEN_PORT"
# Load HAProxy router addon
if [ -f "/data/addons/haproxy_router.py" ]; then
ARGS="$ARGS -s /data/addons/haproxy_router.py"
echo "Loading HAProxy router addon"
fi
fi
[ "$SSL_INSECURE" = "1" ] && ARGS="$ARGS --ssl-insecure"
[ "$ANTICACHE" = "1" ] && ARGS="$ARGS --anticache"
[ "$ANTICOMP" = "1" ] && ARGS="$ARGS --anticomp"
# Load addon script if filtering is enabled
# Load analytics addon if filtering is enabled
if [ "$FILTERING_ENABLED" = "1" ] && [ -n "$ADDON_SCRIPT" ] && [ -f "$ADDON_SCRIPT" ]; then
ARGS="$ARGS -s $ADDON_SCRIPT"
echo "Loading addon: $ADDON_SCRIPT"
@ -390,6 +422,7 @@ fi
rm -f /data/.mitmproxy_token /tmp/mitmweb.log
echo "Starting mitmweb..."
echo "Command: mitmweb $ARGS"
# Start mitmweb in background, output to log file
/usr/local/bin/mitmweb $ARGS 2>&1 | tee /tmp/mitmweb.log &
@ -409,7 +442,9 @@ while [ $ATTEMPTS -lt $MAX_ATTEMPTS ]; do
TOKEN=$(grep -o 'token=[a-zA-Z0-9_-]*' /tmp/mitmweb.log 2>/dev/null | head -1 | cut -d= -f2)
if [ -n "$TOKEN" ]; then
echo "$TOKEN" > /data/.mitmproxy_token
chmod 644 /data/.mitmproxy_token
echo "Token captured: $(echo "$TOKEN" | cut -c1-8)..."
echo "Web UI: http://$WEB_HOST:$WEB_PORT/?token=$TOKEN"
break
fi
fi
@ -628,6 +663,11 @@ lxc.environment = MITMPROXY_FILTER_MEDIA=$filter_media
lxc.environment = MITMPROXY_BLOCK_ADS=$block_ads
lxc.environment = MITMPROXY_LOG_FILE=/data/requests.log
# HAProxy router mode
lxc.environment = MITMPROXY_HAPROXY_ROUTER_ENABLED=$haproxy_router_enabled
lxc.environment = MITMPROXY_HAPROXY_LISTEN_PORT=$haproxy_listen_port
lxc.environment = MITMPROXY_HAPROXY_ROUTES_FILE=/data/haproxy-routes.json
# Capabilities
lxc.cap.drop = sys_admin sys_module mac_admin mac_override
@ -870,6 +910,168 @@ cmd_service_stop() {
lxc_stop
}
# =============================================================================
# HAPROXY BACKEND INSPECTION
# =============================================================================
# Regenerate the mitmproxy routing table (domain -> [ip, port] JSON) from
# the HAProxy UCI configuration. The file is written into $data_path, which
# is the container's bind-mounted /data, so a running container sees the
# update immediately.
cmd_sync_routes() {
    load_config
    log_info "Syncing HAProxy backends to mitmproxy routes..."

    local routes_file="$data_path/haproxy-routes.json"
    local tmp_file="/tmp/haproxy-routes.tmp"

    # Start JSON
    echo "{" > "$tmp_file"

    local first=1
    local count=0
    local vhost domain backend server addr ip port

    # Iterate vhost section names with a for-loop instead of piping into
    # `while read`: a pipeline runs the loop body in a subshell, which would
    # discard $first and $count (the summary below would always say 0).
    for vhost in $(uci show haproxy 2>/dev/null | grep "=vhost" | cut -d'=' -f1 | cut -d'.' -f2); do
        domain=$(uci -q get haproxy.$vhost.domain)
        backend=$(uci -q get haproxy.$vhost.backend)
        [ -n "$domain" ] && [ -n "$backend" ] || continue

        # Backend server spec: "name ip:port check [options]"
        server=$(uci -q get haproxy.$backend.server)
        [ -n "$server" ] || continue

        addr=$(echo "$server" | awk '{print $2}')
        ip=${addr%:*}
        port=${addr##*:}
        # Spec without an explicit port: both expansions yield the address
        [ "$ip" = "$port" ] && port="80"

        [ $first -eq 0 ] && echo "," >> "$tmp_file"
        first=0
        count=$((count + 1))
        echo "  \"$domain\": [\"$ip\", $port]" >> "$tmp_file"
        log_info " $domain -> $ip:$port"
    done

    echo "}" >> "$tmp_file"

    # Move to final location
    mv "$tmp_file" "$routes_file"
    chmod 644 "$routes_file"
    log_info "Generated $routes_file with $count routes"

    # The file already lives in the container's data path, so no copy is
    # needed (the previous `cp` copied the file onto itself and errored).
    if lxc-info -n "$LXC_NAME" -s 2>/dev/null | grep -q "RUNNING"; then
        log_info "Routes synced to container"
    fi
}
# Enable HAProxy backend inspection mode.
# Flips the UCI flag, regenerates the routes file, creates a
# "mitmproxy_inspector" HAProxy backend pointing at mitmproxy's HAProxy
# listen port, and repoints every vhost at it, remembering each vhost's
# previous backend in option `original_backend` (restored by
# cmd_haproxy_disable).
cmd_haproxy_enable() {
    require_root
    load_config
    log_info "Enabling HAProxy backend inspection..."

    # 1. Enable HAProxy router in config
    uci set mitmproxy.haproxy_router.enabled='1'
    uci commit mitmproxy

    # 2. Sync routes from HAProxy
    cmd_sync_routes

    # 3. Create HAProxy backend for mitmproxy
    log_info "Configuring HAProxy backend 'mitmproxy_inspector'..."

    # Check if backend already exists (keeps repeated enables idempotent)
    if ! uci -q get haproxy.mitmproxy_inspector >/dev/null 2>&1; then
        uci set haproxy.mitmproxy_inspector=backend
        # $haproxy_listen_port is populated by load_config (default 8889)
        uci set haproxy.mitmproxy_inspector.server="mitmproxy 127.0.0.1:$haproxy_listen_port check"
    fi

    # 4. Store original backends and update vhosts to use mitmproxy
    log_info "Updating HAProxy vhosts to route through mitmproxy..."
    local updated=0
    for vhost in $(uci show haproxy 2>/dev/null | grep "=vhost" | cut -d'=' -f1 | cut -d'.' -f2); do
        local current_backend=$(uci -q get haproxy.$vhost.backend)

        # Skip if already using mitmproxy or if it's the fallback
        if [ "$current_backend" = "mitmproxy_inspector" ] || [ "$current_backend" = "fallback" ]; then
            continue
        fi

        # Store original backend so cmd_haproxy_disable can restore it
        uci set haproxy.$vhost.original_backend="$current_backend"
        # Set to mitmproxy
        uci set haproxy.$vhost.backend="mitmproxy_inspector"
        updated=$((updated + 1))

        local domain=$(uci -q get haproxy.$vhost.domain)
        log_info " $domain: $current_backend -> mitmproxy_inspector"
    done

    uci commit haproxy
    log_info "Updated $updated vhosts"

    # 5. Restart services (prefer HAProxy reload; fall back to restart)
    log_info "Restarting services..."
    /etc/init.d/mitmproxy restart
    /etc/init.d/haproxy reload 2>/dev/null || /etc/init.d/haproxy restart 2>/dev/null

    log_info ""
    log_info "HAProxy backend inspection ENABLED"
    log_info "All vhost traffic now flows through mitmproxy for threat detection"
    log_info "View threats at: http://<router-ip>/cgi-bin/luci/admin/services/mitmproxy"
}
# Disable HAProxy backend inspection mode.
# Clears the UCI flag and restores each vhost's saved `original_backend`,
# undoing cmd_haproxy_enable. mitmproxy itself is left running; only
# HAProxy is re-routed.
cmd_haproxy_disable() {
    require_root
    load_config
    log_info "Disabling HAProxy backend inspection..."

    # 1. Disable HAProxy router in config
    uci set mitmproxy.haproxy_router.enabled='0'
    uci commit mitmproxy

    # 2. Restore original backends
    log_info "Restoring original HAProxy backends..."
    local restored=0
    for vhost in $(uci show haproxy 2>/dev/null | grep "=vhost" | cut -d'=' -f1 | cut -d'.' -f2); do
        local original_backend=$(uci -q get haproxy.$vhost.original_backend)

        if [ -n "$original_backend" ]; then
            uci set haproxy.$vhost.backend="$original_backend"
            # Drop the marker so a later enable re-captures the backend
            uci delete haproxy.$vhost.original_backend 2>/dev/null
            restored=$((restored + 1))

            local domain=$(uci -q get haproxy.$vhost.domain)
            log_info " $domain: mitmproxy_inspector -> $original_backend"
        fi
    done

    uci commit haproxy
    log_info "Restored $restored vhosts"

    # 3. Restart services (prefer HAProxy reload; fall back to restart)
    log_info "Restarting services..."
    /etc/init.d/haproxy reload 2>/dev/null || /etc/init.d/haproxy restart 2>/dev/null

    log_info ""
    log_info "HAProxy backend inspection DISABLED"
    log_info "Vhosts now route directly to their original backends"
}
# Main Entry Point
case "${1:-}" in
install) shift; cmd_install "$@" ;;
@ -881,6 +1083,9 @@ case "${1:-}" in
cert) shift; cmd_cert "$@" ;;
firewall-setup) shift; cmd_firewall_setup "$@" ;;
firewall-clear) shift; cmd_firewall_clear "$@" ;;
sync-routes) shift; cmd_sync_routes "$@" ;;
haproxy-enable) shift; cmd_haproxy_enable "$@" ;;
haproxy-disable) shift; cmd_haproxy_disable "$@" ;;
service-run) shift; cmd_service_run "$@" ;;
service-stop) shift; cmd_service_stop "$@" ;;
help|--help|-h|'') usage ;;

View File

@ -0,0 +1,164 @@
#!/usr/bin/env python3
"""
HAProxy Backend Router for mitmproxy
Routes incoming requests from HAProxy to actual backends based on Host header
Works with secubox_analytics.py for threat detection
"""
import json
import os
import subprocess
from mitmproxy import http, ctx
from mitmproxy.net.server_spec import ServerSpec
from mitmproxy.connection import Address
# Backend routing configuration file
ROUTES_FILE = "/data/haproxy-routes.json"
DEFAULT_BACKEND = ("127.0.0.1", 8081) # LuCI fallback
class HaproxyRouter:
    """mitmproxy addon acting as HAProxy's "inspection" backend: each
    request HAProxy hands us is re-targeted to the real backend selected
    by its Host header, so other addons (threat detection / analytics)
    see the traffic first.

    The routing table comes from ROUTES_FILE, a JSON object mapping
    hostname -> ["ip", port]. Keys of the form "*.example.com" act as
    wildcards for subdomains (and the bare domain).
    """

    def __init__(self):
        self.routes = {}
        self._load_routes()
        ctx.log.info(f"HAProxy Router loaded with {len(self.routes)} routes")

    def _load_routes(self):
        """Load routing table from JSON file; seed defaults if missing."""
        if os.path.exists(ROUTES_FILE):
            try:
                with open(ROUTES_FILE, 'r') as f:
                    self.routes = json.load(f)
                ctx.log.info(f"Loaded routes: {list(self.routes.keys())}")
            except Exception as e:
                # Corrupt/unreadable file: run with an empty table rather
                # than crashing the proxy at startup.
                ctx.log.error(f"Failed to load routes: {e}")
                self.routes = {}
        else:
            ctx.log.warn(f"Routes file not found: {ROUTES_FILE}")
            self._generate_default_routes()

    def _generate_default_routes(self):
        """Seed self.routes with the known SecuBox vhosts and persist
        them so the next start finds ROUTES_FILE."""
        self.routes = {
            # Format: "hostname": ["ip", port]
            "blog.cybermind.fr": ["192.168.255.1", 4000],
            "devel.cybermind.fr": ["192.168.255.1", 3000],
            "devel.maegia.tv": ["192.168.255.1", 3000],
            "play.cybermind.fr": ["192.168.255.1", 8501],
            "crt.cybermind.fr": ["192.168.255.1", 8502],
            "secubox.maegia.tv": ["127.0.0.1", 8081],
            "glances.maegia.tv": ["192.168.255.1", 61208],
            "factory.maegia.tv": ["192.168.255.1", 7331],
            "meet.maegia.tv": ["127.0.0.1", 8443],
        }
        # Save for next time. Best effort only (the in-memory table is
        # enough to route) -- but log instead of a bare `except: pass`.
        try:
            with open(ROUTES_FILE, 'w') as f:
                json.dump(self.routes, f, indent=2)
        except OSError as e:
            ctx.log.warn(f"Could not persist default routes: {e}")

    def _get_backend(self, host: str) -> tuple:
        """Return (ip, port) for `host`: exact match first, then "*."
        wildcard patterns, then DEFAULT_BACKEND."""
        # Remove port from host if present, normalise case
        hostname = host.split(':')[0].lower()

        if hostname in self.routes:
            backend = self.routes[hostname]
            return (backend[0], backend[1])

        # Wildcard matching: "*.example.com" matches "foo.example.com"
        # and bare "example.com", but NOT "notexample.com". The previous
        # endswith("example.com") check wrongly matched the latter.
        for pattern, backend in self.routes.items():
            if pattern.startswith('*.'):
                bare = pattern[2:]
                if hostname == bare or hostname.endswith('.' + bare):
                    return (backend[0], backend[1])

        ctx.log.warn(f"No route for {hostname}, using default")
        return DEFAULT_BACKEND

    def request(self, flow: http.HTTPFlow):
        """mitmproxy hook: rewrite the flow's upstream target to the
        backend chosen for the request's Host header."""
        host = flow.request.host_header or flow.request.host
        backend = self._get_backend(host)

        # Re-target the upstream connection
        flow.request.host = backend[0]
        flow.request.port = backend[1]

        # Log routing decision
        ctx.log.debug(f"ROUTE: {host} -> {backend[0]}:{backend[1]}")

        # Expose the decision to downstream addons (analytics)
        flow.metadata['original_host'] = host
        flow.metadata['backend'] = f"{backend[0]}:{backend[1]}"
def generate_routes_from_uci():
    """Build a {domain: [ip, port]} routing table from HAProxy UCI config.

    Runs `uci show haproxy` and correlates vhost sections (domain +
    backend name) with backend sections (server spec "name ip:port ...").

    Returns:
        dict mapping domain -> [ip, port]; {} on any failure (uci missing,
        non-zero exit, parse error).
    """
    routes = {}
    try:
        result = subprocess.run(
            ['uci', 'show', 'haproxy'],
            capture_output=True, text=True
        )
        if result.returncode != 0:
            return {}

        vhosts = {}
        backends = {}

        for line in result.stdout.split('\n'):
            if '=' not in line:
                continue
            # Split only on the first '=' so option values that themselves
            # contain '=' (URLs, HAProxy option strings) stay intact --
            # the previous split('=') truncated such values.
            key, value = line.split('=', 1)
            value = value.strip("'")

            # Section lines: haproxy.<name>=vhost / haproxy.<name>=backend
            if value == 'vhost':
                vhosts[key.split('.')[1]] = {}
            elif key.endswith('.domain'):
                name = key.split('.')[1]
                if name in vhosts:
                    vhosts[name]['domain'] = value
            elif key.endswith('.backend'):
                name = key.split('.')[1]
                if name in vhosts:
                    vhosts[name]['backend'] = value

            if value == 'backend':
                backends[key.split('.')[1]] = {}
            elif key.endswith('.server'):
                # Parse "name ip:port check"
                name = key.split('.')[1]
                parts = value.split()
                if len(parts) >= 2 and ':' in parts[1]:
                    ip, port = parts[1].rsplit(':', 1)
                    backends[name] = {'ip': ip, 'port': int(port)}

        # Join vhost domains with their backend's address
        for vhost in vhosts.values():
            if 'domain' in vhost and 'backend' in vhost:
                backend = backends.get(vhost['backend'])
                if backend:
                    routes[vhost['domain']] = [
                        backend.get('ip', '127.0.0.1'),
                        backend.get('port', 80),
                    ]

        return routes
    except Exception as e:
        print(f"Error generating routes: {e}")
        return {}
if __name__ == "__main__":
    # CLI tool to generate routes: run directly (outside mitmproxy) to
    # print the UCI-derived routing table as JSON.
    routes = generate_routes_from_uci()
    print(json.dumps(routes, indent=2))

# mitmproxy addon entry point (loaded via `mitmweb -s haproxy_router.py`)
addons = [HaproxyRouter()]

View File

@ -0,0 +1,35 @@
#!/bin/sh /etc/rc.common
# SecuBox TFTP Recovery Server
# Provides network-based bare-metal recovery
#
# procd-managed wrapper around /usr/sbin/secubox-tftp-recovery. The
# daemon only starts when uci secubox.tftp_recovery.enabled is '1'.

START=96
STOP=10
USE_PROCD=1

PROG=/usr/sbin/secubox-tftp-recovery

start_service() {
    # Check if enabled in config; silently do nothing otherwise
    local enabled=$(uci -q get secubox.tftp_recovery.enabled)
    [ "$enabled" != "1" ] && return 0

    procd_open_instance
    procd_set_param command $PROG start
    # Respawn on crash; forward daemon output to the system log
    procd_set_param respawn
    procd_set_param stdout 1
    procd_set_param stderr 1
    procd_close_instance
}

stop_service() {
    # Let the daemon clean up its own state in addition to procd's kill
    $PROG stop
}

status() {
    $PROG status
}

reload_service() {
    # No live-reload support in the daemon; do a full stop/start cycle
    stop
    start
}

View File

@ -0,0 +1,115 @@
#!/bin/sh
# SecuBox TFTP Mesh Library
# Shared functions for mesh-based TFTP recovery
TFTP_ROOT="/srv/tftp"
RECOVERY_DIR="/srv/secubox/recovery"
MESH_RECOVERY_PORT=7331
# Get best recovery server from mesh
# Pick the lowest-latency mesh peer that serves recovery images.
# $1 (optional): board name the peer's images must match.
# Prints the chosen peer address (empty when none responds); returns 1
# when the P2P peers file is missing.
get_best_recovery_server() {
    local board="$1"
    local peers_file="/tmp/secubox-p2p-peers.json"
    local best_server=""
    local best_latency=""

    [ -f "$peers_file" ] || return 1

    local peer_count=$(jsonfilter -i "$peers_file" -e '@.peers[*]' 2>/dev/null | wc -l)
    local i=0
    while [ $i -lt $peer_count ]; do
        local peer_addr=$(jsonfilter -i "$peers_file" -e "@.peers[$i].address" 2>/dev/null)
        local is_local=$(jsonfilter -i "$peers_file" -e "@.peers[$i].is_local" 2>/dev/null)

        [ "$is_local" = "true" ] && { i=$((i + 1)); continue; }
        [ -z "$peer_addr" ] && { i=$((i + 1)); continue; }

        # Probe the peer's recovery manifest; elapsed time doubles as a
        # crude latency measure (nanoseconds where `date +%N` works, whole
        # seconds otherwise -- comparable since all probes use this host's
        # clock; TODO confirm busybox date supports %N on target).
        local start_time=$(date +%s%N 2>/dev/null || date +%s)
        local manifest=$(wget -q -T 2 -O - "http://$peer_addr:$MESH_RECOVERY_PORT/tftp/recovery-manifest.json" 2>/dev/null)
        local end_time=$(date +%s%N 2>/dev/null || date +%s)

        if [ -n "$manifest" ]; then
            # Check board compatibility if specified
            if [ -n "$board" ]; then
                local peer_board=$(echo "$manifest" | jsonfilter -e '@.board' 2>/dev/null)
                [ "$peer_board" != "$board" ] && { i=$((i + 1)); continue; }
            fi

            local latency=$((end_time - start_time))
            # First responder claims the slot; afterwards keep the smallest
            # latency. (The previous fixed sentinel of 9999 never matched
            # nanosecond-scale values, so no server was ever selected.)
            if [ -z "$best_latency" ] || [ "$latency" -lt "$best_latency" ]; then
                best_latency=$latency
                best_server="$peer_addr"
            fi
        fi
        i=$((i + 1))
    done

    echo "$best_server"
}
# Sync recovery images from mesh
# Sync recovery images and U-Boot scripts from the best mesh peer into
# $TFTP_ROOT. Files already present are kept. Returns 1 when no peer or
# no manifest is reachable.
sync_recovery_from_mesh() {
    local peer_addr=$(get_best_recovery_server)

    if [ -z "$peer_addr" ]; then
        echo "No recovery servers available on mesh"
        return 1
    fi

    echo "Syncing recovery images from $peer_addr..."
    mkdir -p "$TFTP_ROOT"

    # Get manifest
    local manifest=$(wget -q -O - "http://$peer_addr:$MESH_RECOVERY_PORT/tftp/recovery-manifest.json")
    if [ -z "$manifest" ]; then
        echo "Failed to fetch manifest"
        return 1
    fi

    # Download each image. Fetch via a temp file: `wget -O dest` truncates
    # or creates dest even when the download fails, and the [ ! -f ] guard
    # would then skip the broken zero-byte file on every future sync.
    echo "$manifest" | jsonfilter -e '@.images[*].name' 2>/dev/null | while read img; do
        if [ ! -f "$TFTP_ROOT/$img" ]; then
            echo "  Downloading: $img"
            if wget -q -O "$TFTP_ROOT/$img.tmp" "http://$peer_addr:$MESH_RECOVERY_PORT/tftp/$img"; then
                mv "$TFTP_ROOT/$img.tmp" "$TFTP_ROOT/$img"
            else
                rm -f "$TFTP_ROOT/$img.tmp"
            fi
        fi
    done

    # Download U-Boot scripts (same temp-file discipline)
    echo "$manifest" | jsonfilter -e '@.uboot_scripts[*]' 2>/dev/null | while read scr; do
        if [ ! -f "$TFTP_ROOT/$scr" ]; then
            echo "  Downloading: $scr"
            if wget -q -O "$TFTP_ROOT/$scr.tmp" "http://$peer_addr:$MESH_RECOVERY_PORT/tftp/$scr"; then
                mv "$TFTP_ROOT/$scr.tmp" "$TFTP_ROOT/$scr"
            else
                rm -f "$TFTP_ROOT/$scr.tmp"
            fi
        fi
    done

    echo "Recovery sync complete"
}
# Broadcast recovery availability to mesh
# Announce this node's TFTP recovery service on the SecuBox P2P mesh so
# peers can discover it (consumed by get_best_recovery_server on peers).
broadcast_recovery_service() {
    local node_id=$(cat /var/run/secubox-p2p/node.id 2>/dev/null || hostname)
    local lan_ip=$(uci -q get network.lan.ipaddr || echo "192.168.1.1")

    # Create service announcement for P2P
    cat > /tmp/secubox-recovery-service.json << EOF
{
"name": "tftp-recovery",
"type": "recovery",
"port": 69,
"address": "$lan_ip",
"node_id": "$node_id",
"protocol": "tftp",
"http_port": $MESH_RECOVERY_PORT
}
EOF

    # Register with local P2P daemon (best effort; skipped if not installed)
    if [ -x /usr/sbin/secubox-p2p ]; then
        /usr/sbin/secubox-p2p register-service tftp-recovery 69 2>/dev/null
    fi
}

View File

@ -0,0 +1,648 @@
#!/bin/sh
# SecuBox TFTP Recovery Server
# Provides network-based bare-metal recovery for SecuBox devices
#
# Usage:
# secubox-tftp-recovery start - Start TFTP recovery server
# secubox-tftp-recovery stop - Stop TFTP recovery server
# secubox-tftp-recovery status - Show server status
# secubox-tftp-recovery prepare - Prepare recovery images
# secubox-tftp-recovery mesh - Discover mesh recovery servers
# secubox-tftp-recovery uboot - Generate U-Boot recovery scripts
VERSION="1.0.0"
# Directory served by dnsmasq's built-in TFTP server
TFTP_ROOT="/srv/tftp"
# Working area for recovery artifacts
RECOVERY_DIR="/srv/secubox/recovery"
UBOOT_SCRIPTS_DIR="$RECOVERY_DIR/uboot"
IMAGES_DIR="$RECOVERY_DIR/images"
# Runtime state: STATE_FILE contains "running" while active
STATE_FILE="/var/run/secubox-tftp-recovery.state"
PID_FILE="/var/run/secubox-tftp-recovery.pid"
# Mesh discovery
P2P_STATE_DIR="/var/run/secubox-p2p"
MESH_RECOVERY_FILE="/tmp/secubox-mesh-recovery.json"
# Colors (ANSI escape sequences for terminal output)
RED='\033[0;31m'
GREEN='\033[0;32m'
YELLOW='\033[1;33m'
CYAN='\033[0;36m'
NC='\033[0m'
# Colored message helpers: info (green), warning (yellow), error (red).
log()   { echo -e "${GREEN}[TFTP]${NC} $1"; }
warn()  { echo -e "${YELLOW}[WARN]${NC} $1"; }
error() { echo -e "${RED}[ERROR]${NC} $1"; }
# ============================================================================
# TFTP Server Management
# ============================================================================

# Ensure every directory the recovery server uses exists (idempotent) and
# make the TFTP root world-readable.
init_dirs() {
    mkdir -p "$TFTP_ROOT" "$RECOVERY_DIR" "$UBOOT_SCRIPTS_DIR" "$IMAGES_DIR"
    chmod 755 "$TFTP_ROOT"
}
# Check if dnsmasq TFTP is available
# Succeeds (exit 0) when TFTP is already enabled either directly in
# /etc/dnsmasq.conf or through the UCI dhcp config.
check_dnsmasq_tftp() {
    grep -q "enable-tftp" /etc/dnsmasq.conf 2>/dev/null && return 0
    uci -q get dhcp.@dnsmasq[0].enable_tftp 2>/dev/null | grep -q "1" && return 0
    return 1
}
# Configure dnsmasq for TFTP
# Enables dnsmasq's built-in TFTP server via UCI, pointing it at $TFTP_ROOT.
# The caller is responsible for restarting dnsmasq afterwards.
configure_dnsmasq_tftp() {
    log "Configuring dnsmasq TFTP..."
    # Enable TFTP in dnsmasq via UCI
    uci -q set dhcp.@dnsmasq[0].enable_tftp='1'
    uci -q set dhcp.@dnsmasq[0].tftp_root="$TFTP_ROOT"
    # PXE boot filename handed out to DHCP clients
    uci -q set dhcp.@dnsmasq[0].dhcp_boot='pxelinux.0'
    # Add SecuBox recovery boot option
    # tftp_secure restricts served files to those owned by the dnsmasq user
    uci -q set dhcp.@dnsmasq[0].tftp_secure='1'
    uci commit dhcp
    log "TFTP configured at $TFTP_ROOT"
}
# Start TFTP recovery server
# Orchestrates startup: configures dnsmasq TFTP (first run), prepares
# recovery images and U-Boot scripts, restarts dnsmasq, announces the
# service on the mesh, and records running state.
start_server() {
    log "Starting TFTP recovery server..."
    init_dirs
    # Check if already running
    if [ -f "$PID_FILE" ] && kill -0 "$(cat $PID_FILE)" 2>/dev/null; then
        warn "TFTP recovery server already running"
        return 0
    fi
    # Configure dnsmasq TFTP if not already done
    if ! check_dnsmasq_tftp; then
        configure_dnsmasq_tftp
    fi
    # Prepare recovery images if not exists
    if [ ! -f "$TFTP_ROOT/secubox-recovery.bin" ]; then
        prepare_images
    fi
    # Generate U-Boot scripts
    generate_uboot_scripts
    # Restart dnsmasq to apply TFTP config
    /etc/init.d/dnsmasq restart
    # Announce on mesh
    announce_mesh_recovery
    # Save state
    echo "running" > "$STATE_FILE"
    # NOTE(review): $$ is this short-lived CLI process, so the recorded PID
    # goes stale immediately (the actual server is dnsmasq) — confirm intent.
    echo "$$" > "$PID_FILE"
    log "TFTP recovery server started"
    log "Recovery root: $TFTP_ROOT"
    log "Listen on: $(uci -q get network.lan.ipaddr):69"
}
# Stop TFTP recovery server
# Disables dnsmasq TFTP, restarts dnsmasq, withdraws the mesh announcement
# and clears the local state/PID files.
stop_server() {
    log "Stopping TFTP recovery server..."
    # Disable TFTP in dnsmasq
    uci -q set dhcp.@dnsmasq[0].enable_tftp='0'
    uci commit dhcp
    /etc/init.d/dnsmasq restart
    # Remove mesh announcement
    rm -f "$MESH_RECOVERY_FILE"
    # Clean state
    rm -f "$STATE_FILE" "$PID_FILE"
    log "TFTP recovery server stopped"
}
# Show server status
# Prints a human-readable report: server state, dnsmasq TFTP config,
# available recovery images, generated U-Boot scripts and mesh peers.
show_status() {
    echo ""
    echo "========================================"
    echo " SecuBox TFTP Recovery Server v$VERSION"
    echo "========================================"
    echo ""
    # Server status
    if [ -f "$STATE_FILE" ] && [ "$(cat "$STATE_FILE")" = "running" ]; then
        echo -e "Status: ${GREEN}Running${NC}"
    else
        echo -e "Status: ${RED}Stopped${NC}"
    fi
    # TFTP config
    local tftp_enabled=$(uci -q get dhcp.@dnsmasq[0].enable_tftp)
    if [ "$tftp_enabled" = "1" ]; then
        echo -e "TFTP: ${GREEN}Enabled${NC}"
    else
        echo -e "TFTP: ${RED}Disabled${NC}"
    fi
    echo "TFTP Root: $TFTP_ROOT"
    echo "Listen IP: $(uci -q get network.lan.ipaddr || echo 'unknown'):69"
    echo ""
    # Available images
    echo "Available Recovery Images:"
    if [ -d "$TFTP_ROOT" ]; then
        # FIX: a redirection ('2>/dev/null') inside a 'for' word list is a
        # shell syntax error; unmatched globs are skipped by the -f guard.
        for img in "$TFTP_ROOT"/*.bin "$TFTP_ROOT"/*.itb "$TFTP_ROOT"/*.img; do
            [ -f "$img" ] || continue
            local size=$(ls -lh "$img" 2>/dev/null | awk '{print $5}')
            echo " - $(basename "$img") ($size)"
        done
    else
        echo " (none)"
    fi
    echo ""
    # U-Boot scripts
    echo "U-Boot Scripts:"
    if [ -d "$UBOOT_SCRIPTS_DIR" ]; then
        for script in "$UBOOT_SCRIPTS_DIR"/*.scr "$UBOOT_SCRIPTS_DIR"/*.txt; do
            [ -f "$script" ] || continue
            echo " - $(basename "$script")"
        done
    else
        echo " (none)"
    fi
    echo ""
    # Mesh recovery servers
    echo "Mesh Recovery Servers:"
    discover_mesh_servers 2>/dev/null
    echo ""
}
# ============================================================================
# Recovery Image Preparation
# ============================================================================

# Collect recovery artifacts into $TFTP_ROOT: a sysupgrade configuration
# backup, any firmware images found in known staging directories, plus a
# JSON manifest (name/size/md5 per image) that mesh peers use for syncing.
prepare_images() {
    log "Preparing recovery images..."
    init_dirs
    # Get current firmware info
    local board=$(cat /tmp/sysinfo/board_name 2>/dev/null || echo "unknown")
    local model=$(cat /tmp/sysinfo/model 2>/dev/null || echo "Unknown")
    log "Board: $board"
    log "Model: $model"
    # Create recovery sysupgrade image
    local backup_file="$IMAGES_DIR/secubox-backup-$(date +%Y%m%d).tar.gz"
    log "Creating configuration backup..."
    if command -v sysupgrade >/dev/null 2>&1; then
        sysupgrade -b "$backup_file" 2>/dev/null
        if [ -f "$backup_file" ]; then
            cp "$backup_file" "$TFTP_ROOT/"
            log "Backup created: $(basename $backup_file)"
        fi
    fi
    # Look for existing firmware images
    local fw_paths="/tmp/firmware /overlay/firmware /www/firmware"
    for fwdir in $fw_paths; do
        if [ -d "$fwdir" ]; then
            # FIX: a redirection ('2>/dev/null') inside a 'for' word list is
            # a shell syntax error; the -f guard skips unmatched globs.
            for fw in "$fwdir"/*.bin "$fwdir"/*.itb; do
                [ -f "$fw" ] || continue
                local fname=$(basename "$fw")
                if [ ! -f "$TFTP_ROOT/$fname" ]; then
                    cp "$fw" "$TFTP_ROOT/"
                    log "Added firmware: $fname"
                fi
            done
        fi
    done
    # Create recovery manifest (heredoc expands the embedded command
    # substitutions at generation time)
    cat > "$TFTP_ROOT/recovery-manifest.json" << EOF
{
"version": "$VERSION",
"board": "$board",
"model": "$model",
"node_id": "$(cat $P2P_STATE_DIR/node.id 2>/dev/null || hostname)",
"created": "$(date -Iseconds 2>/dev/null || date)",
"images": [
$(ls -1 "$TFTP_ROOT"/*.bin "$TFTP_ROOT"/*.itb "$TFTP_ROOT"/*.tar.gz 2>/dev/null | while read f; do
local fname=$(basename "$f")
local size=$(stat -c%s "$f" 2>/dev/null || ls -l "$f" | awk '{print $5}')
local md5=$(md5sum "$f" 2>/dev/null | cut -d' ' -f1)
echo " {\"name\": \"$fname\", \"size\": $size, \"md5\": \"$md5\"}"
done | paste -sd ',' -)
],
"uboot_scripts": [
$(ls -1 "$TFTP_ROOT"/*.scr 2>/dev/null | while read f; do
echo " \"$(basename "$f")\""
done | paste -sd ',' -)
]
}
EOF
    log "Recovery manifest created"
    log "Images available in: $TFTP_ROOT"
}
# ============================================================================
# U-Boot Script Generation
# ============================================================================

# Generate per-platform U-Boot recovery scripts (generic ARM64, MochaBin,
# Raspberry Pi 4, x86/PXE notes) under $UBOOT_SCRIPTS_DIR, compile them to
# .scr with mkimage when available, and publish results into $TFTP_ROOT.
generate_uboot_scripts() {
    log "Generating U-Boot recovery scripts..."
    init_dirs
    local server_ip=$(uci -q get network.lan.ipaddr || echo "192.168.1.1")
    local load_addr="0x48000000"
    # Generic ARM64 recovery script.
    # NOTE: the heredoc delimiter is unquoted, so $server_ip and $load_addr
    # expand NOW (values baked into the generated script) while \$-escaped
    # variables stay literal for U-Boot to expand at boot time.
    cat > "$UBOOT_SCRIPTS_DIR/recovery-arm64.txt" << EOF
# SecuBox TFTP Recovery Script - ARM64
# Load this via: source \$serverip:recovery-arm64.txt
#
# Setup:
# setenv serverip $server_ip
# setenv ipaddr 192.168.1.100
# run secubox_recovery
setenv serverip $server_ip
setenv secubox_recovery 'tftp $load_addr secubox-recovery.bin; source $load_addr'
setenv secubox_backup 'tftp $load_addr secubox-backup.tar.gz'
setenv secubox_flash 'tftp $load_addr openwrt-sysupgrade.bin; nand erase.chip; nand write $load_addr 0 \$filesize'
echo "SecuBox TFTP Recovery loaded"
echo "Commands: run secubox_recovery | run secubox_backup | run secubox_flash"
EOF
    # MochaBin/Armada specific
    cat > "$UBOOT_SCRIPTS_DIR/recovery-mochabin.txt" << EOF
# SecuBox TFTP Recovery Script - MochaBin (Armada 8040)
#
# Emergency Recovery Steps:
# 1. Connect serial console (115200 8N1)
# 2. Power on, press any key to stop autoboot
# 3. Run these commands:
#
# setenv serverip $server_ip
# setenv ipaddr 192.168.1.100
# dhcp
# tftpboot 0x20000000 openwrt-mvebu-cortexa72-globalscale_mochabin-initramfs-fit.itb
# bootm 0x20000000
setenv serverip $server_ip
setenv loadaddr 0x20000000
# TFTP boot initramfs for recovery
setenv tftpboot_recovery 'dhcp; tftpboot \$loadaddr openwrt-mochabin-initramfs.itb; bootm \$loadaddr'
# Flash sysupgrade via TFTP
setenv tftpflash 'dhcp; tftpboot \$loadaddr openwrt-mochabin-sysupgrade.bin; sf probe; sf erase 0 +\$filesize; sf write \$loadaddr 0 \$filesize'
# MMC boot recovery
setenv mmcboot_recovery 'mmc dev 1; ext4load mmc 1:1 \$loadaddr /boot/Image; ext4load mmc 1:1 0x21000000 /boot/armada-8040-mcbin.dtb; booti \$loadaddr - 0x21000000'
echo "MochaBin Recovery Commands:"
echo " run tftpboot_recovery - Boot initramfs via TFTP"
echo " run tftpflash - Flash sysupgrade via TFTP"
echo " run mmcboot_recovery - Boot from MMC"
EOF
    # Raspberry Pi specific
    cat > "$UBOOT_SCRIPTS_DIR/recovery-rpi.txt" << EOF
# SecuBox TFTP Recovery Script - Raspberry Pi 4
#
# Recovery Steps:
# 1. Remove SD card, boot from USB/network
# 2. Or modify config.txt on SD:
# kernel=u-boot.bin
# arm_64bit=1
setenv serverip $server_ip
setenv loadaddr 0x01000000
setenv fdt_addr 0x02600000
# TFTP boot
setenv tftpboot 'dhcp; tftpboot \$loadaddr openwrt-rpi4-ext4.img.gz; source \$loadaddr'
# Recovery from USB
setenv usbboot 'usb start; ext4load usb 0:1 \$loadaddr /boot/Image; booti \$loadaddr - \$fdt_addr'
echo "Raspberry Pi Recovery loaded"
EOF
    # x86/EFI recovery (documentation-only script: PXE setup notes)
    cat > "$UBOOT_SCRIPTS_DIR/recovery-x86.txt" << EOF
# SecuBox Network Recovery - x86/EFI
#
# For x86 devices, use PXE boot:
# 1. Enable network boot in BIOS/UEFI
# 2. Configure DHCP to point to TFTP server
# 3. Boot from network
# DHCP config for dnsmasq (add to /etc/dnsmasq.conf):
# dhcp-boot=pxelinux.0,$server_ip
# dhcp-match=set:efi-x86_64,option:client-arch,7
# dhcp-boot=tag:efi-x86_64,grubx64.efi,$server_ip
# PXE menu file: pxelinux.cfg/default
# DEFAULT secubox
# LABEL secubox
# KERNEL openwrt-x86-64-generic-kernel.bin
# APPEND initrd=openwrt-x86-64-generic-rootfs.img root=/dev/ram0
EOF
    # Compile scripts to U-Boot format if mkimage available
    if command -v mkimage >/dev/null 2>&1; then
        for txt in "$UBOOT_SCRIPTS_DIR"/*.txt; do
            [ -f "$txt" ] || continue
            local scr="${txt%.txt}.scr"
            mkimage -T script -C none -n "SecuBox Recovery" -d "$txt" "$scr" 2>/dev/null
            if [ -f "$scr" ]; then
                cp "$scr" "$TFTP_ROOT/"
                log "Compiled: $(basename $scr)"
            fi
        done
    else
        # Just copy txt files so U-Boot can still 'source' them uncompiled
        cp "$UBOOT_SCRIPTS_DIR"/*.txt "$TFTP_ROOT/" 2>/dev/null
        warn "mkimage not available - scripts not compiled"
    fi
    log "U-Boot scripts generated in $UBOOT_SCRIPTS_DIR"
}
# ============================================================================
# Mesh Integration
# ============================================================================

# Publish this node's recovery service descriptor (JSON) to
# $MESH_RECOVERY_FILE, embedding the image list from the local manifest.
announce_mesh_recovery() {
    local node_id=$(cat "$P2P_STATE_DIR/node.id" 2>/dev/null || hostname)
    local node_name=$(uci -q get system.@system[0].hostname || hostname)
    local lan_ip=$(uci -q get network.lan.ipaddr || echo "192.168.1.1")
    # Create mesh announcement (image list copied from the local manifest,
    # falling back to an empty array when no manifest exists yet)
    cat > "$MESH_RECOVERY_FILE" << EOF
{
"type": "tftp-recovery",
"node_id": "$node_id",
"node_name": "$node_name",
"address": "$lan_ip",
"port": 69,
"status": "active",
"version": "$VERSION",
"announced": "$(date -Iseconds 2>/dev/null || date)",
"images": $(cat "$TFTP_ROOT/recovery-manifest.json" 2>/dev/null | jsonfilter -e '@.images' || echo '[]')
}
EOF
    log "Mesh recovery service announced"
}
# Enumerate known P2P peers and print every one that serves a recovery
# manifest over HTTP; prints a hint when none respond.
discover_mesh_servers() {
    local found=0
    # Check local peers file
    local peers_file="/tmp/secubox-p2p-peers.json"
    if [ -f "$peers_file" ]; then
        local peer_count=$(jsonfilter -i "$peers_file" -e '@.peers[*]' 2>/dev/null | wc -l)
        local i=0
        while [ $i -lt $peer_count ]; do
            local peer_addr=$(jsonfilter -i "$peers_file" -e "@.peers[$i].address" 2>/dev/null)
            local peer_name=$(jsonfilter -i "$peers_file" -e "@.peers[$i].name" 2>/dev/null)
            local is_local=$(jsonfilter -i "$peers_file" -e "@.peers[$i].is_local" 2>/dev/null)
            # Skip ourselves and peers with no usable address
            [ "$is_local" = "true" ] && { i=$((i + 1)); continue; }
            [ -z "$peer_addr" ] && { i=$((i + 1)); continue; }
            # Try to fetch recovery manifest from peer
            # NOTE(review): port 7331 is hardcoded here while other functions
            # use $MESH_RECOVERY_PORT — confirm these are meant to match.
            local manifest=$(wget -q -O - "http://$peer_addr:7331/tftp/recovery-manifest.json" 2>/dev/null)
            if [ -n "$manifest" ]; then
                local peer_board=$(echo "$manifest" | jsonfilter -e '@.board' 2>/dev/null)
                echo -e " ${GREEN}*${NC} $peer_name ($peer_addr) - $peer_board"
                found=$((found + 1))
            fi
            i=$((i + 1))
        done
    fi
    if [ $found -eq 0 ]; then
        echo " (no mesh recovery servers found)"
        echo " Run 'secubox-p2p discover' to find peers"
    fi
}
# Fetch recovery image from mesh peer
# Downloads one named image from a peer — TFTP first (faster for large
# files), HTTP as fallback — and publishes it into $TFTP_ROOT on success.
fetch_from_mesh() {
    local peer="$1"
    local img="$2"
    if [ -z "$peer" ] || [ -z "$img" ]; then
        echo "Usage: secubox-tftp-recovery fetch <peer_ip> <image_name>"
        return 1
    fi
    log "Fetching $img from $peer..."
    init_dirs
    # Try TFTP first (faster for large files)
    if command -v tftp >/dev/null 2>&1; then
        tftp -g -r "$img" -l "$IMAGES_DIR/$img" "$peer" 2>/dev/null
        if [ -f "$IMAGES_DIR/$img" ]; then
            log "Downloaded via TFTP: $img"
            cp "$IMAGES_DIR/$img" "$TFTP_ROOT/"
            return 0
        fi
    fi
    # Fallback to HTTP
    wget -q -O "$IMAGES_DIR/$img" "http://$peer:7331/tftp/$img" 2>/dev/null
    if [ -f "$IMAGES_DIR/$img" ]; then
        log "Downloaded via HTTP: $img"
        cp "$IMAGES_DIR/$img" "$TFTP_ROOT/"
        return 0
    fi
    error "Failed to fetch $img from $peer"
    return 1
}
# ============================================================================
# Recovery Operations
# ============================================================================

# Emergency recovery via TFTP
# $1: recovery server IP (auto-discovered from the mesh when omitted)
# $2: image name (defaults to the first image in the server's manifest)
# A *.tar.gz image is treated as a config backup (restore + reboot);
# a *.bin/*.itb image is flashed with 'sysupgrade -n'.
emergency_recovery() {
    local server_ip="$1"
    local image="$2"
    if [ -z "$server_ip" ]; then
        # Auto-discover from mesh
        log "Auto-discovering recovery server..."
        # FIX: busybox grep has no -P (PCRE); use an ERE character class
        # instead of \d so this works on stock OpenWrt.
        server_ip=$(discover_mesh_servers 2>/dev/null | grep -oE '[0-9]+\.[0-9]+\.[0-9]+\.[0-9]+' | head -1)
    fi
    if [ -z "$server_ip" ]; then
        error "No recovery server found. Specify IP: secubox-tftp-recovery emergency <server_ip>"
        return 1
    fi
    log "Emergency recovery from $server_ip"
    # Fetch recovery manifest (HTTP first, TFTP fallback)
    local manifest=$(wget -q -O - "http://$server_ip:7331/tftp/recovery-manifest.json" 2>/dev/null)
    if [ -z "$manifest" ]; then
        # Try TFTP directly
        tftp -g -r recovery-manifest.json -l /tmp/recovery-manifest.json "$server_ip" 2>/dev/null
        manifest=$(cat /tmp/recovery-manifest.json 2>/dev/null)
    fi
    if [ -z "$manifest" ]; then
        error "Cannot reach recovery server at $server_ip"
        return 1
    fi
    log "Recovery server found"
    echo "$manifest" | jsonfilter -e '@.images[*].name' 2>/dev/null | while read img; do
        echo " - $img"
    done
    # Select image (default: first listed in manifest)
    if [ -z "$image" ]; then
        image=$(echo "$manifest" | jsonfilter -e '@.images[0].name' 2>/dev/null)
    fi
    if [ -z "$image" ]; then
        error "No recovery image available"
        return 1
    fi
    log "Fetching recovery image: $image"
    # Download and apply
    mkdir -p /tmp/recovery
    if echo "$image" | grep -q "\.tar\.gz$"; then
        # Config backup - restore it
        tftp -g -r "$image" -l "/tmp/recovery/$image" "$server_ip" 2>/dev/null || \
        wget -q -O "/tmp/recovery/$image" "http://$server_ip:7331/tftp/$image"
        if [ -f "/tmp/recovery/$image" ]; then
            log "Restoring configuration backup..."
            sysupgrade -r "/tmp/recovery/$image"
            log "Configuration restored. Rebooting..."
            reboot
        fi
    elif echo "$image" | grep -qE "\.(bin|itb)$"; then
        # Firmware image - flash it (non-interactive, config not preserved)
        tftp -g -r "$image" -l "/tmp/recovery/$image" "$server_ip" 2>/dev/null || \
        wget -q -O "/tmp/recovery/$image" "http://$server_ip:7331/tftp/$image"
        if [ -f "/tmp/recovery/$image" ]; then
            log "Flashing firmware..."
            sysupgrade -n "/tmp/recovery/$image"
        fi
    fi
}
# ============================================================================
# Main
# ============================================================================

# Print CLI usage, examples and the manual U-Boot recovery procedure.
show_help() {
    cat << EOF
SecuBox TFTP Recovery Server v$VERSION
Usage: secubox-tftp-recovery <command> [options]
Commands:
start Start TFTP recovery server
stop Stop TFTP recovery server
status Show server status and available images
prepare Prepare recovery images (backup, firmware)
uboot Generate U-Boot recovery scripts
mesh Discover mesh recovery servers
fetch <ip> <img> Fetch recovery image from mesh peer
emergency [ip] Emergency recovery from mesh (auto-discover)
Examples:
secubox-tftp-recovery start
secubox-tftp-recovery status
secubox-tftp-recovery mesh
secubox-tftp-recovery fetch 192.168.1.2 secubox-backup.tar.gz
secubox-tftp-recovery emergency
U-Boot Recovery (at device console):
setenv serverip <this-device-ip>
setenv ipaddr 192.168.1.100
tftpboot 0x48000000 secubox-recovery.bin
source 0x48000000
EOF
}
# Command dispatch: map the first CLI argument to its handler.
case "$1" in
    start)          start_server ;;
    stop)           stop_server ;;
    status)         show_status ;;
    prepare)        prepare_images ;;
    uboot)          generate_uboot_scripts ;;
    mesh)
        echo ""
        echo "Mesh Recovery Servers:"
        discover_mesh_servers
        echo ""
        ;;
    fetch)          fetch_from_mesh "$2" "$3" ;;
    emergency)      emergency_recovery "$2" "$3" ;;
    -h|--help|help) show_help ;;
    *)
        # Unknown command: show usage and signal failure
        show_help
        exit 1
        ;;
esac
exit 0