feat(v0.17): P2P Mesh Recovery, MITM Analytics, Swiss Army Knife
Major features: - P2P Mesh distributed recovery infrastructure with blockchain catalog - MITM analytics proxy for external access monitoring (IP, country, scans) - SecuBox Swiss unified CLI tool for management & recovery - Python remote management console (secubox-console) - Multi-theme landing page generator (mirrorbox, cyberpunk, minimal, terminal, light) - Service Registry enhancements with health check and network diagnostics - Services page modernization with Service Registry API integration New components: - secubox-swiss: Swiss Army Knife unified management tool - secubox-mesh: P2P mesh networking and sync - secubox-recover: Snapshot, profiles, rollback, reborn scripts - secubox-console: Python remote management app - secubox_analytics.py: MITM traffic analysis addon Fixes: - Service Registry ACL permissions for secubox services page - Port status display (firewall_open detection) - RPC response handling for list_services Co-Authored-By: Claude Opus 4.5 <noreply@anthropic.com>
This commit is contained in:
parent
38fbddd2a9
commit
e2428d807f
@ -187,7 +187,11 @@
|
||||
"Bash(gzip:*)",
|
||||
"Bash(python3:*)",
|
||||
"WebFetch(domain:192.168.255.1)",
|
||||
"Bash(ssh-add:*)"
|
||||
"Bash(ssh-add:*)",
|
||||
"Bash(SSH_AUTH_SOCK=\"\" ssh -i ~/.ssh/id_rsa root@192.168.255.1 'grep \"\"s.anonymous\"\" /www/luci-static/resources/view/cdn-cache/settings.js')",
|
||||
"Bash(SSH_AUTH_SOCK=\"\" ssh -i ~/.ssh/id_rsa root@192.168.255.1 'grep \"\"s.anonymous\"\" /www/luci-static/resources/view/cdn-cache/settings.js; rm -f /tmp/luci-indexcache*')",
|
||||
"Bash(SSH_AUTH_SOCK=\"\" ssh -i ~/.ssh/id_ed25519 root@192.168.255.1 'grep \"\"s.anonymous\"\" /www/luci-static/resources/view/cdn-cache/settings.js; rm -f /tmp/luci-indexcache*')",
|
||||
"Bash(SSH_AUTH_SOCK=\"\" ssh -i ~/.ssh/id_ed25519 root@192.168.255.1 'echo \"\"=== CrowdSec Decisions ===\"\"; cscli decisions list 2>/dev/null | head -10; echo \"\"\"\"; echo \"\"=== Auth Guardian Status ===\"\"; ubus call luci.auth-guardian status 2>/dev/null | head -20')"
|
||||
]
|
||||
}
|
||||
}
|
||||
|
||||
@ -71,7 +71,7 @@ return view.extend({
|
||||
s = m.section(form.TableSection, 'cache_policy', 'Cache Policies',
|
||||
'Define caching rules for specific domains and file types');
|
||||
s.addremove = true;
|
||||
s.anonymous = true;
|
||||
s.anonymous = false;
|
||||
s.sortable = true;
|
||||
|
||||
o = s.option(form.Flag, 'enabled', 'Enabled');
|
||||
@ -108,7 +108,7 @@ return view.extend({
|
||||
s = m.section(form.TableSection, 'exclusion', 'Exclusions',
|
||||
'Domains that should never be cached');
|
||||
s.addremove = true;
|
||||
s.anonymous = true;
|
||||
s.anonymous = false;
|
||||
|
||||
o = s.option(form.Flag, 'enabled', 'Enabled');
|
||||
o.default = '1';
|
||||
|
||||
File diff suppressed because it is too large
Load Diff
@ -1,116 +1,139 @@
|
||||
{
|
||||
"luci-app-secubox": {
|
||||
"description": "SecuBox Dashboard",
|
||||
"read": {
|
||||
"ubus": {
|
||||
"luci.secubox": [
|
||||
"status",
|
||||
"getStatus",
|
||||
"getVersion",
|
||||
"modules",
|
||||
"getModules",
|
||||
"modules_by_category",
|
||||
"module_info",
|
||||
"getModuleInfo",
|
||||
"check_module_enabled",
|
||||
"health",
|
||||
"getHealth",
|
||||
"diagnostics",
|
||||
"runDiagnostics",
|
||||
"getLogs",
|
||||
"get_system_health",
|
||||
"get_alerts",
|
||||
"get_dashboard_data",
|
||||
"get_theme",
|
||||
"first_run_status",
|
||||
"list_apps",
|
||||
"get_app_manifest",
|
||||
"list_profiles",
|
||||
"listProfiles",
|
||||
"getProfile",
|
||||
"validateProfile",
|
||||
"listSnapshots",
|
||||
"get_appstore_apps",
|
||||
"get_appstore_app",
|
||||
"get_public_ips",
|
||||
"get_network_health",
|
||||
"get_vital_services",
|
||||
"get_full_health_report",
|
||||
"get_services",
|
||||
"get_proxy_mode",
|
||||
"p2p_get_peers",
|
||||
"p2p_get_settings",
|
||||
"p2p_discover",
|
||||
"p2p_get_catalog",
|
||||
"p2p_get_peer_catalog",
|
||||
"p2p_get_shared_services"
|
||||
],
|
||||
"luci.gitea": [
|
||||
"get_status"
|
||||
],
|
||||
"luci.secubox-p2p": [
|
||||
"get_gitea_config",
|
||||
"list_gitea_repos",
|
||||
"get_gitea_commits",
|
||||
"list_local_backups"
|
||||
],
|
||||
"read": {
|
||||
"ubus": {
|
||||
"luci.secubox": [
|
||||
"status",
|
||||
"getStatus",
|
||||
"getVersion",
|
||||
"modules",
|
||||
"getModules",
|
||||
"modules_by_category",
|
||||
"module_info",
|
||||
"getModuleInfo",
|
||||
"check_module_enabled",
|
||||
"health",
|
||||
"getHealth",
|
||||
"diagnostics",
|
||||
"runDiagnostics",
|
||||
"getLogs",
|
||||
"get_system_health",
|
||||
"get_alerts",
|
||||
"get_dashboard_data",
|
||||
"get_theme",
|
||||
"first_run_status",
|
||||
"list_apps",
|
||||
"get_app_manifest",
|
||||
"list_profiles",
|
||||
"listProfiles",
|
||||
"getProfile",
|
||||
"validateProfile",
|
||||
"listSnapshots",
|
||||
"get_appstore_apps",
|
||||
"get_appstore_app",
|
||||
"get_public_ips",
|
||||
"get_network_health",
|
||||
"get_vital_services",
|
||||
"get_full_health_report",
|
||||
"get_services",
|
||||
"get_proxy_mode",
|
||||
"p2p_get_peers",
|
||||
"p2p_get_settings",
|
||||
"p2p_discover",
|
||||
"p2p_get_catalog",
|
||||
"p2p_get_peer_catalog",
|
||||
"p2p_get_shared_services"
|
||||
],
|
||||
"luci.service-registry": [
|
||||
"list_services",
|
||||
"get_service",
|
||||
"list_categories",
|
||||
"get_qr_data",
|
||||
"get_certificate_status",
|
||||
"check_service_health",
|
||||
"check_all_health",
|
||||
"get_network_info",
|
||||
"get_landing_config"
|
||||
],
|
||||
"luci.haproxy": [
|
||||
"status",
|
||||
"list_vhosts",
|
||||
"list_backends",
|
||||
"list_certificates"
|
||||
],
|
||||
"luci.gitea": [
|
||||
"get_status"
|
||||
],
|
||||
"luci.secubox-p2p": [
|
||||
"get_gitea_config",
|
||||
"list_gitea_repos",
|
||||
"get_gitea_commits",
|
||||
"list_local_backups"
|
||||
],
|
||||
"uci": [
|
||||
"get",
|
||||
"state"
|
||||
]
|
||||
},
|
||||
"uci": [
|
||||
"secubox"
|
||||
"secubox",
|
||||
"service-registry",
|
||||
"haproxy"
|
||||
]
|
||||
},
|
||||
"write": {
|
||||
"ubus": {
|
||||
"luci.secubox": [
|
||||
"reload",
|
||||
"start_module",
|
||||
"stop_module",
|
||||
"restart_module",
|
||||
"enable_module",
|
||||
"disable_module",
|
||||
"installModule",
|
||||
"removeModule",
|
||||
"updateModule",
|
||||
"applyProfile",
|
||||
"createSnapshot",
|
||||
"restoreSnapshot",
|
||||
"quick_action",
|
||||
"set_theme",
|
||||
"dismiss_alert",
|
||||
"clear_alerts",
|
||||
"fix_permissions",
|
||||
"apply_first_run",
|
||||
"apply_app_wizard",
|
||||
"apply_profile",
|
||||
"rollback_profile",
|
||||
"install_appstore_app",
|
||||
"remove_appstore_app",
|
||||
"set_proxy_mode",
|
||||
"p2p_add_peer",
|
||||
"p2p_remove_peer",
|
||||
"p2p_set_settings",
|
||||
"p2p_share_app",
|
||||
"p2p_unshare_app",
|
||||
"p2p_install_from_peer",
|
||||
"p2p_sync_catalog",
|
||||
"p2p_broadcast_command"
|
||||
],
|
||||
"luci.gitea": [
|
||||
"generate_token",
|
||||
"create_repo"
|
||||
],
|
||||
"luci.secubox-p2p": [
|
||||
"set_gitea_config",
|
||||
"create_gitea_repo",
|
||||
"push_gitea_backup",
|
||||
"pull_gitea_backup",
|
||||
"create_local_backup",
|
||||
"restore_local_backup"
|
||||
],
|
||||
"write": {
|
||||
"ubus": {
|
||||
"luci.secubox": [
|
||||
"reload",
|
||||
"start_module",
|
||||
"stop_module",
|
||||
"restart_module",
|
||||
"enable_module",
|
||||
"disable_module",
|
||||
"installModule",
|
||||
"removeModule",
|
||||
"updateModule",
|
||||
"applyProfile",
|
||||
"createSnapshot",
|
||||
"restoreSnapshot",
|
||||
"quick_action",
|
||||
"set_theme",
|
||||
"dismiss_alert",
|
||||
"clear_alerts",
|
||||
"fix_permissions",
|
||||
"apply_first_run",
|
||||
"apply_app_wizard",
|
||||
"apply_profile",
|
||||
"rollback_profile",
|
||||
"install_appstore_app",
|
||||
"remove_appstore_app",
|
||||
"set_proxy_mode",
|
||||
"p2p_add_peer",
|
||||
"p2p_remove_peer",
|
||||
"p2p_set_settings",
|
||||
"p2p_share_app",
|
||||
"p2p_unshare_app",
|
||||
"p2p_install_from_peer",
|
||||
"p2p_sync_catalog",
|
||||
"p2p_broadcast_command"
|
||||
],
|
||||
"luci.service-registry": [
|
||||
"generate_landing_page",
|
||||
"sync_providers"
|
||||
],
|
||||
"luci.gitea": [
|
||||
"generate_token",
|
||||
"create_repo"
|
||||
],
|
||||
"luci.secubox-p2p": [
|
||||
"set_gitea_config",
|
||||
"create_gitea_repo",
|
||||
"push_gitea_backup",
|
||||
"pull_gitea_backup",
|
||||
"create_local_backup",
|
||||
"restore_local_backup"
|
||||
],
|
||||
"uci": [
|
||||
"set",
|
||||
"delete",
|
||||
|
||||
@ -87,6 +87,13 @@ var callSaveLandingConfig = rpc.declare({
|
||||
expect: {}
|
||||
});
|
||||
|
||||
var callSetLandingTheme = rpc.declare({
|
||||
object: 'luci.service-registry',
|
||||
method: 'set_landing_theme',
|
||||
params: ['theme'],
|
||||
expect: {}
|
||||
});
|
||||
|
||||
var callCheckServiceHealth = rpc.declare({
|
||||
object: 'luci.service-registry',
|
||||
method: 'check_service_health',
|
||||
@ -193,6 +200,11 @@ return baseclass.extend({
|
||||
return callSaveLandingConfig(autoRegen ? true : false);
|
||||
},
|
||||
|
||||
// Set landing page theme
|
||||
setLandingTheme: function(theme) {
|
||||
return callSetLandingTheme(theme || 'mirrorbox');
|
||||
},
|
||||
|
||||
// Get dashboard data (services + provider status)
|
||||
getDashboardData: function() {
|
||||
return Promise.all([
|
||||
|
||||
@ -197,6 +197,24 @@ return view.extend({
|
||||
E('div', { 'class': 'settings-section' }, [
|
||||
E('h2', {}, ['⚙️', ' Settings']),
|
||||
E('div', { 'class': 'settings-grid' }, [
|
||||
E('div', { 'class': 'setting-item' }, [
|
||||
E('div', { 'class': 'setting-label' }, [
|
||||
E('span', {}, '🎨'),
|
||||
E('span', {}, 'Theme')
|
||||
]),
|
||||
E('div', { 'class': 'theme-selector' }, [
|
||||
E('select', {
|
||||
'class': 'theme-select',
|
||||
'change': function(e) { self.handleThemeChange(e.target.value); }
|
||||
}, [
|
||||
E('option', { 'value': 'mirrorbox', 'selected': (config.theme || 'mirrorbox') === 'mirrorbox' }, 'MirrorBox (Glassmorphism)'),
|
||||
E('option', { 'value': 'cyberpunk', 'selected': config.theme === 'cyberpunk' }, 'Cyberpunk (Neon)'),
|
||||
E('option', { 'value': 'minimal', 'selected': config.theme === 'minimal' }, 'Minimal Dark'),
|
||||
E('option', { 'value': 'terminal', 'selected': config.theme === 'terminal' }, 'Terminal (Matrix)'),
|
||||
E('option', { 'value': 'light', 'selected': config.theme === 'light' }, 'Clean Light')
|
||||
])
|
||||
])
|
||||
]),
|
||||
E('div', { 'class': 'setting-item' }, [
|
||||
E('div', { 'class': 'setting-label' }, [
|
||||
E('span', {}, '🔄'),
|
||||
@ -269,6 +287,31 @@ return view.extend({
|
||||
ui.addNotification(null, E('p', (enabled ? '✅' : '❌') + ' Auto-regenerate ' + (enabled ? 'enabled' : 'disabled')), 'info');
|
||||
},
|
||||
|
||||
handleThemeChange: function(theme) {
|
||||
var self = this;
|
||||
ui.showModal(_('Applying Theme'), [
|
||||
E('p', { 'class': 'spinning' }, _('🎨 Applying theme: ' + theme + '...'))
|
||||
]);
|
||||
|
||||
return api.setLandingTheme(theme).then(function(result) {
|
||||
if (result.success) {
|
||||
return api.generateLandingPage();
|
||||
}
|
||||
throw new Error(result.error || 'Failed to set theme');
|
||||
}).then(function(result) {
|
||||
ui.hideModal();
|
||||
if (result.success) {
|
||||
ui.addNotification(null, E('p', '✅ ' + _('Theme applied: ') + theme), 'info');
|
||||
window.location.reload();
|
||||
} else {
|
||||
ui.addNotification(null, E('p', '❌ ' + _('Failed to regenerate page')), 'error');
|
||||
}
|
||||
}).catch(function(err) {
|
||||
ui.hideModal();
|
||||
ui.addNotification(null, E('p', '❌ ' + _('Error: ') + err.message), 'error');
|
||||
});
|
||||
},
|
||||
|
||||
getStyles: function() {
|
||||
return [
|
||||
'.service-landing-page { font-family: system-ui, -apple-system, sans-serif; color: #e0e0e0; background: linear-gradient(135deg, #0a0a1a 0%, #1a1a2e 100%); min-height: 100vh; padding: 20px; margin: -20px; }',
|
||||
@ -344,7 +387,14 @@ return view.extend({
|
||||
'.toggle-slider { position: absolute; inset: 0; background: rgba(255,255,255,0.1); border-radius: 13px; cursor: pointer; transition: 0.3s; }',
|
||||
'.toggle-slider::before { content: ""; position: absolute; width: 20px; height: 20px; left: 3px; bottom: 3px; background: #fff; border-radius: 50%; transition: 0.3s; }',
|
||||
'.toggle-switch input:checked + .toggle-slider { background: #2ecc71; }',
|
||||
'.toggle-switch input:checked + .toggle-slider::before { transform: translateX(24px); }'
|
||||
'.toggle-switch input:checked + .toggle-slider::before { transform: translateX(24px); }',
|
||||
|
||||
// Theme Selector
|
||||
'.theme-selector { display: flex; align-items: center; gap: 10px; }',
|
||||
'.theme-select { padding: 10px 15px; background: rgba(0,0,0,0.4); border: 1px solid rgba(255,255,255,0.2); border-radius: 8px; color: #fff; font-size: 14px; cursor: pointer; min-width: 200px; }',
|
||||
'.theme-select:hover { border-color: rgba(52,152,219,0.5); }',
|
||||
'.theme-select:focus { outline: none; border-color: #3498db; }',
|
||||
'.theme-select option { background: #1a1a2e; color: #fff; padding: 10px; }'
|
||||
].join('\n');
|
||||
}
|
||||
});
|
||||
|
||||
@ -26,7 +26,7 @@ function generateQRCodeImg(data, size) {
|
||||
}
|
||||
|
||||
return view.extend({
|
||||
title: _('Service Registry'),
|
||||
title: _('Web Services Registry'),
|
||||
pollInterval: 30,
|
||||
healthData: null,
|
||||
|
||||
@ -112,8 +112,9 @@ return view.extend({
|
||||
html += '<div class="sr-network-item">';
|
||||
html += '<span class="sr-network-label">Port 80 (HTTP)</span>';
|
||||
var http = extPorts.http || {};
|
||||
if (http.status === 'open') {
|
||||
html += '<span class="sr-network-value sr-network-ok">✅ Open from Internet</span>';
|
||||
if (http.status === 'open' || http.status === 'firewall_open') {
|
||||
html += '<span class="sr-network-value sr-network-ok">✅ Open</span>';
|
||||
if (http.hint) html += '<span class="sr-network-sub">' + http.hint + '</span>';
|
||||
} else if (http.status === 'blocked') {
|
||||
html += '<span class="sr-network-value sr-network-fail">🚫 Blocked</span>';
|
||||
html += '<span class="sr-network-sub">' + (http.hint || 'Check router') + '</span>';
|
||||
@ -126,8 +127,9 @@ return view.extend({
|
||||
html += '<div class="sr-network-item">';
|
||||
html += '<span class="sr-network-label">Port 443 (HTTPS)</span>';
|
||||
var https = extPorts.https || {};
|
||||
if (https.status === 'open') {
|
||||
html += '<span class="sr-network-value sr-network-ok">✅ Open from Internet</span>';
|
||||
if (https.status === 'open' || https.status === 'firewall_open') {
|
||||
html += '<span class="sr-network-value sr-network-ok">✅ Open</span>';
|
||||
if (https.hint) html += '<span class="sr-network-sub">' + https.hint + '</span>';
|
||||
} else if (https.status === 'blocked') {
|
||||
html += '<span class="sr-network-value sr-network-fail">🚫 Blocked</span>';
|
||||
html += '<span class="sr-network-sub">' + (https.hint || 'Check router') + '</span>';
|
||||
@ -397,7 +399,7 @@ return view.extend({
|
||||
|
||||
return E('div', { 'class': 'sr-header' }, [
|
||||
E('div', { 'class': 'sr-title' }, [
|
||||
E('h2', {}, '🗂️ Service Registry'),
|
||||
E('h2', {}, '🗂️ Web Services Registry'),
|
||||
E('span', { 'class': 'sr-subtitle' },
|
||||
published + ' published · ' + running + ' running · ' +
|
||||
haproxyCount + ' domains · ' + torCount + ' onion')
|
||||
|
||||
@ -1484,12 +1484,14 @@ method_get_landing_config() {
|
||||
json_init
|
||||
|
||||
config_load "$UCI_CONFIG"
|
||||
local landing_path auto_regen
|
||||
local landing_path auto_regen theme
|
||||
config_get landing_path main landing_path "/www/secubox-services.html"
|
||||
config_get auto_regen main landing_auto_regen "1"
|
||||
config_get theme main landing_theme "mirrorbox"
|
||||
|
||||
json_add_string "path" "$landing_path"
|
||||
json_add_boolean "auto_regen" "$auto_regen"
|
||||
json_add_string "theme" "$theme"
|
||||
|
||||
# Check if file exists
|
||||
if [ -f "$landing_path" ]; then
|
||||
@ -1522,6 +1524,33 @@ method_save_landing_config() {
|
||||
json_dump
|
||||
}
|
||||
|
||||
# Set landing page theme
|
||||
method_set_landing_theme() {
|
||||
local theme
|
||||
|
||||
read -r input
|
||||
json_load "$input"
|
||||
json_get_var theme theme "mirrorbox"
|
||||
|
||||
json_init
|
||||
|
||||
# Validate theme
|
||||
case "$theme" in
|
||||
mirrorbox|cyberpunk|minimal|terminal|light)
|
||||
uci set "$UCI_CONFIG.main.landing_theme=$theme"
|
||||
uci commit "$UCI_CONFIG"
|
||||
json_add_boolean "success" 1
|
||||
json_add_string "theme" "$theme"
|
||||
;;
|
||||
*)
|
||||
json_add_boolean "success" 0
|
||||
json_add_string "error" "Invalid theme: $theme"
|
||||
;;
|
||||
esac
|
||||
|
||||
json_dump
|
||||
}
|
||||
|
||||
# Main RPC interface
|
||||
case "$1" in
|
||||
list)
|
||||
@ -1542,7 +1571,8 @@ case "$1" in
|
||||
"check_all_health": {},
|
||||
"get_network_info": {},
|
||||
"get_landing_config": {},
|
||||
"save_landing_config": { "auto_regen": "boolean" }
|
||||
"save_landing_config": { "auto_regen": "boolean" },
|
||||
"set_landing_theme": { "theme": "string" }
|
||||
}
|
||||
EOF
|
||||
;;
|
||||
@ -1564,6 +1594,7 @@ EOF
|
||||
get_network_info) method_get_network_info ;;
|
||||
get_landing_config) method_get_landing_config ;;
|
||||
save_landing_config) method_save_landing_config ;;
|
||||
set_landing_theme) method_set_landing_theme ;;
|
||||
*)
|
||||
json_init
|
||||
json_add_boolean "error" 1
|
||||
|
||||
File diff suppressed because it is too large
Load Diff
@ -39,7 +39,8 @@
|
||||
"generate_landing_page",
|
||||
"update_service",
|
||||
"delete_service",
|
||||
"save_landing_config"
|
||||
"save_landing_config",
|
||||
"set_landing_theme"
|
||||
],
|
||||
"luci.haproxy": [
|
||||
"create_vhost",
|
||||
|
||||
@ -8,7 +8,7 @@ Architecture: all
|
||||
Installed-Size: 71680
|
||||
Description: Comprehensive authentication and session management with captive portal, OAuth2/OIDC integration, voucher system, and time-based access control
|
||||
Filename: luci-app-auth-guardian_0.4.0-r3_all.ipk
|
||||
Size: 12084
|
||||
Size: 12080
|
||||
|
||||
Package: luci-app-bandwidth-manager
|
||||
Version: 0.5.0-r2
|
||||
@ -20,7 +20,7 @@ Architecture: all
|
||||
Installed-Size: 378880
|
||||
Description: Advanced bandwidth management with QoS rules, client quotas, and SQM integration
|
||||
Filename: luci-app-bandwidth-manager_0.5.0-r2_all.ipk
|
||||
Size: 66963
|
||||
Size: 66966
|
||||
|
||||
Package: luci-app-cdn-cache
|
||||
Version: 0.5.0-r3
|
||||
@ -29,10 +29,10 @@ License: Apache-2.0
|
||||
Section: luci
|
||||
Maintainer: OpenWrt LuCI community
|
||||
Architecture: all
|
||||
Installed-Size: 112640
|
||||
Installed-Size: 122880
|
||||
Description: Dashboard for managing local CDN caching proxy on OpenWrt
|
||||
Filename: luci-app-cdn-cache_0.5.0-r3_all.ipk
|
||||
Size: 20434
|
||||
Size: 23189
|
||||
|
||||
Package: luci-app-client-guardian
|
||||
Version: 0.4.0-r7
|
||||
@ -44,7 +44,7 @@ Architecture: all
|
||||
Installed-Size: 307200
|
||||
Description: Network Access Control with client monitoring, zone management, captive portal, parental controls, and SMS/email alerts
|
||||
Filename: luci-app-client-guardian_0.4.0-r7_all.ipk
|
||||
Size: 57045
|
||||
Size: 57047
|
||||
|
||||
Package: luci-app-crowdsec-dashboard
|
||||
Version: 0.7.0-r29
|
||||
@ -56,7 +56,7 @@ Architecture: all
|
||||
Installed-Size: 296960
|
||||
Description: Real-time security monitoring dashboard for CrowdSec on OpenWrt
|
||||
Filename: luci-app-crowdsec-dashboard_0.7.0-r29_all.ipk
|
||||
Size: 55583
|
||||
Size: 55585
|
||||
|
||||
Package: luci-app-cyberfeed
|
||||
Version: 0.1.1-r1
|
||||
@ -68,7 +68,7 @@ Architecture: all
|
||||
Installed-Size: 71680
|
||||
Description: Cyberpunk-themed RSS feed aggregator dashboard with social media support
|
||||
Filename: luci-app-cyberfeed_0.1.1-r1_all.ipk
|
||||
Size: 12836
|
||||
Size: 12840
|
||||
|
||||
Package: luci-app-exposure
|
||||
Version: 1.0.0-r3
|
||||
@ -80,7 +80,7 @@ Architecture: all
|
||||
Installed-Size: 153600
|
||||
Description: LuCI SecuBox Service Exposure Manager
|
||||
Filename: luci-app-exposure_1.0.0-r3_all.ipk
|
||||
Size: 20535
|
||||
Size: 20536
|
||||
|
||||
Package: luci-app-gitea
|
||||
Version: 1.0.0-r2
|
||||
@ -92,7 +92,7 @@ Architecture: all
|
||||
Installed-Size: 92160
|
||||
Description: Modern dashboard for Gitea Platform management on OpenWrt
|
||||
Filename: luci-app-gitea_1.0.0-r2_all.ipk
|
||||
Size: 15583
|
||||
Size: 15588
|
||||
|
||||
Package: luci-app-glances
|
||||
Version: 1.0.0-r2
|
||||
@ -104,7 +104,7 @@ Architecture: all
|
||||
Installed-Size: 40960
|
||||
Description: Modern dashboard for Glances system monitoring with SecuBox theme
|
||||
Filename: luci-app-glances_1.0.0-r2_all.ipk
|
||||
Size: 6965
|
||||
Size: 6967
|
||||
|
||||
Package: luci-app-haproxy
|
||||
Version: 1.0.0-r8
|
||||
@ -116,7 +116,7 @@ Architecture: all
|
||||
Installed-Size: 204800
|
||||
Description: Web interface for managing HAProxy load balancer with vhosts, SSL certificates, and backend routing
|
||||
Filename: luci-app-haproxy_1.0.0-r8_all.ipk
|
||||
Size: 34162
|
||||
Size: 34168
|
||||
|
||||
Package: luci-app-hexojs
|
||||
Version: 1.0.0-r3
|
||||
@ -128,7 +128,7 @@ Architecture: all
|
||||
Installed-Size: 215040
|
||||
Description: Modern dashboard for Hexo static site generator on OpenWrt
|
||||
Filename: luci-app-hexojs_1.0.0-r3_all.ipk
|
||||
Size: 32980
|
||||
Size: 32984
|
||||
|
||||
Package: luci-app-ksm-manager
|
||||
Version: 0.4.0-r2
|
||||
@ -140,7 +140,7 @@ Architecture: all
|
||||
Installed-Size: 112640
|
||||
Description: Centralized cryptographic key management with hardware security module (HSM) support for Nitrokey and YubiKey devices. Provides secure key storage, certificate management, SSH key handling, and secret storage with audit logging.
|
||||
Filename: luci-app-ksm-manager_0.4.0-r2_all.ipk
|
||||
Size: 18724
|
||||
Size: 18722
|
||||
|
||||
Package: luci-app-localai
|
||||
Version: 0.1.0-r15
|
||||
@ -164,7 +164,7 @@ Architecture: all
|
||||
Installed-Size: 40960
|
||||
Description: LuCI support for Lyrion Music Server
|
||||
Filename: luci-app-lyrion_1.0.0-r1_all.ipk
|
||||
Size: 6725
|
||||
Size: 6726
|
||||
|
||||
Package: luci-app-magicmirror2
|
||||
Version: 0.4.0-r6
|
||||
@ -176,7 +176,7 @@ Architecture: all
|
||||
Installed-Size: 71680
|
||||
Description: Modern dashboard for MagicMirror2 smart display platform with module manager and SecuBox theme
|
||||
Filename: luci-app-magicmirror2_0.4.0-r6_all.ipk
|
||||
Size: 12274
|
||||
Size: 12277
|
||||
|
||||
Package: luci-app-mailinabox
|
||||
Version: 1.0.0-r1
|
||||
@ -188,7 +188,7 @@ Architecture: all
|
||||
Installed-Size: 30720
|
||||
Description: LuCI support for Mail-in-a-Box
|
||||
Filename: luci-app-mailinabox_1.0.0-r1_all.ipk
|
||||
Size: 5483
|
||||
Size: 5481
|
||||
|
||||
Package: luci-app-media-flow
|
||||
Version: 0.6.4-r1
|
||||
@ -200,7 +200,7 @@ Architecture: all
|
||||
Installed-Size: 102400
|
||||
Description: Real-time detection and monitoring of streaming services (Netflix, YouTube, Spotify, etc.) with quality estimation, history tracking, and alerts. Supports nDPId local DPI and netifyd.
|
||||
Filename: luci-app-media-flow_0.6.4-r1_all.ipk
|
||||
Size: 19116
|
||||
Size: 19124
|
||||
|
||||
Package: luci-app-metablogizer
|
||||
Version: 1.0.0-r3
|
||||
@ -212,7 +212,7 @@ Architecture: all
|
||||
Installed-Size: 102400
|
||||
Description: LuCI support for MetaBlogizer Static Site Publisher
|
||||
Filename: luci-app-metablogizer_1.0.0-r3_all.ipk
|
||||
Size: 21649
|
||||
Size: 21652
|
||||
|
||||
Package: luci-app-metabolizer
|
||||
Version: 1.0.0-r2
|
||||
@ -224,7 +224,7 @@ Architecture: all
|
||||
Installed-Size: 30720
|
||||
Description: LuCI support for Metabolizer CMS
|
||||
Filename: luci-app-metabolizer_1.0.0-r2_all.ipk
|
||||
Size: 4757
|
||||
Size: 4758
|
||||
|
||||
Package: luci-app-mitmproxy
|
||||
Version: 0.4.0-r6
|
||||
@ -248,7 +248,7 @@ Architecture: all
|
||||
Installed-Size: 51200
|
||||
Description: Web interface for MMPM - MagicMirror Package Manager
|
||||
Filename: luci-app-mmpm_0.2.0-r3_all.ipk
|
||||
Size: 7902
|
||||
Size: 7905
|
||||
|
||||
Package: luci-app-mqtt-bridge
|
||||
Version: 0.4.0-r4
|
||||
@ -260,7 +260,7 @@ Architecture: all
|
||||
Installed-Size: 122880
|
||||
Description: USB-to-MQTT IoT hub with SecuBox theme
|
||||
Filename: luci-app-mqtt-bridge_0.4.0-r4_all.ipk
|
||||
Size: 22780
|
||||
Size: 22777
|
||||
|
||||
Package: luci-app-ndpid
|
||||
Version: 1.1.2-r2
|
||||
@ -272,7 +272,7 @@ Architecture: all
|
||||
Installed-Size: 122880
|
||||
Description: Modern dashboard for nDPId deep packet inspection on OpenWrt
|
||||
Filename: luci-app-ndpid_1.1.2-r2_all.ipk
|
||||
Size: 22454
|
||||
Size: 22458
|
||||
|
||||
Package: luci-app-netdata-dashboard
|
||||
Version: 0.5.0-r2
|
||||
@ -284,7 +284,7 @@ Architecture: all
|
||||
Installed-Size: 133120
|
||||
Description: Real-time system monitoring dashboard with Netdata integration for OpenWrt
|
||||
Filename: luci-app-netdata-dashboard_0.5.0-r2_all.ipk
|
||||
Size: 22398
|
||||
Size: 22402
|
||||
|
||||
Package: luci-app-network-modes
|
||||
Version: 0.5.0-r3
|
||||
@ -296,7 +296,7 @@ Architecture: all
|
||||
Installed-Size: 307200
|
||||
Description: Configure OpenWrt for different network modes: Sniffer, Access Point, Relay, Router
|
||||
Filename: luci-app-network-modes_0.5.0-r3_all.ipk
|
||||
Size: 55612
|
||||
Size: 55608
|
||||
|
||||
Package: luci-app-network-tweaks
|
||||
Version: 1.0.0-r7
|
||||
@ -308,7 +308,7 @@ Architecture: all
|
||||
Installed-Size: 81920
|
||||
Description: Unified network services dashboard with DNS/hosts sync, CDN cache control, and WPAD auto-proxy configuration
|
||||
Filename: luci-app-network-tweaks_1.0.0-r7_all.ipk
|
||||
Size: 15465
|
||||
Size: 15458
|
||||
|
||||
Package: luci-app-nextcloud
|
||||
Version: 1.0.0-r1
|
||||
@ -320,7 +320,7 @@ Architecture: all
|
||||
Installed-Size: 30720
|
||||
Description: LuCI support for Nextcloud
|
||||
Filename: luci-app-nextcloud_1.0.0-r1_all.ipk
|
||||
Size: 6486
|
||||
Size: 6488
|
||||
|
||||
Package: luci-app-ollama
|
||||
Version: 0.1.0-r1
|
||||
@ -332,7 +332,7 @@ Architecture: all
|
||||
Installed-Size: 71680
|
||||
Description: Modern dashboard for Ollama LLM management on OpenWrt
|
||||
Filename: luci-app-ollama_0.1.0-r1_all.ipk
|
||||
Size: 11995
|
||||
Size: 11997
|
||||
|
||||
Package: luci-app-picobrew
|
||||
Version: 1.0.0-r1
|
||||
@ -344,7 +344,7 @@ Architecture: all
|
||||
Installed-Size: 51200
|
||||
Description: Modern dashboard for PicoBrew Server management on OpenWrt
|
||||
Filename: luci-app-picobrew_1.0.0-r1_all.ipk
|
||||
Size: 9976
|
||||
Size: 9978
|
||||
|
||||
Package: luci-app-secubox
|
||||
Version: 0.7.1-r4
|
||||
@ -356,7 +356,7 @@ Architecture: all
|
||||
Installed-Size: 266240
|
||||
Description: Central control hub for all SecuBox modules. Provides unified dashboard, module status, system health monitoring, and quick actions.
|
||||
Filename: luci-app-secubox_0.7.1-r4_all.ipk
|
||||
Size: 49905
|
||||
Size: 49906
|
||||
|
||||
Package: luci-app-secubox-admin
|
||||
Version: 1.0.0-r19
|
||||
@ -379,7 +379,7 @@ Architecture: all
|
||||
Installed-Size: 81920
|
||||
Description: LuCI SecuBox CrowdSec Dashboard
|
||||
Filename: luci-app-secubox-crowdsec_1.0.0-r3_all.ipk
|
||||
Size: 13914
|
||||
Size: 13915
|
||||
|
||||
Package: luci-app-secubox-netdiag
|
||||
Version: 1.0.0-r1
|
||||
@ -403,7 +403,7 @@ Architecture: all
|
||||
Installed-Size: 215040
|
||||
Description: Complete LuCI interface for netifyd DPI engine with real-time flow monitoring, application detection, network analytics, and flow action plugins
|
||||
Filename: luci-app-secubox-netifyd_1.2.1-r1_all.ipk
|
||||
Size: 39499
|
||||
Size: 39500
|
||||
|
||||
Package: luci-app-secubox-portal
|
||||
Version: 0.7.0-r2
|
||||
@ -415,7 +415,7 @@ Architecture: all
|
||||
Installed-Size: 163840
|
||||
Description: Unified entry point for all SecuBox applications with tabbed navigation
|
||||
Filename: luci-app-secubox-portal_0.7.0-r2_all.ipk
|
||||
Size: 32227
|
||||
Size: 32228
|
||||
|
||||
Package: luci-app-secubox-security-threats
|
||||
Version: 1.0.0-r4
|
||||
@ -427,7 +427,7 @@ Architecture: all
|
||||
Installed-Size: 71680
|
||||
Description: Unified dashboard integrating netifyd DPI threats with CrowdSec intelligence for real-time threat monitoring and automated blocking
|
||||
Filename: luci-app-secubox-security-threats_1.0.0-r4_all.ipk
|
||||
Size: 13906
|
||||
Size: 13907
|
||||
|
||||
Package: luci-app-service-registry
|
||||
Version: 1.0.0-r1
|
||||
@ -436,10 +436,10 @@ License: MIT
|
||||
Section: luci
|
||||
Maintainer: OpenWrt LuCI community
|
||||
Architecture: all
|
||||
Installed-Size: 163840
|
||||
Installed-Size: 194560
|
||||
Description: Unified service aggregation with HAProxy vhosts, Tor hidden services, and QR-coded landing page
|
||||
Filename: luci-app-service-registry_1.0.0-r1_all.ipk
|
||||
Size: 33349
|
||||
Size: 39612
|
||||
|
||||
Package: luci-app-streamlit
|
||||
Version: 1.0.0-r9
|
||||
@ -451,7 +451,7 @@ Architecture: all
|
||||
Installed-Size: 122880
|
||||
Description: Modern dashboard for Streamlit Platform management on OpenWrt
|
||||
Filename: luci-app-streamlit_1.0.0-r9_all.ipk
|
||||
Size: 20473
|
||||
Size: 20470
|
||||
|
||||
Package: luci-app-system-hub
|
||||
Version: 0.5.1-r4
|
||||
@ -463,7 +463,7 @@ Architecture: all
|
||||
Installed-Size: 358400
|
||||
Description: Central system control with monitoring, services, logs, and backup
|
||||
Filename: luci-app-system-hub_0.5.1-r4_all.ipk
|
||||
Size: 66345
|
||||
Size: 66348
|
||||
|
||||
Package: luci-app-tor-shield
|
||||
Version: 1.0.0-r10
|
||||
@ -475,7 +475,7 @@ Architecture: all
|
||||
Installed-Size: 133120
|
||||
Description: Modern dashboard for Tor anonymization on OpenWrt
|
||||
Filename: luci-app-tor-shield_1.0.0-r10_all.ipk
|
||||
Size: 24535
|
||||
Size: 24534
|
||||
|
||||
Package: luci-app-traffic-shaper
|
||||
Version: 0.4.0-r2
|
||||
@ -487,7 +487,7 @@ Architecture: all
|
||||
Installed-Size: 92160
|
||||
Description: Advanced traffic shaping with TC/CAKE for precise bandwidth control
|
||||
Filename: luci-app-traffic-shaper_0.4.0-r2_all.ipk
|
||||
Size: 15635
|
||||
Size: 15638
|
||||
|
||||
Package: luci-app-vhost-manager
|
||||
Version: 0.5.0-r5
|
||||
@ -499,7 +499,7 @@ Architecture: all
|
||||
Installed-Size: 174080
|
||||
Description: Nginx reverse proxy manager with Let's Encrypt SSL certificates, authentication, and WebSocket support
|
||||
Filename: luci-app-vhost-manager_0.5.0-r5_all.ipk
|
||||
Size: 29226
|
||||
Size: 29227
|
||||
|
||||
Package: luci-app-wireguard-dashboard
|
||||
Version: 0.7.0-r5
|
||||
@ -511,7 +511,7 @@ Architecture: all
|
||||
Installed-Size: 235520
|
||||
Description: Modern dashboard for WireGuard VPN monitoring on OpenWrt
|
||||
Filename: luci-app-wireguard-dashboard_0.7.0-r5_all.ipk
|
||||
Size: 45370
|
||||
Size: 45369
|
||||
|
||||
Package: luci-app-zigbee2mqtt
|
||||
Version: 1.0.0-r2
|
||||
@ -523,7 +523,7 @@ Architecture: all
|
||||
Installed-Size: 40960
|
||||
Description: Graphical interface for managing the Zigbee2MQTT docker application.
|
||||
Filename: luci-app-zigbee2mqtt_1.0.0-r2_all.ipk
|
||||
Size: 7083
|
||||
Size: 7094
|
||||
|
||||
Package: luci-theme-secubox
|
||||
Version: 0.4.7-r1
|
||||
@ -535,7 +535,7 @@ Architecture: all
|
||||
Installed-Size: 460800
|
||||
Description: Global CyberMood design system (CSS/JS/i18n) shared by all SecuBox dashboards.
|
||||
Filename: luci-theme-secubox_0.4.7-r1_all.ipk
|
||||
Size: 111796
|
||||
Size: 111797
|
||||
|
||||
Package: secubox-app
|
||||
Version: 1.0.0-r2
|
||||
@ -546,7 +546,7 @@ Installed-Size: 92160
|
||||
Description: Command line helper for SecuBox App Store manifests. Installs /usr/sbin/secubox-app
|
||||
and ships the default manifests under /usr/share/secubox/plugins/.
|
||||
Filename: secubox-app_1.0.0-r2_all.ipk
|
||||
Size: 11182
|
||||
Size: 11183
|
||||
|
||||
Package: secubox-app-adguardhome
|
||||
Version: 1.0.0-r2
|
||||
@ -560,7 +560,7 @@ Description: Installer, configuration, and service manager for running AdGuard
|
||||
inside Docker on SecuBox-powered OpenWrt systems. Network-wide ad blocker
|
||||
with DNS-over-HTTPS/TLS support and detailed analytics.
|
||||
Filename: secubox-app-adguardhome_1.0.0-r2_all.ipk
|
||||
Size: 2880
|
||||
Size: 2881
|
||||
|
||||
Package: secubox-app-auth-logger
|
||||
Version: 1.2.2-r1
|
||||
@ -578,7 +578,7 @@ Description: Logs authentication failures from LuCI/rpcd and Dropbear SSH
|
||||
- JavaScript hook to intercept login failures
|
||||
- CrowdSec parser and bruteforce scenario
|
||||
Filename: secubox-app-auth-logger_1.2.2-r1_all.ipk
|
||||
Size: 9377
|
||||
Size: 9375
|
||||
|
||||
Package: secubox-app-crowdsec-custom
|
||||
Version: 1.1.0-r1
|
||||
@ -601,7 +601,7 @@ Description: Custom CrowdSec configurations for SecuBox web interface protectio
|
||||
- Webapp generic auth bruteforce protection
|
||||
- Whitelist for trusted networks
|
||||
Filename: secubox-app-crowdsec-custom_1.1.0-r1_all.ipk
|
||||
Size: 5758
|
||||
Size: 5756
|
||||
|
||||
Package: secubox-app-cs-firewall-bouncer
|
||||
Version: 0.0.31-r4
|
||||
@ -642,7 +642,7 @@ Description: Cyberpunk-themed RSS feed aggregator for OpenWrt/SecuBox.
|
||||
Features emoji injection, neon styling, and RSS-Bridge support
|
||||
for social media feeds (Facebook, Twitter, Mastodon).
|
||||
Filename: secubox-app-cyberfeed_0.2.1-r1_all.ipk
|
||||
Size: 12450
|
||||
Size: 12455
|
||||
|
||||
Package: secubox-app-domoticz
|
||||
Version: 1.0.0-r2
|
||||
@ -655,7 +655,7 @@ Installed-Size: 10240
|
||||
Description: Installer, configuration, and service manager for running Domoticz
|
||||
inside Docker on SecuBox-powered OpenWrt systems.
|
||||
Filename: secubox-app-domoticz_1.0.0-r2_all.ipk
|
||||
Size: 2548
|
||||
Size: 2547
|
||||
|
||||
Package: secubox-app-exposure
|
||||
Version: 1.0.0-r1
|
||||
@ -670,7 +670,7 @@ Description: Unified service exposure manager for SecuBox.
|
||||
- Dynamic Tor hidden service management
|
||||
- HAProxy SSL reverse proxy configuration
|
||||
Filename: secubox-app-exposure_1.0.0-r1_all.ipk
|
||||
Size: 6827
|
||||
Size: 6830
|
||||
|
||||
Package: secubox-app-gitea
|
||||
Version: 1.0.0-r5
|
||||
@ -693,7 +693,7 @@ Description: Gitea Git Platform - Self-hosted lightweight Git service
|
||||
Runs in LXC container with Alpine Linux.
|
||||
Configure in /etc/config/gitea.
|
||||
Filename: secubox-app-gitea_1.0.0-r5_all.ipk
|
||||
Size: 9406
|
||||
Size: 9409
|
||||
|
||||
Package: secubox-app-glances
|
||||
Version: 1.0.0-r1
|
||||
@ -716,7 +716,7 @@ Description: Glances - Cross-platform system monitoring tool for SecuBox.
|
||||
Runs in LXC container for isolation and security.
|
||||
Configure in /etc/config/glances.
|
||||
Filename: secubox-app-glances_1.0.0-r1_all.ipk
|
||||
Size: 5534
|
||||
Size: 5536
|
||||
|
||||
Package: secubox-app-haproxy
|
||||
Version: 1.0.0-r23
|
||||
@ -760,7 +760,7 @@ Description: Hexo CMS - Self-hosted static blog generator for OpenWrt
|
||||
Runs in LXC container with Alpine Linux.
|
||||
Configure in /etc/config/hexojs.
|
||||
Filename: secubox-app-hexojs_1.0.0-r8_all.ipk
|
||||
Size: 94935
|
||||
Size: 94934
|
||||
|
||||
Package: secubox-app-localai
|
||||
Version: 2.25.0-r1
|
||||
@ -782,7 +782,7 @@ Description: LocalAI native binary package for OpenWrt.
|
||||
|
||||
API: http://<router-ip>:8081/v1
|
||||
Filename: secubox-app-localai_2.25.0-r1_all.ipk
|
||||
Size: 5725
|
||||
Size: 5724
|
||||
|
||||
Package: secubox-app-localai-wb
|
||||
Version: 2.25.0-r1
|
||||
@ -806,7 +806,7 @@ Description: LocalAI native binary package for OpenWrt.
|
||||
|
||||
API: http://<router-ip>:8080/v1
|
||||
Filename: secubox-app-localai-wb_2.25.0-r1_all.ipk
|
||||
Size: 7951
|
||||
Size: 7953
|
||||
|
||||
Package: secubox-app-lyrion
|
||||
Version: 2.0.2-r1
|
||||
@ -848,7 +848,7 @@ Description: MagicMirror² - Open source modular smart mirror platform for Secu
|
||||
Runs in LXC container for isolation and security.
|
||||
Configure in /etc/config/magicmirror2.
|
||||
Filename: secubox-app-magicmirror2_0.4.0-r8_all.ipk
|
||||
Size: 9250
|
||||
Size: 9253
|
||||
|
||||
Package: secubox-app-mailinabox
|
||||
Version: 2.0.0-r1
|
||||
@ -873,7 +873,7 @@ Description: Complete email server solution using docker-mailserver for SecuBox
|
||||
|
||||
Commands: mailinaboxctl --help
|
||||
Filename: secubox-app-mailinabox_2.0.0-r1_all.ipk
|
||||
Size: 7573
|
||||
Size: 7574
|
||||
|
||||
Package: secubox-app-metabolizer
|
||||
Version: 1.0.0-r3
|
||||
@ -894,7 +894,7 @@ Description: Metabolizer Blog Pipeline - Integrated CMS with Git-based workflow
|
||||
|
||||
Pipeline: Edit in Streamlit -> Push to Gitea -> Build with Hexo -> Publish
|
||||
Filename: secubox-app-metabolizer_1.0.0-r3_all.ipk
|
||||
Size: 13979
|
||||
Size: 13975
|
||||
|
||||
Package: secubox-app-mitmproxy
|
||||
Version: 0.4.0-r16
|
||||
@ -915,7 +915,7 @@ Description: mitmproxy - Interactive HTTPS proxy for SecuBox-powered OpenWrt sy
|
||||
Runs in LXC container for isolation and security.
|
||||
Configure in /etc/config/mitmproxy.
|
||||
Filename: secubox-app-mitmproxy_0.4.0-r16_all.ipk
|
||||
Size: 10215
|
||||
Size: 10214
|
||||
|
||||
Package: secubox-app-mmpm
|
||||
Version: 0.2.0-r5
|
||||
@ -936,7 +936,7 @@ Description: MMPM (MagicMirror Package Manager) for SecuBox.
|
||||
|
||||
Runs inside the MagicMirror2 LXC container.
|
||||
Filename: secubox-app-mmpm_0.2.0-r5_all.ipk
|
||||
Size: 3978
|
||||
Size: 3980
|
||||
|
||||
Package: secubox-app-nextcloud
|
||||
Version: 1.0.0-r2
|
||||
@ -972,7 +972,7 @@ Description: Ollama - Simple local LLM runtime for SecuBox-powered OpenWrt syst
|
||||
Runs in Docker/Podman container.
|
||||
Configure in /etc/config/ollama.
|
||||
Filename: secubox-app-ollama_0.1.0-r1_all.ipk
|
||||
Size: 5735
|
||||
Size: 5741
|
||||
|
||||
Package: secubox-app-picobrew
|
||||
Version: 1.0.0-r7
|
||||
@ -994,7 +994,7 @@ Description: PicoBrew Server - Self-hosted brewing controller for PicoBrew devi
|
||||
Runs in LXC container with Python/Flask backend.
|
||||
Configure in /etc/config/picobrew.
|
||||
Filename: secubox-app-picobrew_1.0.0-r7_all.ipk
|
||||
Size: 5536
|
||||
Size: 5538
|
||||
|
||||
Package: secubox-app-streamlit
|
||||
Version: 1.0.0-r5
|
||||
@ -1021,7 +1021,7 @@ Description: Streamlit App Platform - Self-hosted Python data app platform
|
||||
|
||||
Configure in /etc/config/streamlit.
|
||||
Filename: secubox-app-streamlit_1.0.0-r5_all.ipk
|
||||
Size: 11722
|
||||
Size: 11720
|
||||
|
||||
Package: secubox-app-tor
|
||||
Version: 1.0.0-r1
|
||||
@ -1062,7 +1062,7 @@ Description: SecuBox Control Center Dashboard - A web-based dashboard for monit
|
||||
- Service management
|
||||
- Network interface control
|
||||
Filename: secubox-app-webapp_1.5.0-r7_all.ipk
|
||||
Size: 39167
|
||||
Size: 39168
|
||||
|
||||
Package: secubox-app-zigbee2mqtt
|
||||
Version: 1.0.0-r3
|
||||
@ -1075,7 +1075,7 @@ Installed-Size: 20480
|
||||
Description: Installer, configuration, and service manager for running Zigbee2MQTT
|
||||
inside Docker on SecuBox-powered OpenWrt systems.
|
||||
Filename: secubox-app-zigbee2mqtt_1.0.0-r3_all.ipk
|
||||
Size: 3539
|
||||
Size: 3545
|
||||
|
||||
Package: secubox-core
|
||||
Version: 0.10.0-r9
|
||||
@ -1095,5 +1095,5 @@ Description: SecuBox Core Framework provides the foundational infrastructure fo
|
||||
- Unified CLI interface
|
||||
- ubus RPC backend
|
||||
Filename: secubox-core_0.10.0-r9_all.ipk
|
||||
Size: 80068
|
||||
Size: 80069
|
||||
|
||||
|
||||
Binary file not shown.
@ -1,12 +1,12 @@
|
||||
{
|
||||
"feed_url": "/secubox-feed",
|
||||
"generated": "2026-01-30T09:25:31+01:00",
|
||||
"generated": "2026-01-30T17:21:08+01:00",
|
||||
"packages": [
|
||||
{
|
||||
"name": "luci-app-auth-guardian",
|
||||
"version": "0.4.0-r3",
|
||||
"filename": "luci-app-auth-guardian_0.4.0-r3_all.ipk",
|
||||
"size": 12084,
|
||||
"size": 12080,
|
||||
"category": "security",
|
||||
"icon": "key",
|
||||
"description": "Authentication management",
|
||||
@ -18,7 +18,7 @@
|
||||
"name": "luci-app-bandwidth-manager",
|
||||
"version": "0.5.0-r2",
|
||||
"filename": "luci-app-bandwidth-manager_0.5.0-r2_all.ipk",
|
||||
"size": 66963,
|
||||
"size": 66966,
|
||||
"category": "network",
|
||||
"icon": "activity",
|
||||
"description": "Bandwidth monitoring and control",
|
||||
@ -30,7 +30,7 @@
|
||||
"name": "luci-app-cdn-cache",
|
||||
"version": "0.5.0-r3",
|
||||
"filename": "luci-app-cdn-cache_0.5.0-r3_all.ipk",
|
||||
"size": 20434,
|
||||
"size": 23189,
|
||||
"category": "network",
|
||||
"icon": "globe",
|
||||
"description": "CDN caching",
|
||||
@ -42,7 +42,7 @@
|
||||
"name": "luci-app-client-guardian",
|
||||
"version": "0.4.0-r7",
|
||||
"filename": "luci-app-client-guardian_0.4.0-r7_all.ipk",
|
||||
"size": 57045,
|
||||
"size": 57047,
|
||||
"category": "network",
|
||||
"icon": "users",
|
||||
"description": "Client management and monitoring",
|
||||
@ -54,7 +54,7 @@
|
||||
"name": "luci-app-crowdsec-dashboard",
|
||||
"version": "0.7.0-r29",
|
||||
"filename": "luci-app-crowdsec-dashboard_0.7.0-r29_all.ipk",
|
||||
"size": 55583,
|
||||
"size": 55585,
|
||||
"category": "security",
|
||||
"icon": "shield",
|
||||
"description": "CrowdSec security monitoring",
|
||||
@ -66,7 +66,7 @@
|
||||
"name": "luci-app-cyberfeed",
|
||||
"version": "0.1.1-r1",
|
||||
"filename": "luci-app-cyberfeed_0.1.1-r1_all.ipk",
|
||||
"size": 12836,
|
||||
"size": 12840,
|
||||
"category": "utility",
|
||||
"icon": "package",
|
||||
"description": "SecuBox package",
|
||||
@ -78,7 +78,7 @@
|
||||
"name": "luci-app-exposure",
|
||||
"version": "1.0.0-r3",
|
||||
"filename": "luci-app-exposure_1.0.0-r3_all.ipk",
|
||||
"size": 20535,
|
||||
"size": 20536,
|
||||
"category": "utility",
|
||||
"icon": "package",
|
||||
"description": "SecuBox package",
|
||||
@ -90,7 +90,7 @@
|
||||
"name": "luci-app-gitea",
|
||||
"version": "1.0.0-r2",
|
||||
"filename": "luci-app-gitea_1.0.0-r2_all.ipk",
|
||||
"size": 15583,
|
||||
"size": 15588,
|
||||
"category": "utility",
|
||||
"icon": "package",
|
||||
"description": "SecuBox package",
|
||||
@ -102,7 +102,7 @@
|
||||
"name": "luci-app-glances",
|
||||
"version": "1.0.0-r2",
|
||||
"filename": "luci-app-glances_1.0.0-r2_all.ipk",
|
||||
"size": 6965,
|
||||
"size": 6967,
|
||||
"category": "utility",
|
||||
"icon": "package",
|
||||
"description": "SecuBox package",
|
||||
@ -114,7 +114,7 @@
|
||||
"name": "luci-app-haproxy",
|
||||
"version": "1.0.0-r8",
|
||||
"filename": "luci-app-haproxy_1.0.0-r8_all.ipk",
|
||||
"size": 34162,
|
||||
"size": 34168,
|
||||
"category": "utility",
|
||||
"icon": "package",
|
||||
"description": "SecuBox package",
|
||||
@ -126,7 +126,7 @@
|
||||
"name": "luci-app-hexojs",
|
||||
"version": "1.0.0-r3",
|
||||
"filename": "luci-app-hexojs_1.0.0-r3_all.ipk",
|
||||
"size": 32980,
|
||||
"size": 32984,
|
||||
"category": "utility",
|
||||
"icon": "package",
|
||||
"description": "SecuBox package",
|
||||
@ -138,7 +138,7 @@
|
||||
"name": "luci-app-ksm-manager",
|
||||
"version": "0.4.0-r2",
|
||||
"filename": "luci-app-ksm-manager_0.4.0-r2_all.ipk",
|
||||
"size": 18724,
|
||||
"size": 18722,
|
||||
"category": "system",
|
||||
"icon": "cpu",
|
||||
"description": "Kernel memory management",
|
||||
@ -162,7 +162,7 @@
|
||||
"name": "luci-app-lyrion",
|
||||
"version": "1.0.0-r1",
|
||||
"filename": "luci-app-lyrion_1.0.0-r1_all.ipk",
|
||||
"size": 6725,
|
||||
"size": 6726,
|
||||
"category": "utility",
|
||||
"icon": "package",
|
||||
"description": "SecuBox package",
|
||||
@ -174,7 +174,7 @@
|
||||
"name": "luci-app-magicmirror2",
|
||||
"version": "0.4.0-r6",
|
||||
"filename": "luci-app-magicmirror2_0.4.0-r6_all.ipk",
|
||||
"size": 12274,
|
||||
"size": 12277,
|
||||
"category": "iot",
|
||||
"icon": "monitor",
|
||||
"description": "Smart mirror display",
|
||||
@ -186,7 +186,7 @@
|
||||
"name": "luci-app-mailinabox",
|
||||
"version": "1.0.0-r1",
|
||||
"filename": "luci-app-mailinabox_1.0.0-r1_all.ipk",
|
||||
"size": 5483,
|
||||
"size": 5481,
|
||||
"category": "utility",
|
||||
"icon": "package",
|
||||
"description": "SecuBox package",
|
||||
@ -198,7 +198,7 @@
|
||||
"name": "luci-app-media-flow",
|
||||
"version": "0.6.4-r1",
|
||||
"filename": "luci-app-media-flow_0.6.4-r1_all.ipk",
|
||||
"size": 19116,
|
||||
"size": 19124,
|
||||
"category": "media",
|
||||
"icon": "film",
|
||||
"description": "Media streaming",
|
||||
@ -210,7 +210,7 @@
|
||||
"name": "luci-app-metablogizer",
|
||||
"version": "1.0.0-r3",
|
||||
"filename": "luci-app-metablogizer_1.0.0-r3_all.ipk",
|
||||
"size": 21649,
|
||||
"size": 21652,
|
||||
"category": "utility",
|
||||
"icon": "package",
|
||||
"description": "SecuBox package",
|
||||
@ -222,7 +222,7 @@
|
||||
"name": "luci-app-metabolizer",
|
||||
"version": "1.0.0-r2",
|
||||
"filename": "luci-app-metabolizer_1.0.0-r2_all.ipk",
|
||||
"size": 4757,
|
||||
"size": 4758,
|
||||
"category": "utility",
|
||||
"icon": "package",
|
||||
"description": "SecuBox package",
|
||||
@ -246,7 +246,7 @@
|
||||
"name": "luci-app-mmpm",
|
||||
"version": "0.2.0-r3",
|
||||
"filename": "luci-app-mmpm_0.2.0-r3_all.ipk",
|
||||
"size": 7902,
|
||||
"size": 7905,
|
||||
"category": "utility",
|
||||
"icon": "package",
|
||||
"description": "SecuBox package",
|
||||
@ -258,7 +258,7 @@
|
||||
"name": "luci-app-mqtt-bridge",
|
||||
"version": "0.4.0-r4",
|
||||
"filename": "luci-app-mqtt-bridge_0.4.0-r4_all.ipk",
|
||||
"size": 22780,
|
||||
"size": 22777,
|
||||
"category": "iot",
|
||||
"icon": "message-square",
|
||||
"description": "MQTT bridge",
|
||||
@ -270,7 +270,7 @@
|
||||
"name": "luci-app-ndpid",
|
||||
"version": "1.1.2-r2",
|
||||
"filename": "luci-app-ndpid_1.1.2-r2_all.ipk",
|
||||
"size": 22454,
|
||||
"size": 22458,
|
||||
"category": "security",
|
||||
"icon": "eye",
|
||||
"description": "Deep packet inspection",
|
||||
@ -282,7 +282,7 @@
|
||||
"name": "luci-app-netdata-dashboard",
|
||||
"version": "0.5.0-r2",
|
||||
"filename": "luci-app-netdata-dashboard_0.5.0-r2_all.ipk",
|
||||
"size": 22398,
|
||||
"size": 22402,
|
||||
"category": "monitoring",
|
||||
"icon": "bar-chart-2",
|
||||
"description": "System monitoring dashboard",
|
||||
@ -294,7 +294,7 @@
|
||||
"name": "luci-app-network-modes",
|
||||
"version": "0.5.0-r3",
|
||||
"filename": "luci-app-network-modes_0.5.0-r3_all.ipk",
|
||||
"size": 55612,
|
||||
"size": 55608,
|
||||
"category": "network",
|
||||
"icon": "wifi",
|
||||
"description": "Network configuration",
|
||||
@ -306,7 +306,7 @@
|
||||
"name": "luci-app-network-tweaks",
|
||||
"version": "1.0.0-r7",
|
||||
"filename": "luci-app-network-tweaks_1.0.0-r7_all.ipk",
|
||||
"size": 15465,
|
||||
"size": 15458,
|
||||
"category": "network",
|
||||
"icon": "wifi",
|
||||
"description": "Network configuration",
|
||||
@ -318,7 +318,7 @@
|
||||
"name": "luci-app-nextcloud",
|
||||
"version": "1.0.0-r1",
|
||||
"filename": "luci-app-nextcloud_1.0.0-r1_all.ipk",
|
||||
"size": 6486,
|
||||
"size": 6488,
|
||||
"category": "utility",
|
||||
"icon": "package",
|
||||
"description": "SecuBox package",
|
||||
@ -330,7 +330,7 @@
|
||||
"name": "luci-app-ollama",
|
||||
"version": "0.1.0-r1",
|
||||
"filename": "luci-app-ollama_0.1.0-r1_all.ipk",
|
||||
"size": 11995,
|
||||
"size": 11997,
|
||||
"category": "utility",
|
||||
"icon": "package",
|
||||
"description": "SecuBox package",
|
||||
@ -342,7 +342,7 @@
|
||||
"name": "luci-app-picobrew",
|
||||
"version": "1.0.0-r1",
|
||||
"filename": "luci-app-picobrew_1.0.0-r1_all.ipk",
|
||||
"size": 9976,
|
||||
"size": 9978,
|
||||
"category": "utility",
|
||||
"icon": "package",
|
||||
"description": "SecuBox package",
|
||||
@ -354,7 +354,7 @@
|
||||
"name": "luci-app-secubox",
|
||||
"version": "0.7.1-r4",
|
||||
"filename": "luci-app-secubox_0.7.1-r4_all.ipk",
|
||||
"size": 49905,
|
||||
"size": 49906,
|
||||
"category": "system",
|
||||
"icon": "box",
|
||||
"description": "SecuBox system component",
|
||||
@ -378,7 +378,7 @@
|
||||
"name": "luci-app-secubox-crowdsec",
|
||||
"version": "1.0.0-r3",
|
||||
"filename": "luci-app-secubox-crowdsec_1.0.0-r3_all.ipk",
|
||||
"size": 13914,
|
||||
"size": 13915,
|
||||
"category": "system",
|
||||
"icon": "box",
|
||||
"description": "SecuBox system component",
|
||||
@ -402,7 +402,7 @@
|
||||
"name": "luci-app-secubox-netifyd",
|
||||
"version": "1.2.1-r1",
|
||||
"filename": "luci-app-secubox-netifyd_1.2.1-r1_all.ipk",
|
||||
"size": 39499,
|
||||
"size": 39500,
|
||||
"category": "system",
|
||||
"icon": "box",
|
||||
"description": "SecuBox system component",
|
||||
@ -414,7 +414,7 @@
|
||||
"name": "luci-app-secubox-portal",
|
||||
"version": "0.7.0-r2",
|
||||
"filename": "luci-app-secubox-portal_0.7.0-r2_all.ipk",
|
||||
"size": 32227,
|
||||
"size": 32228,
|
||||
"category": "system",
|
||||
"icon": "box",
|
||||
"description": "SecuBox system component",
|
||||
@ -426,7 +426,7 @@
|
||||
"name": "luci-app-secubox-security-threats",
|
||||
"version": "1.0.0-r4",
|
||||
"filename": "luci-app-secubox-security-threats_1.0.0-r4_all.ipk",
|
||||
"size": 13906,
|
||||
"size": 13907,
|
||||
"category": "system",
|
||||
"icon": "box",
|
||||
"description": "SecuBox system component",
|
||||
@ -438,7 +438,7 @@
|
||||
"name": "luci-app-service-registry",
|
||||
"version": "1.0.0-r1",
|
||||
"filename": "luci-app-service-registry_1.0.0-r1_all.ipk",
|
||||
"size": 33349,
|
||||
"size": 39612,
|
||||
"category": "utility",
|
||||
"icon": "package",
|
||||
"description": "SecuBox package",
|
||||
@ -450,7 +450,7 @@
|
||||
"name": "luci-app-streamlit",
|
||||
"version": "1.0.0-r9",
|
||||
"filename": "luci-app-streamlit_1.0.0-r9_all.ipk",
|
||||
"size": 20473,
|
||||
"size": 20470,
|
||||
"category": "utility",
|
||||
"icon": "package",
|
||||
"description": "SecuBox package",
|
||||
@ -462,7 +462,7 @@
|
||||
"name": "luci-app-system-hub",
|
||||
"version": "0.5.1-r4",
|
||||
"filename": "luci-app-system-hub_0.5.1-r4_all.ipk",
|
||||
"size": 66345,
|
||||
"size": 66348,
|
||||
"category": "system",
|
||||
"icon": "settings",
|
||||
"description": "System management",
|
||||
@ -474,7 +474,7 @@
|
||||
"name": "luci-app-tor-shield",
|
||||
"version": "1.0.0-r10",
|
||||
"filename": "luci-app-tor-shield_1.0.0-r10_all.ipk",
|
||||
"size": 24535,
|
||||
"size": 24534,
|
||||
"category": "utility",
|
||||
"icon": "package",
|
||||
"description": "SecuBox package",
|
||||
@ -486,7 +486,7 @@
|
||||
"name": "luci-app-traffic-shaper",
|
||||
"version": "0.4.0-r2",
|
||||
"filename": "luci-app-traffic-shaper_0.4.0-r2_all.ipk",
|
||||
"size": 15635,
|
||||
"size": 15638,
|
||||
"category": "network",
|
||||
"icon": "filter",
|
||||
"description": "Traffic shaping and QoS",
|
||||
@ -498,7 +498,7 @@
|
||||
"name": "luci-app-vhost-manager",
|
||||
"version": "0.5.0-r5",
|
||||
"filename": "luci-app-vhost-manager_0.5.0-r5_all.ipk",
|
||||
"size": 29226,
|
||||
"size": 29227,
|
||||
"category": "network",
|
||||
"icon": "server",
|
||||
"description": "Virtual host management",
|
||||
@ -510,7 +510,7 @@
|
||||
"name": "luci-app-wireguard-dashboard",
|
||||
"version": "0.7.0-r5",
|
||||
"filename": "luci-app-wireguard-dashboard_0.7.0-r5_all.ipk",
|
||||
"size": 45370,
|
||||
"size": 45369,
|
||||
"category": "vpn",
|
||||
"icon": "shield",
|
||||
"description": "WireGuard VPN dashboard",
|
||||
@ -522,7 +522,7 @@
|
||||
"name": "luci-app-zigbee2mqtt",
|
||||
"version": "1.0.0-r2",
|
||||
"filename": "luci-app-zigbee2mqtt_1.0.0-r2_all.ipk",
|
||||
"size": 7083,
|
||||
"size": 7094,
|
||||
"category": "iot",
|
||||
"icon": "radio",
|
||||
"description": "Zigbee device management",
|
||||
@ -534,7 +534,7 @@
|
||||
"name": "luci-theme-secubox",
|
||||
"version": "0.4.7-r1",
|
||||
"filename": "luci-theme-secubox_0.4.7-r1_all.ipk",
|
||||
"size": 111796,
|
||||
"size": 111797,
|
||||
"category": "theme",
|
||||
"icon": "palette",
|
||||
"description": "LuCI theme",
|
||||
@ -546,7 +546,7 @@
|
||||
"name": "secubox-app",
|
||||
"version": "1.0.0-r2",
|
||||
"filename": "secubox-app_1.0.0-r2_all.ipk",
|
||||
"size": 11182,
|
||||
"size": 11183,
|
||||
"category": "utility",
|
||||
"icon": "package",
|
||||
"description": "SecuBox package",
|
||||
@ -558,7 +558,7 @@
|
||||
"name": "secubox-app-adguardhome",
|
||||
"version": "1.0.0-r2",
|
||||
"filename": "secubox-app-adguardhome_1.0.0-r2_all.ipk",
|
||||
"size": 2880,
|
||||
"size": 2881,
|
||||
"category": "secubox",
|
||||
"icon": "package",
|
||||
"description": "SecuBox backend service",
|
||||
@ -570,7 +570,7 @@
|
||||
"name": "secubox-app-auth-logger",
|
||||
"version": "1.2.2-r1",
|
||||
"filename": "secubox-app-auth-logger_1.2.2-r1_all.ipk",
|
||||
"size": 9377,
|
||||
"size": 9375,
|
||||
"category": "secubox",
|
||||
"icon": "package",
|
||||
"description": "SecuBox backend service",
|
||||
@ -582,7 +582,7 @@
|
||||
"name": "secubox-app-crowdsec-custom",
|
||||
"version": "1.1.0-r1",
|
||||
"filename": "secubox-app-crowdsec-custom_1.1.0-r1_all.ipk",
|
||||
"size": 5758,
|
||||
"size": 5756,
|
||||
"category": "secubox",
|
||||
"icon": "package",
|
||||
"description": "SecuBox backend service",
|
||||
@ -606,7 +606,7 @@
|
||||
"name": "secubox-app-cyberfeed",
|
||||
"version": "0.2.1-r1",
|
||||
"filename": "secubox-app-cyberfeed_0.2.1-r1_all.ipk",
|
||||
"size": 12450,
|
||||
"size": 12455,
|
||||
"category": "secubox",
|
||||
"icon": "package",
|
||||
"description": "SecuBox backend service",
|
||||
@ -618,7 +618,7 @@
|
||||
"name": "secubox-app-domoticz",
|
||||
"version": "1.0.0-r2",
|
||||
"filename": "secubox-app-domoticz_1.0.0-r2_all.ipk",
|
||||
"size": 2548,
|
||||
"size": 2547,
|
||||
"category": "secubox",
|
||||
"icon": "package",
|
||||
"description": "SecuBox backend service",
|
||||
@ -630,7 +630,7 @@
|
||||
"name": "secubox-app-exposure",
|
||||
"version": "1.0.0-r1",
|
||||
"filename": "secubox-app-exposure_1.0.0-r1_all.ipk",
|
||||
"size": 6827,
|
||||
"size": 6830,
|
||||
"category": "secubox",
|
||||
"icon": "package",
|
||||
"description": "SecuBox backend service",
|
||||
@ -642,7 +642,7 @@
|
||||
"name": "secubox-app-gitea",
|
||||
"version": "1.0.0-r5",
|
||||
"filename": "secubox-app-gitea_1.0.0-r5_all.ipk",
|
||||
"size": 9406,
|
||||
"size": 9409,
|
||||
"category": "secubox",
|
||||
"icon": "package",
|
||||
"description": "SecuBox backend service",
|
||||
@ -654,7 +654,7 @@
|
||||
"name": "secubox-app-glances",
|
||||
"version": "1.0.0-r1",
|
||||
"filename": "secubox-app-glances_1.0.0-r1_all.ipk",
|
||||
"size": 5534,
|
||||
"size": 5536,
|
||||
"category": "secubox",
|
||||
"icon": "package",
|
||||
"description": "SecuBox backend service",
|
||||
@ -678,7 +678,7 @@
|
||||
"name": "secubox-app-hexojs",
|
||||
"version": "1.0.0-r8",
|
||||
"filename": "secubox-app-hexojs_1.0.0-r8_all.ipk",
|
||||
"size": 94935,
|
||||
"size": 94934,
|
||||
"category": "secubox",
|
||||
"icon": "package",
|
||||
"description": "SecuBox backend service",
|
||||
@ -690,7 +690,7 @@
|
||||
"name": "secubox-app-localai",
|
||||
"version": "2.25.0-r1",
|
||||
"filename": "secubox-app-localai_2.25.0-r1_all.ipk",
|
||||
"size": 5725,
|
||||
"size": 5724,
|
||||
"category": "secubox",
|
||||
"icon": "package",
|
||||
"description": "SecuBox backend service",
|
||||
@ -702,7 +702,7 @@
|
||||
"name": "secubox-app-localai-wb",
|
||||
"version": "2.25.0-r1",
|
||||
"filename": "secubox-app-localai-wb_2.25.0-r1_all.ipk",
|
||||
"size": 7951,
|
||||
"size": 7953,
|
||||
"category": "secubox",
|
||||
"icon": "package",
|
||||
"description": "SecuBox backend service",
|
||||
@ -726,7 +726,7 @@
|
||||
"name": "secubox-app-magicmirror2",
|
||||
"version": "0.4.0-r8",
|
||||
"filename": "secubox-app-magicmirror2_0.4.0-r8_all.ipk",
|
||||
"size": 9250,
|
||||
"size": 9253,
|
||||
"category": "secubox",
|
||||
"icon": "package",
|
||||
"description": "SecuBox backend service",
|
||||
@ -738,7 +738,7 @@
|
||||
"name": "secubox-app-mailinabox",
|
||||
"version": "2.0.0-r1",
|
||||
"filename": "secubox-app-mailinabox_2.0.0-r1_all.ipk",
|
||||
"size": 7573,
|
||||
"size": 7574,
|
||||
"category": "secubox",
|
||||
"icon": "package",
|
||||
"description": "SecuBox backend service",
|
||||
@ -750,7 +750,7 @@
|
||||
"name": "secubox-app-metabolizer",
|
||||
"version": "1.0.0-r3",
|
||||
"filename": "secubox-app-metabolizer_1.0.0-r3_all.ipk",
|
||||
"size": 13979,
|
||||
"size": 13975,
|
||||
"category": "secubox",
|
||||
"icon": "package",
|
||||
"description": "SecuBox backend service",
|
||||
@ -762,7 +762,7 @@
|
||||
"name": "secubox-app-mitmproxy",
|
||||
"version": "0.4.0-r16",
|
||||
"filename": "secubox-app-mitmproxy_0.4.0-r16_all.ipk",
|
||||
"size": 10215,
|
||||
"size": 10214,
|
||||
"category": "secubox",
|
||||
"icon": "package",
|
||||
"description": "SecuBox backend service",
|
||||
@ -774,7 +774,7 @@
|
||||
"name": "secubox-app-mmpm",
|
||||
"version": "0.2.0-r5",
|
||||
"filename": "secubox-app-mmpm_0.2.0-r5_all.ipk",
|
||||
"size": 3978,
|
||||
"size": 3980,
|
||||
"category": "secubox",
|
||||
"icon": "package",
|
||||
"description": "SecuBox backend service",
|
||||
@ -798,7 +798,7 @@
|
||||
"name": "secubox-app-ollama",
|
||||
"version": "0.1.0-r1",
|
||||
"filename": "secubox-app-ollama_0.1.0-r1_all.ipk",
|
||||
"size": 5735,
|
||||
"size": 5741,
|
||||
"category": "secubox",
|
||||
"icon": "package",
|
||||
"description": "SecuBox backend service",
|
||||
@ -810,7 +810,7 @@
|
||||
"name": "secubox-app-picobrew",
|
||||
"version": "1.0.0-r7",
|
||||
"filename": "secubox-app-picobrew_1.0.0-r7_all.ipk",
|
||||
"size": 5536,
|
||||
"size": 5538,
|
||||
"category": "secubox",
|
||||
"icon": "package",
|
||||
"description": "SecuBox backend service",
|
||||
@ -822,7 +822,7 @@
|
||||
"name": "secubox-app-streamlit",
|
||||
"version": "1.0.0-r5",
|
||||
"filename": "secubox-app-streamlit_1.0.0-r5_all.ipk",
|
||||
"size": 11722,
|
||||
"size": 11720,
|
||||
"category": "secubox",
|
||||
"icon": "package",
|
||||
"description": "SecuBox backend service",
|
||||
@ -846,7 +846,7 @@
|
||||
"name": "secubox-app-webapp",
|
||||
"version": "1.5.0-r7",
|
||||
"filename": "secubox-app-webapp_1.5.0-r7_all.ipk",
|
||||
"size": 39167,
|
||||
"size": 39168,
|
||||
"category": "secubox",
|
||||
"icon": "package",
|
||||
"description": "SecuBox backend service",
|
||||
@ -858,7 +858,7 @@
|
||||
"name": "secubox-app-zigbee2mqtt",
|
||||
"version": "1.0.0-r3",
|
||||
"filename": "secubox-app-zigbee2mqtt_1.0.0-r3_all.ipk",
|
||||
"size": 3539,
|
||||
"size": 3545,
|
||||
"category": "secubox",
|
||||
"icon": "package",
|
||||
"description": "SecuBox backend service",
|
||||
@ -870,7 +870,7 @@
|
||||
"name": "secubox-core",
|
||||
"version": "0.10.0-r9",
|
||||
"filename": "secubox-core_0.10.0-r9_all.ipk",
|
||||
"size": 80068,
|
||||
"size": 80069,
|
||||
"category": "system",
|
||||
"icon": "box",
|
||||
"description": "SecuBox core components",
|
||||
|
||||
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
@ -0,0 +1,323 @@
|
||||
#!/usr/bin/env python3
|
||||
"""
|
||||
SecuBox Analytics Addon for mitmproxy
|
||||
Logs external access attempts with IP, country, user agent, auth attempts, scan detection
|
||||
Feeds data to CrowdSec for threat detection
|
||||
"""
|
||||
|
||||
import json
|
||||
import time
|
||||
import re
|
||||
import hashlib
|
||||
import os
|
||||
from datetime import datetime
|
||||
from mitmproxy import http, ctx
|
||||
from pathlib import Path
|
||||
|
||||
# GeoIP database path (MaxMind GeoLite2)
|
||||
GEOIP_DB = "/srv/mitmproxy/GeoLite2-Country.mmdb"
|
||||
LOG_FILE = "/var/log/secubox-access.log"
|
||||
CROWDSEC_LOG = "/var/log/crowdsec/secubox-mitm.log"
|
||||
ALERTS_FILE = "/tmp/secubox-mitm-alerts.json"
|
||||
|
||||
# Suspicious patterns
|
||||
SCAN_PATTERNS = [
|
||||
r'/\.env', r'/\.git', r'/wp-admin', r'/wp-login', r'/phpmyadmin',
|
||||
r'/admin', r'/administrator', r'/xmlrpc\.php', r'/wp-content/uploads',
|
||||
r'/\.aws', r'/\.ssh', r'/config\.php', r'/backup', r'/db\.sql',
|
||||
r'/shell', r'/cmd', r'/exec', r'/eval', r'\.\./', r'/etc/passwd',
|
||||
r'/proc/self', r'<script', r'union\s+select', r';\s*drop\s+table',
|
||||
]
|
||||
|
||||
AUTH_PATHS = [
|
||||
'/login', '/signin', '/auth', '/api/auth', '/oauth', '/token',
|
||||
'/session', '/cgi-bin/luci', '/admin'
|
||||
]
|
||||
|
||||
BOT_SIGNATURES = [
|
||||
'bot', 'crawler', 'spider', 'scan', 'curl', 'wget', 'python-requests',
|
||||
'go-http-client', 'java/', 'zgrab', 'masscan', 'nmap', 'nikto'
|
||||
]
|
||||
|
||||
class SecuBoxAnalytics:
|
||||
def __init__(self):
|
||||
self.geoip = None
|
||||
self.alerts = []
|
||||
self._load_geoip()
|
||||
ctx.log.info("SecuBox Analytics addon loaded")
|
||||
|
||||
def _load_geoip(self):
|
||||
"""Load GeoIP database if available"""
|
||||
try:
|
||||
import geoip2.database
|
||||
if os.path.exists(GEOIP_DB):
|
||||
self.geoip = geoip2.database.Reader(GEOIP_DB)
|
||||
ctx.log.info(f"GeoIP database loaded: {GEOIP_DB}")
|
||||
except ImportError:
|
||||
ctx.log.warn("geoip2 not available - country detection disabled")
|
||||
except Exception as e:
|
||||
ctx.log.warn(f"Failed to load GeoIP: {e}")
|
||||
|
||||
def _get_country(self, ip: str) -> str:
|
||||
"""Get country code from IP"""
|
||||
if not self.geoip or ip.startswith(('10.', '172.16.', '192.168.', '127.')):
|
||||
return 'LOCAL'
|
||||
try:
|
||||
response = self.geoip.country(ip)
|
||||
return response.country.iso_code or 'XX'
|
||||
except:
|
||||
return 'XX'
|
||||
|
||||
def _get_client_fingerprint(self, request: http.Request) -> dict:
|
||||
"""Generate client fingerprint from headers"""
|
||||
ua = request.headers.get('user-agent', '')
|
||||
accept = request.headers.get('accept', '')
|
||||
accept_lang = request.headers.get('accept-language', '')
|
||||
accept_enc = request.headers.get('accept-encoding', '')
|
||||
|
||||
# Create fingerprint hash
|
||||
fp_str = f"{ua}|{accept}|{accept_lang}|{accept_enc}"
|
||||
fp_hash = hashlib.md5(fp_str.encode()).hexdigest()[:12]
|
||||
|
||||
# Detect bot
|
||||
is_bot = any(sig in ua.lower() for sig in BOT_SIGNATURES)
|
||||
|
||||
# Parse UA for device info
|
||||
device = 'unknown'
|
||||
if 'mobile' in ua.lower() or 'android' in ua.lower():
|
||||
device = 'mobile'
|
||||
elif 'iphone' in ua.lower() or 'ipad' in ua.lower():
|
||||
device = 'ios'
|
||||
elif 'windows' in ua.lower():
|
||||
device = 'windows'
|
||||
elif 'mac' in ua.lower():
|
||||
device = 'macos'
|
||||
elif 'linux' in ua.lower():
|
||||
device = 'linux'
|
||||
|
||||
return {
|
||||
'fingerprint': fp_hash,
|
||||
'user_agent': ua[:200],
|
||||
'is_bot': is_bot,
|
||||
'device': device
|
||||
}
|
||||
|
||||
def _detect_scan(self, request: http.Request) -> dict:
|
||||
"""Detect scan/attack patterns"""
|
||||
path = request.path.lower()
|
||||
full_url = request.pretty_url.lower()
|
||||
|
||||
for pattern in SCAN_PATTERNS:
|
||||
if re.search(pattern, path, re.IGNORECASE):
|
||||
return {'is_scan': True, 'pattern': pattern, 'type': 'path_scan'}
|
||||
if re.search(pattern, full_url, re.IGNORECASE):
|
||||
return {'is_scan': True, 'pattern': pattern, 'type': 'url_scan'}
|
||||
|
||||
# Check for SQL injection
|
||||
if re.search(r"['\";\-\-]|union|select|insert|drop|update|delete", full_url, re.I):
|
||||
return {'is_scan': True, 'pattern': 'sql_injection', 'type': 'injection'}
|
||||
|
||||
# Check for XSS
|
||||
if re.search(r"<script|javascript:|onerror=|onload=", full_url, re.I):
|
||||
return {'is_scan': True, 'pattern': 'xss', 'type': 'injection'}
|
||||
|
||||
return {'is_scan': False, 'pattern': None, 'type': None}
|
||||
|
||||
def _is_auth_attempt(self, request: http.Request) -> bool:
|
||||
"""Check if request is authentication attempt"""
|
||||
path = request.path.lower()
|
||||
return any(auth_path in path for auth_path in AUTH_PATHS)
|
||||
|
||||
def _log_entry(self, entry: dict):
|
||||
"""Write log entry to files"""
|
||||
line = json.dumps(entry)
|
||||
|
||||
# Main access log
|
||||
try:
|
||||
with open(LOG_FILE, 'a') as f:
|
||||
f.write(line + '\n')
|
||||
except Exception as e:
|
||||
ctx.log.error(f"Failed to write access log: {e}")
|
||||
|
||||
# CrowdSec compatible log (if scan/suspicious)
|
||||
if entry.get('scan', {}).get('is_scan') or entry.get('is_auth_attempt'):
|
||||
try:
|
||||
cs_entry = {
|
||||
'timestamp': entry['timestamp'],
|
||||
'source_ip': entry['client_ip'],
|
||||
'country': entry['country'],
|
||||
'request': f"{entry['method']} {entry['path']}",
|
||||
'user_agent': entry['client'].get('user_agent', ''),
|
||||
'type': entry['scan'].get('type') or ('auth_attempt' if entry['is_auth_attempt'] else 'access'),
|
||||
'pattern': entry['scan'].get('pattern', '')
|
||||
}
|
||||
with open(CROWDSEC_LOG, 'a') as f:
|
||||
f.write(json.dumps(cs_entry) + '\n')
|
||||
except Exception as e:
|
||||
ctx.log.error(f"Failed to write CrowdSec log: {e}")
|
||||
|
||||
def _add_alert(self, alert: dict):
|
||||
"""Add security alert"""
|
||||
self.alerts.append(alert)
|
||||
# Keep last 100 alerts
|
||||
self.alerts = self.alerts[-100:]
|
||||
try:
|
||||
with open(ALERTS_FILE, 'w') as f:
|
||||
json.dump(self.alerts, f)
|
||||
except:
|
||||
pass
|
||||
|
||||
def _is_cache_refresh(self, request: http.Request) -> bool:
|
||||
"""Check if request should bypass cache for refresh"""
|
||||
# Cache-Control: no-cache or max-age=0
|
||||
cache_control = request.headers.get('cache-control', '').lower()
|
||||
if 'no-cache' in cache_control or 'max-age=0' in cache_control:
|
||||
return True
|
||||
|
||||
# Pragma: no-cache
|
||||
if request.headers.get('pragma', '').lower() == 'no-cache':
|
||||
return True
|
||||
|
||||
# Custom header for forced refresh
|
||||
if request.headers.get('x-secubox-refresh', '') == '1':
|
||||
return True
|
||||
|
||||
# If-None-Match or If-Modified-Since (conditional refresh)
|
||||
if request.headers.get('if-none-match') or request.headers.get('if-modified-since'):
|
||||
return True
|
||||
|
||||
return False
|
||||
|
||||
def _should_proxy_internal(self, request: http.Request, source_ip: str) -> dict:
|
||||
"""Determine if request stays internal (proxied) or goes direct"""
|
||||
is_refresh = self._is_cache_refresh(request)
|
||||
is_internal = source_ip.startswith(('10.', '172.16.', '192.168.', '127.'))
|
||||
|
||||
# Internal requests always proxied unless refresh
|
||||
if is_internal:
|
||||
return {'proxied': not is_refresh, 'reason': 'internal', 'direct': is_refresh}
|
||||
|
||||
# External requests: proxied through cache, refresh goes direct
|
||||
return {
|
||||
'proxied': not is_refresh,
|
||||
'reason': 'cache_refresh' if is_refresh else 'external_cached',
|
||||
'direct': is_refresh
|
||||
}
|
||||
|
||||
def request(self, flow: http.HTTPFlow):
|
||||
"""Process incoming request"""
|
||||
request = flow.request
|
||||
client_ip = flow.client_conn.peername[0] if flow.client_conn.peername else 'unknown'
|
||||
|
||||
# Get forwarded IP if behind proxy
|
||||
forwarded_ip = request.headers.get('x-forwarded-for', '').split(',')[0].strip()
|
||||
real_ip = request.headers.get('x-real-ip', '')
|
||||
source_ip = forwarded_ip or real_ip or client_ip
|
||||
|
||||
# Determine routing (proxied vs direct)
|
||||
routing = self._should_proxy_internal(request, source_ip)
|
||||
|
||||
# Build log entry
|
||||
entry = {
|
||||
'timestamp': datetime.utcnow().isoformat() + 'Z',
|
||||
'ts': int(time.time()),
|
||||
'client_ip': source_ip,
|
||||
'proxy_ip': client_ip,
|
||||
'country': self._get_country(source_ip),
|
||||
'method': request.method,
|
||||
'host': request.host,
|
||||
'path': request.path,
|
||||
'query': request.query.get('q', '')[:100] if request.query else '',
|
||||
'client': self._get_client_fingerprint(request),
|
||||
'scan': self._detect_scan(request),
|
||||
'is_auth_attempt': self._is_auth_attempt(request),
|
||||
'content_length': len(request.content) if request.content else 0,
|
||||
'routing': routing,
|
||||
'headers': {
|
||||
'referer': request.headers.get('referer', '')[:200],
|
||||
'origin': request.headers.get('origin', ''),
|
||||
'cache_control': request.headers.get('cache-control', ''),
|
||||
}
|
||||
}
|
||||
|
||||
# Add routing header for downstream (HAProxy/Squid)
|
||||
if routing['direct']:
|
||||
request.headers['x-secubox-direct'] = '1'
|
||||
request.headers['cache-control'] = 'no-cache, no-store'
|
||||
else:
|
||||
request.headers['x-secubox-proxied'] = '1'
|
||||
|
||||
# Store for response processing
|
||||
flow.metadata['secubox_entry'] = entry
|
||||
|
||||
# Log scan attempts immediately
|
||||
if entry['scan']['is_scan']:
|
||||
ctx.log.warn(f"SCAN DETECTED: {source_ip} ({entry['country']}) - {entry['scan']['pattern']} - {request.path}")
|
||||
self._add_alert({
|
||||
'time': entry['timestamp'],
|
||||
'ip': source_ip,
|
||||
'country': entry['country'],
|
||||
'type': 'scan',
|
||||
'pattern': entry['scan']['pattern'],
|
||||
'path': request.path
|
||||
})
|
||||
|
||||
# Log auth attempts
|
||||
if entry['is_auth_attempt']:
|
||||
ctx.log.info(f"AUTH ATTEMPT: {source_ip} ({entry['country']}) - {request.method} {request.path}")
|
||||
|
||||
def response(self, flow: http.HTTPFlow):
|
||||
"""Process response to complete log entry"""
|
||||
entry = flow.metadata.get('secubox_entry', {})
|
||||
if not entry:
|
||||
return
|
||||
|
||||
response = flow.response
|
||||
|
||||
# CDN Cache detection
|
||||
cache_status = response.headers.get('x-cache', '') or response.headers.get('x-cache-status', '')
|
||||
cache_hit = 'HIT' in cache_status.upper() if cache_status else None
|
||||
cdn_cache = response.headers.get('x-cdn-cache', '')
|
||||
squid_cache = response.headers.get('x-squid-cache', '')
|
||||
|
||||
entry['response'] = {
|
||||
'status': response.status_code,
|
||||
'content_length': len(response.content) if response.content else 0,
|
||||
'content_type': response.headers.get('content-type', '')[:50]
|
||||
}
|
||||
|
||||
# CDN/Cache info
|
||||
entry['cache'] = {
|
||||
'status': cache_status,
|
||||
'hit': cache_hit,
|
||||
'cdn': cdn_cache,
|
||||
'squid': squid_cache,
|
||||
'age': response.headers.get('age', ''),
|
||||
'cache_control': response.headers.get('cache-control', '')[:100],
|
||||
'etag': response.headers.get('etag', '')[:50],
|
||||
'via': response.headers.get('via', '')[:100]
|
||||
}
|
||||
|
||||
# Calculate response time
|
||||
entry['response_time_ms'] = int((time.time() - entry['ts']) * 1000)
|
||||
|
||||
# Log cache stats
|
||||
if cache_hit is not None:
|
||||
ctx.log.debug(f"CACHE {'HIT' if cache_hit else 'MISS'}: {entry['path']} ({entry['response_time_ms']}ms)")
|
||||
|
||||
# Log failed auth attempts (4xx on auth paths)
|
||||
if entry['is_auth_attempt'] and 400 <= response.status_code < 500:
|
||||
ctx.log.warn(f"AUTH FAILED: {entry['client_ip']} ({entry['country']}) - {response.status_code}")
|
||||
self._add_alert({
|
||||
'time': entry['timestamp'],
|
||||
'ip': entry['client_ip'],
|
||||
'country': entry['country'],
|
||||
'type': 'auth_failed',
|
||||
'status': response.status_code,
|
||||
'path': entry['path']
|
||||
})
|
||||
|
||||
self._log_entry(entry)
|
||||
|
||||
|
||||
addons = [SecuBoxAnalytics()]
|
||||
@ -0,0 +1,629 @@
|
||||
#!/usr/bin/env python3
|
||||
"""
|
||||
SecuBox Console - Remote Management Point
|
||||
KISS modular self-enhancing architecture
|
||||
|
||||
Usage:
|
||||
secubox-console # Interactive TUI
|
||||
secubox-console discover # Find devices
|
||||
secubox-console status # All devices status
|
||||
secubox-console <device> <cmd> # Run command on device
|
||||
"""
|
||||
|
||||
import os
|
||||
import sys
|
||||
import json
|
||||
import time
|
||||
import hashlib
|
||||
import threading
|
||||
from pathlib import Path
|
||||
from datetime import datetime
|
||||
from typing import Dict, List, Optional, Callable
|
||||
from dataclasses import dataclass, field
|
||||
from concurrent.futures import ThreadPoolExecutor
|
||||
|
||||
# ============================================================================
|
||||
# Configuration
|
||||
# ============================================================================
|
||||
CONSOLE_VERSION = "1.0.0"
|
||||
CONFIG_DIR = Path.home() / ".secubox-console"
|
||||
DEVICES_FILE = CONFIG_DIR / "devices.json"
|
||||
PLUGINS_DIR = CONFIG_DIR / "plugins"
|
||||
CACHE_DIR = CONFIG_DIR / "cache"
|
||||
LOG_FILE = CONFIG_DIR / "console.log"
|
||||
|
||||
# ============================================================================
|
||||
# Data Classes
|
||||
# ============================================================================
|
||||
@dataclass
|
||||
class SecuBoxDevice:
|
||||
"""Represents a SecuBox device"""
|
||||
name: str
|
||||
host: str
|
||||
port: int = 22
|
||||
user: str = "root"
|
||||
node_id: str = ""
|
||||
status: str = "unknown"
|
||||
last_seen: float = 0
|
||||
version: str = ""
|
||||
mesh_enabled: bool = False
|
||||
services: Dict = field(default_factory=dict)
|
||||
|
||||
def to_dict(self) -> dict:
|
||||
return {
|
||||
"name": self.name,
|
||||
"host": self.host,
|
||||
"port": self.port,
|
||||
"user": self.user,
|
||||
"node_id": self.node_id,
|
||||
"status": self.status,
|
||||
"last_seen": self.last_seen,
|
||||
"version": self.version,
|
||||
"mesh_enabled": self.mesh_enabled,
|
||||
"services": self.services
|
||||
}
|
||||
|
||||
@classmethod
|
||||
def from_dict(cls, data: dict) -> "SecuBoxDevice":
|
||||
return cls(**data)
|
||||
|
||||
|
||||
@dataclass
|
||||
class Plugin:
|
||||
"""Plugin metadata"""
|
||||
name: str
|
||||
version: str
|
||||
description: str
|
||||
author: str
|
||||
commands: List[str]
|
||||
module: object = None
|
||||
|
||||
|
||||
# ============================================================================
|
||||
# Core Console Class
|
||||
# ============================================================================
|
||||
class SecuBoxConsole:
|
||||
"""Main console application - KISS modular architecture"""
|
||||
|
||||
def __init__(self):
|
||||
self.devices: Dict[str, SecuBoxDevice] = {}
|
||||
self.plugins: Dict[str, Plugin] = {}
|
||||
self.commands: Dict[str, Callable] = {}
|
||||
self._ssh_connections = {}
|
||||
self._init_dirs()
|
||||
self._load_devices()
|
||||
self._load_plugins()
|
||||
self._register_core_commands()
|
||||
|
||||
def _init_dirs(self):
|
||||
"""Initialize directory structure"""
|
||||
for d in [CONFIG_DIR, PLUGINS_DIR, CACHE_DIR]:
|
||||
d.mkdir(parents=True, exist_ok=True)
|
||||
|
||||
def _load_devices(self):
|
||||
"""Load saved devices"""
|
||||
if DEVICES_FILE.exists():
|
||||
try:
|
||||
data = json.loads(DEVICES_FILE.read_text())
|
||||
for name, dev_data in data.get("devices", {}).items():
|
||||
self.devices[name] = SecuBoxDevice.from_dict(dev_data)
|
||||
except Exception as e:
|
||||
self.log(f"Failed to load devices: {e}")
|
||||
|
||||
def _save_devices(self):
|
||||
"""Save devices to file"""
|
||||
data = {"devices": {n: d.to_dict() for n, d in self.devices.items()}}
|
||||
DEVICES_FILE.write_text(json.dumps(data, indent=2))
|
||||
|
||||
def _load_plugins(self):
|
||||
"""Load plugins from plugins directory"""
|
||||
if not PLUGINS_DIR.exists():
|
||||
return
|
||||
|
||||
for plugin_file in PLUGINS_DIR.glob("*.py"):
|
||||
try:
|
||||
self._load_plugin(plugin_file)
|
||||
except Exception as e:
|
||||
self.log(f"Failed to load plugin {plugin_file.name}: {e}")
|
||||
|
||||
# Load built-in plugins
|
||||
builtin_plugins = Path(__file__).parent / "plugins"
|
||||
if builtin_plugins.exists():
|
||||
for plugin_file in builtin_plugins.glob("*.py"):
|
||||
try:
|
||||
self._load_plugin(plugin_file)
|
||||
except Exception as e:
|
||||
self.log(f"Failed to load builtin plugin {plugin_file.name}: {e}")
|
||||
|
||||
def _load_plugin(self, plugin_file: Path):
|
||||
"""Load a single plugin"""
|
||||
import importlib.util
|
||||
spec = importlib.util.spec_from_file_location(plugin_file.stem, plugin_file)
|
||||
module = importlib.util.module_from_spec(spec)
|
||||
spec.loader.exec_module(module)
|
||||
|
||||
if hasattr(module, "PLUGIN_INFO"):
|
||||
info = module.PLUGIN_INFO
|
||||
plugin = Plugin(
|
||||
name=info.get("name", plugin_file.stem),
|
||||
version=info.get("version", "1.0.0"),
|
||||
description=info.get("description", ""),
|
||||
author=info.get("author", ""),
|
||||
commands=info.get("commands", []),
|
||||
module=module
|
||||
)
|
||||
self.plugins[plugin.name] = plugin
|
||||
|
||||
# Register plugin commands
|
||||
if hasattr(module, "register_commands"):
|
||||
module.register_commands(self)
|
||||
|
||||
self.log(f"Loaded plugin: {plugin.name} v{plugin.version}")
|
||||
|
||||
def _register_core_commands(self):
|
||||
"""Register built-in commands"""
|
||||
self.commands["help"] = self.cmd_help
|
||||
self.commands["discover"] = self.cmd_discover
|
||||
self.commands["add"] = self.cmd_add
|
||||
self.commands["remove"] = self.cmd_remove
|
||||
self.commands["list"] = self.cmd_list
|
||||
self.commands["status"] = self.cmd_status
|
||||
self.commands["connect"] = self.cmd_connect
|
||||
self.commands["exec"] = self.cmd_exec
|
||||
self.commands["snapshot"] = self.cmd_snapshot
|
||||
self.commands["sync"] = self.cmd_sync
|
||||
self.commands["plugins"] = self.cmd_plugins
|
||||
self.commands["update"] = self.cmd_update
|
||||
self.commands["dashboard"] = self.cmd_dashboard
|
||||
|
||||
def register_command(self, name: str, handler: Callable, description: str = ""):
|
||||
"""Register a new command (for plugins)"""
|
||||
self.commands[name] = handler
|
||||
|
||||
def log(self, message: str, level: str = "INFO"):
|
||||
"""Log message"""
|
||||
timestamp = datetime.now().isoformat()
|
||||
line = f"[{timestamp}] [{level}] {message}"
|
||||
print(line)
|
||||
with open(LOG_FILE, "a") as f:
|
||||
f.write(line + "\n")
|
||||
|
||||
# =========================================================================
|
||||
# SSH Connection Management
|
||||
# =========================================================================
|
||||
def get_ssh(self, device: SecuBoxDevice):
|
||||
"""Get SSH connection to device"""
|
||||
try:
|
||||
import paramiko
|
||||
except ImportError:
|
||||
self.log("paramiko not installed. Run: pip install paramiko")
|
||||
return None
|
||||
|
||||
key = f"{device.host}:{device.port}"
|
||||
if key in self._ssh_connections:
|
||||
ssh = self._ssh_connections[key]
|
||||
if ssh.get_transport() and ssh.get_transport().is_active():
|
||||
return ssh
|
||||
|
||||
ssh = paramiko.SSHClient()
|
||||
ssh.set_missing_host_key_policy(paramiko.AutoAddPolicy())
|
||||
|
||||
try:
|
||||
# Try key-based auth first
|
||||
ssh.connect(
|
||||
device.host,
|
||||
port=device.port,
|
||||
username=device.user,
|
||||
timeout=10,
|
||||
look_for_keys=True,
|
||||
allow_agent=True
|
||||
)
|
||||
self._ssh_connections[key] = ssh
|
||||
return ssh
|
||||
except Exception as e:
|
||||
self.log(f"SSH connection failed to {device.name}: {e}")
|
||||
return None
|
||||
|
||||
def ssh_exec(self, device: SecuBoxDevice, command: str) -> tuple:
|
||||
"""Execute command via SSH"""
|
||||
ssh = self.get_ssh(device)
|
||||
if not ssh:
|
||||
return "", f"Cannot connect to {device.name}", 1
|
||||
|
||||
try:
|
||||
stdin, stdout, stderr = ssh.exec_command(command, timeout=60)
|
||||
exit_code = stdout.channel.recv_exit_status()
|
||||
return stdout.read().decode(), stderr.read().decode(), exit_code
|
||||
except Exception as e:
|
||||
return "", str(e), 1
|
||||
|
||||
# =========================================================================
|
||||
# Core Commands
|
||||
# =========================================================================
|
||||
def cmd_help(self, args: List[str] = None):
|
||||
"""Show help"""
|
||||
print("""
|
||||
╔══════════════════════════════════════════════════════════════════╗
|
||||
║ SecuBox Console - Remote Management Point ║
|
||||
╠══════════════════════════════════════════════════════════════════╣
|
||||
║ KISS modular self-enhancing architecture ║
|
||||
╚══════════════════════════════════════════════════════════════════╝
|
||||
|
||||
Commands:
|
||||
discover Scan network for SecuBox devices
|
||||
add <name> <host> Add device manually
|
||||
remove <name> Remove device
|
||||
list List all devices
|
||||
status [device] Show status (all or specific)
|
||||
connect <device> Interactive SSH to device
|
||||
exec <device> <cmd> Execute command on device
|
||||
snapshot <device> Create snapshot on device
|
||||
sync Sync all devices via mesh
|
||||
plugins List loaded plugins
|
||||
update Self-update from mesh
|
||||
dashboard Live dashboard (TUI)
|
||||
help Show this help
|
||||
""")
|
||||
if self.plugins:
|
||||
print("Plugin Commands:")
|
||||
for name, plugin in self.plugins.items():
|
||||
print(f" [{name}] {', '.join(plugin.commands)}")
|
||||
|
||||
def cmd_discover(self, args: List[str] = None):
|
||||
"""Discover SecuBox devices on network"""
|
||||
print("🔍 Discovering SecuBox devices...")
|
||||
|
||||
import socket
|
||||
discovered = []
|
||||
|
||||
# Scan common subnets
|
||||
subnets = ["192.168.255", "192.168.1", "10.0.0"]
|
||||
ports = [22, 80, 443, 7331] # SSH, HTTP, HTTPS, Mesh
|
||||
|
||||
def check_host(ip):
|
||||
for port in [22, 7331]:
|
||||
try:
|
||||
sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
|
||||
sock.settimeout(0.5)
|
||||
result = sock.connect_ex((ip, port))
|
||||
sock.close()
|
||||
if result == 0:
|
||||
return ip, port
|
||||
except:
|
||||
pass
|
||||
return None
|
||||
|
||||
with ThreadPoolExecutor(max_workers=50) as executor:
|
||||
for subnet in subnets:
|
||||
futures = []
|
||||
for i in range(1, 255):
|
||||
ip = f"{subnet}.{i}"
|
||||
futures.append(executor.submit(check_host, ip))
|
||||
|
||||
for future in futures:
|
||||
result = future.result()
|
||||
if result:
|
||||
ip, port = result
|
||||
print(f" Found: {ip}:{port}")
|
||||
discovered.append(ip)
|
||||
|
||||
# Check discovered hosts for SecuBox
|
||||
for ip in discovered:
|
||||
self._probe_device(ip)
|
||||
|
||||
print(f"\n✅ Discovery complete. Found {len(discovered)} potential devices.")
|
||||
|
||||
def _probe_device(self, host: str):
|
||||
"""Probe a host to check if it's SecuBox"""
|
||||
try:
|
||||
import httpx
|
||||
# Try mesh API
|
||||
r = httpx.get(f"http://{host}:7331/api/chain/tip", timeout=2)
|
||||
if r.status_code == 200:
|
||||
data = r.json()
|
||||
node_id = data.get("node", "")[:8]
|
||||
name = f"secubox-{node_id}" if node_id else f"secubox-{host.split('.')[-1]}"
|
||||
|
||||
if name not in self.devices:
|
||||
self.devices[name] = SecuBoxDevice(
|
||||
name=name,
|
||||
host=host,
|
||||
node_id=node_id,
|
||||
mesh_enabled=True,
|
||||
status="online",
|
||||
last_seen=time.time()
|
||||
)
|
||||
self._save_devices()
|
||||
print(f" ✅ Added: {name} (mesh node: {node_id})")
|
||||
except:
|
||||
pass
|
||||
|
||||
def cmd_add(self, args: List[str]):
|
||||
"""Add device: add <name> <host> [port] [user]"""
|
||||
if len(args) < 2:
|
||||
print("Usage: add <name> <host> [port] [user]")
|
||||
return
|
||||
|
||||
name, host = args[0], args[1]
|
||||
port = int(args[2]) if len(args) > 2 else 22
|
||||
user = args[3] if len(args) > 3 else "root"
|
||||
|
||||
self.devices[name] = SecuBoxDevice(
|
||||
name=name, host=host, port=port, user=user
|
||||
)
|
||||
self._save_devices()
|
||||
print(f"✅ Added device: {name} ({user}@{host}:{port})")
|
||||
|
||||
def cmd_remove(self, args: List[str]):
|
||||
"""Remove device"""
|
||||
if not args:
|
||||
print("Usage: remove <name>")
|
||||
return
|
||||
|
||||
name = args[0]
|
||||
if name in self.devices:
|
||||
del self.devices[name]
|
||||
self._save_devices()
|
||||
print(f"✅ Removed: {name}")
|
||||
else:
|
||||
print(f"❌ Device not found: {name}")
|
||||
|
||||
def cmd_list(self, args: List[str] = None):
|
||||
"""List all devices"""
|
||||
if not self.devices:
|
||||
print("No devices configured. Use 'discover' or 'add' to add devices.")
|
||||
return
|
||||
|
||||
print("\n📡 SecuBox Devices:")
|
||||
print("-" * 60)
|
||||
for name, dev in self.devices.items():
|
||||
status_icon = "🟢" if dev.status == "online" else "🔴"
|
||||
mesh_icon = "🔗" if dev.mesh_enabled else " "
|
||||
print(f" {status_icon} {mesh_icon} {name:20} {dev.host:15} {dev.version or 'unknown'}")
|
||||
print("-" * 60)
|
||||
|
||||
def cmd_status(self, args: List[str] = None):
|
||||
"""Show device status"""
|
||||
targets = [self.devices[args[0]]] if args and args[0] in self.devices else self.devices.values()
|
||||
|
||||
print("\n📊 Device Status:")
|
||||
print("=" * 70)
|
||||
|
||||
for dev in targets:
|
||||
print(f"\n🖥️ {dev.name} ({dev.host})")
|
||||
|
||||
# Probe device
|
||||
stdout, stderr, code = self.ssh_exec(dev, "cat /etc/secubox-version 2>/dev/null; uptime; free -m | head -2")
|
||||
if code == 0:
|
||||
dev.status = "online"
|
||||
dev.last_seen = time.time()
|
||||
lines = stdout.strip().split("\n")
|
||||
if lines:
|
||||
dev.version = lines[0] if lines[0] else "unknown"
|
||||
print(f" Status: 🟢 Online")
|
||||
print(f" Version: {dev.version}")
|
||||
for line in lines[1:]:
|
||||
print(f" {line}")
|
||||
else:
|
||||
dev.status = "offline"
|
||||
print(f" Status: 🔴 Offline")
|
||||
|
||||
self._save_devices()
|
||||
|
||||
def cmd_connect(self, args: List[str]):
|
||||
"""Interactive SSH connection"""
|
||||
if not args or args[0] not in self.devices:
|
||||
print("Usage: connect <device>")
|
||||
return
|
||||
|
||||
dev = self.devices[args[0]]
|
||||
os.system(f"ssh {dev.user}@{dev.host} -p {dev.port}")
|
||||
|
||||
def cmd_exec(self, args: List[str]):
|
||||
"""Execute command on device"""
|
||||
if len(args) < 2:
|
||||
print("Usage: exec <device> <command>")
|
||||
return
|
||||
|
||||
dev_name = args[0]
|
||||
command = " ".join(args[1:])
|
||||
|
||||
if dev_name not in self.devices:
|
||||
print(f"❌ Device not found: {dev_name}")
|
||||
return
|
||||
|
||||
dev = self.devices[dev_name]
|
||||
print(f"🔄 Executing on {dev.name}: {command}")
|
||||
|
||||
stdout, stderr, code = self.ssh_exec(dev, command)
|
||||
if stdout:
|
||||
print(stdout)
|
||||
if stderr:
|
||||
print(f"STDERR: {stderr}")
|
||||
print(f"Exit code: {code}")
|
||||
|
||||
def cmd_snapshot(self, args: List[str]):
|
||||
"""Create snapshot on device"""
|
||||
if not args or args[0] not in self.devices:
|
||||
print("Usage: snapshot <device> [name]")
|
||||
return
|
||||
|
||||
dev = self.devices[args[0]]
|
||||
name = args[1] if len(args) > 1 else f"remote-{datetime.now().strftime('%Y%m%d-%H%M%S')}"
|
||||
|
||||
print(f"📸 Creating snapshot on {dev.name}...")
|
||||
stdout, stderr, code = self.ssh_exec(dev, f"secubox-recover snapshot {name}")
|
||||
print(stdout)
|
||||
if code == 0:
|
||||
print(f"✅ Snapshot created: {name}")
|
||||
else:
|
||||
print(f"❌ Failed: {stderr}")
|
||||
|
||||
def cmd_sync(self, args: List[str] = None):
|
||||
"""Sync all devices via mesh"""
|
||||
print("🔄 Syncing mesh across all devices...")
|
||||
|
||||
for name, dev in self.devices.items():
|
||||
if not dev.mesh_enabled:
|
||||
continue
|
||||
|
||||
print(f" Syncing {name}...")
|
||||
stdout, stderr, code = self.ssh_exec(dev, "secubox-mesh sync")
|
||||
if code == 0:
|
||||
print(f" ✅ Synced")
|
||||
else:
|
||||
print(f" ❌ Failed")
|
||||
|
||||
def cmd_plugins(self, args: List[str] = None):
|
||||
"""List loaded plugins"""
|
||||
if not self.plugins:
|
||||
print("No plugins loaded.")
|
||||
return
|
||||
|
||||
print("\n🔌 Loaded Plugins:")
|
||||
for name, plugin in self.plugins.items():
|
||||
print(f" • {name} v{plugin.version}")
|
||||
print(f" {plugin.description}")
|
||||
print(f" Commands: {', '.join(plugin.commands)}")
|
||||
|
||||
def cmd_update(self, args: List[str] = None):
|
||||
"""Self-update from mesh"""
|
||||
print("🔄 Checking for updates...")
|
||||
|
||||
# Try to fetch latest version from mesh
|
||||
for dev in self.devices.values():
|
||||
if not dev.mesh_enabled:
|
||||
continue
|
||||
|
||||
try:
|
||||
import httpx
|
||||
r = httpx.get(f"http://{dev.host}:7331/api/catalog/console", timeout=5)
|
||||
if r.status_code == 200:
|
||||
data = r.json()
|
||||
remote_version = data.get("version", "0.0.0")
|
||||
if remote_version > CONSOLE_VERSION:
|
||||
print(f" New version available: {remote_version}")
|
||||
# Download and update
|
||||
# ... implementation
|
||||
else:
|
||||
print(f" Already up to date: {CONSOLE_VERSION}")
|
||||
return
|
||||
except:
|
||||
continue
|
||||
|
||||
print(" No updates found or mesh unavailable.")
|
||||
|
||||
def cmd_dashboard(self, args: List[str] = None):
|
||||
"""Live dashboard TUI"""
|
||||
try:
|
||||
from rich.console import Console
|
||||
from rich.table import Table
|
||||
from rich.live import Live
|
||||
from rich.panel import Panel
|
||||
from rich.layout import Layout
|
||||
except ImportError:
|
||||
print("Dashboard requires 'rich'. Install: pip install rich")
|
||||
return
|
||||
|
||||
console = Console()
|
||||
|
||||
def make_dashboard():
|
||||
layout = Layout()
|
||||
layout.split_column(
|
||||
Layout(name="header", size=3),
|
||||
Layout(name="main"),
|
||||
Layout(name="footer", size=3)
|
||||
)
|
||||
|
||||
# Header
|
||||
layout["header"].update(Panel(
|
||||
f"[bold cyan]SecuBox Console[/] v{CONSOLE_VERSION} | "
|
||||
f"Devices: {len(self.devices)} | "
|
||||
f"Plugins: {len(self.plugins)}",
|
||||
style="cyan"
|
||||
))
|
||||
|
||||
# Devices table
|
||||
table = Table(title="Devices", expand=True)
|
||||
table.add_column("Name", style="cyan")
|
||||
table.add_column("Host")
|
||||
table.add_column("Status")
|
||||
table.add_column("Version")
|
||||
table.add_column("Mesh")
|
||||
|
||||
for name, dev in self.devices.items():
|
||||
status = "[green]●[/]" if dev.status == "online" else "[red]●[/]"
|
||||
mesh = "[blue]🔗[/]" if dev.mesh_enabled else ""
|
||||
table.add_row(name, dev.host, status, dev.version or "-", mesh)
|
||||
|
||||
layout["main"].update(table)
|
||||
|
||||
# Footer
|
||||
layout["footer"].update(Panel(
|
||||
"[dim]q: quit | r: refresh | s: sync | d: discover[/]",
|
||||
style="dim"
|
||||
))
|
||||
|
||||
return layout
|
||||
|
||||
with Live(make_dashboard(), refresh_per_second=1, console=console) as live:
|
||||
import select
|
||||
import termios
|
||||
import tty
|
||||
|
||||
old_settings = termios.tcgetattr(sys.stdin)
|
||||
try:
|
||||
tty.setcbreak(sys.stdin.fileno())
|
||||
while True:
|
||||
if select.select([sys.stdin], [], [], 0.5)[0]:
|
||||
key = sys.stdin.read(1)
|
||||
if key == 'q':
|
||||
break
|
||||
elif key == 'r':
|
||||
self.cmd_status()
|
||||
elif key == 's':
|
||||
self.cmd_sync()
|
||||
elif key == 'd':
|
||||
self.cmd_discover()
|
||||
live.update(make_dashboard())
|
||||
finally:
|
||||
termios.tcsetattr(sys.stdin, termios.TCSADRAIN, old_settings)
|
||||
|
||||
# =========================================================================
|
||||
# Main Entry Point
|
||||
# =========================================================================
|
||||
def run(self, args: List[str] = None):
|
||||
"""Main entry point"""
|
||||
if not args:
|
||||
args = sys.argv[1:]
|
||||
|
||||
if not args:
|
||||
self.cmd_dashboard()
|
||||
return
|
||||
|
||||
cmd = args[0]
|
||||
cmd_args = args[1:]
|
||||
|
||||
if cmd in self.commands:
|
||||
self.commands[cmd](cmd_args)
|
||||
elif cmd in self.devices:
|
||||
# Shortcut: device name as first arg -> exec on that device
|
||||
if cmd_args:
|
||||
self.cmd_exec([cmd] + cmd_args)
|
||||
else:
|
||||
self.cmd_status([cmd])
|
||||
else:
|
||||
print(f"Unknown command: {cmd}")
|
||||
self.cmd_help()
|
||||
|
||||
|
||||
# ============================================================================
|
||||
# Entry Point
|
||||
# ============================================================================
|
||||
def main():
|
||||
console = SecuBoxConsole()
|
||||
console.run()
|
||||
|
||||
|
||||
if __name__ == "__main__":
|
||||
main()
|
||||
51
package/secubox/secubox-core/root/etc/init.d/secubox-mesh
Normal file
51
package/secubox/secubox-core/root/etc/init.d/secubox-mesh
Normal file
@ -0,0 +1,51 @@
|
||||
#!/bin/sh /etc/rc.common
|
||||
# SecuBox P2P Mesh Daemon
|
||||
# Background synchronization of distributed recovery infrastructure
|
||||
|
||||
START=95
|
||||
STOP=10
|
||||
USE_PROCD=1
|
||||
|
||||
MESH_BIN="/usr/sbin/secubox-mesh"
|
||||
MESH_LIB="/usr/lib/secubox/p2p-mesh.sh"
|
||||
PID_FILE="/var/run/secubox-mesh.pid"
|
||||
SYNC_INTERVAL=300 # 5 minutes
|
||||
|
||||
start_service() {
|
||||
# Initialize mesh if needed
|
||||
$MESH_BIN init
|
||||
|
||||
procd_open_instance
|
||||
procd_set_param command /bin/sh -c "
|
||||
while true; do
|
||||
$MESH_BIN sync 2>/dev/null
|
||||
$MESH_BIN discover 2>/dev/null
|
||||
sleep $SYNC_INTERVAL
|
||||
done
|
||||
"
|
||||
procd_set_param respawn
|
||||
procd_set_param pidfile $PID_FILE
|
||||
procd_close_instance
|
||||
|
||||
# Start API server
|
||||
$MESH_BIN api &
|
||||
}
|
||||
|
||||
stop_service() {
|
||||
killall -q secubox-mesh 2>/dev/null
|
||||
}
|
||||
|
||||
reload_service() {
|
||||
$MESH_BIN sync
|
||||
}
|
||||
|
||||
status() {
|
||||
if [ -f "$PID_FILE" ] && kill -0 $(cat "$PID_FILE") 2>/dev/null; then
|
||||
echo "SecuBox Mesh: running"
|
||||
echo "Node ID: $(cat /srv/secubox/mesh/node.id 2>/dev/null)"
|
||||
echo "Peers: $(cat /srv/secubox/mesh/peers.json 2>/dev/null | jsonfilter -e '@.peers[*]' | wc -l)"
|
||||
echo "Blocks: $(find /srv/secubox/mesh/blocks -type f 2>/dev/null | wc -l)"
|
||||
else
|
||||
echo "SecuBox Mesh: stopped"
|
||||
fi
|
||||
}
|
||||
592
package/secubox/secubox-core/root/usr/lib/secubox/p2p-mesh.sh
Normal file
592
package/secubox/secubox-core/root/usr/lib/secubox/p2p-mesh.sh
Normal file
@ -0,0 +1,592 @@
|
||||
#!/bin/sh
# SecuBox P2P Mesh - Distributed Recovery Infrastructure
# Multipoint hosting with synchronized catalogs via blockchain-style data blocks
# Copyright 2026 CyberMind - Licensed under MIT

# ----------------------------------------------------------------------------
# Configuration
# ----------------------------------------------------------------------------
MESH_DIR="/srv/secubox/mesh"              # root of all mesh state
BLOCKS_DIR="$MESH_DIR/blocks"             # content-addressed block store
CATALOG_DIR="$MESH_DIR/catalog"           # per-type item catalogs
PEERS_FILE="$MESH_DIR/peers.json"         # known peer list
CHAIN_FILE="$MESH_DIR/chain.json"         # append-only chain of blocks
NODE_ID_FILE="$MESH_DIR/node.id"          # this node's 16-hex-char identity
SYNC_LOCK="/tmp/secubox-mesh-sync.lock"   # guards concurrent sync runs
MESH_PORT="${MESH_PORT:-7331}"            # HTTP API port (env-overridable)
DISCOVERY_PORT="${DISCOVERY_PORT:-7332}"  # UDP broadcast discovery port
|
||||
|
||||
# ============================================================================
|
||||
# Initialization
|
||||
# ============================================================================
|
||||
mesh_init() {
	# Create the on-disk layout every other mesh command relies on.
	mkdir -p "$BLOCKS_DIR" "$CATALOG_DIR" "$MESH_DIR/snapshots" "$MESH_DIR/tmp"

	# First run: derive a 16-hex-char identity from a kernel-provided UUID.
	if [ ! -f "$NODE_ID_FILE" ]; then
		node_id=$(cat /proc/sys/kernel/random/uuid | tr -d '-' | head -c 16)
		echo "$node_id" > "$NODE_ID_FILE"
	fi

	# Start with an empty peer list.
	[ ! -f "$PEERS_FILE" ] && echo '{"peers":[],"last_sync":0}' > "$PEERS_FILE"

	# Seed the chain with a time-derived genesis block.
	if [ ! -f "$CHAIN_FILE" ]; then
		genesis_hash=$(echo "secubox-genesis-$(date +%s)" | sha256sum | cut -d' ' -f1)
		cat > "$CHAIN_FILE" << EOF
{
  "version": 1,
  "genesis": "$genesis_hash",
  "blocks": [
    {
      "index": 0,
      "timestamp": $(date +%s),
      "type": "genesis",
      "hash": "$genesis_hash",
      "prev_hash": "0000000000000000000000000000000000000000000000000000000000000000",
      "data": {"node": "$(cat $NODE_ID_FILE)", "created": "$(date -Iseconds)"}
    }
  ]
}
EOF
	fi

	echo "Mesh initialized: node=$(cat $NODE_ID_FILE)"
}
|
||||
|
||||
# ============================================================================
|
||||
# Content-Addressed Block Storage
|
||||
# ============================================================================
|
||||
# ----------------------------------------------------------------------------
# Content-Addressed Block Storage
# Blocks live under $BLOCKS_DIR/aa/bb/<hash> (two-level fan-out keeps
# directories small).
# ----------------------------------------------------------------------------

block_hash() {
	# Emit the SHA256 hex digest of stdin.
	sha256sum | cut -d' ' -f1
}

block_store() {
	# Persist a string under its own hash; prints the hash.
	local data="$1"
	local sum=$(echo "$data" | block_hash)
	local dir="$BLOCKS_DIR/${sum:0:2}/${sum:2:2}"

	mkdir -p "$dir"
	echo "$data" > "$dir/$sum"
	echo "$sum"
}

block_store_file() {
	# Persist a file's contents under their hash; prints the hash.
	local src="$1"
	local sum=$(cat "$src" | block_hash)
	local dir="$BLOCKS_DIR/${sum:0:2}/${sum:2:2}"

	mkdir -p "$dir"
	cp "$src" "$dir/$sum"
	echo "$sum"
}

block_get() {
	# Write a stored block to stdout; non-zero status when absent.
	local sum="$1"
	local path="$BLOCKS_DIR/${sum:0:2}/${sum:2:2}/$sum"

	if [ -f "$path" ]; then
		cat "$path"
		return 0
	fi
	return 1
}

block_exists() {
	# Succeeds iff a block with this hash is already stored locally.
	local sum="$1"
	[ -f "$BLOCKS_DIR/${sum:0:2}/${sum:2:2}/$sum" ]
}
|
||||
|
||||
# ============================================================================
|
||||
# Blockchain Chain Management
|
||||
# ============================================================================
|
||||
# ----------------------------------------------------------------------------
# Blockchain Chain Management
# ----------------------------------------------------------------------------

chain_get_last() {
	# Print the most recent block object.
	jsonfilter -i "$CHAIN_FILE" -e '@.blocks[-1]'
}

chain_get_hash() {
	# Print the hash of the chain tip.
	jsonfilter -i "$CHAIN_FILE" -e '@.blocks[-1].hash'
}

chain_add_block() {
	# Append a block referencing the current tip; echoes the block hash.
	#   $1 = block type, $2 = JSON data fragment, $3 = content hash
	local block_type="$1"
	local block_data="$2"
	local block_hash="$3"

	local prev_hash=$(chain_get_hash)
	local index=$(jsonfilter -i "$CHAIN_FILE" -e '@.blocks[*]' | wc -l)
	local timestamp=$(date +%s)
	local node_id=$(cat "$NODE_ID_FILE")

	# Single-line record, ready for appending into the compact chain file.
	local block_record="{\"index\":$index,\"timestamp\":$timestamp,\"type\":\"$block_type\",\"hash\":\"$block_hash\",\"prev_hash\":\"$prev_hash\",\"node\":\"$node_id\",\"data\":$block_data}"

	# Atomic append via a temp file: re-serialize the chain compactly, strip
	# the closing "]}", then re-close with the new record in place.
	# FIX: the original stripped only a trailing "]" — which never matches
	# jsonfilter's compact output (it ends in "]}") — so every append
	# produced a corrupt chain file.
	local tmp_chain="$MESH_DIR/tmp/chain_$$.json"
	jsonfilter -i "$CHAIN_FILE" -e '@' | sed 's/]}$//' > "$tmp_chain"
	echo ",$block_record]}" >> "$tmp_chain"
	mv "$tmp_chain" "$CHAIN_FILE"

	echo "$block_hash"
}
|
||||
|
||||
chain_verify() {
	# Walk the chain and check each block's prev_hash links to the previous
	# block's hash.  Returns 0 when the chain is intact, 1 when broken.
	#
	# FIX: the original set a "valid" flag inside a piped while-loop — a
	# subshell, so the flag never escaped — and then did `return $valid`,
	# which returned 1 (= shell failure) for a VALID chain.  The result is
	# now propagated through the subshell's exit status instead.
	jsonfilter -i "$CHAIN_FILE" -e '@.blocks[*]' | {
		prev_hash="0000000000000000000000000000000000000000000000000000000000000000"
		while read -r block; do
			block_prev=$(echo "$block" | jsonfilter -e '@.prev_hash')
			if [ "$block_prev" != "$prev_hash" ]; then
				echo "Chain broken at block $(echo "$block" | jsonfilter -e '@.index')"
				exit 1
			fi
			prev_hash=$(echo "$block" | jsonfilter -e '@.hash')
		done
		exit 0
	}
}
|
||||
|
||||
# ============================================================================
|
||||
# Catalog Management
|
||||
# ============================================================================
|
||||
# ----------------------------------------------------------------------------
# Catalog Management
# ----------------------------------------------------------------------------

catalog_init() {
	# Create an empty catalog file for the given type if one is absent.
	local cat_type="$1"  # apps, profiles, snapshots, patches
	local cat_file="$CATALOG_DIR/${cat_type}.json"

	[ ! -f "$cat_file" ] && cat > "$cat_file" << EOF
{
  "type": "$cat_type",
  "version": 1,
  "updated": $(date +%s),
  "items": []
}
EOF
}

catalog_add() {
	# Prepend an item to a catalog and record it on the chain.
	#   $1 = catalog type, $2 = item id, $3 = JSON data, $4 = block hash
	local cat_type="$1"
	local item_id="$2"
	local item_data="$3"
	local block_hash="$4"

	local cat_file="$CATALOG_DIR/${cat_type}.json"
	catalog_init "$cat_type"

	local timestamp=$(date +%s)
	local node_id=$(cat "$NODE_ID_FILE")

	# FIX: the entry must be a SINGLE line.  The original built it with a
	# multi-line heredoc and interpolated it into a sed replacement; sed
	# rejects raw newlines in replacements ("unterminated `s' command"),
	# so catalog updates always failed.  This now matches peer_add's style.
	# NOTE(review): the sed approach still breaks if $item_data contains
	# '/' or '&' — consider jq when available.
	local entry="{\"id\":\"$item_id\",\"hash\":\"$block_hash\",\"node\":\"$node_id\",\"timestamp\":$timestamp,\"data\":$item_data}"

	local tmp_cat="$MESH_DIR/tmp/cat_$$.json"
	jsonfilter -i "$cat_file" -e '@' | \
		sed "s/\"items\":\[/\"items\":[$entry,/" | \
		sed "s/\"updated\":[0-9]*/\"updated\":$timestamp/" > "$tmp_cat"
	mv "$tmp_cat" "$cat_file"

	# Record the catalog change on the blockchain.
	chain_add_block "catalog_$cat_type" "{\"id\":\"$item_id\",\"hash\":\"$block_hash\"}" "$block_hash"
}

catalog_list() {
	# Print the ids of every item in the named catalog.
	local cat_type="$1"
	local cat_file="$CATALOG_DIR/${cat_type}.json"

	[ -f "$cat_file" ] && jsonfilter -i "$cat_file" -e '@.items[*].id'
}

catalog_get() {
	# Print the catalog entry with the given id.
	local cat_type="$1"
	local item_id="$2"
	local cat_file="$CATALOG_DIR/${cat_type}.json"

	[ -f "$cat_file" ] && jsonfilter -i "$cat_file" -e "@.items[@.id='$item_id']"
}
|
||||
|
||||
# ============================================================================
|
||||
# Peer Management
|
||||
# ============================================================================
|
||||
# ----------------------------------------------------------------------------
# Peer Management
# ----------------------------------------------------------------------------

peer_add() {
	# Register a peer unless its address is already present.
	local addr="$1"
	local port="${2:-$MESH_PORT}"
	local id="${3:-unknown}"

	local now=$(date +%s)
	local scratch="$MESH_DIR/tmp/peers_$$.json"

	# Duplicate check is a plain substring match on the quoted address.
	if grep -q "\"$addr\"" "$PEERS_FILE" 2>/dev/null; then
		echo "Peer already exists: $addr"
		return 1
	fi

	# Prepend the new entry to the peers array.
	local entry="{\"id\":\"$id\",\"addr\":\"$addr\",\"port\":$port,\"added\":$now,\"last_seen\":$now,\"status\":\"pending\"}"

	jsonfilter -i "$PEERS_FILE" -e '@' | \
		sed "s/\"peers\":\[/\"peers\":[$entry,/" > "$scratch"
	mv "$scratch" "$PEERS_FILE"

	echo "Added peer: $addr:$port"
}

peer_list() {
	# One JSON object per line, one per known peer.
	jsonfilter -i "$PEERS_FILE" -e '@.peers[*]' 2>/dev/null
}

peer_update_status() {
	# Mark a peer online/offline and refresh its last_seen timestamp.
	local addr="$1"
	local status="$2"
	local now=$(date +%s)

	# In-place regex edit of the JSON (simplified - would use jq in production).
	# NOTE(review): greedy match between addr and status could clobber
	# neighbouring fields if the peer record layout changes — verify.
	sed -i "s/\"addr\":\"$addr\".*\"status\":\"[^\"]*\"/\"addr\":\"$addr\",\"last_seen\":$now,\"status\":\"$status\"/" "$PEERS_FILE"
}
|
||||
|
||||
# ============================================================================
|
||||
# P2P Discovery & Sync
|
||||
# ============================================================================
|
||||
# ----------------------------------------------------------------------------
# P2P Discovery & Sync
# ----------------------------------------------------------------------------

discover_peers() {
	# Announce this node on the local network so peers can find us.
	local me=$(cat "$NODE_ID_FILE")
	local announce="SECUBOX-MESH:DISCOVER:$me:$MESH_PORT"

	# UDP broadcast (requires socat or nc)
	if command -v socat >/dev/null; then
		echo "$announce" | socat - UDP-DATAGRAM:255.255.255.255:$DISCOVERY_PORT,broadcast
	fi

	# Advertise over mDNS as well when avahi is present.
	if command -v avahi-publish >/dev/null; then
		avahi-publish -s "secubox-mesh-$me" _secubox._tcp $MESH_PORT &
	fi
}
|
||||
|
||||
sync_with_peer() {
	# Pull any chain blocks this node is missing from one peer over HTTP.
	local host="$1"
	local port="${2:-$MESH_PORT}"

	echo "Syncing with peer: $host:$port"

	# Probe the peer's chain tip; an empty reply means it is unreachable.
	local peer_chain=$(curl -s --connect-timeout 5 "http://$host:$port/api/chain/tip" 2>/dev/null)
	if [ -z "$peer_chain" ]; then
		peer_update_status "$host" "offline"
		return 1
	fi

	peer_update_status "$host" "online"

	local peer_hash=$(echo "$peer_chain" | jsonfilter -e '@.hash' 2>/dev/null)
	local local_hash=$(chain_get_hash)

	if [ "$peer_hash" = "$local_hash" ]; then
		echo "Already in sync with $host"
		return 0
	fi

	# Ask the peer for every block after our tip, then fetch each block
	# body we do not yet hold locally.
	local missing=$(curl -s "http://$host:$port/api/chain/since/$local_hash" 2>/dev/null)
	if [ -n "$missing" ]; then
		echo "$missing" | jsonfilter -e '@[*]' | while read block; do
			local block_hash=$(echo "$block" | jsonfilter -e '@.hash')
			local block_type=$(echo "$block" | jsonfilter -e '@.type')

			if ! block_exists "$block_hash"; then
				curl -s "http://$host:$port/api/block/$block_hash" -o "$MESH_DIR/tmp/$block_hash"
				if [ -f "$MESH_DIR/tmp/$block_hash" ]; then
					block_store_file "$MESH_DIR/tmp/$block_hash"
					rm "$MESH_DIR/tmp/$block_hash"
				fi
			fi
		done

		echo "Synced $(echo "$missing" | jsonfilter -e '@[*]' | wc -l) blocks from $host"
	fi
}

sync_all_peers() {
	# Serialize sync runs with a simple lock file.
	# NOTE(review): a crashed run leaves the lock behind and silently blocks
	# all future syncs until /tmp is cleared — consider a staleness check.
	[ -f "$SYNC_LOCK" ] && return 1
	touch "$SYNC_LOCK"

	peer_list | while read peer; do
		addr=$(echo "$peer" | jsonfilter -e '@.addr')
		port=$(echo "$peer" | jsonfilter -e '@.port')
		sync_with_peer "$addr" "$port"
	done

	rm -f "$SYNC_LOCK"
}
|
||||
|
||||
# ============================================================================
|
||||
# Snapshot & Recovery
|
||||
# ============================================================================
|
||||
# ----------------------------------------------------------------------------
# Snapshot & Recovery
# ----------------------------------------------------------------------------

snapshot_create() {
	# Bundle configs + SecuBox data into a tarball, store it as a block,
	# and register it in the snapshots catalog.  Last line printed is the hash.
	local name="${1:-snapshot-$(date +%Y%m%d-%H%M%S)}"
	local work="$MESH_DIR/snapshots/$name"

	mkdir -p "$work"

	echo "Creating snapshot: $name"

	# System configuration
	cp -a /etc/config "$work/config" 2>/dev/null

	# Installed package inventory
	opkg list-installed > "$work/packages.list"

	# SecuBox-specific state
	cp -a /srv/secubox "$work/secubox-data" 2>/dev/null
	cp -a /opt/haproxy "$work/haproxy" 2>/dev/null

	# Describe the snapshot for the catalog.
	cat > "$work/manifest.json" << EOF
{
  "name": "$name",
  "created": "$(date -Iseconds)",
  "node": "$(cat $NODE_ID_FILE)",
  "hostname": "$(uci get system.@system[0].hostname 2>/dev/null)",
  "version": "$(cat /etc/secubox-version 2>/dev/null || echo unknown)",
  "files": $(find "$work" -type f | wc -l)
}
EOF

	# Archive, hash into the block store.
	tar -czf "$work.tar.gz" -C "$MESH_DIR/snapshots" "$name"
	local hash=$(block_store_file "$work.tar.gz")

	# Register in the snapshots catalog (also records a chain block).
	catalog_add "snapshots" "$name" "$(cat $work/manifest.json)" "$hash"

	# Drop the unpacked staging directory.
	rm -rf "$work"

	echo "Snapshot created: $name (hash: $hash)"
	echo "$hash"
}
|
||||
|
||||
snapshot_restore() {
	# Restore configs and SecuBox data from a stored snapshot.
	# Accepts either a 64-char block hash or a catalog snapshot name.
	local key="$1"

	local hash=""
	if [ ${#key} -eq 64 ]; then
		hash="$key"
	else
		hash=$(catalog_get "snapshots" "$key" | jsonfilter -e '@.hash')
	fi

	if [ -z "$hash" ] || ! block_exists "$hash"; then
		echo "Snapshot not found: $key"
		return 1
	fi

	echo "Restoring snapshot: $hash"

	# Unpack the archived snapshot into a scratch directory.
	local scratch="$MESH_DIR/tmp/restore_$$"
	mkdir -p "$scratch"
	block_get "$hash" | tar -xzf - -C "$scratch"

	# System configuration
	cp -a "$scratch"/*/config/* /etc/config/ 2>/dev/null

	# SecuBox state
	cp -a "$scratch"/*/secubox-data/* /srv/secubox/ 2>/dev/null

	# Package list is informational only; reinstall is left to the operator.
	if [ -f "$scratch"/*/packages.list ]; then
		echo "Package list available at: $scratch/*/packages.list"
	fi

	rm -rf "$scratch"
	echo "Snapshot restored"
}
|
||||
|
||||
# ============================================================================
|
||||
# Bootstrap / Reborn Script
|
||||
# ============================================================================
|
||||
# ----------------------------------------------------------------------------
# Bootstrap / Reborn Script
# ----------------------------------------------------------------------------

generate_reborn_script() {
	# Emit a self-contained bootstrap script that can re-download and
	# restore a fresh snapshot from any seed peer.
	#   $1 = output path (default /tmp/secubox-reborn.sh)
	local output="${1:-/tmp/secubox-reborn.sh}"

	# FIX: snapshot_create prints progress lines before the final bare hash,
	# so capture only the LAST line of its output.  The original captured
	# everything, yielding a multi-line "hash" that broke the sed below.
	local snapshot_hash=$(snapshot_create "reborn-$(date +%Y%m%d)" | tail -n 1)

	# Quoted heredoc: placeholders stay literal and are filled in afterwards.
	cat > "$output" << 'REBORN_SCRIPT'
#!/bin/sh
# SecuBox Reborn Script - Self-Revival Bootstrap
# Generated: TIMESTAMP
# Snapshot: SNAPSHOT_HASH

MESH_SEED="SEED_PEERS"
SNAPSHOT_HASH="SNAPSHOT_HASH"

echo "=== SecuBox Reborn ==="
echo "Restoring from distributed mesh..."

# Install dependencies
opkg update
opkg install curl tar gzip

# Bootstrap mesh
mkdir -p /srv/secubox/mesh
cd /srv/secubox/mesh

# Try to fetch snapshot from mesh peers
for peer in $MESH_SEED; do
    echo "Trying peer: $peer"
    if curl -sf "http://$peer/api/block/$SNAPSHOT_HASH" -o snapshot.tar.gz; then
        echo "Downloaded from $peer"
        break
    fi
done

if [ ! -f snapshot.tar.gz ]; then
    echo "Failed to download snapshot from mesh"
    exit 1
fi

# Verify hash
actual_hash=$(sha256sum snapshot.tar.gz | cut -d' ' -f1)
if [ "$actual_hash" != "$SNAPSHOT_HASH" ]; then
    echo "Hash mismatch! Expected: $SNAPSHOT_HASH, Got: $actual_hash"
    exit 1
fi

# Extract and restore
tar -xzf snapshot.tar.gz
./restore.sh

echo "=== SecuBox Reborn Complete ==="
REBORN_SCRIPT

	# Substitute the placeholders with concrete values.
	sed -i "s/TIMESTAMP/$(date -Iseconds)/" "$output"
	sed -i "s/SNAPSHOT_HASH/$snapshot_hash/g" "$output"

	# Seed peer addresses, space-separated on one line.
	local peers=$(peer_list | jsonfilter -e '@.addr' | tr '\n' ' ')
	sed -i "s/SEED_PEERS/$peers/" "$output"

	chmod +x "$output"
	echo "Reborn script generated: $output"
}
|
||||
|
||||
# ============================================================================
|
||||
# HTTP API Server (for peer communication)
|
||||
# ============================================================================
|
||||
# ----------------------------------------------------------------------------
# HTTP API Server (for peer communication)
# ----------------------------------------------------------------------------

start_api_server() {
	# One-shot setup: install CGI endpoints under /www/secubox-mesh/api/.
	# This does not start a daemon; uhttpd serves the CGI scripts.
	local port="${1:-$MESH_PORT}"

	mkdir -p /www/secubox-mesh/api

	# Endpoint: full chain document.
	cat > /www/secubox-mesh/api/chain << 'API'
#!/bin/sh
echo "Content-Type: application/json"
echo ""
cat /srv/secubox/mesh/chain.json
API

	# Endpoint: raw block by hash (taken from the query string).
	cat > /www/secubox-mesh/api/block << 'API'
#!/bin/sh
echo "Content-Type: application/octet-stream"
echo ""
hash="${QUERY_STRING##*/}"
cat "/srv/secubox/mesh/blocks/${hash:0:2}/${hash:2:2}/$hash" 2>/dev/null
API

	chmod +x /www/secubox-mesh/api/*

	echo "API endpoints available at /secubox-mesh/api/"
}
|
||||
|
||||
# ============================================================================
|
||||
# Main Commands
|
||||
# ============================================================================
|
||||
# ----------------------------------------------------------------------------
# Main Commands
# ----------------------------------------------------------------------------

usage() {
	echo "SecuBox P2P Mesh - Distributed Recovery Infrastructure"
	echo ""
	echo "Usage: $0 <command> [args]"
	echo ""
	echo "Commands:"
	echo "  init              Initialize mesh node"
	echo "  peer-add <ip>     Add peer node"
	echo "  peer-list         List known peers"
	echo "  discover          Broadcast discovery"
	echo "  sync              Sync with all peers"
	echo "  snapshot [name]   Create snapshot"
	echo "  restore <hash>    Restore from snapshot"
	echo "  reborn [file]     Generate reborn script"
	echo "  catalog <type>    List catalog (apps|profiles|snapshots)"
	echo "  chain             Show blockchain"
	echo "  verify            Verify chain integrity"
	echo "  api [port]        Start API server"
}

case "$1" in
	init)      mesh_init ;;
	peer-add)  peer_add "$2" "$3" ;;
	peer-list) peer_list ;;
	discover)  discover_peers ;;
	sync)      sync_all_peers ;;
	snapshot)  snapshot_create "$2" ;;
	restore)   snapshot_restore "$2" ;;
	reborn)    generate_reborn_script "$2" ;;
	catalog)   catalog_list "$2" ;;
	chain)     cat "$CHAIN_FILE" ;;
	verify)    chain_verify && echo "Chain valid" || echo "Chain invalid" ;;
	api)       start_api_server "$2" ;;
	*)         usage ;;
esac
|
||||
11
package/secubox/secubox-core/root/usr/sbin/secubox-mesh
Normal file
11
package/secubox/secubox-core/root/usr/sbin/secubox-mesh
Normal file
@ -0,0 +1,11 @@
|
||||
#!/bin/sh
# SecuBox Mesh - Main CLI
# Distributed P2P recovery infrastructure

MESH_LIB="/usr/lib/secubox/p2p-mesh.sh"

# Fail loudly when the library is missing instead of exec-ing into nothing.
if [ ! -f "$MESH_LIB" ]; then
	echo "secubox-mesh: library not found: $MESH_LIB" >&2
	exit 1
fi

# FIX: the original first SOURCED the library — whose top-level `case "$1"`
# dispatcher then ran with our positional parameters — and afterwards exec'd
# it again, so every command executed TWICE.  Forward exactly once.
exec /bin/sh "$MESH_LIB" "$@"
|
||||
628
package/secubox/secubox-core/root/usr/sbin/secubox-recover
Normal file
628
package/secubox/secubox-core/root/usr/sbin/secubox-recover
Normal file
@ -0,0 +1,628 @@
|
||||
#!/bin/sh
# SecuBox Recover - Self-Revival & Recovery System
# Full backup/restore with distributed mesh support
# Copyright 2026 CyberMind

set -e

# ----------------------------------------------------------------------------
# Configuration
# ----------------------------------------------------------------------------
RECOVER_DIR="/srv/secubox/recover"
ARCHIVE_DIR="$RECOVER_DIR/archives"       # snapshot tarballs
CATALOG_FILE="$RECOVER_DIR/catalog.json"  # apps/profiles/snapshots index
HISTORY_FILE="$RECOVER_DIR/history.json"  # action audit trail
PROFILES_DIR="$RECOVER_DIR/profiles"      # saved configuration profiles
APPS_DIR="$RECOVER_DIR/apps"              # package catalogs / export lists

# Mesh integration (optional; used when the mesh CLI is installed)
MESH_BIN="/usr/sbin/secubox-mesh"
MESH_ENABLED=1

# ANSI colors for console output
RED='\033[0;31m'
GREEN='\033[0;32m'
YELLOW='\033[1;33m'
CYAN='\033[0;36m'
NC='\033[0m'

# Logging helpers.  NOTE(review): `echo -e` assumes busybox/bash semantics;
# a strict POSIX sh would print the "-e" — confirm target shell.
log() { echo -e "${CYAN}[RECOVER]${NC} $1"; }
success() { echo -e "${GREEN}[OK]${NC} $1"; }
warn() { echo -e "${YELLOW}[WARN]${NC} $1"; }
error() { echo -e "${RED}[ERROR]${NC} $1"; }
|
||||
|
||||
# ============================================================================
|
||||
# Initialization
|
||||
# ============================================================================
|
||||
# ----------------------------------------------------------------------------
# Initialization
# ----------------------------------------------------------------------------
init() {
	# Create the recovery tree and seed the catalog/history files once.
	mkdir -p "$ARCHIVE_DIR" "$PROFILES_DIR" "$APPS_DIR" "$RECOVER_DIR/tmp"

	# Catalog: per-type item arrays, tagged with this node's identity.
	[ ! -f "$CATALOG_FILE" ] && cat > "$CATALOG_FILE" << EOF
{
  "version": 1,
  "node": "$(cat /srv/secubox/mesh/node.id 2>/dev/null || hostname)",
  "created": "$(date -Iseconds)",
  "apps": [],
  "profiles": [],
  "snapshots": []
}
EOF

	# History: empty audit trail.
	[ ! -f "$HISTORY_FILE" ] && cat > "$HISTORY_FILE" << EOF
{
  "version": 1,
  "entries": []
}
EOF

	log "Recovery system initialized"
}
|
||||
|
||||
# ============================================================================
|
||||
# Full System Snapshot
|
||||
# ============================================================================
|
||||
# ----------------------------------------------------------------------------
# Full System Snapshot
# ----------------------------------------------------------------------------
snapshot_full() {
	# Build a complete, self-restoring system snapshot tarball:
	# configs, SecuBox data, package lists, network state, certs,
	# plus an embedded restore.sh and manifest.
	local name="${1:-full-$(date +%Y%m%d-%H%M%S)}"
	local archive="$ARCHIVE_DIR/$name.tar.gz"

	log "Creating full system snapshot: $name"

	local work="$RECOVER_DIR/tmp/$name"
	mkdir -p "$work"

	# 1. System configs
	log " Backing up system configs..."
	cp -a /etc/config "$work/etc-config" 2>/dev/null || true

	# 2. SecuBox specific
	log " Backing up SecuBox data..."
	mkdir -p "$work/secubox"
	cp -a /srv/secubox "$work/secubox/srv" 2>/dev/null || true
	cp -a /opt/haproxy "$work/secubox/haproxy" 2>/dev/null || true
	cp -a /opt/containers "$work/secubox/containers" 2>/dev/null || true

	# 3. Installed packages
	log " Recording installed packages..."
	opkg list-installed > "$work/packages.list"
	# NOTE(review): "list-user-installed" is not a stock opkg subcommand on
	# all builds; the fallback to true hides that — confirm.
	opkg list-user-installed > "$work/packages-user.list" 2>/dev/null || true

	# 4. Network config
	log " Backing up network state..."
	ip addr > "$work/ip-addr.txt" 2>/dev/null || true
	ip route > "$work/ip-route.txt" 2>/dev/null || true

	# 5. Service states
	log " Recording service states..."
	for svc in /etc/init.d/*; do
		[ -x "$svc" ] && echo "$(basename $svc): $($svc enabled && echo enabled || echo disabled)" >> "$work/services.txt"
	done 2>/dev/null || true

	# 6. Crontabs
	cp -a /etc/crontabs "$work/crontabs" 2>/dev/null || true

	# 7. SSL certificates
	log " Backing up certificates..."
	mkdir -p "$work/certs"
	cp -a /etc/ssl "$work/certs/etc-ssl" 2>/dev/null || true
	cp -a /opt/haproxy/certs "$work/certs/haproxy" 2>/dev/null || true
	cp -a /srv/mitmproxy/*.pem "$work/certs/mitmproxy" 2>/dev/null || true

	# 8. Manifest describing the snapshot
	cat > "$work/manifest.json" << EOF
{
  "name": "$name",
  "type": "full",
  "created": "$(date -Iseconds)",
  "hostname": "$(uci get system.@system[0].hostname 2>/dev/null || hostname)",
  "version": "$(cat /etc/secubox-version 2>/dev/null || echo unknown)",
  "openwrt": "$(cat /etc/openwrt_release | grep DISTRIB_RELEASE | cut -d= -f2 | tr -d "'")",
  "files": $(find "$work" -type f | wc -l),
  "size_bytes": $(du -sb "$work" | cut -f1)
}
EOF

	# 9. Embedded restore script (quoted heredoc: taken literally)
	cat > "$work/restore.sh" << 'RESTORE'
#!/bin/sh
# SecuBox Snapshot Restore Script
echo "=== SecuBox Snapshot Restore ==="
echo "Restoring from: $(cat manifest.json | jsonfilter -e '@.name')"

# Restore configs
echo "Restoring configs..."
cp -a etc-config/* /etc/config/ 2>/dev/null

# Restore SecuBox data
echo "Restoring SecuBox data..."
cp -a secubox/srv/* /srv/secubox/ 2>/dev/null
cp -a secubox/haproxy/* /opt/haproxy/ 2>/dev/null

# Restore certs
echo "Restoring certificates..."
cp -a certs/etc-ssl/* /etc/ssl/ 2>/dev/null
cp -a certs/haproxy/* /opt/haproxy/certs/ 2>/dev/null

# Reinstall user packages
echo "Reinstalling packages..."
if [ -f packages-user.list ]; then
    opkg update
    cat packages-user.list | cut -d' ' -f1 | xargs opkg install 2>/dev/null || true
fi

# Reload services
echo "Reloading services..."
/etc/init.d/network reload 2>/dev/null || true
/etc/init.d/haproxy restart 2>/dev/null || true
/etc/init.d/rpcd restart 2>/dev/null || true

echo "=== Restore Complete ==="
echo "Please reboot to apply all changes"
RESTORE
	chmod +x "$work/restore.sh"

	# 10. Archive
	log " Creating archive..."
	tar -czf "$archive" -C "$RECOVER_DIR/tmp" "$name"
	rm -rf "$work"

	# 11. Content hash
	local hash=$(sha256sum "$archive" | cut -d' ' -f1)

	# 12./13. Catalog + history bookkeeping
	add_to_catalog "snapshots" "$name" "$hash" "$(stat -c%s "$archive")" "full"
	add_to_history "snapshot" "$name" "$hash"

	# 14. Publish to mesh (best effort)
	if [ "$MESH_ENABLED" = "1" ] && [ -x "$MESH_BIN" ]; then
		log " Publishing to mesh..."
		$MESH_BIN snapshot "$name" 2>/dev/null || true
	fi

	success "Snapshot created: $archive"
	echo "Hash: $hash"
	echo "Size: $(du -h "$archive" | cut -f1)"
}
|
||||
|
||||
# ============================================================================
|
||||
# Profile Management
|
||||
# ============================================================================
|
||||
# ----------------------------------------------------------------------------
# Profile Management
# ----------------------------------------------------------------------------
profile_save() {
	# Capture the current high-level configuration into a JSON profile.
	local name="${1:-profile-$(date +%Y%m%d)}"
	local profile_file="$PROFILES_DIR/$name.json"

	log "Saving profile: $name"

	# Snapshot a summary of the live uci/opkg state into JSON.
	cat > "$profile_file" << EOF
{
  "name": "$name",
  "created": "$(date -Iseconds)",
  "hostname": "$(uci get system.@system[0].hostname 2>/dev/null)",
  "network": {
    "lan_ip": "$(uci get network.lan.ipaddr 2>/dev/null)",
    "wan_proto": "$(uci get network.wan.proto 2>/dev/null)"
  },
  "secubox": {
    "theme": "$(uci get secubox.ui.theme 2>/dev/null || echo default)",
    "modules": $(uci show secubox 2>/dev/null | grep enabled | grep -c '=1' || echo 0)
  },
  "haproxy": {
    "enabled": $(uci get haproxy.main.enabled 2>/dev/null || echo 0),
    "vhosts": $(uci show haproxy 2>/dev/null | grep -c '=vhost' || echo 0)
  },
  "apps": [
$(opkg list-user-installed 2>/dev/null | while read pkg; do
	echo "    \"$(echo $pkg | cut -d' ' -f1)\","
done | sed '$ s/,$//')
  ]
}
EOF

	local hash=$(sha256sum "$profile_file" | cut -d' ' -f1 | head -c 16)
	add_to_catalog "profiles" "$name" "$hash" "$(stat -c%s "$profile_file")" "profile"
	add_to_history "profile_save" "$name" "$hash"

	success "Profile saved: $profile_file"
}

profile_apply() {
	# Push a saved profile's settings back into uci and commit.
	local name="$1"
	local profile_file="$PROFILES_DIR/$name.json"

	if [ ! -f "$profile_file" ]; then
		error "Profile not found: $name"
		return 1
	fi

	log "Applying profile: $name"

	# Only the fields below are applied; the rest of the profile is
	# informational.
	local hostname=$(jsonfilter -i "$profile_file" -e '@.hostname')
	[ -n "$hostname" ] && uci set system.@system[0].hostname="$hostname"

	local lan_ip=$(jsonfilter -i "$profile_file" -e '@.network.lan_ip')
	[ -n "$lan_ip" ] && uci set network.lan.ipaddr="$lan_ip"

	local theme=$(jsonfilter -i "$profile_file" -e '@.secubox.theme')
	[ -n "$theme" ] && uci set secubox.ui.theme="$theme"

	uci commit

	add_to_history "profile_apply" "$name" ""
	success "Profile applied: $name"
}

profile_list() {
	# Enumerate saved profiles with their creation timestamps.
	log "Available profiles:"
	for f in "$PROFILES_DIR"/*.json; do
		[ -f "$f" ] || continue
		local name=$(basename "$f" .json)
		local created=$(jsonfilter -i "$f" -e '@.created' 2>/dev/null)
		echo "  - $name ($created)"
	done
}
|
||||
|
||||
# ============================================================================
|
||||
# Apps Catalog
|
||||
# ============================================================================
|
||||
# ----------------------------------------------------------------------------
# Apps Catalog
# ----------------------------------------------------------------------------
apps_sync() {
	# Rebuild the local installed-apps catalog from opkg's package list.
	log "Syncing apps catalog..."

	local apps_file="$APPS_DIR/installed.json"

	# Open the JSON document; records are appended one by one below.
	cat > "$apps_file" << EOF
{
  "synced": "$(date -Iseconds)",
  "apps": [
EOF

	local first=1
	opkg list-installed | while read line; do
		local pkg=$(echo "$line" | cut -d' ' -f1)
		local ver=$(echo "$line" | cut -d' ' -f3)

		# Comma-separate every record after the first.
		[ $first -eq 0 ] && echo "," >> "$apps_file"
		first=0

		cat >> "$apps_file" << APPEOF
    {
      "name": "$pkg",
      "version": "$ver",
      "installed": true
    }
APPEOF
	done

	echo "  ]}" >> "$apps_file"

	success "Apps catalog synced: $(opkg list-installed | wc -l) packages"
}

apps_export() {
	# Dump the user-installed package list to a dated file (or $1).
	local output="${1:-$APPS_DIR/apps-export-$(date +%Y%m%d).list}"

	opkg list-user-installed > "$output" 2>/dev/null
	success "Apps exported: $output"
}

apps_import() {
	# Reinstall every package named in an export list, one at a time.
	local input="$1"

	if [ ! -f "$input" ]; then
		error "Apps list not found: $input"
		return 1
	fi

	log "Importing apps from: $input"
	opkg update

	cat "$input" | cut -d' ' -f1 | while read pkg; do
		log " Installing: $pkg"
		opkg install "$pkg" 2>/dev/null || warn "Failed: $pkg"
	done

	success "Apps import complete"
}
|
||||
|
||||
# ============================================================================
|
||||
# History & Rollback
|
||||
# ============================================================================
|
||||
add_to_history() {
|
||||
local action="$1"
|
||||
local target="$2"
|
||||
local hash="$3"
|
||||
|
||||
local tmp_hist="$RECOVER_DIR/tmp/history_$$.json"
|
||||
local entry="{\"timestamp\":\"$(date -Iseconds)\",\"action\":\"$action\",\"target\":\"$target\",\"hash\":\"$hash\"}"
|
||||
|
||||
if [ -f "$HISTORY_FILE" ]; then
|
||||
jsonfilter -i "$HISTORY_FILE" -e '@' | \
|
||||
sed "s/\"entries\":\[/\"entries\":[$entry,/" > "$tmp_hist"
|
||||
mv "$tmp_hist" "$HISTORY_FILE"
|
||||
fi
|
||||
}
|
||||
|
||||
history_show() {
    # Print the most recent history entries, newest first (entries are
    # prepended by add_to_history, so head gives the latest ones).
    # $1 - maximum number of entries to display (default 20).
    local limit="${1:-20}"

    log "Recent history (last $limit entries):"
    # head -n "$limit": POSIX form instead of the obsolete "head -N".
    jsonfilter -i "$HISTORY_FILE" -e '@.entries[*]' 2>/dev/null | head -n "$limit" | while read -r entry; do
        local ts=$(echo "$entry" | jsonfilter -e '@.timestamp')
        local action=$(echo "$entry" | jsonfilter -e '@.action')
        local target=$(echo "$entry" | jsonfilter -e '@.target')
        echo "  [$ts] $action: $target"
    done
}
|
||||
|
||||
rollback() {
    # Restore the system from a snapshot archive created by snapshot_full.
    # $1 - snapshot name or archive path; when empty, the newest archive
    #      in $ARCHIVE_DIR is used. Returns 1 if no archive can be found
    #      or the archive layout is unexpected.
    local target="$1"

    if [ -z "$target" ]; then
        # Find last snapshot (newest .tar.gz by mtime).
        target=$(ls -t "$ARCHIVE_DIR"/*.tar.gz 2>/dev/null | head -1)
    fi

    # Accept a bare snapshot name and resolve it to the archive path.
    if [ ! -f "$target" ] && [ -f "$ARCHIVE_DIR/$target.tar.gz" ]; then
        target="$ARCHIVE_DIR/$target.tar.gz"
    fi

    if [ ! -f "$target" ]; then
        error "No snapshot found for rollback"
        return 1
    fi

    log "Rolling back to: $target"
    warn "This will overwrite current configuration!"

    # Extract the archive and run its bundled restore script.
    local tmp_dir="$RECOVER_DIR/tmp/rollback_$$"
    mkdir -p "$tmp_dir"
    tar -xzf "$target" -C "$tmp_dir"

    # The archive is expected to contain one top-level directory holding
    # restore.sh. Guard the cd: if extraction failed or the layout is
    # different, we must not run ./restore.sh from the wrong directory.
    if ! cd "$tmp_dir"/*; then
        error "Unexpected snapshot layout in: $target"
        rm -rf "$tmp_dir"
        return 1
    fi
    # Archives may not preserve the exec bit; ensure restore.sh runs.
    [ -x ./restore.sh ] || chmod +x ./restore.sh 2>/dev/null
    ./restore.sh

    cd /
    rm -rf "$tmp_dir"

    add_to_history "rollback" "$(basename "$target")" ""
    success "Rollback complete"
}
|
||||
|
||||
# ============================================================================
|
||||
# Catalog Management
|
||||
# ============================================================================
|
||||
add_to_catalog() {
    # Prepend an item to one of the arrays in the catalog JSON file.
    # $1 - catalog section key ("snapshots", "profiles", ...)
    # $2 - item name, $3 - sha256 hash, $4 - size in bytes (bare number),
    # $5 - item type label.
    local cat_type="$1"
    local name="$2"
    local hash="$3"
    local size="$4"
    local item_type="$5"

    local tmp_cat="$RECOVER_DIR/tmp/catalog_$$.json"
    local entry="{\"name\":\"$name\",\"hash\":\"$hash\",\"size\":$size,\"type\":\"$item_type\",\"created\":\"$(date -Iseconds)\"}"

    # "|" sed delimiter so names containing "/" cannot break the
    # expression (mirrors add_to_history).
    jsonfilter -i "$CATALOG_FILE" -e '@' | \
        sed "s|\"$cat_type\":\[|\"$cat_type\":[$entry,|" > "$tmp_cat"
    mv "$tmp_cat" "$CATALOG_FILE"
}
|
||||
|
||||
catalog_show() {
    # Pretty-print the recovery catalog: snapshots (with size and date)
    # followed by profiles.
    log "Recovery Catalog:"
    echo ""
    echo "=== Snapshots ==="
    # read -r: keep backslashes in names literal.
    jsonfilter -i "$CATALOG_FILE" -e '@.snapshots[*]' 2>/dev/null | while read -r item; do
        local name=$(echo "$item" | jsonfilter -e '@.name')
        local size=$(echo "$item" | jsonfilter -e '@.size')
        local created=$(echo "$item" | jsonfilter -e '@.created')
        # numfmt is coreutils, not busybox; fall back to raw byte count.
        echo "  $name ($(numfmt --to=iec $size 2>/dev/null || echo ${size}B)) - $created"
    done

    echo ""
    echo "=== Profiles ==="
    jsonfilter -i "$CATALOG_FILE" -e '@.profiles[*]' 2>/dev/null | while read -r item; do
        local name=$(echo "$item" | jsonfilter -e '@.name')
        echo "  $name"
    done
}
|
||||
|
||||
# ============================================================================
|
||||
# Reborn Script Generator
|
||||
# ============================================================================
|
||||
generate_reborn() {
    # Build a single self-contained "reborn" shell script that can restore
    # this box from scratch: it embeds a fresh snapshot as base64 and can
    # alternatively fetch the snapshot from mesh peers or a URL.
    # $1 - output path for the generated script (default /tmp/secubox-reborn.sh).
    local output="${1:-/tmp/secubox-reborn.sh}"

    log "Generating reborn script..."

    # Create fresh snapshot first
    local snapshot_name="reborn-$(date +%Y%m%d-%H%M%S)"
    snapshot_full "$snapshot_name"

    local archive="$ARCHIVE_DIR/$snapshot_name.tar.gz"
    local hash=$(sha256sum "$archive" | cut -d' ' -f1)

    # Get mesh peers
    local peers=""
    if [ -x "$MESH_BIN" ]; then
        peers=$($MESH_BIN peer-list 2>/dev/null | jsonfilter -e '@.addr' | tr '\n' ' ')
    fi

    # Generate self-contained script.
    # NOTE: the heredoc delimiter is UNQUOTED, so $snapshot_name, $hash,
    # $peers and $(base64 ...) expand NOW and are baked into the output,
    # while every \$ below survives as a literal $ for the generated
    # script's own runtime expansion. Do not edit the escaping casually.
    cat > "$output" << REBORN
#!/bin/sh
# ============================================================================
# SecuBox Reborn Script - Self-Revival Bootstrap
# Generated: $(date -Iseconds)
# Snapshot: $snapshot_name
# Hash: $hash
# ============================================================================

echo "╔══════════════════════════════════════════════════════════════╗"
echo "║ SecuBox Reborn - Self-Revival System ║"
echo "╠══════════════════════════════════════════════════════════════╣"
echo "║ Snapshot: $snapshot_name"
echo "║ Hash: ${hash:0:16}..."
echo "╚══════════════════════════════════════════════════════════════╝"

SNAPSHOT_HASH="$hash"
MESH_PEERS="$peers"
ARCHIVE_URL=""

# Embedded snapshot (base64)
EMBEDDED_ARCHIVE="
$(base64 -w0 "$archive" 2>/dev/null || base64 "$archive")
"

recover_from_embedded() {
    echo "[1/4] Extracting embedded archive..."
    echo "\$EMBEDDED_ARCHIVE" | base64 -d > /tmp/snapshot.tar.gz

    verify_and_restore
}

recover_from_mesh() {
    echo "[1/4] Fetching from mesh peers..."
    for peer in \$MESH_PEERS; do
        echo "  Trying: \$peer"
        if curl -sf "http://\$peer:7331/api/block/\$SNAPSHOT_HASH" -o /tmp/snapshot.tar.gz 2>/dev/null; then
            echo "  Downloaded from \$peer"
            verify_and_restore
            return 0
        fi
    done
    echo "  Failed to fetch from mesh"
    return 1
}

recover_from_url() {
    if [ -n "\$ARCHIVE_URL" ]; then
        echo "[1/4] Downloading from URL..."
        curl -sfL "\$ARCHIVE_URL" -o /tmp/snapshot.tar.gz
        verify_and_restore
    fi
}

verify_and_restore() {
    echo "[2/4] Verifying integrity..."
    local actual=\$(sha256sum /tmp/snapshot.tar.gz | cut -d' ' -f1)
    if [ "\$actual" != "\$SNAPSHOT_HASH" ]; then
        echo "ERROR: Hash mismatch!"
        echo "  Expected: \$SNAPSHOT_HASH"
        echo "  Got: \$actual"
        exit 1
    fi
    echo "  Hash verified OK"

    echo "[3/4] Extracting snapshot..."
    mkdir -p /tmp/secubox-restore
    tar -xzf /tmp/snapshot.tar.gz -C /tmp/secubox-restore

    echo "[4/4] Running restore..."
    cd /tmp/secubox-restore/*
    chmod +x restore.sh
    ./restore.sh

    echo ""
    echo "╔══════════════════════════════════════════════════════════════╗"
    echo "║ SecuBox Reborn Complete! ║"
    echo "╚══════════════════════════════════════════════════════════════╝"
    echo ""
    echo "Please reboot to apply all changes: reboot"
}

# Main
case "\$1" in
    --mesh)
        recover_from_mesh || recover_from_embedded
        ;;
    --url)
        ARCHIVE_URL="\$2"
        recover_from_url
        ;;
    *)
        recover_from_embedded
        ;;
esac
REBORN

    chmod +x "$output"
    success "Reborn script generated: $output"
    echo "Size: $(du -h "$output" | cut -f1)"
    echo ""
    echo "Usage:"
    echo "  $output              # Restore from embedded archive"
    echo "  $output --mesh       # Try mesh peers first"
}
|
||||
|
||||
# ============================================================================
|
||||
# Main
|
||||
# ============================================================================
|
||||
# Create required directories and seed catalog/history files before any
# command runs.
init

# Command dispatch: first CLI argument selects the operation; the second
# (where used) is its target/name argument.
case "$1" in
    snapshot)
        snapshot_full "$2"
        ;;
    restore|rollback)
        rollback "$2"
        ;;
    profile-save)
        profile_save "$2"
        ;;
    profile-apply)
        profile_apply "$2"
        ;;
    profile-list|profiles)
        profile_list
        ;;
    apps-sync)
        apps_sync
        ;;
    apps-export)
        apps_export "$2"
        ;;
    apps-import)
        apps_import "$2"
        ;;
    history)
        history_show "$2"
        ;;
    catalog)
        catalog_show
        ;;
    reborn)
        generate_reborn "$2"
        ;;
    *)
        # Unknown/absent command: print usage.
        echo "SecuBox Recover - Self-Revival & Recovery System"
        echo ""
        echo "Usage: $0 <command> [args]"
        echo ""
        echo "Snapshots:"
        echo "  snapshot [name]       Create full system snapshot"
        echo "  restore [name|file]   Restore from snapshot"
        echo "  rollback [target]     Rollback to last/specified snapshot"
        echo ""
        echo "Profiles:"
        echo "  profile-save [name]   Save current config as profile"
        echo "  profile-apply <name>  Apply saved profile"
        echo "  profile-list          List available profiles"
        echo ""
        echo "Apps:"
        echo "  apps-sync             Sync local apps catalog"
        echo "  apps-export [file]    Export installed apps list"
        echo "  apps-import <file>    Import and install apps"
        echo ""
        echo "History:"
        echo "  history [limit]       Show recovery history"
        echo "  catalog               Show full catalog"
        echo ""
        echo "Reborn:"
        echo "  reborn [output]       Generate self-contained reborn script"
        ;;
esac
|
||||
345
package/secubox/secubox-core/root/usr/sbin/secubox-swiss
Normal file
345
package/secubox/secubox-core/root/usr/sbin/secubox-swiss
Normal file
@ -0,0 +1,345 @@
|
||||
#!/bin/sh
# ============================================================================
# SecuBox Swiss Army Knife - Unified Management & Recovery Tool
# Combines: mesh, recover, console, monitoring, and self-enhancement
# ============================================================================

VERSION="1.0.0"
# Absolute, symlink-resolved path of this script; action_self_update
# overwrites this path when installing a newer version.
SCRIPT_PATH="$(readlink -f "$0")"
SECUBOX_DIR="/srv/secubox"

# Colors - ANSI escape sequences used inside echo strings.
# NOTE(review): assumes echo interprets \033 (true for busybox ash) - confirm.
C_CYAN='\033[0;36m'
C_GREEN='\033[0;32m'
C_YELLOW='\033[1;33m'
C_RED='\033[0;31m'
C_BOLD='\033[1m'
C_NC='\033[0m'
|
||||
|
||||
banner() {
    # Print the ASCII-art banner plus version/tagline lines.
    echo "${C_CYAN}"
    # Quoted 'BANNER' delimiter: the art is emitted verbatim, with no
    # parameter or command expansion inside the heredoc.
    cat << 'BANNER'
 _____ _____ _____ _ _ ____ _____ __
 / ____| ____/ ____| | | | _ \ / _ \ \/ /
| (___ | _|| | | | | | |_) | | | \ /
 \___ \| |__| | | |_| | _ <| |_| / \
 ____) |____| |____| |_| | |_) \___/_/\_\
|_____/______\_____|_____|____/
 SWISS
BANNER
    echo "${C_NC}"
    echo "${C_BOLD}SecuBox Swiss Army Knife v$VERSION${C_NC}"
    echo "Unified Management • Recovery • Mesh • Self-Enhancement"
    echo ""
}
|
||||
|
||||
# ============================================================================
|
||||
# Tool Dispatch
|
||||
# ============================================================================
|
||||
tool_mesh() {
    # Replace this process with the mesh tool; exec never returns.
    exec /usr/sbin/secubox-mesh "$@"
}
|
||||
|
||||
tool_recover() {
    # Replace this process with the recovery tool; exec never returns.
    exec /usr/sbin/secubox-recover "$@"
}
|
||||
|
||||
tool_console() {
    # Hand off to the Python management console if it is installed;
    # otherwise tell the user how to get it and exit non-zero.
    local console_py=/usr/lib/secubox-console/secubox_console.py
    if [ ! -f "$console_py" ]; then
        echo "Console not installed. Install with: opkg install secubox-console"
        exit 1
    fi
    exec python3 "$console_py" "$@"
}
|
||||
|
||||
tool_mitm() {
    # Replace this process with the MITM log tool; exec never returns.
    exec /usr/sbin/secubox-mitm-logs "$@"
}
|
||||
|
||||
# ============================================================================
|
||||
# Quick Actions
|
||||
# ============================================================================
|
||||
action_status() {
    # One-screen overview: system identity, mesh state, recovery
    # inventory, and whether key security services are running.
    echo "${C_CYAN}=== SecuBox Status ===${C_NC}"
    echo ""

    # System
    echo "${C_BOLD}System:${C_NC}"
    echo "  Hostname: $(uci get system.@system[0].hostname 2>/dev/null || hostname)"
    echo "  Version: $(cat /etc/secubox-version 2>/dev/null || echo 'unknown')"
    # uptime output format varies; this extracts the "up N..." portion.
    echo "  Uptime: $(uptime | cut -d',' -f1 | cut -d' ' -f4-)"
    echo ""

    # Mesh - node.id existing is used as the "initialized" marker.
    echo "${C_BOLD}Mesh:${C_NC}"
    if [ -f /srv/secubox/mesh/node.id ]; then
        echo "  Node ID: $(cat /srv/secubox/mesh/node.id)"
        echo "  Peers: $(cat /srv/secubox/mesh/peers.json 2>/dev/null | jsonfilter -e '@.peers[*]' 2>/dev/null | wc -l)"
        echo "  Blocks: $(find /srv/secubox/mesh/blocks -type f 2>/dev/null | wc -l)"
    else
        echo "  Not initialized. Run: secubox-swiss mesh init"
    fi
    echo ""

    # Recovery - counts of archives and saved profiles on disk.
    echo "${C_BOLD}Recovery:${C_NC}"
    echo "  Snapshots: $(ls /srv/secubox/recover/archives/*.tar.gz 2>/dev/null | wc -l)"
    echo "  Profiles: $(ls /srv/secubox/recover/profiles/*.json 2>/dev/null | wc -l)"
    echo ""

    # Services - pgrep -x matches the exact process name only.
    echo "${C_BOLD}Key Services:${C_NC}"
    for svc in haproxy crowdsec mitmproxy; do
        if pgrep -x "$svc" >/dev/null 2>&1; then
            echo "  $svc: ${C_GREEN}running${C_NC}"
        else
            echo "  $svc: ${C_RED}stopped${C_NC}"
        fi
    done
}
|
||||
|
||||
action_backup() {
    # Create a full snapshot via secubox-recover; name defaults to a
    # timestamped "backup-..." label.
    local snap_name
    snap_name="${1:-backup-$(date +%Y%m%d-%H%M%S)}"
    echo "${C_CYAN}Creating backup: $snap_name${C_NC}"
    /usr/sbin/secubox-recover snapshot "$snap_name"
}
|
||||
|
||||
action_reborn() {
    # Delegate to secubox-recover to build a self-contained recovery
    # script at the requested (or default) path.
    local out_file
    out_file="${1:-/tmp/secubox-reborn.sh}"
    echo "${C_CYAN}Generating reborn script: $out_file${C_NC}"
    /usr/sbin/secubox-recover reborn "$out_file"
    echo ""
    echo "${C_GREEN}Reborn script ready!${C_NC}"
    echo "Copy to any machine and run to restore SecuBox."
}
|
||||
|
||||
action_sync() {
    # Sync the mesh first, then refresh the local apps catalog.
    echo "${C_CYAN}Syncing mesh...${C_NC}"
    /usr/sbin/secubox-mesh sync
    echo ""
    echo "${C_CYAN}Syncing recovery catalog...${C_NC}"
    /usr/sbin/secubox-recover apps-sync
}
|
||||
|
||||
action_health() {
    # Health report: disk, memory, network reachability, and security
    # counters extracted from the MITM access log.
    echo "${C_CYAN}=== Health Check ===${C_NC}"
    echo ""

    # Disk
    echo "${C_BOLD}Disk Usage:${C_NC}"
    df -h / /srv /opt 2>/dev/null | grep -v "^Filesystem"
    echo ""

    # Memory (head reads the file directly; no cat pipeline needed)
    echo "${C_BOLD}Memory:${C_NC}"
    free -m 2>/dev/null || head -3 /proc/meminfo
    echo ""

    # Network
    echo "${C_BOLD}Network:${C_NC}"
    echo "  WAN IP: $(curl -s --connect-timeout 2 ifconfig.me 2>/dev/null || echo 'unavailable')"
    echo "  LAN IP: $(uci get network.lan.ipaddr 2>/dev/null)"
    echo ""

    # Security
    echo "${C_BOLD}Security:${C_NC}"
    if [ -f /var/log/secubox-access.log ]; then
        # BUGFIX: grep -c prints its own "0" and exits 1 when nothing
        # matches, so "|| echo 0" used to produce a two-line "0\n0"
        # value. Capture grep's count directly and default to 0 only
        # when grep produced no output at all (e.g. unreadable file).
        scans=$(grep -c '"is_scan":true' /var/log/secubox-access.log 2>/dev/null)
        auths=$(grep -c '"is_auth_attempt":true' /var/log/secubox-access.log 2>/dev/null)
        echo "  Scan attempts: ${scans:-0}"
        echo "  Auth attempts: ${auths:-0}"
    else
        echo "  MITM logging not active"
    fi
}
|
||||
|
||||
action_logs() {
    # Show logs for one component: mitm | mesh | security | all (default).
    # $1 - component name; anything unrecognized falls through to "all".
    local component="${1:-all}"

    case "$component" in
        mitm)
            /usr/sbin/secubox-mitm-logs tail
            ;;
        mesh)
            # Last few chain blocks; jsonfilter reads the file itself
            # (no cat pipeline), errors silenced if the file is absent.
            jsonfilter -i /srv/secubox/mesh/chain.json -e '@.blocks[-5:]' 2>/dev/null
            ;;
        security)
            /usr/sbin/secubox-mitm-logs scan
            ;;
        *)
            echo "${C_CYAN}=== Recent Logs ===${C_NC}"
            logread -l 30 2>/dev/null || tail -30 /var/log/messages 2>/dev/null
            ;;
    esac
}
|
||||
|
||||
# ============================================================================
|
||||
# Self-Enhancement
|
||||
# ============================================================================
|
||||
action_self_update() {
    # Check mesh peers for a newer secubox-swiss; download, verify the
    # sha256 against the peer-advertised hash, and replace this script.
    echo "${C_CYAN}Checking for updates...${C_NC}"

    # Peer addresses from the mesh peer list (empty if mesh not set up).
    local peers=$(cat /srv/secubox/mesh/peers.json 2>/dev/null | jsonfilter -e '@.peers[*].addr' 2>/dev/null)

    for peer in $peers; do
        echo "  Checking peer: $peer"
        local remote_ver=$(curl -s --connect-timeout 3 "http://$peer:7331/api/version" 2>/dev/null | jsonfilter -e '@.swiss' 2>/dev/null)

        if [ -n "$remote_ver" ] && [ "$remote_ver" != "$VERSION" ]; then
            echo "  ${C_YELLOW}Update available: $remote_ver${C_NC}"

            # Fetch advertised hash and the update payload.
            local update_hash=$(curl -s "http://$peer:7331/api/update/swiss/hash" 2>/dev/null)
            curl -s "http://$peer:7331/api/update/swiss" -o /tmp/secubox-swiss.new 2>/dev/null

            local local_hash=""
            [ -f /tmp/secubox-swiss.new ] && \
                local_hash=$(sha256sum /tmp/secubox-swiss.new 2>/dev/null | cut -d' ' -f1)

            # BUGFIX: require a NON-EMPTY advertised hash. Previously,
            # if both curls failed, update_hash and local_hash were both
            # empty strings, "" = "" passed, and a missing/garbage file
            # was installed over the running script.
            if [ -n "$update_hash" ] && [ "$local_hash" = "$update_hash" ]; then
                echo "  ${C_GREEN}Hash verified. Installing...${C_NC}"
                chmod +x /tmp/secubox-swiss.new
                mv /tmp/secubox-swiss.new "$SCRIPT_PATH"
                echo "  ${C_GREEN}Updated to $remote_ver${C_NC}"
                exit 0
            else
                echo "  ${C_RED}Hash mismatch. Skipping.${C_NC}"
                rm -f /tmp/secubox-swiss.new
            fi
        fi
    done

    echo "  ${C_GREEN}Already up to date: $VERSION${C_NC}"
}
|
||||
|
||||
# ============================================================================
|
||||
# Interactive Mode
|
||||
# ============================================================================
|
||||
interactive_menu() {
    # Interactive loop: draw the menu, read one choice, dispatch.
    # Numeric actions pause for Enter and redraw; tool choices (m/r/c/s)
    # exec into the tool (break is unreachable after a successful exec
    # but guards against exec failure).
    while true; do
        clear
        banner
        echo "Quick Actions:"
        echo "  ${C_CYAN}1${C_NC}) Status  - System overview"
        echo "  ${C_CYAN}2${C_NC}) Backup  - Create snapshot"
        echo "  ${C_CYAN}3${C_NC}) Reborn  - Generate recovery script"
        echo "  ${C_CYAN}4${C_NC}) Sync    - Sync mesh & catalog"
        echo "  ${C_CYAN}5${C_NC}) Health  - Health check"
        echo "  ${C_CYAN}6${C_NC}) Logs    - View logs"
        echo "  ${C_CYAN}7${C_NC}) Update  - Self-update"
        echo ""
        echo "Tools:"
        echo "  ${C_CYAN}m${C_NC}) Mesh    - P2P mesh management"
        echo "  ${C_CYAN}r${C_NC}) Recover - Recovery system"
        echo "  ${C_CYAN}c${C_NC}) Console - Remote management (Python)"
        echo "  ${C_CYAN}s${C_NC}) Security - MITM/Security logs"
        echo ""
        echo "  ${C_CYAN}q${C_NC}) Quit"
        echo ""
        printf "Choice: "
        read choice

        case "$choice" in
            1) action_status; read -p "Press Enter..." ;;
            2) action_backup; read -p "Press Enter..." ;;
            3) action_reborn; read -p "Press Enter..." ;;
            4) action_sync; read -p "Press Enter..." ;;
            5) action_health; read -p "Press Enter..." ;;
            6) action_logs; read -p "Press Enter..." ;;
            7) action_self_update; read -p "Press Enter..." ;;
            m) tool_mesh; break ;;
            r) tool_recover; break ;;
            c) tool_console; break ;;
            s) tool_mitm; break ;;
            q|Q) exit 0 ;;
            *) echo "Invalid choice" ;;
        esac
    done
}
|
||||
|
||||
# ============================================================================
|
||||
# Main
|
||||
# ============================================================================
|
||||
# Top-level dispatch: quick actions run in-process, tool commands exec
# into the dedicated binaries, no argument opens the interactive menu.
case "$1" in
    # Quick actions
    status)
        action_status
        ;;
    backup)
        shift
        action_backup "$@"
        ;;
    reborn)
        shift
        action_reborn "$@"
        ;;
    sync)
        action_sync
        ;;
    health)
        action_health
        ;;
    logs)
        shift
        action_logs "$@"
        ;;
    update)
        action_self_update
        ;;

    # Tool dispatch (these exec and do not return)
    mesh)
        shift
        tool_mesh "$@"
        ;;
    recover)
        shift
        tool_recover "$@"
        ;;
    console)
        shift
        tool_console "$@"
        ;;
    mitm|security)
        shift
        tool_mitm "$@"
        ;;

    # Version
    -v|--version|version)
        echo "SecuBox Swiss v$VERSION"
        ;;

    # Help
    -h|--help|help)
        banner
        echo "Usage: secubox-swiss [command] [args]"
        echo ""
        echo "Quick Actions:"
        echo "  status            System overview"
        echo "  backup [name]     Create snapshot"
        echo "  reborn [file]     Generate recovery script"
        echo "  sync              Sync mesh & catalog"
        echo "  health            Health check"
        echo "  logs [component]  View logs (mitm|mesh|security|all)"
        echo "  update            Self-update from mesh"
        echo ""
        echo "Tools:"
        echo "  mesh <cmd>        P2P mesh (init|peer-add|sync|...)"
        echo "  recover <cmd>     Recovery (snapshot|restore|profile-*|...)"
        echo "  console <cmd>     Remote management console"
        echo "  mitm <cmd>        Security/MITM logs"
        echo ""
        echo "Run without arguments for interactive menu."
        ;;

    # Interactive (no argument at all)
    "")
        interactive_menu
        ;;

    *)
        echo "Unknown command: $1"
        echo "Run 'secubox-swiss help' for usage."
        exit 1
        ;;
esac
|
||||
Loading…
Reference in New Issue
Block a user