feat(waf): Never fall back to LuCI, add funny 404 page + OpenClaw package

mitmproxy haproxy_router.py:
- Return 404 instead of routing to LuCI (8081) for missing routes
- Block any routes that point to port 8081
- Add "WAF Says NO" themed 404 page with multi-layer WAF visual

HAProxy (deployed on router):
- Configure end_of_internet backend with custom errorfiles
- Add "End of Internet" themed error pages for 5xx errors
- Patched haproxyctl to include errorfile directives

New package: secubox-app-openclaw
- Personal AI assistant integration for SecuBox
- Supports Anthropic Claude, OpenAI, and Ollama providers
- Chat integrations (Telegram, Discord, Slack)
- Email/calendar automation support
- CLI tool: openclawctl

Co-Authored-By: Claude Opus 4.5 <noreply@anthropic.com>
This commit is contained in:
CyberMind-FR 2026-02-27 09:23:43 +01:00
parent c453cef5db
commit a8dc5f58fe
6 changed files with 954 additions and 6 deletions

View File

@ -460,7 +460,29 @@
"Bash(__NEW_LINE_4ae32f0682f969f7__ echo \"\")",
"Bash(__NEW_LINE_9355573ea4eaaf97__ echo \"\")",
"Bash(__NEW_LINE_2d1b6ec757fae311__ echo \"\")",
"Bash(__NEW_LINE_f8bfff16bb831fd7__ echo \"\")"
"Bash(__NEW_LINE_f8bfff16bb831fd7__ echo \"\")",
"Bash(__NEW_LINE_c5c381a297636f7e__ echo \"\")",
"Bash(__NEW_LINE_f49c6da736d3f42b__ echo \"=== Check master DHT ===\")",
"Bash(__NEW_LINE_f49c6da736d3f42b__ echo \"\")",
"Bash(__NEW_LINE_ea3ae0dc3d3aaef0__ echo \"\")",
"Bash(__NEW_LINE_987d8798582642d8__ echo \"\")",
"Bash(__NEW_LINE_e9d215c8ae3f794b__ echo \"\")",
"Bash(__NEW_LINE_bf7149c19b0ef674__ echo \"\")",
"Bash(__NEW_LINE_a6f7aa1271a4ac1e__ echo \"\")",
"Bash(GOOS=linux GOARCH=arm64 CGO_ENABLED=0 go build:*)",
"Bash(GOOS=linux GOARCH=amd64 CGO_ENABLED=0 go build:*)",
"Bash(exec bash -c 'cd /home/reepost && pwd')",
"Bash(HOME=/home/reepost exec /bin/bash --login -c 'cd /home/reepost/CyberMindStudio/secubox-openwrt && pwd && df -h / | tail -1')",
"Bash(/bin/sh -c 'cd /home/reepost/CyberMindStudio/secubox-openwrt && pwd')",
"Bash(/bin/sh:*)",
"Bash(/usr/bin/whoami:*)",
"Bash(/bin/echo:*)",
"Bash(git clone:*)",
"Bash(/bin/ls:*)",
"Bash(pip show:*)",
"Bash(uvx:*)",
"Bash(claude mcp add:*)",
"WebFetch(domain:openclaw.ai)"
]
}
}

View File

@ -14,7 +14,162 @@ from mitmproxy.connection import Address
# Backend routing configuration file
ROUTES_FILE = "/data/haproxy-routes.json"
DEFAULT_BACKEND = ("127.0.0.1", 8081) # LuCI fallback
# 404 page HTML - shown when no route is found
# NEVER fallback to LuCI - return proper 404 instead
# 404 page served when no backend route matches. It is rendered at request
# time with NOT_FOUND_HTML.format(domain=host), so every literal CSS brace
# must be doubled ({{ / }}) — with single braces str.format() raises
# ValueError on the first CSS rule and the 404 handler crashes.
# Only {domain} remains a live format field.
NOT_FOUND_HTML: str = """<!DOCTYPE html>
<html lang="en">
<head>
<meta charset="UTF-8">
<meta name="viewport" content="width=device-width, initial-scale=1.0">
<title>WAF Says NO - SecuBox</title>
<style>
* {{ box-sizing: border-box; }}
body {{
font-family: 'Courier New', monospace;
background: linear-gradient(135deg, #0a0a0a 0%, #1a0a1a 50%, #0a1a0a 100%);
color: #0f0;
display: flex;
justify-content: center;
align-items: center;
min-height: 100vh;
margin: 0;
overflow: hidden;
}}
.matrix-bg {{
position: fixed;
top: 0; left: 0; right: 0; bottom: 0;
background: repeating-linear-gradient(
0deg,
transparent,
transparent 2px,
rgba(0, 255, 0, 0.03) 2px,
rgba(0, 255, 0, 0.03) 4px
);
pointer-events: none;
animation: scan 8s linear infinite;
}}
@keyframes scan {{ from {{ background-position: 0 0; }} to {{ background-position: 0 100vh; }} }}
.container {{
text-align: center;
padding: 2rem;
max-width: 700px;
position: relative;
z-index: 10;
}}
.skull {{
font-size: 5rem;
animation: pulse 2s ease-in-out infinite;
text-shadow: 0 0 20px #f00, 0 0 40px #f00;
}}
@keyframes pulse {{ 0%, 100% {{ transform: scale(1); }} 50% {{ transform: scale(1.1); }} }}
h1 {{
font-size: 3rem;
margin: 0.5rem 0;
color: #f00;
text-shadow: 0 0 10px #f00, 0 0 20px #800;
animation: glitch 0.5s infinite;
}}
@keyframes glitch {{
0%, 90%, 100% {{ transform: translateX(0); }}
92% {{ transform: translateX(-2px); }}
94% {{ transform: translateX(2px); }}
96% {{ transform: translateX(-1px); }}
98% {{ transform: translateX(1px); }}
}}
h2 {{
font-size: 1.2rem;
color: #0f0;
margin: 1rem 0;
}}
.domain {{
background: rgba(0, 255, 0, 0.1);
border: 1px solid #0f0;
padding: 0.8rem 1.5rem;
border-radius: 4px;
font-family: monospace;
font-size: 1.1rem;
display: inline-block;
margin: 1rem 0;
color: #ff0;
}}
.waf-layers {{
display: flex;
justify-content: center;
gap: 1rem;
margin: 1.5rem 0;
flex-wrap: wrap;
}}
.layer {{
background: rgba(255, 0, 0, 0.2);
border: 1px solid #f00;
padding: 0.5rem 1rem;
border-radius: 20px;
font-size: 0.8rem;
color: #f88;
}}
.message {{
color: #888;
line-height: 1.8;
margin: 1rem 0;
}}
.quote {{
font-style: italic;
color: #0f0;
margin: 1.5rem 0;
padding: 1rem;
border-left: 3px solid #0f0;
text-align: left;
}}
.footer {{
margin-top: 2rem;
font-size: 0.8rem;
color: #444;
}}
a {{ color: #0f0; text-decoration: none; }}
a:hover {{ text-decoration: underline; color: #0ff; }}
</style>
</head>
<body>
<div class="matrix-bg"></div>
<div class="container">
<div class="skull">&#128128;</div>
<h1>WAF SAYS NO</h1>
<h2>// REQUEST TERMINATED //</h2>
<p class="domain">{domain}</p>
<div class="waf-layers">
<span class="layer">HAProxy &#128683;</span>
<span class="layer">CrowdSec &#128683;</span>
<span class="layer">mitmproxy &#128683;</span>
<span class="layer">SecuBox &#128683;</span>
</div>
<p class="message">
Your request has been inspected by <strong>4 layers of WAF</strong>
and found to be going... absolutely nowhere.<br><br>
This domain either doesn't exist, or the backend decided to take a coffee break.
</p>
<div class="quote">
"You shall not pass!"<br>
<small>- Every WAF ever, probably</small>
</div>
<p class="message">
<strong>Possible reasons:</strong><br>
&#x2022; The service isn't configured<br>
&#x2022; The backend is having an existential crisis<br>
&#x2022; You're lost on the internet (happens to the best of us)
</p>
<div class="footer">
<a href="https://secubox.in">&#128737; SecuBox Multi-Layer WAF Gateway</a>
</div>
</div>
</body>
</html>
"""
class HaproxyRouter:
def __init__(self):
@ -72,14 +227,18 @@ class HaproxyRouter:
except:
pass
def _get_backend(self, host: str) -> tuple:
"""Get backend address for hostname"""
def _get_backend(self, host: str) -> tuple | None:
"""Get backend address for hostname. Returns None if not found."""
# Remove port from host if present
hostname = host.split(':')[0].lower()
# 1. Try exact match first
if hostname in self.routes:
backend = self.routes[hostname]
# NEVER route to 8081 (LuCI) - treat as missing route
if backend[1] == 8081:
ctx.log.warn(f"Route for {hostname} points to 8081 (LuCI), treating as missing")
return None
return (backend[0], backend[1])
# 2. Try wildcard matching - collect all wildcard patterns
@ -100,10 +259,14 @@ class HaproxyRouter:
for suffix, backend in wildcards:
if hostname.endswith(suffix):
# NEVER route to 8081 (LuCI) - treat as missing route
if backend[1] == 8081:
ctx.log.warn(f"Wildcard route for {hostname} points to 8081 (LuCI), treating as missing")
return None
return (backend[0], backend[1])
ctx.log.warn(f"No route for {hostname}, using default")
return DEFAULT_BACKEND
ctx.log.warn(f"No route found for {hostname}")
return None
def request(self, flow: http.HTTPFlow):
"""Route request to appropriate backend"""
@ -116,6 +279,18 @@ class HaproxyRouter:
host = flow.request.host_header or flow.request.host
backend = self._get_backend(host)
# If no backend found, return 404 - NEVER fallback to LuCI
if backend is None:
ctx.log.warn(f"404: No backend for {host}")
flow.response = http.Response.make(
404,
NOT_FOUND_HTML.format(domain=host).encode('utf-8'),
{"Content-Type": "text/html; charset=utf-8"}
)
flow.metadata['original_host'] = host
flow.metadata['backend'] = "404_NOT_FOUND"
return
# Save original Host header before routing
original_host_header = flow.request.headers.get("Host", host)

View File

@ -0,0 +1,86 @@
# SPDX-License-Identifier: MIT
#
# Copyright (C) 2026 CyberMind.fr
#
# OpenClaw - Personal AI Assistant
# Self-hosted AI agent for task automation
#
include $(TOPDIR)/rules.mk
# Package identity / versioning
PKG_NAME:=secubox-app-openclaw
PKG_VERSION:=1.0.0
PKG_RELEASE:=1
PKG_LICENSE:=MIT
PKG_MAINTAINER:=CyberMind Studio <contact@cybermind.fr>
include $(INCLUDE_DIR)/package.mk
# opkg/menuconfig metadata. PKGARCH:=all because this package ships only
# shell scripts and config — the OpenClaw runtime itself is installed via
# npm on the device (see openclawctl install).
define Package/secubox-app-openclaw
  SECTION:=utils
  CATEGORY:=Utilities
  SUBMENU:=SecuBox Apps
  TITLE:=OpenClaw - Personal AI Assistant
  URL:=https://openclaw.ai
  DEPENDS:=+node +node-npm +wget-ssl +ca-certificates
  PKGARCH:=all
endef
define Package/secubox-app-openclaw/description
  OpenClaw - Self-hosted Personal AI Assistant for SecuBox.
  Features:
  - Personal AI that actually does things (emails, calendar, etc.)
  - Multiple LLM provider support (Anthropic Claude, OpenAI, local models)
  - Chat app integrations (Telegram, Discord, Slack, etc.)
  - Privacy-focused - your data stays on your machine
  - CLI tool (openclawctl) for management
  API: http://<router-ip>:3333
  Configure in /etc/config/openclaw
endef
# Preserve user edits to the UCI config across upgrades.
define Package/secubox-app-openclaw/conffiles
/etc/config/openclaw
endef
define Build/Compile
	# Nothing to compile - npm install at runtime
endef
# Files installed on the target: UCI config, procd init script, CLI tool,
# and the (initially empty) npm project directory.
define Package/secubox-app-openclaw/install
	$(INSTALL_DIR) $(1)/etc/config
	$(INSTALL_CONF) ./files/etc/config/openclaw $(1)/etc/config/openclaw
	$(INSTALL_DIR) $(1)/etc/init.d
	$(INSTALL_BIN) ./files/etc/init.d/openclaw $(1)/etc/init.d/openclaw
	$(INSTALL_DIR) $(1)/usr/sbin
	$(INSTALL_BIN) ./files/usr/sbin/openclawctl $(1)/usr/sbin/openclawctl
	$(INSTALL_DIR) $(1)/srv/openclaw
endef
# Post-install hints, printed only on a live device — skipped during image
# builds, where IPKG_INSTROOT is set.
define Package/secubox-app-openclaw/postinst
#!/bin/sh
[ -n "$${IPKG_INSTROOT}" ] || {
	echo ""
	echo "SecuBox OpenClaw installed"
	echo ""
	echo "Prerequisites:"
	echo " - Node.js 18+ (opkg install node)"
	echo " - LLM API key (Anthropic, OpenAI, or local Ollama)"
	echo ""
	echo "Quick start:"
	echo " openclawctl install # Install OpenClaw via npm"
	echo " openclawctl configure # Set API keys interactively"
	echo " /etc/init.d/openclaw enable"
	echo " /etc/init.d/openclaw start"
	echo ""
	echo "API: http://<router-ip>:3333"
}
exit 0
endef
$(eval $(call BuildPackage,secubox-app-openclaw))

View File

@ -0,0 +1,50 @@
# OpenClaw runtime configuration (UCI: /etc/config/openclaw).
# NOTE: API keys and passwords below are stored in plain text in this file;
# restrict read access accordingly.
config main 'main'
	option enabled '0'
	option port '3333'
	option host '0.0.0.0'
	option data_path '/srv/openclaw'
	option log_level 'info'

# LLM Provider configuration
config provider 'llm'
	option type 'anthropic'
	# Types: anthropic, openai, ollama, local
	option api_key ''
	# For ollama: set to 'local' and configure ollama_url
	option ollama_url 'http://127.0.0.1:11434'
	option model 'claude-sonnet-4-20250514'
	# Models by provider:
	# anthropic: claude-sonnet-4-20250514, claude-opus-4-20250514
	# openai: gpt-4o, gpt-4-turbo
	# ollama: mistral, llama2, tinyllama

# Chat integrations (enable as needed)
config integration 'telegram'
	option enabled '0'
	option bot_token ''

config integration 'discord'
	option enabled '0'
	option bot_token ''

config integration 'slack'
	option enabled '0'
	option bot_token ''
	option app_token ''

# Email integration (for inbox management)
config integration 'email'
	option enabled '0'
	option imap_host ''
	option imap_port '993'
	option smtp_host ''
	option smtp_port '587'
	option email ''
	option password ''

# Calendar integration
config integration 'calendar'
	option enabled '0'
	option caldav_url ''
	option username ''
	option password ''

View File

@ -0,0 +1,40 @@
#!/bin/sh /etc/rc.common
# SecuBox OpenClaw - Personal AI Assistant
# Copyright (C) 2026 CyberMind.fr

START=95
STOP=10
USE_PROCD=1
PROG=/usr/sbin/openclawctl

# Launch the OpenClaw manager under procd, unless disabled in UCI.
start_service() {
	local enabled
	config_load openclaw
	config_get enabled main enabled '0'
	if [ "$enabled" != "1" ]; then
		echo "OpenClaw is disabled. Enable with: uci set openclaw.main.enabled=1"
		return 0
	fi
	procd_open_instance
	procd_set_param command $PROG service-run
	# procd respawn tuning: threshold=3600 timeout=5 retry=5
	procd_set_param respawn 3600 5 5
	procd_set_param stdout 1
	procd_set_param stderr 1
	procd_close_instance
}

# Delegate teardown to the manager script (kills PID + stray processes).
stop_service() {
	$PROG service-stop
}

# Restart the service when the openclaw UCI config changes.
service_triggers() {
	procd_add_reload_trigger "openclaw"
}

# Reload is a full stop/start cycle.
reload_service() {
	stop
	start
}

View File

@ -0,0 +1,575 @@
#!/bin/sh
# SecuBox OpenClaw manager - npm-based AI assistant
# Copyright (C) 2026 CyberMind.fr
#
# OpenClaw is a personal AI assistant that can manage emails, calendar,
# and integrate with chat platforms like Telegram, Discord, Slack.
# UCI config package name used by uci_get/uci_set below.
CONFIG="openclaw"
# npm project directory created by cmd_install.
INSTALL_DIR="/srv/openclaw"
# PID of the foreground `openclaw serve` process (written by cmd_service_run).
PID_FILE="/var/run/openclaw.pid"
# Print the CLI help text. The heredoc delimiter is quoted ('EOF') so no
# variable expansion happens inside the text.
usage() {
    cat <<'EOF'
Usage: openclawctl <command>
Commands:
  install             Install OpenClaw via npm
  update              Update OpenClaw to latest version
  check               Run prerequisite checks
  status              Show service status
  logs                Show OpenClaw logs (use -f to follow)
Configuration:
  configure           Interactive setup (API keys, integrations)
  set-provider        Set LLM provider (anthropic/openai/ollama)
  set-api-key         Set LLM API key
  test-api            Test LLM API connection
Integrations:
  enable <name>       Enable integration (telegram/discord/slack/email)
  disable <name>      Disable integration
  list-integrations   List configured integrations
Service Control:
  service-run         Internal: run under procd
  service-stop        Stop service
API Endpoints (default port 3333):
  /api/chat           - Send message to AI
  /api/status         - Service status
  /api/integrations   - List active integrations
Configuration: /etc/config/openclaw
EOF
}
# Abort unless running as root (uci writes and service control need it).
require_root() { [ "$(id -u)" -eq 0 ] || { echo "Root required" >&2; exit 1; }; }
# Log helpers: echo to the console and mirror to syslog via logger(1).
log_info() { echo "[INFO] $*"; logger -t openclaw "$*"; }
log_warn() { echo "[WARN] $*" >&2; logger -t openclaw -p warning "$*"; }
log_error() { echo "[ERROR] $*" >&2; logger -t openclaw -p err "$*"; }
# uci_get exits non-zero when the option is missing (thanks to -q);
# callers rely on that status to fall back with `|| echo <default>`.
uci_get() { uci -q get ${CONFIG}.$1; }
# Set an option and persist it immediately.
uci_set() { uci set ${CONFIG}.$1="$2" && uci commit ${CONFIG}; }
# Load configuration
# Populate the globals (port, host, data_path, log_level, llm_type,
# api_key, ollama_url, model) from UCI, with fallback defaults.
# Note: the `|| echo` fallback only fires when the option is missing
# entirely; an option that is set but empty stays empty.
load_config() {
    port="$(uci_get main.port || echo 3333)"
    host="$(uci_get main.host || echo 0.0.0.0)"
    data_path="$(uci_get main.data_path || echo /srv/openclaw)"
    log_level="$(uci_get main.log_level || echo info)"
    # LLM settings
    llm_type="$(uci_get llm.type || echo anthropic)"
    api_key="$(uci_get llm.api_key || echo '')"
    ollama_url="$(uci_get llm.ollama_url || echo 'http://127.0.0.1:11434')"
    model="$(uci_get llm.model || echo 'claude-sonnet-4-20250514')"
    # Ensure paths exist
    [ -d "$data_path" ] || mkdir -p "$data_path"
}
# =============================================================================
# PREREQUISITES
# =============================================================================
# True when a node binary is on PATH.
has_node() { command -v node >/dev/null 2>&1; }
# True when npm is on PATH.
has_npm() { command -v npm >/dev/null 2>&1; }
# Print the installed Node.js version without the leading "v" (empty if absent).
get_node_version() { node --version 2>/dev/null | sed 's/^v//'; }
# Succeed when the installed Node.js major version is 18 or newer.
check_node_version() {
    local major
    major="$(get_node_version | cut -d. -f1)"
    [ "$major" -ge 18 ] 2>/dev/null
}
# =============================================================================
# INSTALLATION
# =============================================================================
# Install OpenClaw via npm after verifying Node.js/npm are present.
# Exits non-zero when a hard prerequisite is missing or the npm install fails.
cmd_install() {
    require_root
    load_config
    log_info "Installing OpenClaw..."
    # Check prerequisites
    if ! has_node; then
        log_error "Node.js not found. Install with: opkg install node"
        exit 1
    fi
    if ! check_node_version; then
        log_warn "Node.js 18+ recommended. Current: $(get_node_version)"
    fi
    if ! has_npm; then
        log_error "npm not found. Install with: opkg install node-npm"
        exit 1
    fi
    # Create install directory
    mkdir -p "$INSTALL_DIR"
    cd "$INSTALL_DIR" || exit 1
    # Initialize npm project if needed
    # NOTE(review): a local package.json is initialized here, but the install
    # below is global (-g), so this local project is never populated — confirm
    # whether a local (non -g) install was intended instead.
    if [ ! -f "$INSTALL_DIR/package.json" ]; then
        log_info "Initializing npm project..."
        npm init -y >/dev/null 2>&1
    fi
    # Install OpenClaw
    log_info "Installing OpenClaw via npm..."
    log_info "This may take a few minutes..."
    if npm install -g openclaw 2>&1; then
        log_info "OpenClaw installed successfully!"
    else
        log_error "Failed to install OpenClaw"
        exit 1
    fi
    log_info ""
    log_info "Next steps:"
    log_info " 1. Configure API key: openclawctl set-api-key <your-key>"
    log_info " 2. Test connection: openclawctl test-api"
    log_info " 3. Enable service: uci set openclaw.main.enabled=1 && uci commit"
    log_info " 4. Start: /etc/init.d/openclaw start"
    log_info ""
    log_info "API will be available at: http://<router-ip>:$port"
}
# Update the globally installed OpenClaw package; restart the service
# afterwards when it is enabled in UCI.
cmd_update() {
    require_root
    log_info "Updating OpenClaw..."
    npm update -g openclaw
    [ "$(uci_get main.enabled)" != "1" ] && return 0
    log_info "Restarting service..."
    /etc/init.d/openclaw restart
}
# Print a human-readable report of everything OpenClaw needs to run:
# Node.js/npm, the OpenClaw binary, the configured LLM provider, Ollama
# reachability (when selected) and free storage. Read-only.
cmd_check() {
    load_config
    echo "=== OpenClaw Prerequisite Check ==="
    echo ""
    # Node.js
    if has_node; then
        local version=$(get_node_version)
        if check_node_version; then
            echo "[OK] Node.js: v$version"
        else
            echo "[WARN] Node.js: v$version (18+ recommended)"
        fi
    else
        echo "[FAIL] Node.js not found"
        echo " Install: opkg install node"
    fi
    # npm
    if has_npm; then
        echo "[OK] npm: $(npm --version 2>/dev/null)"
    else
        echo "[FAIL] npm not found"
        echo " Install: opkg install node-npm"
    fi
    echo ""
    # OpenClaw installation
    if command -v openclaw >/dev/null 2>&1; then
        echo "[OK] OpenClaw installed"
        openclaw --version 2>/dev/null || true
    else
        echo "[INFO] OpenClaw not installed"
        echo " Run: openclawctl install"
    fi
    echo ""
    # LLM provider
    echo "=== LLM Configuration ==="
    echo "Provider: $llm_type"
    echo "Model: $model"
    # NOTE(review): the second test is redundant — [ -n "$api_key" ] already
    # implies it is not the empty string.
    if [ -n "$api_key" ] && [ "$api_key" != "" ]; then
        echo "API Key: ***configured***"
    else
        if [ "$llm_type" = "ollama" ]; then
            echo "API Key: (not required for Ollama)"
            echo "Ollama URL: $ollama_url"
        else
            echo "API Key: NOT SET"
            echo " Configure with: openclawctl set-api-key <key>"
        fi
    fi
    echo ""
    # Check Ollama if configured
    if [ "$llm_type" = "ollama" ]; then
        if wget -q -O /dev/null "$ollama_url" 2>/dev/null; then
            echo "[OK] Ollama reachable at $ollama_url"
        else
            echo "[WARN] Ollama not responding at $ollama_url"
        fi
    fi
    # Storage
    local storage_avail=$(df -h "$data_path" 2>/dev/null | tail -1 | awk '{print $4}')
    echo "Storage available: $storage_avail (at $data_path)"
}
# =============================================================================
# CONFIGURATION
# =============================================================================
# Interactive provider setup: pick the LLM backend, then prompt for the
# matching credential (API key, or Ollama URL). Writes llm.* via uci_set.
# Fix: every `read` now uses -r so backslashes in the typed input are kept
# verbatim (previously only the API-key prompts used -r — ShellCheck SC2162).
cmd_configure() {
    require_root
    load_config
    echo "=== OpenClaw Configuration ==="
    echo ""
    echo "Select LLM provider:"
    echo " 1) Anthropic (Claude)"
    echo " 2) OpenAI (GPT-4)"
    echo " 3) Ollama (Local)"
    echo ""
    printf "Choice [1-3]: "
    read -r choice
    case "$choice" in
        1)
            uci_set llm.type 'anthropic'
            uci_set llm.model 'claude-sonnet-4-20250514'
            echo "Selected: Anthropic Claude"
            echo ""
            printf "Enter Anthropic API key: "
            read -r api_key
            [ -n "$api_key" ] && uci_set llm.api_key "$api_key"
            ;;
        2)
            uci_set llm.type 'openai'
            uci_set llm.model 'gpt-4o'
            echo "Selected: OpenAI GPT-4"
            echo ""
            printf "Enter OpenAI API key: "
            read -r api_key
            [ -n "$api_key" ] && uci_set llm.api_key "$api_key"
            ;;
        3)
            uci_set llm.type 'ollama'
            uci_set llm.model 'mistral'
            echo "Selected: Ollama (Local)"
            echo ""
            printf "Enter Ollama URL [http://127.0.0.1:11434]: "
            read -r url
            [ -n "$url" ] && uci_set llm.ollama_url "$url"
            ;;
        *)
            echo "Invalid choice"
            return 1
            ;;
    esac
    echo ""
    echo "Configuration saved."
    echo "Test with: openclawctl test-api"
}
# Set the LLM provider non-interactively and pin its default model.
# Usage: openclawctl set-provider <anthropic|openai|ollama>
cmd_set_provider() {
    require_root
    local provider="$1" default_model label
    case "$provider" in
        anthropic) default_model='claude-sonnet-4-20250514'; label='Anthropic' ;;
        openai)    default_model='gpt-4o';                   label='OpenAI' ;;
        ollama)    default_model='mistral';                  label='Ollama' ;;
        *)
            echo "Usage: openclawctl set-provider <anthropic|openai|ollama>"
            return 1
            ;;
    esac
    uci_set llm.type "$provider"
    uci_set llm.model "$default_model"
    echo "Provider set to $label"
}
# Store the LLM API key in UCI.
# Usage: openclawctl set-api-key <api-key>
cmd_set_api_key() {
    require_root
    [ -n "$1" ] || {
        echo "Usage: openclawctl set-api-key <api-key>"
        return 1
    }
    uci_set llm.api_key "$1"
    echo "API key configured"
}
# Fire a minimal "Hi" request at the configured LLM backend to validate
# credentials / reachability, using wget (the package depends on wget-ssl).
# NOTE(review): wget -q suppresses error output, so on failure "$response"
# is typically empty and the logged error message shows nothing useful.
cmd_test_api() {
    load_config
    echo "Testing LLM API connection..."
    echo "Provider: $llm_type"
    echo "Model: $model"
    echo ""
    case "$llm_type" in
        anthropic)
            if [ -z "$api_key" ]; then
                log_error "API key not configured"
                return 1
            fi
            # Test Anthropic API
            local response=$(wget -q -O - \
                --header="x-api-key: $api_key" \
                --header="anthropic-version: 2023-06-01" \
                --header="content-type: application/json" \
                --post-data='{"model":"'"$model"'","max_tokens":10,"messages":[{"role":"user","content":"Hi"}]}' \
                "https://api.anthropic.com/v1/messages" 2>&1)
            # A successful Messages API reply contains a "content" block.
            if echo "$response" | grep -q '"content"'; then
                echo "[OK] Anthropic API working"
            else
                log_error "API test failed: $response"
                return 1
            fi
            ;;
        openai)
            if [ -z "$api_key" ]; then
                log_error "API key not configured"
                return 1
            fi
            local response=$(wget -q -O - \
                --header="Authorization: Bearer $api_key" \
                --header="content-type: application/json" \
                --post-data='{"model":"'"$model"'","max_tokens":10,"messages":[{"role":"user","content":"Hi"}]}' \
                "https://api.openai.com/v1/chat/completions" 2>&1)
            # Chat completions respond with a "choices" array on success.
            if echo "$response" | grep -q '"choices"'; then
                echo "[OK] OpenAI API working"
            else
                log_error "API test failed: $response"
                return 1
            fi
            ;;
        ollama)
            # No API key needed; probe the HTTP root first, then the tag list.
            if wget -q -O /dev/null "$ollama_url" 2>/dev/null; then
                echo "[OK] Ollama reachable"
                # Check if model exists
                local models=$(wget -q -O - "$ollama_url/api/tags" 2>/dev/null)
                if echo "$models" | grep -q "$model"; then
                    echo "[OK] Model '$model' available"
                else
                    log_warn "Model '$model' not found. Pull with: ollamactl pull $model"
                fi
            else
                log_error "Ollama not reachable at $ollama_url"
                return 1
            fi
            ;;
    esac
}
# =============================================================================
# INTEGRATIONS
# =============================================================================
# Print the ON/OFF state of every known integration section.
cmd_list_integrations() {
    load_config
    echo "=== OpenClaw Integrations ==="
    echo ""
    local svc
    for svc in telegram discord slack email calendar; do
        if [ "$(uci_get $svc.enabled || echo 0)" = "1" ]; then
            echo " [ON] $svc"
        else
            echo " [OFF] $svc"
        fi
    done
    echo ""
    echo "Enable with: openclawctl enable <integration>"
}
# Turn an integration on in UCI. Usage: openclawctl enable <name>
cmd_enable_integration() {
    require_root
    local name="$1"
    # Validate first; bail out on anything we do not know.
    case "$name" in
        telegram|discord|slack|email|calendar) ;;
        *)
            echo "Unknown integration: $name"
            echo "Available: telegram, discord, slack, email, calendar"
            return 1
            ;;
    esac
    uci_set $name.enabled '1'
    echo "Enabled: $name"
    echo "Configure tokens/credentials in /etc/config/openclaw"
}
# Turn an integration off in UCI. Usage: openclawctl disable <name>
cmd_disable_integration() {
    require_root
    local name="$1"
    case "$name" in
        telegram|discord|slack|email|calendar) ;;
        *)
            echo "Unknown integration: $name"
            return 1
            ;;
    esac
    uci_set $name.enabled '0'
    echo "Disabled: $name"
}
# =============================================================================
# SERVICE CONTROL
# =============================================================================
# Report installation state, process state, API health and the active
# configuration. Read-only.
cmd_status() {
    load_config
    echo "=== OpenClaw Status ==="
    echo ""
    # Check if installed
    if command -v openclaw >/dev/null 2>&1; then
        echo "Installation: INSTALLED"
        openclaw --version 2>/dev/null || true
    else
        echo "Installation: NOT INSTALLED"
        echo "Run: openclawctl install"
        return 0
    fi
    echo ""
    # Check if running: prefer the PID file, fall back to a process-name scan.
    if [ -f "$PID_FILE" ] && kill -0 "$(cat "$PID_FILE")" 2>/dev/null; then
        echo "Service: RUNNING (PID: $(cat "$PID_FILE"))"
    else
        if pgrep -f "openclaw" >/dev/null 2>&1; then
            echo "Service: RUNNING"
        else
            echo "Service: STOPPED"
        fi
    fi
    echo ""
    # API health check against the loopback listener.
    if wget -q -O /dev/null "http://127.0.0.1:$port/api/status" 2>/dev/null; then
        echo "API: HEALTHY (port $port)"
    else
        echo "API: NOT RESPONDING"
    fi
    echo ""
    # Configuration
    echo "=== Configuration ==="
    echo "LLM Provider: $llm_type"
    echo "Model: $model"
    echo "Port: $port"
    echo "Data: $data_path"
}
# Show OpenClaw syslog entries; `-f` follows the log instead of tailing it.
cmd_logs() {
    case "$1" in
        -f) logread -f -e openclaw ;;
        *)  logread -e openclaw | tail -100 ;;
    esac
}
# Foreground entry point used by the procd init script.
# Exports OPENCLAW_*/provider environment variables (presumably read by the
# openclaw process — verify against its docs), records our PID, then exec's
# `openclaw serve`; exec replaces this shell, so the recorded PID stays
# valid for the running server.
cmd_service_run() {
    require_root
    load_config
    # Build environment
    export OPENCLAW_PORT="$port"
    export OPENCLAW_HOST="$host"
    export OPENCLAW_DATA_DIR="$data_path"
    export OPENCLAW_LOG_LEVEL="$log_level"
    # LLM config: only the variables the selected provider needs.
    case "$llm_type" in
        anthropic)
            export ANTHROPIC_API_KEY="$api_key"
            export OPENCLAW_MODEL="$model"
            ;;
        openai)
            export OPENAI_API_KEY="$api_key"
            export OPENCLAW_MODEL="$model"
            ;;
        ollama)
            export OLLAMA_HOST="$ollama_url"
            export OPENCLAW_MODEL="$model"
            ;;
    esac
    # Store PID (this PID survives the exec below)
    echo $$ > "$PID_FILE"
    # Run OpenClaw
    log_info "Starting OpenClaw on $host:$port"
    exec openclaw serve --port "$port" --host "$host"
}
# Stop the service: kill the recorded PID when still alive, remove the PID
# file, then sweep any stray `openclaw serve` processes.
cmd_service_stop() {
    require_root
    if [ -f "$PID_FILE" ]; then
        local pid="$(cat "$PID_FILE")"
        kill -0 "$pid" 2>/dev/null && {
            log_info "Stopping OpenClaw (PID: $pid)"
            kill "$pid"
        }
        rm -f "$PID_FILE"
    fi
    # Best-effort cleanup of anything the PID file missed.
    pkill -f "openclaw serve" 2>/dev/null || true
}
# Main Entry Point
# The first argument selects the command; remaining args are forwarded
# untouched. No/help argument prints usage; unknown commands exit 1.
case "${1:-}" in
    install) shift; cmd_install "$@" ;;
    update) shift; cmd_update "$@" ;;
    check) shift; cmd_check "$@" ;;
    status) shift; cmd_status "$@" ;;
    logs) shift; cmd_logs "$@" ;;
    configure) shift; cmd_configure "$@" ;;
    set-provider) shift; cmd_set_provider "$@" ;;
    set-api-key) shift; cmd_set_api_key "$@" ;;
    test-api) shift; cmd_test_api "$@" ;;
    list-integrations) shift; cmd_list_integrations "$@" ;;
    enable) shift; cmd_enable_integration "$@" ;;
    disable) shift; cmd_disable_integration "$@" ;;
    service-run) shift; cmd_service_run "$@" ;;
    service-stop) shift; cmd_service_stop "$@" ;;
    help|--help|-h|'') usage ;;
    *) echo "Unknown command: $1" >&2; usage >&2; exit 1 ;;
esac