Matrix Homeserver (Conduit): - E2EE mesh messaging using Conduit v0.10.12 in LXC container - matrixctl CLI: install/uninstall, user/room management, federation - luci-app-matrix: status cards, user form, emancipate, mesh publish - RPCD backend with 17 methods - Identity (DID) integration and P2P mesh publication SaaS Relay CDN Caching & Session Replay: - CDN cache profiles: minimal, gandalf (default), aggressive - Session replay modes: shared, per_user, master - saasctl cache/session commands for management - Enhanced mitmproxy addon (415 lines) with response caching Media Services Hub Dashboard: - Unified dashboard at /admin/services/media-hub - Category-organized cards (streaming, conferencing, apps, etc.) - Service status indicators with start/stop/restart controls - RPCD backend querying 8 media services Also includes: - HexoJS static upload workflow and multi-user auth - Jitsi config.js Promise handling fix - Feed package updates Co-Authored-By: Claude Opus 4.5 <noreply@anthropic.com>
3769 lines
95 KiB
Bash
Executable File
3769 lines
95 KiB
Bash
Executable File
#!/bin/sh
|
|
# SPDX-License-Identifier: MIT
|
|
# Hexo CMS RPCD backend
|
|
# Copyright (C) 2025 CyberMind.fr
|
|
|
|
. /lib/functions.sh
|
|
. /usr/share/libubox/jshn.sh
|
|
|
|
# UCI package name used by the uci_get/uci_set helpers below.
CONFIG="hexojs"
# Container control CLI used for generate/clean/deploy operations.
HEXOCTL="/usr/sbin/hexoctl"
# Default site data root; overridable via UCI option main.data_path.
DATA_PATH="/srv/hexojs"
|
|
|
# Helper functions
|
|
# Read one option from the hexojs UCI package; -q prints nothing and
# returns non-zero when the option is unset.
uci_get() { uci -q get ${CONFIG}.$1; }
# Set one option and commit immediately (commit only runs if set succeeded).
uci_set() { uci set ${CONFIG}.$1="$2" && uci commit ${CONFIG}; }
|
|
|
|
# True (exit 0) when a process whose command line matches "lxc.*hexojs"
# exists, i.e. the hexojs LXC container appears to be up.
# NOTE(review): pgrep -f matches anywhere on the command line, so an
# unrelated process mentioning that string would false-positive — confirm
# the pattern is tight enough for this system.
is_running() {
    pgrep -f "lxc.*hexojs" >/dev/null 2>&1
}
|
|
|
|
# Resolve the on-disk path of the active Hexo site (<data_path>/site).
# Fix: `local v=$(cmd) || v=default` never takes the fallback because
# `local` itself returns 0 regardless of the command substitution, so an
# unset main.data_path used to yield "/site". Declaration and assignment
# are split and the default applied on empty output instead.
get_site_path() {
    local data_path
    data_path=$(uci_get main.data_path)
    [ -n "$data_path" ] || data_path="$DATA_PATH"
    echo "$data_path/site"
}
|
|
|
|
# ============================================
|
|
# Status Methods
|
|
# ============================================
|
|
|
|
# rpcd method: overall status — enabled/running flags, HTTP port, active
# site name, data path, post/draft counts and per-site metadata.
# Emits a JSON object on stdout via jshn.
# Fix: every `local v=$(uci_get ...) || v=default` here never applied its
# default (`local` always returns 0); declarations are now split from
# assignments with an explicit empty-value fallback.
get_status() {
    local data_path site_path enabled http_port active_site

    data_path=$(uci_get main.data_path)
    [ -n "$data_path" ] || data_path="$DATA_PATH"
    site_path="$data_path/site"
    enabled=$(uci_get main.enabled)
    [ -n "$enabled" ] || enabled="0"
    http_port=$(uci_get main.http_port)
    [ -n "$http_port" ] || http_port="4000"
    active_site=$(uci_get main.active_site)
    [ -n "$active_site" ] || active_site="default"

    json_init

    json_add_boolean "enabled" "$enabled"
    json_add_boolean "running" "$(is_running && echo 1 || echo 0)"
    json_add_int "http_port" "$http_port"
    json_add_string "active_site" "$active_site"
    json_add_string "data_path" "$data_path"

    # Site info
    if [ -d "$site_path" ]; then
        json_add_boolean "site_exists" 1

        local post_count=0
        local draft_count=0

        # Recursive count: posts may live in per-category subdirectories
        [ -d "$site_path/source/_posts" ] && post_count=$(find "$site_path/source/_posts" -type f -name "*.md" ! -name "index.md" 2>/dev/null | wc -l)
        [ -d "$site_path/source/_drafts" ] && draft_count=$(find "$site_path/source/_drafts" -type f -name "*.md" ! -name "index.md" 2>/dev/null | wc -l)

        json_add_int "post_count" "$post_count"
        json_add_int "draft_count" "$draft_count"

        # Per-site presentation settings (same split-assignment fix)
        local title author theme
        title=$(uci_get ${active_site}.title)
        [ -n "$title" ] || title="Blog"
        author=$(uci_get ${active_site}.author)
        [ -n "$author" ] || author="Admin"
        theme=$(uci_get ${active_site}.theme)
        [ -n "$theme" ] || theme="cybermind"

        json_add_object "site"
        json_add_string "title" "$title"
        json_add_string "author" "$author"
        json_add_string "theme" "$theme"
        json_close_object
    else
        json_add_boolean "site_exists" 0
        json_add_int "post_count" 0
        json_add_int "draft_count" 0
    fi

    json_dump
}
|
|
|
|
# rpcd method: content statistics as JSON — posts, drafts, pages,
# categories, tags, media. Categories/tags are gathered from post front
# matter, falling back to the post's subdirectory name for categories.
# "pages" is currently never computed and always reports 0.
# Fix: `read` without -r and IFS= mangled file names containing
# backslashes or leading whitespace.
get_site_stats() {
    local site_path
    site_path=$(get_site_path)

    json_init

    if [ ! -d "$site_path" ]; then
        json_add_int "posts" 0
        json_add_int "drafts" 0
        json_add_int "pages" 0
        json_add_int "categories" 0
        json_add_int "tags" 0
        json_add_int "media" 0
        json_dump
        return
    fi

    local post_count=0
    local draft_count=0
    local page_count=0
    local media_count=0
    local tmp_posts="/tmp/hexojs_stats_$$"

    # Recursive counts (posts may live in per-category subdirectories)
    [ -d "$site_path/source/_posts" ] && post_count=$(find "$site_path/source/_posts" -type f -name "*.md" ! -name "index.md" 2>/dev/null | wc -l)
    [ -d "$site_path/source/_drafts" ] && draft_count=$(find "$site_path/source/_drafts" -type f -name "*.md" ! -name "index.md" 2>/dev/null | wc -l)
    [ -d "$site_path/source/images" ] && media_count=$(find "$site_path/source/images" -type f 2>/dev/null | wc -l)

    # Accumulate raw category/tag values (one per line) for uniq-counting
    local categories=""
    local tags=""

    if [ -d "$site_path/source/_posts" ]; then
        find "$site_path/source/_posts" -type f -name "*.md" ! -name "index.md" 2>/dev/null > "$tmp_posts"
        # IFS= read -r: keep file names with spaces/backslashes intact
        while IFS= read -r f; do
            [ -f "$f" ] || continue
            local cat=$(grep -m1 "^categories:" "$f" 2>/dev/null | sed 's/^categories:[[:space:]]*//' | tr -d '[]' | tr ',' '\n')
            local tag=$(grep -m1 "^tags:" "$f" 2>/dev/null | sed 's/^tags:[[:space:]]*//' | tr -d '[]' | tr ',' '\n')
            # Fall back to the post's subdirectory as its category
            if [ -z "$cat" ]; then
                local rel_path="${f#$site_path/source/_posts/}"
                local cat_dir=$(dirname "$rel_path")
                [ "$cat_dir" != "." ] && cat="$cat_dir"
            fi
            categories="$categories
$cat"
            tags="$tags
$tag"
        done < "$tmp_posts"
        rm -f "$tmp_posts"
    fi

    local cat_count=$(echo "$categories" | grep -v '^$' | sort -u | wc -l)
    local tag_count=$(echo "$tags" | grep -v '^$' | sort -u | wc -l)

    json_add_int "posts" "$post_count"
    json_add_int "drafts" "$draft_count"
    json_add_int "pages" "$page_count"
    json_add_int "categories" "$cat_count"
    json_add_int "tags" "$tag_count"
    json_add_int "media" "$media_count"

    json_dump
}
|
|
|
|
# ============================================
|
|
# Post Methods
|
|
# ============================================
|
|
|
|
# rpcd method: list every post as a JSON array.
# Walks source/_posts recursively so posts grouped into per-category
# subdirectories are included; front matter fields are parsed with
# grep/sed and sanitized (quotes/CR stripped, title/excerpt truncated)
# so jshn can emit them safely.
# Fix: `read` without -r/IFS= mangled file names with backslashes or
# leading whitespace.
list_posts() {
    local site_path
    site_path=$(get_site_path)
    local posts_dir="$site_path/source/_posts"
    local tmp_posts="/tmp/hexojs_posts_$$"

    json_init
    json_add_array "posts"

    if [ -d "$posts_dir" ]; then
        # Recursively find all markdown files (handles subdirectory categories)
        find "$posts_dir" -type f -name "*.md" ! -name "index.md" 2>/dev/null > "$tmp_posts"

        while IFS= read -r f; do
            [ -f "$f" ] || continue

            local filename=$(basename "$f")
            local slug="${filename%.md}"

            # Relative path from _posts gives the category subdirectory (if any)
            local rel_path="${f#$posts_dir/}"
            local category_dir=$(dirname "$rel_path")
            [ "$category_dir" = "." ] && category_dir=""

            # Parse front matter (sanitize for JSON)
            local title=$(grep -m1 "^title:" "$f" 2>/dev/null | sed 's/^title:[[:space:]]*//' | tr -d '"' | tr -d "'" | tr -d '\r')
            local date=$(grep -m1 "^date:" "$f" 2>/dev/null | sed 's/^date:[[:space:]]*//' | tr -d '\r')
            local categories=$(grep -m1 "^categories:" "$f" 2>/dev/null | sed 's/^categories:[[:space:]]*//' | tr -d '[]' | tr -d '\r')
            local tags=$(grep -m1 "^tags:" "$f" 2>/dev/null | sed 's/^tags:[[:space:]]*//' | tr -d '[]' | tr -d '\r')
            local excerpt=$(grep -m1 "^excerpt:" "$f" 2>/dev/null | sed 's/^excerpt:[[:space:]]*//' | tr -d '"' | tr -d '\r' | cut -c1-200)

            # Use directory as category if not specified in front matter
            [ -z "$categories" ] && [ -n "$category_dir" ] && categories="$category_dir"

            # Truncate overly long titles
            title=$(echo "$title" | cut -c1-150)

            json_add_object
            json_add_string "slug" "$slug"
            json_add_string "title" "${title:-$slug}"
            json_add_string "date" "$date"
            json_add_string "categories" "$categories"
            json_add_string "tags" "$tags"
            json_add_string "excerpt" "$excerpt"
            json_add_string "path" "$f"
            json_add_string "category_dir" "$category_dir"
            json_close_object
        done < "$tmp_posts"

        rm -f "$tmp_posts"
    fi

    json_close_array
    json_dump
}
|
|
|
|
# rpcd method: fetch one post (front matter + body) as JSON.
# Input (stdin JSON): slug and/or path; an existing "path" wins, otherwise
# the slug is looked up first at the _posts root, then recursively in
# per-category subdirectories.
get_post() {
    read input
    json_load "$input"
    json_get_var slug slug
    json_get_var path path ""

    json_init

    if [ -z "$slug" ] && [ -z "$path" ]; then
        json_add_boolean "success" 0
        json_add_string "error" "Slug or path required"
        json_dump
        return
    fi

    local site_path=$(get_site_path)
    local post_file=""

    # If path provided, use it directly
    if [ -n "$path" ] && [ -f "$path" ]; then
        post_file="$path"
    else
        # Search for post file (first in root, then recursively)
        post_file="$site_path/source/_posts/${slug}.md"
        if [ ! -f "$post_file" ]; then
            # Search recursively in subdirectories
            post_file=$(find "$site_path/source/_posts" -type f -name "${slug}.md" 2>/dev/null | head -1)
        fi
    fi

    if [ -z "$post_file" ] || [ ! -f "$post_file" ]; then
        json_add_boolean "success" 0
        json_add_string "error" "Post not found: $slug"
        json_dump
        return
    fi

    # Parse front matter fields (first occurrence wins; quotes stripped)
    local title=$(grep -m1 "^title:" "$post_file" | sed 's/^title:[[:space:]]*//' | tr -d '"' | tr -d "'")
    local date=$(grep -m1 "^date:" "$post_file" | sed 's/^date:[[:space:]]*//')
    local categories=$(grep -m1 "^categories:" "$post_file" | sed 's/^categories:[[:space:]]*//' | tr -d '[]')
    local tags=$(grep -m1 "^tags:" "$post_file" | sed 's/^tags:[[:space:]]*//' | tr -d '[]')
    local cover=$(grep -m1 "^cover:" "$post_file" | sed 's/^cover:[[:space:]]*//')
    local excerpt=$(grep -m1 "^excerpt:" "$post_file" | sed 's/^excerpt:[[:space:]]*//' | tr -d '"')

    # Body = all lines outside the first ---…--- front-matter range
    # (awk skips the range), then tail -n +2 drops the first remaining
    # line — normally the blank separator after the closing ---.
    # NOTE(review): if a post has no blank line after the closing ---,
    # this drops the first content line — confirm against the templates.
    local content=$(awk '/^---$/,/^---$/{next} {print}' "$post_file" | tail -n +2)

    json_add_boolean "success" 1
    json_add_string "slug" "$slug"
    json_add_string "title" "$title"
    json_add_string "date" "$date"
    json_add_string "categories" "$categories"
    json_add_string "tags" "$tags"
    json_add_string "cover" "$cover"
    json_add_string "excerpt" "$excerpt"
    json_add_string "content" "$content"
    json_add_string "path" "$post_file"

    json_dump
}
|
|
|
|
# rpcd method: create a new post from JSON input on stdin.
# Input: title (required), content, categories, tags, excerpt.
# The slug is derived from the title: lowercased, spaces to dashes, then
# everything but [a-z0-9-] removed.
# Fix: a title with no ASCII alphanumerics (e.g. fully non-Latin) used to
# produce an empty slug and silently create a file literally named ".md";
# that case is now rejected with an explicit error.
create_post() {
    read input
    json_load "$input"
    json_get_var title title
    json_get_var content content
    json_get_var categories categories
    json_get_var tags tags
    json_get_var excerpt excerpt

    json_init

    if [ -z "$title" ]; then
        json_add_boolean "success" 0
        json_add_string "error" "Title required"
        json_dump
        return
    fi

    # Generate slug from title
    local slug=$(echo "$title" | tr '[:upper:]' '[:lower:]' | tr ' ' '-' | tr -cd 'a-z0-9-')
    local date=$(date "+%Y-%m-%d %H:%M:%S")

    # Guard against an empty slug (title had no usable characters)
    if [ -z "$slug" ]; then
        json_add_boolean "success" 0
        json_add_string "error" "Title must contain at least one ASCII letter or digit"
        json_dump
        return
    fi

    local site_path=$(get_site_path)
    local posts_dir="$site_path/source/_posts"
    local post_file="$posts_dir/${slug}.md"

    [ -d "$posts_dir" ] || mkdir -p "$posts_dir"

    # Refuse to overwrite an existing post
    if [ -f "$post_file" ]; then
        json_add_boolean "success" 0
        json_add_string "error" "Post with this slug already exists"
        json_dump
        return
    fi

    # Write front matter + body.
    # NOTE(review): a content line consisting solely of "EOF" would end
    # the heredoc early — acceptable for trusted admin input, flagging it.
    cat > "$post_file" << EOF
---
title: $title
date: $date
categories: [$categories]
tags: [$tags]
excerpt: $excerpt
---

$content
EOF

    json_add_boolean "success" 1
    json_add_string "slug" "$slug"
    json_add_string "path" "$post_file"
    json_add_string "message" "Post created"

    json_dump
}
|
|
|
|
# rpcd method: rewrite an existing post's front matter and body.
# Input (stdin JSON): slug (required), title, content, categories, tags,
# excerpt, cover. The original publication date is preserved.
# Fix: posts stored in per-category subdirectories could be read via
# get_post but never updated (only the _posts root was checked); the same
# recursive find fallback get_post uses is applied here for consistency.
update_post() {
    read input
    json_load "$input"
    json_get_var slug slug
    json_get_var title title
    json_get_var content content
    json_get_var categories categories
    json_get_var tags tags
    json_get_var excerpt excerpt
    json_get_var cover cover

    json_init

    if [ -z "$slug" ]; then
        json_add_boolean "success" 0
        json_add_string "error" "Slug required"
        json_dump
        return
    fi

    local site_path=$(get_site_path)
    local post_file="$site_path/source/_posts/${slug}.md"

    # Fall back to a recursive search (consistent with get_post)
    if [ ! -f "$post_file" ]; then
        post_file=$(find "$site_path/source/_posts" -type f -name "${slug}.md" 2>/dev/null | head -1)
    fi

    if [ -z "$post_file" ] || [ ! -f "$post_file" ]; then
        json_add_boolean "success" 0
        json_add_string "error" "Post not found"
        json_dump
        return
    fi

    # Keep the original publication date
    local date=$(grep -m1 "^date:" "$post_file" | sed 's/^date:[[:space:]]*//')

    # Rewrite post file
    cat > "$post_file" << EOF
---
title: $title
date: $date
categories: [$categories]
tags: [$tags]
cover: $cover
excerpt: $excerpt
---

$content
EOF

    json_add_boolean "success" 1
    json_add_string "message" "Post updated"

    json_dump
}
|
|
|
|
# rpcd method: delete a post by slug. Input (stdin JSON): slug.
# Fix: posts in per-category subdirectories could be listed/read but not
# deleted (only the _posts root was checked); a recursive find fallback
# is added, consistent with get_post.
delete_post() {
    read input
    json_load "$input"
    json_get_var slug slug

    json_init

    if [ -z "$slug" ]; then
        json_add_boolean "success" 0
        json_add_string "error" "Slug required"
        json_dump
        return
    fi

    local site_path=$(get_site_path)
    local post_file="$site_path/source/_posts/${slug}.md"

    # Fall back to a recursive search (consistent with get_post)
    if [ ! -f "$post_file" ]; then
        post_file=$(find "$site_path/source/_posts" -type f -name "${slug}.md" 2>/dev/null | head -1)
    fi

    if [ -z "$post_file" ] || [ ! -f "$post_file" ]; then
        json_add_boolean "success" 0
        json_add_string "error" "Post not found"
        json_dump
        return
    fi

    rm -f "$post_file"

    json_add_boolean "success" 1
    json_add_string "message" "Post deleted"

    json_dump
}
|
|
|
|
# rpcd method: publish a draft — move _drafts/<slug>.md to _posts/,
# stamping a date into the front matter if none is present.
# Input (stdin JSON): slug.
publish_post() {
    read input
    json_load "$input"
    json_get_var slug slug

    json_init

    if [ -z "$slug" ]; then
        json_add_boolean "success" 0
        json_add_string "error" "Slug required"
        json_dump
        return
    fi

    local site_path=$(get_site_path)
    local draft_file="$site_path/source/_drafts/${slug}.md"
    local posts_dir="$site_path/source/_posts"
    local post_file="$posts_dir/${slug}.md"

    if [ ! -f "$draft_file" ]; then
        json_add_boolean "success" 0
        json_add_string "error" "Draft not found"
        json_dump
        return
    fi

    [ -d "$posts_dir" ] || mkdir -p "$posts_dir"

    # Add date if not present: append a date line right after "title:"
    if ! grep -q "^date:" "$draft_file"; then
        local date=$(date "+%Y-%m-%d %H:%M:%S")
        sed -i "/^title:/a date: $date" "$draft_file"
    fi

    # NOTE(review): an existing _posts/<slug>.md is silently overwritten
    # by this move — confirm that is the intended publish semantics.
    mv "$draft_file" "$post_file"

    json_add_boolean "success" 1
    json_add_string "message" "Draft published"
    json_add_string "path" "$post_file"

    json_dump
}
|
|
|
|
# rpcd method: enumerate draft posts (source/_drafts/*.md) as a JSON
# array of {slug, title, path}; the slug doubles as the title fallback.
list_drafts() {
    local drafts_dir
    drafts_dir="$(get_site_path)/source/_drafts"

    json_init
    json_add_array "drafts"

    if [ -d "$drafts_dir" ]; then
        local draft name heading
        for draft in "$drafts_dir"/*.md; do
            # Skip the literal glob when the directory holds no .md files
            [ -f "$draft" ] || continue

            name=$(basename "$draft")
            name="${name%.md}"
            heading=$(grep -m1 "^title:" "$draft" | sed 's/^title:[[:space:]]*//' | tr -d '"' | tr -d "'")

            json_add_object
            json_add_string "slug" "$name"
            json_add_string "title" "${heading:-$name}"
            json_add_string "path" "$draft"
            json_close_object
        done
    fi

    json_close_array
    json_dump
}
|
|
|
|
# rpcd method: filter posts by free-text query, category and/or tag.
# Input (stdin JSON): query, category, tag — all optional, AND-combined.
# query/category are matched case-insensitively as grep BRE patterns.
# Fixes: "--" added before user-supplied patterns so a value starting
# with "-" cannot be parsed as a grep option; `IFS= read -r` preserves
# file names with backslashes/leading whitespace.
search_posts() {
    read input
    json_load "$input"
    json_get_var query query
    json_get_var category category
    json_get_var tag tag

    local site_path=$(get_site_path)
    local posts_dir="$site_path/source/_posts"
    local tmp_posts="/tmp/hexojs_search_$$"

    json_init
    json_add_array "posts"

    if [ -d "$posts_dir" ]; then
        # Recursively find all posts
        find "$posts_dir" -type f -name "*.md" ! -name "index.md" 2>/dev/null > "$tmp_posts"

        while IFS= read -r f; do
            [ -f "$f" ] || continue

            local match=1

            # Full-text filter over the whole file
            if [ -n "$query" ]; then
                grep -qi -- "$query" "$f" || match=0
            fi

            # Category comes from front matter, else the subdirectory name
            local file_cat=$(grep -m1 "^categories:" "$f" | sed 's/^categories:[[:space:]]*//' | tr -d '[]')
            if [ -z "$file_cat" ]; then
                local rel_path="${f#$posts_dir/}"
                local cat_dir=$(dirname "$rel_path")
                [ "$cat_dir" != "." ] && file_cat="$cat_dir"
            fi

            # Filter by category
            if [ -n "$category" ] && [ "$match" = "1" ]; then
                echo "$file_cat" | grep -qi -- "$category" || match=0
            fi

            # Filter by tag (matches anywhere on the tags: line)
            if [ -n "$tag" ] && [ "$match" = "1" ]; then
                grep -qi "tags:.*$tag" "$f" || match=0
            fi

            if [ "$match" = "1" ]; then
                local filename=$(basename "$f")
                local slug="${filename%.md}"
                local title=$(grep -m1 "^title:" "$f" | sed 's/^title:[[:space:]]*//' | tr -d '"' | tr -d "'")
                local date=$(grep -m1 "^date:" "$f" | sed 's/^date:[[:space:]]*//')

                json_add_object
                json_add_string "slug" "$slug"
                json_add_string "title" "${title:-$slug}"
                json_add_string "date" "$date"
                json_add_string "path" "$f"
                json_add_string "categories" "$file_cat"
                json_close_object
            fi
        done < "$tmp_posts"

        rm -f "$tmp_posts"
    fi

    json_close_array
    json_dump
}
|
|
|
|
# ============================================
|
|
# Taxonomy Methods
|
|
# ============================================
|
|
|
|
# rpcd method: list unique categories with per-category post counts.
# Categories come from post front matter, falling back to the post's
# subdirectory under _posts. Temp files keep the while-loops out of
# pipelines so the json_* calls run in the current shell.
# Fix: `read` without -r interpreted backslashes in file names and
# category values; both loops now use read -r (with IFS= on the
# whole-line file loop).
list_categories() {
    local site_path
    site_path=$(get_site_path)
    local posts_dir="$site_path/source/_posts"
    local tmp_posts="/tmp/hexojs_cats_$$"
    local tmp_cats="/tmp/hexojs_cats_list_$$"
    local tmp_counts="/tmp/hexojs_cats_counts_$$"

    json_init
    json_add_array "categories"

    if [ -d "$posts_dir" ]; then
        # Recursively find all posts
        find "$posts_dir" -type f -name "*.md" ! -name "index.md" 2>/dev/null > "$tmp_posts"
        : > "$tmp_cats"

        while IFS= read -r f; do
            [ -f "$f" ] || continue
            local cat=$(grep -m1 "^categories:" "$f" | sed 's/^categories:[[:space:]]*//' | tr -d '[]' | tr ',' '\n')
            # Use directory as category if not in front matter
            if [ -z "$cat" ]; then
                local rel_path="${f#$posts_dir/}"
                local cat_dir=$(dirname "$rel_path")
                [ "$cat_dir" != "." ] && cat="$cat_dir"
            fi
            echo "$cat" >> "$tmp_cats"
        done < "$tmp_posts"

        # "uniq -c" lines look like "<count> <name>"; the name may contain
        # spaces and is captured by the trailing read field
        grep -v '^$' "$tmp_cats" 2>/dev/null | sort | uniq -c > "$tmp_counts"

        while read -r count name; do
            name=$(echo "$name" | sed 's/^[[:space:]]*//' | sed 's/[[:space:]]*$//')
            [ -n "$name" ] || continue
            json_add_object
            json_add_string "name" "$name"
            json_add_int "count" "$count"
            json_close_object
        done < "$tmp_counts"

        rm -f "$tmp_posts" "$tmp_cats" "$tmp_counts"
    fi

    json_close_array
    json_dump
}
|
|
|
|
# rpcd method: list unique tags with per-tag post counts as JSON.
# Tags come from each post's front matter "tags:" line; temp files keep
# the while-loops out of pipelines so json_* runs in the current shell.
# Fix: `read` without -r interpreted backslashes; both loops now use
# read -r (with IFS= on the whole-line file loop).
list_tags() {
    local site_path
    site_path=$(get_site_path)
    local posts_dir="$site_path/source/_posts"
    local tmp_posts="/tmp/hexojs_tags_$$"
    local tmp_tags="/tmp/hexojs_tags_list_$$"
    local tmp_counts="/tmp/hexojs_tags_counts_$$"

    json_init
    json_add_array "tags"

    if [ -d "$posts_dir" ]; then
        # Recursively find all posts
        find "$posts_dir" -type f -name "*.md" ! -name "index.md" 2>/dev/null > "$tmp_posts"
        : > "$tmp_tags"

        while IFS= read -r f; do
            [ -f "$f" ] || continue
            local tag=$(grep -m1 "^tags:" "$f" | sed 's/^tags:[[:space:]]*//' | tr -d '[]' | tr ',' '\n')
            echo "$tag" >> "$tmp_tags"
        done < "$tmp_posts"

        # "uniq -c" lines look like "<count> <name>"; name may contain
        # spaces and is captured by the trailing read field
        grep -v '^$' "$tmp_tags" 2>/dev/null | sort | uniq -c > "$tmp_counts"

        while read -r count name; do
            name=$(echo "$name" | sed 's/^[[:space:]]*//' | sed 's/[[:space:]]*$//')
            [ -n "$name" ] || continue
            json_add_object
            json_add_string "name" "$name"
            json_add_int "count" "$count"
            json_close_object
        done < "$tmp_counts"

        rm -f "$tmp_posts" "$tmp_tags" "$tmp_counts"
    fi

    json_close_array
    json_dump
}
|
|
|
|
# ============================================
|
|
# Media Methods
|
|
# ============================================
|
|
|
|
# rpcd method: list media files under source/images as a JSON array with
# size/mtime metadata, exposing both the site-relative path (used by
# delete_media) and the absolute path.
# Fix: `read` without -r/IFS= mangled file names containing backslashes
# or leading whitespace.
list_media() {
    local site_path
    site_path=$(get_site_path)
    local media_dir="$site_path/source/images"
    local tmp_media="/tmp/hexojs_media_$$"

    json_init
    json_add_array "media"

    if [ -d "$media_dir" ]; then
        # Temp file keeps the loop out of a pipeline so json_* calls run
        # in the current shell
        find "$media_dir" -type f 2>/dev/null > "$tmp_media"

        while IFS= read -r f; do
            [ -f "$f" ] || continue
            local filename=$(basename "$f")
            local size=$(stat -c %s "$f" 2>/dev/null || echo 0)
            local mtime=$(stat -c %Y "$f" 2>/dev/null || echo 0)
            # Path relative to <site>/source, e.g. /images/foo.png
            local relpath="${f#$site_path/source}"

            json_add_object
            json_add_string "name" "$filename"
            json_add_string "path" "$relpath"
            json_add_string "full_path" "$f"
            json_add_int "size" "$size"
            json_add_int "mtime" "$mtime"
            json_close_object
        done < "$tmp_media"

        rm -f "$tmp_media"
    fi

    json_close_array
    json_dump
}
|
|
|
|
# rpcd method: delete a media file. Input (stdin JSON): path — the
# site-relative path as reported by list_media (e.g. /images/foo.png).
# Fix: the path was appended to <site>/source unchecked, so an RPC caller
# could pass "/../../etc/..." and delete files outside the site tree;
# traversal sequences are now rejected.
delete_media() {
    read input
    json_load "$input"
    json_get_var path path

    json_init

    if [ -z "$path" ]; then
        json_add_boolean "success" 0
        json_add_string "error" "Path required"
        json_dump
        return
    fi

    # Reject path traversal: the target must stay under <site>/source
    case "$path" in
        *..*)
            json_add_boolean "success" 0
            json_add_string "error" "Invalid path"
            json_dump
            return
            ;;
    esac

    local site_path=$(get_site_path)
    local full_path="$site_path/source$path"

    if [ ! -f "$full_path" ]; then
        json_add_boolean "success" 0
        json_add_string "error" "File not found"
        json_dump
        return
    fi

    rm -f "$full_path"

    json_add_boolean "success" 1
    json_add_string "message" "Media deleted"

    json_dump
}
|
|
|
|
# ============================================
|
|
# Apps (Portfolio) Methods
|
|
# ============================================
|
|
|
|
# rpcd method: list portfolio "app" pages (source/apps/*.md) as a JSON
# array, exposing the front matter fields used by the apps grid.
list_apps() {
    local site_path=$(get_site_path)
    local apps_dir="$site_path/source/apps"

    json_init
    json_add_array "apps"

    if [ -d "$apps_dir" ]; then
        for f in "$apps_dir"/*.md; do
            # Skip the literal glob when the directory holds no .md files
            [ -f "$f" ] || continue

            local filename=$(basename "$f")
            local slug="${filename%.md}"
            # Front matter fields (first occurrence wins; quotes stripped)
            local title=$(grep -m1 "^title:" "$f" | sed 's/^title:[[:space:]]*//' | tr -d '"' | tr -d "'")
            local icon=$(grep -m1 "^icon:" "$f" | sed 's/^icon:[[:space:]]*//')
            local description=$(grep -m1 "^description:" "$f" | sed 's/^description:[[:space:]]*//' | tr -d '"')
            local url=$(grep -m1 "^url:" "$f" | sed 's/^url:[[:space:]]*//')
            local category=$(grep -m1 "^category:" "$f" | sed 's/^category:[[:space:]]*//')
            local featured=$(grep -m1 "^featured:" "$f" | sed 's/^featured:[[:space:]]*//')

            json_add_object
            json_add_string "slug" "$slug"
            json_add_string "title" "${title:-$slug}"
            json_add_string "icon" "$icon"
            json_add_string "description" "$description"
            json_add_string "url" "$url"
            json_add_string "category" "$category"
            # "featured" is true only when the raw value is exactly "true"
            json_add_boolean "featured" "$([ "$featured" = "true" ] && echo 1 || echo 0)"
            json_close_object
        done
    fi

    json_close_array
    json_dump
}
|
|
|
|
# rpcd method: create a portfolio "app" page from JSON input on stdin.
# Input: title (required), icon, description, url, category (defaults to
# "tools"), content.
# Fix: a title with no ASCII alphanumerics produced an empty slug and an
# app file literally named ".md" — now rejected (consistent with the
# equivalent guard in create_post).
create_app() {
    read input
    json_load "$input"
    json_get_var title title
    json_get_var icon icon
    json_get_var description description
    json_get_var url url
    json_get_var category category
    json_get_var content content

    json_init

    if [ -z "$title" ]; then
        json_add_boolean "success" 0
        json_add_string "error" "Title required"
        json_dump
        return
    fi

    # Slug: lowercase, spaces to dashes, strip everything but [a-z0-9-]
    local slug=$(echo "$title" | tr '[:upper:]' '[:lower:]' | tr ' ' '-' | tr -cd 'a-z0-9-')
    local date=$(date "+%Y-%m-%d %H:%M:%S")

    # Guard against an empty slug (title had no usable characters)
    if [ -z "$slug" ]; then
        json_add_boolean "success" 0
        json_add_string "error" "Title must contain at least one ASCII letter or digit"
        json_dump
        return
    fi

    local site_path=$(get_site_path)
    local apps_dir="$site_path/source/apps"
    local app_file="$apps_dir/${slug}.md"

    [ -d "$apps_dir" ] || mkdir -p "$apps_dir"

    # Refuse to overwrite an existing app page
    if [ -f "$app_file" ]; then
        json_add_boolean "success" 0
        json_add_string "error" "App with this slug already exists"
        json_dump
        return
    fi

    cat > "$app_file" << EOF
---
title: $title
date: $date
layout: app
icon: $icon
description: $description
url: $url
category: ${category:-tools}
featured: false
status: active
---

$content
EOF

    json_add_boolean "success" 1
    json_add_string "slug" "$slug"
    json_add_string "message" "App created"

    json_dump
}
|
|
|
|
# ============================================
|
|
# Build & Deploy Methods
|
|
# ============================================
|
|
|
|
# rpcd method: run `hexo generate` inside the container, blocking until
# it finishes; on failure the captured hexo output is returned verbatim
# in the "error" field.
do_generate() {
    json_init

    if is_running; then
        local output rc
        output=$("$HEXOCTL" exec sh -c "cd /opt/hexojs/site && hexo generate" 2>&1)
        rc=$?
        if [ "$rc" -eq 0 ]; then
            json_add_boolean "success" 1
            json_add_string "message" "Site generated successfully"
        else
            json_add_boolean "success" 0
            json_add_string "error" "$output"
        fi
    else
        json_add_boolean "success" 0
        json_add_string "error" "Container not running"
    fi

    json_dump
}
|
|
|
|
# rpcd method: run `hexo clean` inside the container, blocking until it
# finishes; on failure the captured hexo output is returned verbatim in
# the "error" field.
do_clean() {
    json_init

    if is_running; then
        local output rc
        output=$("$HEXOCTL" exec sh -c "cd /opt/hexojs/site && hexo clean" 2>&1)
        rc=$?
        if [ "$rc" -eq 0 ]; then
            json_add_boolean "success" 1
            json_add_string "message" "Site cleaned"
        else
            json_add_boolean "success" 0
            json_add_string "error" "$output"
        fi
    else
        json_add_boolean "success" 0
        json_add_string "error" "Container not running"
    fi

    json_dump
}
|
|
|
|
# rpcd method: start `hexo deploy` inside the container in the background.
# Requires deploy.repo to be configured; progress goes to
# /tmp/hexo-deploy.log and can be polled via get_deploy_status.
do_deploy() {
    json_init

    if ! is_running; then
        json_add_boolean "success" 0
        json_add_string "error" "Container not running"
        json_dump
        return
    fi

    local repo=$(uci_get deploy.repo)
    if [ -z "$repo" ]; then
        json_add_boolean "success" 0
        json_add_string "error" "Deploy repository not configured"
        json_dump
        return
    fi

    # Fire-and-forget: the RPC returns immediately; deploy runs detached
    # with stdout+stderr captured in the log file
    "$HEXOCTL" exec sh -c "cd /opt/hexojs/site && hexo deploy" > /tmp/hexo-deploy.log 2>&1 &

    json_add_boolean "success" 1
    json_add_string "message" "Deploy started"
    json_add_string "log_file" "/tmp/hexo-deploy.log"

    json_dump
}
|
|
|
|
# rpcd method: report whether a `hexo deploy` is in flight ("running" /
# "idle") plus the tail of the most recent deploy log, if any.
get_deploy_status() {
    json_init

    local state="idle"
    pgrep -f "hexo deploy" >/dev/null 2>&1 && state="running"
    json_add_string "status" "$state"

    if [ -f /tmp/hexo-deploy.log ]; then
        json_add_string "log" "$(tail -20 /tmp/hexo-deploy.log 2>/dev/null)"
    fi

    json_dump
}
|
|
|
|
# ============================================
|
|
# Preview Methods
|
|
# ============================================
|
|
|
|
# rpcd method: report preview availability. Hexo serves continuously
# while the container runs, so this only checks the container and
# returns the preview URL (LAN address or localhost fallback).
# Fix: `local http_port=$(uci_get ...) || http_port="4000"` never applied
# the default because `local` returns 0; declaration/assignment split.
preview_start() {
    json_init

    if ! is_running; then
        json_add_boolean "success" 0
        json_add_string "error" "Container not running. Start service first."
        json_dump
        return
    fi

    local http_port
    http_port=$(uci_get main.http_port)
    [ -n "$http_port" ] || http_port="4000"

    json_add_boolean "success" 1
    json_add_string "message" "Preview server running"
    json_add_int "port" "$http_port"
    json_add_string "url" "http://$(uci -q get network.lan.ipaddr || echo "localhost"):$http_port"

    json_dump
}
|
|
|
|
# rpcd method: preview-server state — "running" mirrors the container
# state; the URL is only emitted while running.
# Fix: `local http_port=$(uci_get ...) || http_port="4000"` never applied
# the default because `local` returns 0; declaration/assignment split.
preview_status() {
    json_init

    local running http_port
    running=$(is_running && echo 1 || echo 0)
    http_port=$(uci_get main.http_port)
    [ -n "$http_port" ] || http_port="4000"

    json_add_boolean "running" "$running"
    json_add_int "port" "$http_port"

    if [ "$running" = "1" ]; then
        json_add_string "url" "http://$(uci -q get network.lan.ipaddr || echo "localhost"):$http_port"
    fi

    json_dump
}
|
|
|
|
# ============================================
|
|
# Configuration Methods
|
|
# ============================================
|
|
|
|
# rpcd method: dump the full hexojs configuration (main options, per-site
# settings, deploy settings) as JSON, with defaults applied for unset
# main options. Unset site/deploy strings come back empty.
# Fix: every `local v=$(uci_get ...) || v=default` never applied its
# default (`local` always returns 0); declarations split from assignments.
get_config() {
    json_init

    local enabled http_port data_path active_site memory_limit
    enabled=$(uci_get main.enabled)
    [ -n "$enabled" ] || enabled="0"
    http_port=$(uci_get main.http_port)
    [ -n "$http_port" ] || http_port="4000"
    data_path=$(uci_get main.data_path)
    [ -n "$data_path" ] || data_path="$DATA_PATH"
    active_site=$(uci_get main.active_site)
    [ -n "$active_site" ] || active_site="default"
    memory_limit=$(uci_get main.memory_limit)
    [ -n "$memory_limit" ] || memory_limit="512M"

    json_add_boolean "enabled" "$enabled"
    json_add_int "http_port" "$http_port"
    json_add_string "data_path" "$data_path"
    json_add_string "active_site" "$active_site"
    json_add_string "memory_limit" "$memory_limit"

    # Site config (section named after the active site)
    json_add_object "site"
    json_add_string "title" "$(uci_get ${active_site}.title)"
    json_add_string "subtitle" "$(uci_get ${active_site}.subtitle)"
    json_add_string "author" "$(uci_get ${active_site}.author)"
    json_add_string "language" "$(uci_get ${active_site}.language)"
    json_add_string "theme" "$(uci_get ${active_site}.theme)"
    json_add_string "url" "$(uci_get ${active_site}.url)"
    json_close_object

    # Deploy config
    json_add_object "deploy"
    json_add_string "type" "$(uci_get deploy.type)"
    json_add_string "repo" "$(uci_get deploy.repo)"
    json_add_string "branch" "$(uci_get deploy.branch)"
    json_close_object

    json_dump
}
|
|
|
|
# rpcd method: persist configuration from stdin JSON into UCI, then ask
# the container to regenerate the site's _config.yml (best-effort).
# Only fields present (non-empty) in the input are written.
# Fix: `local active_site=$(uci_get ...) || active_site="default"` never
# applied the default (`local` returns 0); declaration/assignment split.
save_config() {
    read input
    json_load "$input"

    json_get_var enabled enabled
    json_get_var http_port http_port
    json_get_var title title
    json_get_var subtitle subtitle
    json_get_var author author
    json_get_var language language
    json_get_var url url
    json_get_var deploy_repo deploy_repo
    json_get_var deploy_branch deploy_branch

    json_init

    local active_site
    active_site=$(uci_get main.active_site)
    [ -n "$active_site" ] || active_site="default"

    [ -n "$enabled" ] && uci_set main.enabled "$enabled"
    [ -n "$http_port" ] && uci_set main.http_port "$http_port"
    [ -n "$title" ] && uci_set ${active_site}.title "$title"
    [ -n "$subtitle" ] && uci_set ${active_site}.subtitle "$subtitle"
    [ -n "$author" ] && uci_set ${active_site}.author "$author"
    [ -n "$language" ] && uci_set ${active_site}.language "$language"
    [ -n "$url" ] && uci_set ${active_site}.url "$url"
    [ -n "$deploy_repo" ] && uci_set deploy.repo "$deploy_repo"
    [ -n "$deploy_branch" ] && uci_set deploy.branch "$deploy_branch"

    # Best-effort: push the new settings into the site's _config.yml
    "$HEXOCTL" exec sh -c "cd /opt/hexojs && hexoctl update_site_config" 2>/dev/null || true

    json_add_boolean "success" 1
    json_add_string "message" "Configuration saved"

    json_dump
}
|
|
|
|
# rpcd method: report stored theme settings as JSON. Unset string options
# are emitted as empty strings.
# NOTE(review): an unset allow_toggle passes an empty value to
# json_add_boolean — confirm jshn treats that as false rather than
# producing malformed output.
get_theme_config() {
    json_init

    json_add_string "default_mode" "$(uci_get theme_config.default_mode)"
    json_add_boolean "allow_toggle" "$(uci_get theme_config.allow_toggle)"
    json_add_string "accent_color" "$(uci_get theme_config.accent_color)"
    json_add_string "logo_symbol" "$(uci_get theme_config.logo_symbol)"
    json_add_string "logo_text" "$(uci_get theme_config.logo_text)"

    json_dump
}
|
|
|
|
# rpcd method: persist theme settings from stdin JSON into UCI.
# Only fields present (non-empty) in the input are written.
save_theme_config() {
    read input
    json_load "$input"

    local mode toggle accent symbol text
    json_get_var mode default_mode
    json_get_var toggle allow_toggle
    json_get_var accent accent_color
    json_get_var symbol logo_symbol
    json_get_var text logo_text

    json_init

    [ -n "$mode" ] && uci_set theme_config.default_mode "$mode"
    [ -n "$toggle" ] && uci_set theme_config.allow_toggle "$toggle"
    [ -n "$accent" ] && uci_set theme_config.accent_color "$accent"
    [ -n "$symbol" ] && uci_set theme_config.logo_symbol "$symbol"
    [ -n "$text" ] && uci_set theme_config.logo_text "$text"

    json_add_boolean "success" 1
    json_add_string "message" "Theme configuration saved"

    json_dump
}
|
|
|
|
# rpcd method: enumerate theme presets shipped in
# /usr/share/hexojs/presets. Each *.yml file contributes one entry:
# id (the file stem) plus name/description/icon parsed from its header.
list_presets() {
    local presets_dir="/usr/share/hexojs/presets"
    local preset stem

    json_init
    json_add_array "presets"

    if [ -d "$presets_dir" ]; then
        for preset in "$presets_dir"/*.yml; do
            # Skip the literal glob when no preset files exist
            [ -f "$preset" ] || continue

            stem=$(basename "$preset")
            stem="${stem%.yml}"

            json_add_object
            json_add_string "id" "$stem"
            json_add_string "name" "$(grep -m1 "^name:" "$preset" | sed 's/^name:[[:space:]]*//')"
            json_add_string "description" "$(grep -m1 "^description:" "$preset" | sed 's/^description:[[:space:]]*//')"
            json_add_string "icon" "$(grep -m1 "^icon:" "$preset" | sed 's/^icon:[[:space:]]*//')"
            json_close_object
        done
    fi

    json_close_array
    json_dump
}
|
|
|
|
# Copy a named preset over the active theme configuration.
# Input JSON: {preset_id} — must name a file under /usr/share/hexojs/presets.
apply_preset() {
	# -r: keep backslashes in the JSON payload literal
	read -r input
	json_load "$input"
	json_get_var preset_id preset_id

	json_init

	if [ -z "$preset_id" ]; then
		json_add_boolean "success" 0
		json_add_string "error" "Preset ID required"
		json_dump
		return
	fi

	# Security: preset_id is interpolated into a filesystem path; reject
	# separators and parent references to keep it inside the presets dir.
	case "$preset_id" in
		*/*|*..*)
			json_add_boolean "success" 0
			json_add_string "error" "Preset not found"
			json_dump
			return
			;;
	esac

	local preset_file="/usr/share/hexojs/presets/${preset_id}.yml"
	if [ ! -f "$preset_file" ]; then
		json_add_boolean "success" 0
		json_add_string "error" "Preset not found"
		json_dump
		return
	fi

	local site_path=$(get_site_path)
	local theme_config="$site_path/themes/cybermind/_config.yml"

	# Only overwrite an existing theme config; never create one from scratch
	if [ -f "$theme_config" ]; then
		cp "$preset_file" "$theme_config"
	fi

	json_add_boolean "success" 1
	json_add_string "message" "Preset applied"

	json_dump
}
|
|
|
|
# ============================================
|
|
# GitHub Sync Methods
|
|
# ============================================
|
|
|
|
# Report repository state for the site directory: remote, branch, working
# tree counters, ahead/behind vs upstream, last commit and a short status.
git_status() {
	local site_path=$(get_site_path)

	json_init

	if [ ! -d "$site_path" ]; then
		json_add_boolean "success" 0
		json_add_string "error" "Site not found"
		json_dump
		return
	fi

	cd "$site_path" 2>/dev/null || {
		json_add_boolean "success" 0
		json_add_string "error" "Cannot access site directory"
		json_dump
		return
	}

	# Not an error: the UI uses is_repo=0 to offer initialization
	if [ ! -d "$site_path/.git" ]; then
		json_add_boolean "success" 1
		json_add_boolean "is_repo" 0
		json_add_string "message" "Not a git repository"
		json_dump
		return
	fi

	json_add_boolean "success" 1
	json_add_boolean "is_repo" 1

	local remote=$(git remote get-url origin 2>/dev/null || echo "")
	json_add_string "remote" "$remote"

	local branch=$(git rev-parse --abbrev-ref HEAD 2>/dev/null || echo "")
	json_add_string "branch" "$branch"

	# grep -c already prints 0 when nothing matches; the former "|| echo 0"
	# produced "0<newline>0" in that case and broke json_add_int.
	local modified=$(git status --porcelain 2>/dev/null | grep -c '^.M')
	local untracked=$(git status --porcelain 2>/dev/null | grep -c '^??')
	local staged=$(git status --porcelain 2>/dev/null | grep -c '^M')

	json_add_int "modified" "$modified"
	json_add_int "untracked" "$untracked"
	json_add_int "staged" "$staged"

	# Commits not yet pushed / not yet pulled relative to upstream @{u}
	local ahead=$(git rev-list --count @{u}..HEAD 2>/dev/null || echo 0)
	local behind=$(git rev-list --count HEAD..@{u} 2>/dev/null || echo 0)

	json_add_int "ahead" "$ahead"
	json_add_int "behind" "$behind"

	local last_commit=$(git log -1 --format="%h - %s (%cr)" 2>/dev/null || echo "No commits")
	json_add_string "last_commit" "$last_commit"

	# Capped short status for display
	local status_output=$(git status --short 2>/dev/null | head -20)
	json_add_string "status_output" "$status_output"

	json_dump
}
|
|
|
|
# Initialize (or re-point) the site's git repository at a remote.
# Input JSON: {repo: remote URL, branch?: default "main"}
git_init() {
	# -r: keep backslashes in the JSON payload literal
	read -r input
	json_load "$input"
	json_get_var repo repo
	json_get_var branch branch

	json_init

	local site_path=$(get_site_path)

	if [ -z "$repo" ]; then
		json_add_boolean "success" 0
		json_add_string "error" "Repository URL required"
		json_dump
		return
	fi

	[ -z "$branch" ] && branch="main"

	# Guard the cd: the original ran the git commands in whatever cwd the
	# RPCD process had when the site directory was missing.
	if ! cd "$site_path" 2>/dev/null; then
		json_add_boolean "success" 0
		json_add_string "error" "Cannot access site directory"
		json_dump
		return
	fi

	# Initialize git if not already a repo
	[ -d .git ] || git init >/dev/null 2>&1

	# Replace any existing origin with the requested remote
	git remote remove origin 2>/dev/null || true
	git remote add origin "$repo"

	# Set default branch (may fail on an empty repo; that is fine)
	git branch -M "$branch" 2>/dev/null || true

	json_add_boolean "success" 1
	json_add_string "message" "Repository initialized with remote: $repo"

	json_dump
}
|
|
|
|
# Clone a remote repository into the site directory, backing up any existing
# site first and restoring it if the clone fails.
# Input JSON: {repo: remote URL, branch?: default "main"}
git_clone() {
	# -r: keep backslashes in the JSON payload literal
	read -r input
	json_load "$input"
	json_get_var repo repo
	json_get_var branch branch

	json_init

	if [ -z "$repo" ]; then
		json_add_boolean "success" 0
		json_add_string "error" "Repository URL required"
		json_dump
		return
	fi

	[ -z "$branch" ] && branch="main"

	# Declare before assigning: "local v=$(cmd) || v=dflt" takes local's
	# exit status (always 0), so the fallback would never be applied.
	local data_path
	data_path=$(uci_get main.data_path) || data_path="$DATA_PATH"
	local site_path="$data_path/site"

	# Backup existing site if present
	local backup=""
	if [ -d "$site_path" ]; then
		backup="$data_path/site.backup.$(date +%Y%m%d%H%M%S)"
		mv "$site_path" "$backup"
	fi

	# Clone repository (split decl/assign so $? is the clone's status)
	local output result
	output=$(git clone --branch "$branch" --single-branch "$repo" "$site_path" 2>&1)
	result=$?

	if [ "$result" -eq 0 ]; then
		json_add_boolean "success" 1
		json_add_string "message" "Repository cloned successfully"
		json_add_string "branch" "$branch"
	else
		# A failed clone may leave a partial directory behind; remove it
		# first, otherwise the backup restore mv would fail.
		rm -rf "$site_path"
		[ -n "$backup" ] && [ -d "$backup" ] && mv "$backup" "$site_path"
		json_add_boolean "success" 0
		json_add_string "error" "Clone failed: $output"
	fi

	json_dump
}
|
|
|
|
# Pull (rebase) from the configured remote, auto-stashing local changes
# around the pull and restoring them afterwards.
git_pull() {
	json_init

	local site_path=$(get_site_path)

	if [ ! -d "$site_path/.git" ]; then
		json_add_boolean "success" 0
		json_add_string "error" "Not a git repository"
		json_dump
		return
	fi

	cd "$site_path"

	# Stash local changes; remember whether a stash entry was actually
	# created so we never pop an unrelated, pre-existing stash afterwards.
	local stashes_before stashes_after
	stashes_before=$(git stash list 2>/dev/null | wc -l)
	git stash push -m "auto-stash-$(date +%Y%m%d%H%M%S)" 2>/dev/null
	stashes_after=$(git stash list 2>/dev/null | wc -l)

	# Pull changes (split decl/assign so $? is the pull's status)
	local output result
	output=$(git pull --rebase 2>&1)
	result=$?

	# Restore local changes only if the auto-stash above created an entry
	[ "$stashes_after" -gt "$stashes_before" ] && git stash pop 2>/dev/null

	if [ "$result" -eq 0 ]; then
		json_add_boolean "success" 1
		json_add_string "message" "Pull successful"
		json_add_string "output" "$output"
	else
		json_add_boolean "success" 0
		json_add_string "error" "Pull failed: $output"
	fi

	json_dump
}
|
|
|
|
# Stage, commit and push all local changes to origin.
# Input JSON: {message?: commit message, force?: "1" for --force}
git_push() {
	# -r: keep backslashes in the JSON payload literal
	read -r input
	json_load "$input"
	json_get_var message message
	json_get_var force force

	json_init

	local site_path=$(get_site_path)

	if [ ! -d "$site_path/.git" ]; then
		json_add_boolean "success" 0
		json_add_string "error" "Not a git repository"
		json_dump
		return
	fi

	cd "$site_path"

	# Check if there's a remote
	local remote=$(git remote get-url origin 2>/dev/null || echo "")
	if [ -z "$remote" ]; then
		json_add_boolean "success" 0
		json_add_string "error" "No remote repository configured"
		json_dump
		return
	fi

	# Stage all changes
	git add -A

	# Commit only when something is actually staged
	local staged=$(git diff --cached --quiet; echo $?)
	if [ "$staged" = "1" ]; then
		local commit_msg="${message:-Auto-commit from SecuBox CMS $(date +%Y-%m-%d\ %H:%M:%S)}"
		git commit -m "$commit_msg" 2>/dev/null
	fi

	local push_args=""
	[ "$force" = "1" ] && push_args="--force"

	# Split decl/assignment: "local output=$(cmd); local result=$?" always
	# saw local's exit status (0), so push failures were reported as success.
	# $push_args is intentionally unquoted so an empty value expands to nothing.
	local output result
	output=$(git push $push_args origin HEAD 2>&1)
	result=$?

	if [ "$result" -eq 0 ]; then
		json_add_boolean "success" 1
		json_add_string "message" "Push successful"
		json_add_string "output" "$output"
	else
		json_add_boolean "success" 0
		json_add_string "error" "Push failed: $output"
	fi

	json_dump
}
|
|
|
|
# Fetch all remotes for the site repository and report the outcome.
git_fetch() {
	json_init

	local site_path=$(get_site_path)

	if [ ! -d "$site_path/.git" ]; then
		json_add_boolean "success" 0
		json_add_string "error" "Not a git repository"
		json_dump
		return
	fi

	cd "$site_path"

	# Split decl/assignment: "local output=$(cmd); local result=$?" always
	# saw local's exit status (0), so fetch failures were never reported.
	local output result
	output=$(git fetch --all 2>&1)
	result=$?

	if [ "$result" -eq 0 ]; then
		json_add_boolean "success" 1
		json_add_string "message" "Fetch successful"
		json_add_string "output" "$output"
	else
		json_add_boolean "success" 0
		json_add_string "error" "Fetch failed: $output"
	fi

	json_dump
}
|
|
|
|
# Return the last 20 commits as an array of {hash, message} objects.
git_log() {
	json_init

	local site_path=$(get_site_path)

	if [ ! -d "$site_path/.git" ]; then
		json_add_boolean "success" 0
		json_add_string "error" "Not a git repository"
		json_dump
		return
	fi

	cd "$site_path"

	json_add_boolean "success" 1
	json_add_array "commits"

	# A temp file keeps the while loop out of a pipeline subshell, where the
	# jshn state built by json_add_* would be lost. mktemp avoids the
	# predictable /tmp name the old "$$" suffix produced.
	local tmp_log
	tmp_log=$(mktemp /tmp/hexojs_git_log.XXXXXX) || tmp_log="/tmp/hexojs_git_log_$$"
	git log --oneline -20 2>/dev/null > "$tmp_log"

	local line hash msg
	# IFS= and -r preserve leading whitespace and literal backslashes in
	# commit subjects; plain `read line` mangled both.
	while IFS= read -r line; do
		hash=${line%% *}
		msg=${line#* }

		json_add_object
		json_add_string "hash" "$hash"
		json_add_string "message" "$msg"
		json_close_object
	done < "$tmp_log"

	rm -f "$tmp_log"

	json_close_array
	json_dump
}
|
|
|
|
# Reset the working tree. Input JSON: {hard?: "1" for hard reset + clean,
# anything else just unstages}.
git_reset() {
	# -r: keep backslashes in the JSON payload literal
	read -r input
	json_load "$input"
	json_get_var hard hard

	json_init

	local site_path=$(get_site_path)

	if [ ! -d "$site_path/.git" ]; then
		json_add_boolean "success" 0
		json_add_string "error" "Not a git repository"
		json_dump
		return
	fi

	cd "$site_path"

	if [ "$hard" = "1" ]; then
		# Split decl/assignment: "local output=$(cmd); local result=$?"
		# always saw local's exit status (0), hiding reset failures.
		local output result
		output=$(git reset --hard HEAD 2>&1)
		result=$?

		if [ "$result" -eq 0 ]; then
			# Also drop untracked files/dirs for a truly clean tree
			git clean -fd 2>/dev/null
			json_add_boolean "success" 1
			json_add_string "message" "Hard reset complete. All local changes discarded."
		else
			json_add_boolean "success" 0
			json_add_string "error" "Reset failed: $output"
		fi
	else
		# Soft path: just unstage everything, keep working tree intact
		git reset HEAD 2>/dev/null
		json_add_boolean "success" 1
		json_add_string "message" "Unstaged all changes"
	fi

	json_dump
}
|
|
|
|
# Set the repo-local git identity. Input JSON: {name?, email?} — only
# non-empty fields are written.
git_set_credentials() {
	# -r: keep backslashes in the JSON payload literal
	read -r input
	json_load "$input"
	json_get_var name name
	json_get_var email email

	json_init

	local site_path=$(get_site_path)

	if [ ! -d "$site_path/.git" ]; then
		json_add_boolean "success" 0
		json_add_string "error" "Not a git repository"
		json_dump
		return
	fi

	cd "$site_path"

	[ -n "$name" ] && git config user.name "$name"
	[ -n "$email" ] && git config user.email "$email"

	json_add_boolean "success" 1
	json_add_string "message" "Git credentials configured"

	json_dump
}
|
|
|
|
# Return the repo-local git identity (user.name / user.email) as JSON.
git_get_credentials() {
	local site_path=$(get_site_path)
	local name="" email=""

	json_init

	if [ ! -d "$site_path/.git" ]; then
		json_add_boolean "success" 0
		json_add_string "error" "Not a git repository"
		json_dump
		return
	fi

	cd "$site_path"

	# Unset keys fall back to empty strings
	name=$(git config user.name 2>/dev/null) || name=""
	email=$(git config user.email 2>/dev/null) || email=""

	json_add_boolean "success" 1
	json_add_string "name" "$name"
	json_add_string "email" "$email"

	json_dump
}
|
|
|
|
# ============================================
|
|
# Gitea Integration Methods
|
|
# ============================================
|
|
|
|
# Report Gitea integration settings from UCI plus the state of the local
# content repository (branch, last commit).
gitea_status() {
	json_init

	# Declare before assigning: "local v=$(cmd) || v=dflt" takes local's
	# exit status (always 0), so the fallbacks below never applied and
	# unset options leaked through as empty strings (breaking the booleans).
	local enabled gitea_url gitea_user content_repo content_branch auto_sync
	enabled=$(uci -q get hexojs.gitea.enabled) || enabled="0"
	gitea_url=$(uci -q get hexojs.gitea.url) || gitea_url=""
	gitea_user=$(uci -q get hexojs.gitea.user) || gitea_user=""
	content_repo=$(uci -q get hexojs.gitea.content_repo) || content_repo=""
	content_branch=$(uci -q get hexojs.gitea.content_branch) || content_branch="main"
	auto_sync=$(uci -q get hexojs.gitea.auto_sync) || auto_sync="0"

	local data_path
	data_path=$(uci_get main.data_path) || data_path="$DATA_PATH"
	local content_path="$data_path/content"
	local has_repo="false"
	local last_commit=""
	local branch=""

	if [ -d "$content_path/.git" ]; then
		has_repo="true"
		cd "$content_path"
		last_commit=$(git log -1 --format="%h %s" 2>/dev/null || echo "unknown")
		branch=$(git branch --show-current 2>/dev/null || echo "unknown")
	fi

	json_add_boolean "enabled" "$enabled"
	json_add_string "gitea_url" "$gitea_url"
	json_add_string "gitea_user" "$gitea_user"
	json_add_string "content_repo" "$content_repo"
	json_add_string "content_branch" "$content_branch"
	json_add_boolean "auto_sync" "$auto_sync"
	json_add_boolean "has_local_repo" "$([ "$has_repo" = "true" ] && echo 1 || echo 0)"
	json_add_string "local_branch" "$branch"
	json_add_string "last_commit" "$last_commit"

	json_dump
}
|
|
|
|
# Run "hexoctl gitea setup" (configures git credentials) and report outcome.
gitea_setup() {
	json_init

	# Split decl/assignment: "local output=$(cmd); local result=$?" always
	# saw local's exit status (0), so failures were reported as success.
	local output result
	output=$("$HEXOCTL" gitea setup 2>&1)
	result=$?

	if [ "$result" -eq 0 ]; then
		json_add_boolean "success" 1
		json_add_string "message" "Git credentials configured"
	else
		json_add_boolean "success" 0
		json_add_string "error" "$output"
	fi

	json_dump
}
|
|
|
|
# Run "hexoctl gitea clone" (initial content checkout) and report outcome.
gitea_clone() {
	json_init

	# Split decl/assignment: "local output=$(cmd); local result=$?" always
	# saw local's exit status (0), so failures were reported as success.
	local output result
	output=$("$HEXOCTL" gitea clone 2>&1)
	result=$?

	if [ "$result" -eq 0 ]; then
		json_add_boolean "success" 1
		json_add_string "message" "Content cloned from Gitea"
	else
		json_add_boolean "success" 0
		json_add_string "error" "$output"
	fi

	json_dump
}
|
|
|
|
# Run "hexoctl gitea sync" (pull latest content) and report outcome.
gitea_sync() {
	json_init

	# Split decl/assignment: "local output=$(cmd); local result=$?" always
	# saw local's exit status (0), so failures were reported as success.
	local output result
	output=$("$HEXOCTL" gitea sync 2>&1)
	result=$?

	if [ "$result" -eq 0 ]; then
		json_add_boolean "success" 1
		json_add_string "message" "Content synced from Gitea"
	else
		json_add_boolean "success" 0
		json_add_string "error" "$output"
	fi

	json_dump
}
|
|
|
|
# Persist Gitea integration settings from a JSON request into UCI.
# Input: {enabled, gitea_url, gitea_user, gitea_token, content_repo,
#         content_branch, auto_sync} — only non-empty fields are written.
gitea_save_config() {
	# -r: keep backslashes in the JSON payload literal
	read -r input
	json_load "$input"

	json_get_var enabled enabled
	json_get_var gitea_url gitea_url
	json_get_var gitea_user gitea_user
	json_get_var gitea_token gitea_token
	json_get_var content_repo content_repo
	json_get_var content_branch content_branch
	json_get_var auto_sync auto_sync

	json_init

	[ -n "$enabled" ] && uci set hexojs.gitea.enabled="$enabled"
	[ -n "$gitea_url" ] && uci set hexojs.gitea.url="$gitea_url"
	[ -n "$gitea_user" ] && uci set hexojs.gitea.user="$gitea_user"
	[ -n "$gitea_token" ] && uci set hexojs.gitea.token="$gitea_token"
	[ -n "$content_repo" ] && uci set hexojs.gitea.content_repo="$content_repo"
	[ -n "$content_branch" ] && uci set hexojs.gitea.content_branch="$content_branch"
	[ -n "$auto_sync" ] && uci set hexojs.gitea.auto_sync="$auto_sync"

	uci commit hexojs

	json_add_boolean "success" 1
	json_add_string "message" "Gitea configuration saved"

	json_dump
}
|
|
|
|
# ============================================
|
|
# Workflow Status (Gitea → Hexo → HAProxy)
|
|
# ============================================
|
|
|
|
# Aggregate end-to-end workflow status: Gitea sync → Hexo build → HAProxy
# publication → portal deployment, plus an overall health summary object.
get_workflow_status() {
	json_init

	local site_path=$(get_site_path)
	# Declare before assigning: "local v=$(cmd) || v=dflt" takes local's
	# exit status (always 0), so the fallback would never be applied.
	local data_path
	data_path=$(uci_get main.data_path) || data_path="$DATA_PATH"

	# ── Gitea Integration Status ──
	json_add_object "gitea"

	local gitea_enabled gitea_url content_repo auto_sync
	gitea_enabled=$(uci -q get hexojs.gitea.enabled) || gitea_enabled="0"
	gitea_url=$(uci -q get hexojs.gitea.url) || gitea_url=""
	content_repo=$(uci -q get hexojs.gitea.content_repo) || content_repo=""
	auto_sync=$(uci -q get hexojs.gitea.auto_sync) || auto_sync="0"

	json_add_boolean "enabled" "$gitea_enabled"
	json_add_string "url" "$gitea_url"
	json_add_string "repo" "$content_repo"
	json_add_boolean "auto_sync" "$auto_sync"

	# Check git repo status
	if [ -d "$site_path/.git" ]; then
		cd "$site_path"
		local branch=$(git branch --show-current 2>/dev/null || echo "")
		local remote=$(git remote get-url origin 2>/dev/null || echo "")
		local last_commit=$(git log -1 --format="%h - %s" 2>/dev/null || echo "")
		local last_commit_date=$(git log -1 --format="%ci" 2>/dev/null || echo "")
		local ahead=$(git rev-list --count @{u}..HEAD 2>/dev/null || echo "0")
		local behind=$(git rev-list --count HEAD..@{u} 2>/dev/null || echo "0")
		# grep -c already prints 0 on no match; the former "|| echo 0"
		# produced "0<newline>0" in that case and broke json_add_int.
		local modified=$(git status --porcelain 2>/dev/null | grep -c '^.M')
		local untracked=$(git status --porcelain 2>/dev/null | grep -c '^??')

		json_add_boolean "has_repo" 1
		json_add_string "branch" "$branch"
		json_add_string "remote" "$remote"
		json_add_string "last_commit" "$last_commit"
		json_add_string "last_commit_date" "$last_commit_date"
		json_add_int "ahead" "$ahead"
		json_add_int "behind" "$behind"
		json_add_int "modified" "$modified"
		json_add_int "untracked" "$untracked"

		# Derive a single sync verdict for the UI
		if [ "$behind" -gt 0 ]; then
			json_add_string "sync_status" "behind"
			json_add_string "sync_message" "Pull required ($behind commits behind)"
		elif [ "$ahead" -gt 0 ]; then
			json_add_string "sync_status" "ahead"
			json_add_string "sync_message" "Push available ($ahead commits ahead)"
		elif [ "$modified" -gt 0 ] || [ "$untracked" -gt 0 ]; then
			json_add_string "sync_status" "modified"
			json_add_string "sync_message" "Local changes pending"
		else
			json_add_string "sync_status" "synced"
			json_add_string "sync_message" "Up to date"
		fi
	else
		json_add_boolean "has_repo" 0
		json_add_string "sync_status" "not_initialized"
		json_add_string "sync_message" "Git repository not configured"
	fi

	json_close_object

	# ── Hexo Build Status ──
	json_add_object "hexo"

	local running=$(is_running && echo 1 || echo 0)
	local http_port
	http_port=$(uci_get main.http_port) || http_port="4000"
	local post_count=0
	local draft_count=0

	[ -d "$site_path/source/_posts" ] && post_count=$(find "$site_path/source/_posts" -type f -name "*.md" ! -name "index.md" 2>/dev/null | wc -l)
	[ -d "$site_path/source/_drafts" ] && draft_count=$(find "$site_path/source/_drafts" -type f -name "*.md" ! -name "index.md" 2>/dev/null | wc -l)

	json_add_boolean "container_running" "$running"
	json_add_int "http_port" "$http_port"
	json_add_int "post_count" "$post_count"
	json_add_int "draft_count" "$draft_count"

	# Check if public/ exists and its freshness
	local public_dir="$site_path/public"
	if [ -d "$public_dir" ]; then
		json_add_boolean "site_built" 1
		local public_files=$(find "$public_dir" -type f 2>/dev/null | wc -l)
		json_add_int "public_files" "$public_files"

		# Compare newest source post vs newest generated page (epoch secs)
		local newest_post=$(find "$site_path/source/_posts" -type f -name "*.md" -printf '%T@\n' 2>/dev/null | sort -rn | head -1)
		local newest_public=$(find "$public_dir" -type f -name "*.html" -printf '%T@\n' 2>/dev/null | sort -rn | head -1)

		if [ -n "$newest_post" ] && [ -n "$newest_public" ]; then
			if [ "${newest_post%.*}" -gt "${newest_public%.*}" ]; then
				json_add_string "build_status" "outdated"
				json_add_string "build_message" "Rebuild required (source newer than build)"
			else
				json_add_string "build_status" "current"
				json_add_string "build_message" "Build is up to date"
			fi
		else
			json_add_string "build_status" "unknown"
			json_add_string "build_message" "Unable to determine build freshness"
		fi
	else
		json_add_boolean "site_built" 0
		json_add_int "public_files" 0
		json_add_string "build_status" "not_built"
		json_add_string "build_message" "Site has not been generated"
	fi

	json_close_object

	# ── HAProxy Publishing Status ──
	json_add_object "haproxy"

	local site_url
	site_url=$(uci -q get hexojs.default.url) || site_url=""
	local domain=""
	# Strip scheme and path to extract the bare hostname
	if [ -n "$site_url" ]; then
		domain=$(echo "$site_url" | sed 's|^https\?://||' | sed 's|/.*$||')
	fi

	json_add_string "site_url" "$site_url"
	json_add_string "domain" "$domain"

	local haproxy_running=$(pgrep haproxy >/dev/null 2>&1 && echo 1 || echo 0)
	json_add_boolean "running" "$haproxy_running"

	if [ -n "$domain" ]; then
		# Check if vhost exists via ubus
		local vhost_exists=$(ubus call luci.haproxy list_vhosts 2>/dev/null | grep -q "\"$domain\"" && echo 1 || echo 0)
		json_add_boolean "vhost_configured" "$vhost_exists"

		# Grade the certificate by days until expiry
		local cert_status=""
		local cert_file="/etc/haproxy/certs/${domain}.pem"
		if [ -f "$cert_file" ]; then
			local expiry=$(openssl x509 -in "$cert_file" -noout -enddate 2>/dev/null | sed 's/notAfter=//')
			if [ -n "$expiry" ]; then
				local expiry_epoch=$(date -d "$expiry" +%s 2>/dev/null || echo 0)
				local now_epoch=$(date +%s)
				local days_left=$(( (expiry_epoch - now_epoch) / 86400 ))

				if [ "$days_left" -lt 0 ]; then
					cert_status="expired"
				elif [ "$days_left" -lt 7 ]; then
					cert_status="critical"
				elif [ "$days_left" -lt 30 ]; then
					cert_status="expiring"
				else
					cert_status="valid"
				fi
				json_add_int "cert_days_left" "$days_left"
			fi
		else
			cert_status="missing"
		fi
		json_add_string "cert_status" "$cert_status"

		# Publishing verdict combines vhost + certificate state
		if [ "$vhost_exists" = "1" ] && [ "$cert_status" = "valid" ]; then
			json_add_string "publish_status" "published"
			json_add_string "publish_message" "Live at https://$domain"
		elif [ "$vhost_exists" = "1" ]; then
			json_add_string "publish_status" "partial"
			json_add_string "publish_message" "Vhost configured, certificate issue"
		else
			json_add_string "publish_status" "not_published"
			json_add_string "publish_message" "Not published to HAProxy"
		fi
	else
		json_add_boolean "vhost_configured" 0
		json_add_string "cert_status" "no_domain"
		json_add_string "publish_status" "not_configured"
		json_add_string "publish_message" "No domain configured"
	fi

	json_close_object

	# ── Portal Status ──
	json_add_object "portal"

	local portal_path
	portal_path=$(uci -q get hexojs.portal.path) || portal_path="/www/blog"
	json_add_string "path" "$portal_path"

	if [ -d "$portal_path" ]; then
		local portal_files=$(find "$portal_path" -type f 2>/dev/null | wc -l)
		json_add_boolean "deployed" 1
		json_add_int "file_count" "$portal_files"

		# Compare portal vs freshly generated public/ via index.html mtimes
		local portal_mtime=$(stat -c %Y "$portal_path/index.html" 2>/dev/null || echo 0)
		local public_mtime=$(stat -c %Y "$site_path/public/index.html" 2>/dev/null || echo 0)

		if [ "$public_mtime" -gt "$portal_mtime" ]; then
			json_add_string "deploy_status" "outdated"
			json_add_string "deploy_message" "Portal needs republishing"
		else
			json_add_string "deploy_status" "current"
			json_add_string "deploy_message" "Portal is up to date"
		fi
	else
		json_add_boolean "deployed" 0
		json_add_int "file_count" 0
		json_add_string "deploy_status" "not_deployed"
		json_add_string "deploy_message" "Not deployed to portal"
	fi

	json_close_object

	# ── Overall Workflow Status ──
	json_add_object "workflow"

	# Count broken stages: missing repo, stopped container, missing build
	local issues=0
	[ ! -d "$site_path/.git" ] && issues=$((issues + 1))
	[ "$running" != "1" ] && issues=$((issues + 1))
	[ ! -d "$public_dir" ] && issues=$((issues + 1))

	if [ "$issues" -eq 0 ]; then
		json_add_string "status" "healthy"
		json_add_string "message" "All workflow stages operational"
	elif [ "$issues" -eq 1 ]; then
		json_add_string "status" "warning"
		json_add_string "message" "One workflow stage needs attention"
	else
		json_add_string "status" "critical"
		json_add_string "message" "Multiple workflow stages need attention"
	fi

	json_add_int "issues" "$issues"

	json_close_object

	json_dump
}
|
|
|
|
# Publish to /www (portal)
|
|
# Deploy the generated site to the local web root via "hexoctl publish".
# Input JSON: {path?: target directory, default /www/blog}
publish_to_www() {
	# -r: keep backslashes in the JSON payload literal
	read -r input
	json_load "$input"
	json_get_var path path

	json_init

	if ! is_running; then
		json_add_boolean "success" 0
		json_add_string "error" "Container not running"
		json_dump
		return
	fi

	# Default path is /www/blog
	[ -z "$path" ] && path="/www/blog"

	# hexoctl reads the portal target from UCI
	uci set hexojs.portal=hexojs
	uci set hexojs.portal.path="$path"
	uci commit hexojs

	# Split decl/assignment: "local output=$(cmd); local result=$?" always
	# saw local's exit status (0), so publish failures looked successful.
	local output result
	output=$("$HEXOCTL" publish 2>&1)
	result=$?

	if [ "$result" -eq 0 ]; then
		json_add_boolean "success" 1
		json_add_string "message" "Published to $path"
		json_add_string "path" "$path"
	else
		json_add_boolean "success" 0
		json_add_string "error" "$output"
	fi

	json_dump
}
|
|
|
|
# ============================================
|
|
# Publishing Profiles (Wizard)
|
|
# ============================================
|
|
|
|
# List available publishing profiles
|
|
# List the built-in publishing profiles for the setup wizard.
# Each entry describes which integrations the profile enables
# (HAProxy vhost, Tor hidden service, ACME certificate) and its default port.
list_profiles() {
	local entry oldIFS

	json_init
	json_add_array "profiles"

	# Packed records: id|name|description|icon|haproxy|tor|acme|default_port
	for entry in \
		"blog|Blog|Personal blog with public domain and optional Tor|📝|1|0|1|4000" \
		"portfolio|Portfolio|Professional portfolio with SSL|💼|1|0|1|4001" \
		"privacy|Privacy Blog|Tor-only hidden service blog|🧅|0|1|0|4002" \
		"dual|Dual Access|Public domain + Tor hidden service|🌐|1|1|1|4003" \
		"docs|Documentation|Internal documentation site|📚|1|0|0|4004"
	do
		# Split on '|' only; fields may contain spaces
		oldIFS=$IFS
		IFS='|'
		set -- $entry
		IFS=$oldIFS

		json_add_object
		json_add_string "id" "$1"
		json_add_string "name" "$2"
		json_add_string "description" "$3"
		json_add_string "icon" "$4"
		json_add_boolean "haproxy" "$5"
		json_add_boolean "tor" "$6"
		json_add_boolean "acme" "$7"
		json_add_int "default_port" "$8"
		json_close_object
	done

	json_close_array
	json_dump
}
|
|
|
|
# Apply a publishing profile to an instance
|
|
# Apply a publishing profile (blog/portfolio/privacy/dual/docs) to an
# instance, wiring HAProxy and/or a Tor hidden service as the profile demands.
# Input JSON: {instance, profile, domain?, enable_tor?}
apply_profile() {
	# -r: keep backslashes in the JSON payload literal
	read -r input
	json_load "$input"
	json_get_var instance instance
	json_get_var profile profile
	json_get_var domain domain
	json_get_var enable_tor enable_tor

	json_init

	if [ -z "$instance" ] || [ -z "$profile" ]; then
		json_add_boolean "success" 0
		json_add_string "error" "Instance and profile required"
		json_dump
		return
	fi

	# The instance's UCI section must exist (port is its mandatory option)
	local port=$(uci -q get hexojs.${instance}.port)
	if [ -z "$port" ]; then
		json_add_boolean "success" 0
		json_add_string "error" "Instance not found: $instance"
		json_dump
		return
	fi

	local results=""

	# Apply profile settings
	case "$profile" in
		blog|portfolio|dual|docs)
			if [ -n "$domain" ]; then
				# Public profiles get an HAProxy backend + SSL vhost
				local haproxy_result=$(create_haproxy_vhost "$instance" "$domain" "$port" 1)
				results="$results HAProxy:$haproxy_result"
				uci set hexojs.${instance}.domain="$domain"
			fi
			;;
		privacy)
			enable_tor="1"
			;;
	esac

	# Tor hidden service: explicit request, or implied by the profile
	if [ "$enable_tor" = "1" ] || [ "$profile" = "privacy" ] || [ "$profile" = "dual" ]; then
		local tor_result=$(create_tor_hidden_service "$instance" "$port")
		results="$results Tor:$tor_result"
		uci set hexojs.${instance}.tor_enabled="1"
	fi

	uci set hexojs.${instance}.profile="$profile"
	uci commit hexojs

	json_add_boolean "success" 1
	json_add_string "message" "Profile '$profile' applied to instance '$instance'"
	json_add_string "results" "$results"

	json_dump
}
|
|
|
|
# ============================================
|
|
# HAProxy Integration
|
|
# ============================================
|
|
|
|
# Internal: Create HAProxy vhost for instance
|
|
# Internal helper: wire an instance into HAProxy — backend + server + SSL
# vhost, optionally requesting an ACME certificate.
# $1=instance  $2=domain  $3=backend port  $4=acme flag ("1" requests a cert)
# Prints "ok" on success, "haproxy_unavailable" when the RPCD backend is
# missing on ubus; return status mirrors that.
create_haproxy_vhost() {
	local instance="$1" domain="$2" port="$3" acme="$4"
	local lan_ip vhost_params

	# Bail out early when the HAProxy RPCD backend is not registered
	if ! ubus list | grep -q "luci.haproxy"; then
		echo "haproxy_unavailable"
		return 1
	fi

	ubus call luci.haproxy create_backend \
		"{\"name\":\"hexo_${instance}\",\"mode\":\"http\"}" 2>/dev/null

	# HAProxy runs inside an LXC container, so the backend server must
	# target the host's LAN IP rather than localhost.
	lan_ip=$(uci -q get network.lan.ipaddr || echo "192.168.255.1")
	ubus call luci.haproxy create_server \
		"{\"backend\":\"hexo_${instance}\",\"name\":\"${instance}\",\"address\":\"${lan_ip}\",\"port\":${port}}" 2>/dev/null

	# Assemble the vhost request, appending the ACME flag when requested
	vhost_params="{\"domain\":\"${domain}\",\"backend\":\"hexo_${instance}\",\"ssl\":true,\"ssl_redirect\":true"
	[ "$acme" = "1" ] && vhost_params="${vhost_params},\"acme\":true"
	vhost_params="${vhost_params}}"

	ubus call luci.haproxy create_vhost "$vhost_params" 2>/dev/null

	# Certificate issuance can be slow; fire and forget in the background
	if [ "$acme" = "1" ]; then
		ubus call luci.haproxy request_certificate "{\"domain\":\"${domain}\"}" 2>/dev/null &
	fi

	echo "ok"
	return 0
}
|
|
|
|
# Publish instance to HAProxy
|
|
# Publish an instance behind HAProxy with an SSL vhost (ACME by default).
# Input JSON: {instance, domain, acme?: "1"|"0", default "1"}
publish_to_haproxy() {
	# -r: keep backslashes in the JSON payload literal
	read -r input
	json_load "$input"
	json_get_var instance instance
	json_get_var domain domain
	json_get_var acme acme

	json_init

	if [ -z "$instance" ] || [ -z "$domain" ]; then
		json_add_boolean "success" 0
		json_add_string "error" "Instance and domain required"
		json_dump
		return
	fi

	local port=$(uci -q get hexojs.${instance}.port)
	if [ -z "$port" ]; then
		json_add_boolean "success" 0
		json_add_string "error" "Instance not found: $instance"
		json_dump
		return
	fi

	# ACME is opt-out
	[ -z "$acme" ] && acme="1"

	local result=$(create_haproxy_vhost "$instance" "$domain" "$port" "$acme")

	if [ "$result" = "ok" ]; then
		# Record the publication in UCI only after the vhost succeeded
		uci set hexojs.${instance}.domain="$domain"
		uci set hexojs.${instance}.haproxy_enabled="1"
		uci commit hexojs

		json_add_boolean "success" 1
		json_add_string "message" "Published to HAProxy"
		json_add_string "domain" "$domain"
		json_add_string "url" "https://${domain}"
	else
		json_add_boolean "success" 0
		json_add_string "error" "Failed to create HAProxy vhost: $result"
	fi

	json_dump
}
|
|
|
|
# Unpublish from HAProxy
|
|
# Remove an instance's HAProxy vhost + backend and clear its publication
# state in UCI. Input JSON: {instance}
unpublish_from_haproxy() {
	# -r: keep backslashes in the JSON payload literal
	read -r input
	json_load "$input"
	json_get_var instance instance

	json_init

	if [ -z "$instance" ]; then
		json_add_boolean "success" 0
		json_add_string "error" "Instance required"
		json_dump
		return
	fi

	local domain=$(uci -q get hexojs.${instance}.domain)

	# Best effort: tear down the vhost and backend if a domain was recorded
	if [ -n "$domain" ]; then
		ubus call luci.haproxy delete_vhost "{\"domain\":\"${domain}\"}" 2>/dev/null
		ubus call luci.haproxy delete_backend "{\"name\":\"hexo_${instance}\"}" 2>/dev/null
	fi

	uci delete hexojs.${instance}.domain 2>/dev/null
	uci set hexojs.${instance}.haproxy_enabled="0"
	uci commit hexojs

	json_add_boolean "success" 1
	json_add_string "message" "Unpublished from HAProxy"

	json_dump
}
|
|
|
|
# Get HAProxy status for instance
|
|
# Report HAProxy publication state for one instance: domain, certificate
# freshness and DNS resolution. Input JSON: {instance}
get_haproxy_status() {
	# -r: keep backslashes in the JSON payload literal
	read -r input
	json_load "$input"
	json_get_var instance instance

	json_init

	if [ -z "$instance" ]; then
		json_add_boolean "success" 0
		json_add_string "error" "Instance required"
		json_dump
		return
	fi

	local domain=$(uci -q get hexojs.${instance}.domain)
	local port=$(uci -q get hexojs.${instance}.port)

	json_add_boolean "success" 1
	json_add_string "instance" "$instance"
	json_add_string "domain" "$domain"
	# json_add_int with an empty value emits invalid JSON; default to 0
	json_add_int "port" "${port:-0}"

	if [ -n "$domain" ]; then
		json_add_boolean "published" 1
		json_add_string "url" "https://${domain}"

		# Check certificate
		local cert_file="/etc/haproxy/certs/${domain}.pem"
		if [ -f "$cert_file" ]; then
			local expiry=$(openssl x509 -in "$cert_file" -noout -enddate 2>/dev/null | sed 's/notAfter=//')
			if [ -n "$expiry" ]; then
				local expiry_epoch=$(date -d "$expiry" +%s 2>/dev/null || echo 0)
				local now_epoch=$(date +%s)
				local days_left=$(( (expiry_epoch - now_epoch) / 86400 ))

				# Grade by remaining lifetime (the old code reported
				# "valid" unconditionally, even for expired certs)
				if [ "$days_left" -lt 0 ]; then
					json_add_string "cert_status" "expired"
				elif [ "$days_left" -lt 30 ]; then
					json_add_string "cert_status" "expiring"
				else
					json_add_string "cert_status" "valid"
				fi
				json_add_int "cert_days_left" "$days_left"
				json_add_string "cert_expiry" "$expiry"
			fi
		else
			json_add_string "cert_status" "missing"
		fi

		# Check DNS
		local resolved_ip=$(nslookup "$domain" 2>/dev/null | grep -A1 "Name:" | grep "Address:" | awk '{print $2}' | head -1)
		if [ -n "$resolved_ip" ]; then
			json_add_string "dns_status" "ok"
			json_add_string "dns_resolved_ip" "$resolved_ip"
		else
			json_add_string "dns_status" "failed"
		fi
	else
		json_add_boolean "published" 0
	fi

	json_dump
}
|
|
|
|
# ============================================
|
|
# Tor Hidden Service Integration
|
|
# ============================================
|
|
|
|
# Internal: create a Tor hidden service "hexo_<instance>" mapping virtual
# port 80 to the instance's local port, via the Tor Shield RPCD backend.
# Prints exactly one of: the onion hostname, "pending" (hostname not yet
# generated) or "tor_unavailable". Returns 1 only when Tor Shield is missing.
create_tor_hidden_service() {
    local instance="$1"
    local port="$2"
    local onion=""
    local tries=0

    # Bail out early when the Tor Shield RPCD object is not registered.
    if ! ubus list | grep -q "luci.tor-shield"; then
        echo "tor_unavailable"
        return 1
    fi

    # Redirect stdout as well as stderr: this function's stdout IS its
    # return value (callers capture it with $( )), so the ubus JSON reply
    # must not leak into it and corrupt the onion address.
    ubus call luci.tor-shield add_hidden_service \
        "{\"name\":\"hexo_${instance}\",\"local_port\":${port},\"virtual_port\":80}" \
        >/dev/null 2>&1

    # Poll up to 10 seconds for Tor to materialize the hostname file.
    while [ -z "$onion" ] && [ "$tries" -lt 10 ]; do
        sleep 1
        onion=$(cat /var/lib/tor/hidden_service_hexo_${instance}/hostname 2>/dev/null)
        tries=$((tries + 1))
    done

    if [ -n "$onion" ]; then
        echo "$onion"
    else
        echo "pending"
    fi
    return 0
}
|
|
|
|
# Publish an instance over Tor: create the hidden service and persist the
# result in UCI. Input (stdin JSON): { "instance": <name> }
# Fix: the "pending" path previously emitted TWO "message" keys in the
# same JSON object (a generic one plus the pending hint); exactly one
# "message" is emitted per response now.
publish_to_tor() {
    local req inst port onion
    read req
    json_load "$req"
    json_get_var inst instance

    json_init

    if [ -z "$inst" ]; then
        json_add_boolean "success" 0
        json_add_string "error" "Instance required"
        json_dump
        return
    fi

    port=$(uci -q get hexojs.${inst}.port)
    if [ -z "$port" ]; then
        json_add_boolean "success" 0
        json_add_string "error" "Instance not found: $inst"
        json_dump
        return
    fi

    onion=$(create_tor_hidden_service "$inst" "$port")

    if [ "$onion" != "tor_unavailable" ]; then
        uci set hexojs.${inst}.tor_enabled="1"
        # Only persist a real onion address, never the "pending" marker.
        [ "$onion" != "pending" ] && uci set hexojs.${inst}.onion_address="$onion"
        uci commit hexojs

        json_add_boolean "success" 1
        if [ "$onion" != "pending" ]; then
            json_add_string "message" "Tor hidden service created"
            json_add_string "onion_address" "$onion"
            json_add_string "url" "http://${onion}"
        else
            json_add_string "status" "pending"
            json_add_string "message" "Onion address generating, check back soon"
        fi
    else
        json_add_boolean "success" 0
        json_add_string "error" "Tor Shield not available"
    fi

    json_dump
}
|
|
|
|
# Tear down an instance's Tor hidden service and clear its UCI state.
# Input (stdin JSON): { "instance": <name> }
unpublish_from_tor() {
    local req inst
    read req
    json_load "$req"
    json_get_var inst instance

    json_init

    [ -n "$inst" ] || {
        json_add_boolean "success" 0
        json_add_string "error" "Instance required"
        json_dump
        return
    }

    # Best effort: ignore errors if the service was never created.
    ubus call luci.tor-shield remove_hidden_service "{\"name\":\"hexo_${inst}\"}" 2>/dev/null

    uci delete hexojs.${inst}.onion_address 2>/dev/null
    uci set hexojs.${inst}.tor_enabled="0"
    uci commit hexojs

    json_add_boolean "success" 1
    json_add_string "message" "Unpublished from Tor"

    json_dump
}
|
|
|
|
# Report Tor publication state for one instance: enabled flag, onion
# address (cached in UCI, or late-bound from the Tor hostname file) and
# an active/pending/disabled status. Input (stdin JSON): { "instance": <name> }
get_tor_status() {
    local req inst tor_on onion
    read req
    json_load "$req"
    json_get_var inst instance

    json_init

    [ -n "$inst" ] || {
        json_add_boolean "success" 0
        json_add_string "error" "Instance required"
        json_dump
        return
    }

    tor_on=$(uci -q get hexojs.${inst}.tor_enabled)
    onion=$(uci -q get hexojs.${inst}.onion_address)

    # Late-bind the onion address: Tor may have generated it after publish.
    if [ -z "$onion" ]; then
        onion=$(cat /var/lib/tor/hidden_service_hexo_${inst}/hostname 2>/dev/null)
        [ -n "$onion" ] && uci set hexojs.${inst}.onion_address="$onion" && uci commit hexojs
    fi

    json_add_boolean "success" 1
    json_add_string "instance" "$inst"
    json_add_boolean "enabled" "${tor_on:-0}"

    if [ -n "$onion" ]; then
        json_add_string "onion_address" "$onion"
        json_add_string "url" "http://${onion}"
        json_add_string "status" "active"
    elif [ "$tor_on" = "1" ]; then
        json_add_string "status" "pending"
    else
        json_add_string "status" "disabled"
    fi

    json_dump
}
|
|
|
|
# ============================================
|
|
# Full Publishing Pipeline
|
|
# ============================================
|
|
|
|
# Enumerate every URL an instance is reachable at — LAN, clearnet (HAProxy)
# and Tor — with a coarse health hint per endpoint.
# Input (stdin JSON): { "instance": <name> }
get_instance_endpoints() {
    local req inst port domain onion lan_ip cert
    read req
    json_load "$req"
    json_get_var inst instance

    json_init

    [ -n "$inst" ] || {
        json_add_boolean "success" 0
        json_add_string "error" "Instance required"
        json_dump
        return
    }

    port=$(uci -q get hexojs.${inst}.port)
    domain=$(uci -q get hexojs.${inst}.domain)
    onion=$(uci -q get hexojs.${inst}.onion_address)
    lan_ip=$(uci -q get network.lan.ipaddr || echo "192.168.255.1")

    json_add_boolean "success" 1
    json_add_string "instance" "$inst"

    json_add_array "endpoints"

    # LAN endpoint is always listed.
    json_add_object
    json_add_string "type" "local"
    json_add_string "url" "http://${lan_ip}:${port}"
    json_add_string "status" "active"
    json_add_string "icon" "🏠"
    json_close_object

    # Clearnet endpoint exists once a domain is published via HAProxy.
    if [ -n "$domain" ]; then
        json_add_object
        json_add_string "type" "clearnet"
        json_add_string "url" "https://${domain}"
        json_add_string "domain" "$domain"

        # Presence of the combined PEM is the health proxy for TLS.
        cert="/etc/haproxy/certs/${domain}.pem"
        if [ -f "$cert" ]; then
            json_add_string "status" "active"
            json_add_string "ssl" "valid"
        else
            json_add_string "status" "no_cert"
            json_add_string "ssl" "missing"
        fi
        json_add_string "icon" "🌐"
        json_close_object
    fi

    # Tor endpoint exists once an onion address has been generated.
    if [ -n "$onion" ]; then
        json_add_object
        json_add_string "type" "tor"
        json_add_string "url" "http://${onion}"
        json_add_string "onion" "$onion"
        json_add_string "status" "active"
        json_add_string "icon" "🧅"
        json_close_object
    fi

    json_close_array
    json_dump
}
|
|
|
|
# Run the publishing pipeline for an instance: optionally rebuild, then
# report the state of each configured distribution channel (HAProxy, Tor)
# as a "steps" array. Input (stdin JSON): { "instance": <name>, "rebuild": 0|1 }
full_publish() {
    local req inst rebuild build_out build_rc domain onion
    read req
    json_load "$req"
    json_get_var inst instance
    json_get_var rebuild rebuild

    json_init

    [ -n "$inst" ] || {
        json_add_boolean "success" 0
        json_add_string "error" "Instance required"
        json_dump
        return
    }

    local data_path=$(uci_get main.data_path) || data_path="$DATA_PATH"
    local site_path="$data_path/instances/$inst/site"

    json_add_array "steps"

    # Step 1: build when forced or when no generated output exists yet.
    if [ "$rebuild" = "1" ] || [ ! -d "$site_path/public" ]; then
        json_add_object
        json_add_string "step" "build"
        json_add_string "status" "running"
        json_close_object

        build_out=$("$HEXOCTL" build "$inst" 2>&1)
        build_rc=$?

        json_add_object
        json_add_string "step" "build"
        if [ "$build_rc" -eq 0 ]; then
            json_add_string "status" "success"
        else
            json_add_string "status" "failed"
            json_add_string "error" "$build_out"
        fi
        json_close_object
    fi

    # Step 2: clearnet channel (HAProxy serves the freshly built files).
    domain=$(uci -q get hexojs.${inst}.domain)
    if [ -n "$domain" ]; then
        json_add_object
        json_add_string "step" "haproxy"
        json_add_string "status" "active"
        json_add_string "url" "https://${domain}"
        json_close_object
    fi

    # Step 3: Tor channel.
    onion=$(uci -q get hexojs.${inst}.onion_address)
    if [ -n "$onion" ]; then
        json_add_object
        json_add_string "step" "tor"
        json_add_string "status" "active"
        json_add_string "url" "http://${onion}"
        json_close_object
    fi

    json_close_array

    json_add_boolean "success" 1
    json_add_string "message" "Publishing pipeline complete"

    json_dump
}
|
|
|
|
# ============================================
|
|
# Gitea Webhook Handler
|
|
# ============================================
|
|
|
|
# Handle a Gitea "push" webhook: verify the shared secret, check that the
# push targets the configured content repo, then sync the site content and
# optionally trigger a background rebuild.
# Input (stdin JSON): { "event", "repository", "ref", "secret" }
handle_webhook() {
    read input
    json_load "$input"
    json_get_var event event
    json_get_var repository repository
    json_get_var ref ref
    json_get_var secret secret

    json_init

    # Reject requests that do not carry the configured shared secret.
    local configured_secret=$(uci -q get hexojs.gitea.webhook_secret)
    if [ -n "$configured_secret" ] && [ "$secret" != "$configured_secret" ]; then
        json_add_boolean "success" 0
        json_add_string "error" "Invalid webhook secret"
        json_dump
        return
    fi

    # Only push events trigger a sync; everything else is acknowledged.
    if [ "$event" != "push" ]; then
        json_add_boolean "success" 1
        json_add_string "message" "Event '$event' ignored"
        json_dump
        return
    fi

    local content_repo=$(uci -q get hexojs.gitea.content_repo)

    # Ignore pushes to repositories other than the configured content repo.
    if [ -n "$repository" ] && [ "$repository" != "$content_repo" ]; then
        json_add_boolean "success" 1
        json_add_string "message" "Repository '$repository' not configured for sync"
        json_dump
        return
    fi

    # Pull the latest content from Gitea.
    local sync_output=$("$HEXOCTL" gitea sync 2>&1)
    local sync_result=$?

    if [ "$sync_result" -eq 0 ]; then
        local auto_build=$(uci -q get hexojs.gitea.auto_build)
        if [ "$auto_build" = "1" ]; then
            # Background rebuild: discard its output so it cannot interleave
            # with (and corrupt) the JSON reply written to stdout below —
            # previously "2>&1" sent build output to this handler's stdout.
            "$HEXOCTL" build >/dev/null 2>&1 &
        fi

        json_add_boolean "success" 1
        json_add_string "message" "Content synced from Gitea"
        json_add_string "ref" "$ref"

        logger -t hexojs "Webhook: Synced content from Gitea push to $ref"
    else
        json_add_boolean "success" 0
        json_add_string "error" "Sync failed: $sync_output"
    fi

    json_dump
}
|
|
|
|
# Persist webhook settings and tell the operator which URL to register in
# Gitea's repository webhook settings.
# Input (stdin JSON): { "auto_build": 0|1, "webhook_secret": <string> }
setup_webhook() {
    local req auto_build webhook_secret lan_ip hook_url
    read req
    json_load "$req"
    json_get_var auto_build auto_build
    json_get_var webhook_secret webhook_secret

    json_init

    # Only overwrite settings that were actually supplied.
    [ -n "$auto_build" ] && uci set hexojs.gitea.auto_build="$auto_build"
    [ -n "$webhook_secret" ] && uci set hexojs.gitea.webhook_secret="$webhook_secret"
    uci commit hexojs

    lan_ip=$(uci -q get network.lan.ipaddr || echo "192.168.255.1")
    hook_url="http://${lan_ip}/cgi-bin/luci/admin/services/hexojs/webhook"

    json_add_boolean "success" 1
    json_add_string "message" "Webhook configured"
    json_add_string "webhook_url" "$hook_url"
    json_add_string "hint" "Add this URL to Gitea repository settings > Webhooks"

    json_dump
}
|
|
|
|
# ============================================
|
|
# Instance Health & Pipeline Status
|
|
# ============================================
|
|
|
|
# Compute a composite health report for an instance.
# Checks container, site files/build output, HTTP listener, HAProxy TLS
# certificate, Tor hidden service and Gitea sync state, then derives a
# 0-100 score and a healthy/degraded/critical classification.
# Input (stdin JSON): { "instance": <name> }
get_instance_health() {
    read input
    json_load "$input"
    json_get_var instance instance

    json_init

    if [ -z "$instance" ]; then
        json_add_boolean "success" 0
        json_add_string "error" "Instance required"
        json_dump
        return
    fi

    local data_path=$(uci_get main.data_path) || data_path="$DATA_PATH"
    local site_path="$data_path/instances/$instance/site"
    local port=$(uci -q get hexojs.${instance}.port)
    local domain=$(uci -q get hexojs.${instance}.domain)
    local onion=$(uci -q get hexojs.${instance}.onion_address)

    json_add_boolean "success" 1
    json_add_string "instance" "$instance"

    # Start at 100 and subtract per detected issue.
    local health_score=100
    local issues=""

    # Container state
    json_add_object "container"
    if is_running; then
        json_add_string "status" "running"
        json_add_boolean "healthy" 1
    else
        json_add_string "status" "stopped"
        json_add_boolean "healthy" 0
        health_score=$((health_score - 30))
        issues="$issues container_stopped"
    fi
    json_close_object

    # Site sources and generated output
    json_add_object "site"
    if [ -d "$site_path" ]; then
        json_add_boolean "exists" 1
        local post_count=$(find "$site_path/source/_posts" -type f -name "*.md" ! -name "index.md" 2>/dev/null | wc -l)
        json_add_int "post_count" "$post_count"

        if [ -d "$site_path/public" ]; then
            json_add_boolean "built" 1
            local public_files=$(find "$site_path/public" -type f 2>/dev/null | wc -l)
            json_add_int "public_files" "$public_files"
        else
            json_add_boolean "built" 0
            health_score=$((health_score - 20))
            issues="$issues not_built"
        fi
    else
        json_add_boolean "exists" 0
        health_score=$((health_score - 40))
        issues="$issues no_site"
    fi
    json_close_object

    # HTTP server listener
    json_add_object "server"
    if [ -n "$port" ]; then
        json_add_int "port" "$port"
        if netstat -tln 2>/dev/null | grep -q ":${port}[[:space:]]"; then
            json_add_boolean "listening" 1
        else
            json_add_boolean "listening" 0
            health_score=$((health_score - 20))
            issues="$issues not_listening"
        fi
    fi
    json_close_object

    # HAProxy / TLS certificate
    json_add_object "haproxy"
    if [ -n "$domain" ]; then
        json_add_boolean "configured" 1
        json_add_string "domain" "$domain"

        local cert_file="/etc/haproxy/certs/${domain}.pem"
        if [ -f "$cert_file" ]; then
            local expiry_epoch=$(openssl x509 -in "$cert_file" -noout -enddate 2>/dev/null | sed 's/notAfter=//' | xargs -I{} date -d "{}" +%s 2>/dev/null || echo 0)
            local now_epoch=$(date +%s)
            local days_left=$(( (expiry_epoch - now_epoch) / 86400 ))

            if [ "$days_left" -lt 0 ]; then
                json_add_string "cert_status" "expired"
                health_score=$((health_score - 20))
                issues="$issues cert_expired"
            elif [ "$days_left" -lt 7 ]; then
                json_add_string "cert_status" "critical"
                health_score=$((health_score - 10))
                issues="$issues cert_expiring"
            elif [ "$days_left" -lt 30 ]; then
                json_add_string "cert_status" "warning"
            else
                json_add_string "cert_status" "valid"
            fi
            json_add_int "cert_days_left" "$days_left"
        else
            json_add_string "cert_status" "missing"
            health_score=$((health_score - 10))
            issues="$issues no_cert"
        fi
    else
        json_add_boolean "configured" 0
    fi
    json_close_object

    # Tor hidden service
    json_add_object "tor"
    if [ -n "$onion" ]; then
        json_add_boolean "configured" 1
        json_add_string "onion_address" "$onion"
        if [ -f "/var/lib/tor/hidden_service_hexo_${instance}/hostname" ]; then
            json_add_string "status" "active"
        else
            json_add_string "status" "pending"
        fi
    else
        json_add_boolean "configured" 0
    fi
    json_close_object

    # Gitea sync state. Run git in subshells so this process's working
    # directory never changes — the previous bare 'cd "$site_path"' leaked
    # the cwd change into every later call handled by this process.
    json_add_object "gitea"
    if [ -d "$site_path/.git" ]; then
        json_add_boolean "configured" 1
        local behind=$( (cd "$site_path" && git rev-list --count 'HEAD..@{u}' 2>/dev/null) || echo "0")
        local modified=$( (cd "$site_path" && git status --porcelain 2>/dev/null) | wc -l)

        if [ "$behind" -gt 0 ]; then
            json_add_string "sync_status" "behind"
            json_add_int "commits_behind" "$behind"
            issues="$issues sync_behind"
        elif [ "$modified" -gt 0 ]; then
            json_add_string "sync_status" "modified"
            json_add_int "local_changes" "$modified"
        else
            json_add_string "sync_status" "synced"
        fi
    else
        json_add_boolean "configured" 0
    fi
    json_close_object

    # Clamp and classify.
    [ "$health_score" -lt 0 ] && health_score=0
    json_add_int "health_score" "$health_score"

    if [ "$health_score" -ge 80 ]; then
        json_add_string "health_status" "healthy"
    elif [ "$health_score" -ge 50 ]; then
        json_add_string "health_status" "degraded"
    else
        json_add_string "health_status" "critical"
    fi

    json_add_string "issues" "$issues"

    json_dump
}
|
|
|
|
# Summarize all instances: enabled flag, coarse run state, reachable
# endpoints, plus global totals.
get_pipeline_status() {
    json_init

    local data_path=$(uci_get main.data_path) || data_path="$DATA_PATH"

    json_add_array "instances"

    # Iterate UCI sections of type "instance".
    for instance in $(uci -q show hexojs | grep "=instance" | sed 's/hexojs\.\([^=]*\)=instance/\1/'); do
        local port=$(uci -q get hexojs.${instance}.port)
        local enabled=$(uci -q get hexojs.${instance}.enabled)
        local domain=$(uci -q get hexojs.${instance}.domain)
        local onion=$(uci -q get hexojs.${instance}.onion_address)
        local profile=$(uci -q get hexojs.${instance}.profile)
        local site_path="$data_path/instances/$instance/site"

        json_add_object
        json_add_string "id" "$instance"
        json_add_int "port" "$port"
        json_add_boolean "enabled" "${enabled:-0}"
        json_add_string "profile" "${profile:-default}"

        # Coarse state: site dir present + something listening on the port.
        local status="unknown"
        if [ -d "$site_path" ]; then
            if netstat -tln 2>/dev/null | grep -q ":${port}[[:space:]]"; then
                status="running"
            else
                status="stopped"
            fi
        else
            status="no_site"
        fi
        json_add_string "status" "$status"

        json_add_object "endpoints"
        json_add_string "local" "http://localhost:${port}"
        [ -n "$domain" ] && json_add_string "clearnet" "https://${domain}"
        [ -n "$onion" ] && json_add_string "tor" "http://${onion}"
        json_close_object

        json_close_object
    done

    json_close_array

    # Global stats. Note: grep -c always prints a count ("0" on no match),
    # so no "|| echo 0" fallback — the old fallback appended a second "0"
    # line to the value whenever the count was zero, corrupting the int.
    json_add_object "stats"
    local total=$(uci -q show hexojs | grep -c "=instance")
    local running=$(netstat -tln 2>/dev/null | grep -c ":400[0-9][[:space:]]")
    json_add_int "total_instances" "$total"
    json_add_int "running_instances" "$running"
    json_close_object

    json_dump
}
|
|
|
|
# ============================================
|
|
# Instance Management Methods
|
|
# ============================================
|
|
|
|
# List all configured Hexo instances with run state and key settings.
list_instances() {
    json_init
    json_add_array "instances"

    for instance in $(uci -q show hexojs | grep "=instance" | sed 's/hexojs\.\([^=]*\)=instance/\1/'); do
        local port=$(uci -q get hexojs.${instance}.port)
        local enabled=$(uci -q get hexojs.${instance}.enabled)
        local title=$(uci -q get hexojs.${instance}.title)
        local theme=$(uci -q get hexojs.${instance}.theme)
        local domain=$(uci -q get hexojs.${instance}.domain)
        local data_path=$(uci_get main.data_path) || data_path="$DATA_PATH"
        local site_path="$data_path/instances/$instance/site"

        # 1/0 flags fed straight to json_add_boolean.
        local running=0
        local site_exists=0
        [ -d "$site_path" ] && site_exists=1
        netstat -tln 2>/dev/null | grep -q ":${port}[[:space:]]" && running=1

        json_add_object
        json_add_string "name" "$instance"
        json_add_boolean "enabled" "${enabled:-0}"
        json_add_boolean "running" "$running"
        json_add_int "port" "${port:-4000}"
        json_add_string "title" "${title:-$instance Blog}"
        json_add_string "theme" "${theme:-cybermind}"
        json_add_string "domain" "$domain"
        json_add_boolean "site_exists" "$site_exists"
        json_close_object
    done

    json_close_array
    json_dump
}
|
|
|
|
# Create a new Hexo instance: validate the name, allocate a port, write
# the UCI section and create the instance data directory.
# Input (stdin JSON): { "name", "title" (opt), "port" (opt) }
create_instance() {
    local req name title port
    read req
    json_load "$req"
    json_get_var name name
    json_get_var title title
    json_get_var port port

    json_init

    if [ -z "$name" ]; then
        json_add_boolean "success" 0
        json_add_string "error" "Instance name required"
        json_dump
        return
    fi

    # The name doubles as a UCI section id: restrict its character set.
    if ! echo "$name" | grep -qE '^[a-z][a-z0-9_]*$'; then
        json_add_boolean "success" 0
        json_add_string "error" "Invalid name. Use lowercase letters, numbers, underscore."
        json_dump
        return
    fi

    # Refuse to clobber an existing section.
    if [ -n "$(uci -q get hexojs.${name})" ]; then
        json_add_boolean "success" 0
        json_add_string "error" "Instance '$name' already exists"
        json_dump
        return
    fi

    # Auto-allocate the lowest unused port starting at 4000.
    if [ -z "$port" ]; then
        port=4000
        while uci -q show hexojs | grep -q "port='$port'"; do
            port=$((port + 1))
        done
    fi

    uci set hexojs.${name}=instance
    uci set hexojs.${name}.enabled='1'
    uci set hexojs.${name}.port="$port"
    uci set hexojs.${name}.title="${title:-$name Blog}"
    uci set hexojs.${name}.theme='cybermind'
    uci commit hexojs

    local data_path=$(uci_get main.data_path) || data_path="$DATA_PATH"
    mkdir -p "$data_path/instances/$name"

    json_add_boolean "success" 1
    json_add_string "message" "Instance '$name' created on port $port"
    json_add_string "name" "$name"
    json_add_int "port" "$port"

    json_dump
}
|
|
|
|
# Delete an instance: stop it, drop its UCI section and, when
# delete_data=1, remove its data directory as well.
delete_instance() {
    local req name delete_data
    read req
    json_load "$req"
    json_get_var name name
    json_get_var delete_data delete_data

    json_init

    [ -n "$name" ] || {
        json_add_boolean "success" 0
        json_add_string "error" "Instance name required"
        json_dump
        return
    }

    # Stop first so nothing holds the data directory open.
    "$HEXOCTL" instance stop "$name" 2>/dev/null

    uci delete hexojs.${name} 2>/dev/null
    uci commit hexojs

    if [ "$delete_data" = "1" ]; then
        local data_path=$(uci_get main.data_path) || data_path="$DATA_PATH"
        rm -rf "$data_path/instances/$name"
    fi

    json_add_boolean "success" 1
    json_add_string "message" "Instance '$name' deleted"

    json_dump
}
|
|
|
|
# Start one instance via hexoctl and relay its output/result.
start_instance() {
    local req name out rc
    read req
    json_load "$req"
    json_get_var name name

    json_init

    [ -n "$name" ] || {
        json_add_boolean "success" 0
        json_add_string "error" "Instance name required"
        json_dump
        return
    }

    out=$("$HEXOCTL" instance start "$name" 2>&1)
    rc=$?

    if [ "$rc" -eq 0 ]; then
        json_add_boolean "success" 1
        json_add_string "message" "Instance started"
        json_add_string "output" "$out"
    else
        json_add_boolean "success" 0
        json_add_string "error" "$out"
    fi

    json_dump
}
|
|
|
|
# Stop one instance via hexoctl. Always reports success: stopping an
# already-stopped instance is treated as a no-op.
stop_instance() {
    local req name
    read req
    json_load "$req"
    json_get_var name name

    json_init

    [ -n "$name" ] || {
        json_add_boolean "success" 0
        json_add_string "error" "Instance name required"
        json_dump
        return
    }

    # Output is intentionally discarded; the result is not inspected.
    "$HEXOCTL" instance stop "$name" >/dev/null 2>&1

    json_add_boolean "success" 1
    json_add_string "message" "Instance stopped"

    json_dump
}
|
|
|
|
# ============================================
|
|
# Backup/Restore Methods
|
|
# ============================================
|
|
|
|
# List backup archives (*.tar.gz) with human-readable size and mtime.
list_backups() {
    local data_path=$(uci_get main.data_path) || data_path="$DATA_PATH"
    local backup_dir="$data_path/backups"
    local f name size ts

    json_init
    json_add_array "backups"

    if [ -d "$backup_dir" ]; then
        for f in "$backup_dir"/*.tar.gz; do
            # Skip the literal glob when the directory has no archives.
            [ -f "$f" ] || continue
            name=$(basename "$f" .tar.gz)
            size=$(du -h "$f" | cut -f1)
            ts=$(stat -c %Y "$f" 2>/dev/null || echo 0)

            json_add_object
            json_add_string "name" "$name"
            json_add_string "size" "$size"
            json_add_int "timestamp" "$ts"
            json_close_object
        done
    fi

    json_close_array
    json_dump
}
|
|
|
|
# Create a backup via hexoctl; defaults to instance "default" and a
# timestamp-based backup name.
create_backup() {
    local req inst name out rc
    read req
    json_load "$req"
    json_get_var inst instance
    json_get_var name name

    json_init

    [ -z "$inst" ] && inst="default"
    [ -z "$name" ] && name="$(date +%Y%m%d-%H%M%S)"

    out=$("$HEXOCTL" backup "$inst" "$name" 2>&1)
    rc=$?

    if [ "$rc" -eq 0 ]; then
        json_add_boolean "success" 1
        json_add_string "message" "Backup created"
        json_add_string "name" "${inst}_${name}"
        json_add_string "output" "$out"
    else
        json_add_boolean "success" 0
        json_add_string "error" "$out"
    fi

    json_dump
}
|
|
|
|
# Restore a named backup into an instance (defaults to "default").
restore_backup() {
    local req name inst out rc
    read req
    json_load "$req"
    json_get_var name name
    json_get_var inst instance

    json_init

    [ -n "$name" ] || {
        json_add_boolean "success" 0
        json_add_string "error" "Backup name required"
        json_dump
        return
    }

    [ -z "$inst" ] && inst="default"

    out=$("$HEXOCTL" restore "$name" "$inst" 2>&1)
    rc=$?

    if [ "$rc" -eq 0 ]; then
        json_add_boolean "success" 1
        json_add_string "message" "Backup restored"
        json_add_string "output" "$out"
    else
        json_add_boolean "success" 0
        json_add_string "error" "$out"
    fi

    json_dump
}
|
|
|
|
# Delete a named backup archive via hexoctl.
delete_backup() {
    local req name out rc
    read req
    json_load "$req"
    json_get_var name name

    json_init

    [ -n "$name" ] || {
        json_add_boolean "success" 0
        json_add_string "error" "Backup name required"
        json_dump
        return
    }

    out=$("$HEXOCTL" backup delete "$name" 2>&1)
    rc=$?

    if [ "$rc" -eq 0 ]; then
        json_add_boolean "success" 1
        json_add_string "message" "Backup deleted"
    else
        json_add_boolean "success" 0
        json_add_string "error" "$out"
    fi

    json_dump
}
|
|
|
|
# ============================================
|
|
# Static Site Methods (KISS Upload)
|
|
# ============================================
|
|
|
|
# List static sites, or the files of one site when "instance" is given.
# Fix: "success": 1 was emitted before validation, so the instance-not-found
# path produced a response with BOTH "success": 1 and "success": 0 (duplicate
# key). The success flag is now emitted only after validation passes.
static_list() {
    read input
    json_load "$input"
    json_get_var instance instance

    local data_path=$(uci_get main.data_path) || data_path="$DATA_PATH"

    json_init

    if [ -n "$instance" ]; then
        # List files in a specific static instance.
        local static_dir="$data_path/static/$instance"
        if [ ! -d "$static_dir" ]; then
            json_add_boolean "success" 0
            json_add_string "error" "Static instance '$instance' not found"
            json_dump
            return
        fi

        json_add_boolean "success" 1
        json_add_string "instance" "$instance"
        json_add_array "files"

        local f
        for f in "$static_dir"/*; do
            [ -f "$f" ] || continue
            local filename=$(basename "$f")
            local size=$(stat -c%s "$f" 2>/dev/null || echo 0)
            local modified=$(stat -c%Y "$f" 2>/dev/null || echo 0)

            json_add_object
            json_add_string "name" "$filename"
            json_add_int "size" "$size"
            json_add_int "modified" "$modified"
            json_close_object
        done

        json_close_array
    else
        # List every static instance with file count and publish settings.
        json_add_boolean "success" 1
        json_add_array "instances"

        local dir
        for dir in "$data_path/static"/*; do
            [ -d "$dir" ] || continue
            local name=$(basename "$dir")
            local count=$(find "$dir" -type f 2>/dev/null | wc -l)
            local port=$(uci -q get hexojs.${name}.port)
            local domain=$(uci -q get hexojs.${name}.domain)
            local auth=$(uci -q get hexojs.${name}.auth_enabled)

            json_add_object
            json_add_string "name" "$name"
            json_add_int "file_count" "$count"
            json_add_int "port" "${port:-0}"
            json_add_string "domain" "$domain"
            json_add_boolean "auth_enabled" "${auth:-0}"
            json_close_object
        done

        json_close_array
    fi

    json_dump
}
|
|
|
|
# Upload one file (base64-encoded "content") into a static site instance,
# auto-creating the instance on first use. The filename is reduced to a
# basename with a restricted character set to prevent path traversal.
# Fix: names that sanitize to "", "." or ".." previously produced a bogus
# target path (the instance dir or its parent) and a misleading generic
# write error; they are now rejected explicitly. A failed decode no longer
# leaves a partially written file behind.
static_upload() {
    read input
    json_load "$input"
    json_get_var instance instance
    json_get_var filename filename
    json_get_var content content

    json_init

    if [ -z "$filename" ] || [ -z "$content" ]; then
        json_add_boolean "success" 0
        json_add_string "error" "Filename and content required"
        json_dump
        return
    fi

    [ -z "$instance" ] && instance="default"

    local data_path=$(uci_get main.data_path) || data_path="$DATA_PATH"
    local static_dir="$data_path/static/$instance"

    # Auto-create the instance on first upload.
    if [ ! -d "$static_dir" ]; then
        "$HEXOCTL" static create "$instance" >/dev/null 2>&1
    fi

    # Sanitize: strip any directory part, then drop characters outside a
    # safe allowlist.
    local safe_filename=$(basename "$filename" | tr -cd 'a-zA-Z0-9._-')

    # Reject names that sanitize to nothing or a directory reference.
    case "$safe_filename" in
        ""|.|..)
            json_add_boolean "success" 0
            json_add_string "error" "Invalid filename"
            json_dump
            return
            ;;
    esac

    local target_path="$static_dir/$safe_filename"

    # Decode base64 content and save.
    echo "$content" | base64 -d > "$target_path" 2>/dev/null
    local result=$?

    if [ "$result" -eq 0 ] && [ -f "$target_path" ]; then
        local size=$(stat -c%s "$target_path" 2>/dev/null || echo 0)
        json_add_boolean "success" 1
        json_add_string "message" "File uploaded"
        json_add_string "filename" "$safe_filename"
        json_add_string "path" "$target_path"
        json_add_int "size" "$size"
    else
        # Remove any partially written file from a failed decode.
        rm -f "$target_path" 2>/dev/null
        json_add_boolean "success" 0
        json_add_string "error" "Failed to save file"
    fi

    json_dump
}
|
|
|
|
# Create a static site instance via hexoctl, optionally binding a domain.
static_create() {
    local req name domain out rc port
    read req
    json_load "$req"
    json_get_var name name
    json_get_var domain domain

    json_init

    [ -n "$name" ] || {
        json_add_boolean "success" 0
        json_add_string "error" "Instance name required"
        json_dump
        return
    }

    out=$("$HEXOCTL" static create "$name" 2>&1)
    rc=$?

    if [ "$rc" -eq 0 ]; then
        # Persist the domain only when one was supplied.
        [ -n "$domain" ] && uci set hexojs.${name}.domain="$domain" && uci commit hexojs

        port=$(uci -q get hexojs.${name}.port)
        json_add_boolean "success" 1
        json_add_string "message" "Static instance created"
        json_add_string "name" "$name"
        json_add_int "port" "$port"
    else
        json_add_boolean "success" 0
        json_add_string "error" "$out"
    fi

    json_dump
}
|
|
|
|
# Delete a static site instance via hexoctl.
static_delete() {
    local req name out rc
    read req
    json_load "$req"
    json_get_var name name

    json_init

    [ -n "$name" ] || {
        json_add_boolean "success" 0
        json_add_string "error" "Instance name required"
        json_dump
        return
    }

    out=$("$HEXOCTL" static delete "$name" 2>&1)
    rc=$?

    if [ "$rc" -eq 0 ]; then
        json_add_boolean "success" 1
        json_add_string "message" "Static instance deleted"
    else
        json_add_boolean "success" 0
        json_add_string "error" "$out"
    fi

    json_dump
}
|
|
|
|
# Publish a static instance's files under /www/static/<instance>.
static_publish() {
    local req inst out rc
    read req
    json_load "$req"
    json_get_var inst instance

    json_init

    [ -z "$inst" ] && inst="default"

    out=$("$HEXOCTL" static publish "$inst" 2>&1)
    rc=$?

    if [ "$rc" -eq 0 ]; then
        json_add_boolean "success" 1
        json_add_string "message" "Published to /www/static/$inst"
        json_add_string "url" "/static/$inst/"
    else
        json_add_boolean "success" 0
        json_add_string "error" "$out"
    fi

    json_dump
}
|
|
|
|
# Delete a single file from a static site instance.
static_delete_file() {
    local req inst fname
    read req
    json_load "$req"
    json_get_var inst instance
    json_get_var fname filename

    json_init

    if [ -z "$inst" ] || [ -z "$fname" ]; then
        json_add_boolean "success" 0
        json_add_string "error" "Instance and filename required"
        json_dump
        return
    fi

    local data_path=$(uci_get main.data_path) || data_path="$DATA_PATH"
    # basename strips directory components (path-traversal guard); the -f
    # test below rejects "." / ".." since they are not regular files.
    local target="$data_path/static/$inst/$(basename "$fname")"

    if [ -f "$target" ]; then
        rm -f "$target"
        json_add_boolean "success" 1
        json_add_string "message" "File deleted"
    else
        json_add_boolean "success" 0
        json_add_string "error" "File not found"
    fi

    json_dump
}
|
|
|
|
# Configure HAProxy auth for static site
# Input (JSON on stdin): { "instance": "str", "enabled": "bool", "domain": "str" }.
# Falls back to the instance's configured domain when none is supplied,
# persists the settings in UCI, then applies them via hexoctl.
# Output: JSON { success, message, domain | error }.
static_configure_auth() {
	local input instance enabled domain output result

	# -r: keep backslash sequences in the JSON payload intact
	read -r input
	json_load "$input"
	json_get_var instance instance
	json_get_var enabled enabled
	json_get_var domain domain

	json_init

	if [ -z "$instance" ]; then
		json_add_boolean "success" 0
		json_add_string "error" "Instance required"
		json_dump
		return
	fi

	# No domain in the request: reuse the one stored for this instance.
	[ -z "$domain" ] && domain=$(uci -q get hexojs.${instance}.domain)

	if [ -z "$domain" ]; then
		json_add_boolean "success" 0
		json_add_string "error" "Domain required for HAProxy auth"
		json_dump
		return
	fi

	# Persist the auth flag (default: enabled) and domain before applying.
	uci set hexojs.${instance}.auth_enabled="${enabled:-1}"
	uci set hexojs.${instance}.domain="$domain"
	uci commit hexojs

	# Declaration split from assignment so $? is hexoctl's status, not
	# 'local's (SC2155 — the error branch was unreachable before).
	output=$("$HEXOCTL" auth apply "$instance" 2>&1)
	result=$?

	if [ "$result" -eq 0 ]; then
		json_add_boolean "success" 1
		json_add_string "message" "Auth configured for $domain"
		json_add_string "domain" "$domain"
	else
		json_add_boolean "success" 0
		json_add_string "error" "$output"
	fi

	json_dump
}
|
|
|
|
# ============================================
# GitHub Integration Methods
# ============================================

# Clone a GitHub repository into a site instance via hexoctl.
# Input (JSON on stdin): { "repo": "str", "instance": "str", "branch": "str" }.
# instance defaults to "default", branch defaults to "main".
# Output: JSON { success, message, instance, repo, branch | error }.
github_clone() {
	local input repo instance branch output result

	# -r: keep backslash sequences in the JSON payload intact
	read -r input
	json_load "$input"
	json_get_var repo repo
	json_get_var instance instance
	json_get_var branch branch

	json_init

	if [ -z "$repo" ]; then
		json_add_boolean "success" 0
		json_add_string "error" "GitHub repo URL required"
		json_dump
		return
	fi

	[ -z "$instance" ] && instance="default"
	[ -z "$branch" ] && branch="main"

	# Declaration split from assignment so $? reflects hexoctl, not 'local'
	# (SC2155 — the error branch was unreachable before).
	output=$("$HEXOCTL" github clone "$repo" "$instance" "$branch" 2>&1)
	result=$?

	if [ "$result" -eq 0 ]; then
		json_add_boolean "success" 1
		json_add_string "message" "GitHub repo cloned successfully"
		json_add_string "instance" "$instance"
		json_add_string "repo" "$repo"
		json_add_string "branch" "$branch"
	else
		json_add_boolean "success" 0
		json_add_string "error" "$output"
	fi

	json_dump
}
|
|
|
|
# ============================================
# Gitea Push Method
# ============================================

# Commit and push instance content to Gitea via hexoctl.
# Input (JSON on stdin): { "instance": "str", "message": "str" }.
# instance defaults to "default", message to "Auto-commit from SecuBox".
# Output: JSON { success, message, output | error }.
gitea_push() {
	local input instance message output result

	# -r: keep backslash sequences in the JSON payload intact
	read -r input
	json_load "$input"
	json_get_var instance instance
	json_get_var message message

	json_init

	[ -z "$instance" ] && instance="default"
	[ -z "$message" ] && message="Auto-commit from SecuBox"

	# Declaration split from assignment so $? reflects hexoctl, not 'local'
	# (SC2155 — the error branch was unreachable before).
	output=$("$HEXOCTL" gitea push "$instance" "$message" 2>&1)
	result=$?

	if [ "$result" -eq 0 ]; then
		json_add_boolean "success" 1
		json_add_string "message" "Changes pushed to Gitea"
		json_add_string "output" "$output"
	else
		json_add_boolean "success" 0
		json_add_string "error" "$output"
	fi

	json_dump
}
|
|
|
|
# ============================================
# Quick Publish Method
# ============================================

# One-shot publish of an instance via 'hexoctl quick-publish'.
# Input (JSON on stdin): { "instance": "str" } — defaults to "default".
# Output: JSON { success, message, output | error }.
quick_publish() {
	local input instance output result

	# -r: keep backslash sequences in the JSON payload intact
	read -r input
	json_load "$input"
	json_get_var instance instance

	json_init

	[ -z "$instance" ] && instance="default"

	# Declaration split from assignment so $? reflects hexoctl, not 'local'
	# (SC2155 — the error branch was unreachable before).
	output=$("$HEXOCTL" quick-publish "$instance" 2>&1)
	result=$?

	if [ "$result" -eq 0 ]; then
		json_add_boolean "success" 1
		json_add_string "message" "Quick publish completed"
		json_add_string "output" "$output"
	else
		json_add_boolean "success" 0
		json_add_string "error" "$output"
	fi

	json_dump
}
|
|
|
|
# ============================================
# Service Control
# ============================================

# Kick off the hexojs init script asynchronously and report immediately.
# The start can take a while (LXC container boot), so rpcd must not block.
# Output: JSON { success: true, message }.
service_start() {
	# Fire-and-forget; any init-script output is discarded.
	/etc/init.d/hexojs start >/dev/null 2>&1 &

	json_init
	json_add_boolean "success" 1
	json_add_string "message" "Service starting"
	json_dump
}
|
|
|
|
# Stop the hexojs service synchronously (stopping is quick, unlike start).
# Output: JSON { success: true, message }.
service_stop() {
	/etc/init.d/hexojs stop >/dev/null 2>&1

	json_init
	json_add_boolean "success" 1
	json_add_string "message" "Service stopped"
	json_dump
}
|
|
|
|
# Restart the hexojs service asynchronously and report immediately,
# mirroring service_start's fire-and-forget behaviour.
# Output: JSON { success: true, message }.
service_restart() {
	/etc/init.d/hexojs restart >/dev/null 2>&1 &

	json_init
	json_add_boolean "success" 1
	json_add_string "message" "Service restarting"
	json_dump
}
|
|
|
|
# ============================================
# Main dispatcher
# ============================================
#
# rpcd executable-plugin contract:
#   $1 = "list"            -> print the method catalogue (JSON) on stdout
#   $1 = "call", $2 = name -> read JSON args on stdin, emit JSON result

case "$1" in
	list)
		# Quoted delimiter: the catalogue is emitted verbatim, no expansion.
		cat <<'EOF'
{
  "status": {},
  "site_stats": {},
  "list_posts": {},
  "get_post": {"slug": "str"},
  "create_post": {"title": "str", "content": "str", "categories": "str", "tags": "str", "excerpt": "str"},
  "update_post": {"slug": "str", "title": "str", "content": "str", "categories": "str", "tags": "str", "excerpt": "str", "cover": "str"},
  "delete_post": {"slug": "str"},
  "publish_post": {"slug": "str"},
  "list_drafts": {},
  "search_posts": {"query": "str", "category": "str", "tag": "str"},
  "list_categories": {},
  "list_tags": {},
  "list_media": {},
  "delete_media": {"path": "str"},
  "list_apps": {},
  "create_app": {"title": "str", "icon": "str", "description": "str", "url": "str", "category": "str", "content": "str"},
  "generate": {},
  "clean": {},
  "deploy": {},
  "deploy_status": {},
  "preview_start": {},
  "preview_status": {},
  "get_config": {},
  "save_config": {"enabled": "bool", "http_port": "int", "title": "str", "subtitle": "str", "author": "str", "language": "str", "url": "str", "deploy_repo": "str", "deploy_branch": "str"},
  "get_theme_config": {},
  "save_theme_config": {"default_mode": "str", "allow_toggle": "bool", "accent_color": "str", "logo_symbol": "str", "logo_text": "str"},
  "list_presets": {},
  "apply_preset": {"preset_id": "str"},
  "service_start": {},
  "service_stop": {},
  "service_restart": {},
  "git_status": {},
  "git_init": {"repo": "str", "branch": "str"},
  "git_clone": {"repo": "str", "branch": "str"},
  "git_pull": {},
  "git_push": {"message": "str", "force": "bool"},
  "git_fetch": {},
  "git_log": {},
  "git_reset": {"hard": "bool"},
  "git_set_credentials": {"name": "str", "email": "str"},
  "git_get_credentials": {},
  "gitea_status": {},
  "gitea_setup": {},
  "gitea_clone": {},
  "gitea_sync": {},
  "gitea_save_config": {"enabled": "bool", "gitea_url": "str", "gitea_user": "str", "gitea_token": "str", "content_repo": "str", "content_branch": "str", "auto_sync": "bool"},
  "gitea_push": {"instance": "str", "message": "str"},
  "list_instances": {},
  "create_instance": {"name": "str", "title": "str", "port": "int"},
  "delete_instance": {"name": "str", "delete_data": "bool"},
  "start_instance": {"name": "str"},
  "stop_instance": {"name": "str"},
  "list_backups": {},
  "create_backup": {"instance": "str", "name": "str"},
  "restore_backup": {"name": "str", "instance": "str"},
  "delete_backup": {"name": "str"},
  "github_clone": {"repo": "str", "instance": "str", "branch": "str"},
  "quick_publish": {"instance": "str"},
  "publish_to_www": {"path": "str"},
  "get_workflow_status": {},
  "list_profiles": {},
  "apply_profile": {"instance": "str", "profile": "str", "domain": "str", "enable_tor": "bool"},
  "publish_to_haproxy": {"instance": "str", "domain": "str", "acme": "bool"},
  "unpublish_from_haproxy": {"instance": "str"},
  "get_haproxy_status": {"instance": "str"},
  "publish_to_tor": {"instance": "str"},
  "unpublish_from_tor": {"instance": "str"},
  "get_tor_status": {"instance": "str"},
  "get_instance_endpoints": {"instance": "str"},
  "full_publish": {"instance": "str", "rebuild": "bool"},
  "handle_webhook": {"event": "str", "repository": "str", "ref": "str", "secret": "str"},
  "setup_webhook": {"auto_build": "bool", "webhook_secret": "str"},
  "get_instance_health": {"instance": "str"},
  "get_pipeline_status": {},
  "static_list": {"instance": "str"},
  "static_upload": {"instance": "str", "filename": "str", "content": "str"},
  "static_create": {"name": "str", "domain": "str"},
  "static_delete": {"name": "str"},
  "static_publish": {"instance": "str"},
  "static_delete_file": {"instance": "str", "filename": "str"},
  "static_configure_auth": {"instance": "str", "enabled": "bool", "domain": "str"}
}
EOF
		;;
	call)
		case "$2" in
			# Status
			status) get_status ;;
			site_stats) get_site_stats ;;
			# Posts
			list_posts) list_posts ;;
			get_post) get_post ;;
			create_post) create_post ;;
			update_post) update_post ;;
			delete_post) delete_post ;;
			publish_post) publish_post ;;
			list_drafts) list_drafts ;;
			search_posts) search_posts ;;
			# Taxonomy, media, apps
			list_categories) list_categories ;;
			list_tags) list_tags ;;
			list_media) list_media ;;
			delete_media) delete_media ;;
			list_apps) list_apps ;;
			create_app) create_app ;;
			# Build and deploy
			generate) do_generate ;;
			clean) do_clean ;;
			deploy) do_deploy ;;
			deploy_status) get_deploy_status ;;
			preview_start) preview_start ;;
			preview_status) preview_status ;;
			# Configuration
			get_config) get_config ;;
			save_config) save_config ;;
			get_theme_config) get_theme_config ;;
			save_theme_config) save_theme_config ;;
			list_presets) list_presets ;;
			apply_preset) apply_preset ;;
			# Service control
			service_start) service_start ;;
			service_stop) service_stop ;;
			service_restart) service_restart ;;
			# Git
			git_status) git_status ;;
			git_init) git_init ;;
			git_clone) git_clone ;;
			git_pull) git_pull ;;
			git_push) git_push ;;
			git_fetch) git_fetch ;;
			git_log) git_log ;;
			git_reset) git_reset ;;
			git_set_credentials) git_set_credentials ;;
			git_get_credentials) git_get_credentials ;;
			# Gitea
			gitea_status) gitea_status ;;
			gitea_setup) gitea_setup ;;
			gitea_clone) gitea_clone ;;
			gitea_sync) gitea_sync ;;
			gitea_save_config) gitea_save_config ;;
			gitea_push) gitea_push ;;
			# Instances and backups
			list_instances) list_instances ;;
			create_instance) create_instance ;;
			delete_instance) delete_instance ;;
			start_instance) start_instance ;;
			stop_instance) stop_instance ;;
			list_backups) list_backups ;;
			create_backup) create_backup ;;
			restore_backup) restore_backup ;;
			delete_backup) delete_backup ;;
			# Publishing pipeline
			github_clone) github_clone ;;
			quick_publish) quick_publish ;;
			publish_to_www) publish_to_www ;;
			get_workflow_status) get_workflow_status ;;
			list_profiles) list_profiles ;;
			apply_profile) apply_profile ;;
			publish_to_haproxy) publish_to_haproxy ;;
			unpublish_from_haproxy) unpublish_from_haproxy ;;
			get_haproxy_status) get_haproxy_status ;;
			publish_to_tor) publish_to_tor ;;
			unpublish_from_tor) unpublish_from_tor ;;
			get_tor_status) get_tor_status ;;
			get_instance_endpoints) get_instance_endpoints ;;
			full_publish) full_publish ;;
			handle_webhook) handle_webhook ;;
			setup_webhook) setup_webhook ;;
			get_instance_health) get_instance_health ;;
			get_pipeline_status) get_pipeline_status ;;
			# Static sites
			static_list) static_list ;;
			static_upload) static_upload ;;
			static_create) static_create ;;
			static_delete) static_delete ;;
			static_publish) static_publish ;;
			static_delete_file) static_delete_file ;;
			static_configure_auth) static_configure_auth ;;
			*) echo '{"error": "Unknown method"}' ;;
		esac
		;;
esac
|