#!/bin/sh
# RPCD handler for Droplet Publisher.
#
# Speaks the ubus rpcd plugin protocol: invoked as "$0 list" to
# advertise method signatures, or "$0 call <method>" with a JSON
# argument object on stdin. Each handler prints exactly one JSON
# object on stdout.

. /usr/share/libubox/jshn.sh

UPLOAD_DIR="/tmp/droplet-upload"   # where cgi-io places uploaded files
DEFAULT_DOMAIN="gk2.secubox.in"    # used when the caller omits "domain"
JOB_DIR="/tmp/droplet-jobs"        # one status file per async publish job

# Ensure job dir exists
mkdir -p "$JOB_DIR"

case "$1" in
  list)
    # NOTE(review): "publish" is advertised here but has no handler
    # below — invoking it falls through to the "Unknown method" arm.
    echo '{"publish":{},"upload":{"file":"string","name":"string","domain":"string"},"list":{},"remove":{"name":"string"},"rename":{"old":"string","new":"string"},"status":{},"job_status":{"job_id":"string"}}'
    ;;
  call)
    case "$2" in
      status)
        # Summary counters derived from UCI config sections.
        json_init
        json_add_string "upload_dir" "$UPLOAD_DIR"
        json_add_string "default_domain" "$DEFAULT_DOMAIN"
        json_add_int "sites_count" "$(uci show metablogizer 2>/dev/null | grep -c '=site$')"
        json_add_int "apps_count" "$(uci show streamlit 2>/dev/null | grep -c '=instance$')"
        json_dump
        ;;
      job_status)
        # Check status of an async publish job.
        read -r input
        job_id=$(echo "$input" | jsonfilter -e '@.job_id' 2>/dev/null)
        if [ -z "$job_id" ]; then
          echo '{"error":"Job ID required"}'
          exit 0
        fi
        job_file="$JOB_DIR/$job_id"
        if [ ! -f "$job_file" ]; then
          echo '{"status":"not_found"}'
          exit 0
        fi
        # The job file already contains a complete JSON status object.
        cat "$job_file"
        ;;
      list)
        json_init
        json_add_array "droplets"
        # MetaBlog sites — a for loop (not "uci | while") keeps the
        # json_* state in this shell; a pipeline would accumulate the
        # array in a subshell and lose it. Handles both site_xxx and
        # xxx section names. UCI section names contain no whitespace,
        # so the unquoted word-split here is intentional and safe.
        for section in $(uci show metablogizer 2>/dev/null | grep "=site$" | sed "s/metablogizer\.\(.*\)=site/\1/"); do
          display_name=${section#site_}   # strip optional site_ prefix (no fork)
          domain=$(uci -q get "metablogizer.$section.domain")
          enabled=$(uci -q get "metablogizer.$section.enabled")
          [ -z "$enabled" ] && enabled="0"
          json_add_object ""
          json_add_string "name" "$display_name"
          json_add_string "domain" "$domain"
          json_add_string "type" "static"
          json_add_boolean "enabled" "$enabled"
          json_close_object
        done
        # Streamlit apps
        for name in $(uci show streamlit 2>/dev/null | grep "=instance$" | sed "s/streamlit\.\(.*\)=instance/\1/"); do
          domain=$(uci -q get "streamlit.$name.domain")
          enabled=$(uci -q get "streamlit.$name.enabled")
          [ -z "$enabled" ] && enabled="0"
          json_add_object ""
          json_add_string "name" "$name"
          json_add_string "domain" "$domain"
          json_add_string "type" "streamlit"
          json_add_boolean "enabled" "$enabled"
          json_close_object
        done
        json_close_array
        json_dump
        ;;
      upload)
        # Read params using jsonfilter for reliability.
        read -r input
        file=$(echo "$input" | jsonfilter -e '@.file' 2>/dev/null)
        name=$(echo "$input" | jsonfilter -e '@.name' 2>/dev/null)
        domain=$(echo "$input" | jsonfilter -e '@.domain' 2>/dev/null)
        [ -z "$name" ] && { echo '{"error":"Name required"}'; exit 0; }
        [ -z "$file" ] && { echo '{"error":"File required"}'; exit 0; }
        [ -z "$domain" ] && domain="$DEFAULT_DOMAIN"

        # File should be in the upload dir (placed there by cgi-io);
        # fall back to treating "file" as a direct path.
        upload_file="$UPLOAD_DIR/$file"
        [ -f "$upload_file" ] || upload_file="$file"
        [ ! -f "$upload_file" ] && { echo '{"error":"File not found: '"$file"'"}'; exit 0; }

        # Generate job ID and run the publish asynchronously.
        job_id="$(date +%s)_$$"
        job_file="$JOB_DIR/$job_id"
        echo '{"status":"running","name":"'"$name"'","domain":"'"$domain"'"}' > "$job_file"

        # Worker script, written via a quoted here-doc: nothing is
        # expanded at generation time — every value reaches the worker
        # as a positional argument. (Replaces the original chain of
        # printf calls, which needed four layers of quoting/escaping.)
        bg_script="/tmp/droplet-bg-$job_id.sh"
        result_file="/tmp/droplet-result-$job_id.txt"
        cat > "$bg_script" <<'EOF'
#!/bin/sh
upload_file="$1"; name="$2"; domain="$3"; job_file="$4"; result_file="$5"
# Write output to a file rather than capturing it in a variable so the
# detached worker holds no pipe back to rpcd.
dropletctl publish "$upload_file" "$name" "$domain" > "$result_file" 2>&1
if grep -q '\[OK\] Published:' "$result_file"; then
  # dropletctl's last output line is taken as the resulting vhost —
  # assumption inherited from the original; confirm against dropletctl.
  vhost=$(tail -1 "$result_file")
  printf '{"status":"complete","success":true,"vhost":"%s","url":"https://%s/","message":"Published successfully"}' "$vhost" "$vhost" > "$job_file"
else
  # Flatten and JSON-escape the last log lines as the error text.
  error_msg=$(tail -10 "$result_file" | tr '\n' ' ' | sed 's/"/\\"/g')
  printf '{"status":"complete","success":false,"error":"%s"}' "$error_msg" > "$job_file"
fi
rm -f "$upload_file" "$result_file" "$0"
sleep 300
rm -f "$job_file"
EOF
        chmod +x "$bg_script"

        # Detach the worker in its own session.
        # FIX: the original ran
        #   setsid "$bg_script" ... "$result_file" /dev/null 2>&1 &
        # — the missing '>' made "/dev/null" a stray 6th argument and
        # left stdout attached to the rpcd pipe, exactly the descriptor
        # inheritance the file-based job status was meant to avoid.
        setsid "$bg_script" "$upload_file" "$name" "$domain" "$job_file" "$result_file" > /dev/null 2>&1 &

        # Return immediately; the UI polls job_status with this ID.
        echo '{"status":"started","job_id":"'"$job_id"'","name":"'"$name"'","domain":"'"$domain"'"}'
        ;;
      remove)
        read -r input
        name=$(echo "$input" | jsonfilter -e '@.name' 2>/dev/null)
        [ -z "$name" ] && { echo '{"error":"Name required"}'; exit 0; }
        # FIX: the original discarded dropletctl's exit status and
        # always reported success; report failures explicitly.
        if result=$(dropletctl remove "$name" 2>&1); then
          json_init
          json_add_boolean "success" 1
          json_add_string "message" "Removed: $name"
          json_dump
        else
          json_init
          json_add_boolean "success" 0
          json_add_string "message" "Remove failed: $result"
          json_dump
        fi
        ;;
      rename)
        read -r input
        old=$(echo "$input" | jsonfilter -e '@.old' 2>/dev/null)
        new=$(echo "$input" | jsonfilter -e '@.new' 2>/dev/null)
        # Explicit if instead of the original
        #   [ -z "$old" ] || [ -z "$new" ] && { ... }
        # which only worked by accident of &&/|| left-associativity.
        if [ -z "$old" ] || [ -z "$new" ]; then
          echo '{"error":"Old and new names required"}'
          exit 0
        fi
        # FIX: report dropletctl failures instead of unconditional success.
        if result=$(dropletctl rename "$old" "$new" 2>&1); then
          json_init
          json_add_boolean "success" 1
          json_add_string "message" "Renamed: $old -> $new"
          json_dump
        else
          json_init
          json_add_boolean "success" 0
          json_add_string "message" "Rename failed: $result"
          json_dump
        fi
        ;;
      *)
        echo '{"error":"Unknown method"}'
        ;;
    esac
    ;;
esac