#!/bin/sh
# SecuBox Hub Generator v7 - Full NFO Integration + Capability Filters
# Enhanced with MetaBlog NFO, version display, capability filtering, audience tabs
#
# Builds a static portal page ($OUTPUT) from UCI-configured MetaBlogizer /
# Streamlit / HAProxy services plus the PeerTube video catalog.
# NOTE: runs under BusyBox ash on OpenWrt; keep this file POSIX/ash-safe.

# Published location of the generated page.
OUTPUT="/www/gk2-hub/index.html"

# Scratch file the page is assembled in before the final atomic mv.
# mktemp gives an unpredictable name (avoids symlink attacks in the
# world-writable /tmp); fall back to the old $$-based name if mktemp
# is unavailable on a stripped-down system.
TEMP=$(mktemp /tmp/hub_gen.XXXXXX 2>/dev/null) || TEMP="/tmp/hub_gen_$$.html"

# Remove the scratch file on any exit path (error, signal, normal end).
# After the final 'mv "$TEMP" "$OUTPUT"' succeeds this is a no-op.
trap 'rm -f "$TEMP"' EXIT

# Generation timestamp shown on the page.
DATE=$(date "+%Y-%m-%d %H:%M")

# PeerTube REST endpoint (LAN address) and the public URL used in links.
PEERTUBE_API="http://192.168.255.1:9001/api/v1"
PEERTUBE_URL="https://tube.gk2.secubox.in"

# Roots scanned for per-app README.nfo metadata files.
STREAMLIT_APPS_DIR="/srv/streamlit/apps"
METABLOG_SITES_DIR="/srv/metablogizer/sites"

# Fast NFO field extraction using awk (no eval, no subshell parsing)
# Usage: get_nfo_info
# ---------------------------------------------------------------------------
# NOTE(review): everything below appears to have been damaged in transit:
# original line breaks were collapsed (several commands are visibly cut
# mid-word, e.g. the sed argument and 'cut -d. -f1'), the HTML heredoc
# bodies were stripped down to their text content, and the quoting inside
# the embedded awk program was mangled.  The code is preserved byte-for-byte
# here; restore the original layout from version control before running or
# refactoring this script.  Comments are only inserted at points that are
# provably outside heredocs and quoted strings.
#
# Function contracts (as evidenced by the code below):
#   get_nfo_info APP_DIR SECTION FIELD DEFAULT
#     Prints the value of FIELD inside [SECTION] of APP_DIR/README.nfo via a
#     single awk pass (dashes in keys normalized to underscores, surrounding
#     quotes stripped); prints DEFAULT when the file or field is missing.
#   get_nfo_full APP_DIR
#     One-pass awk extraction of APP_DIR/README.nfo; prints
#     "category|description|keywords|capabilities|audience|icon|version";
#     returns 1 when there is no NFO file.
#   categorize_site NAME
#     Lowercases NAME and maps it to a French display category via globs;
#     falls through to "Projets".
#   get_emoji CATEGORY  -> emoji for the category (default 📄).
#   format_duration SECONDS -> "M:SS" or "H:MM:SS".
#
# Main body data flow: UCI metablogizer/streamlit/streamlit-forge/haproxy
# sections plus the PeerTube API are collected into tab-separated records
# (schema: domain name cat emoji type thumb protected desc keywords caps
# version audience, with "-" as the empty placeholder for BusyBox read) in
# $$-suffixed temp files, then rendered into HTML appended to $TEMP and
# atomically moved to $OUTPUT.
# ---------------------------------------------------------------------------
get_nfo_info() { local app_dir="$1" local section="$2" local field="$3" local default="$4" local nfo_file="$app_dir/README.nfo" [ ! -f "$nfo_file" ] && { echo "$default"; return; } # Direct awk extraction - fast and reliable local val=$(awk -v section="$section" -v field="$field" ' BEGIN { in_section = 0 } /^\[/ { if ($0 ~ "\\[" section "\\]") { in_section = 1 } else { in_section = 0 } } in_section && /^[a-zA-Z_-]+=/ { # Extract key and value key = $0 sub(/=.*/, "", key) gsub(/^[ \t]+|[ \t]+$/, "", key) gsub(/-/, "_", key) if (key == field) { val = $0 sub(/^[^=]+=[ \t]*/, "", val) gsub(/^["'"'"']|["'"'"']$/, "", val) print val exit } } ' "$nfo_file" 2>/dev/null) [ -n "$val" ] && echo "$val" || echo "$default" } # Get all NFO metadata for an app in single awk pass # Returns: category|description|keywords|capabilities|audience|icon|version get_nfo_full() { local app_dir="$1" local nfo_file="$app_dir/README.nfo" [ ! -f "$nfo_file" ] && return 1 # Single-pass extraction of all needed fields awk ' BEGIN { section = "" } /^\[/ { section = $0 sub(/^\[/, "", section) sub(/\]$/, "", section) } section == "tags" && /^category=/ { val=$0; sub(/^category=/, "", val); category=val } section == "tags" && /^keywords=/ { val=$0; sub(/^keywords=/, "", val); keywords=val } section == "tags" && /^audience=/ { val=$0; sub(/^audience=/, "", val); audience=val } section == "description" && /^short=/ { val=$0; sub(/^short=/, "", val); desc=val } section == "dynamics" && /^capabilities=/ { val=$0; sub(/^capabilities=/, "", val); caps=val } section == "media" && /^icon=/ { val=$0; sub(/^icon=/, "", val); icon=val } section == "identity" && /^version=/ { val=$0; sub(/^version=/, "", val); version=val } END { printf "%s|%s|%s|%s|%s|%s|%s\n", category, desc, keywords, caps, audience, icon, version } ' "$nfo_file" 2>/dev/null } categorize_site() { local name=$(echo "$1" | tr '[:upper:]' '[:lower:]') case "$name" in *intel*|*dgse*|*osint*|*threat*|*secu*|*raid*|*confid*|*mku*|*bdgse*|*camus*) 
# (continuation of the first categorize_site arm: intel/security names
# map to "Intelligence"; the remaining arms map name globs to categories)
echo "Intelligence" ;; *game*|*play*|*comic*|*virus*|*survie*) echo "Divertissement" ;; *dev*|*code*|*git*|*sdlc*|*crt*|*fabric*|*hermes*) echo "Développement" ;; *doc*|*manual*|*guide*|*how*|*fm*|*bgp*|*lrh*|*bcf*) echo "Documentation" ;; *media*|*video*|*tube*|*stream*|*radio*|*lyrion*|*jellyfin*) echo "Média" ;; *blog*|*news*|*press*|*zine*|*flash*|*pub*) echo "Actualités" ;; *cloud*|*file*|*nextcloud*|*photo*) echo "Cloud" ;; *admin*|*control*|*status*|*hub*|*glances*|*holo*|*console*|*evolution*) echo "Administration" ;; *money*|*coin*|*crypto*|*cgv*|*cpi*|*apr*) echo "Finance" ;; *geo*|*map*|*gondwana*|*earth*) echo "Géographie" ;; *psy*|*oracle*|*yijing*|*bazi*|*equa*|*lunaquar*|*clock*|*wuyun*|*yling*|*pix*|*tam*) echo "Ésotérique" ;; *metabol*|*osint*|*generix*|*swg*|*ftvm*|*cpf*) echo "Outils" ;; *) echo "Projets" ;; esac } get_emoji() { case "$1" in "Intelligence") echo "🔍" ;; "Divertissement") echo "🎮" ;; "Développement") echo "💻" ;; "Documentation") echo "📚" ;; "Média") echo "🎬" ;; "Actualités") echo "📰" ;; "Cloud") echo "☁️" ;; "Administration") echo "⚙️" ;; "Finance") echo "💰" ;; "Géographie") echo "🌍" ;; "Ésotérique") echo "🔮" ;; "Outils") echo "🛠️" ;; "Streamlit") echo "📊" ;; "PeerTube") echo "🎥" ;; "Communication") echo "💬" ;; "Social") echo "👥" ;; "Security") echo "🛡️" ;; "service") echo "🔌" ;; *) echo "📄" ;; esac } format_duration() { local sec=$1 local min=$((sec / 60)) local s=$((sec % 60)) if [ $min -ge 60 ]; then local h=$((min / 60)) min=$((min % 60)) printf "%d:%02d:%02d" $h $min $s else printf "%d:%02d" $min $s fi } # HTML Header cat > "$TEMP" << 'HTMLHEAD' GK² Hub — Portal SecuBox
HTMLHEAD # Header cat >> "$TEMP" << EOF
EOF # Collect all sites SITES_FILE="/tmp/hub_sites_$$.txt" CAT_FILE="/tmp/hub_cats_$$.txt" VIDEOS_FILE="/tmp/hub_videos_$$.txt" > "$SITES_FILE" > "$CAT_FILE" > "$VIDEOS_FILE" # MetaBlogizer sites - now with full NFO support uci show metablogizer 2>/dev/null | grep "=site$" | sed "s/metablogizer\.\(.*\)=site/\1/" | while read site; do name=$(uci -q get "metablogizer.$site.name") domain=$(uci -q get "metablogizer.$site.domain") enabled=$(uci -q get "metablogizer.$site.enabled") auth_required=$(uci -q get "metablogizer.$site.auth_required") [ "$enabled" != "1" ] && continue [ -z "$domain" ] && continue # Get full NFO metadata for MetaBlog sites site_dir="$METABLOG_SITES_DIR/$name" nfo_data=$(get_nfo_full "$site_dir" 2>/dev/null) if [ -n "$nfo_data" ]; then nfo_cat=$(echo "$nfo_data" | cut -d'|' -f1) nfo_desc=$(echo "$nfo_data" | cut -d'|' -f2) nfo_keywords=$(echo "$nfo_data" | cut -d'|' -f3) nfo_caps=$(echo "$nfo_data" | cut -d'|' -f4) nfo_audience=$(echo "$nfo_data" | cut -d'|' -f5) nfo_version=$(echo "$nfo_data" | cut -d'|' -f7) else nfo_cat="" nfo_desc="" nfo_keywords="" nfo_caps="" nfo_audience="" nfo_version="" fi # Use NFO category or fallback to name-based categorization [ -n "$nfo_cat" ] && cat="$nfo_cat" || cat=$(categorize_site "$name") emoji=$(get_emoji "$cat") echo "$cat" >> "$CAT_FILE" # Track audiences and capabilities for filters [ -n "$nfo_audience" ] && echo "$nfo_audience" >> "/tmp/hub_audiences_$$.txt" [ -n "$nfo_caps" ] && echo "$nfo_caps" | tr ',' '\n' >> "/tmp/hub_caps_$$.txt" protected="-" [ "$auth_required" = "1" ] && protected="protected" # Format: domain name cat emoji type thumb protected desc keywords caps version audience printf '%s\t%s\t%s\t%s\tmeta\t-\t%s\t%s\t%s\t%s\t%s\t%s\n' \ "$domain" "$name" "$cat" "$emoji" "$protected" \ "${nfo_desc:--}" "${nfo_keywords:--}" "${nfo_caps:--}" "${nfo_version:--}" "${nfo_audience:--}" >> "$SITES_FILE" done # Streamlit instances uci show streamlit 2>/dev/null | grep "=instance$" | sed 
"s/streamlit\.\(.*\)=instance/\1/" | while read app; do name=$(uci -q get "streamlit.$app.name") domain=$(uci -q get "streamlit.$app.domain") enabled=$(uci -q get "streamlit.$app.enabled") [ "$enabled" != "1" ] && continue [ -z "$domain" ] && continue # Get full NFO metadata app_dir="$STREAMLIT_APPS_DIR/$name" nfo_data=$(get_nfo_full "$app_dir" 2>/dev/null) if [ -n "$nfo_data" ]; then nfo_cat=$(echo "$nfo_data" | cut -d'|' -f1) nfo_desc=$(echo "$nfo_data" | cut -d'|' -f2) nfo_keywords=$(echo "$nfo_data" | cut -d'|' -f3) nfo_caps=$(echo "$nfo_data" | cut -d'|' -f4) nfo_audience=$(echo "$nfo_data" | cut -d'|' -f5) nfo_version=$(echo "$nfo_data" | cut -d'|' -f7) else nfo_cat="" nfo_desc="" nfo_keywords="" nfo_caps="" nfo_audience="" nfo_version="" fi [ -n "$nfo_cat" ] && cat="$nfo_cat" || cat=$(categorize_site "$name") emoji=$(get_emoji "$cat") echo "$cat" >> "$CAT_FILE" # Track audiences and capabilities for filters [ -n "$nfo_audience" ] && echo "$nfo_audience" >> "/tmp/hub_audiences_$$.txt" [ -n "$nfo_caps" ] && echo "$nfo_caps" | tr ',' '\n' >> "/tmp/hub_caps_$$.txt" # Format: domain name cat emoji type thumb protected desc keywords caps version audience printf '%s\t%s\t%s\t%s\tstreamlit\t-\t-\t%s\t%s\t%s\t%s\t%s\n' \ "$domain" "$name" "$cat" "$emoji" \ "${nfo_desc:--}" "${nfo_keywords:--}" "${nfo_caps:--}" "${nfo_version:--}" "${nfo_audience:--}" >> "$SITES_FILE" done # Also check streamlit-forge config uci show streamlit-forge 2>/dev/null | grep "=app$" | sed "s/streamlit-forge\.\(.*\)=app/\1/" | while read app; do name=$(uci -q get "streamlit-forge.$app.name") domain=$(uci -q get "streamlit-forge.$app.domain") enabled=$(uci -q get "streamlit-forge.$app.enabled") [ "$enabled" != "1" ] && continue [ -z "$domain" ] && continue # Check if already added from streamlit config grep -q " $name " "$SITES_FILE" 2>/dev/null && continue # Get full NFO metadata app_dir="$STREAMLIT_APPS_DIR/$name" nfo_data=$(get_nfo_full "$app_dir" 2>/dev/null) if [ -n "$nfo_data" ]; then 
# split the pipe-delimited NFO record; field 6 (icon) is intentionally
# skipped here — only fields 1-5 and 7 are used by the card renderer
nfo_cat=$(echo "$nfo_data" | cut -d'|' -f1) nfo_desc=$(echo "$nfo_data" | cut -d'|' -f2) nfo_keywords=$(echo "$nfo_data" | cut -d'|' -f3) nfo_caps=$(echo "$nfo_data" | cut -d'|' -f4) nfo_audience=$(echo "$nfo_data" | cut -d'|' -f5) nfo_version=$(echo "$nfo_data" | cut -d'|' -f7) else nfo_cat="" nfo_desc="" nfo_keywords="" nfo_caps="" nfo_audience="" nfo_version="" fi [ -n "$nfo_cat" ] && cat="$nfo_cat" || cat=$(categorize_site "$name") emoji=$(get_emoji "$cat") echo "$cat" >> "$CAT_FILE" # Track audiences and capabilities [ -n "$nfo_audience" ] && echo "$nfo_audience" >> "/tmp/hub_audiences_$$.txt" [ -n "$nfo_caps" ] && echo "$nfo_caps" | tr ',' '\n' >> "/tmp/hub_caps_$$.txt" printf '%s\t%s\t%s\t%s\tstreamlit\t-\t-\t%s\t%s\t%s\t%s\t%s\n' \ "$domain" "$name" "$cat" "$emoji" \ "${nfo_desc:--}" "${nfo_keywords:--}" "${nfo_caps:--}" "${nfo_version:--}" "${nfo_audience:--}" >> "$SITES_FILE" done # HAProxy vhosts - scan ALL exposed services uci show haproxy 2>/dev/null | grep "=vhost$" | sed "s/haproxy\.\(.*\)=vhost/\1/" | while read vhost; do domain=$(uci -q get "haproxy.$vhost.domain") enabled=$(uci -q get "haproxy.$vhost.enabled") [ "$enabled" = "0" ] && continue [ -z "$domain" ] && continue # Skip if already added from metablogizer/streamlit configs grep -q "^$domain " "$SITES_FILE" 2>/dev/null && continue # Get backend to determine service type backend=$(uci -q get "haproxy.$vhost.backend") # Determine type and name from domain/backend name=$(echo "$domain" | cut -d'.' 
-f1) # Categorize based on backend or domain patterns case "$backend" in *streamlit*) type="streamlit"; cat=$(categorize_site "$name") ;; *metablog*|*uhttpd*) type="meta"; cat=$(categorize_site "$name") ;; *jellyfin*) type="service"; cat="Média" ;; *peertube*|*tube*) type="service"; cat="Média" ;; *nextcloud*|*cloud*) type="service"; cat="Cloud" ;; *gitea*|*git*) type="service"; cat="Développement" ;; *lyrion*|*music*) type="service"; cat="Média" ;; *glances*) type="service"; cat="Administration" ;; *jitsi*|*meet*) type="service"; cat="Communication" ;; *photoprism*|*photo*) type="service"; cat="Cloud" ;; *social*|*gotosocial*) type="service"; cat="Social" ;; *admin*|*luci*) type="service"; cat="Administration" ;; *) type="service"; cat=$(categorize_site "$name") ;; esac emoji=$(get_emoji "$cat") echo "$cat" >> "$CAT_FILE" # Format: domain name cat emoji type thumb protected desc keywords caps version audience printf '%s\t%s\t%s\t%s\t%s\t-\t-\t-\t-\t-\t-\t-\n' \ "$domain" "$name" "$cat" "$emoji" "$type" >> "$SITES_FILE" done # PeerTube videos VIDEOS_JSON=$(curl -s "${PEERTUBE_API}/videos?count=50" 2>/dev/null) TOTAL_VIDEOS=0 if [ -n "$VIDEOS_JSON" ]; then # Parse videos using jsonfilter echo "$VIDEOS_JSON" | jsonfilter -e '@.data[*]' 2>/dev/null | while read -r video_line; do # Skip if empty [ -z "$video_line" ] && continue uuid=$(echo "$VIDEOS_JSON" | jsonfilter -e "@.data[$TOTAL_VIDEOS].uuid" 2>/dev/null) name=$(echo "$VIDEOS_JSON" | jsonfilter -e "@.data[$TOTAL_VIDEOS].name" 2>/dev/null) thumb=$(echo "$VIDEOS_JSON" | jsonfilter -e "@.data[$TOTAL_VIDEOS].thumbnailPath" 2>/dev/null) duration=$(echo "$VIDEOS_JSON" | jsonfilter -e "@.data[$TOTAL_VIDEOS].duration" 2>/dev/null) [ -z "$uuid" ] && break [ -z "$name" ] && continue # Format duration dur_fmt=$(format_duration "$duration") # Video URL and thumbnail video_url="${PEERTUBE_URL}/w/${uuid}" thumb_url="${PEERTUBE_URL}${thumb}" echo "Média" >> "$CAT_FILE" printf '%s\t%s\tMédia\t🎥\tvideo\t%s\t%s\n' "$video_url" 
"$name" "$thumb_url" "$dur_fmt" >> "$VIDEOS_FILE" TOTAL_VIDEOS=$((TOTAL_VIDEOS + 1)) done # Alternative parsing if jsonfilter fails if [ ! -s "$VIDEOS_FILE" ]; then # Use simple sed/awk parsing echo "$VIDEOS_JSON" | sed 's/},/}\n/g' | while read -r line; do uuid=$(echo "$line" | sed -n 's/.*"uuid":"\([^"]*\)".*/\1/p') name=$(echo "$line" | sed -n 's/.*"name":"\([^"]*\)".*/\1/p' | head -1) thumb=$(echo "$line" | sed -n 's/.*"thumbnailPath":"\([^"]*\)".*/\1/p') duration=$(echo "$line" | sed -n 's/.*"duration":\([0-9]*\).*/\1/p') [ -z "$uuid" ] && continue [ -z "$name" ] && continue dur_fmt=$(format_duration "${duration:-0}") video_url="${PEERTUBE_URL}/w/${uuid}" thumb_url="${PEERTUBE_URL}${thumb}" echo "Média" >> "$CAT_FILE" printf '%s\t%s\tMédia\t🎥\tvideo\t%s\t%s\n' "$video_url" "$name" "$thumb_url" "$dur_fmt" >> "$VIDEOS_FILE" done fi fi # Merge videos into sites file cat "$VIDEOS_FILE" >> "$SITES_FILE" 2>/dev/null # Stats TOTAL=$(wc -l < "$SITES_FILE" | tr -d ' ') TOTAL_META=$(grep " meta " "$SITES_FILE" | wc -l | tr -d ' ') TOTAL_STREAMLIT=$(grep " streamlit " "$SITES_FILE" | wc -l | tr -d ' ') TOTAL_VIDEOS=$(grep " video " "$SITES_FILE" | wc -l | tr -d ' ') TOTAL_SERVICES=$(grep " service " "$SITES_FILE" | wc -l | tr -d ' ') CAT_COUNTS=$(grep -v "^$" "$CAT_FILE" 2>/dev/null | sort | uniq -c | sort -rn) # Capability and audience counts CAPS_FILE="/tmp/hub_caps_$$.txt" AUDIENCES_FILE="/tmp/hub_audiences_$$.txt" [ -f "$CAPS_FILE" ] && CAP_COUNTS=$(sort "$CAPS_FILE" | uniq -c | sort -rn | head -10) || CAP_COUNTS="" [ -f "$AUDIENCES_FILE" ] && AUDIENCE_COUNTS=$(sort "$AUDIENCES_FILE" | uniq -c | sort -rn) || AUDIENCE_COUNTS="" TOTAL_WITH_NFO=$(grep -v " - - - - -$" "$SITES_FILE" 2>/dev/null | wc -l | tr -d ' ') # Stats bar cat >> "$TEMP" << EOF
$TOTALTotal
$TOTAL_METASites
$TOTAL_STREAMLITStreamlit
$TOTAL_SERVICESServices
$TOTAL_VIDEOSVidéos
$TOTAL_WITH_NFONFO
EOF echo "$CAT_COUNTS" | head -3 | while read count cat; do [ -n "$cat" ] && printf '
%s%s
\n' "$count" "$cat" >> "$TEMP" done echo "
" >> "$TEMP" # Tag cloud - categories echo '
' >> "$TEMP" echo 'Tous' >> "$TEMP" echo '📝 Sites' >> "$TEMP" echo '📊 Streamlit' >> "$TEMP" echo '🔌 Services' >> "$TEMP" echo '🎥 Vidéos' >> "$TEMP" echo "$CAT_COUNTS" | while read count cat; do [ -n "$cat" ] && printf '%s%s\n' "$cat" "$cat" "$count" >> "$TEMP" done echo '
' >> "$TEMP" # Capability filter cloud if [ -n "$CAP_COUNTS" ]; then echo '
' >> "$TEMP" echo '🔧 Capabilities:' >> "$TEMP" echo "$CAP_COUNTS" | while read count cap; do [ -n "$cap" ] && printf '%s%s\n' "$cap" "$cap" "$count" >> "$TEMP" done echo '
' >> "$TEMP" fi # Audience filter if [ -n "$AUDIENCE_COUNTS" ]; then echo '
' >> "$TEMP" echo '👥 Audience:' >> "$TEMP" echo 'Tous' >> "$TEMP" echo "$AUDIENCE_COUNTS" | while read count audience; do [ -n "$audience" ] && printf '%s%s\n' "$audience" "$audience" "$count" >> "$TEMP" done echo '
' >> "$TEMP" fi # Category tabs echo '
' >> "$TEMP" printf '
📁 Tous%s
\n' "$TOTAL" >> "$TEMP" printf '
📝 Sites%s
\n' "$TOTAL_META" >> "$TEMP" printf '
📊 Streamlit%s
\n' "$TOTAL_STREAMLIT" >> "$TEMP" printf '
🔌 Services%s
\n' "$TOTAL_SERVICES" >> "$TEMP" printf '
🎥 Vidéos%s
\n' "$TOTAL_VIDEOS" >> "$TEMP" echo "$CAT_COUNTS" | head -6 | while read count cat; do emoji=$(get_emoji "$cat") [ -n "$cat" ] && printf '
%s %s%s
\n' "$cat" "$emoji" "$cat" "$count" >> "$TEMP" done echo '
' >> "$TEMP" # Sites grid echo '
' >> "$TEMP" while IFS=' ' read -r url name cat emoji type thumb protected nfo_desc nfo_keywords nfo_caps nfo_version nfo_audience; do [ -z "$url" ] && continue # Handle placeholder values (- means empty, used for BusyBox read compatibility) [ "$thumb" = "-" ] && thumb="" [ "$protected" = "-" ] && protected="" [ "$nfo_desc" = "-" ] && nfo_desc="" [ "$nfo_keywords" = "-" ] && nfo_keywords="" [ "$nfo_caps" = "-" ] && nfo_caps="" [ "$nfo_version" = "-" ] && nfo_version="" [ "$nfo_audience" = "-" ] && nfo_audience="" # For videos, 'thumb' is thumbnail URL and 'protected' is duration duration="" if [ "$type" = "video" ]; then duration="$protected" protected="" fi protected_attr="" protected_badge="" if [ "$protected" = "protected" ]; then protected_attr="data-protected=\"1\"" protected_badge="🔒" fi if [ "$type" = "streamlit" ]; then card_class="site-card streamlit" cat_class="card-cat streamlit" preview_html="" elif [ "$type" = "video" ]; then card_class="site-card video" cat_class="card-cat video" preview_html="\"$name\"
$duration" elif [ "$type" = "service" ]; then card_class="site-card service" cat_class="card-cat service" preview_html="" else card_class="site-card" cat_class="card-cat meta" preview_html="" fi # URL handling - videos already have full URL if [ "$type" = "video" ]; then link_url="$url" domain_display="tube.gk2.secubox.in" else link_url="https://$url/" domain_display="$url" fi # Build version badge version_html="" if [ -n "$nfo_version" ]; then version_html="v$nfo_version" fi # Build description HTML desc_html="" if [ -n "$nfo_desc" ]; then desc_html="
$nfo_desc
" fi # Build capabilities badges HTML caps_html="" if [ -n "$nfo_caps" ]; then caps_html="
" for cap in $(echo "$nfo_caps" | tr ',' ' '); do [ -n "$cap" ] && caps_html="$caps_html$cap" done caps_html="$caps_html
" fi # Include all metadata in search data search_data="$url $name $cat $type $nfo_keywords $nfo_caps $nfo_audience $nfo_desc" # Data attributes for filtering data_caps="" [ -n "$nfo_caps" ] && data_caps="data-caps=\"$nfo_caps\"" data_audience="" [ -n "$nfo_audience" ] && data_audience="data-audience=\"$nfo_audience\"" cat >> "$TEMP" << CARD
$preview_html $protected_badge
$name
$version_html
$domain_display
$desc_html $emoji $cat $caps_html
CARD done < "$SITES_FILE" echo '
' >> "$TEMP" # Footer and JS cat >> "$TEMP" << 'FOOTER'
FOOTER rm -f "$SITES_FILE" "$CAT_FILE" "$VIDEOS_FILE" "$CAPS_FILE" "$AUDIENCES_FILE" mv "$TEMP" "$OUTPUT" chmod 644 "$OUTPUT" logger -t hub-generator "Hub v7: $TOTAL items ($TOTAL_META sites + $TOTAL_STREAMLIT streamlit + $TOTAL_VIDEOS videos, $TOTAL_WITH_NFO with NFO)"