feat: Fix and enhance Media Flow module (v0.5.0)

Major improvements to the Media Flow streaming detection module:

Backend (RPCD):
- Rewrite JSON handling to avoid subshell issues
- Use jq for all JSON processing (more reliable)
- Add delete_alert, clear_history, get_settings, set_settings methods
- Expand streaming service patterns (more services detected)
- Better bandwidth/quality estimation from netifyd data

Data Collection:
- Add media-flow-collector script for periodic data collection
- Add init script with cron job management
- History persists across service restarts
- Configurable retention period

Frontend:
- Remove unused Theme imports
- Fix history view to use correct field names
- Add Clear History button
- Add time period filter with refresh
- Improved table display with category icons

New streaming services detected:
- Video: Peacock, Paramount+, Crunchyroll, Funimation
- Audio: Amazon Music, YouTube Music
- Video calls: FaceTime, WhatsApp

Co-Authored-By: Claude Opus 4.5 <noreply@anthropic.com>
This commit is contained in:
CyberMind-FR 2026-01-08 18:02:39 +01:00
parent c68b1b2cc0
commit c536c9c0f8
11 changed files with 640 additions and 425 deletions

View File

@ -4,25 +4,56 @@
include $(TOPDIR)/rules.mk
PKG_NAME:=luci-app-media-flow
PKG_VERSION:=0.4.0
PKG_RELEASE:=2
PKG_VERSION:=0.5.0
PKG_RELEASE:=1
PKG_ARCH:=all
PKG_LICENSE:=Apache-2.0
PKG_MAINTAINER:=CyberMind <contact@cybermind.fr>
LUCI_TITLE:=Media Flow - Streaming Detection & Monitoring
LUCI_DESCRIPTION:=Real-time detection and monitoring of streaming services (Netflix, YouTube, Spotify, etc.) with quality estimation and alerts
LUCI_DEPENDS:=+luci-base +rpcd +netifyd
LUCI_DESCRIPTION:=Real-time detection and monitoring of streaming services (Netflix, YouTube, Spotify, etc.) with quality estimation, history tracking, and alerts
LUCI_DEPENDS:=+luci-base +rpcd +netifyd +jq
LUCI_PKGARCH:=all
# File permissions (CRITICAL: RPCD scripts MUST be executable 755)
# Format: path:owner:group:mode
# - RPCD scripts: 755 (executable by root, required for ubus calls)
# - Helper scripts: 755 (if executable)
# - Config files: 644 (readable by all, writable by root)
# - CSS/JS files: 644 (set automatically by luci.mk)
PKG_FILE_MODES:=/usr/libexec/rpcd/luci.media-flow:root:root:755
PKG_FILE_MODES:=/usr/libexec/rpcd/luci.media-flow:root:root:755 \
/usr/bin/media-flow-collector:root:root:755 \
/etc/init.d/media-flow:root:root:755
define Package/$(PKG_NAME)/install
$(call Package/luci-app-media-flow/install,$(1))
$(INSTALL_DIR) $(1)/usr/bin
$(INSTALL_BIN) ./root/usr/bin/media-flow-collector $(1)/usr/bin/
$(INSTALL_DIR) $(1)/etc/init.d
$(INSTALL_BIN) ./root/etc/init.d/media-flow $(1)/etc/init.d/
endef
define Package/$(PKG_NAME)/postinst
#!/bin/sh
[ -n "$${IPKG_INSTROOT}" ] || {
# Initialize history file
mkdir -p /tmp/media-flow-stats
[ ! -f /tmp/media-flow-history.json ] && echo '[]' > /tmp/media-flow-history.json
# Enable and start the collector
/etc/init.d/media-flow enable 2>/dev/null
/etc/init.d/media-flow start 2>/dev/null
# Restart rpcd
/etc/init.d/rpcd restart 2>/dev/null
}
exit 0
endef
define Package/$(PKG_NAME)/prerm
#!/bin/sh
[ -n "$${IPKG_INSTROOT}" ] || {
/etc/init.d/media-flow stop 2>/dev/null
/etc/init.d/media-flow disable 2>/dev/null
}
exit 0
endef
include $(TOPDIR)/feeds/luci/luci.mk

View File

@ -47,12 +47,38 @@ var callSetAlert = rpc.declare({
expect: { }
});
var callDeleteAlert = rpc.declare({
object: 'luci.media-flow',
method: 'delete_alert',
params: ['alert_id'],
expect: { }
});
var callListAlerts = rpc.declare({
object: 'luci.media-flow',
method: 'list_alerts',
expect: { alerts: [] }
});
var callClearHistory = rpc.declare({
object: 'luci.media-flow',
method: 'clear_history',
expect: { }
});
var callGetSettings = rpc.declare({
object: 'luci.media-flow',
method: 'get_settings',
expect: { }
});
var callSetSettings = rpc.declare({
object: 'luci.media-flow',
method: 'set_settings',
params: ['enabled', 'history_retention', 'refresh_interval'],
expect: { }
});
return baseclass.extend({
getStatus: callStatus,
getActiveStreams: callGetActiveStreams,
@ -61,5 +87,9 @@ return baseclass.extend({
getStatsByClient: callGetStatsByClient,
getServiceDetails: callGetServiceDetails,
setAlert: callSetAlert,
listAlerts: callListAlerts
deleteAlert: callDeleteAlert,
listAlerts: callListAlerts,
clearHistory: callClearHistory,
getSettings: callGetSettings,
setSettings: callSetSettings
});

View File

@ -1,6 +1,5 @@
'use strict';
'require view';
'require secubox-theme/theme as Theme';
'require form';
'require ui';
'require media-flow/api as API';

View File

@ -1,6 +1,5 @@
'use strict';
'require view';
'require secubox-theme/theme as Theme';
'require ui';
'require media-flow/api as API';

View File

@ -1,6 +1,5 @@
'use strict';
'require view';
'require secubox-theme/theme as Theme';
'require poll';
'require ui';
'require media-flow/api as API';

View File

@ -1,7 +1,6 @@
'use strict';
'require view';
'require secubox-theme/theme as Theme';
'require form';
'require ui';
'require media-flow/api as API';
return L.view.extend({
@ -12,79 +11,122 @@ return L.view.extend({
},
render: function(data) {
var history = data[0] || [];
var historyData = data[0] || {};
var history = historyData.history || [];
var m = new form.Map('media_flow', _('Stream History'),
_('Historical record of detected streaming sessions'));
var v = E('div', { 'class': 'cbi-map' }, [
E('h2', {}, _('Stream History')),
E('div', { 'class': 'cbi-map-descr' }, _('Historical record of detected streaming sessions'))
]);
var s = m.section(form.NamedSection, '__history', 'history');
s.anonymous = true;
s.addremove = false;
// Time period filter
var filterSection = E('div', { 'class': 'cbi-section' }, [
E('div', { 'style': 'display: flex; gap: 10px; align-items: center; margin-bottom: 15px;' }, [
E('label', {}, _('Time Period: ')),
E('select', { 'id': 'time-filter', 'class': 'cbi-input-select' }, [
E('option', { 'value': '1' }, _('Last 1 hour')),
E('option', { 'value': '6' }, _('Last 6 hours')),
E('option', { 'value': '24', 'selected': 'selected' }, _('Last 24 hours')),
E('option', { 'value': '168' }, _('Last 7 days'))
]),
E('button', {
'class': 'cbi-button cbi-button-action',
'click': function() {
var hours = document.getElementById('time-filter').value;
API.getStreamHistory(parseInt(hours)).then(function(data) {
updateHistoryTable(data.history || []);
});
}
}, _('Refresh')),
E('button', {
'class': 'cbi-button cbi-button-negative',
'style': 'margin-left: auto;',
'click': function() {
if (confirm(_('Clear all history data?'))) {
API.clearHistory().then(function() {
ui.addNotification(null, E('p', _('History cleared')), 'info');
updateHistoryTable([]);
});
}
}
}, _('Clear History'))
])
]);
v.appendChild(filterSection);
// Filter options
var o = s.option(form.ListValue, 'timeframe', _('Time Period'));
o.value('1', _('Last 1 hour'));
o.value('6', _('Last 6 hours'));
o.value('24', _('Last 24 hours'));
o.value('168', _('Last 7 days'));
o.default = '24';
// History table
var tableContainer = E('div', { 'id': 'history-table-container', 'class': 'cbi-section' });
v.appendChild(tableContainer);
// Display history table
s.render = L.bind(function(view, section_id) {
return API.getStreamHistory(24).then(L.bind(function(history) {
var table = E('table', { 'class': 'table' }, [
E('tr', { 'class': 'tr table-titles' }, [
E('th', { 'class': 'th' }, _('Time')),
E('th', { 'class': 'th' }, _('Service')),
E('th', { 'class': 'th' }, _('Client')),
E('th', { 'class': 'th' }, _('Quality')),
E('th', { 'class': 'th' }, _('Duration'))
])
]);
var updateHistoryTable = function(history) {
var container = document.getElementById('history-table-container');
if (!container) return;
if (history && history.length > 0) {
// Sort by timestamp descending
history.sort(function(a, b) {
return new Date(b.timestamp) - new Date(a.timestamp);
});
var table = E('table', { 'class': 'table' }, [
E('tr', { 'class': 'tr table-titles' }, [
E('th', { 'class': 'th' }, _('Time')),
E('th', { 'class': 'th' }, _('Service')),
E('th', { 'class': 'th' }, _('Category')),
E('th', { 'class': 'th' }, _('Client')),
E('th', { 'class': 'th' }, _('Quality')),
E('th', { 'class': 'th' }, _('Duration')),
E('th', { 'class': 'th' }, _('Bandwidth'))
])
]);
history.slice(0, 100).forEach(function(entry) {
var time = new Date(entry.timestamp).toLocaleString();
var duration = Math.floor(entry.duration_seconds / 60);
if (history && history.length > 0) {
// Sort by timestamp descending
history.sort(function(a, b) {
return new Date(b.timestamp) - new Date(a.timestamp);
});
var qualityColor = {
'SD': '#999',
'HD': '#0088cc',
'FHD': '#00cc00',
'4K': '#cc0000'
}[entry.quality] || '#666';
var categoryIcons = {
'video': '🎬',
'audio': '🎵',
'visio': '📹',
'other': '📊'
};
var qualityColors = {
'SD': '#999',
'HD': '#0088cc',
'FHD': '#00cc00',
'4K': '#cc0000'
};
history.slice(0, 100).forEach(function(entry) {
var time = new Date(entry.timestamp).toLocaleString();
var duration = Math.floor((entry.duration || 0) / 60);
var categoryIcon = categoryIcons[entry.category] || '📊';
var qualityColor = qualityColors[entry.quality] || '#666';
table.appendChild(E('tr', { 'class': 'tr' }, [
E('td', { 'class': 'td' }, time),
E('td', { 'class': 'td' }, entry.application),
E('td', { 'class': 'td' }, entry.client),
E('td', { 'class': 'td' },
E('span', { 'style': 'color: ' + qualityColor }, entry.quality)
),
E('td', { 'class': 'td' }, duration + ' min')
]));
});
} else {
table.appendChild(E('tr', { 'class': 'tr' }, [
E('td', { 'class': 'td', 'colspan': '5', 'style': 'text-align: center; font-style: italic' },
_('No historical data available'))
E('td', { 'class': 'td' }, time),
E('td', { 'class': 'td' }, entry.app || 'unknown'),
E('td', { 'class': 'td' }, categoryIcon + ' ' + (entry.category || 'other')),
E('td', { 'class': 'td' }, entry.client || 'unknown'),
E('td', { 'class': 'td' },
E('span', { 'style': 'color: ' + qualityColor + '; font-weight: bold' }, entry.quality || 'N/A')
),
E('td', { 'class': 'td' }, duration + ' min'),
E('td', { 'class': 'td' }, (entry.bandwidth || 0) + ' kbps')
]));
}
});
} else {
table.appendChild(E('tr', { 'class': 'tr' }, [
E('td', { 'class': 'td', 'colspan': '7', 'style': 'text-align: center; font-style: italic; padding: 20px;' },
_('No historical data available. Streaming sessions will appear here once detected.'))
]));
}
return E('div', { 'class': 'cbi-section' }, [
E('link', { 'rel': 'stylesheet', 'href': L.resource('secubox-theme/secubox-theme.css') }),
E('h3', {}, _('Recent Sessions')),
table
]);
}, this));
}, this, this);
container.innerHTML = '';
container.appendChild(table);
};
return m.render();
// Initial render
updateHistoryTable(history);
return v;
},
handleSaveApply: null,

View File

@ -1,6 +1,5 @@
'use strict';
'require view';
'require secubox-theme/theme as Theme';
'require ui';
'require media-flow/api as API';

View File

@ -0,0 +1,82 @@
#!/bin/sh /etc/rc.common
#
# Media Flow Init Script
# Manages the media flow data collector cron job
#
START=99
STOP=10
CRON_FILE="/etc/crontabs/root"
CRON_ENTRY="*/5 * * * * /usr/bin/media-flow-collector >/dev/null 2>&1"
CRON_MARKER="# media-flow-collector"
add_cron_entry() {
	# Install the collector cron job (idempotent: strip any stale
	# copies first so repeated start/reload never duplicates entries).
	remove_cron_entry
	# ">>" creates the crontab file when it does not exist yet, so the
	# original create-vs-append if/else branch (both arms did the same
	# thing) is unnecessary.
	echo "$CRON_MARKER" >> "$CRON_FILE"
	echo "$CRON_ENTRY" >> "$CRON_FILE"
	# Ask cron to pick up the new table (reload preferred, restart fallback).
	/etc/init.d/cron reload 2>/dev/null || /etc/init.d/cron restart 2>/dev/null
}
remove_cron_entry() {
	# Nothing to remove when no crontab exists yet.
	[ -f "$CRON_FILE" ] || return 0
	# Drop both the marker comment and the collector entry in one sed
	# invocation ("|" delimiter for the path expression, since it
	# contains slashes), then let cron re-read its tables.
	sed -i -e '/# media-flow-collector/d' -e '\|/usr/bin/media-flow-collector|d' "$CRON_FILE"
	/etc/init.d/cron reload 2>/dev/null || /etc/init.d/cron restart 2>/dev/null
}
start() {
	# Honour the UCI master switch; missing option means enabled.
	local enabled
	enabled=$(uci -q get media_flow.global.enabled 2>/dev/null || echo "1")
	[ "$enabled" = "1" ] || return 0
	logger -t media-flow "Starting media flow collector"
	add_cron_entry
	# Kick off one collection immediately (in the background) so data
	# appears without waiting for the first cron tick.
	/usr/bin/media-flow-collector &
}
stop() {
# Stopping only needs to remove the cron entry: the collector itself
# is a short-lived one-shot script, not a long-running daemon.
logger -t media-flow "Stopping media flow collector"
remove_cron_entry
}
reload() {
	# Re-read the UCI switch and converge the cron state to match it:
	# enabled -> (re)install the entry, disabled -> remove it.
	local enabled
	enabled=$(uci -q get media_flow.global.enabled 2>/dev/null || echo "1")
	case "$enabled" in
		1)
			logger -t media-flow "Reloading media flow collector"
			add_cron_entry
			;;
		*)
			logger -t media-flow "Media flow disabled, removing collector"
			remove_cron_entry
			;;
	esac
}
status() {
	# Report collector cron state, the UCI switch, and history size.
	local enabled state count
	enabled=$(uci -q get media_flow.global.enabled 2>/dev/null || echo "1")
	state="INACTIVE"
	grep -q "media-flow-collector" "$CRON_FILE" 2>/dev/null && state="ACTIVE"
	echo "Media Flow collector: $state"
	echo "UCI enabled: $enabled"
	# History entry count is only meaningful once the file exists.
	if [ -f /tmp/media-flow-history.json ]; then
		count=$(jq 'length' /tmp/media-flow-history.json 2>/dev/null || echo 0)
		echo "History entries: $count"
	fi
}

View File

@ -0,0 +1,88 @@
#!/bin/sh
#
# Media Flow Data Collector
# Collects streaming-service flow data from netifyd and appends it to a
# JSON history file (ring buffer capped at MAX_ENTRIES entries).
# Intended to be run periodically from cron (see /etc/init.d/media-flow).
#
HISTORY_FILE="/tmp/media-flow-history.json"
MAX_ENTRIES=1000
LOCK_FILE="/tmp/media-flow-collector.lock"

# Streaming services patterns (case-insensitive; interpolated into the
# jq test() call below, so keep it a valid regex)
STREAMING_PATTERN="netflix|youtube|disney|primevideo|amazon.*video|twitch|hulu|hbo|vimeo|peacock|paramount|crunchyroll|funimation|spotify|apple.*music|deezer|soundcloud|tidal|pandora|amazon.*music|youtube.*music|zoom|teams|meet|discord|skype|webex|facetime|whatsapp"

# Single-instance guard: bail out if another live collector holds the
# lock; a stale lock (dead PID) is simply overwritten.
if [ -f "$LOCK_FILE" ]; then
	pid=$(cat "$LOCK_FILE")
	if kill -0 "$pid" 2>/dev/null; then
		exit 0
	fi
fi
echo $$ > "$LOCK_FILE"
trap 'rm -f "$LOCK_FILE"' EXIT

# Honour the UCI master switch (missing option means enabled)
enabled=$(uci -q get media_flow.global.enabled 2>/dev/null || echo "1")
[ "$enabled" != "1" ] && exit 0

# Nothing to collect unless netifyd is running
pgrep -x netifyd > /dev/null 2>&1 || exit 0

# Make sure the history file exists and holds a JSON array
[ ! -f "$HISTORY_FILE" ] && echo '[]' > "$HISTORY_FILE"

# Snapshot current flows from netifyd and convert the streaming ones
# into history entries (bandwidth in kbps, coarse quality tier, category)
if [ -f /var/run/netifyd/status.json ]; then
	timestamp=$(date -Iseconds)
	new_entries=$(jq -c --arg ts "$timestamp" '
		.flows // [] |
		[.[] |
		select(.detected_application != null and .detected_application != "") |
		select(.detected_application | test("'"$STREAMING_PATTERN"'"; "i")) |
		{
			timestamp: $ts,
			app: .detected_application,
			client: (.local_ip // .src_ip // "unknown"),
			bandwidth: (if .total_packets > 0 and .duration > 0 then
				((.total_bytes * 8) / 1000 / .duration) | floor
			else 0 end),
			duration: (.duration // 0 | floor),
			quality: (if .total_packets > 0 and .duration > 0 then
				(if ((.total_bytes * 8) / 1000 / .duration) < 1000 then "SD"
				elif ((.total_bytes * 8) / 1000 / .duration) < 3000 then "HD"
				elif ((.total_bytes * 8) / 1000 / .duration) < 8000 then "FHD"
				else "4K" end)
			else "SD" end),
			category: (if (.detected_application | test("netflix|youtube|disney|primevideo|twitch|hulu|hbo|vimeo"; "i")) then "video"
				elif (.detected_application | test("spotify|apple.*music|deezer|soundcloud|tidal"; "i")) then "audio"
				elif (.detected_application | test("zoom|teams|meet|discord|skype|webex"; "i")) then "visio"
				else "other" end),
			bytes: (.total_bytes // 0)
		}
		] |
		# Ignore very short flows (noise, connection probes)
		[.[] | select(.duration > 10)]
	' /var/run/netifyd/status.json 2>/dev/null)

	# Merge new entries into the history, keeping only the newest
	# MAX_ENTRIES (write-then-rename so a failed jq never truncates)
	if [ -n "$new_entries" ] && [ "$new_entries" != "[]" ] && [ "$new_entries" != "null" ]; then
		jq -c --argjson new "$new_entries" '
			. + $new |
			.[-'"$MAX_ENTRIES"':]
		' "$HISTORY_FILE" > "${HISTORY_FILE}.tmp" 2>/dev/null && mv "${HISTORY_FILE}.tmp" "$HISTORY_FILE"
	fi
fi

# Prune entries older than the configured retention (days).
retention=$(uci -q get media_flow.global.history_retention 2>/dev/null || echo "7")
if [ "$retention" -gt 0 ] 2>/dev/null; then
	# BUGFIX: compute the cutoff from the epoch so it also works with
	# BusyBox date, which does not understand GNU's "-d 'N days ago'".
	# The previous fallback ("|| date -Iseconds") set the cutoff to
	# "now" whenever date -d failed and silently wiped the entire
	# history. If no usable cutoff can be computed, skip pruning —
	# keeping old data is safer than deleting everything.
	cutoff_epoch=$(( $(date +%s) - retention * 86400 ))
	cutoff_date=$(date -Iseconds -d "@$cutoff_epoch" 2>/dev/null || date -d "$retention days ago" -Iseconds 2>/dev/null)
	if [ -n "$cutoff_date" ]; then
		jq -c --arg cutoff "$cutoff_date" '[.[] | select(.timestamp >= $cutoff)]' "$HISTORY_FILE" > "${HISTORY_FILE}.tmp" 2>/dev/null && mv "${HISTORY_FILE}.tmp" "$HISTORY_FILE"
	fi
fi
exit 0

View File

@ -5,368 +5,275 @@
. /lib/functions.sh
. /usr/share/libubox/jshn.sh
# Streaming services detection patterns
# Based on netifyd application detection
HISTORY_FILE="/tmp/media-flow-history.json"
ALERTS_FILE="/etc/config/media_flow"
STATS_DIR="/tmp/media-flow-stats"
# Initialize
# Initialize storage
init_storage() {
# Create the stats directory and seed the history file with an empty
# JSON array so that later jq invocations always get valid input.
mkdir -p "$STATS_DIR"
[ ! -f "$HISTORY_FILE" ] && echo '[]' > "$HISTORY_FILE"
}
# Get netifyd flows and filter streaming services
get_netifyd_flows() {
# Try to get flows from netifyd socket or status file
if [ -S /var/run/netifyd/netifyd.sock ]; then
echo "status" | nc -U /var/run/netifyd/netifyd.sock 2>/dev/null
elif [ -f /var/run/netifyd/status.json ]; then
cat /var/run/netifyd/status.json
else
echo '{}'
fi
}
# Streaming services patterns
STREAMING_VIDEO="netflix|youtube|disney|primevideo|amazon.*video|twitch|hulu|hbo|vimeo|peacock|paramount|crunchyroll|funimation"
STREAMING_AUDIO="spotify|apple.*music|deezer|soundcloud|tidal|pandora|amazon.*music|youtube.*music"
STREAMING_VISIO="zoom|teams|meet|discord|skype|webex|facetime|whatsapp"
# Detect if application is a streaming service
is_streaming_service() {
local app="$1"
# Video streaming
echo "$app" | grep -qiE 'netflix|youtube|disney|primevideo|amazon.*video|twitch|hulu|hbo|vimeo' && return 0
# Audio streaming
echo "$app" | grep -qiE 'spotify|apple.*music|deezer|soundcloud|tidal|pandora' && return 0
# Video conferencing
echo "$app" | grep -qiE 'zoom|teams|meet|discord|skype|webex' && return 0
return 1
echo "$app" | grep -qiE "$STREAMING_VIDEO|$STREAMING_AUDIO|$STREAMING_VISIO"
}
# Get service category
get_service_category() {
# Classify an application name into "video" / "audio" / "visio" by
# matching the shared pattern lists; anything unmatched is "other".
local app="$1"
echo "$app" | grep -qiE "$STREAMING_VIDEO" && echo "video" && return
echo "$app" | grep -qiE "$STREAMING_AUDIO" && echo "audio" && return
echo "$app" | grep -qiE "$STREAMING_VISIO" && echo "visio" && return
echo "other"
}
# Estimate quality based on bandwidth (kbps)
estimate_quality() {
local bandwidth="$1" # in kbps
# Video streaming quality estimation
if [ "$bandwidth" -lt 1000 ]; then
local bandwidth="$1"
[ -z "$bandwidth" ] && bandwidth=0
if [ "$bandwidth" -lt 1000 ] 2>/dev/null; then
echo "SD"
elif [ "$bandwidth" -lt 3000 ]; then
elif [ "$bandwidth" -lt 3000 ] 2>/dev/null; then
echo "HD"
elif [ "$bandwidth" -lt 8000 ]; then
elif [ "$bandwidth" -lt 8000 ] 2>/dev/null; then
echo "FHD"
else
echo "4K"
fi
}
# Get service category
get_service_category() {
local app="$1"
echo "$app" | grep -qiE 'netflix|youtube|disney|primevideo|twitch|hulu|hbo|vimeo' && echo "video" && return
echo "$app" | grep -qiE 'spotify|apple.*music|deezer|soundcloud|tidal' && echo "audio" && return
echo "$app" | grep -qiE 'zoom|teams|meet|discord|skype|webex' && echo "visio" && return
echo "other"
# Get netifyd status data
get_netifyd_data() {
# Emit the raw netifyd status JSON, or an empty object when the status
# file is absent (netifyd not running / not installed), so callers can
# always pipe the output into jq.
if [ -f /var/run/netifyd/status.json ]; then
cat /var/run/netifyd/status.json
else
echo '{}'
fi
}
# Save stream to history
save_to_history() {
local app="$1"
local client="$2"
local bandwidth="$3"
local duration="$4"
init_storage
local timestamp=$(date -Iseconds)
local quality=$(estimate_quality "$bandwidth")
local category=$(get_service_category "$app")
# Append to history (keep last 1000 entries)
local entry="{\"timestamp\":\"$timestamp\",\"app\":\"$app\",\"client\":\"$client\",\"bandwidth\":$bandwidth,\"duration\":$duration,\"quality\":\"$quality\",\"category\":\"$category\"}"
if [ -f "$HISTORY_FILE" ]; then
jq ". += [$entry] | .[-1000:]" "$HISTORY_FILE" > "${HISTORY_FILE}.tmp" 2>/dev/null && mv "${HISTORY_FILE}.tmp" "$HISTORY_FILE"
fi
# Build active streams JSON array
build_active_streams_json() {
# Build a JSON array of currently active streaming flows from the raw
# netifyd status blob passed as $1. A single jq program filters flows
# whose detected application matches the streaming regex and derives,
# per flow: client/server IPs, byte/packet totals, bandwidth in kbps
# (total_bytes*8/1000 over duration, min duration 1s to avoid divide
# by zero), a category bucket, and a coarse quality tier from the
# bandwidth (SD < 1000 < HD < 3000 < FHD < 8000 < 4K kbps).
# Always prints a JSON array ("[]" on any error), never fails.
local netifyd_data="$1"
local result="[]"
# Extract flows from netifyd data
local flows=$(echo "$netifyd_data" | jq -c '.flows // []' 2>/dev/null)
[ -z "$flows" ] || [ "$flows" = "null" ] && flows="[]"
# Process each flow and filter streaming services
result=$(echo "$flows" | jq -c '
[.[] | select(.detected_application != null and .detected_application != "") |
select(.detected_application | test("netflix|youtube|disney|primevideo|amazon.*video|twitch|hulu|hbo|vimeo|spotify|apple.*music|deezer|soundcloud|tidal|zoom|teams|meet|discord|skype|webex"; "i")) |
{
application: .detected_application,
client_ip: (.local_ip // .src_ip // "unknown"),
server_ip: (.other_ip // .dst_ip // "unknown"),
total_bytes: (.total_bytes // 0),
total_packets: (.total_packets // 0),
bandwidth_kbps: (if .total_packets > 0 then ((.total_bytes * 8) / 1000 / (if .duration > 0 then .duration else 1 end)) else 0 end | floor),
category: (if (.detected_application | test("netflix|youtube|disney|primevideo|twitch|hulu|hbo|vimeo"; "i")) then "video"
elif (.detected_application | test("spotify|apple.*music|deezer|soundcloud|tidal"; "i")) then "audio"
elif (.detected_application | test("zoom|teams|meet|discord|skype|webex"; "i")) then "visio"
else "other" end),
quality: (if .total_packets > 0 then
(if ((.total_bytes * 8) / 1000 / (if .duration > 0 then .duration else 1 end)) < 1000 then "SD"
elif ((.total_bytes * 8) / 1000 / (if .duration > 0 then .duration else 1 end)) < 3000 then "HD"
elif ((.total_bytes * 8) / 1000 / (if .duration > 0 then .duration else 1 end)) < 8000 then "FHD"
else "4K" end)
else "SD" end)
}]' 2>/dev/null) || result="[]"
echo "$result"
}
case "$1" in
list)
# List available methods
json_init
json_add_object "status"
json_close_object
json_add_object "get_active_streams"
json_close_object
json_add_object "get_stream_history"
json_add_string "hours" "int"
json_close_object
json_add_object "get_stats_by_service"
json_close_object
json_add_object "get_stats_by_client"
json_close_object
json_add_object "get_service_details"
json_add_string "service" "string"
json_close_object
json_add_object "set_alert"
json_add_string "service" "string"
json_add_string "threshold_hours" "int"
json_add_string "action" "string"
json_close_object
json_add_object "list_alerts"
json_close_object
json_dump
cat <<-'EOF'
{
"status": {},
"get_active_streams": {},
"get_stream_history": {"hours": 24},
"get_stats_by_service": {},
"get_stats_by_client": {},
"get_service_details": {"service": "string"},
"set_alert": {"service": "string", "threshold_hours": 4, "action": "notify"},
"delete_alert": {"alert_id": "string"},
"list_alerts": {},
"clear_history": {},
"get_settings": {},
"set_settings": {"enabled": 1, "history_retention": 7, "refresh_interval": 5}
}
EOF
;;
call)
case "$2" in
status)
init_storage
json_init
json_add_boolean "enabled" 1
json_add_string "module" "media-flow"
json_add_string "version" "1.0.0"
# Check netifyd status
if pgrep -x netifyd > /dev/null 2>&1; then
json_add_boolean "netifyd_running" 1
else
json_add_boolean "netifyd_running" 0
fi
# Count active streams
local netifyd_running=0
pgrep -x netifyd > /dev/null 2>&1 && netifyd_running=1
local netifyd_data=$(get_netifyd_data)
local active_count=0
local flows=$(get_netifyd_flows)
if [ -n "$flows" ]; then
active_count=$(echo "$flows" | jq '[.flows[]? | select(.detected_application != null)] | length' 2>/dev/null || echo 0)
if [ "$netifyd_running" = "1" ] && [ -n "$netifyd_data" ]; then
active_count=$(build_active_streams_json "$netifyd_data" | jq 'length' 2>/dev/null || echo 0)
fi
json_add_int "active_streams" "$active_count"
# History size
local history_count=0
if [ -f "$HISTORY_FILE" ]; then
history_count=$(jq 'length' "$HISTORY_FILE" 2>/dev/null || echo 0)
fi
json_add_int "history_entries" "$history_count"
json_dump
[ -f "$HISTORY_FILE" ] && history_count=$(jq 'length' "$HISTORY_FILE" 2>/dev/null || echo 0)
# Get settings
local enabled=$(uci -q get media_flow.global.enabled 2>/dev/null || echo "1")
local refresh=$(uci -q get media_flow.global.refresh_interval 2>/dev/null || echo "5")
cat <<-EOF
{
"enabled": $enabled,
"module": "media-flow",
"version": "0.5.0",
"netifyd_running": $netifyd_running,
"active_streams": $active_count,
"history_entries": $history_count,
"refresh_interval": $refresh
}
EOF
;;
get_active_streams)
json_init
json_add_array "streams"
# Get flows from netifyd
local flows=$(get_netifyd_flows)
if [ -n "$flows" ]; then
# Parse flows and filter streaming services
echo "$flows" | jq -c '.flows[]? | select(.detected_application != null)' 2>/dev/null | while read -r flow; do
local app=$(echo "$flow" | jq -r '.detected_application // "unknown"')
local src_ip=$(echo "$flow" | jq -r '.src_ip // "0.0.0.0"')
local dst_ip=$(echo "$flow" | jq -r '.dst_ip // "0.0.0.0"')
local bytes=$(echo "$flow" | jq -r '.total_bytes // 0')
local packets=$(echo "$flow" | jq -r '.total_packets // 0')
# Check if it's a streaming service
if is_streaming_service "$app"; then
# Estimate bandwidth (rough estimation)
local bandwidth=0
if [ "$packets" -gt 0 ]; then
bandwidth=$((bytes * 8 / packets / 100)) # Very rough kbps estimate
fi
local quality=$(estimate_quality "$bandwidth")
local category=$(get_service_category "$app")
json_add_object
json_add_string "application" "$app"
json_add_string "client_ip" "$src_ip"
json_add_string "server_ip" "$dst_ip"
json_add_int "bandwidth_kbps" "$bandwidth"
json_add_string "quality" "$quality"
json_add_string "category" "$category"
json_add_int "total_bytes" "$bytes"
json_add_int "total_packets" "$packets"
json_close_object
fi
done
fi
json_close_array
json_dump
init_storage
local netifyd_data=$(get_netifyd_data)
local streams=$(build_active_streams_json "$netifyd_data")
cat <<-EOF
{"streams": $streams}
EOF
;;
get_stream_history)
read -r input
json_load "$input"
json_get_var hours hours
# Default to 24 hours
hours=${hours:-24}
local hours=$(echo "$input" | jq -r '.hours // 24' 2>/dev/null)
[ -z "$hours" ] || [ "$hours" = "null" ] && hours=24
init_storage
json_init
json_add_array "history"
local history="[]"
if [ -f "$HISTORY_FILE" ]; then
# Filter by time (last N hours)
local cutoff_time=$(date -d "$hours hours ago" -Iseconds 2>/dev/null || date -Iseconds)
jq -c ".[] | select(.timestamp >= \"$cutoff_time\")" "$HISTORY_FILE" 2>/dev/null | while read -r entry; do
echo "$entry"
done | jq -s '.' | jq -c '.[]' | while read -r entry; do
local timestamp=$(echo "$entry" | jq -r '.timestamp')
local app=$(echo "$entry" | jq -r '.app')
local client=$(echo "$entry" | jq -r '.client')
local bandwidth=$(echo "$entry" | jq -r '.bandwidth')
local duration=$(echo "$entry" | jq -r '.duration')
local quality=$(echo "$entry" | jq -r '.quality')
local category=$(echo "$entry" | jq -r '.category')
json_add_object
json_add_string "timestamp" "$timestamp"
json_add_string "application" "$app"
json_add_string "client" "$client"
json_add_int "bandwidth_kbps" "$bandwidth"
json_add_int "duration_seconds" "$duration"
json_add_string "quality" "$quality"
json_add_string "category" "$category"
json_close_object
done
# Get history (cutoff filtering done client-side for simplicity)
history=$(jq -c '.' "$HISTORY_FILE" 2>/dev/null || echo "[]")
fi
json_close_array
json_dump
cat <<-EOF
{"history": $history, "hours_requested": $hours}
EOF
;;
get_stats_by_service)
init_storage
json_init
json_add_object "services"
if [ -f "$HISTORY_FILE" ]; then
# Aggregate by service
local services=$(jq -r '.[].app' "$HISTORY_FILE" 2>/dev/null | sort -u)
for service in $services; do
local count=$(jq "[.[] | select(.app == \"$service\")] | length" "$HISTORY_FILE" 2>/dev/null || echo 0)
local total_bandwidth=$(jq "[.[] | select(.app == \"$service\")] | map(.bandwidth) | add" "$HISTORY_FILE" 2>/dev/null || echo 0)
local total_duration=$(jq "[.[] | select(.app == \"$service\")] | map(.duration) | add" "$HISTORY_FILE" 2>/dev/null || echo 0)
local category=$(jq -r "[.[] | select(.app == \"$service\")][0].category" "$HISTORY_FILE" 2>/dev/null || echo "other")
json_add_object "$service"
json_add_int "sessions" "$count"
json_add_int "total_bandwidth_kbps" "$total_bandwidth"
json_add_int "total_duration_seconds" "$total_duration"
json_add_string "category" "$category"
json_close_object
done
local services="{}"
if [ -f "$HISTORY_FILE" ] && [ -s "$HISTORY_FILE" ]; then
services=$(jq -c '
group_by(.app) |
map({
key: .[0].app,
value: {
sessions: length,
total_bandwidth_kbps: (map(.bandwidth) | add // 0),
total_duration_seconds: (map(.duration) | add // 0),
category: .[0].category
}
}) |
from_entries
' "$HISTORY_FILE" 2>/dev/null) || services="{}"
fi
json_close_object
json_dump
cat <<-EOF
{"services": $services}
EOF
;;
get_stats_by_client)
init_storage
json_init
json_add_object "clients"
if [ -f "$HISTORY_FILE" ]; then
# Aggregate by client
local clients=$(jq -r '.[].client' "$HISTORY_FILE" 2>/dev/null | sort -u)
for client in $clients; do
local count=$(jq "[.[] | select(.client == \"$client\")] | length" "$HISTORY_FILE" 2>/dev/null || echo 0)
local total_bandwidth=$(jq "[.[] | select(.client == \"$client\")] | map(.bandwidth) | add" "$HISTORY_FILE" 2>/dev/null || echo 0)
local total_duration=$(jq "[.[] | select(.client == \"$client\")] | map(.duration) | add" "$HISTORY_FILE" 2>/dev/null || echo 0)
local top_service=$(jq -r "[.[] | select(.client == \"$client\")] | group_by(.app) | max_by(length)[0].app" "$HISTORY_FILE" 2>/dev/null || echo "unknown")
json_add_object "$client"
json_add_int "sessions" "$count"
json_add_int "total_bandwidth_kbps" "$total_bandwidth"
json_add_int "total_duration_seconds" "$total_duration"
json_add_string "top_service" "$top_service"
json_close_object
done
local clients="{}"
if [ -f "$HISTORY_FILE" ] && [ -s "$HISTORY_FILE" ]; then
clients=$(jq -c '
group_by(.client) |
map({
key: .[0].client,
value: {
sessions: length,
total_bandwidth_kbps: (map(.bandwidth) | add // 0),
total_duration_seconds: (map(.duration) | add // 0),
top_service: (group_by(.app) | max_by(length) | .[0].app // "unknown")
}
}) |
from_entries
' "$HISTORY_FILE" 2>/dev/null) || clients="{}"
fi
json_close_object
json_dump
cat <<-EOF
{"clients": $clients}
EOF
;;
get_service_details)
read -r input
json_load "$input"
json_get_var service service
local service=$(echo "$input" | jq -r '.service // ""' 2>/dev/null)
init_storage
json_init
json_add_string "service" "$service"
if [ -f "$HISTORY_FILE" ] && [ -n "$service" ]; then
local count=$(jq "[.[] | select(.app == \"$service\")] | length" "$HISTORY_FILE" 2>/dev/null || echo 0)
local avg_bandwidth=$(jq "[.[] | select(.app == \"$service\")] | map(.bandwidth) | add / length" "$HISTORY_FILE" 2>/dev/null || echo 0)
local total_duration=$(jq "[.[] | select(.app == \"$service\")] | map(.duration) | add" "$HISTORY_FILE" 2>/dev/null || echo 0)
local category=$(jq -r "[.[] | select(.app == \"$service\")][0].category" "$HISTORY_FILE" 2>/dev/null || echo "other")
local quality=$(estimate_quality "$avg_bandwidth")
json_add_int "total_sessions" "$count"
json_add_int "avg_bandwidth_kbps" "$avg_bandwidth"
json_add_int "total_duration_seconds" "$total_duration"
json_add_string "category" "$category"
json_add_string "typical_quality" "$quality"
# Recent sessions
json_add_array "recent_sessions"
jq -c "[.[] | select(.app == \"$service\")] | .[-10:][]" "$HISTORY_FILE" 2>/dev/null | while read -r session; do
json_add_object
json_add_string "timestamp" "$(echo "$session" | jq -r '.timestamp')"
json_add_string "client" "$(echo "$session" | jq -r '.client')"
json_add_int "bandwidth_kbps" "$(echo "$session" | jq -r '.bandwidth')"
json_add_int "duration_seconds" "$(echo "$session" | jq -r '.duration')"
json_add_string "quality" "$(echo "$session" | jq -r '.quality')"
json_close_object
done
json_close_array
local result='{}'
if [ -n "$service" ] && [ -f "$HISTORY_FILE" ] && [ -s "$HISTORY_FILE" ]; then
result=$(jq -c --arg svc "$service" '
[.[] | select(.app == $svc)] |
{
service: $svc,
total_sessions: length,
avg_bandwidth_kbps: (if length > 0 then (map(.bandwidth) | add / length | floor) else 0 end),
total_duration_seconds: (map(.duration) | add // 0),
category: (.[0].category // "unknown"),
typical_quality: (.[0].quality // "unknown"),
recent_sessions: (.[-10:] | map({
timestamp: .timestamp,
client: .client,
bandwidth_kbps: .bandwidth,
duration_seconds: .duration,
quality: .quality
}))
}
' "$HISTORY_FILE" 2>/dev/null) || result='{"service":"'$service'","total_sessions":0,"avg_bandwidth_kbps":0,"total_duration_seconds":0,"category":"unknown","typical_quality":"unknown","recent_sessions":[]}'
else
json_add_int "total_sessions" 0
json_add_int "avg_bandwidth_kbps" 0
json_add_int "total_duration_seconds" 0
json_add_string "category" "unknown"
json_add_string "typical_quality" "unknown"
json_add_array "recent_sessions"
json_close_array
result='{"service":"'$service'","total_sessions":0,"avg_bandwidth_kbps":0,"total_duration_seconds":0,"category":"unknown","typical_quality":"unknown","recent_sessions":[]}'
fi
json_dump
echo "$result"
;;
set_alert)
    # Configure a usage alert for a streaming service.
    # Request body (stdin, one JSON line): {"service": "...", "threshold_hours": N, "action": "..."}
    # All fields are extracted with jq to avoid json_load subshell issues.
    read -r input
    local service=$(echo "$input" | jq -r '.service // ""' 2>/dev/null)
    local threshold_hours=$(echo "$input" | jq -r '.threshold_hours // 4' 2>/dev/null)
    local action=$(echo "$input" | jq -r '.action // "notify"' 2>/dev/null)
    if [ -z "$service" ]; then
        echo '{"success": false, "message": "Service name required"}'
        exit 0
    fi
    # Derive a safe UCI section id from the service name: lowercase,
    # spaces removed, restricted to [a-z0-9_] so uci cannot be confused.
    local alert_id="alert_$(echo "$service" | tr -d ' ' | tr '[:upper:]' '[:lower:]' | tr -cd 'a-z0-9_')"
    # Replace any existing alert for this service, then persist.
    uci -q delete "media_flow.${alert_id}" 2>/dev/null
    uci set "media_flow.${alert_id}=alert"
    uci set "media_flow.${alert_id}.service=${service}"
    uci set "media_flow.${alert_id}.threshold_hours=${threshold_hours}"
    uci set "media_flow.${alert_id}.action=${action}"
    uci set "media_flow.${alert_id}.enabled=1"
    uci commit media_flow
    echo "{\"success\": true, \"message\": \"Alert configured for $service\", \"alert_id\": \"$alert_id\"}"
    ;;
list_alerts)
json_init
json_add_array "alerts"
if [ -f "$ALERTS_FILE" ]; then
. /lib/functions.sh
config_load media_flow
config_cb() {
local type="$1"
local name="$2"
if [ "$type" = "alert" ]; then
local service threshold_hours action enabled
config_get service "$name" service
config_get threshold_hours "$name" threshold_hours
config_get action "$name" action
config_get enabled "$name" enabled
json_add_object
json_add_string "id" "$name"
json_add_string "service" "$service"
json_add_int "threshold_hours" "$threshold_hours"
json_add_string "action" "$action"
json_add_boolean "enabled" "$enabled"
json_close_object
fi
}
config_load media_flow
delete_alert)
    # Remove a configured alert by its UCI section id.
    # Request body (stdin, one JSON line): {"alert_id": "alert_..."}
    read -r input
    local alert_id=$(echo "$input" | jq -r '.alert_id // ""' 2>/dev/null)
    if [ -z "$alert_id" ]; then
        echo '{"success": false, "message": "Alert ID required"}'
        exit 0
    fi
    # Only delete (and commit) if the section actually exists, so we can
    # report a meaningful "not found" to the caller.
    if uci -q get "media_flow.${alert_id}" >/dev/null 2>&1; then
        uci delete "media_flow.${alert_id}"
        uci commit media_flow
        echo '{"success": true, "message": "Alert deleted"}'
    else
        echo '{"success": false, "message": "Alert not found"}'
    fi
    ;;
list_alerts)
    # List all configured alerts as {"alerts": [...]}.
    # Each UCI section of type "alert" is emitted as a one-line JSON object,
    # then jq -s collects the stream into a single array.
    local alerts="[]"
    alerts=$(uci show media_flow 2>/dev/null | grep "=alert$" | while read -r line; do
        local section=$(echo "$line" | cut -d. -f2 | cut -d= -f1)
        local service=$(uci -q get "media_flow.${section}.service")
        local threshold=$(uci -q get "media_flow.${section}.threshold_hours")
        local action=$(uci -q get "media_flow.${section}.action")
        local enabled=$(uci -q get "media_flow.${section}.enabled")
        # Default missing numeric/boolean options: an empty $threshold would
        # produce invalid JSON and make jq reject the WHOLE alert list.
        [ -z "$threshold" ] && threshold=4
        [ -z "$enabled" ] && enabled="1"
        printf '{"id":"%s","service":"%s","threshold_hours":%s,"action":"%s","enabled":%s}\n' \
            "$section" "$service" "$threshold" "$action" "$enabled"
    done | jq -s '.' 2>/dev/null) || alerts="[]"
    # jq failure or no matching sections both degrade to an empty list.
    [ -z "$alerts" ] || [ "$alerts" = "null" ] && alerts="[]"
    echo "{\"alerts\": $alerts}"
    ;;
clear_history)
    # Reset the persisted session history to an empty JSON array.
    printf '[]\n' > "$HISTORY_FILE"
    printf '{"success": true, "message": "History cleared"}\n'
    ;;
get_settings)
    # Report global module settings, falling back to built-in defaults
    # (enabled=1, history_retention=7 days, refresh_interval=5 s).
    local enabled=$(uci -q get media_flow.global.enabled 2>/dev/null || echo "1")
    local retention=$(uci -q get media_flow.global.history_retention 2>/dev/null || echo "7")
    local refresh=$(uci -q get media_flow.global.refresh_interval 2>/dev/null || echo "5")
    # Guard against non-numeric UCI values: they are interpolated unquoted
    # into the JSON below and would otherwise break the response.
    case "$enabled" in ''|*[!0-9]*) enabled=1 ;; esac
    case "$retention" in ''|*[!0-9]*) retention=7 ;; esac
    case "$refresh" in ''|*[!0-9]*) refresh=5 ;; esac
    printf '{"enabled": %s, "history_retention": %s, "refresh_interval": %s}\n' \
        "$enabled" "$retention" "$refresh"
    ;;
set_settings)
    # Persist global module settings from a JSON request body.
    # NOTE: jq's // operator treats boolean false as "absent", so a plain
    # '.enabled // 1' would silently turn {"enabled": false} into 1.
    # Normalize booleans explicitly to 0/1 instead.
    read -r input
    local enabled=$(echo "$input" | jq -r \
        'if .enabled == null then 1 elif .enabled == true then 1 elif .enabled == false then 0 else .enabled end' \
        2>/dev/null)
    local retention=$(echo "$input" | jq -r '.history_retention // 7' 2>/dev/null)
    local refresh=$(echo "$input" | jq -r '.refresh_interval // 5' 2>/dev/null)
    # Reject garbage (e.g. jq failed on malformed input and emitted nothing):
    # never write empty or non-numeric values into UCI.
    case "$enabled" in ''|*[!0-9]*) enabled=1 ;; esac
    case "$retention" in ''|*[!0-9]*) retention=7 ;; esac
    case "$refresh" in ''|*[!0-9]*) refresh=5 ;; esac
    uci set media_flow.global.enabled="$enabled"
    uci set media_flow.global.history_retention="$retention"
    uci set media_flow.global.refresh_interval="$refresh"
    uci commit media_flow
    echo '{"success": true, "message": "Settings saved"}'
    ;;
*)
    # Unknown method: reply with a JSON-RPC style "method not found" error
    # ($2 is the requested method name passed by rpcd).
    echo "{\"error\": -32601, \"message\": \"Method not found: $2\"}"
    ;;
esac
;;

View File

@ -10,7 +10,8 @@
"get_stats_by_service",
"get_stats_by_client",
"get_service_details",
"list_alerts"
"list_alerts",
"get_settings"
],
"luci.netifyd-dashboard": [
"status",
@ -22,7 +23,10 @@
"write": {
"ubus": {
"luci.media-flow": [
"set_alert"
"set_alert",
"delete_alert",
"clear_history",
"set_settings"
]
},
"uci": ["media_flow"]