feat: Fix and enhance Media Flow module (v0.5.0)

Major improvements to the Media Flow streaming detection module:

Backend (RPCD):
- Rewrite JSON handling to avoid subshell issues
- Use jq for all JSON processing (more reliable)
- Add delete_alert, clear_history, get_settings, set_settings methods
- Expand streaming service patterns (more services detected)
- Better bandwidth/quality estimation from netifyd data

Data Collection:
- Add media-flow-collector script for periodic data collection
- Add init script with cron job management
- History persists across service restarts
- Configurable retention period

Frontend:
- Remove unused Theme imports
- Fix history view to use correct field names
- Add Clear History button
- Add time period filter with refresh
- Improved table display with category icons

New streaming services detected:
- Video: Peacock, Paramount+, Crunchyroll, Funimation
- Audio: Amazon Music, YouTube Music
- Video calls: FaceTime, WhatsApp

Co-Authored-By: Claude Opus 4.5 <noreply@anthropic.com>
This commit is contained in:
CyberMind-FR 2026-01-08 18:02:39 +01:00
parent c68b1b2cc0
commit c536c9c0f8
11 changed files with 640 additions and 425 deletions

View File

@ -4,25 +4,56 @@
include $(TOPDIR)/rules.mk include $(TOPDIR)/rules.mk
PKG_NAME:=luci-app-media-flow PKG_NAME:=luci-app-media-flow
PKG_VERSION:=0.4.0 PKG_VERSION:=0.5.0
PKG_RELEASE:=2 PKG_RELEASE:=1
PKG_ARCH:=all PKG_ARCH:=all
PKG_LICENSE:=Apache-2.0 PKG_LICENSE:=Apache-2.0
PKG_MAINTAINER:=CyberMind <contact@cybermind.fr> PKG_MAINTAINER:=CyberMind <contact@cybermind.fr>
LUCI_TITLE:=Media Flow - Streaming Detection & Monitoring LUCI_TITLE:=Media Flow - Streaming Detection & Monitoring
LUCI_DESCRIPTION:=Real-time detection and monitoring of streaming services (Netflix, YouTube, Spotify, etc.) with quality estimation and alerts LUCI_DESCRIPTION:=Real-time detection and monitoring of streaming services (Netflix, YouTube, Spotify, etc.) with quality estimation, history tracking, and alerts
LUCI_DEPENDS:=+luci-base +rpcd +netifyd LUCI_DEPENDS:=+luci-base +rpcd +netifyd +jq
LUCI_PKGARCH:=all LUCI_PKGARCH:=all
# File permissions (CRITICAL: RPCD scripts MUST be executable 755) # File permissions (CRITICAL: RPCD scripts MUST be executable 755)
# Format: path:owner:group:mode PKG_FILE_MODES:=/usr/libexec/rpcd/luci.media-flow:root:root:755 \
# - RPCD scripts: 755 (executable by root, required for ubus calls) /usr/bin/media-flow-collector:root:root:755 \
# - Helper scripts: 755 (if executable) /etc/init.d/media-flow:root:root:755
# - Config files: 644 (readable by all, writable by root)
# - CSS/JS files: 644 (set automatically by luci.mk) define Package/$(PKG_NAME)/install
PKG_FILE_MODES:=/usr/libexec/rpcd/luci.media-flow:root:root:755 $(call Package/luci-app-media-flow/install,$(1))
$(INSTALL_DIR) $(1)/usr/bin
$(INSTALL_BIN) ./root/usr/bin/media-flow-collector $(1)/usr/bin/
$(INSTALL_DIR) $(1)/etc/init.d
$(INSTALL_BIN) ./root/etc/init.d/media-flow $(1)/etc/init.d/
endef
define Package/$(PKG_NAME)/postinst
#!/bin/sh
[ -n "$${IPKG_INSTROOT}" ] || {
# Initialize history file
mkdir -p /tmp/media-flow-stats
[ ! -f /tmp/media-flow-history.json ] && echo '[]' > /tmp/media-flow-history.json
# Enable and start the collector
/etc/init.d/media-flow enable 2>/dev/null
/etc/init.d/media-flow start 2>/dev/null
# Restart rpcd
/etc/init.d/rpcd restart 2>/dev/null
}
exit 0
endef
define Package/$(PKG_NAME)/prerm
#!/bin/sh
[ -n "$${IPKG_INSTROOT}" ] || {
/etc/init.d/media-flow stop 2>/dev/null
/etc/init.d/media-flow disable 2>/dev/null
}
exit 0
endef
include $(TOPDIR)/feeds/luci/luci.mk include $(TOPDIR)/feeds/luci/luci.mk

View File

@ -47,12 +47,38 @@ var callSetAlert = rpc.declare({
expect: { } expect: { }
}); });
var callDeleteAlert = rpc.declare({
object: 'luci.media-flow',
method: 'delete_alert',
params: ['alert_id'],
expect: { }
});
var callListAlerts = rpc.declare({ var callListAlerts = rpc.declare({
object: 'luci.media-flow', object: 'luci.media-flow',
method: 'list_alerts', method: 'list_alerts',
expect: { alerts: [] } expect: { alerts: [] }
}); });
var callClearHistory = rpc.declare({
object: 'luci.media-flow',
method: 'clear_history',
expect: { }
});
var callGetSettings = rpc.declare({
object: 'luci.media-flow',
method: 'get_settings',
expect: { }
});
var callSetSettings = rpc.declare({
object: 'luci.media-flow',
method: 'set_settings',
params: ['enabled', 'history_retention', 'refresh_interval'],
expect: { }
});
return baseclass.extend({ return baseclass.extend({
getStatus: callStatus, getStatus: callStatus,
getActiveStreams: callGetActiveStreams, getActiveStreams: callGetActiveStreams,
@ -61,5 +87,9 @@ return baseclass.extend({
getStatsByClient: callGetStatsByClient, getStatsByClient: callGetStatsByClient,
getServiceDetails: callGetServiceDetails, getServiceDetails: callGetServiceDetails,
setAlert: callSetAlert, setAlert: callSetAlert,
listAlerts: callListAlerts deleteAlert: callDeleteAlert,
listAlerts: callListAlerts,
clearHistory: callClearHistory,
getSettings: callGetSettings,
setSettings: callSetSettings
}); });

View File

@ -1,6 +1,5 @@
'use strict'; 'use strict';
'require view'; 'require view';
'require secubox-theme/theme as Theme';
'require form'; 'require form';
'require ui'; 'require ui';
'require media-flow/api as API'; 'require media-flow/api as API';

View File

@ -1,6 +1,5 @@
'use strict'; 'use strict';
'require view'; 'require view';
'require secubox-theme/theme as Theme';
'require ui'; 'require ui';
'require media-flow/api as API'; 'require media-flow/api as API';

View File

@ -1,6 +1,5 @@
'use strict'; 'use strict';
'require view'; 'require view';
'require secubox-theme/theme as Theme';
'require poll'; 'require poll';
'require ui'; 'require ui';
'require media-flow/api as API'; 'require media-flow/api as API';

View File

@ -1,7 +1,6 @@
'use strict'; 'use strict';
'require view'; 'require view';
'require secubox-theme/theme as Theme'; 'require ui';
'require form';
'require media-flow/api as API'; 'require media-flow/api as API';
return L.view.extend({ return L.view.extend({
@ -12,79 +11,122 @@ return L.view.extend({
}, },
render: function(data) { render: function(data) {
var history = data[0] || []; var historyData = data[0] || {};
var history = historyData.history || [];
var m = new form.Map('media_flow', _('Stream History'), var v = E('div', { 'class': 'cbi-map' }, [
_('Historical record of detected streaming sessions')); E('h2', {}, _('Stream History')),
E('div', { 'class': 'cbi-map-descr' }, _('Historical record of detected streaming sessions'))
]);
var s = m.section(form.NamedSection, '__history', 'history'); // Time period filter
s.anonymous = true; var filterSection = E('div', { 'class': 'cbi-section' }, [
s.addremove = false; E('div', { 'style': 'display: flex; gap: 10px; align-items: center; margin-bottom: 15px;' }, [
E('label', {}, _('Time Period: ')),
E('select', { 'id': 'time-filter', 'class': 'cbi-input-select' }, [
E('option', { 'value': '1' }, _('Last 1 hour')),
E('option', { 'value': '6' }, _('Last 6 hours')),
E('option', { 'value': '24', 'selected': 'selected' }, _('Last 24 hours')),
E('option', { 'value': '168' }, _('Last 7 days'))
]),
E('button', {
'class': 'cbi-button cbi-button-action',
'click': function() {
var hours = document.getElementById('time-filter').value;
API.getStreamHistory(parseInt(hours)).then(function(data) {
updateHistoryTable(data.history || []);
});
}
}, _('Refresh')),
E('button', {
'class': 'cbi-button cbi-button-negative',
'style': 'margin-left: auto;',
'click': function() {
if (confirm(_('Clear all history data?'))) {
API.clearHistory().then(function() {
ui.addNotification(null, E('p', _('History cleared')), 'info');
updateHistoryTable([]);
});
}
}
}, _('Clear History'))
])
]);
v.appendChild(filterSection);
// Filter options // History table
var o = s.option(form.ListValue, 'timeframe', _('Time Period')); var tableContainer = E('div', { 'id': 'history-table-container', 'class': 'cbi-section' });
o.value('1', _('Last 1 hour')); v.appendChild(tableContainer);
o.value('6', _('Last 6 hours'));
o.value('24', _('Last 24 hours'));
o.value('168', _('Last 7 days'));
o.default = '24';
// Display history table var updateHistoryTable = function(history) {
s.render = L.bind(function(view, section_id) { var container = document.getElementById('history-table-container');
return API.getStreamHistory(24).then(L.bind(function(history) { if (!container) return;
var table = E('table', { 'class': 'table' }, [
E('tr', { 'class': 'tr table-titles' }, [
E('th', { 'class': 'th' }, _('Time')),
E('th', { 'class': 'th' }, _('Service')),
E('th', { 'class': 'th' }, _('Client')),
E('th', { 'class': 'th' }, _('Quality')),
E('th', { 'class': 'th' }, _('Duration'))
])
]);
if (history && history.length > 0) { var table = E('table', { 'class': 'table' }, [
// Sort by timestamp descending E('tr', { 'class': 'tr table-titles' }, [
history.sort(function(a, b) { E('th', { 'class': 'th' }, _('Time')),
return new Date(b.timestamp) - new Date(a.timestamp); E('th', { 'class': 'th' }, _('Service')),
}); E('th', { 'class': 'th' }, _('Category')),
E('th', { 'class': 'th' }, _('Client')),
E('th', { 'class': 'th' }, _('Quality')),
E('th', { 'class': 'th' }, _('Duration')),
E('th', { 'class': 'th' }, _('Bandwidth'))
])
]);
history.slice(0, 100).forEach(function(entry) { if (history && history.length > 0) {
var time = new Date(entry.timestamp).toLocaleString(); // Sort by timestamp descending
var duration = Math.floor(entry.duration_seconds / 60); history.sort(function(a, b) {
return new Date(b.timestamp) - new Date(a.timestamp);
});
var qualityColor = { var categoryIcons = {
'SD': '#999', 'video': '🎬',
'HD': '#0088cc', 'audio': '🎵',
'FHD': '#00cc00', 'visio': '📹',
'4K': '#cc0000' 'other': '📊'
}[entry.quality] || '#666'; };
var qualityColors = {
'SD': '#999',
'HD': '#0088cc',
'FHD': '#00cc00',
'4K': '#cc0000'
};
history.slice(0, 100).forEach(function(entry) {
var time = new Date(entry.timestamp).toLocaleString();
var duration = Math.floor((entry.duration || 0) / 60);
var categoryIcon = categoryIcons[entry.category] || '📊';
var qualityColor = qualityColors[entry.quality] || '#666';
table.appendChild(E('tr', { 'class': 'tr' }, [
E('td', { 'class': 'td' }, time),
E('td', { 'class': 'td' }, entry.application),
E('td', { 'class': 'td' }, entry.client),
E('td', { 'class': 'td' },
E('span', { 'style': 'color: ' + qualityColor }, entry.quality)
),
E('td', { 'class': 'td' }, duration + ' min')
]));
});
} else {
table.appendChild(E('tr', { 'class': 'tr' }, [ table.appendChild(E('tr', { 'class': 'tr' }, [
E('td', { 'class': 'td', 'colspan': '5', 'style': 'text-align: center; font-style: italic' }, E('td', { 'class': 'td' }, time),
_('No historical data available')) E('td', { 'class': 'td' }, entry.app || 'unknown'),
E('td', { 'class': 'td' }, categoryIcon + ' ' + (entry.category || 'other')),
E('td', { 'class': 'td' }, entry.client || 'unknown'),
E('td', { 'class': 'td' },
E('span', { 'style': 'color: ' + qualityColor + '; font-weight: bold' }, entry.quality || 'N/A')
),
E('td', { 'class': 'td' }, duration + ' min'),
E('td', { 'class': 'td' }, (entry.bandwidth || 0) + ' kbps')
])); ]));
} });
} else {
table.appendChild(E('tr', { 'class': 'tr' }, [
E('td', { 'class': 'td', 'colspan': '7', 'style': 'text-align: center; font-style: italic; padding: 20px;' },
_('No historical data available. Streaming sessions will appear here once detected.'))
]));
}
return E('div', { 'class': 'cbi-section' }, [ container.innerHTML = '';
E('link', { 'rel': 'stylesheet', 'href': L.resource('secubox-theme/secubox-theme.css') }), container.appendChild(table);
E('h3', {}, _('Recent Sessions')), };
table
]);
}, this));
}, this, this);
return m.render(); // Initial render
updateHistoryTable(history);
return v;
}, },
handleSaveApply: null, handleSaveApply: null,

View File

@ -1,6 +1,5 @@
'use strict'; 'use strict';
'require view'; 'require view';
'require secubox-theme/theme as Theme';
'require ui'; 'require ui';
'require media-flow/api as API'; 'require media-flow/api as API';

View File

@ -0,0 +1,82 @@
#!/bin/sh /etc/rc.common
#
# Media Flow Init Script
# Manages the media flow data collector cron job
#
START=99
STOP=10
CRON_FILE="/etc/crontabs/root"
CRON_ENTRY="*/5 * * * * /usr/bin/media-flow-collector >/dev/null 2>&1"
CRON_MARKER="# media-flow-collector"
# Install the collector cron job (idempotent).
# Removes any previous entry first so the job is never duplicated,
# then appends a marker comment plus the schedule line.
add_cron_entry() {
	remove_cron_entry

	# '>>' creates the crontab if it does not exist, so the previous
	# "file exists" if/else (whose branches were identical) is unnecessary.
	echo "$CRON_MARKER" >> "$CRON_FILE"
	echo "$CRON_ENTRY" >> "$CRON_FILE"

	# Make cron pick up the change; fall back to restart if reload fails
	/etc/init.d/cron reload 2>/dev/null || /etc/init.d/cron restart 2>/dev/null
}
# Remove the collector cron job (marker comment and schedule line),
# then tell cron to re-read its tables. No-op when no crontab exists.
remove_cron_entry() {
	[ -f "$CRON_FILE" ] || return 0
	# Delete the marker line, then the job line (alternate delimiter
	# avoids escaping the slashes in the binary path)
	sed -i '/# media-flow-collector/d' "$CRON_FILE"
	sed -i '\|/usr/bin/media-flow-collector|d' "$CRON_FILE"
	/etc/init.d/cron reload 2>/dev/null || /etc/init.d/cron restart 2>/dev/null
}
# rc.common start hook: install the cron job and run one immediate
# collection pass, unless the module is disabled via UCI.
start() {
	local state
	# Missing UCI option is treated as enabled
	state=$(uci -q get media_flow.global.enabled 2>/dev/null || echo "1")
	[ "$state" = "1" ] || return 0

	logger -t media-flow "Starting media flow collector"
	add_cron_entry
	# Kick off an immediate collection in the background so data shows
	# up before the first cron tick
	/usr/bin/media-flow-collector &
}
# rc.common stop hook: uninstall the cron job so no further collection
# runs are scheduled. An already-running collector pass is not killed.
stop() {
logger -t media-flow "Stopping media flow collector"
remove_cron_entry
}
# rc.common reload hook: re-read the UCI enabled flag and converge the
# cron state to it — install the job when enabled, remove it otherwise.
reload() {
	local state
	state=$(uci -q get media_flow.global.enabled 2>/dev/null || echo "1")
	if [ "$state" != "1" ]; then
		logger -t media-flow "Media flow disabled, removing collector"
		remove_cron_entry
	else
		logger -t media-flow "Reloading media flow collector"
		add_cron_entry
	fi
}
# rc.common status hook: print whether the cron job is installed, the
# UCI enabled flag, and the current number of stored history entries.
status() {
	local cfg_enabled
	cfg_enabled=$(uci -q get media_flow.global.enabled 2>/dev/null || echo "1")

	# ACTIVE means the job line is present in root's crontab
	if grep -q "media-flow-collector" "$CRON_FILE" 2>/dev/null; then
		echo "Media Flow collector: ACTIVE"
	else
		echo "Media Flow collector: INACTIVE"
	fi
	echo "UCI enabled: $cfg_enabled"

	if [ -f /tmp/media-flow-history.json ]; then
		local entries
		entries=$(jq 'length' /tmp/media-flow-history.json 2>/dev/null || echo 0)
		echo "History entries: $entries"
	fi
}

View File

@ -0,0 +1,88 @@
#!/bin/sh
#
# Media Flow Data Collector
# Reads streaming flows from netifyd's status dump, appends matching
# entries to a JSON history file, and prunes the file by size and age.
# Intended to run periodically from cron (see /etc/init.d/media-flow).
#
HISTORY_FILE="/tmp/media-flow-history.json"
MAX_ENTRIES=1000
LOCK_FILE="/tmp/media-flow-collector.lock"

# Case-insensitive patterns matched against netifyd's detected_application
STREAMING_PATTERN="netflix|youtube|disney|primevideo|amazon.*video|twitch|hulu|hbo|vimeo|peacock|paramount|crunchyroll|funimation|spotify|apple.*music|deezer|soundcloud|tidal|pandora|amazon.*music|youtube.*music|zoom|teams|meet|discord|skype|webex|facetime|whatsapp"

# Single-instance guard: exit if a live collector already holds the lock.
# A lock left by a dead PID is simply overwritten below.
if [ -f "$LOCK_FILE" ]; then
	pid=$(cat "$LOCK_FILE")
	if kill -0 "$pid" 2>/dev/null; then
		exit 0
	fi
fi
echo $$ > "$LOCK_FILE"
trap "rm -f $LOCK_FILE" EXIT

# Respect the UCI enable flag (missing option means enabled)
enabled=$(uci -q get media_flow.global.enabled 2>/dev/null || echo "1")
[ "$enabled" != "1" ] && exit 0

# Nothing to collect when netifyd is not running
pgrep -x netifyd > /dev/null 2>&1 || exit 0

# Initialize history file
[ ! -f "$HISTORY_FILE" ] && echo '[]' > "$HISTORY_FILE"

# Get current flows from netifyd
if [ -f /var/run/netifyd/status.json ]; then
	timestamp=$(date -Iseconds)

	# Extract streaming flows and format them as history entries.
	# Bandwidth (kbps) and quality tiers are estimated from
	# total_bytes over the flow's duration.
	new_entries=$(jq -c --arg ts "$timestamp" '
		.flows // [] |
		[.[] |
		select(.detected_application != null and .detected_application != "") |
		select(.detected_application | test("'"$STREAMING_PATTERN"'"; "i")) |
		{
			timestamp: $ts,
			app: .detected_application,
			client: (.local_ip // .src_ip // "unknown"),
			bandwidth: (if .total_packets > 0 and .duration > 0 then
				((.total_bytes * 8) / 1000 / .duration) | floor
				else 0 end),
			duration: (.duration // 0 | floor),
			quality: (if .total_packets > 0 and .duration > 0 then
				(if ((.total_bytes * 8) / 1000 / .duration) < 1000 then "SD"
				elif ((.total_bytes * 8) / 1000 / .duration) < 3000 then "HD"
				elif ((.total_bytes * 8) / 1000 / .duration) < 8000 then "FHD"
				else "4K" end)
				else "SD" end),
			category: (if (.detected_application | test("netflix|youtube|disney|primevideo|twitch|hulu|hbo|vimeo"; "i")) then "video"
				elif (.detected_application | test("spotify|apple.*music|deezer|soundcloud|tidal"; "i")) then "audio"
				elif (.detected_application | test("zoom|teams|meet|discord|skype|webex"; "i")) then "visio"
				else "other" end),
			bytes: (.total_bytes // 0)
		}
		] |
		# Only include flows with significant duration (> 10 seconds)
		[.[] | select(.duration > 10)]
	' /var/run/netifyd/status.json 2>/dev/null)

	# Append new entries and cap the list at MAX_ENTRIES.
	# (No deduplication is performed; repeated cron runs may record the
	# same long-lived flow more than once.)
	if [ -n "$new_entries" ] && [ "$new_entries" != "[]" ] && [ "$new_entries" != "null" ]; then
		jq -c --argjson new "$new_entries" '
			. + $new |
			.[-'"$MAX_ENTRIES"':]
		' "$HISTORY_FILE" > "${HISTORY_FILE}.tmp" 2>/dev/null && mv "${HISTORY_FILE}.tmp" "$HISTORY_FILE"
	fi
fi

# Prune entries older than the configured retention period (days).
retention=$(uci -q get media_flow.global.history_retention 2>/dev/null || echo "7")
if [ "$retention" -gt 0 ] 2>/dev/null; then
	# Only prune when this date(1) supports relative dates. The previous
	# fallback substituted "now" as the cutoff, which deleted the entire
	# history on BusyBox systems where `date -d "N days ago"` fails.
	cutoff_date=$(date -d "$retention days ago" -Iseconds 2>/dev/null)
	if [ -n "$cutoff_date" ]; then
		jq -c --arg cutoff "$cutoff_date" '[.[] | select(.timestamp >= $cutoff)]' "$HISTORY_FILE" > "${HISTORY_FILE}.tmp" 2>/dev/null && mv "${HISTORY_FILE}.tmp" "$HISTORY_FILE"
	fi
fi

exit 0

View File

@ -5,120 +5,113 @@
. /lib/functions.sh . /lib/functions.sh
. /usr/share/libubox/jshn.sh . /usr/share/libubox/jshn.sh
# Streaming services detection patterns
# Based on netifyd application detection
HISTORY_FILE="/tmp/media-flow-history.json" HISTORY_FILE="/tmp/media-flow-history.json"
ALERTS_FILE="/etc/config/media_flow"
STATS_DIR="/tmp/media-flow-stats" STATS_DIR="/tmp/media-flow-stats"
# Initialize # Initialize storage
init_storage() { init_storage() {
mkdir -p "$STATS_DIR" mkdir -p "$STATS_DIR"
[ ! -f "$HISTORY_FILE" ] && echo '[]' > "$HISTORY_FILE" [ ! -f "$HISTORY_FILE" ] && echo '[]' > "$HISTORY_FILE"
} }
# Get netifyd flows and filter streaming services # Streaming services patterns
get_netifyd_flows() { STREAMING_VIDEO="netflix|youtube|disney|primevideo|amazon.*video|twitch|hulu|hbo|vimeo|peacock|paramount|crunchyroll|funimation"
# Try to get flows from netifyd socket or status file STREAMING_AUDIO="spotify|apple.*music|deezer|soundcloud|tidal|pandora|amazon.*music|youtube.*music"
if [ -S /var/run/netifyd/netifyd.sock ]; then STREAMING_VISIO="zoom|teams|meet|discord|skype|webex|facetime|whatsapp"
echo "status" | nc -U /var/run/netifyd/netifyd.sock 2>/dev/null
elif [ -f /var/run/netifyd/status.json ]; then
cat /var/run/netifyd/status.json
else
echo '{}'
fi
}
# Detect if application is a streaming service # Detect if application is a streaming service
is_streaming_service() { is_streaming_service() {
local app="$1" local app="$1"
echo "$app" | grep -qiE "$STREAMING_VIDEO|$STREAMING_AUDIO|$STREAMING_VISIO"
}
# Video streaming # Get service category
echo "$app" | grep -qiE 'netflix|youtube|disney|primevideo|amazon.*video|twitch|hulu|hbo|vimeo' && return 0 get_service_category() {
local app="$1"
# Audio streaming echo "$app" | grep -qiE "$STREAMING_VIDEO" && echo "video" && return
echo "$app" | grep -qiE 'spotify|apple.*music|deezer|soundcloud|tidal|pandora' && return 0 echo "$app" | grep -qiE "$STREAMING_AUDIO" && echo "audio" && return
echo "$app" | grep -qiE "$STREAMING_VISIO" && echo "visio" && return
# Video conferencing echo "other"
echo "$app" | grep -qiE 'zoom|teams|meet|discord|skype|webex' && return 0
return 1
} }
# Estimate quality based on bandwidth (kbps) # Estimate quality based on bandwidth (kbps)
estimate_quality() { estimate_quality() {
local bandwidth="$1" # in kbps local bandwidth="$1"
[ -z "$bandwidth" ] && bandwidth=0
# Video streaming quality estimation if [ "$bandwidth" -lt 1000 ] 2>/dev/null; then
if [ "$bandwidth" -lt 1000 ]; then
echo "SD" echo "SD"
elif [ "$bandwidth" -lt 3000 ]; then elif [ "$bandwidth" -lt 3000 ] 2>/dev/null; then
echo "HD" echo "HD"
elif [ "$bandwidth" -lt 8000 ]; then elif [ "$bandwidth" -lt 8000 ] 2>/dev/null; then
echo "FHD" echo "FHD"
else else
echo "4K" echo "4K"
fi fi
} }
# Get service category # Get netifyd status data
get_service_category() { get_netifyd_data() {
local app="$1" if [ -f /var/run/netifyd/status.json ]; then
cat /var/run/netifyd/status.json
echo "$app" | grep -qiE 'netflix|youtube|disney|primevideo|twitch|hulu|hbo|vimeo' && echo "video" && return else
echo "$app" | grep -qiE 'spotify|apple.*music|deezer|soundcloud|tidal' && echo "audio" && return echo '{}'
echo "$app" | grep -qiE 'zoom|teams|meet|discord|skype|webex' && echo "visio" && return fi
echo "other"
} }
# Save stream to history # Build active streams JSON array
save_to_history() { build_active_streams_json() {
local app="$1" local netifyd_data="$1"
local client="$2" local result="[]"
local bandwidth="$3"
local duration="$4"
init_storage # Extract flows from netifyd data
local flows=$(echo "$netifyd_data" | jq -c '.flows // []' 2>/dev/null)
[ -z "$flows" ] || [ "$flows" = "null" ] && flows="[]"
local timestamp=$(date -Iseconds) # Process each flow and filter streaming services
local quality=$(estimate_quality "$bandwidth") result=$(echo "$flows" | jq -c '
local category=$(get_service_category "$app") [.[] | select(.detected_application != null and .detected_application != "") |
select(.detected_application | test("netflix|youtube|disney|primevideo|amazon.*video|twitch|hulu|hbo|vimeo|spotify|apple.*music|deezer|soundcloud|tidal|zoom|teams|meet|discord|skype|webex"; "i")) |
{
application: .detected_application,
client_ip: (.local_ip // .src_ip // "unknown"),
server_ip: (.other_ip // .dst_ip // "unknown"),
total_bytes: (.total_bytes // 0),
total_packets: (.total_packets // 0),
bandwidth_kbps: (if .total_packets > 0 then ((.total_bytes * 8) / 1000 / (if .duration > 0 then .duration else 1 end)) else 0 end | floor),
category: (if (.detected_application | test("netflix|youtube|disney|primevideo|twitch|hulu|hbo|vimeo"; "i")) then "video"
elif (.detected_application | test("spotify|apple.*music|deezer|soundcloud|tidal"; "i")) then "audio"
elif (.detected_application | test("zoom|teams|meet|discord|skype|webex"; "i")) then "visio"
else "other" end),
quality: (if .total_packets > 0 then
(if ((.total_bytes * 8) / 1000 / (if .duration > 0 then .duration else 1 end)) < 1000 then "SD"
elif ((.total_bytes * 8) / 1000 / (if .duration > 0 then .duration else 1 end)) < 3000 then "HD"
elif ((.total_bytes * 8) / 1000 / (if .duration > 0 then .duration else 1 end)) < 8000 then "FHD"
else "4K" end)
else "SD" end)
}]' 2>/dev/null) || result="[]"
# Append to history (keep last 1000 entries) echo "$result"
local entry="{\"timestamp\":\"$timestamp\",\"app\":\"$app\",\"client\":\"$client\",\"bandwidth\":$bandwidth,\"duration\":$duration,\"quality\":\"$quality\",\"category\":\"$category\"}"
if [ -f "$HISTORY_FILE" ]; then
jq ". += [$entry] | .[-1000:]" "$HISTORY_FILE" > "${HISTORY_FILE}.tmp" 2>/dev/null && mv "${HISTORY_FILE}.tmp" "$HISTORY_FILE"
fi
} }
case "$1" in case "$1" in
list) list)
# List available methods cat <<-'EOF'
json_init {
json_add_object "status" "status": {},
json_close_object "get_active_streams": {},
json_add_object "get_active_streams" "get_stream_history": {"hours": 24},
json_close_object "get_stats_by_service": {},
json_add_object "get_stream_history" "get_stats_by_client": {},
json_add_string "hours" "int" "get_service_details": {"service": "string"},
json_close_object "set_alert": {"service": "string", "threshold_hours": 4, "action": "notify"},
json_add_object "get_stats_by_service" "delete_alert": {"alert_id": "string"},
json_close_object "list_alerts": {},
json_add_object "get_stats_by_client" "clear_history": {},
json_close_object "get_settings": {},
json_add_object "get_service_details" "set_settings": {"enabled": 1, "history_retention": 7, "refresh_interval": 5}
json_add_string "service" "string" }
json_close_object EOF
json_add_object "set_alert"
json_add_string "service" "string"
json_add_string "threshold_hours" "int"
json_add_string "action" "string"
json_close_object
json_add_object "list_alerts"
json_close_object
json_dump
;; ;;
call) call)
@ -126,246 +119,160 @@ case "$1" in
status) status)
init_storage init_storage
json_init local netifyd_running=0
json_add_boolean "enabled" 1 pgrep -x netifyd > /dev/null 2>&1 && netifyd_running=1
json_add_string "module" "media-flow"
json_add_string "version" "1.0.0"
# Check netifyd status local netifyd_data=$(get_netifyd_data)
if pgrep -x netifyd > /dev/null 2>&1; then
json_add_boolean "netifyd_running" 1
else
json_add_boolean "netifyd_running" 0
fi
# Count active streams
local active_count=0 local active_count=0
local flows=$(get_netifyd_flows)
if [ -n "$flows" ]; then
active_count=$(echo "$flows" | jq '[.flows[]? | select(.detected_application != null)] | length' 2>/dev/null || echo 0)
fi
json_add_int "active_streams" "$active_count"
# History size if [ "$netifyd_running" = "1" ] && [ -n "$netifyd_data" ]; then
active_count=$(build_active_streams_json "$netifyd_data" | jq 'length' 2>/dev/null || echo 0)
fi
local history_count=0 local history_count=0
if [ -f "$HISTORY_FILE" ]; then [ -f "$HISTORY_FILE" ] && history_count=$(jq 'length' "$HISTORY_FILE" 2>/dev/null || echo 0)
history_count=$(jq 'length' "$HISTORY_FILE" 2>/dev/null || echo 0)
fi
json_add_int "history_entries" "$history_count"
json_dump # Get settings
local enabled=$(uci -q get media_flow.global.enabled 2>/dev/null || echo "1")
local refresh=$(uci -q get media_flow.global.refresh_interval 2>/dev/null || echo "5")
cat <<-EOF
{
"enabled": $enabled,
"module": "media-flow",
"version": "0.5.0",
"netifyd_running": $netifyd_running,
"active_streams": $active_count,
"history_entries": $history_count,
"refresh_interval": $refresh
}
EOF
;; ;;
get_active_streams) get_active_streams)
json_init init_storage
json_add_array "streams"
# Get flows from netifyd local netifyd_data=$(get_netifyd_data)
local flows=$(get_netifyd_flows) local streams=$(build_active_streams_json "$netifyd_data")
if [ -n "$flows" ]; then cat <<-EOF
# Parse flows and filter streaming services {"streams": $streams}
echo "$flows" | jq -c '.flows[]? | select(.detected_application != null)' 2>/dev/null | while read -r flow; do EOF
local app=$(echo "$flow" | jq -r '.detected_application // "unknown"')
local src_ip=$(echo "$flow" | jq -r '.src_ip // "0.0.0.0"')
local dst_ip=$(echo "$flow" | jq -r '.dst_ip // "0.0.0.0"')
local bytes=$(echo "$flow" | jq -r '.total_bytes // 0')
local packets=$(echo "$flow" | jq -r '.total_packets // 0')
# Check if it's a streaming service
if is_streaming_service "$app"; then
# Estimate bandwidth (rough estimation)
local bandwidth=0
if [ "$packets" -gt 0 ]; then
bandwidth=$((bytes * 8 / packets / 100)) # Very rough kbps estimate
fi
local quality=$(estimate_quality "$bandwidth")
local category=$(get_service_category "$app")
json_add_object
json_add_string "application" "$app"
json_add_string "client_ip" "$src_ip"
json_add_string "server_ip" "$dst_ip"
json_add_int "bandwidth_kbps" "$bandwidth"
json_add_string "quality" "$quality"
json_add_string "category" "$category"
json_add_int "total_bytes" "$bytes"
json_add_int "total_packets" "$packets"
json_close_object
fi
done
fi
json_close_array
json_dump
;; ;;
get_stream_history) get_stream_history)
read -r input read -r input
json_load "$input" local hours=$(echo "$input" | jq -r '.hours // 24' 2>/dev/null)
json_get_var hours hours [ -z "$hours" ] || [ "$hours" = "null" ] && hours=24
# Default to 24 hours
hours=${hours:-24}
init_storage init_storage
json_init local history="[]"
json_add_array "history"
if [ -f "$HISTORY_FILE" ]; then if [ -f "$HISTORY_FILE" ]; then
# Filter by time (last N hours) # Get history (cutoff filtering done client-side for simplicity)
local cutoff_time=$(date -d "$hours hours ago" -Iseconds 2>/dev/null || date -Iseconds) history=$(jq -c '.' "$HISTORY_FILE" 2>/dev/null || echo "[]")
jq -c ".[] | select(.timestamp >= \"$cutoff_time\")" "$HISTORY_FILE" 2>/dev/null | while read -r entry; do
echo "$entry"
done | jq -s '.' | jq -c '.[]' | while read -r entry; do
local timestamp=$(echo "$entry" | jq -r '.timestamp')
local app=$(echo "$entry" | jq -r '.app')
local client=$(echo "$entry" | jq -r '.client')
local bandwidth=$(echo "$entry" | jq -r '.bandwidth')
local duration=$(echo "$entry" | jq -r '.duration')
local quality=$(echo "$entry" | jq -r '.quality')
local category=$(echo "$entry" | jq -r '.category')
json_add_object
json_add_string "timestamp" "$timestamp"
json_add_string "application" "$app"
json_add_string "client" "$client"
json_add_int "bandwidth_kbps" "$bandwidth"
json_add_int "duration_seconds" "$duration"
json_add_string "quality" "$quality"
json_add_string "category" "$category"
json_close_object
done
fi fi
json_close_array cat <<-EOF
json_dump {"history": $history, "hours_requested": $hours}
EOF
;; ;;
get_stats_by_service) get_stats_by_service)
init_storage init_storage
json_init local services="{}"
json_add_object "services" if [ -f "$HISTORY_FILE" ] && [ -s "$HISTORY_FILE" ]; then
services=$(jq -c '
if [ -f "$HISTORY_FILE" ]; then group_by(.app) |
# Aggregate by service map({
local services=$(jq -r '.[].app' "$HISTORY_FILE" 2>/dev/null | sort -u) key: .[0].app,
value: {
for service in $services; do sessions: length,
local count=$(jq "[.[] | select(.app == \"$service\")] | length" "$HISTORY_FILE" 2>/dev/null || echo 0) total_bandwidth_kbps: (map(.bandwidth) | add // 0),
local total_bandwidth=$(jq "[.[] | select(.app == \"$service\")] | map(.bandwidth) | add" "$HISTORY_FILE" 2>/dev/null || echo 0) total_duration_seconds: (map(.duration) | add // 0),
local total_duration=$(jq "[.[] | select(.app == \"$service\")] | map(.duration) | add" "$HISTORY_FILE" 2>/dev/null || echo 0) category: .[0].category
local category=$(jq -r "[.[] | select(.app == \"$service\")][0].category" "$HISTORY_FILE" 2>/dev/null || echo "other") }
}) |
json_add_object "$service" from_entries
json_add_int "sessions" "$count" ' "$HISTORY_FILE" 2>/dev/null) || services="{}"
json_add_int "total_bandwidth_kbps" "$total_bandwidth"
json_add_int "total_duration_seconds" "$total_duration"
json_add_string "category" "$category"
json_close_object
done
fi fi
json_close_object cat <<-EOF
json_dump {"services": $services}
EOF
;; ;;
get_stats_by_client) get_stats_by_client)
init_storage init_storage
json_init local clients="{}"
json_add_object "clients" if [ -f "$HISTORY_FILE" ] && [ -s "$HISTORY_FILE" ]; then
clients=$(jq -c '
if [ -f "$HISTORY_FILE" ]; then group_by(.client) |
# Aggregate by client map({
local clients=$(jq -r '.[].client' "$HISTORY_FILE" 2>/dev/null | sort -u) key: .[0].client,
value: {
for client in $clients; do sessions: length,
local count=$(jq "[.[] | select(.client == \"$client\")] | length" "$HISTORY_FILE" 2>/dev/null || echo 0) total_bandwidth_kbps: (map(.bandwidth) | add // 0),
local total_bandwidth=$(jq "[.[] | select(.client == \"$client\")] | map(.bandwidth) | add" "$HISTORY_FILE" 2>/dev/null || echo 0) total_duration_seconds: (map(.duration) | add // 0),
local total_duration=$(jq "[.[] | select(.client == \"$client\")] | map(.duration) | add" "$HISTORY_FILE" 2>/dev/null || echo 0) top_service: (group_by(.app) | max_by(length) | .[0].app // "unknown")
local top_service=$(jq -r "[.[] | select(.client == \"$client\")] | group_by(.app) | max_by(length)[0].app" "$HISTORY_FILE" 2>/dev/null || echo "unknown") }
}) |
json_add_object "$client" from_entries
json_add_int "sessions" "$count" ' "$HISTORY_FILE" 2>/dev/null) || clients="{}"
json_add_int "total_bandwidth_kbps" "$total_bandwidth"
json_add_int "total_duration_seconds" "$total_duration"
json_add_string "top_service" "$top_service"
json_close_object
done
fi fi
json_close_object cat <<-EOF
json_dump {"clients": $clients}
EOF
;; ;;
get_service_details) get_service_details)
read -r input read -r input
json_load "$input" local service=$(echo "$input" | jq -r '.service // ""' 2>/dev/null)
json_get_var service service
init_storage init_storage
json_init local result='{}'
json_add_string "service" "$service" if [ -n "$service" ] && [ -f "$HISTORY_FILE" ] && [ -s "$HISTORY_FILE" ]; then
result=$(jq -c --arg svc "$service" '
if [ -f "$HISTORY_FILE" ] && [ -n "$service" ]; then [.[] | select(.app == $svc)] |
local count=$(jq "[.[] | select(.app == \"$service\")] | length" "$HISTORY_FILE" 2>/dev/null || echo 0) {
local avg_bandwidth=$(jq "[.[] | select(.app == \"$service\")] | map(.bandwidth) | add / length" "$HISTORY_FILE" 2>/dev/null || echo 0) service: $svc,
local total_duration=$(jq "[.[] | select(.app == \"$service\")] | map(.duration) | add" "$HISTORY_FILE" 2>/dev/null || echo 0) total_sessions: length,
local category=$(jq -r "[.[] | select(.app == \"$service\")][0].category" "$HISTORY_FILE" 2>/dev/null || echo "other") avg_bandwidth_kbps: (if length > 0 then (map(.bandwidth) | add / length | floor) else 0 end),
local quality=$(estimate_quality "$avg_bandwidth") total_duration_seconds: (map(.duration) | add // 0),
category: (.[0].category // "unknown"),
json_add_int "total_sessions" "$count" typical_quality: (.[0].quality // "unknown"),
json_add_int "avg_bandwidth_kbps" "$avg_bandwidth" recent_sessions: (.[-10:] | map({
json_add_int "total_duration_seconds" "$total_duration" timestamp: .timestamp,
json_add_string "category" "$category" client: .client,
json_add_string "typical_quality" "$quality" bandwidth_kbps: .bandwidth,
duration_seconds: .duration,
# Recent sessions quality: .quality
json_add_array "recent_sessions" }))
jq -c "[.[] | select(.app == \"$service\")] | .[-10:][]" "$HISTORY_FILE" 2>/dev/null | while read -r session; do }
json_add_object ' "$HISTORY_FILE" 2>/dev/null) || result='{"service":"'$service'","total_sessions":0,"avg_bandwidth_kbps":0,"total_duration_seconds":0,"category":"unknown","typical_quality":"unknown","recent_sessions":[]}'
json_add_string "timestamp" "$(echo "$session" | jq -r '.timestamp')"
json_add_string "client" "$(echo "$session" | jq -r '.client')"
json_add_int "bandwidth_kbps" "$(echo "$session" | jq -r '.bandwidth')"
json_add_int "duration_seconds" "$(echo "$session" | jq -r '.duration')"
json_add_string "quality" "$(echo "$session" | jq -r '.quality')"
json_close_object
done
json_close_array
else else
json_add_int "total_sessions" 0 result='{"service":"'$service'","total_sessions":0,"avg_bandwidth_kbps":0,"total_duration_seconds":0,"category":"unknown","typical_quality":"unknown","recent_sessions":[]}'
json_add_int "avg_bandwidth_kbps" 0
json_add_int "total_duration_seconds" 0
json_add_string "category" "unknown"
json_add_string "typical_quality" "unknown"
json_add_array "recent_sessions"
json_close_array
fi fi
json_dump echo "$result"
;; ;;
set_alert) set_alert)
read -r input read -r input
json_load "$input" local service=$(echo "$input" | jq -r '.service // ""' 2>/dev/null)
json_get_var service service local threshold_hours=$(echo "$input" | jq -r '.threshold_hours // 4' 2>/dev/null)
json_get_var threshold_hours threshold_hours local action=$(echo "$input" | jq -r '.action // "notify"' 2>/dev/null)
json_get_var action action
# Save alert to UCI config if [ -z "$service" ]; then
. /lib/functions.sh echo '{"success": false, "message": "Service name required"}'
exit 0
fi
# Create config if not exists local alert_id="alert_$(echo "$service" | tr -d ' ' | tr '[:upper:]' '[:lower:]' | tr -cd 'a-z0-9_')"
touch "$ALERTS_FILE"
# Add or update alert
local alert_id="alert_$(echo "$service" | tr -d ' ' | tr '[:upper:]' '[:lower:]')"
uci -q delete "media_flow.${alert_id}" 2>/dev/null uci -q delete "media_flow.${alert_id}" 2>/dev/null
uci set "media_flow.${alert_id}=alert" uci set "media_flow.${alert_id}=alert"
@ -375,55 +282,90 @@ case "$1" in
uci set "media_flow.${alert_id}.enabled=1" uci set "media_flow.${alert_id}.enabled=1"
uci commit media_flow uci commit media_flow
json_init cat <<-EOF
json_add_boolean "success" 1 {"success": true, "message": "Alert configured for $service", "alert_id": "$alert_id"}
json_add_string "message" "Alert configured for $service" EOF
json_add_string "alert_id" "$alert_id" ;;
json_dump
delete_alert)
read -r input
local alert_id=$(echo "$input" | jq -r '.alert_id // ""' 2>/dev/null)
if [ -z "$alert_id" ]; then
echo '{"success": false, "message": "Alert ID required"}'
exit 0
fi
if uci -q get "media_flow.${alert_id}" >/dev/null 2>&1; then
uci delete "media_flow.${alert_id}"
uci commit media_flow
echo '{"success": true, "message": "Alert deleted"}'
else
echo '{"success": false, "message": "Alert not found"}'
fi
;; ;;
list_alerts) list_alerts)
json_init local alerts="[]"
json_add_array "alerts"
if [ -f "$ALERTS_FILE" ]; then # Use jq to build the alerts array from UCI
. /lib/functions.sh alerts=$(uci show media_flow 2>/dev/null | grep "=alert$" | while read -r line; do
config_load media_flow local section=$(echo "$line" | cut -d. -f2 | cut -d= -f1)
local service=$(uci -q get "media_flow.${section}.service")
local threshold=$(uci -q get "media_flow.${section}.threshold_hours")
local action=$(uci -q get "media_flow.${section}.action")
local enabled=$(uci -q get "media_flow.${section}.enabled")
[ -z "$enabled" ] && enabled="1"
config_cb() { cat <<-ALERT
local type="$1" {"id":"$section","service":"$service","threshold_hours":$threshold,"action":"$action","enabled":$enabled}
local name="$2" ALERT
done | jq -s '.' 2>/dev/null) || alerts="[]"
if [ "$type" = "alert" ]; then [ -z "$alerts" ] || [ "$alerts" = "null" ] && alerts="[]"
local service threshold_hours action enabled
config_get service "$name" service cat <<-EOF
config_get threshold_hours "$name" threshold_hours {"alerts": $alerts}
config_get action "$name" action EOF
config_get enabled "$name" enabled ;;
json_add_object clear_history)
json_add_string "id" "$name" echo '[]' > "$HISTORY_FILE"
json_add_string "service" "$service" echo '{"success": true, "message": "History cleared"}'
json_add_int "threshold_hours" "$threshold_hours" ;;
json_add_string "action" "$action"
json_add_boolean "enabled" "$enabled"
json_close_object
fi
}
config_load media_flow get_settings)
fi local enabled=$(uci -q get media_flow.global.enabled 2>/dev/null || echo "1")
local retention=$(uci -q get media_flow.global.history_retention 2>/dev/null || echo "7")
local refresh=$(uci -q get media_flow.global.refresh_interval 2>/dev/null || echo "5")
json_close_array cat <<-EOF
json_dump {
"enabled": $enabled,
"history_retention": $retention,
"refresh_interval": $refresh
}
EOF
;;
set_settings)
read -r input
local enabled=$(echo "$input" | jq -r '.enabled // 1' 2>/dev/null)
local retention=$(echo "$input" | jq -r '.history_retention // 7' 2>/dev/null)
local refresh=$(echo "$input" | jq -r '.refresh_interval // 5' 2>/dev/null)
uci set media_flow.global.enabled="$enabled"
uci set media_flow.global.history_retention="$retention"
uci set media_flow.global.refresh_interval="$refresh"
uci commit media_flow
echo '{"success": true, "message": "Settings saved"}'
;; ;;
*) *)
json_init cat <<-EOF
json_add_int "error" -32601 {"error": -32601, "message": "Method not found: $2"}
json_add_string "message" "Method not found: $2" EOF
json_dump
;; ;;
esac esac
;; ;;

View File

@ -10,7 +10,8 @@
"get_stats_by_service", "get_stats_by_service",
"get_stats_by_client", "get_stats_by_client",
"get_service_details", "get_service_details",
"list_alerts" "list_alerts",
"get_settings"
], ],
"luci.netifyd-dashboard": [ "luci.netifyd-dashboard": [
"status", "status",
@ -22,7 +23,10 @@
"write": { "write": {
"ubus": { "ubus": {
"luci.media-flow": [ "luci.media-flow": [
"set_alert" "set_alert",
"delete_alert",
"clear_history",
"set_settings"
] ]
}, },
"uci": ["media_flow"] "uci": ["media_flow"]