fix(streamlit): Add chunked upload to bypass uhttpd 64KB JSON limit and support top-level .py apps

uhttpd-mod-ubus silently rejects JSON-RPC requests >64KB with "Parse error",
causing uploads of .py files >48KB to fail with "No related RPC reply".

- Add chunked upload (upload_chunk + upload_finalize) that splits base64
  content into 40KB pieces sent sequentially, then reassembles server-side
- Frontend auto-selects chunked upload when content exceeds 40KB
- Stop polling during upload to prevent RPC batch conflicts
- RPCD handlers use cat-to-tempfile instead of shell variables for stdin
  to avoid BusyBox argument size limits
- Container startup script handles top-level .py files (not just subdirs)
- streamlitctl cmd_instance_start also handles top-level .py files
- Add upload_chunk and upload_finalize to ACL

Co-Authored-By: Claude Opus 4.5 <noreply@anthropic.com>
This commit is contained in:
CyberMind-FR 2026-02-04 10:16:47 +01:00
parent 8691a5e048
commit 0083513cdc
5 changed files with 287 additions and 69 deletions

View File

@ -110,6 +110,20 @@ var callUploadApp = rpc.declare({
expect: { result: {} }
});
// RPC proxy: append one base64-encoded piece of an upload to the
// server-side staging buffer for app `name`; `index` is the chunk
// ordinal (0 starts a fresh buffer — see upload_chunk in the rpcd script).
var callUploadChunk = rpc.declare({
object: 'luci.streamlit',
method: 'upload_chunk',
params: ['name', 'data', 'index'],
expect: { result: {} }
});
// RPC proxy: tell the backend all chunks have arrived; it decodes the
// accumulated base64 and installs the app. `is_zip` is a string flag
// ('1' for ZIP archives, '0' for a single .py file).
var callUploadFinalize = rpc.declare({
object: 'luci.streamlit',
method: 'upload_finalize',
params: ['name', 'is_zip'],
expect: { result: {} }
});
var callUploadZip = rpc.declare({
object: 'luci.streamlit',
method: 'upload_zip',
@ -295,6 +309,39 @@ return baseclass.extend({
return callUploadApp(name, content);
},
uploadChunk: function(name, data, index) {
return callUploadChunk(name, data, index);
},
uploadFinalize: function(name, isZip) {
return callUploadFinalize(name, isZip || '0');
},
/**
* Chunked upload for files > 40KB.
* Splits base64 into ~40KB chunks, sends each via upload_chunk,
* then calls upload_finalize to decode and save.
*/
chunkedUpload: function(name, content, isZip) {
var self = this;
var CHUNK_SIZE = 40000; // ~40KB per chunk, well under 64KB ubus limit
var chunks = [];
for (var i = 0; i < content.length; i += CHUNK_SIZE) {
chunks.push(content.substring(i, i + CHUNK_SIZE));
}
var promise = Promise.resolve();
chunks.forEach(function(chunk, idx) {
promise = promise.then(function() {
return self.uploadChunk(name, chunk, idx);
});
});
return promise.then(function() {
return self.uploadFinalize(name, isZip ? '1' : '0');
});
},
uploadZip: function(name, content, selectedFiles) {
return callUploadZip(name, content, selectedFiles);
},

View File

@ -614,6 +614,10 @@ return view.extend({
var isZip = file.name.endsWith('.zip');
var reader = new FileReader();
reader.onerror = function() {
ui.addNotification(null, E('p', {}, _('Failed to read file')), 'error');
};
reader.onload = function(e) {
var bytes = new Uint8Array(e.target.result);
var chunks = [];
@ -622,21 +626,39 @@ return view.extend({
}
var content = btoa(chunks.join(''));
var uploadFn = isZip ? api.uploadZip(name, content, null) : api.uploadApp(name, content);
// Stop polling to prevent RPC batch conflicts
poll.stop();
uploadFn.then(function(r) {
if (r && r.success) {
ui.addNotification(null, E('p', {}, _('App uploaded: ') + name), 'success');
fileInput.value = '';
self.refresh().then(function() { self.updateStatus(); });
// Use chunked upload for files > 40KB (uhttpd has 64KB JSON body limit)
var useChunked = content.length > 40000;
setTimeout(function() {
var uploadFn;
if (useChunked) {
uploadFn = api.chunkedUpload(name, content, isZip);
} else if (isZip) {
uploadFn = api.uploadZip(name, content, null);
} else {
var msg = (r && r.message) ? r.message : _('Upload failed');
ui.addNotification(null, E('p', {}, msg), 'error');
uploadFn = api.uploadApp(name, content);
}
}).catch(function(err) {
ui.addNotification(null, E('p', {},
_('Upload error: ') + (err.message || err)), 'error');
});
uploadFn.then(function(r) {
poll.start();
if (r && r.success) {
ui.addNotification(null, E('p', {}, _('App uploaded: ') + name), 'success');
fileInput.value = '';
self.refresh().then(function() { self.updateStatus(); });
} else {
var msg = (r && r.message) ? r.message : _('Upload failed');
ui.addNotification(null, E('p', {}, msg), 'error');
}
}).catch(function(err) {
poll.start();
ui.addNotification(null, E('p', {},
_('Upload error: ') + (err.message || err)), 'error');
});
}, 10);
};
reader.readAsArrayBuffer(file);

View File

@ -493,17 +493,30 @@ get_app() {
}
# Upload app (receive base64 content)
# NOTE: uhttpd-mod-ubus has a 64KB JSON body limit.
# Small files (<40KB) go through RPC directly.
# Larger files use chunked upload: upload_chunk + upload_finalize.
upload_app() {
read -r input
local name content
name=$(echo "$input" | jsonfilter -e '@.name' 2>/dev/null)
content=$(echo "$input" | jsonfilter -e '@.content' 2>/dev/null)
local tmpinput="/tmp/rpcd_upload_$$.json"
cat > "$tmpinput"
# Sanitize name for UCI compatibility
local name
name=$(jsonfilter -i "$tmpinput" -e '@.name' 2>/dev/null)
name=$(echo "$name" | sed 's/[^a-zA-Z0-9_]/_/g; s/^_*//; s/_*$//')
if [ -z "$name" ] || [ -z "$content" ]; then
json_error "Missing name or content"
if [ -z "$name" ]; then
rm -f "$tmpinput"
json_error "Missing name"
return
fi
local b64file="/tmp/rpcd_b64_$$.txt"
jsonfilter -i "$tmpinput" -e '@.content' > "$b64file" 2>/dev/null
rm -f "$tmpinput"
if [ ! -s "$b64file" ]; then
rm -f "$b64file"
json_error "Missing content"
return
fi
@ -514,22 +527,131 @@ upload_app() {
local app_file="$data_path/apps/${name}.py"
mkdir -p "$data_path/apps"
# Decode base64 and write
echo "$content" | base64 -d > "$app_file" 2>/dev/null
if [ $? -eq 0 ]; then
# Register in UCI
base64 -d < "$b64file" > "$app_file" 2>/dev/null
local rc=$?
rm -f "$b64file"
if [ $rc -eq 0 ] && [ -s "$app_file" ]; then
uci set "${CONFIG}.${name}=app"
uci set "${CONFIG}.${name}.name=$name"
uci set "${CONFIG}.${name}.path=${name}.py"
uci set "${CONFIG}.${name}.enabled=1"
uci commit "$CONFIG"
json_success "App uploaded: $name"
else
rm -f "$app_file"
json_error "Failed to decode app content"
fi
}
# Chunked upload: buffer one base64 chunk for a later upload_finalize.
# Reads the JSON-RPC request from stdin into a temp file first, so large
# payloads never pass through shell variables via `read`.
upload_chunk() {
	local req="/tmp/rpcd_chunk_$$.json"
	cat > "$req"
	local name chunk_data chunk_index
	name=$(jsonfilter -i "$req" -e '@.name' 2>/dev/null)
	chunk_data=$(jsonfilter -i "$req" -e '@.data' 2>/dev/null)
	chunk_index=$(jsonfilter -i "$req" -e '@.index' 2>/dev/null)
	rm -f "$req"
	# Keep the app name UCI-safe: alphanumerics/underscores only, trimmed.
	name=$(echo "$name" | sed 's/[^a-zA-Z0-9_]/_/g; s/^_*//; s/_*$//')
	if [ -z "$name" ] || [ -z "$chunk_data" ]; then
		json_error "Missing name or data"
		return
	fi
	local staging="/tmp/streamlit_upload_${name}.b64"
	if [ "$chunk_index" = "0" ]; then
		# Index 0 starts a fresh staging file (clobbers any stale leftover
		# from a previously aborted upload of the same app).
		printf '%s' "$chunk_data" > "$staging"
	else
		printf '%s' "$chunk_data" >> "$staging"
	fi
	json_success "Chunk $chunk_index received"
}
# Finalize chunked upload: decode accumulated base64 and save
# Reads {name, is_zip} from the JSON-RPC request on stdin, decodes the
# staging file written by upload_chunk, installs the app under
# <data_path>/apps, and registers it in UCI. The staging file is always
# consumed (deleted) whether decoding succeeds or fails.
upload_finalize() {
# Buffer the small control request to a temp file rather than `read`,
# consistent with the other upload handlers.
local tmpinput="/tmp/rpcd_finalize_$$.json"
cat > "$tmpinput"
local name is_zip
name=$(jsonfilter -i "$tmpinput" -e '@.name' 2>/dev/null)
is_zip=$(jsonfilter -i "$tmpinput" -e '@.is_zip' 2>/dev/null)
rm -f "$tmpinput"
# Sanitize name for UCI compatibility (alphanumerics/underscores only).
name=$(echo "$name" | sed 's/[^a-zA-Z0-9_]/_/g; s/^_*//; s/_*$//')
if [ -z "$name" ]; then
json_error "Missing name"
return
fi
# Staging file must match the path used by upload_chunk.
local staging="/tmp/streamlit_upload_${name}.b64"
if [ ! -s "$staging" ]; then
json_error "No upload data found for $name"
return
fi
local data_path
config_load "$CONFIG"
config_get data_path main data_path "/srv/streamlit"
mkdir -p "$data_path/apps"
# Frontend sends is_zip as '1'/'0'; accept 'true' defensively.
if [ "$is_zip" = "1" ] || [ "$is_zip" = "true" ]; then
# Decode as ZIP and extract
local tmpzip="/tmp/upload_${name}_$$.zip"
base64 -d < "$staging" > "$tmpzip" 2>/dev/null
rm -f "$staging"
if [ ! -s "$tmpzip" ]; then
rm -f "$tmpzip"
json_error "Failed to decode ZIP"
return
fi
local app_dir="$data_path/apps/$name"
mkdir -p "$app_dir"
unzip -o "$tmpzip" -d "$app_dir" >/dev/null 2>&1
rm -f "$tmpzip"
# Pick a main script: first .py found up to two levels deep.
# NOTE(review): head -1 is arbitrary when several .py files exist —
# presumably the runtime's find_app_file re-resolves this; confirm.
local main_py
main_py=$(find "$app_dir" -maxdepth 2 -name "*.py" -type f | head -1)
if [ -n "$main_py" ]; then
uci set "${CONFIG}.${name}=app"
uci set "${CONFIG}.${name}.name=$name"
# NOTE(review): stores the absolute path here, while the .py branch
# below stores a relative one — verify consumers accept both.
uci set "${CONFIG}.${name}.path=$main_py"
uci set "${CONFIG}.${name}.enabled=1"
uci commit "$CONFIG"
json_success "ZIP app deployed: $name"
else
json_error "No Python files found in archive"
fi
else
# Decode as .py file
local app_file="$data_path/apps/${name}.py"
base64 -d < "$staging" > "$app_file" 2>/dev/null
local rc=$?
rm -f "$staging"
# Require both a clean decode and a non-empty result file.
if [ $rc -eq 0 ] && [ -s "$app_file" ]; then
uci set "${CONFIG}.${name}=app"
uci set "${CONFIG}.${name}.name=$name"
uci set "${CONFIG}.${name}.path=${name}.py"
uci set "${CONFIG}.${name}.enabled=1"
uci commit "$CONFIG"
json_success "App uploaded: $name"
else
# Remove the partial/empty file so a broken upload leaves no trace.
rm -f "$app_file"
json_error "Failed to decode app content"
fi
fi
}
# List instances
list_instances() {
json_init_obj
@ -716,20 +838,16 @@ disable_instance() {
# Preview ZIP contents
preview_zip() {
read -r input
local content
content=$(echo "$input" | jsonfilter -e '@.content' 2>/dev/null)
# Write stdin to temp file to avoid shell variable size limits
local tmpinput="/tmp/rpcd_preview_$$.json"
cat > "$tmpinput"
if [ -z "$content" ]; then
json_error "Missing content"
return
fi
# Write to temp file and list contents
local tmpzip="/tmp/preview_$$.zip"
echo "$content" | base64 -d > "$tmpzip" 2>/dev/null
jsonfilter -i "$tmpinput" -e '@.content' 2>/dev/null | base64 -d > "$tmpzip" 2>/dev/null
rm -f "$tmpinput"
if [ ! -f "$tmpzip" ]; then
if [ ! -s "$tmpzip" ]; then
rm -f "$tmpzip"
json_error "Failed to decode ZIP"
return
fi
@ -758,17 +876,20 @@ preview_zip() {
# Upload ZIP with selected files
upload_zip() {
read -r input
local name content selected_files
name=$(echo "$input" | jsonfilter -e '@.name' 2>/dev/null)
content=$(echo "$input" | jsonfilter -e '@.content' 2>/dev/null)
selected_files=$(echo "$input" | jsonfilter -e '@.selected_files' 2>/dev/null)
# Write stdin to temp file to avoid shell variable size limits
local tmpinput="/tmp/rpcd_zipinput_$$.json"
cat > "$tmpinput"
local name selected_files
name=$(jsonfilter -i "$tmpinput" -e '@.name' 2>/dev/null)
selected_files=$(jsonfilter -i "$tmpinput" -e '@.selected_files' 2>/dev/null)
# Sanitize name for UCI compatibility (alphanumeric and underscores only)
name=$(echo "$name" | sed 's/[^a-zA-Z0-9_]/_/g; s/^_*//; s/_*$//')
if [ -z "$name" ] || [ -z "$content" ]; then
json_error "Missing name or content"
if [ -z "$name" ]; then
rm -f "$tmpinput"
json_error "Missing name"
return
fi
@ -779,9 +900,12 @@ upload_zip() {
local app_dir="$data_path/apps/$name"
local tmpzip="/tmp/upload_$$.zip"
# Decode ZIP
echo "$content" | base64 -d > "$tmpzip" 2>/dev/null
if [ ! -f "$tmpzip" ]; then
# Extract base64 content and decode directly to zip file
jsonfilter -i "$tmpinput" -e '@.content' 2>/dev/null | base64 -d > "$tmpzip" 2>/dev/null
rm -f "$tmpinput"
if [ ! -s "$tmpzip" ]; then
rm -f "$tmpzip"
json_error "Failed to decode ZIP"
return
fi
@ -1024,6 +1148,8 @@ case "$1" in
"remove_app": {"name": "str"},
"set_active_app": {"name": "str"},
"upload_app": {"name": "str", "content": "str"},
"upload_chunk": {"name": "str", "data": "str", "index": 0},
"upload_finalize": {"name": "str", "is_zip": "str"},
"preview_zip": {"content": "str"},
"upload_zip": {"name": "str", "content": "str", "selected_files": []},
"get_install_progress": {},
@ -1092,6 +1218,12 @@ case "$1" in
upload_app)
upload_app
;;
upload_chunk)
upload_chunk
;;
upload_finalize)
upload_finalize
;;
preview_zip)
preview_zip
;;

View File

@ -18,9 +18,10 @@
"save_config", "start", "stop", "restart",
"install", "uninstall", "update",
"add_app", "remove_app", "set_active_app", "upload_app",
"upload_chunk", "upload_finalize",
"preview_zip", "upload_zip",
"add_instance", "remove_instance", "enable_instance", "disable_instance",
"rename_app", "rename_instance",
"rename_app", "rename_instance",
"save_gitea_config", "gitea_clone", "gitea_pull"
]
},

View File

@ -276,25 +276,31 @@ start_instance() {
local app_name="$1"
local port="$2"
local app_dir="$APPS_BASE/$app_name"
local app_file=""
local work_dir=""
if [ ! -d "$app_dir" ]; then
echo "App folder not found: $app_dir"
if [ -d "$app_dir" ]; then
# Folder-based app (ZIP upload, Gitea clone, or created via CLI)
app_file=$(find_app_file "$app_dir")
if [ -z "$app_file" ]; then
echo "No Python app file found in $app_dir"
return 1
fi
install_requirements "$app_dir"
work_dir="$app_dir"
elif [ -f "$APPS_BASE/${app_name}.py" ]; then
# Top-level single .py file (direct upload)
app_file="$APPS_BASE/${app_name}.py"
work_dir="$APPS_BASE"
else
echo "App not found: $app_name (no folder or .py file)"
return 1
fi
local app_file=$(find_app_file "$app_dir")
if [ -z "$app_file" ]; then
echo "No Python app file found in $app_dir"
return 1
fi
# Install requirements
install_requirements "$app_dir"
echo "Starting instance: $app_name on port $port (file: $(basename $app_file))"
# Change to app directory so relative imports work
cd "$app_dir"
# Change to app/work directory so relative imports work
cd "$work_dir"
nohup streamlit run "$(basename $app_file)" \
--server.address="0.0.0.0" \
@ -804,18 +810,28 @@ cmd_instance_start() {
log_info "Starting instance '$name' (app: $app, port: $port)..."
lxc_exec sh -c "
cd /srv/apps/$app 2>/dev/null || exit 1
# Find main file
WORK_DIR=''
APP_FILE=''
for f in app.py main.py ${app}.py; do
[ -f \"\$f\" ] && { APP_FILE=\"\$f\"; break; }
done
[ -z \"\$APP_FILE\" ] && APP_FILE=\$(ls -1 *.py 2>/dev/null | head -1)
[ -z \"\$APP_FILE\" ] && { echo 'No Python file found'; exit 1; }
# Install requirements
[ -f requirements.txt ] && pip3 install --break-system-packages -r requirements.txt 2>/dev/null
if [ -d /srv/apps/$app ]; then
WORK_DIR='/srv/apps/$app'
cd \"\$WORK_DIR\"
# Find main file in folder
for f in app.py main.py ${app}.py; do
[ -f \"\$f\" ] && { APP_FILE=\"\$f\"; break; }
done
[ -z \"\$APP_FILE\" ] && APP_FILE=\$(ls -1 *.py 2>/dev/null | head -1)
# Install requirements
[ -f requirements.txt ] && pip3 install --break-system-packages -r requirements.txt 2>/dev/null
elif [ -f /srv/apps/${app}.py ]; then
WORK_DIR='/srv/apps'
APP_FILE='${app}.py'
cd \"\$WORK_DIR\"
fi
[ -z \"\$APP_FILE\" ] && { echo 'No Python file found for $app'; exit 1; }
# Kill existing
[ -f /var/run/streamlit/${app}.pid ] && kill \$(cat /var/run/streamlit/${app}.pid) 2>/dev/null