- Add gte-small embedding model preset to localaictl with proper YAML config (embeddings: true, context_size: 512)
- Fix RPC expect declarations across api.js, dashboard.js, models.js to use empty expect objects, preserving the full response including error fields
- Replace fragile sed/awk JSON escaping in RPCD chat and completion handlers with file I/O streamed through awk for robust handling of special characters in LLM responses
- Switch the RPCD chat handler from curl to wget to avoid a missing output file on timeout (curl does not create the -o file on exit code 28)
- Bypass the RPCD 30s script timeout for chat by calling the LocalAI API directly from the browser via fetch()
- Add an embeddings flag to the models RPC and filter embedding models out of the chat view model selector

Co-Authored-By: Claude Opus 4.5 <noreply@anthropic.com>
141 lines
2.7 KiB
JavaScript
'use strict';
|
|
'require baseclass';
|
|
'require rpc';
|
|
|
|
/**
|
|
* LocalAI Dashboard API
|
|
* Package: luci-app-localai
|
|
* RPCD object: luci.localai
|
|
*/
|
|
|
|
// Version: 0.1.0
|
|
|
|
// Read-only RPC bindings for the `luci.localai` ubus object.
// All of them use an empty `expect` (except `models`) so the full RPCD
// response is preserved, including any `error` field the backend returns.

// Service/process status snapshot.
var callStatus = rpc.declare({
	object: 'luci.localai',
	method: 'status',
	expect: { }
});

// List of installed models; unwraps the `models` array (defaults to []).
var callModels = rpc.declare({
	object: 'luci.localai',
	method: 'models',
	expect: { models: [] }
});

// Current LocalAI configuration.
var callConfig = rpc.declare({
	object: 'luci.localai',
	method: 'config',
	expect: { }
});

// Backend health probe.
var callHealth = rpc.declare({
	object: 'luci.localai',
	method: 'health',
	expect: { }
});

// Runtime metrics.
var callMetrics = rpc.declare({
	object: 'luci.localai',
	method: 'metrics',
	expect: { }
});
|
|
|
|
// Service-control RPC bindings. Empty `expect` keeps the whole response,
// including any `error` field, so views can report failures.

// Start the LocalAI service.
var callStart = rpc.declare({
	object: 'luci.localai',
	method: 'start',
	expect: { }
});

// Stop the LocalAI service.
var callStop = rpc.declare({
	object: 'luci.localai',
	method: 'stop',
	expect: { }
});

// Restart the LocalAI service.
var callRestart = rpc.declare({
	object: 'luci.localai',
	method: 'restart',
	expect: { }
});
|
|
|
|
// Model-management and inference RPC bindings. Empty `expect` preserves
// the full response (including error fields) for the calling view.

// Install a model preset by name.
var callModelInstall = rpc.declare({
	object: 'luci.localai',
	method: 'model_install',
	params: ['name'],
	expect: { }
});

// Remove an installed model by name.
var callModelRemove = rpc.declare({
	object: 'luci.localai',
	method: 'model_remove',
	params: ['name'],
	expect: { }
});

// Chat completion: `messages` is the conversation payload for `model`.
var callChat = rpc.declare({
	object: 'luci.localai',
	method: 'chat',
	params: ['model', 'messages'],
	expect: { }
});

// Plain text completion of `prompt` with `model`.
var callComplete = rpc.declare({
	object: 'luci.localai',
	method: 'complete',
	params: ['model', 'prompt'],
	expect: { }
});
|
|
|
|
/**
 * Format a byte count as a human-readable string using base-1024 units.
 *
 * @param {number} bytes - Byte count (expected non-negative and finite).
 * @returns {string} e.g. "2.7 KB"; "0 B" for zero; "N/A" for invalid input.
 */
function formatBytes(bytes) {
	if (bytes === 0)
		return '0 B';

	// Guard: NaN/Infinity/negative values would break the log-based
	// unit lookup below. 'N/A' matches formatUptime()'s convention.
	if (!Number.isFinite(bytes) || bytes < 0)
		return 'N/A';

	var k = 1024;
	var sizes = ['B', 'KB', 'MB', 'GB', 'TB'];

	// Clamp the unit index: values < 1 would yield a negative index and
	// values >= 1024^5 would run past the end of the sizes table, both
	// producing an "undefined" unit suffix in the original code.
	var i = Math.floor(Math.log(bytes) / Math.log(k));
	i = Math.min(Math.max(i, 0), sizes.length - 1);

	return parseFloat((bytes / Math.pow(k, i)).toFixed(2)) + ' ' + sizes[i];
}
|
|
|
|
/**
 * Render an uptime in seconds as a short two-unit string.
 *
 * @param {number} seconds - Uptime in seconds.
 * @returns {string} "Nd Nh", "Nh Nm" or "Nm"; "N/A" for zero/missing input.
 */
function formatUptime(seconds) {
	// Zero or missing uptime is reported as unavailable.
	if (!seconds)
		return 'N/A';

	var d = Math.floor(seconds / 86400);
	var h = Math.floor((seconds % 86400) / 3600);
	var m = Math.floor((seconds % 3600) / 60);

	// Show the two most significant non-zero units only.
	return (d > 0) ? d + 'd ' + h + 'h'
	     : (h > 0) ? h + 'h ' + m + 'm'
	     : m + 'm';
}
|
|
|
|
return baseclass.extend({
|
|
getStatus: callStatus,
|
|
getModels: callModels,
|
|
getConfig: callConfig,
|
|
getHealth: callHealth,
|
|
getMetrics: callMetrics,
|
|
start: callStart,
|
|
stop: callStop,
|
|
restart: callRestart,
|
|
modelInstall: callModelInstall,
|
|
modelRemove: callModelRemove,
|
|
chat: callChat,
|
|
complete: callComplete,
|
|
formatBytes: formatBytes,
|
|
formatUptime: formatUptime,
|
|
|
|
// Aggregate function for dashboard
|
|
getDashboardData: function() {
|
|
return Promise.all([
|
|
callStatus(),
|
|
callModels(),
|
|
callHealth(),
|
|
callMetrics()
|
|
]).then(function(results) {
|
|
return {
|
|
status: results[0] || {},
|
|
models: results[1] || { models: [] },
|
|
health: results[2] || { healthy: false },
|
|
metrics: results[3] || {}
|
|
};
|
|
});
|
|
}
|
|
});
|