feat(openclaw): Add Google Gemini API support

- Added gemini provider with models: gemini-1.5-flash, gemini-1.5-pro, gemini-pro
- Updated RPCD handler with Gemini API endpoint
- Updated settings.js with Google AI Studio link
- Updated chat.js to parse Gemini response format
- Changed Ollama default URL to LocalAI (port 8091)

Co-Authored-By: Claude Opus 4.5 <noreply@anthropic.com>
This commit is contained in:
CyberMind-FR 2026-02-27 11:47:17 +01:00
parent 6dca8b3460
commit 5d905c23ac
145 changed files with 311 additions and 262 deletions

View File

@ -102,6 +102,9 @@ return view.extend({
} else if (response.message && response.message.content) {
// Ollama format
aiResponse = response.message.content || '';
} else if (response.candidates && response.candidates[0]) {
// Gemini format
aiResponse = response.candidates[0].content.parts[0].text || '';
} else {
aiResponse = JSON.stringify(response);
}

View File

@ -67,7 +67,8 @@ return view.extend({
var providerSelect = E('select', { 'id': 'provider-select' }, [
E('option', { 'value': 'anthropic', 'selected': config.provider === 'anthropic' }, 'Anthropic (Claude)'),
E('option', { 'value': 'openai', 'selected': config.provider === 'openai' }, 'OpenAI (GPT)'),
E('option', { 'value': 'ollama', 'selected': config.provider === 'ollama' }, 'Ollama (Local)')
E('option', { 'value': 'gemini', 'selected': config.provider === 'gemini' }, 'Google (Gemini)'),
E('option', { 'value': 'ollama', 'selected': config.provider === 'ollama' }, 'Ollama/LocalAI (Local)')
]);
var modelSelect = E('select', { 'id': 'model-select' });
@ -154,8 +155,10 @@ return view.extend({
E('a', { 'href': 'https://console.anthropic.com/', 'target': '_blank' }, 'Anthropic Console'),
' | ',
E('a', { 'href': 'https://platform.openai.com/', 'target': '_blank' }, 'OpenAI Platform'),
' | ',
E('a', { 'href': 'https://aistudio.google.com/apikey', 'target': '_blank' }, 'Google AI Studio'),
' | ',
E('a', { 'href': 'https://ollama.ai/', 'target': '_blank' }, 'Ollama (Free/Local)')
E('a', { 'href': 'https://ollama.ai/', 'target': '_blank' }, 'Ollama/LocalAI')
]),
E('div', { 'class': 'setting-row' }, [
E('label', { 'for': 'provider-select' }, 'Provider'),

View File

@ -108,6 +108,12 @@ list_models() {
json_add_string "" "codellama"
json_close_array
json_add_array "gemini"
json_add_string "" "gemini-1.5-flash"
json_add_string "" "gemini-1.5-pro"
json_add_string "" "gemini-pro"
json_close_array
json_close_object
json_dump
}
@ -149,11 +155,18 @@ do_chat() {
;;
ollama)
local ollama_url=$(uci -q get $OPENCLAW_CONFIG.llm.ollama_url)
ollama_url="${ollama_url:-http://127.0.0.1:11434}"
ollama_url="${ollama_url:-http://127.0.0.1:8091}"
# Use OpenAI-compatible endpoint (works with LocalAI)
response=$(wget -q -O - \
--header="Content-Type: application/json" \
--post-data="{\"model\":\"$model\",\"messages\":[{\"role\":\"user\",\"content\":\"$message\"}],\"stream\":false}" \
"${ollama_url}/api/chat" 2>/dev/null)
--post-data="{\"model\":\"$model\",\"messages\":[{\"role\":\"user\",\"content\":\"$message\"}]}" \
"${ollama_url}/v1/chat/completions" 2>/dev/null)
;;
gemini)
response=$(wget -q -O - \
--header="Content-Type: application/json" \
--post-data="{\"contents\":[{\"parts\":[{\"text\":\"$message\"}]}]}" \
"https://generativelanguage.googleapis.com/v1beta/models/${model}:generateContent?key=${api_key}" 2>/dev/null)
;;
esac
@ -206,8 +219,14 @@ test_api() {
;;
ollama)
local ollama_url=$(uci -q get $OPENCLAW_CONFIG.llm.ollama_url)
ollama_url="${ollama_url:-http://127.0.0.1:11434}"
test_result=$(wget -q -O - "${ollama_url}/api/tags" 2>&1)
ollama_url="${ollama_url:-http://127.0.0.1:8091}"
test_result=$(wget -q -O - "${ollama_url}/v1/models" 2>&1)
;;
gemini)
test_result=$(wget -q -O - \
--header="Content-Type: application/json" \
--post-data="{\"contents\":[{\"parts\":[{\"text\":\"ping\"}]}]}" \
"https://generativelanguage.googleapis.com/v1beta/models/${model}:generateContent?key=${api_key}" 2>&1)
;;
esac

Some files were not shown because too many files have changed in this diff. Show More