Ollama AI can now use a customized API endpoint instead of always using localhost.

This commit is contained in:
Storm Dragon
2025-08-11 12:10:21 -04:00
parent a80bca78d1
commit 7b84cd7492
6 changed files with 63 additions and 14 deletions
+1 -1
View File
@@ -1,7 +1,7 @@
# Maintainer: Storm Dragon <storm_dragon@stormux.org>
pkgname=cthulhu
pkgver=2025.08.06
pkgver=2025.08.10
pkgrel=1
pkgdesc="Desktop-agnostic screen reader with plugin system, forked from Orca"
url="https://git.stormux.org/storm/cthulhu"
+30 -2
View File
@@ -3571,6 +3571,34 @@
<property name="top_attach">4</property>
</packing>
</child>
<child>
<object class="GtkLabel" id="aiOllamaEndpointLabel">
<property name="visible">True</property>
<property name="can_focus">False</property>
<property name="halign">start</property>
<property name="label" translatable="yes">Ollama _Endpoint:</property>
<property name="use_underline">True</property>
<property name="mnemonic_widget">aiOllamaEndpointEntry</property>
</object>
<packing>
<property name="left_attach">0</property>
<property name="top_attach">5</property>
</packing>
</child>
<child>
<object class="GtkEntry" id="aiOllamaEndpointEntry">
<property name="visible">True</property>
<property name="can_focus">True</property>
<property name="hexpand">True</property>
<property name="text">http://localhost:11434</property>
<property name="placeholder_text" translatable="yes">Ollama API endpoint URL (e.g., http://192.168.1.100:11434)</property>
<signal name="changed" handler="aiOllamaEndpointChanged" swapped="no"/>
</object>
<packing>
<property name="left_attach">1</property>
<property name="top_attach">5</property>
</packing>
</child>
<child>
<object class="GtkLabel" id="aiScreenshotQualityLabel">
<property name="visible">True</property>
@@ -3582,7 +3610,7 @@
</object>
<packing>
<property name="left_attach">0</property>
<property name="top_attach">5</property>
<property name="top_attach">6</property>
</packing>
</child>
<child>
@@ -3600,7 +3628,7 @@
</object>
<packing>
<property name="left_attach">1</property>
<property name="top_attach">5</property>
<property name="top_attach">6</property>
</packing>
</child>
</object>
+1 -1
View File
@@ -23,5 +23,5 @@
# Fork of Orca Screen Reader (GNOME)
# Original source: https://gitlab.gnome.org/GNOME/orca
version = "2025.08.10"
version = "2025.08.11"
codeName = "testing"
+14 -2
View File
@@ -1855,6 +1855,7 @@ class CthulhuSetupGUI(cthulhu_gtkbuilder.GtkBuilderWrapper):
self.aiProviderCombo = self.get_widget("aiProviderCombo")
self.aiApiKeyEntry = self.get_widget("aiApiKeyEntry")
self.aiOllamaModelEntry = self.get_widget("aiOllamaModelEntry")
self.aiOllamaEndpointEntry = self.get_widget("aiOllamaEndpointEntry")
self.aiConfirmationCheckButton = self.get_widget("aiConfirmationCheckButton")
self.aiScreenshotQualityCombo = self.get_widget("aiScreenshotQualityCombo")
@@ -1883,6 +1884,10 @@ class CthulhuSetupGUI(cthulhu_gtkbuilder.GtkBuilderWrapper):
ollamaModel = prefs.get("aiOllamaModel", settings.aiOllamaModel)
self.aiOllamaModelEntry.set_text(ollamaModel)
# Set Ollama endpoint
ollamaEndpoint = prefs.get("aiOllamaEndpoint", settings.aiOllamaEndpoint)
self.aiOllamaEndpointEntry.set_text(ollamaEndpoint)
# Set confirmation checkbox
confirmationRequired = prefs.get("aiConfirmationRequired", settings.aiConfirmationRequired)
self.aiConfirmationCheckButton.set_active(confirmationRequired)
@@ -1904,6 +1909,7 @@ class CthulhuSetupGUI(cthulhu_gtkbuilder.GtkBuilderWrapper):
self.aiProviderCombo.set_sensitive(enabled)
self.aiApiKeyEntry.set_sensitive(enabled)
self.aiOllamaModelEntry.set_sensitive(enabled)
self.aiOllamaEndpointEntry.set_sensitive(enabled)
self.aiConfirmationCheckButton.set_sensitive(enabled)
self.aiScreenshotQualityCombo.set_sensitive(enabled)
try:
@@ -1929,8 +1935,10 @@ class CthulhuSetupGUI(cthulhu_gtkbuilder.GtkBuilderWrapper):
except:
pass # Button might not exist
# Ollama model entry (only for Ollama)
self.aiOllamaModelEntry.set_sensitive(provider == settings.AI_PROVIDER_OLLAMA)
# Ollama model and endpoint entries (only for Ollama)
ollama_enabled = provider == settings.AI_PROVIDER_OLLAMA
self.aiOllamaModelEntry.set_sensitive(ollama_enabled)
self.aiOllamaEndpointEntry.set_sensitive(ollama_enabled)
# Update labels based on provider
if provider == settings.AI_PROVIDER_CLAUDE_CODE:
@@ -3701,6 +3709,10 @@ class CthulhuSetupGUI(cthulhu_gtkbuilder.GtkBuilderWrapper):
"""AI Ollama model entry changed handler"""
self.prefsDict["aiOllamaModel"] = widget.get_text()
def aiOllamaEndpointChanged(self, widget):
    """Handler for the 'changed' signal of the Ollama endpoint GtkEntry.

    Stores the entry's current text under the "aiOllamaEndpoint" key of
    the pending preferences dictionary so it is written out on apply.
    """
    endpoint = widget.get_text()
    self.prefsDict["aiOllamaEndpoint"] = endpoint
def aiGetClaudeKeyClicked(self, widget):
"""Get Claude API Key button clicked handler"""
import subprocess
+15 -8
View File
@@ -61,6 +61,7 @@ class AIAssistant(Plugin):
self._ai_provider = None
self._api_key = None
self._ollama_model = None
self._ollama_endpoint = None
self._settings_manager = settings_manager.getManager()
# Plugin enabled state
@@ -153,14 +154,19 @@ class AIAssistant(Plugin):
else:
self._api_key = None
# Load Ollama model
# Load Ollama model and endpoint
self._ollama_model = self._settings_manager.getSetting('aiOllamaModel')
if not self._ollama_model:
self._ollama_model = settings.aiOllamaModel
self._ollama_endpoint = self._settings_manager.getSetting('aiOllamaEndpoint')
if not self._ollama_endpoint:
self._ollama_endpoint = settings.aiOllamaEndpoint
logger.info(f"AI settings loaded: provider={self._provider_type}, "
f"api_key_configured={bool(self._api_key)}, "
f"ollama_model={self._ollama_model}")
f"ollama_model={self._ollama_model}, "
f"ollama_endpoint={self._ollama_endpoint}")
except Exception as e:
logger.error(f"Error loading AI settings: {e}")
@@ -195,16 +201,17 @@ class AIAssistant(Plugin):
"""Check if Ollama is available and has vision models."""
try:
import requests
# Check if Ollama is running
response = requests.get('http://localhost:11434/api/version', timeout=5)
# Check if Ollama is running at the configured endpoint
endpoint_url = f"{self._ollama_endpoint}/api/version"
response = requests.get(endpoint_url, timeout=5)
if response.status_code == 200:
logger.info("Ollama service is available")
logger.info(f"Ollama service is available at {self._ollama_endpoint}")
return True
else:
logger.warning("Ollama service not responding")
logger.warning(f"Ollama service not responding at {self._ollama_endpoint}")
return False
except Exception as e:
logger.warning(f"Ollama not available: {e}")
logger.warning(f"Ollama not available at {self._ollama_endpoint}: {e}")
return False
def _check_claude_code_availability(self):
@@ -242,7 +249,7 @@ class AIAssistant(Plugin):
elif self._provider_type == settings.AI_PROVIDER_CLAUDE_CODE:
self._ai_provider = create_provider("claude_code")
elif self._provider_type == settings.AI_PROVIDER_OLLAMA:
self._ai_provider = create_provider("ollama", model=self._ollama_model)
self._ai_provider = create_provider("ollama", model=self._ollama_model, base_url=self._ollama_endpoint)
else:
logger.error(f"Unsupported provider type: {self._provider_type}")
return False
+2
View File
@@ -153,6 +153,7 @@ userCustomizableSettings = [
"aiProvider",
"aiApiKeyFile",
"aiOllamaModel",
"aiOllamaEndpoint",
"aiConfirmationRequired",
"aiActionTimeout",
"aiScreenshotQuality",
@@ -439,6 +440,7 @@ aiAssistantEnabled = True
aiProvider = AI_PROVIDER_CLAUDE_CODE
aiApiKeyFile = ""
aiOllamaModel = "llama3.2-vision"
aiOllamaEndpoint = "http://localhost:11434"
aiConfirmationRequired = True
aiActionTimeout = 30
aiScreenshotQuality = AI_SCREENSHOT_QUALITY_MEDIUM