Add Codex CLI provider support to ai.py.

Authored by Dane Stange on 2025-12-02 14:20:11 -05:00
Committed by Storm Dragon
parent c19700b089
commit 44c64aef1c


@@ -204,6 +204,8 @@ class AiConfig:
'ollama_model': 'llama2',
'ollama_vision_model': 'llava',
'ollama_host': 'http://localhost:11434',
'codex_model': 'gpt-5.1-codex',
'codex_profile': '',
'confirm_actions': 'true',
'voice_enabled': 'false',
'voice_output': 'true',
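The two new keys follow the same pattern as the existing Ollama defaults and are read with the same accessor used later in this file. A minimal sketch, assuming AiConfig.get() accepts a fallback value as the later hunks show:

    config = AiConfig()
    model = config.get('codex_model', '')      # 'gpt-5.1-codex' unless the user overrides it
    profile = config.get('codex_profile', '')  # empty string means no --profile flag is passed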
@@ -354,6 +356,109 @@ class ClaudeCodeInterface:
except Exception as e:
return f"Error communicating with Claude Code: {str(e)}"
class CodexCliInterface:
"""Interface for Codex CLI provider"""
def is_available(self):
"""Check if Codex CLI is available"""
try:
result = subprocess.run(['codex', '--version'],
capture_output=True, text=True, timeout=5)
return result.returncode == 0
except (subprocess.SubprocessError, FileNotFoundError, OSError):
return False
def send_message(self, message, model=None, profile=None, context=None, image_path=None):
"""Send message to Codex CLI using non-interactive exec mode"""
try:
full_message = message
if context and not context.startswith("You are a helpful AI assistant"):
full_message = f"Context: {context}\n\n{message}"
cmd = ['codex', 'exec', '--skip-git-repo-check', '--sandbox', 'read-only']
if model:
cmd.extend(['--model', model])
if profile:
cmd.extend(['--profile', profile])
if image_path and os.path.exists(image_path):
cmd.extend(['--image', image_path])
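# Capture only the final assistant message in a temp file (via --output-last-message below)
# so the response does not have to be parsed out of the full exec transcript on stdout.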
with tempfile.NamedTemporaryFile(delete=False) as temp_output:
output_path = temp_output.name
cmd.extend(['--output-last-message', output_path, full_message])
try:
result = subprocess.run(
cmd,
capture_output=True,
text=True,
timeout=120,
cwd=os.path.expanduser('~')
)
finally:
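# Read back whatever Codex wrote and always remove the temp file, even on timeout or error.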
response_text = ""
if os.path.exists(output_path):
try:
with open(output_path, 'r', encoding='utf-8', errors='ignore') as f:
response_text = f.read().strip()
finally:
os.unlink(output_path)
if result.returncode == 0:
return response_text if response_text else result.stdout.strip()
else:
stderr = result.stderr.strip() or "Unknown Codex CLI error"
return f"Error: {stderr}"
except subprocess.TimeoutExpired:
return "Error: Codex CLI request timed out"
except Exception as e:
return f"Error communicating with Codex CLI: {str(e)}"
def discover_available_models(self, max_files=25, max_lines=200):
"""Discover Codex models from recent session logs"""
sessions_dir = Path(os.path.expanduser('~')) / '.codex' / 'sessions'
models = []
if not sessions_dir.exists():
return models
try:
session_files = sorted(
sessions_dir.rglob('rollout-*.jsonl'),
key=lambda path: path.stat().st_mtime,
reverse=True
)
except OSError:
return models
seen = set()
for path in session_files[:max_files]:
try:
with open(path, 'r', encoding='utf-8', errors='ignore') as f:
for _ in range(max_lines):
line = f.readline()
if not line:
break
try:
event = json.loads(line)
except json.JSONDecodeError:
continue
if event.get('type') != 'turn_context':
continue
payload = event.get('payload', {})
model = payload.get('model')
if model and model not in seen:
seen.add(model)
models.append(model)
except (OSError, IOError):
continue
return models
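The scan above depends on only two fields per JSONL event: a type of 'turn_context' and a model name inside the payload. A synthetic example line, assuming the real rollout files carry additional fields that are simply ignored here:

    import json
    line = '{"type": "turn_context", "payload": {"model": "gpt-5.1-codex"}}'
    event = json.loads(line)
    assert event.get('type') == 'turn_context'
    assert event.get('payload', {}).get('model') == 'gpt-5.1-codex'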
class WindowContext:
"""Get context information from focused window"""
@@ -395,6 +500,7 @@ class AiAssistant(Gtk.Window):
# Initialize components
self.config = AiConfig()
self.claudeInterface = ClaudeCodeInterface()
self.codexInterface = CodexCliInterface()
self.ollamaInterface = OllamaInterface(self.config.get('ollama_host'))
self.windowContext = WindowContext()
self.voiceRecognition = VoiceRecognition(self.config)
@@ -630,6 +736,13 @@ class AiAssistant(Gtk.Window):
self.claudeRadio.set_can_focus(True)
self.claudeRadio.get_accessible().set_description("Use Claude Code CLI as AI provider")
providerBox.pack_start(self.claudeRadio, False, False, 0)
# Codex CLI option
self.codexRadio = Gtk.RadioButton.new_with_mnemonic_from_widget(self.claudeRadio, "_Codex CLI")
self.codexRadio.connect("toggled", self.on_provider_changed)
self.codexRadio.set_can_focus(True)
self.codexRadio.get_accessible().set_description("Use Codex CLI as AI provider")
providerBox.pack_start(self.codexRadio, False, False, 0)
# Ollama option
self.ollamaRadio = Gtk.RadioButton.new_with_mnemonic_from_widget(self.claudeRadio, "_Ollama")
@@ -640,6 +753,66 @@ class AiAssistant(Gtk.Window):
providerFrame.add(providerBox)
vbox.pack_start(providerFrame, False, False, 0)
# Codex CLI settings
self.codexFrame = Gtk.Frame(label="Codex CLI Settings")
codexBox = Gtk.VBox(spacing=10)
codexBox.set_border_width(10)
codexModelLabel = Gtk.Label("Preferred _model (optional):")
codexModelLabel.set_use_underline(True)
codexModelLabel.set_alignment(0, 0.5)
codexBox.pack_start(codexModelLabel, False, False, 0)
self.codexModelEntry = Gtk.Entry()
self.codexModelEntry.set_text(self.config.get('codex_model', ''))
self.codexModelEntry.set_placeholder_text("e.g., gpt-4.1, o3, etc.")
self.codexModelEntry.set_can_focus(True)
self.codexModelEntry.get_accessible().set_name("Codex model")
self.codexModelEntry.get_accessible().set_description("Optional Codex model override")
codexModelLabel.set_mnemonic_widget(self.codexModelEntry)
codexBox.pack_start(self.codexModelEntry, False, False, 0)
codexProfileLabel = Gtk.Label("Configuration _profile (optional):")
codexProfileLabel.set_use_underline(True)
codexProfileLabel.set_alignment(0, 0.5)
codexBox.pack_start(codexProfileLabel, False, False, 0)
self.codexProfileEntry = Gtk.Entry()
self.codexProfileEntry.set_text(self.config.get('codex_profile', ''))
self.codexProfileEntry.set_placeholder_text("Profile name from ~/.codex/config.toml")
self.codexProfileEntry.set_can_focus(True)
self.codexProfileEntry.get_accessible().set_name("Codex profile")
self.codexProfileEntry.get_accessible().set_description("Optional Codex CLI profile to load")
codexProfileLabel.set_mnemonic_widget(self.codexProfileEntry)
codexBox.pack_start(self.codexProfileEntry, False, False, 0)
codexModelsLabel = Gtk.Label("Recently used models:")
codexModelsLabel.set_alignment(0, 0.5)
codexBox.pack_start(codexModelsLabel, False, False, 0)
self.codexModelsBox = Gtk.VBox(spacing=5)
self.codexModelsBox.set_border_width(5)
codexBox.pack_start(self.codexModelsBox, False, False, 0)
self.codexModelRadios = []
self.codexModelGroup = None
self.codexRefreshButton = Gtk.Button("_Discover Models (from history)")
self.codexRefreshButton.set_use_underline(True)
self.codexRefreshButton.connect("clicked", self.on_refresh_codex_models)
self.codexRefreshButton.set_can_focus(True)
self.codexRefreshButton.get_accessible().set_description("Scan ~/.codex/sessions logs to find recently used Codex models")
codexBox.pack_start(self.codexRefreshButton, False, False, 0)
self.codexModelStatus = Gtk.Label("")
self.codexModelStatus.set_alignment(0, 0.5)
self.codexModelStatus.set_line_wrap(True)
self.codexModelStatus.set_text("Use Discover Models to import options from ~/.codex/sessions.")
codexBox.pack_start(self.codexModelStatus, False, False, 0)
self.codexFrame.add(codexBox)
vbox.pack_start(self.codexFrame, False, False, 0)
# Ollama settings
self.ollamaFrame = Gtk.Frame(label="Ollama Settings")
@@ -844,11 +1017,14 @@ class AiAssistant(Gtk.Window):
provider = self.config.get('provider')
if provider == 'claude-code':
self.claudeRadio.set_active(True)
elif provider == 'codex-cli':
self.codexRadio.set_active(True)
else:
self.ollamaRadio.set_active(True)
self.on_provider_changed(None)
self.refresh_ollama_models()
self.on_refresh_codex_models()
# Set saved models after radio buttons are created
self.set_saved_model_selections()
@@ -872,11 +1048,20 @@ class AiAssistant(Gtk.Window):
def on_provider_changed(self, widget):
"""Handle provider radio button change"""
if not hasattr(self, 'codexFrame') or not hasattr(self, 'ollamaFrame'):
return
if self.claudeRadio.get_active():
self.ollamaFrame.set_sensitive(False)
self.codexFrame.set_sensitive(False)
self.update_status("Claude Code selected")
elif self.codexRadio.get_active():
self.ollamaFrame.set_sensitive(False)
self.codexFrame.set_sensitive(True)
self.update_status("Codex CLI selected")
else:
self.ollamaFrame.set_sensitive(True)
self.codexFrame.set_sensitive(False)
self.update_status("Ollama selected")
def refresh_ollama_models(self):
@@ -957,6 +1142,47 @@ class AiAssistant(Gtk.Window):
self.update_status("Ollama running but no models found")
else:
self.update_status("Ollama not available")
def on_refresh_codex_models(self, widget=None):
"""Refresh Codex model radio buttons from recent sessions"""
for radio in self.codexModelRadios:
self.codexModelsBox.remove(radio)
self.codexModelRadios = []
self.codexModelGroup = None
models = self.codexInterface.discover_available_models()
if models:
for i, model in enumerate(models):
if i == 0:
radio = Gtk.RadioButton.new_with_label(None, model)
self.codexModelGroup = radio
else:
radio = Gtk.RadioButton.new_with_label_from_widget(self.codexModelGroup, model)
radio.set_can_focus(True)
radio.get_accessible().set_description(f"Select Codex model {model}")
radio.connect("toggled", self.on_codex_model_radio_changed)
self.codexModelRadios.append(radio)
self.codexModelsBox.pack_start(radio, False, False, 0)
self.codexModelsBox.show_all()
selected = self.codexModelEntry.get_text().strip()
if selected:
for radio in self.codexModelRadios:
if radio.get_label() == selected:
radio.set_active(True)
break
else:
self.codexModelRadios[0].set_active(True)
self.codexModelStatus.set_text(f"Found {len(models)} recent Codex model(s) from ~/.codex/sessions.")
else:
self.codexModelStatus.set_text("No recent Codex models found. Run Codex CLI once to populate history.")
def on_codex_model_radio_changed(self, widget):
"""Update Codex model entry when a radio button is selected"""
if widget.get_active():
self.codexModelEntry.set_text(widget.get_label())
def on_text_model_changed(self, widget):
"""Handle text model radio button change"""
@@ -982,10 +1208,14 @@ class AiAssistant(Gtk.Window):
"""Save settings to configuration"""
if self.claudeRadio.get_active():
self.config.set('provider', 'claude-code')
elif self.codexRadio.get_active():
self.config.set('provider', 'codex-cli')
else:
self.config.set('provider', 'ollama')
self.config.set('ollama_host', self.hostEntry.get_text())
self.config.set('codex_model', self.codexModelEntry.get_text().strip())
self.config.set('codex_profile', self.codexProfileEntry.get_text().strip())
# Save selected text model
for radio in self.textModelRadios:
@@ -1024,6 +1254,9 @@ class AiAssistant(Gtk.Window):
provider = self.config.get('provider')
if provider == 'claude-code':
return "Claude"
elif provider == 'codex-cli':
model = self.config.get('codex_model', '').strip()
return f"Codex ({model})" if model else "Codex"
elif provider == 'ollama':
model = self.config.get('ollama_model', 'llama2')
return f"Ollama ({model})" if model != 'llama2' else "Ollama"
@@ -1127,6 +1360,31 @@ class AiAssistant(Gtk.Window):
return response
finally:
self.hide_processing()
elif provider == 'codex-cli':
if not self.codexInterface.is_available():
return "Error: Codex CLI is not available. Please install or configure Codex CLI."
model = self.config.get('codex_model', '').strip() or None
profile = self.config.get('codex_profile', '').strip() or None
if model:
self.current_processing_model = model
else:
self.current_processing_model = None
self.show_processing("codex-cli")
try:
response = self.codexInterface.send_message(
message,
model=model,
profile=profile,
context=system_context,
image_path=image_path
)
return response
finally:
self.hide_processing()
elif provider == 'ollama':
if not self.ollamaInterface.is_available():