fetch_ml/cmd/tui/internal/controller/controller.go
Jeremie Fraeys ed7b5032a9
Some checks failed
Build CLI with Embedded SQLite / build (arm64, aarch64-linux) (push) Waiting to run
Build CLI with Embedded SQLite / build (x86_64, x86_64-linux) (push) Waiting to run
Build CLI with Embedded SQLite / build-macos (arm64) (push) Waiting to run
Build CLI with Embedded SQLite / build-macos (x86_64) (push) Waiting to run
CI/CD Pipeline / Docker Build (push) Blocked by required conditions
Security Scan / Security Analysis (push) Waiting to run
Security Scan / Native Library Security (push) Waiting to run
Checkout test / test (push) Successful in 6s
CI with Native Libraries / Check Build Environment (push) Successful in 12s
CI/CD Pipeline / Test (push) Failing after 5m15s
CI/CD Pipeline / Dev Compose Smoke Test (push) Has been skipped
CI/CD Pipeline / Build (push) Has been skipped
CI/CD Pipeline / Test Scripts (push) Has been skipped
CI/CD Pipeline / Security Scan (push) Failing after 4m49s
Contract Tests / Spec Drift Detection (push) Failing after 13s
Contract Tests / API Contract Tests (push) Has been skipped
Deploy API Docs / Build API Documentation (push) Failing after 36s
Deploy API Docs / Deploy to GitHub Pages (push) Has been skipped
Documentation / build-and-publish (push) Failing after 26s
CI with Native Libraries / Build and Test Native Libraries (push) Has been cancelled
CI with Native Libraries / Build Release Libraries (push) Has been cancelled
build: update Makefile and TUI controller integration
2026-02-21 18:00:09 -05:00

582 lines
17 KiB
Go

package controller
import (
"fmt"
"strings"
"time"
"github.com/charmbracelet/bubbles/key"
"github.com/charmbracelet/bubbles/list"
tea "github.com/charmbracelet/bubbletea"
"github.com/jfraeys/fetch_ml/cmd/tui/internal/config"
"github.com/jfraeys/fetch_ml/cmd/tui/internal/model"
"github.com/jfraeys/fetch_ml/cmd/tui/internal/services"
"github.com/jfraeys/fetch_ml/internal/logging"
)
// Controller handles all business logic and state updates
type Controller struct {
config *config.Config
server *services.MLServer
taskQueue *services.TaskQueue
logger *logging.Logger
wsClient *services.WebSocketClient
}
// handleKeyMsg dispatches a keypress according to the current UI mode.
// Free-text input mode and the settings view intercept keys before the
// global bindings and the quit binding are consulted.
func (c *Controller) handleKeyMsg(msg tea.KeyMsg, m model.State) (model.State, tea.Cmd) {
	switch {
	case m.InputMode:
		return c.handleInputModeKey(msg, m)
	case m.ActiveView == model.ViewModeSettings:
		return c.handleSettingsKeys(msg, m)
	case key.Matches(msg, m.Keys.Quit):
		return m, tea.Quit
	}
	globalCmds := c.handleGlobalKeys(msg, &m)
	return c.finalizeUpdate(msg, m, globalCmds...)
}
// handleInputModeKey processes keys while the free-text input is active:
// enter submits the typed text as arguments for the selected job, esc
// discards it, and any other key is forwarded to the input widget.
func (c *Controller) handleInputModeKey(msg tea.KeyMsg, m model.State) (model.State, tea.Cmd) {
	pressed := msg.String()
	if pressed == "enter" || pressed == "esc" {
		// Both keys leave input mode and clear the field.
		typed := m.Input.Value()
		m.Input.SetValue("")
		m.InputMode = false
		if pressed == "enter" {
			if job := getSelectedJob(m); job != nil {
				return m, c.queueJob(job.Name, typed)
			}
		}
		return m, nil
	}
	var cmd tea.Cmd
	m.Input, cmd = m.Input.Update(msg)
	return m, cmd
}
// handleSettingsKeys processes keys while the settings view is active.
// Navigation/selection is delegated to navigateSettings; remaining
// keystrokes are forwarded to the API-key input only when that row is
// focused.
func (c *Controller) handleSettingsKeys(msg tea.KeyMsg, m model.State) (model.State, tea.Cmd) {
	prevIndex := m.SettingsIndex
	cmds := c.navigateSettings(msg, &m)
	// Forward the keystroke to the API-key input only when the field is
	// selected AND this key did not just move the selection there:
	// otherwise pressing "k"/"up" from the row below would both move
	// focus and type the navigation character into the field.
	if m.SettingsIndex == 1 && m.SettingsIndex == prevIndex {
		var inputCmd tea.Cmd
		m.APIKeyInput, inputCmd = m.APIKeyInput.Update(msg)
		cmds = append(cmds, inputCmd, c.updateSettingsContent(m))
	}
	return m, tea.Batch(cmds...)
}
// navigateSettings handles movement and selection within the settings
// view, mutating m in place. Rows are 1-based: index 1 is the API-key
// input, index 2 is the last row. Returns any commands produced by
// re-rendering the settings content or activating a row.
func (c *Controller) navigateSettings(msg tea.KeyMsg, m *model.State) []tea.Cmd {
	var cmds []tea.Cmd
	switch msg.String() {
	case "up", "k":
		// Clamp at the first row (index 1).
		if m.SettingsIndex > 1 {
			m.SettingsIndex--
			cmds = append(cmds, c.updateSettingsContent(*m))
			c.toggleAPIKeyInputFocus(m)
		}
	case "down", "j":
		// Clamp at the last row (index 2).
		if m.SettingsIndex < 2 {
			m.SettingsIndex++
			cmds = append(cmds, c.updateSettingsContent(*m))
			c.toggleAPIKeyInputFocus(m)
		}
	case "enter":
		// Activate the currently selected row.
		if cmd := c.handleSettingsAction(m); cmd != nil {
			cmds = append(cmds, cmd)
		}
	case "esc":
		// Leave settings and return to the jobs view.
		m.ActiveView = model.ViewModeJobs
		m.APIKeyInput.Blur()
	}
	return cmds
}
// toggleAPIKeyInputFocus focuses the API-key text input when it is the
// selected settings row (index 1) and blurs it otherwise.
func (c *Controller) toggleAPIKeyInputFocus(m *model.State) {
	if m.SettingsIndex != 1 {
		m.APIKeyInput.Blur()
		return
	}
	m.APIKeyInput.Focus()
}
// handleGlobalKeys applies the global key bindings: data refresh, view
// switching, and job actions on the currently selected job. It mutates
// m in place and returns any commands to run. Cases are checked in
// order; the first matching binding wins.
func (c *Controller) handleGlobalKeys(msg tea.KeyMsg, m *model.State) []tea.Cmd {
	var cmds []tea.Cmd
	switch {
	case key.Matches(msg, m.Keys.Refresh):
		// Full refresh: mark loading and reload everything.
		m.IsLoading = true
		m.Status = "Refreshing all data..."
		m.LastRefresh = time.Now()
		cmds = append(cmds, c.loadAllData())
	case key.Matches(msg, m.Keys.RefreshGPU):
		m.Status = "Refreshing GPU status..."
		cmds = append(cmds, c.loadGPU())
	case key.Matches(msg, m.Keys.Trigger):
		// Queue the selected job with no extra arguments.
		if job := getSelectedJob(*m); job != nil {
			cmds = append(cmds, c.queueJob(job.Name, ""))
		}
	case key.Matches(msg, m.Keys.TriggerArgs):
		// Open the free-text input so the user can supply arguments;
		// submission is handled by handleInputModeKey.
		if job := getSelectedJob(*m); job != nil {
			m.InputMode = true
			m.Input.Focus()
		}
	case key.Matches(msg, m.Keys.ViewQueue):
		m.ActiveView = model.ViewModeQueue
		cmds = append(cmds, c.showQueue(*m))
	case key.Matches(msg, m.Keys.ViewContainer):
		m.ActiveView = model.ViewModeContainer
		cmds = append(cmds, c.loadContainer())
	case key.Matches(msg, m.Keys.ViewGPU):
		m.ActiveView = model.ViewModeGPU
		cmds = append(cmds, c.loadGPU())
	case key.Matches(msg, m.Keys.ViewJobs):
		m.ActiveView = model.ViewModeJobs
	case key.Matches(msg, m.Keys.ViewSettings):
		// Entering settings pre-selects the API-key row and focuses it.
		m.ActiveView = model.ViewModeSettings
		m.SettingsIndex = 1
		m.APIKeyInput.Focus()
		cmds = append(cmds, c.updateSettingsContent(*m))
	case key.Matches(msg, m.Keys.ViewExperiments):
		m.ActiveView = model.ViewModeExperiments
		cmds = append(cmds, c.loadExperiments())
	case key.Matches(msg, m.Keys.ViewNarrative):
		// Remember the selected job so the narrative view can show it.
		m.ActiveView = model.ViewModeNarrative
		if job := getSelectedJob(*m); job != nil {
			m.SelectedJob = *job
		}
	case key.Matches(msg, m.Keys.ViewTeam):
		m.ActiveView = model.ViewModeTeam
	case key.Matches(msg, m.Keys.ViewExperimentHistory):
		m.ActiveView = model.ViewModeExperimentHistory
		cmds = append(cmds, c.loadExperimentHistory())
	case key.Matches(msg, m.Keys.ViewConfig):
		m.ActiveView = model.ViewModeConfig
		cmds = append(cmds, c.loadConfig())
	case key.Matches(msg, m.Keys.ViewLogs):
		m.ActiveView = model.ViewModeLogs
		if job := getSelectedJob(*m); job != nil {
			cmds = append(cmds, c.loadLogs(job.Name))
		}
	case key.Matches(msg, m.Keys.ViewExport):
		if job := getSelectedJob(*m); job != nil {
			cmds = append(cmds, c.exportJob(job.Name))
		}
	case key.Matches(msg, m.Keys.FilterTeam):
		// Pre-fill the input with "@" for team-member filtering.
		m.InputMode = true
		m.Input.SetValue("@")
		m.Input.Focus()
		m.Status = "Filter by team member: @alice, @bob, @team-ml"
	case key.Matches(msg, m.Keys.Cancel):
		// Cancel requires a task ID, i.e. the job is actually queued/running.
		if job := getSelectedJob(*m); job != nil && job.TaskID != "" {
			cmds = append(cmds, c.cancelTask(job.TaskID))
		}
	case key.Matches(msg, m.Keys.Delete):
		// Only pending jobs can be deleted.
		if job := getSelectedJob(*m); job != nil && job.Status == model.StatusPending {
			cmds = append(cmds, c.deleteJob(job.Name))
		}
	case key.Matches(msg, m.Keys.MarkFailed):
		// Only running jobs can be force-marked as failed.
		if job := getSelectedJob(*m); job != nil && job.Status == model.StatusRunning {
			cmds = append(cmds, c.markFailed(job.Name))
		}
	case key.Matches(msg, m.Keys.Help):
		m.ShowHelp = !m.ShowHelp
	}
	return cmds
}
// applyWindowSize recomputes every widget's dimensions from the new
// terminal size: the job list takes the left third, content panels the
// remaining two thirds. GPU/container panels are short and stacked;
// all other panels get the full available height.
func (c *Controller) applyWindowSize(msg tea.WindowSizeMsg, m model.State) model.State {
	m.Width = msg.Width
	m.Height = msg.Height

	const (
		hMargin = 4 // horizontal padding reserved for borders/gaps
		vMargin = 2 // vertical padding reserved for borders/gaps
	)
	listHeight := msg.Height - vMargin - 8 // 8 rows for header/status chrome
	m.JobList.SetSize(msg.Width/3-hMargin, listHeight)

	panelWidth := msg.Width*2/3 - hMargin - 2
	stackedHeight := (listHeight - 6) / 3 // three stacked mini-panels
	fullHeight := listHeight - 4          // single full-size panel

	// Stacked mini-panels.
	m.GpuView.Width, m.GpuView.Height = panelWidth, stackedHeight
	m.ContainerView.Width, m.ContainerView.Height = panelWidth, stackedHeight

	// Full-height content panels.
	m.QueueView.Width, m.QueueView.Height = panelWidth, fullHeight
	m.SettingsView.Width, m.SettingsView.Height = panelWidth, fullHeight
	m.NarrativeView.Width, m.NarrativeView.Height = panelWidth, fullHeight
	m.TeamView.Width, m.TeamView.Height = panelWidth, fullHeight
	m.ExperimentsView.Width, m.ExperimentsView.Height = panelWidth, fullHeight
	m.ExperimentHistoryView.Width, m.ExperimentHistoryView.Height = panelWidth, fullHeight
	m.ConfigView.Width, m.ConfigView.Height = panelWidth, fullHeight
	m.LogsView.Width, m.LogsView.Height = panelWidth, fullHeight
	return m
}
// handleJobsLoadedMsg stores the freshly loaded jobs, recomputes
// aggregate stats, rebuilds the list items, and clears the loading flag.
func (c *Controller) handleJobsLoadedMsg(msg model.JobsLoadedMsg, m model.State) (model.State, tea.Cmd) {
	m.Jobs = []model.Job(msg)
	calculateJobStats(&m)

	listItems := make([]list.Item, 0, len(m.Jobs))
	for _, job := range m.Jobs {
		listItems = append(listItems, job)
	}
	refreshCmd := m.JobList.SetItems(listItems)

	m.Status = formatStatus(m)
	m.IsLoading = false
	return c.finalizeUpdate(msg, m, refreshCmd)
}
// handleTasksLoadedMsg stores the queued tasks and refreshes the status
// line to reflect the new counts.
func (c *Controller) handleTasksLoadedMsg(msg model.TasksLoadedMsg, m model.State) (model.State, tea.Cmd) {
	m.QueuedTasks = []*model.Task(msg)
	m.Status = formatStatus(m)
	return c.finalizeUpdate(msg, m)
}
// handleGPUContent replaces the GPU panel's content and scrolls it back
// to the top.
func (c *Controller) handleGPUContent(msg model.GpuLoadedMsg, m model.State) (model.State, tea.Cmd) {
	text := string(msg)
	m.GpuView.SetContent(text)
	m.GpuView.GotoTop()
	return c.finalizeUpdate(msg, m)
}
// handleContainerContent replaces the container panel's content and
// scrolls it back to the top.
func (c *Controller) handleContainerContent(msg model.ContainerLoadedMsg, m model.State) (model.State, tea.Cmd) {
	text := string(msg)
	m.ContainerView.SetContent(text)
	m.ContainerView.GotoTop()
	return c.finalizeUpdate(msg, m)
}
// handleQueueContent replaces the queue panel's content and scrolls it
// back to the top.
func (c *Controller) handleQueueContent(msg model.QueueLoadedMsg, m model.State) (model.State, tea.Cmd) {
	text := string(msg)
	m.QueueView.SetContent(text)
	m.QueueView.GotoTop()
	return c.finalizeUpdate(msg, m)
}
// handleStatusMsg applies a status notification to the state. Non-error
// messages clear any previous error and become the status line; error
// messages are stored in ErrorMsg with a generic status line.
func (c *Controller) handleStatusMsg(msg model.StatusMsg, m model.State) (model.State, tea.Cmd) {
	m.ErrorMsg = ""
	m.Status = msg.Text
	if msg.Level == "error" {
		m.ErrorMsg = msg.Text
		m.Status = "Error occurred - check status"
	}
	return c.finalizeUpdate(msg, m)
}
// handleTickMsg runs once per tick: it tracks the observed frame
// interval (for the displayed refresh rate), triggers a data reload at
// most every 500ms, and schedules the next tick.
func (c *Controller) handleTickMsg(msg model.TickMsg, m model.State) (model.State, tea.Cmd) {
	var cmds []tea.Cmd

	// Single clock read for the whole handler; the original called
	// time.Now() three times, which could skew the elapsed comparisons.
	now := time.Now()
	if !m.LastFrameTime.IsZero() {
		elapsed := now.Sub(m.LastFrameTime).Milliseconds()
		if elapsed > 0 {
			// Smooth the rate with a simple running average.
			m.RefreshRate = (m.RefreshRate*float64(m.FrameCount) + float64(elapsed)) / float64(m.FrameCount+1)
			m.FrameCount++
			if m.FrameCount > 100 {
				// Periodically restart the average so very old samples
				// cannot dominate forever.
				m.FrameCount = 1
				m.RefreshRate = float64(elapsed)
			}
		}
	}
	m.LastFrameTime = now

	// 500ms refresh target for real-time updates; skip while a load is
	// already in flight.
	if now.Sub(m.LastRefresh) > 500*time.Millisecond && !m.IsLoading {
		m.LastRefresh = now
		cmds = append(cmds, c.loadAllData())
	}
	cmds = append(cmds, tickCmd())
	return c.finalizeUpdate(msg, m, cmds...)
}
// finalizeUpdate forwards msg to every always-active widget (job list,
// viewports, spinner), collecting their commands together with any
// extra commands supplied by the caller, and returns the batched result.
func (c *Controller) finalizeUpdate(msg tea.Msg, m model.State, extraCmds ...tea.Cmd) (model.State, tea.Cmd) {
	// One widget command per Update call below, plus the caller's.
	cmds := make([]tea.Cmd, 0, len(extraCmds)+6)
	cmds = append(cmds, extraCmds...)

	var cmd tea.Cmd
	m.JobList, cmd = m.JobList.Update(msg)
	cmds = append(cmds, cmd)
	m.GpuView, cmd = m.GpuView.Update(msg)
	cmds = append(cmds, cmd)
	m.ContainerView, cmd = m.ContainerView.Update(msg)
	cmds = append(cmds, cmd)
	m.QueueView, cmd = m.QueueView.Update(msg)
	cmds = append(cmds, cmd)
	m.ExperimentsView, cmd = m.ExperimentsView.Update(msg)
	cmds = append(cmds, cmd)
	m.Spinner, cmd = m.Spinner.Update(msg)
	cmds = append(cmds, cmd)

	return m, tea.Batch(cmds...)
}
// New creates a new Controller instance
func New(
cfg *config.Config,
srv *services.MLServer,
tq *services.TaskQueue,
logger *logging.Logger,
) *Controller {
// Create WebSocket client for real-time updates
wsClient := services.NewWebSocketClient(cfg.ServerURL, "", logger)
return &Controller{
config: cfg,
server: srv,
taskQueue: tq,
logger: logger,
wsClient: wsClient,
}
}
// Init initializes the TUI and returns initial commands
func (c *Controller) Init() tea.Cmd {
// Connect WebSocket for real-time updates
if err := c.wsClient.Connect(); err != nil {
c.logger.Error("WebSocket connection failed", "error", err)
}
return tea.Batch(
tea.SetWindowTitle("FetchML"),
c.loadAllData(),
tickCmd(),
)
}
// Update routes an incoming Bubble Tea message to its handler and
// returns the updated state plus any follow-up commands. Unrecognized
// messages still flow through finalizeUpdate so widgets stay animated.
func (c *Controller) Update(msg tea.Msg, m model.State) (model.State, tea.Cmd) {
	switch typed := msg.(type) {
	case tea.KeyMsg:
		return c.handleKeyMsg(typed, m)
	case tea.WindowSizeMsg:
		// Only apply window size on first render, then keep constant
		if m.Width == 0 && m.Height == 0 {
			updated := c.applyWindowSize(typed, m)
			return c.finalizeUpdate(msg, updated)
		}
		return c.finalizeUpdate(msg, m)
	case model.JobsLoadedMsg:
		return c.handleJobsLoadedMsg(typed, m)
	case model.TasksLoadedMsg:
		return c.handleTasksLoadedMsg(typed, m)
	case model.GpuLoadedMsg:
		return c.handleGPUContent(typed, m)
	case model.ContainerLoadedMsg:
		return c.handleContainerContent(typed, m)
	case model.QueueLoadedMsg:
		return c.handleQueueContent(typed, m)
	case model.DatasetsLoadedMsg:
		// Rendering extracted to formatDatasets to keep this switch flat.
		m.DatasetView.SetContent(formatDatasets(typed))
		m.DatasetView.GotoTop()
		return c.finalizeUpdate(msg, m)
	case model.SettingsContentMsg:
		m.SettingsView.SetContent(string(typed))
		return c.finalizeUpdate(msg, m)
	case ExperimentsLoadedMsg:
		m.ExperimentsView.SetContent(string(typed))
		m.ExperimentsView.GotoTop()
		return c.finalizeUpdate(msg, m)
	case ExperimentHistoryLoadedMsg:
		m.ExperimentHistoryView.SetContent(string(typed))
		m.ExperimentHistoryView.GotoTop()
		return c.finalizeUpdate(msg, m)
	case ConfigLoadedMsg:
		m.ConfigView.SetContent(string(typed))
		m.ConfigView.GotoTop()
		return c.finalizeUpdate(msg, m)
	case LogsLoadedMsg:
		m.LogsView.SetContent(string(typed))
		m.LogsView.GotoTop()
		return c.finalizeUpdate(msg, m)
	case model.SettingsUpdateMsg:
		return c.finalizeUpdate(msg, m)
	case model.StatusMsg:
		return c.handleStatusMsg(typed, m)
	case model.TickMsg:
		return c.handleTickMsg(typed, m)
	case model.JobUpdateMsg:
		// Real-time job status update from WebSocket; refresh the job
		// list so the new status is visible.
		m.Status = fmt.Sprintf("Job %s: %s", typed.JobName, typed.Status)
		return m, c.loadAllData()
	case model.GPUUpdateMsg:
		// Throttle GPU updates to 1/second (humans can't perceive faster)
		if time.Since(m.LastGPUUpdate) > 1*time.Second {
			m.LastGPUUpdate = time.Now()
			return c.finalizeUpdate(msg, m)
		}
		return m, nil
	default:
		return c.finalizeUpdate(msg, m)
	}
}

// formatDatasets renders the loaded datasets into the plain-text body
// shown in the dataset viewport.
func formatDatasets(datasets model.DatasetsLoadedMsg) string {
	var content strings.Builder
	content.WriteString("Available Datasets\n")
	content.WriteString(strings.Repeat("═", 50) + "\n\n")
	if len(datasets) == 0 {
		content.WriteString("📭 No datasets found\n\n")
		content.WriteString("Datasets will appear here when available\n")
		content.WriteString("in the data directory.")
		return content.String()
	}
	for i, ds := range datasets {
		content.WriteString(fmt.Sprintf("%d. 📁 %s\n", i+1, ds.Name))
		content.WriteString(fmt.Sprintf(" Location: %s\n", ds.Location))
		content.WriteString(fmt.Sprintf(" Size: %d bytes\n", ds.SizeBytes))
		content.WriteString(fmt.Sprintf(" Last Access: %s\n\n", ds.LastAccess.Format("2006-01-02 15:04")))
	}
	return content.String()
}
// ExperimentsLoadedMsg is sent when the formatted experiments listing is loaded.
type ExperimentsLoadedMsg string

// ExperimentHistoryLoadedMsg is sent when experiment history is loaded.
type ExperimentHistoryLoadedMsg string

// ConfigLoadedMsg is sent when the rendered configuration view is loaded.
type ConfigLoadedMsg string
// loadExperiments returns a command that lists experiments from the
// task queue and renders each one's details into a single text blob.
// Per-experiment lookup errors are reported inline rather than aborting
// the whole listing.
func (c *Controller) loadExperiments() tea.Cmd {
	return func() tea.Msg {
		commitIDs, err := c.taskQueue.ListExperiments()
		if err != nil {
			return model.StatusMsg{Level: "error", Text: fmt.Sprintf("Failed to list experiments: %v", err)}
		}
		if len(commitIDs) == 0 {
			return ExperimentsLoadedMsg("Experiments:\n\nNo experiments found.")
		}
		// strings.Builder avoids the quadratic cost of string += in a loop.
		var b strings.Builder
		b.WriteString("Experiments:\n\n")
		for _, commitID := range commitIDs {
			details, err := c.taskQueue.GetExperimentDetails(commitID)
			if err != nil {
				fmt.Fprintf(&b, "Error loading %s: %v\n\n", commitID, err)
				continue
			}
			b.WriteString(details + "\n----------------------------------------\n\n")
		}
		return ExperimentsLoadedMsg(b.String())
	}
}
// loadExperimentHistory returns a command producing placeholder content
// for the experiment-history view until the backing API exists.
func (c *Controller) loadExperimentHistory() tea.Cmd {
	const placeholder = "Experiment History & Annotations\n\n" +
		"This view will show:\n" +
		"- Previous experiment runs\n" +
		"- Annotations and notes\n" +
		"- Config snapshots\n" +
		"- Side-by-side comparisons\n\n" +
		"(Requires API: GET /api/experiments/:id/history)"
	return func() tea.Msg {
		return ExperimentHistoryLoadedMsg(placeholder)
	}
}
// loadConfig returns a command that builds the read-only configuration
// view: a boxed summary of settings that differ from their defaults,
// followed by the full configuration dump.
func (c *Controller) loadConfig() tea.Cmd {
	return func() tea.Msg {
		cfg := c.config
		var out strings.Builder
		out.WriteString("⚙️ Config View (Read-Only)\n\n")
		out.WriteString("┌─ Changes from Defaults ─────────────────────┐\n")

		// Collect only the settings that differ from their defaults.
		var changed []string
		note := func(differs bool, line string) {
			if differs {
				changed = append(changed, line)
			}
		}
		note(cfg.Host != "", fmt.Sprintf("│ Host: %s", cfg.Host))
		note(cfg.Port != 0 && cfg.Port != 22, fmt.Sprintf("│ Port: %d (default: 22)", cfg.Port))
		note(cfg.BasePath != "", fmt.Sprintf("│ Base Path: %s", cfg.BasePath))
		note(cfg.RedisAddr != "" && cfg.RedisAddr != "localhost:6379", fmt.Sprintf("│ Redis: %s (default: localhost:6379)", cfg.RedisAddr))
		note(cfg.ServerURL != "", fmt.Sprintf("│ Server: %s", cfg.ServerURL))

		if len(changed) == 0 {
			out.WriteString("│ (Using all default settings)\n")
		}
		for _, line := range changed {
			out.WriteString(line + "\n")
		}

		out.WriteString("└─────────────────────────────────────────────┘\n\n")
		out.WriteString("Full Configuration:\n")
		fmt.Fprintf(&out, " Host: %s\n", cfg.Host)
		fmt.Fprintf(&out, " Port: %d\n", cfg.Port)
		fmt.Fprintf(&out, " Base Path: %s\n", cfg.BasePath)
		fmt.Fprintf(&out, " Redis: %s\n", cfg.RedisAddr)
		fmt.Fprintf(&out, " Server: %s\n", cfg.ServerURL)
		fmt.Fprintf(&out, " User: %s\n\n", cfg.User)
		out.WriteString("Use CLI to modify: ml config set <key> <value>")
		return ConfigLoadedMsg(out.String())
	}
}
// LogsLoadedMsg is sent when log content for a job is loaded.
type LogsLoadedMsg string
// loadLogs returns a command producing placeholder log content for the
// named job until real log streaming is wired to the API.
func (c *Controller) loadLogs(jobName string) tea.Cmd {
	placeholder := "📜 Logs for " + jobName + "\n\n" +
		"Log streaming will appear here...\n\n" +
		"(Requires API: GET /api/jobs/" + jobName + "/logs?follow=true)"
	return func() tea.Msg {
		return LogsLoadedMsg(placeholder)
	}
}
// ExportCompletedMsg is sent when export is complete
type ExportCompletedMsg struct {
	JobName string // name of the exported job
	Path    string // filesystem path of the exported artifact
}
// exportJob returns a command that reports an export-in-progress status
// for the named job (the export itself is not performed here yet).
func (c *Controller) exportJob(jobName string) tea.Cmd {
	return func() tea.Msg {
		progress := model.StatusMsg{Level: "info"}
		progress.Text = "Exporting " + jobName + "... (anonymized)"
		return progress
	}
}