Phase 7 of the monorepo maintainability plan: New files created: - model/jobs.go - Job type, JobStatus constants, list.Item interface - model/messages.go - tea.Msg types (JobsLoadedMsg, StatusMsg, TickMsg, etc.) - model/styles.go - NewJobListDelegate(), JobListTitleStyle(), SpinnerStyle() - model/keys.go - KeyMap struct, DefaultKeys() function Modified files: - model/state.go - reduced from 226 to ~130 lines - Removed: Job, JobStatus, KeyMap, Keys, inline styles - Kept: State struct, domain re-exports, ViewMode, DatasetInfo, InitialState() - controller/commands.go - use model. prefix for message types - controller/controller.go - use model. prefix for message types - controller/settings.go - use model.SettingsContentMsg Deleted files: - controller/keys.go (moved to model/keys.go since State references KeyMap) Result: - No file >150 lines in model/ package - Single concern per file: state, jobs, messages, styles, keys - All 41 test packages pass
374 lines
9.9 KiB
Go
package controller
|
|
|
|
import (
	"fmt"
	"strings"
	"time"

	"github.com/charmbracelet/bubbles/key"
	"github.com/charmbracelet/bubbles/list"
	tea "github.com/charmbracelet/bubbletea"

	"github.com/jfraeys/fetch_ml/cmd/tui/internal/config"
	"github.com/jfraeys/fetch_ml/cmd/tui/internal/model"
	"github.com/jfraeys/fetch_ml/cmd/tui/internal/services"
	"github.com/jfraeys/fetch_ml/internal/logging"
)
|
|
|
|
// Controller handles all business logic and state updates.
// It owns no UI state itself; it receives a model.State value, mutates a
// copy, and returns it along with any tea.Cmd side effects to run.
type Controller struct {
	config    *config.Config      // application configuration
	server    *services.MLServer  // ML server service dependency
	taskQueue *services.TaskQueue // task queue used for jobs and experiments
	logger    *logging.Logger     // logger for controller events
}
|
|
|
|
func (c *Controller) handleKeyMsg(msg tea.KeyMsg, m model.State) (model.State, tea.Cmd) {
|
|
if m.InputMode {
|
|
return c.handleInputModeKey(msg, m)
|
|
}
|
|
|
|
if m.ActiveView == model.ViewModeSettings {
|
|
return c.handleSettingsKeys(msg, m)
|
|
}
|
|
|
|
if key.Matches(msg, m.Keys.Quit) {
|
|
return m, tea.Quit
|
|
}
|
|
|
|
cmds := c.handleGlobalKeys(msg, &m)
|
|
return c.finalizeUpdate(msg, m, cmds...)
|
|
}
|
|
|
|
func (c *Controller) handleInputModeKey(msg tea.KeyMsg, m model.State) (model.State, tea.Cmd) {
|
|
switch msg.String() {
|
|
case "enter":
|
|
args := m.Input.Value()
|
|
m.Input.SetValue("")
|
|
m.InputMode = false
|
|
if job := getSelectedJob(m); job != nil {
|
|
return m, c.queueJob(job.Name, args)
|
|
}
|
|
return m, nil
|
|
case "esc":
|
|
m.InputMode = false
|
|
m.Input.SetValue("")
|
|
return m, nil
|
|
default:
|
|
var cmd tea.Cmd
|
|
m.Input, cmd = m.Input.Update(msg)
|
|
return m, cmd
|
|
}
|
|
}
|
|
|
|
func (c *Controller) handleSettingsKeys(msg tea.KeyMsg, m model.State) (model.State, tea.Cmd) {
|
|
cmds := c.navigateSettings(msg, &m)
|
|
if m.SettingsIndex == 1 {
|
|
var inputCmd tea.Cmd
|
|
m.APIKeyInput, inputCmd = m.APIKeyInput.Update(msg)
|
|
cmds = append(cmds, inputCmd, c.updateSettingsContent(m))
|
|
}
|
|
return m, tea.Batch(cmds...)
|
|
}
|
|
|
|
func (c *Controller) navigateSettings(msg tea.KeyMsg, m *model.State) []tea.Cmd {
|
|
var cmds []tea.Cmd
|
|
switch msg.String() {
|
|
case "up", "k":
|
|
if m.SettingsIndex > 1 {
|
|
m.SettingsIndex--
|
|
cmds = append(cmds, c.updateSettingsContent(*m))
|
|
c.toggleAPIKeyInputFocus(m)
|
|
}
|
|
case "down", "j":
|
|
if m.SettingsIndex < 2 {
|
|
m.SettingsIndex++
|
|
cmds = append(cmds, c.updateSettingsContent(*m))
|
|
c.toggleAPIKeyInputFocus(m)
|
|
}
|
|
case "enter":
|
|
if cmd := c.handleSettingsAction(m); cmd != nil {
|
|
cmds = append(cmds, cmd)
|
|
}
|
|
case "esc":
|
|
m.ActiveView = model.ViewModeJobs
|
|
m.APIKeyInput.Blur()
|
|
}
|
|
return cmds
|
|
}
|
|
|
|
func (c *Controller) toggleAPIKeyInputFocus(m *model.State) {
|
|
if m.SettingsIndex == 1 {
|
|
m.APIKeyInput.Focus()
|
|
} else {
|
|
m.APIKeyInput.Blur()
|
|
}
|
|
}
|
|
|
|
// handleGlobalKeys processes the application-wide key bindings that apply
// outside of input mode and the settings view. It mutates m in place and
// returns any commands produced by the matched action. Case order matters
// if any key bindings overlap, so cases are evaluated top to bottom.
func (c *Controller) handleGlobalKeys(msg tea.KeyMsg, m *model.State) []tea.Cmd {
	var cmds []tea.Cmd

	switch {
	case key.Matches(msg, m.Keys.Refresh):
		// Full refresh: reload all data and reset the auto-refresh timer.
		m.IsLoading = true
		m.Status = "Refreshing all data..."
		m.LastRefresh = time.Now()
		cmds = append(cmds, c.loadAllData())
	case key.Matches(msg, m.Keys.RefreshGPU):
		m.Status = "Refreshing GPU status..."
		cmds = append(cmds, c.loadGPU())
	case key.Matches(msg, m.Keys.Trigger):
		// Queue the selected job with no extra arguments.
		if job := getSelectedJob(*m); job != nil {
			cmds = append(cmds, c.queueJob(job.Name, ""))
		}
	case key.Matches(msg, m.Keys.TriggerArgs):
		// Enter input mode so the user can type arguments before queueing.
		if job := getSelectedJob(*m); job != nil {
			m.InputMode = true
			m.Input.Focus()
		}
	case key.Matches(msg, m.Keys.ViewQueue):
		m.ActiveView = model.ViewModeQueue
		cmds = append(cmds, c.showQueue(*m))
	case key.Matches(msg, m.Keys.ViewContainer):
		m.ActiveView = model.ViewModeContainer
		cmds = append(cmds, c.loadContainer())
	case key.Matches(msg, m.Keys.ViewGPU):
		m.ActiveView = model.ViewModeGPU
		cmds = append(cmds, c.loadGPU())
	case key.Matches(msg, m.Keys.ViewJobs):
		m.ActiveView = model.ViewModeJobs
	case key.Matches(msg, m.Keys.ViewSettings):
		// Settings opens with the API-key row (index 1) selected and focused.
		m.ActiveView = model.ViewModeSettings
		m.SettingsIndex = 1
		m.APIKeyInput.Focus()
		cmds = append(cmds, c.updateSettingsContent(*m))
	case key.Matches(msg, m.Keys.ViewExperiments):
		m.ActiveView = model.ViewModeExperiments
		cmds = append(cmds, c.loadExperiments())
	case key.Matches(msg, m.Keys.Cancel):
		// Cancel requires a job with an associated task ID.
		if job := getSelectedJob(*m); job != nil && job.TaskID != "" {
			cmds = append(cmds, c.cancelTask(job.TaskID))
		}
	case key.Matches(msg, m.Keys.Delete):
		// Only pending jobs may be deleted.
		if job := getSelectedJob(*m); job != nil && job.Status == model.StatusPending {
			cmds = append(cmds, c.deleteJob(job.Name))
		}
	case key.Matches(msg, m.Keys.MarkFailed):
		// Only running jobs may be manually marked as failed.
		if job := getSelectedJob(*m); job != nil && job.Status == model.StatusRunning {
			cmds = append(cmds, c.markFailed(job.Name))
		}
	case key.Matches(msg, m.Keys.Help):
		m.ShowHelp = !m.ShowHelp
	}

	return cmds
}
|
|
|
|
func (c *Controller) applyWindowSize(msg tea.WindowSizeMsg, m model.State) model.State {
|
|
m.Width = msg.Width
|
|
m.Height = msg.Height
|
|
|
|
h, v := 4, 2
|
|
listHeight := msg.Height - v - 8
|
|
m.JobList.SetSize(msg.Width/3-h, listHeight)
|
|
|
|
panelWidth := msg.Width*2/3 - h - 2
|
|
panelHeight := (listHeight - 6) / 3
|
|
|
|
m.GpuView.Width = panelWidth
|
|
m.GpuView.Height = panelHeight
|
|
m.ContainerView.Width = panelWidth
|
|
m.ContainerView.Height = panelHeight
|
|
m.QueueView.Width = panelWidth
|
|
m.QueueView.Height = listHeight - 4
|
|
m.SettingsView.Width = panelWidth
|
|
m.SettingsView.Height = listHeight - 4
|
|
m.ExperimentsView.Width = panelWidth
|
|
m.ExperimentsView.Height = listHeight - 4
|
|
|
|
return m
|
|
}
|
|
|
|
func (c *Controller) handleJobsLoadedMsg(msg model.JobsLoadedMsg, m model.State) (model.State, tea.Cmd) {
|
|
m.Jobs = []model.Job(msg)
|
|
calculateJobStats(&m)
|
|
|
|
items := make([]list.Item, len(m.Jobs))
|
|
for i, job := range m.Jobs {
|
|
items[i] = job
|
|
}
|
|
|
|
setItemsCmd := m.JobList.SetItems(items)
|
|
m.Status = formatStatus(m)
|
|
m.IsLoading = false
|
|
return c.finalizeUpdate(msg, m, setItemsCmd)
|
|
}
|
|
|
|
func (c *Controller) handleTasksLoadedMsg(
|
|
msg model.TasksLoadedMsg,
|
|
m model.State,
|
|
) (model.State, tea.Cmd) {
|
|
m.QueuedTasks = []*model.Task(msg)
|
|
m.Status = formatStatus(m)
|
|
return c.finalizeUpdate(msg, m)
|
|
}
|
|
|
|
func (c *Controller) handleGPUContent(msg model.GpuLoadedMsg, m model.State) (model.State, tea.Cmd) {
|
|
m.GpuView.SetContent(string(msg))
|
|
m.GpuView.GotoTop()
|
|
return c.finalizeUpdate(msg, m)
|
|
}
|
|
|
|
func (c *Controller) handleContainerContent(
|
|
msg model.ContainerLoadedMsg,
|
|
m model.State,
|
|
) (model.State, tea.Cmd) {
|
|
m.ContainerView.SetContent(string(msg))
|
|
m.ContainerView.GotoTop()
|
|
return c.finalizeUpdate(msg, m)
|
|
}
|
|
|
|
func (c *Controller) handleQueueContent(msg model.QueueLoadedMsg, m model.State) (model.State, tea.Cmd) {
|
|
m.QueueView.SetContent(string(msg))
|
|
m.QueueView.GotoTop()
|
|
return c.finalizeUpdate(msg, m)
|
|
}
|
|
|
|
func (c *Controller) handleStatusMsg(msg model.StatusMsg, m model.State) (model.State, tea.Cmd) {
|
|
if msg.Level == "error" {
|
|
m.ErrorMsg = msg.Text
|
|
m.Status = "Error occurred - check status"
|
|
} else {
|
|
m.ErrorMsg = ""
|
|
m.Status = msg.Text
|
|
}
|
|
return c.finalizeUpdate(msg, m)
|
|
}
|
|
|
|
func (c *Controller) handleTickMsg(msg model.TickMsg, m model.State) (model.State, tea.Cmd) {
|
|
var cmds []tea.Cmd
|
|
if time.Since(m.LastRefresh) > 10*time.Second && !m.IsLoading {
|
|
m.LastRefresh = time.Now()
|
|
cmds = append(cmds, c.loadAllData())
|
|
}
|
|
cmds = append(cmds, tickCmd())
|
|
return c.finalizeUpdate(msg, m, cmds...)
|
|
}
|
|
|
|
func (c *Controller) finalizeUpdate(
|
|
msg tea.Msg,
|
|
m model.State,
|
|
extraCmds ...tea.Cmd,
|
|
) (model.State, tea.Cmd) {
|
|
cmds := append([]tea.Cmd{}, extraCmds...)
|
|
|
|
var cmd tea.Cmd
|
|
m.JobList, cmd = m.JobList.Update(msg)
|
|
cmds = append(cmds, cmd)
|
|
|
|
m.GpuView, cmd = m.GpuView.Update(msg)
|
|
cmds = append(cmds, cmd)
|
|
|
|
m.ContainerView, cmd = m.ContainerView.Update(msg)
|
|
cmds = append(cmds, cmd)
|
|
|
|
m.QueueView, cmd = m.QueueView.Update(msg)
|
|
cmds = append(cmds, cmd)
|
|
|
|
m.ExperimentsView, cmd = m.ExperimentsView.Update(msg)
|
|
cmds = append(cmds, cmd)
|
|
|
|
var spinCmd tea.Cmd
|
|
m.Spinner, spinCmd = m.Spinner.Update(msg)
|
|
cmds = append(cmds, spinCmd)
|
|
|
|
return m, tea.Batch(cmds...)
|
|
}
|
|
|
|
// New creates a new Controller instance
|
|
func New(
|
|
cfg *config.Config,
|
|
srv *services.MLServer,
|
|
tq *services.TaskQueue,
|
|
logger *logging.Logger,
|
|
) *Controller {
|
|
return &Controller{
|
|
config: cfg,
|
|
server: srv,
|
|
taskQueue: tq,
|
|
logger: logger,
|
|
}
|
|
}
|
|
|
|
// Init initializes the TUI and returns initial commands
|
|
func (c *Controller) Init() tea.Cmd {
|
|
return tea.Batch(
|
|
tea.SetWindowTitle("FetchML"),
|
|
c.loadAllData(),
|
|
tickCmd(),
|
|
)
|
|
}
|
|
|
|
// Update handles all messages and updates the state. It is the single
// dispatch point of the Elm-style update loop: each message type is routed
// to its dedicated handler, and unrecognized messages still flow through
// finalizeUpdate so child components can react to them.
func (c *Controller) Update(msg tea.Msg, m model.State) (model.State, tea.Cmd) {
	switch typed := msg.(type) {
	case tea.KeyMsg:
		return c.handleKeyMsg(typed, m)
	case tea.WindowSizeMsg:
		updated := c.applyWindowSize(typed, m)
		return c.finalizeUpdate(msg, updated)
	case model.JobsLoadedMsg:
		return c.handleJobsLoadedMsg(typed, m)
	case model.TasksLoadedMsg:
		return c.handleTasksLoadedMsg(typed, m)
	case model.GpuLoadedMsg:
		return c.handleGPUContent(typed, m)
	case model.ContainerLoadedMsg:
		return c.handleContainerContent(typed, m)
	case model.QueueLoadedMsg:
		return c.handleQueueContent(typed, m)
	case model.SettingsContentMsg:
		// Rendered settings text goes straight into the settings viewport.
		m.SettingsView.SetContent(string(typed))
		return c.finalizeUpdate(msg, m)
	case ExperimentsLoadedMsg:
		// Rendered experiments text replaces the viewport content and the
		// scroll position is reset to the top.
		m.ExperimentsView.SetContent(string(typed))
		m.ExperimentsView.GotoTop()
		return c.finalizeUpdate(msg, m)
	case model.SettingsUpdateMsg:
		// No direct state change; child components may still react to it.
		return c.finalizeUpdate(msg, m)
	case model.StatusMsg:
		return c.handleStatusMsg(typed, m)
	case model.TickMsg:
		return c.handleTickMsg(typed, m)
	default:
		// Unknown messages are still forwarded to child components.
		return c.finalizeUpdate(msg, m)
	}
}
|
|
|
|
// ExperimentsLoadedMsg is sent when experiments are loaded; it carries the
// pre-rendered experiments text to display in the experiments viewport.
// NOTE(review): unlike the other tea.Msg types this one still lives in the
// controller package rather than model — confirm whether it should move.
type ExperimentsLoadedMsg string
|
|
|
|
func (c *Controller) loadExperiments() tea.Cmd {
|
|
return func() tea.Msg {
|
|
commitIDs, err := c.taskQueue.ListExperiments()
|
|
if err != nil {
|
|
return model.StatusMsg{Level: "error", Text: fmt.Sprintf("Failed to list experiments: %v", err)}
|
|
}
|
|
|
|
if len(commitIDs) == 0 {
|
|
return ExperimentsLoadedMsg("Experiments:\n\nNo experiments found.")
|
|
}
|
|
|
|
var output string
|
|
output += "Experiments:\n\n"
|
|
|
|
for _, commitID := range commitIDs {
|
|
details, err := c.taskQueue.GetExperimentDetails(commitID)
|
|
if err != nil {
|
|
output += fmt.Sprintf("Error loading %s: %v\n\n", commitID, err)
|
|
continue
|
|
}
|
|
output += details + "\n----------------------------------------\n\n"
|
|
}
|
|
|
|
return ExperimentsLoadedMsg(output)
|
|
}
|
|
}
|