From 5184ffba0b3444c63d4bd2688612c31870629fef Mon Sep 17 00:00:00 2001 From: Aavash Shrestha Date: Mon, 10 Mar 2025 11:20:00 +0100 Subject: [PATCH] ui: use viewport instead of list model for messages model --- internal/ui/components/chat.go | 171 ++++++++++++++++++++ internal/ui/components/list.go | 1 - internal/ui/components/message_list_item.go | 105 ------------ internal/ui/handlers.go | 36 ++--- internal/ui/model.go | 37 +++-- main.go | 2 +- 6 files changed, 208 insertions(+), 144 deletions(-) create mode 100644 internal/ui/components/chat.go delete mode 100644 internal/ui/components/message_list_item.go diff --git a/internal/ui/components/chat.go b/internal/ui/components/chat.go new file mode 100644 index 0000000..dfd96f7 --- /dev/null +++ b/internal/ui/components/chat.go @@ -0,0 +1,171 @@ +package components + +import ( + "fmt" + "strings" + + "github.com/aavshr/panda/internal/ui/styles" + "github.com/charmbracelet/bubbles/viewport" + tea "github.com/charmbracelet/bubbletea" + "github.com/charmbracelet/lipgloss" +) + +type Message struct { + Content string + CreatedAt string + IsUser bool +} + +type ChatModel struct { + viewport viewport.Model + messages []Message + width int + height int + userStyle lipgloss.Style + assistantStyle lipgloss.Style + timestampStyle lipgloss.Style +} + +func NewChatModel(width, height int) ChatModel { + vp := viewport.New(width, height) + vp.KeyMap.PageDown.SetEnabled(true) + vp.KeyMap.PageUp.SetEnabled(true) + + userStyle := styles.UserMessageStyle() + assistantStyle := styles.AIMessageStyle() + timestampStyle := styles.MetadataStyle() + + return ChatModel{ + viewport: vp, + messages: []Message{}, + width: width, + height: height, + userStyle: userStyle, + assistantStyle: assistantStyle, + timestampStyle: timestampStyle, + } +} + +func (m *ChatModel) SetMessage(index int, msg Message) { + m.messages[index] = msg + m.updateViewportContent() +} + +func (m *ChatModel) AddMessage(msg Message) { + m.messages = append(m.messages, 
msg) + m.updateViewportContent() + m.ScrollToBottom() +} + +func (m *ChatModel) ResetMessages() { + m.messages = []Message{} + m.updateViewportContent() +} + +func (m *ChatModel) SetSize(width, height int) { + m.width = width + m.height = height + m.viewport.Width = width + m.viewport.Height = height + m.updateViewportContent() +} + +func (m *ChatModel) ScrollToBottom() { + m.viewport.GotoBottom() +} + +func (m *ChatModel) formatMessage(msg Message) string { + if msg.Content == "" { + return "" + } + var style lipgloss.Style + var sender = "You" + if msg.IsUser { + style = m.userStyle + } else { + style = m.assistantStyle + sender = "AI" + } + + header := style.Render(sender) + m.timestampStyle.Render(msg.CreatedAt) + contentWidth := m.width - 4 + wrappedContent := wrapText(msg.Content, contentWidth) + indentedContent := strings.ReplaceAll(wrappedContent, "\n", "\n ") + return fmt.Sprintf("%s\n %s\n", header, indentedContent) +} + +// updateViewportContent updates the content in the viewport +func (m *ChatModel) updateViewportContent() { + var sb strings.Builder + + for _, msg := range m.messages { + sb.WriteString(m.formatMessage(msg)) + sb.WriteString("\n") + } + + m.viewport.SetContent(sb.String()) + m.ScrollToBottom() +} + +func wrapText(text string, width int) string { + var result strings.Builder + lines := strings.Split(text, "\n") + + for i, line := range lines { + if i > 0 { + result.WriteString("\n") + } + + if len(line) <= width { + result.WriteString(line) + continue + } + + words := strings.Fields(line) + lineLength := 0 + + for j, word := range words { + if j > 0 { + if lineLength+len(word)+1 > width { + result.WriteString("\n") + lineLength = 0 + } else { + result.WriteString(" ") + lineLength++ + } + } + + result.WriteString(word) + lineLength += len(word) + } + } + + return result.String() +} + +func (m *ChatModel) View() string { + return m.viewport.View() +} + +func (m *ChatModel) Update(msg tea.Msg) (ChatModel, tea.Cmd) { + var ( + cmd tea.Cmd + 
cmds []tea.Cmd + ) + + switch msg := msg.(type) { + case tea.KeyMsg: + switch msg.Type { + case tea.KeyEscape: + return *m, EscapeCmd + } + + case tea.WindowSizeMsg: + m.SetSize(msg.Width, msg.Height) + } + + m.viewport, cmd = m.viewport.Update(msg) + cmds = append(cmds, cmd) + + return *m, tea.Batch(cmds...) +} diff --git a/internal/ui/components/list.go b/internal/ui/components/list.go index 34da0a0..9fd64c8 100644 --- a/internal/ui/components/list.go +++ b/internal/ui/components/list.go @@ -107,7 +107,6 @@ func (m *ListModel) Update(msg tea.Msg) (ListModel, tea.Cmd) { switch msg := msg.(type) { case tea.KeyMsg: switch msg.Type { - // TODO: handle enter key case tea.KeyEscape: return *m, EscapeCmd case tea.KeyEnter: diff --git a/internal/ui/components/message_list_item.go b/internal/ui/components/message_list_item.go deleted file mode 100644 index d7d26dd..0000000 --- a/internal/ui/components/message_list_item.go +++ /dev/null @@ -1,105 +0,0 @@ -package components - -import ( - "fmt" - "io" - - "github.com/aavshr/panda/internal/db" - "github.com/aavshr/panda/internal/ui/styles" - "github.com/charmbracelet/bubbles/list" - tea "github.com/charmbracelet/bubbletea" - "github.com/charmbracelet/lipgloss" - "github.com/muesli/reflow/wordwrap" -) - -// MessageListItem implements the list.Item, list.DefaultItem interface -type MessageListItem struct { - message *db.Message -} - -func (t *MessageListItem) Title() string { - return t.message.Content -} - -func (t *MessageListItem) Description() string { - if t.message.Role == "" || t.message.CreatedAt == "" { - return "" - } - role := "You" - if t.message.Role == "assistant" { - role = "AI" - } - return fmt.Sprintf("%s at %s", role, t.message.CreatedAt) -} - -func (t *MessageListItem) FilterValue() string { - return t.Title() -} - -// MessageListItemDelegate implements list.ItemDelegate interface -type MessageListItemDelegate struct { - userStyle lipgloss.Style - aiStyle lipgloss.Style - metaStyle lipgloss.Style -} - -func 
(d *MessageListItemDelegate) Render(w io.Writer, m list.Model, index int, item list.Item) { - messageItem, ok := item.(*MessageListItem) - if !ok { - return - } - - contentWidth := m.Width() - 4 // account for padding - - var contentStyle lipgloss.Style - if messageItem.message.Role == "assistant" { - contentStyle = d.aiStyle - } else { - contentStyle = d.userStyle - } - - content := wordwrap.String(messageItem.Title(), contentWidth) - meta := d.metaStyle.Render(messageItem.Description()) - - fmt.Fprintln(w, contentStyle.Render(content)) - fmt.Fprintln(w, meta) - - fmt.Fprintln(w) -} - -func (d *MessageListItemDelegate) Height() int { - return 2 + d.Spacing() // Minimum for content + metadata + spacing -} - -func (d *MessageListItemDelegate) Spacing() int { - return 0 -} - -func (d *MessageListItemDelegate) Update(msg tea.Msg, m *list.Model) tea.Cmd { - return nil -} - -func NewMessageListItem(message *db.Message) *MessageListItem { - return &MessageListItem{ - message: message, - } -} - -func NewMessageListItems(messages []*db.Message) []list.Item { - items := make([]list.Item, len(messages)) - for i, m := range messages { - items[i] = &MessageListItem{ - message: m, - } - } - return items -} - -// NewMessageListItemDelegate maintains the existing API while enhancing functionality -func NewMessageListItemDelegate() list.ItemDelegate { - return &MessageListItemDelegate{ - userStyle: styles.UserMessageStyle(), - aiStyle: styles.AIMessageStyle(), - metaStyle: styles.MetadataStyle(), - } -} diff --git a/internal/ui/handlers.go b/internal/ui/handlers.go index 769fd03..b49d766 100644 --- a/internal/ui/handlers.go +++ b/internal/ui/handlers.go @@ -70,8 +70,10 @@ func (m *Model) setFocusedComponent(com components.Component) { switch com { case components.ComponentChatInput: m.chatInputModel.Focus() - case components.ComponentMessages: - m.messagesModel.Focus() + /* + case components.ComponentMessages: + m.messagesModel.Focus() + */ case components.ComponentHistory: 
m.historyModel.Focus() // TODO: check why we need to reslect the activethreadindex @@ -145,7 +147,7 @@ func (m *Model) handleChatInputReturnMsg(msg components.ChatInputReturnMsg) tea. return m.cmdError(fmt.Errorf("store.CreateMessage: %w", err)) } m.setMessages(append(m.messages, userMessage)) - // TODO: history? + // TODO: history reader, err := m.llm.CreateChatCompletionStream(context.Background(), m.userConfig.LLMModel, msg.Value) if err != nil { @@ -159,6 +161,7 @@ func (m *Model) handleChatInputReturnMsg(msg components.ChatInputReturnMsg) tea. Role: roleAssistant, ThreadID: activeThread.ID, })) + m.messagesModel.ScrollToBottom() return m.cmdForwardChatCompletionStream } @@ -167,8 +170,10 @@ func (m *Model) handleEscapeMsg() { switch m.focusedComponent { case components.ComponentChatInput: m.chatInputModel.Blur() - case components.ComponentMessages: - m.messagesModel.Blur() + /* + case components.ComponentMessages: + m.messagesModel.Blur() + */ case components.ComponentHistory: m.historyModel.Blur() } @@ -234,7 +239,7 @@ func (m *Model) handleForwardChatCompletionStreamMsg(_ ForwardChatCompletionStre createdAt := m.messages[llmMessageIndex].CreatedAt // TODO: what buffer size makes it look smooth? 
- buffer := make([]byte, 16) + buffer := make([]byte, 64) streamDone := false n, err := m.activeLLMStream.Read(buffer) @@ -245,14 +250,8 @@ func (m *Model) handleForwardChatCompletionStreamMsg(_ ForwardChatCompletionStre streamDone = true m.activeLLMStream.Close() } - /* - if n == 0 && !streamDone { - return m.cmdError(fmt.Errorf("activeLLMStream.Read: no bytes read")) - } - */ if n > 0 { content = fmt.Sprintf("%s%s", content, string(buffer[:n])) - // upate created at as soon as first bytes are read if createdAt == "" { createdAt = time.Now().Format(timeFormat) } @@ -264,15 +263,16 @@ func (m *Model) handleForwardChatCompletionStreamMsg(_ ForwardChatCompletionStre ThreadID: activeThreadId, } m.messages[llmMessageIndex] = updatedLLMMessage - setItemCmd := m.messagesModel.SetItem( - llmMessageIndex, - components.NewMessageListItem(updatedLLMMessage), - ) + m.messagesModel.SetMessage(llmMessageIndex, components.Message{ + Content: updatedLLMMessage.Content, + CreatedAt: updatedLLMMessage.CreatedAt, + IsUser: false, + }) if streamDone { if err := m.store.CreateMessage(updatedLLMMessage); err != nil { return m.cmdError(fmt.Errorf("store.CreateMessage: %w", err)) } - return setItemCmd + return nil } - return tea.Batch(setItemCmd, m.cmdForwardChatCompletionStream) + return m.cmdForwardChatCompletionStream } diff --git a/internal/ui/model.go b/internal/ui/model.go index 98ed4d9..504aed8 100644 --- a/internal/ui/model.go +++ b/internal/ui/model.go @@ -56,7 +56,7 @@ type Model struct { userConfig *config.Config showSettings bool - messagesModel components.ListModel + messagesModel components.ChatModel historyModel components.ListModel chatInputModel components.ChatInputModel settingsModel components.SettingsModel @@ -113,7 +113,7 @@ func New(conf *Config, store store.Store, llm llm.LLM) (*Model, error) { m.threads = []*db.Thread{ { Name: newThreadName, - // TODO: not this hack lmao + // TODO: not this hack CreatedAt: "Create a new thread..", }, } @@ -125,26 +125,16 @@ func 
New(conf *Config, store store.Store, llm llm.LLM) (*Model, error) {
 	m.messages = []*db.Message{}
 
-	containerPaddingHeight := 18
-	containerPaddingWidth := 10
 	m.historyModel = components.NewListModel(&components.NewListModelInput{
 		Title:                  titleHistory,
 		Items:                  components.NewThreadListItems(m.threads),
-		Width:                  conf.historyWidth - containerPaddingWidth,
-		Height:                 conf.historyHeight - containerPaddingHeight,
+		Width:                  conf.historyWidth - 10,  // padding
+		Height:                 conf.historyHeight - 18, // padding
 		Delegate:               components.NewThreadListItemDelegate(),
 		AllowInfiniteScrolling: false,
 	})
 	m.historyModel.Select(0) // New Thread is selected by default
-
-	m.messagesModel = components.NewListModel(&components.NewListModelInput{
-		Title:                  titleMessages,
-		Items:                  components.NewMessageListItems(m.messages),
-		Width:                  conf.messagesWidth - containerPaddingWidth,
-		Height:                 conf.messagesHeight - containerPaddingHeight,
-		Delegate:               components.NewMessageListItemDelegate(),
-		AllowInfiniteScrolling: false,
-	})
+	m.messagesModel = components.NewChatModel(conf.messagesWidth, conf.messagesHeight)
 	m.chatInputModel = components.NewChatInputModel(conf.chatInputWidth, conf.chatInputHeight)
 
 	listContainer := styles.ListContainerStyle()
@@ -172,8 +162,18 @@ func (m *Model) setThreads(threads []*db.Thread) {
 
 func (m *Model) setMessages(messages []*db.Message) {
 	m.messages = messages
-	m.messagesModel.SetItems(components.NewMessageListItems(messages))
-	m.messagesModel.GoToLastPage()
+	// TODO: a more efficient way to do this?
+ m.messagesModel.ResetMessages() + for _, message := range messages { + isUser := message.Role == roleUser + m.messagesModel.AddMessage( + components.Message{ + Content: message.Content, + CreatedAt: message.CreatedAt, + IsUser: isUser, + }, + ) + } } func (m *Model) setActiveThreadIndex(index int) { @@ -219,8 +219,7 @@ func (m *Model) View() string { lipgloss.Top, m.componentsToContainer[components.ComponentHistory].Render(m.historyModel.View()), m.componentsToContainer[components.ComponentMessages].Render( - // the inner container is to enforce max height on the messages list - styles.InnerContainerStyle().MaxHeight(m.conf.messagesHeight).Render(m.messagesModel.View()), + m.messagesModel.View(), ), ), lipgloss.JoinVertical( diff --git a/main.go b/main.go index 13d2f0f..57ac681 100644 --- a/main.go +++ b/main.go @@ -110,7 +110,7 @@ func main() { m, err := ui.New(&ui.Config{ InitThreadsLimit: 10, MaxThreadsLimit: 100, - Width: width - 10, + Width: width - 8, Height: height - 10, }, mockStore, openaiLLM) if err != nil {