diff --git a/chat.py b/chat.py
index b78572a..97b4a48 100755
--- a/chat.py
+++ b/chat.py
@@ -571,7 +571,7 @@ def _send_message(self, message):
 
         if comments:
             print("\n" + UI.colorize("LLM Explanation / Reasoning:", "BRIGHT_CYAN"))
-            print(comments)
+            UI.render_markdown(comments)
         else:
             print("\nNo explanation provided by the LLM.")
 
diff --git a/ui.py b/ui.py
index 2a47a97..b11b1c2 100644
--- a/ui.py
+++ b/ui.py
@@ -1,8 +1,14 @@
 """UI utilities for LLM Terminal Chat"""
+import logging
 import os
 from pathlib import Path
 from prompt_toolkit import PromptSession
 from prompt_toolkit.completion import PathCompleter
+from rich.markdown import Markdown
+from rich.console import Console
+
+logger = logging.getLogger(__name__)
+
 
 class UI:
     # ANSI color codes
@@ -72,6 +78,20 @@ def show_exit_message(log_path):
 
         print(f"  {log_path}")
         print("\n" + UI.colorize("Goodbye!", 'BRIGHT_GREEN'))
 
+    @staticmethod
+    def render_markdown(text):
+        """Render markdown text with glow-like formatting."""
+        if not text:
+            return
+        try:
+            console = Console()
+            md = Markdown(text)
+            console.print(md)
+        except Exception:
+            # Log error with traceback and fall back to plain print if markdown rendering fails
+            logger.exception("Failed to render markdown")
+            print(text)
+
     @staticmethod
     def interactive_selection(prompt_title, prompt_message, no_items_message, items, item_formatter=lambda x: x, allow_new=False,