Coverage for src / lilbee / cli / tui / widgets / message.py: 100%
97 statements
« prev ^ index » next coverage.py v7.13.4, created at 2026-04-29 19:16 +0000
1"""Chat message widgets: user and assistant bubbles."""
3from __future__ import annotations
5import time
6from pathlib import Path
7from typing import ClassVar
9from textual.app import ComposeResult
10from textual.containers import Vertical
11from textual.content import Content
12from textual.widgets import Collapsible, Markdown, Static
14from lilbee.cli.tui import messages as msg
15from lilbee.cli.tui.pill import pill
16from lilbee.config import cfg
# Minimum interval (seconds) between markdown widget updates during streaming
_MD_UPDATE_INTERVAL = 0.1

# Textual markup for the speaker labels rendered above each chat bubble.
_SPEAKER_YOU = "[bold $primary]you[/]"
_SPEAKER_LILBEE = "[bold $success]lilbee[/]"

# Shared stylesheet for both message widgets, read once at import time and
# applied through each class's DEFAULT_CSS.
_CSS_FILE = Path(__file__).parent / "message.tcss"
_MESSAGE_CSS = _CSS_FILE.read_text(encoding="utf-8")
class UserMessage(Vertical):
    """A user's question in the chat log.

    Renders a speaker label followed by the question text, stacked
    vertically and styled via the shared message stylesheet.
    """

    DEFAULT_CSS: ClassVar[str] = _MESSAGE_CSS

    def __init__(self, text: str) -> None:
        """Store *text* for rendering; it is shown verbatim in compose()."""
        super().__init__(classes="user-message")
        self._text = text

    def compose(self) -> ComposeResult:
        """Yield the speaker label, then the message body."""
        label = Static(_SPEAKER_YOU, classes="speaker-label")
        body = Static(self._text, classes="message-content")
        yield label
        yield body
class AssistantMessage(Vertical):
    """An assistant's response with streaming markdown, reasoning, and citations.

    Content and reasoning tokens are accumulated in lists and joined on
    render; markdown updates are debounced during streaming to avoid
    re-rendering the widget on every token.
    """

    DEFAULT_CSS: ClassVar[str] = _MESSAGE_CSS

    def __init__(self) -> None:
        super().__init__(classes="assistant-message")
        # Rendering mode is snapshotted from config at construction time.
        self._use_markdown: bool = cfg.markdown_rendering
        # Accumulated stream tokens; joined lazily when a widget is updated.
        self._reasoning_parts: list[str] = []
        self._content_parts: list[str] = []
        self._finished = False
        # Child widgets are created in compose(); None until then.
        self._content_widget: Markdown | Static | None = None
        self._reasoning_widget: Collapsible | None = None
        self._reasoning_static: Static | None = None
        self._citation_widget: Static | None = None
        # Monotonic timestamp of the last markdown refresh (for debouncing).
        self._last_md_update: float = 0.0

    def compose(self) -> ComposeResult:
        # Layout: speaker label, collapsible reasoning, response body, citations.
        yield Static(_SPEAKER_LILBEE, classes="speaker-label")
        self._reasoning_static = Static("", classes="reasoning-text")
        self._reasoning_widget = Collapsible(
            self._reasoning_static,
            title=msg.CHAT_REASONING_STREAMING,
            collapsed=True,
            classes="reasoning-block",
        )
        yield self._reasoning_widget
        self._content_widget = self._build_content_widget()
        yield self._content_widget
        self._citation_widget = Static("", classes="source-citation")
        yield self._citation_widget

    def _build_content_widget(self) -> Markdown | Static:
        """Create the content widget based on the current rendering mode."""
        widget_cls = Markdown if self._use_markdown else Static
        return widget_cls("", classes="response-md")

    @property
    def use_markdown(self) -> bool:
        """Whether this message is using Markdown rendering."""
        return self._use_markdown

    async def rebuild_content_widget(self, use_markdown: bool) -> None:
        """Replace the content widget with a different rendering mode."""
        old = self._content_widget
        if old is None:
            # Not composed yet; nothing to swap.
            return
        self._use_markdown = use_markdown
        replacement = self._build_content_widget()
        replacement.update("".join(self._content_parts))
        # Mount the replacement next to the old widget so ordering is kept,
        # then drop the old one.
        await self.mount(replacement, after=old)
        self._content_widget = replacement
        await old.remove()

    def append_reasoning(self, text: str) -> None:
        """Append reasoning token (shown in collapsible)."""
        self._reasoning_parts.append(text)
        # Auto-expand the collapsible once reasoning starts arriving.
        if self._reasoning_widget is not None:
            self._reasoning_widget.collapsed = False
        if self._reasoning_static is not None:
            self._reasoning_static.update("".join(self._reasoning_parts))

    def append_content(self, text: str) -> None:
        """Append response content token (debounced markdown updates)."""
        self._content_parts.append(text)
        if self._content_widget is None:
            return
        now = time.monotonic()
        if now - self._last_md_update < _MD_UPDATE_INTERVAL:
            # Too soon since the last render; the token is kept in
            # _content_parts and will appear on the next update or finish().
            return
        self._last_md_update = now
        self._content_widget.update("".join(self._content_parts))
        self.refresh()

    def finish(self, sources: list[str] | None = None) -> None:
        """Mark response as complete and show citations."""
        self._finished = True
        # Flush any tokens the debounce in append_content() held back.
        if self._content_parts and self._content_widget is not None:
            self._content_widget.update("".join(self._content_parts))
            self.refresh()
        reasoning = self._reasoning_widget
        if reasoning is not None:
            if self._reasoning_parts:
                # "Tokens" here is a whitespace word count, not model tokens.
                token_count = len("".join(self._reasoning_parts).split())
                reasoning.title = msg.CHAT_REASONING_FINISHED.format(tokens=token_count)
            else:
                # No reasoning ever streamed; hide the empty collapsible.
                reasoning.display = False
        citation = self._citation_widget
        if citation is not None:
            if sources:
                citation.update(_build_citation_content(sources))
            else:
                citation.display = False
def _build_citation_content(sources: list[str]) -> Content:
    """Build a 'sources: pill pill pill' content line from source paths."""
    label = Content.styled(msg.CHAT_SOURCES_LABEL, "$text-muted")
    pills: list[Content] = []
    for source_path in sources:
        # Space-separate each pill; show only the filename, not the full path.
        pills.extend((Content(" "), pill(Path(source_path).name, "$surface-lighten-2", "$text")))
    return Content.assemble(label, *pills)