Coverage for src / lilbee / cli / tui / widgets / crawl_dialog.py: 100%
82 statements
« prev ^ index » next coverage.py v7.13.4, created at 2026-04-29 19:16 +0000
1"""Modal dialog for configuring a web crawl."""
3from __future__ import annotations
5from dataclasses import dataclass
6from typing import ClassVar
8from textual.app import ComposeResult
9from textual.binding import Binding, BindingType
10from textual.containers import Center, Vertical
11from textual.screen import ModalScreen
12from textual.widgets import Button, Checkbox, Collapsible, Input, Label, Static
14from lilbee.cli.tui import messages as msg
@dataclass(frozen=True)
class CrawlParams:
    """Validated crawl parameters returned by CrawlDialog.

    depth: None = whole-site unbounded. 0 = single URL only. Positive int =
    explicit link-follow depth cap. max_pages: None = no cap. Positive int =
    explicit page cap.
    """

    # Crawl start URL; the dialog prepends "https://" when no scheme is given.
    url: str
    # Link-follow depth cap: None = unbounded, 0 = single page, >0 = explicit cap.
    depth: int | None
    # Page-count cap: None = no cap, >0 = explicit cap.
    max_pages: int | None
class CrawlDialog(ModalScreen[CrawlParams | None]):
    """Modal dialog that collects URL, recursion toggle, and optional caps."""

    CSS_PATH = "crawl_dialog.tcss"
    AUTO_FOCUS = "#crawl-url-input"

    BINDINGS: ClassVar[list[BindingType]] = [
        Binding("escape", "cancel", "Cancel", show=False),
    ]

    def compose(self) -> ComposeResult:
        """Build the dialog layout: title, URL field, recursion toggle,
        collapsible advanced caps, error line, and the button row."""
        with Vertical():
            yield Static(msg.CRAWL_DIALOG_TITLE, id="crawl-title")
            yield Label(msg.CRAWL_DIALOG_URL_LABEL)
            yield Input(
                placeholder=msg.CRAWL_DIALOG_URL_PLACEHOLDER,
                id="crawl-url-input",
            )
            yield Checkbox(
                msg.CRAWL_DIALOG_RECURSIVE_LABEL,
                value=True,
                id="crawl-recursive-checkbox",
            )
            with Collapsible(title=msg.CRAWL_DIALOG_ADVANCED_TITLE, id="crawl-advanced"):
                yield Label(msg.CRAWL_DIALOG_DEPTH_LABEL, classes="crawl-field-label")
                yield Input(
                    placeholder=msg.CRAWL_DIALOG_DEPTH_PLACEHOLDER,
                    id="crawl-depth-input",
                )
                yield Label(msg.CRAWL_DIALOG_MAX_PAGES_LABEL, classes="crawl-field-label")
                yield Input(
                    placeholder=msg.CRAWL_DIALOG_MAX_PAGES_PLACEHOLDER,
                    id="crawl-max-pages-input",
                )
            yield Static("", id="crawl-error")
            with Center():
                yield Button(msg.CRAWL_DIALOG_SUBMIT, variant="primary", id="crawl-submit")
                yield Button(msg.CRAWL_DIALOG_CANCEL, variant="default", id="crawl-cancel")

    def on_button_pressed(self, event: Button.Pressed) -> None:
        """Route button clicks: Submit validates; any other button cancels."""
        if event.button.id != "crawl-submit":
            self.dismiss(None)
            return
        self._try_submit()

    def on_input_submitted(self, _event: Input.Submitted) -> None:
        """Pressing Enter inside any input behaves like clicking Submit."""
        self._try_submit()

    @staticmethod
    def _parse_optional_positive_int(value: str) -> int | None:
        """Parse a positive integer from *value*; empty string returns None.

        None means "no cap" in the crawl API. Raises ValueError on non-numeric
        input or non-positive integers.
        """
        if value == "":
            return None
        parsed = int(value)  # ValueError propagates for non-numeric input
        if parsed < 1:
            raise ValueError
        return parsed

    def _validate(self) -> CrawlParams | str:
        """Validate dialog inputs.

        Returns a CrawlParams on success, or an error-message string on failure.
        """
        # Imported lazily so the dialog module stays cheap to load.
        from lilbee.crawler import is_url, require_valid_crawl_url

        target = self.query_one("#crawl-url-input", Input).value.strip()
        recursive = self.query_one("#crawl-recursive-checkbox", Checkbox).value
        raw_depth = self.query_one("#crawl-depth-input", Input).value.strip()
        raw_max_pages = self.query_one("#crawl-max-pages-input", Input).value.strip()

        if not target:
            return msg.CRAWL_DIALOG_URL_REQUIRED

        # Bare host names are upgraded to https before full validation.
        if not is_url(target):
            target = f"https://{target}"

        try:
            require_valid_crawl_url(target)
        except ValueError as exc:
            return msg.CRAWL_DIALOG_INVALID_URL.format(error=exc)

        # Non-recursive crawls fetch a single page; the caps don't apply.
        if not recursive:
            return CrawlParams(url=target, depth=0, max_pages=None)

        try:
            depth = self._parse_optional_positive_int(raw_depth)
        except ValueError:
            return msg.CRAWL_DIALOG_INVALID_NUMBER.format(field=msg.CRAWL_DIALOG_DEPTH_LABEL)

        try:
            max_pages = self._parse_optional_positive_int(raw_max_pages)
        except ValueError:
            return msg.CRAWL_DIALOG_INVALID_NUMBER.format(field=msg.CRAWL_DIALOG_MAX_PAGES_LABEL)

        return CrawlParams(url=target, depth=depth, max_pages=max_pages)

    def _try_submit(self) -> None:
        """Validate the form; dismiss with CrawlParams or display the error."""
        outcome = self._validate()
        error_display = self.query_one("#crawl-error", Static)
        if isinstance(outcome, CrawlParams):
            error_display.update("")
            self.dismiss(outcome)
        else:
            # _validate signals failure by returning the error-message string.
            error_display.update(outcome)

    def action_cancel(self) -> None:
        """Escape binding: close the dialog with no result."""
        self.dismiss(None)