Skip to content

Commit e2e514c

Browse files
committed
add ability to prompt the user
1 parent 130e9ba commit e2e514c

File tree

8 files changed

+223
-5
lines changed

8 files changed

+223
-5
lines changed

docs/quickstart.rst

Lines changed: 27 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -129,3 +129,30 @@ split between option argument and value.
129129
Single-character options will not be split from their respective value:
130130

131131
.. structured-tutorial-part::
132+
133+
*************************
134+
Ask the user for feedback
135+
*************************
136+
137+
When running a tutorial, you can prompt the user to inspect the current state. You can ask the user to just
138+
press "enter" or even to confirm that the current state looks okay (by answering "yes" or "no").
139+
140+
When rendering a tutorial, prompt parts are simply skipped.
141+
142+
As an example:
143+
144+
.. literalinclude:: /tutorials/interactive-prompt/tutorial.yaml
145+
:caption: docs/tutorials/interactive-prompt/tutorial.yaml
146+
:language: yaml
147+
148+
.. structured-tutorial:: interactive-prompt/tutorial.yaml
149+
150+
In Sphinx, ``structured-tutorial-part`` needs to be called only twice, as prompts are simply skipped. The first
151+
part just creates a file. Since ``temporary_directory: true`` in the configuration, this will run in
152+
a temporary directory that is removed after running the tutorial:
153+
154+
.. structured-tutorial-part::
155+
156+
When running the tutorial, the user will now be prompted to confirm the current state. The prompt would
157+
contain the current working directory. Presumably, the user would check the contents of ``test.txt`` in that
158+
directory.
Lines changed: 20 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,20 @@
1+
configuration:
2+
run:
3+
# Switch to temporary directory before running the tutorial
4+
temporary_directory: true
5+
parts:
6+
- commands:
7+
- command: echo "some content" > test.txt
8+
- command: echo "About to give you two prompts..."
9+
# Just prompt the user to hit 'enter' at this point:
10+
- prompt: "Hit enter to continue... "
11+
# Ask the user to confirm the current state - if they answer "no", the tutorial will abort.
12+
# In this case, we output the current working directory and ask the user to confirm:
13+
- prompt: |-
14+
Current working directory is {{ cwd }} ...
15+
Is the current state satisfactory (y/n)?
16+
type: confirm
17+
# Set to false to also raise an error if the user just presses enter:
18+
#default: true
19+
# You can also overwrite the error message the user will get:
20+
#error: Custom error encountered.

structured_tutorials/models.py

Lines changed: 18 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -262,6 +262,23 @@ def set_default_context(self) -> Self:
262262
return self
263263

264264

265+
class PromptModel(BaseModel):
    """Allows you to inspect the current state of the tutorial manually."""

    model_config = ConfigDict(extra="forbid", title="Prompt Configuration")

    # Prompt text shown to the user (rendered as a template by the runner).
    prompt: str = Field(description=f"The prompt text. {TEMPLATE_DESCRIPTION}")
    # "enter": just wait for the user to press enter.
    # "confirm": require a yes/no answer; "no" aborts the tutorial.
    type: Literal["enter", "confirm"] = "enter"
    default: bool = Field(
        default=True, description="For type=`confirm`, the default if the user just presses enter."
    )
    error: str = Field(
        default="State was not confirmed.",
        # BUG FIX: this description was a plain string, so the literal text
        # "{TEMPLATE_DESCRIPTION}" ended up in the generated schema instead of the
        # constant's value. Use an f-string, matching the `prompt` field above.
        description=f"For `type=confirm`, the error message if the user does not confirm the current state. "
        f"{TEMPLATE_DESCRIPTION} The context will also include the `response` variable, representing the user "
        "response.",
    )
280+
281+
265282
class ConfigurationModel(BaseModel):
266283
"""Initial configuration of a tutorial."""
267284

@@ -285,7 +302,7 @@ class TutorialModel(BaseModel):
285302
description="Directory from which relative file paths are resolved. Defaults to the path of the "
286303
"tutorial file.",
287304
) # absolute path (input: relative to path)
288-
parts: tuple[CommandsPartModel | FilePartModel, ...] = Field(
305+
parts: tuple[CommandsPartModel | FilePartModel | PromptModel, ...] = Field(
289306
description="The individual parts of this tutorial."
290307
)
291308
configuration: ConfigurationModel = Field(default=ConfigurationModel())

structured_tutorials/runners/local.py

Lines changed: 22 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -11,13 +11,15 @@
1111
import time
1212
from copy import deepcopy
1313
from pathlib import Path
14+
from typing import Any
1415

1516
from jinja2 import Environment
1617

1718
from structured_tutorials.models import (
1819
CleanupCommandModel,
1920
CommandsPartModel,
2021
FilePartModel,
22+
PromptModel,
2123
TestCommandModel,
2224
TestPortModel,
2325
TutorialModel,
@@ -34,8 +36,8 @@ def __init__(self, tutorial: TutorialModel):
3436
self.env = Environment(keep_trailing_newline=True)
3537
self.cleanup: list[CleanupCommandModel] = []
3638

37-
def render(self, value: str) -> str:
38-
return self.env.from_string(value).render(self.context)
39+
def render(self, value: str, **context: Any) -> str:
    """Render *value* as a Jinja2 template against the runner's context.

    Extra keyword arguments are merged into (and take precedence over)
    ``self.context`` for this single render call.
    """
    merged_context = {**self.context, **context}
    template = self.env.from_string(value)
    return template.render(merged_context)
3941

4042
def run_test(self, test: TestCommandModel | TestPortModel) -> None:
4143
# If an initial delay is configured, wait that long
@@ -136,8 +138,26 @@ def write_file(self, part: FilePartModel) -> None:
136138
with open(destination, "w") as destination_stream:
137139
destination_stream.write(contents)
138140

141+
def run_prompt(self, part: PromptModel) -> None:
    """Interactively prompt the user as described by *part*.

    For ``type == "enter"`` we merely wait for the user to press enter. For
    ``type == "confirm"`` we keep asking until a valid answer is given; a
    negative answer (or an empty answer when ``part.default`` is false)
    raises ``RuntimeError`` with the template-rendered error message.
    """
    rendered_prompt = self.render(part.prompt).strip() + " "

    if part.type != "enter":  # type == "confirm"
        valid_inputs = ("n", "no", "yes", "y", "")
        while True:
            answer = input(rendered_prompt).strip().lower()
            if answer in valid_inputs:
                break
            print(f"Please enter a valid value ({'/'.join(valid_inputs)}).")

        declined = answer in ("n", "no")
        defaulted_to_no = answer == "" and not part.default
        if declined or defaulted_to_no:
            # The error template additionally sees the raw user response.
            raise RuntimeError(self.render(part.error, response=answer))
    else:
        input(rendered_prompt)
139155
def run_parts(self) -> None:
140156
for part in self.tutorial.parts:
157+
if isinstance(part, PromptModel):
158+
self.run_prompt(part)
159+
continue
160+
141161
if part.run.skip:
142162
continue
143163

structured_tutorials/sphinx/utils.py

Lines changed: 7 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -13,7 +13,7 @@
1313
from sphinx.errors import ConfigError, ExtensionError
1414

1515
from structured_tutorials import templates
16-
from structured_tutorials.models import CommandsPartModel, FilePartModel, TutorialModel
16+
from structured_tutorials.models import CommandsPartModel, FilePartModel, PromptModel, TutorialModel
1717
from structured_tutorials.textwrap import wrap_command_filter
1818

1919
TEMPLATE_DIR = resources.files(templates)
@@ -135,6 +135,12 @@ def render_part(self) -> str:
135135
# Find the next part that is not skipped
136136
for part in self.tutorial.parts[self.next_part :]:
137137
self.next_part += 1
138+
139+
# Ignore prompt models when rendering tutorials.
140+
if isinstance(part, PromptModel):
141+
continue
142+
143+
# If the part is not configured to be skipped for docs, use it.
138144
if not part.doc.skip:
139145
break
140146
else:

structured_tutorials/textwrap.py

Lines changed: 0 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -81,7 +81,6 @@ def wrap_command_filter(command: str, prompt: str, text_width: int) -> str:
8181
command_line = re.sub(r"\s*\n\s*", " ", command_line).strip()
8282
if not command_line:
8383
continue
84-
print(0, line_no, command_line)
8584

8685
wrapper = CommandLineTextWrapper(width=text_width)
8786
if line_no == 1:

tests/runners/test_local.py

Lines changed: 106 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -5,6 +5,7 @@
55

66
from pathlib import Path
77
from unittest import mock
8+
from unittest.mock import call
89

910
import pytest
1011
from pytest_subprocess import FakeProcess
@@ -255,6 +256,111 @@ def test_file_part_with_contents_with_destination_template(tmp_path: Path) -> No
255256
runner.run()
256257

257258

259+
@pytest.mark.parametrize("prompt", ("test", "test "))
@pytest.mark.parametrize("answer", ("", "yes", "y", "no", "n", "foobar"))
def test_enter_prompt(prompt: str, answer: str) -> None:
    """Test enter function."""
    tutorial = TutorialModel.model_validate({"path": "/dummy.yaml", "parts": [{"prompt": prompt}]})
    with mock.patch("builtins.input", return_value=answer, autospec=True) as mocked_input:
        LocalTutorialRunner(tutorial).run()
    # The prompt is stripped and gets exactly one trailing space appended.
    mocked_input.assert_called_once_with(f"{prompt.strip()} ")
268+
269+
270+
@pytest.mark.parametrize("answer", ("", "y", "yes"))
def test_confirm_prompt_confirms(answer: str) -> None:
    """Test confirm prompt where empty answer passes."""
    tutorial = TutorialModel.model_validate(
        {"path": "/dummy.yaml", "parts": [{"prompt": "example:", "type": "confirm"}]}
    )
    with mock.patch("builtins.input", return_value=answer, autospec=True) as mocked_input:
        LocalTutorialRunner(tutorial).run()
    mocked_input.assert_called_once_with("example: ")
280+
281+
282+
@pytest.mark.parametrize("answer", ("y", "yes"))
def test_confirm_prompt_confirms_with_default_false(answer: str) -> None:
    """Test confirm prompt where answer passes with default=False."""
    tutorial = TutorialModel.model_validate(
        {"path": "/dummy.yaml", "parts": [{"prompt": "example:", "type": "confirm", "default": False}]}
    )
    with mock.patch("builtins.input", return_value=answer, autospec=True) as mocked_input:
        LocalTutorialRunner(tutorial).run()
    mocked_input.assert_called_once_with("example: ")
292+
293+
294+
@pytest.mark.parametrize("answer", ("", "n", "no"))
def test_confirm_prompt_does_not_confirm_with_default_false(answer: str) -> None:
    """Test confirm prompt where answer does not confirm with default=False."""
    tutorial = TutorialModel.model_validate(
        {
            "path": "/dummy.yaml",
            "parts": [{"prompt": "example:", "type": "confirm", "default": False}],
        }
    )
    runner = LocalTutorialRunner(tutorial)
    with mock.patch("builtins.input", return_value=answer, autospec=True) as mocked_input:
        # With default=False, pressing enter ("") also counts as "no".
        with pytest.raises(RuntimeError, match=r"^State was not confirmed\.$"):
            runner.run()
    mocked_input.assert_called_once_with("example: ")
308+
309+
310+
def test_confirm_prompt_with_invalid_response() -> None:
    """Test confirm prompt where we first give an invalid response."""
    configuration = TutorialModel.model_validate(
        {
            "path": "/dummy.yaml",
            "parts": [{"prompt": "example:", "type": "confirm", "default": False}],
        }
    )
    runner = LocalTutorialRunner(configuration)
    with mock.patch("builtins.input", side_effect=["foobar", "y"], autospec=True) as mock_input:
        runner.run()
    # assert_has_calls alone would pass even with additional calls; pin the
    # exact count so an unexpected extra re-prompt fails the test.
    assert mock_input.call_count == 2
    mock_input.assert_has_calls([call("example: "), call("example: ")])
322+
323+
324+
def test_confirm_prompt_does_not_confirm_error_template() -> None:
    """Test that the error message is rendered as a template, with `response` in the context."""
    answer = "no"
    value = "example value"
    tutorial = TutorialModel.model_validate(
        {
            "path": "/dummy.yaml",
            "configuration": {"run": {"context": {"example": value}}},
            "parts": [
                {
                    "prompt": "example:",
                    "type": "confirm",
                    "default": False,
                    "error": "{{ response }}: {{ example }}: This is wrong.",
                }
            ],
        }
    )
    with mock.patch("builtins.input", return_value=answer, autospec=True) as mocked_input:
        with pytest.raises(RuntimeError, match=rf"^{answer}: {value}: This is wrong\.$"):
            LocalTutorialRunner(tutorial).run()
    mocked_input.assert_called_once_with("example: ")
347+
348+
349+
def test_prompt_template() -> None:
    """Test that the prompt is rendered as a template."""
    tutorial = TutorialModel.model_validate(
        {
            "path": "/dummy.yaml",
            "configuration": {"run": {"context": {"example": "dest/"}}},
            "parts": [{"prompt": "Go to {{ example }}"}],
        }
    )
    with mock.patch("builtins.input", return_value="", autospec=True) as mocked_input:
        LocalTutorialRunner(tutorial).run()
    mocked_input.assert_called_once_with("Go to dest/ ")
362+
363+
258364
def test_temporary_directory(tmp_path: Path, fp: FakeProcess) -> None:
259365
"""Test running in temporary directory."""
260366
fp.register("pwd")

tests/sphinx/test_wrapper.py

Lines changed: 23 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -195,3 +195,26 @@ def test_multiple_parts_with_index_error() -> None:
195195
assert wrapper.render_part() == expected
196196
with pytest.raises(ExtensionError, match=r"No more parts left in tutorial\."):
197197
wrapper.render_part()
198+
199+
200+
@pytest.mark.parametrize(
    ("parts", "expected"),
    (
        (
            [
                {"commands": [{"command": "true 1"}]},
                {"prompt": "test"},
                {"commands": [{"command": "true 2"}]},
                {"commands": [{"command": "true 3"}]},
            ],
            ["user@host:~$ true 1\n", "user@host:~$ true 2\n", "user@host:~$ true 3\n"],
        ),
    ),
)
def test_prompt(parts: list[dict[str, object]], expected: list[str]) -> None:
    """Test rendering parts with an interleaved prompt: prompts are skipped, commands render."""
    # NOTE: the annotation previously claimed tuple[str, ...], but parametrize
    # supplies a list of part dicts.
    tutorial = TutorialModel.model_validate({"path": Path.cwd(), "parts": parts})
    wrapper = TutorialWrapper(tutorial)
    # The prompt part is skipped entirely, so exactly the three command parts render.
    for expected_console in expected:
        indented = textwrap.indent(expected_console, ' ')
        assert wrapper.render_part() == f".. code-block:: console\n\n{indented}"

0 commit comments

Comments
 (0)