Skip to content

Commit 9c8ab16

Browse files
committed
Refactor time-based tests to use fake and sharp loop time
Utilise a new library — https://github.com/nolar/looptime — to have a fake time in event loops. The library does a lot of things, but the most important one is the sharp time with predictable steps and **no code-execution overhead included** (that overhead is otherwise typically random). Everything else comes as a side-effect: e.g. the fast execution of such tests with zero real time for any fake duration of loop time, a convenient `looptime` fixture for assertions, etc. Signed-off-by: Sergey Vasilyev <[email protected]>
1 parent 06d0094 commit 9c8ab16

24 files changed

+401
-494
lines changed

pytest.ini

Lines changed: 1 addition & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -1,3 +1,4 @@
11
[pytest]
22
addopts =
33
--strict-markers
4+
--looptime

requirements.txt

Lines changed: 1 addition & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -12,6 +12,7 @@ coveralls
1212
freezegun
1313
import-linter
1414
isort>=5.5.0
15+
looptime
1516
lxml
1617
# Mypy requires typed-ast, which is broken on PyPy 3.7 (could work in PyPy 3.8).
1718
mypy==0.930; implementation_name == "cpython"

tests/apis/test_api_requests.py

Lines changed: 32 additions & 28 deletions
Original file line numberDiff line numberDiff line change
@@ -138,23 +138,23 @@ async def test_parsing_in_streams(
138138
(delete, 'delete'),
139139
])
140140
async def test_direct_timeout_in_requests(
141-
resp_mocker, aresponses, hostname, fn, method, settings, logger, timer):
141+
resp_mocker, aresponses, hostname, fn, method, settings, logger, looptime):
142142

143143
async def serve_slowly():
144-
await asyncio.sleep(1.0)
144+
await asyncio.sleep(10)
145145
return aiohttp.web.json_response({})
146146

147147
mock = resp_mocker(side_effect=serve_slowly)
148148
aresponses.add(hostname, '/url', method, mock)
149149

150-
with timer, pytest.raises(asyncio.TimeoutError):
151-
timeout = aiohttp.ClientTimeout(total=0.1)
150+
with pytest.raises(asyncio.TimeoutError):
151+
timeout = aiohttp.ClientTimeout(total=1.23)
152152
# aiohttp raises an asyncio.TimeoutError which is automatically retried.
153153
# To reduce the test duration we disable retries for this test.
154154
settings.networking.error_backoffs = None
155155
await fn('/url', timeout=timeout, settings=settings, logger=logger)
156156

157-
assert 0.1 < timer.seconds < 0.2
157+
assert looptime == 1.23
158158

159159

160160
@pytest.mark.parametrize('fn, method', [
@@ -164,84 +164,85 @@ async def serve_slowly():
164164
(delete, 'delete'),
165165
])
166166
async def test_settings_timeout_in_requests(
167-
resp_mocker, aresponses, hostname, fn, method, settings, logger, timer):
167+
resp_mocker, aresponses, hostname, fn, method, settings, logger, looptime):
168168

169169
async def serve_slowly():
170-
await asyncio.sleep(1.0)
170+
await asyncio.sleep(10)
171171
return aiohttp.web.json_response({})
172172

173173
mock = resp_mocker(side_effect=serve_slowly)
174174
aresponses.add(hostname, '/url', method, mock)
175175

176-
with timer, pytest.raises(asyncio.TimeoutError):
177-
settings.networking.request_timeout = 0.1
176+
with pytest.raises(asyncio.TimeoutError):
177+
settings.networking.request_timeout = 1.23
178178
# aiohttp raises an asyncio.TimeoutError which is automatically retried.
179179
# To reduce the test duration we disable retries for this test.
180180
settings.networking.error_backoffs = None
181181
await fn('/url', settings=settings, logger=logger)
182182

183-
assert 0.1 < timer.seconds < 0.2
183+
assert looptime == 1.23
184184

185185

186186
@pytest.mark.parametrize('method', ['get']) # the only supported method at the moment
187187
async def test_direct_timeout_in_streams(
188-
resp_mocker, aresponses, hostname, method, settings, logger, timer):
188+
resp_mocker, aresponses, hostname, method, settings, logger, looptime):
189189

190190
async def serve_slowly():
191-
await asyncio.sleep(1.0)
191+
await asyncio.sleep(10)
192192
return "{}"
193193

194194
mock = resp_mocker(side_effect=serve_slowly)
195195
aresponses.add(hostname, '/url', method, mock)
196196

197-
with timer, pytest.raises(asyncio.TimeoutError):
198-
timeout = aiohttp.ClientTimeout(total=0.1)
197+
with pytest.raises(asyncio.TimeoutError):
198+
timeout = aiohttp.ClientTimeout(total=1.23)
199199
# aiohttp raises an asyncio.TimeoutError which is automatically retried.
200200
# To reduce the test duration we disable retries for this test.
201201
settings.networking.error_backoffs = None
202202
async for _ in stream('/url', timeout=timeout, settings=settings, logger=logger):
203203
pass
204204

205-
assert 0.1 < timer.seconds < 0.2
205+
assert looptime == 1.23
206206

207207

208208
@pytest.mark.parametrize('method', ['get']) # the only supported method at the moment
209209
async def test_settings_timeout_in_streams(
210-
resp_mocker, aresponses, hostname, method, settings, logger, timer):
210+
resp_mocker, aresponses, hostname, method, settings, logger, looptime):
211211

212212
async def serve_slowly():
213-
await asyncio.sleep(1.0)
213+
await asyncio.sleep(10)
214214
return "{}"
215215

216216
mock = resp_mocker(side_effect=serve_slowly)
217217
aresponses.add(hostname, '/url', method, mock)
218218

219-
with timer, pytest.raises(asyncio.TimeoutError):
220-
settings.networking.request_timeout = 0.1
219+
with pytest.raises(asyncio.TimeoutError):
220+
settings.networking.request_timeout = 1.23
221221
# aiohttp raises an asyncio.TimeoutError which is automatically retried.
222222
# To reduce the test duration we disable retries for this test.
223223
settings.networking.error_backoffs = None
224224
async for _ in stream('/url', settings=settings, logger=logger):
225225
pass
226226

227-
assert 0.1 < timer.seconds < 0.2
227+
assert looptime == 1.23
228228

229229

230-
@pytest.mark.parametrize('delay, expected', [
231-
pytest.param(0.0, [], id='instant-none'),
232-
pytest.param(0.1, [{'fake': 'result1'}], id='fast-single'),
233-
pytest.param(9.9, [{'fake': 'result1'}, {'fake': 'result2'}], id='inf-double'),
230+
@pytest.mark.parametrize('delay, expected_times, expected_items', [
231+
pytest.param(0, [], [], id='instant-none'),
232+
pytest.param(2, [1], [{'fake': 'result1'}], id='fast-single'),
233+
pytest.param(9, [1, 4], [{'fake': 'result1'}, {'fake': 'result2'}], id='inf-double'),
234234
])
235235
@pytest.mark.parametrize('method', ['get']) # the only supported method at the moment
236236
async def test_stopper_in_streams(
237-
resp_mocker, aresponses, hostname, method, delay, expected, settings, logger):
237+
resp_mocker, aresponses, hostname, method, delay, settings, logger, looptime,
238+
expected_items, expected_times):
238239

239240
async def stream_slowly(request: aiohttp.ClientRequest):
240241
response = aiohttp.web.StreamResponse()
241242
await response.prepare(request)
242-
await asyncio.sleep(0.05)
243+
await asyncio.sleep(1)
243244
await response.write(b'{"fake": "result1"}\n')
244-
await asyncio.sleep(0.15)
245+
await asyncio.sleep(3)
245246
await response.write(b'{"fake": "result2"}\n')
246247
await response.write_eof()
247248
return response
@@ -252,7 +253,10 @@ async def stream_slowly(request: aiohttp.ClientRequest):
252253
asyncio.get_running_loop().call_later(delay, stopper.set_result, None)
253254

254255
items = []
256+
times = []
255257
async for item in stream('/url', stopper=stopper, settings=settings, logger=logger):
256258
items.append(item)
259+
times.append(float(looptime))
257260

258-
assert items == expected
261+
assert items == expected_items
262+
assert times == expected_times

tests/conftest.py

Lines changed: 6 additions & 64 deletions
Original file line numberDiff line numberDiff line change
@@ -6,7 +6,6 @@
66
import logging
77
import re
88
import sys
9-
import time
109
from unittest.mock import Mock
1110

1211
import aiohttp.web
@@ -552,69 +551,6 @@ def no_certvalidator():
552551
yield from _with_module_absent('certvalidator')
553552

554553

555-
#
556-
# Helpers for the timing checks.
557-
#
558-
559-
@pytest.fixture()
560-
def timer():
561-
return Timer()
562-
563-
564-
class Timer(object):
565-
"""
566-
A helper context manager to measure the time of the code-blocks.
567-
Also, supports direct comparison with time-deltas and the numbers of seconds.
568-
569-
Usage:
570-
571-
with Timer() as timer:
572-
do_something()
573-
print(f"Executing for {timer.seconds}s already.")
574-
do_something_else()
575-
576-
print(f"Executed in {timer.seconds}s.")
577-
assert timer < 5.0
578-
"""
579-
580-
def __init__(self):
581-
super().__init__()
582-
self._ts = None
583-
self._te = None
584-
585-
@property
586-
def seconds(self):
587-
if self._ts is None:
588-
return None
589-
elif self._te is None:
590-
return time.perf_counter() - self._ts
591-
else:
592-
return self._te - self._ts
593-
594-
def __repr__(self):
595-
status = 'new' if self._ts is None else 'running' if self._te is None else 'finished'
596-
return f'<Timer: {self.seconds}s ({status})>'
597-
598-
def __enter__(self):
599-
self._ts = time.perf_counter()
600-
self._te = None
601-
return self
602-
603-
def __exit__(self, exc_type, exc_val, exc_tb):
604-
self._te = time.perf_counter()
605-
606-
async def __aenter__(self):
607-
return self.__enter__()
608-
609-
async def __aexit__(self, exc_type, exc_val, exc_tb):
610-
return self.__exit__(exc_type, exc_val, exc_tb)
611-
612-
def __int__(self):
613-
return int(self.seconds)
614-
615-
def __float__(self):
616-
return float(self.seconds)
617-
618554
#
619555
# Helpers for the logging checks.
620556
#
@@ -725,3 +661,9 @@ def _no_asyncio_pending_tasks(event_loop):
725661
remains = after - before
726662
if remains:
727663
pytest.fail(f"Unattended asyncio tasks detected: {remains!r}")
664+
665+
666+
@pytest.fixture()
667+
def loop(event_loop):
668+
"""Sync aiohttp's server-side timeline with kopf's client-side timeline."""
669+
return event_loop

tests/handling/daemons/test_daemon_rematching.py

Lines changed: 3 additions & 4 deletions
Original file line numberDiff line numberDiff line change
@@ -5,7 +5,7 @@
55

66

77
async def test_running_daemon_is_stopped_when_mismatches(
8-
resource, dummy, timer, mocker, caplog, assert_logs, k8s_mocked, simulate_cycle):
8+
resource, dummy, looptime, mocker, caplog, assert_logs, k8s_mocked, simulate_cycle):
99
caplog.set_level(logging.DEBUG)
1010

1111
@kopf.daemon(*resource, id='fn', when=lambda **_: is_matching)
@@ -26,9 +26,8 @@ async def fn(**kwargs):
2626
mocker.resetall()
2727
is_matching = False
2828
await simulate_cycle({})
29-
with timer:
30-
await dummy.wait_for_daemon_done()
29+
await dummy.wait_for_daemon_done()
3130

32-
assert timer.seconds < 0.01 # near-instantly
31+
assert looptime == 0
3332
stopped = dummy.kwargs['stopped']
3433
assert DaemonStoppingReason.FILTERS_MISMATCH in stopped.reason

tests/handling/daemons/test_daemon_termination.py

Lines changed: 9 additions & 12 deletions
Original file line numberDiff line numberDiff line change
@@ -9,7 +9,7 @@
99

1010
async def test_daemon_exits_gracefully_and_instantly_on_resource_deletion(
1111
settings, resource, dummy, simulate_cycle,
12-
caplog, assert_logs, k8s_mocked, frozen_time, mocker, timer):
12+
caplog, assert_logs, k8s_mocked, frozen_time, mocker, looptime):
1313
caplog.set_level(logging.DEBUG)
1414

1515
# A daemon-under-test.
@@ -31,18 +31,17 @@ async def fn(**kwargs):
3131
await simulate_cycle(event_object)
3232

3333
# Check that the daemon has exited near-instantly, with no delays.
34-
with timer:
35-
await dummy.wait_for_daemon_done()
34+
await dummy.wait_for_daemon_done()
3635

37-
assert timer.seconds < 0.01 # near-instantly
36+
assert looptime == 0
3837
assert k8s_mocked.sleep.call_count == 0
3938
assert k8s_mocked.patch.call_count == 1
4039
assert k8s_mocked.patch.call_args_list[0][1]['payload']['metadata']['finalizers'] == []
4140

4241

4342
async def test_daemon_exits_gracefully_and_instantly_on_operator_exiting(
4443
settings, resource, dummy, simulate_cycle, background_daemon_killer,
45-
caplog, assert_logs, k8s_mocked, frozen_time, mocker, timer):
44+
caplog, assert_logs, k8s_mocked, frozen_time, mocker, looptime):
4645
caplog.set_level(logging.DEBUG)
4746

4847
# A daemon-under-test.
@@ -63,10 +62,9 @@ async def fn(**kwargs):
6362
background_daemon_killer.cancel()
6463

6564
# Check that the daemon has exited near-instantly, with no delays.
66-
with timer:
67-
await dummy.wait_for_daemon_done()
65+
await dummy.wait_for_daemon_done()
6866

69-
assert timer.seconds < 0.01 # near-instantly
67+
assert looptime == 0
7068
assert k8s_mocked.sleep.call_count == 0
7169
assert k8s_mocked.patch.call_count == 0
7270

@@ -78,7 +76,7 @@ async def fn(**kwargs):
7876
@pytest.mark.usefixtures('background_daemon_killer')
7977
async def test_daemon_exits_gracefully_and_instantly_on_operator_pausing(
8078
settings, memories, resource, dummy, simulate_cycle, conflicts_found,
81-
caplog, assert_logs, k8s_mocked, frozen_time, mocker, timer):
79+
caplog, assert_logs, k8s_mocked, frozen_time, mocker, looptime):
8280
caplog.set_level(logging.DEBUG)
8381

8482
# A daemon-under-test.
@@ -99,9 +97,8 @@ async def fn(**kwargs):
9997
await conflicts_found.turn_to(True)
10098

10199
# Check that the daemon has exited near-instantly, with no delays.
102-
with timer:
103-
await dummy.wait_for_daemon_done()
104-
assert timer.seconds < 0.01 # near-instantly
100+
await dummy.wait_for_daemon_done()
101+
assert looptime == 0
105102

106103
# There is no way to test for re-spawning here: it is done by watch-events,
107104
# which are tested by the paused operators elsewhere (test_daemon_spawning.py).

0 commit comments

Comments
 (0)