from __future__ import annotations

import shutil
from contextlib import contextmanager
from typing import Callable, Generator

import pytest
from _pytest.nodes import Node

from trezorlib.debuglink import TrezorClientDebugLink as Client

from . import common
from .common import SCREENS_DIR, UI_TESTS_DIR, TestCase, TestResult
from .reporting import testreport

# Suggested replacement fixtures, written when expected UI tests did not run.
FIXTURES_SUGGESTION_FILE = UI_TESTS_DIR / "fixtures.suggestion.json"
# Hashes actually produced by the latest test run.
FIXTURES_RESULTS_FILE = UI_TESTS_DIR / "fixtures.results.json"


def _process_recorded(result: TestResult) -> None:
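    """Store the hashes of the recorded screens and add the result to the report."""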
    # calculate hash
    result.store_recorded()
    testreport.recorded(result)


def _process_tested(result: TestResult, item: Node) -> None:
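    """Compare the recorded screens against the expected hash and log the outcome.

    UI-only problems are recorded in the report and in the pytest item's
    user_properties instead of failing the test immediately.
    """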
    if result.expected_hash is None:
        testreport.missing(result)
        item.user_properties.append(("ui_missing", None))
    elif result.actual_hash != result.expected_hash:
        testreport.failed(result)
        item.user_properties.append(("ui_failed", None))
    else:
        testreport.passed(result)


@contextmanager
def screen_recording(
    client: Client, request: pytest.FixtureRequest
) -> Generator[None, None, None]:
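    """Record screens during a test and evaluate the result.

    No-op unless the ``--ui`` option was given. In ``--ui=record`` mode the
    recording becomes the new expected state; otherwise it is compared
    against the expected hashes.
    """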
    test_ui = request.config.getoption("ui")
    if not test_ui:
        yield
        return

    record_text_layout = request.config.getoption("record_text_layout")

    testcase = TestCase.build(client, request)
    testcase.dir.mkdir(exist_ok=True, parents=True)

    # remove previous files
    shutil.rmtree(testcase.actual_dir, ignore_errors=True)
    testcase.actual_dir.mkdir()

    try:
        client.debug.start_recording(str(testcase.actual_dir))
        if record_text_layout:
            client.debug.set_screen_text_file(testcase.screen_text_file)
            client.debug.watch_layout(True)
        yield
    finally:
        client.ensure_open()
        client.sync_responses()
        # Wait for response to Initialize, which gives the emulator time to catch up
        # and redraw the homescreen. Otherwise there's a race condition between that
        # and stopping recording.
        if record_text_layout:
            client.debug.set_screen_text_file(None)
            client.debug.watch_layout(False)
        # Instead of client.init_device() we create a new management session
        client.get_management_session()
        client.debug.stop_recording()

    result = testcase.build_result(request)
    if test_ui == "record":
        _process_recorded(result)
    else:
        _process_tested(result, request.node)


def setup(main_runner: bool) -> None:
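    """Prepare for a UI test run and initialize the report.

    When ``main_runner`` is true (assumed to be the coordinating process when
    tests run in parallel), leftovers from previous runs are cleared first.
    """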
    # clear metadata and "actual" recordings before current run, keep "recorded" around
    if main_runner:
        for meta in SCREENS_DIR.glob("*/metadata.json"):
            meta.unlink()
            shutil.rmtree(meta.parent / "actual", ignore_errors=True)

    # clear testreport
    testreport.setup(main_runner)


def list_missing() -> set[str]:
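    """Return the ids of expected UI tests that did not produce a result."""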
    # Only listing the ones for the current model
    _, missing = common.prepare_fixtures(
        TestResult.recent_results(), remove_missing=True
    )
    return {test.id for test in missing}


def update_fixtures(remove_missing: bool = False) -> int:
    """Update the fixtures.json file with the actual hashes from the latest run.

    Used in --ui=record and in update_fixtures.py
    """
    results = list(TestResult.recent_results())
    for result in results:
        result.store_recorded()

    common.write_fixtures_complete(results, remove_missing=remove_missing)
    return len(results)


def _should_write_ui_report(exitstatus: pytest.ExitCode) -> bool:
    # generate UI report and check missing only if pytest is exiting cleanly,
    # i.e. the test suite passed or failed (as opposed to Ctrl+C, internal error, etc.)
    return exitstatus in (pytest.ExitCode.OK, pytest.ExitCode.TESTS_FAILED)


def terminal_summary(
    println: Callable[[str], None],
    ui_option: str,
    check_missing: bool,
    exitstatus: pytest.ExitCode,
) -> None:
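    """Print a summary of the UI test run via the ``println`` callback
    (presumably pytest's terminal reporter)."""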
    println("")

    normal_exit = _should_write_ui_report(exitstatus)
    missing_tests = list_missing()
    if ui_option and normal_exit and missing_tests:
        println(f"{len(missing_tests)} expected UI tests did not run.")
        if check_missing:
            println("-------- List of missing tests follows: --------")
            for test in missing_tests:
                println("\t" + test)

            if ui_option == "test":
                println("UI test failed.")
            elif ui_option == "record":
                println("Removing missing tests from record.")
            println("")

    if ui_option == "record" and exitstatus != pytest.ExitCode.OK:
        println(
            "\n-------- WARNING! Recording to fixtures.json was disabled due to failed tests. --------"
        )
        println("")

    if normal_exit:
        println("-------- UI tests summary: --------")
        for result in TestResult.recent_results():
            if result.passed and not result.ui_passed:
                println(f"UI_FAILED: {result.test.id} ({result.actual_hash})")
        println("Run ./tests/show_results.py to open test summary")
        println("")

        println("-------- Accepting all recent UI changes: --------")
        println("Run ./tests/update_fixtures.py to apply all changes")
        println("")


def sessionfinish(
    exitstatus: pytest.ExitCode,
    test_ui: str,
    check_missing: bool,
    record_text_layout: bool,
    do_master_diff: bool,
) -> pytest.ExitCode:
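    """Wrap up the UI portion of the session: generate reports, write fixture
    files, and turn UI-only failures or missing tests into a failing exit code."""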
    if not _should_write_ui_report(exitstatus):
        return exitstatus

    testreport.generate_reports(record_text_layout, do_master_diff)

    recents = list(TestResult.recent_results())

    if test_ui == "test":
        common.write_fixtures_only_new_results(recents, dest=FIXTURES_RESULTS_FILE)
        if any(t.passed and not t.ui_passed for t in recents):
            return pytest.ExitCode.TESTS_FAILED

    if test_ui == "test" and check_missing and list_missing():
        common.write_fixtures_complete(
            recents,
            remove_missing=True,
            dest=FIXTURES_SUGGESTION_FILE,
        )
        return pytest.ExitCode.TESTS_FAILED

    if test_ui == "record" and exitstatus == pytest.ExitCode.OK:
        update_fixtures(check_missing)

    return exitstatus


def main() -> None:
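    """Re-evaluate the most recent results outside of pytest (presumably invoked
    as a standalone helper) and regenerate the report."""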
    for result in TestResult.recent_results():
        # There is no pytest item here whose user_properties could be updated,
        # so classify each result directly, mirroring _process_tested().
        if result.expected_hash is None:
            testreport.missing(result)
            print("FAILED:", result.test.id)
        elif result.actual_hash != result.expected_hash:
            testreport.failed(result)
            print("FAILED:", result.test.id)
        else:
            testreport.passed(result)
            print("PASSED:", result.test.id)

    testreport.generate_reports()