1
0
mirror of https://github.com/trezor/trezor-firmware.git synced 2024-12-25 07:48:10 +00:00
trezor-firmware/tests/ui_tests/__init__.py

123 lines
3.5 KiB
Python
Raw Normal View History

2019-12-30 11:20:51 +00:00
import hashlib
import json
2019-12-30 11:20:51 +00:00
import re
import shutil
from contextlib import contextmanager
from pathlib import Path
import pytest
from .reporting import testreport
2019-12-30 11:20:51 +00:00
# Directory of this package; all screen fixtures live beneath it.
UI_TESTS_DIR = Path(__file__).parent.resolve()
# JSON file mapping test names to expected screenshot hashes.
HASH_FILE = UI_TESTS_DIR / "fixtures.json"
# test name -> expected screens hash; filled by read_fixtures() and _process_recorded().
HASHES = {}
# Names of tests recorded or compared during this run (see list_missing()).
PROCESSED = set()
2019-12-30 11:20:51 +00:00
2020-01-09 11:29:45 +00:00
def get_test_name(node_id):
    """Derive a filesystem-friendly name (max 100 chars) from a pytest node id.

    Parametrized tests can embed very long values (tx hashes etc.) in the node
    id that would exceed the maximum filename length, so the result is cut to
    its first 100 characters.
    """
    name = node_id.replace("tests/device_tests/", "")
    # drop "::TestClass::" if present — it usually repeats the test file name
    name = re.sub(r"::.*?::", "-", name)
    # any remaining "/" would create nested directories
    return name.replace("/", "-")[:100]
2019-12-30 11:20:51 +00:00
def _process_recorded(screen_path, test_name):
    """Hash freshly recorded screens into HASHES and normalize their filenames."""
    recorded_hash = _hash_files(screen_path)
    HASHES[test_name] = recorded_hash
    _rename_records(screen_path)
    PROCESSED.add(test_name)
2019-12-30 11:20:51 +00:00
def _rename_records(screen_path):
# rename screenshots
for index, record in enumerate(sorted(screen_path.iterdir())):
2020-01-06 14:44:30 +00:00
record.replace(screen_path / f"{index:08}.png")
2019-12-30 11:20:51 +00:00
2020-01-06 14:44:30 +00:00
def _hash_files(path):
files = path.iterdir()
2019-12-30 11:20:51 +00:00
hasher = hashlib.sha256()
for file in sorted(files):
2020-01-06 14:44:30 +00:00
hasher.update(file.read_bytes())
2019-12-30 11:20:51 +00:00
return hasher.digest().hex()
def _process_tested(fixture_test_path, test_name):
    """Compare freshly captured screens against the recorded fixture hash.

    Fails the running test (via pytest.fail) when the hashes differ; records
    a pass/fail entry in the HTML test report either way.

    Raises:
        ValueError: when *test_name* has no entry in the loaded fixtures.
    """
    expected_hash = HASHES.get(test_name)
    if expected_hash is None:
        raise ValueError("Hash for '%s' not found in fixtures.json" % test_name)
    PROCESSED.add(test_name)

    actual_path = fixture_test_path / "actual"
    actual_hash = _hash_files(actual_path)
    _rename_records(actual_path)

    if actual_hash == expected_hash:
        testreport.passed(fixture_test_path, test_name, actual_hash)
        return

    diff_file = testreport.failed(
        fixture_test_path, test_name, actual_hash, expected_hash
    )
    pytest.fail(
        "Hash of {} differs.\nExpected: {}\nActual: {}\nDiff file: {}".format(
            test_name, expected_hash, actual_hash, diff_file
        )
    )
2019-12-30 11:20:51 +00:00
@contextmanager
def screen_recording(client, request):
    """Record device screens for the duration of a test.

    With ``--ui=record`` the captured screens become the new fixture
    (hash stored via _process_recorded); otherwise they are compared
    against the stored fixture hash on exit (via _process_tested).
    """
    recording = request.config.getoption("ui") == "record"
    test_name = get_test_name(request.node.nodeid)
    screens_test_path = UI_TESTS_DIR / "screens" / test_name
    screen_path = screens_test_path / ("recorded" if recording else "actual")

    if not screens_test_path.exists():
        screens_test_path.mkdir()
    # start from an empty directory so stale screenshots cannot leak in
    shutil.rmtree(screen_path, ignore_errors=True)
    screen_path.mkdir()

    try:
        client.debug.start_recording(str(screen_path))
        yield
        # only reached when the test body did not raise
        if recording:
            _process_recorded(screen_path, test_name)
        else:
            _process_tested(screens_test_path, test_name)
    finally:
        client.debug.stop_recording()
def list_missing():
    """Return the fixture test names that were never run in this session."""
    return {name for name in HASHES if name not in PROCESSED}
def read_fixtures():
    """Load fixtures.json into the module-level HASHES mapping.

    Raises:
        ValueError: when the fixtures file does not exist.
    """
    global HASHES
    if not HASH_FILE.exists():
        raise ValueError("File fixtures.json not found.")
    HASHES = json.loads(HASH_FILE.read_text())
def write_fixtures(remove_missing: bool):
    """Persist HASHES to fixtures.json.

    When *remove_missing* is true, only entries for tests that actually ran
    (PROCESSED) are kept; otherwise the full mapping is written out.
    """
    content = (
        {name: HASHES[name] for name in PROCESSED} if remove_missing else HASHES
    )
    HASH_FILE.write_text(json.dumps(content, indent="", sort_keys=True) + "\n")