mirror of https://github.com/trezor/trezor-firmware.git
synced 2025-08-02 11:58:32 +00:00

feat(common, core, python, tests): Eth definitions in zip file

This commit is contained in:
parent aa835a692d
commit 865765db91

.gitignore (vendored)
@ -12,4 +12,5 @@ proto.gv*
.DS_Store
crypto/tests/libtrezor-crypto.so.dSYM/
/definitions-latest
/definitions-latest.zip
definitions-cache.json
BIN common/tests/fixtures/ethereum/definitions-latest.zip (vendored, new file)
Binary file not shown.
@ -90,30 +90,6 @@
|
||||
"sig_s": "2200f34790df5a71f7349eb941f11c294e480dadcf15e323851ae43845ee9a80"
|
||||
}
|
||||
},
|
||||
{
|
||||
"name": "builtin_Ethereum_builtin_Tether_wrong_network_token",
|
||||
"parameters": {
|
||||
"chain_id": 1,
|
||||
"path": "m/44'/60'/0'/0/0",
|
||||
"nonce": "0x0",
|
||||
"gas_price": "0x14",
|
||||
"gas_limit": "0x14",
|
||||
"value": "0x0",
|
||||
"to_address": "0xdac17f958d2ee523a2206206994597c13d831ec7",
|
||||
"tx_type": null,
|
||||
"data": "a9059cbb00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000",
|
||||
"definitions": {
|
||||
"chain_id": 8,
|
||||
"token_chain_id": 8,
|
||||
"to_address": "0x20e3dd746ddf519b23ffbbb6da7a5d33ea6349d6"
|
||||
}
|
||||
},
|
||||
"result": {
|
||||
"sig_v": 38,
|
||||
"sig_r": "a82c902c3c4d82e9a3d3894bf1298b14217c0d6149faaff26e254b7753063158",
|
||||
"sig_s": "2200f34790df5a71f7349eb941f11c294e480dadcf15e323851ae43845ee9a80"
|
||||
}
|
||||
},
|
||||
{
|
||||
"name": "builtin_Ethereum_builtin_Tether_wrong_network",
|
||||
"parameters": {
|
||||
@ -136,29 +112,6 @@
|
||||
"sig_s": "2200f34790df5a71f7349eb941f11c294e480dadcf15e323851ae43845ee9a80"
|
||||
}
|
||||
},
|
||||
{
|
||||
"name": "builtin_Ethereum_builtin_Tether_wrong_token",
|
||||
"parameters": {
|
||||
"chain_id": 1,
|
||||
"path": "m/44'/60'/0'/0/0",
|
||||
"nonce": "0x0",
|
||||
"gas_price": "0x14",
|
||||
"gas_limit": "0x14",
|
||||
"value": "0x0",
|
||||
"to_address": "0xdac17f958d2ee523a2206206994597c13d831ec7",
|
||||
"tx_type": null,
|
||||
"data": "a9059cbb00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000",
|
||||
"definitions": {
|
||||
"token_chain_id": 8,
|
||||
"to_address": "0x20e3dd746ddf519b23ffbbb6da7a5d33ea6349d6"
|
||||
}
|
||||
},
|
||||
"result": {
|
||||
"sig_v": 38,
|
||||
"sig_r": "a82c902c3c4d82e9a3d3894bf1298b14217c0d6149faaff26e254b7753063158",
|
||||
"sig_s": "2200f34790df5a71f7349eb941f11c294e480dadcf15e323851ae43845ee9a80"
|
||||
}
|
||||
},
|
||||
{
|
||||
"name": "builtin_Ethereum_extern_adChain_send_network_token",
|
||||
"parameters": {
|
||||
@ -245,29 +198,6 @@
|
||||
"sig_s": "2c08ebde47552e7d407767710c4fd48d8c66e71f28004d87170a430ecf7d56a6"
|
||||
}
|
||||
},
|
||||
{
|
||||
"name": "builtin_Ethereum_extern_adChain_wrong_token",
|
||||
"parameters": {
|
||||
"chain_id": 1,
|
||||
"path": "m/44'/60'/0'/0/0",
|
||||
"nonce": "0x0",
|
||||
"gas_price": "0x14",
|
||||
"gas_limit": "0x14",
|
||||
"value": "0x0",
|
||||
"to_address": "0xd0d6d6c5fe4a677d343cc433536bb717bae167dd",
|
||||
"tx_type": null,
|
||||
"data": "a9059cbb00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000",
|
||||
"definitions": {
|
||||
"token_chain_id": 8,
|
||||
"to_address": "0x20e3dd746ddf519b23ffbbb6da7a5d33ea6349d6"
|
||||
}
|
||||
},
|
||||
"result": {
|
||||
"sig_v": 38,
|
||||
"sig_r": "78e9bab4abda13d3cdc2e6dee69e96a7215ff4254b2b9a2321f970941925250c",
|
||||
"sig_s": "2c08ebde47552e7d407767710c4fd48d8c66e71f28004d87170a430ecf7d56a6"
|
||||
}
|
||||
},
|
||||
{
|
||||
"name": "extern_Ubiq_extern_Sphere_send_network_token",
|
||||
"parameters": {
|
||||
@ -308,29 +238,6 @@
|
||||
"sig_r": "1db04aa641665618467a2e40f840da081601c266050f951c8bb8688efd43a7d9",
|
||||
"sig_s": "5f4bb9b86814cb8647b7aad6e23c73ffbfb284109068d5d1061583bff252ec82"
|
||||
}
|
||||
},
|
||||
{
|
||||
"name": "extern_Ubiq_extern_Sphere_wrong_token",
|
||||
"parameters": {
|
||||
"chain_id": 8,
|
||||
"path": "m/44'/108'/0'/0/0",
|
||||
"nonce": "0x0",
|
||||
"gas_price": "0x14",
|
||||
"gas_limit": "0x14",
|
||||
"value": "0x0",
|
||||
"to_address": "0x20e3dd746ddf519b23ffbbb6da7a5d33ea6349d6",
|
||||
"tx_type": null,
|
||||
"data": "a9059cbb00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000",
|
||||
"definitions": {
|
||||
"token_chain_id": 1,
|
||||
"to_address": "0xd0d6d6c5fe4a677d343cc433536bb717bae167dd"
|
||||
}
|
||||
},
|
||||
"result": {
|
||||
"sig_v": 52,
|
||||
"sig_r": "1db04aa641665618467a2e40f840da081601c266050f951c8bb8688efd43a7d9",
|
||||
"sig_s": "5f4bb9b86814cb8647b7aad6e23c73ffbfb284109068d5d1061583bff252ec82"
|
||||
}
|
||||
}
|
||||
]
|
||||
}
|
||||
|
@ -90,75 +90,6 @@
|
||||
"sig_s": "47f36b4c02bad110bf7eb4fd71d6cd12bca7443bad3f31700a404302dbbb6e1c"
|
||||
}
|
||||
},
|
||||
{
|
||||
"name": "builtin_Ethereum_builtin_Tether_wrong_network_token",
|
||||
"parameters": {
|
||||
"data": "a9059cbb00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000",
|
||||
"path": "m/44'/60'/0'/0/0",
|
||||
"to_address": "0xdac17f958d2ee523a2206206994597c13d831ec7",
|
||||
"chain_id": 1,
|
||||
"nonce": "0x0",
|
||||
"gas_limit": "0x14",
|
||||
"max_gas_fee": "0x14",
|
||||
"max_priority_fee": "0x1",
|
||||
"value": "0x0",
|
||||
"definitions": {
|
||||
"chain_id": 8,
|
||||
"token_chain_id": 8,
|
||||
"to_address": "0x20e3dd746ddf519b23ffbbb6da7a5d33ea6349d6"
|
||||
}
|
||||
},
|
||||
"result": {
|
||||
"sig_v": 1,
|
||||
"sig_r": "8b5002dc26252120e89c5194a72fb96663930c3bb709487bec72132a0c32033f",
|
||||
"sig_s": "47f36b4c02bad110bf7eb4fd71d6cd12bca7443bad3f31700a404302dbbb6e1c"
|
||||
}
|
||||
},
|
||||
{
|
||||
"name": "builtin_Ethereum_builtin_Tether_wrong_network",
|
||||
"parameters": {
|
||||
"data": "a9059cbb00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000",
|
||||
"path": "m/44'/60'/0'/0/0",
|
||||
"to_address": "0xdac17f958d2ee523a2206206994597c13d831ec7",
|
||||
"chain_id": 1,
|
||||
"nonce": "0x0",
|
||||
"gas_limit": "0x14",
|
||||
"max_gas_fee": "0x14",
|
||||
"max_priority_fee": "0x1",
|
||||
"value": "0x0",
|
||||
"definitions": {
|
||||
"chain_id": 8
|
||||
}
|
||||
},
|
||||
"result": {
|
||||
"sig_v": 1,
|
||||
"sig_r": "8b5002dc26252120e89c5194a72fb96663930c3bb709487bec72132a0c32033f",
|
||||
"sig_s": "47f36b4c02bad110bf7eb4fd71d6cd12bca7443bad3f31700a404302dbbb6e1c"
|
||||
}
|
||||
},
|
||||
{
|
||||
"name": "builtin_Ethereum_builtin_Tether_wrong_token",
|
||||
"parameters": {
|
||||
"data": "a9059cbb00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000",
|
||||
"path": "m/44'/60'/0'/0/0",
|
||||
"to_address": "0xdac17f958d2ee523a2206206994597c13d831ec7",
|
||||
"chain_id": 1,
|
||||
"nonce": "0x0",
|
||||
"gas_limit": "0x14",
|
||||
"max_gas_fee": "0x14",
|
||||
"max_priority_fee": "0x1",
|
||||
"value": "0x0",
|
||||
"definitions": {
|
||||
"token_chain_id": 8,
|
||||
"to_address": "0x20e3dd746ddf519b23ffbbb6da7a5d33ea6349d6"
|
||||
}
|
||||
},
|
||||
"result": {
|
||||
"sig_v": 1,
|
||||
"sig_r": "8b5002dc26252120e89c5194a72fb96663930c3bb709487bec72132a0c32033f",
|
||||
"sig_s": "47f36b4c02bad110bf7eb4fd71d6cd12bca7443bad3f31700a404302dbbb6e1c"
|
||||
}
|
||||
},
|
||||
{
|
||||
"name": "builtin_Ethereum_extern_adChain_send_network_token",
|
||||
"parameters": {
|
||||
@ -245,29 +176,6 @@
|
||||
"sig_s": "60758c000ee9d276972191c200ef92d395cecf0a9586d65449c73057ae561883"
|
||||
}
|
||||
},
|
||||
{
|
||||
"name": "builtin_Ethereum_extern_adChain_wrong_token",
|
||||
"parameters": {
|
||||
"data": "a9059cbb00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000",
|
||||
"path": "m/44'/60'/0'/0/0",
|
||||
"to_address": "0xd0d6d6c5fe4a677d343cc433536bb717bae167dd",
|
||||
"chain_id": 1,
|
||||
"nonce": "0x0",
|
||||
"gas_limit": "0x14",
|
||||
"max_gas_fee": "0x14",
|
||||
"max_priority_fee": "0x1",
|
||||
"value": "0x0",
|
||||
"definitions": {
|
||||
"token_chain_id": 8,
|
||||
"to_address": "0x20e3dd746ddf519b23ffbbb6da7a5d33ea6349d6"
|
||||
}
|
||||
},
|
||||
"result": {
|
||||
"sig_v": 0,
|
||||
"sig_r": "a29ac8d36f0b49b12fcde74878593a75b4990062e8c722da602d4a48733382a2",
|
||||
"sig_s": "60758c000ee9d276972191c200ef92d395cecf0a9586d65449c73057ae561883"
|
||||
}
|
||||
},
|
||||
{
|
||||
"name": "extern_Ubiq_extern_Sphere_send_network_token",
|
||||
"parameters": {
|
||||
@ -308,29 +216,6 @@
|
||||
"sig_r": "1607bb4422b8cd139228f0d4f48521f6b249f2daeb7a254f9e9e998fd8dd6b68",
|
||||
"sig_s": "4a17515b78d7d70fc8df8b17f602f0cd57a0afe2417cc893cd3cda706ab018fa"
|
||||
}
|
||||
},
|
||||
{
|
||||
"name": "extern_Ubiq_extern_Sphere_wrong_token",
|
||||
"parameters": {
|
||||
"data": "a9059cbb00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000",
|
||||
"path": "m/44'/108'/0'/0/0",
|
||||
"to_address": "0x20e3dd746ddf519b23ffbbb6da7a5d33ea6349d6",
|
||||
"chain_id": 8,
|
||||
"nonce": "0x0",
|
||||
"gas_limit": "0x14",
|
||||
"max_gas_fee": "0x14",
|
||||
"max_priority_fee": "0x1",
|
||||
"value": "0x0",
|
||||
"definitions": {
|
||||
"token_chain_id": 1,
|
||||
"to_address": "0xd0d6d6c5fe4a677d343cc433536bb717bae167dd"
|
||||
}
|
||||
},
|
||||
"result": {
|
||||
"sig_v": 1,
|
||||
"sig_r": "1607bb4422b8cd139228f0d4f48521f6b249f2daeb7a254f9e9e998fd8dd6b68",
|
||||
"sig_s": "4a17515b78d7d70fc8df8b17f602f0cd57a0afe2417cc893cd3cda706ab018fa"
|
||||
}
|
||||
}
|
||||
]
|
||||
}
|
||||
|
@ -9,8 +9,8 @@ import logging
|
||||
import os
|
||||
import pathlib
|
||||
import re
|
||||
import shutil
|
||||
import sys
|
||||
import zipfile
|
||||
from binascii import hexlify
|
||||
from collections import defaultdict
|
||||
from typing import Any, TextIO, cast
|
||||
@ -23,8 +23,8 @@ from requests.adapters import HTTPAdapter
|
||||
from urllib3.util.retry import Retry
|
||||
|
||||
from coin_info import Coin, Coins, load_json
|
||||
from merkle_tree import MerkleTree
|
||||
from trezorlib import protobuf
|
||||
from trezorlib.merkle_tree import MerkleTree
|
||||
from trezorlib.messages import (
|
||||
EthereumDefinitionType,
|
||||
EthereumNetworkInfo,
|
||||
@ -459,12 +459,11 @@ def check_tokens_collisions(tokens: list[dict], old_tokens: list[dict] | None) -
|
||||
|
||||
|
||||
def check_bytes_size(
|
||||
value: bytes, max_size: int, label: str, prompt: bool = True
|
||||
actual_size: int, max_size: int, label: str, prompt: bool = True
|
||||
) -> tuple[bool, bool]:
|
||||
"""Check value (of type bytes) size and return tuple - size check and user response"""
|
||||
encoded_size = len(value)
|
||||
if encoded_size > max_size:
|
||||
title = f"Bytes in {label} definition is too long ({encoded_size} > {max_size})"
|
||||
"""Check the actual size and return tuple - size check result and user response"""
|
||||
if actual_size > max_size:
|
||||
title = f"Bytes in {label} definition is too long ({actual_size} > {max_size})"
|
||||
title += " and will be removed from the results" if not prompt else ""
|
||||
print(f"== {title} ==")
|
||||
|
||||
@ -1093,10 +1092,12 @@ def prepare_definitions(
|
||||
)
|
||||
@click.option(
|
||||
"-o",
|
||||
"--outdir",
|
||||
type=click.Path(resolve_path=True, file_okay=False, path_type=pathlib.Path),
|
||||
default="./definitions-latest",
|
||||
help="Path where the generated definitions will be saved. Path will be erased!",
|
||||
"--outfile",
|
||||
type=click.Path(
|
||||
resolve_path=True, dir_okay=False, writable=True, path_type=pathlib.Path
|
||||
),
|
||||
default="./definitions-latest.zip",
|
||||
help="File where the generated definitions will be saved in zip format. Any existing file will be overwritten!",
|
||||
)
|
||||
@click.option(
|
||||
"-k",
|
||||
@ -1107,15 +1108,22 @@ def prepare_definitions(
|
||||
@click.option(
|
||||
"-s",
|
||||
"--signedroot",
|
||||
help="Signed Merkle tree root hash to be added",
|
||||
help="Signed Merkle tree root hash to be added.",
|
||||
)
|
||||
@click.option("-v", "--verbose", is_flag=True, help="Display more info.")
|
||||
@click.option(
|
||||
"-p",
|
||||
"--include-proof",
|
||||
is_flag=True,
|
||||
help="Include Merkle tree proofs into binary blobs.",
|
||||
)
|
||||
@click.option("-v", "--verbose", is_flag=True, help="Display more info")
|
||||
def sign_definitions(
|
||||
deffile: pathlib.Path,
|
||||
outdir: pathlib.Path,
|
||||
outfile: pathlib.Path,
|
||||
publickey: TextIO,
|
||||
signedroot: str,
|
||||
verbose: bool,
|
||||
include_proof: bool,
|
||||
) -> None:
|
||||
"""Generate signed Ethereum definitions for python-trezor and others.
|
||||
If ran without `--publickey` and/or `--signedroot` it prints the computed Merkle tree root hash.
|
||||
@ -1128,49 +1136,10 @@ def sign_definitions(
|
||||
"Options `--publickey` and `--signedroot` must be used together."
|
||||
)
|
||||
|
||||
def save_definition(directory: pathlib.Path, keys: list[str], data: bytes):
|
||||
complete_file_path = directory / ("_".join(keys) + ".dat")
|
||||
|
||||
if complete_file_path.exists():
|
||||
raise click.ClickException(
|
||||
f'Definition "{complete_file_path}" already generated - attempt to generate another definition.'
|
||||
)
|
||||
|
||||
directory.mkdir(parents=True, exist_ok=True)
|
||||
with open(complete_file_path, mode="wb+") as f:
|
||||
f.write(data)
|
||||
|
||||
def generate_token_def(token: Coin):
|
||||
if token["address"] is not None and token["chain_id"] is not None:
|
||||
# save token definition
|
||||
save_definition(
|
||||
outdir / "by_chain_id" / str(token["chain_id"]),
|
||||
["token", token["address"][2:].lower()],
|
||||
token["serialized"],
|
||||
)
|
||||
|
||||
def generate_network_def(network: Coin):
|
||||
if network["chain_id"] is None:
|
||||
return
|
||||
|
||||
# create path for networks identified by chain and slip44 ids
|
||||
network_dir = outdir / "by_chain_id" / str(network["chain_id"])
|
||||
slip44_dir = outdir / "by_slip44" / str(network["slip44"])
|
||||
# save network definition
|
||||
save_definition(network_dir, ["network"], network["serialized"])
|
||||
|
||||
try:
|
||||
# for slip44 == 60 save only Ethereum
|
||||
if network["slip44"] != 60 or network["chain_id"] == 1:
|
||||
save_definition(slip44_dir, ["network"], network["serialized"])
|
||||
except click.ClickException:
|
||||
pass
|
||||
|
||||
# load prepared definitions
|
||||
timestamp, networks, tokens = _load_prepared_definitions(deffile)
|
||||
|
||||
# serialize definitions
|
||||
definitions_by_serialization: dict[bytes, dict] = {}
|
||||
for network in networks:
|
||||
ser = serialize_eth_info(
|
||||
eth_info_from_dict(network, EthereumNetworkInfo),
|
||||
@ -1178,7 +1147,6 @@ def sign_definitions(
|
||||
timestamp,
|
||||
)
|
||||
network["serialized"] = ser
|
||||
definitions_by_serialization[ser] = network
|
||||
for token in tokens:
|
||||
ser = serialize_eth_info(
|
||||
eth_info_from_dict(token, EthereumTokenInfo),
|
||||
@ -1186,13 +1154,15 @@ def sign_definitions(
|
||||
timestamp,
|
||||
)
|
||||
token["serialized"] = ser
|
||||
definitions_by_serialization[ser] = token
|
||||
|
||||
# sort encoded definitions
|
||||
sorted_defs = [network["serialized"] for network in networks] + [
|
||||
token["serialized"] for token in tokens
|
||||
]
|
||||
sorted_defs.sort()
|
||||
|
||||
# build Merkle tree
|
||||
mt = MerkleTree(
|
||||
[network["serialized"] for network in networks]
|
||||
+ [token["serialized"] for token in tokens]
|
||||
)
|
||||
mt = MerkleTree(sorted_defs)
|
||||
|
||||
# print or check tree root hash
|
||||
if publickey is None:
|
||||
@ -1209,43 +1179,104 @@ def sign_definitions(
|
||||
f"Provided `--signedroot` value is not valid for computed Merkle tree root hash ({hexlify(mt.get_root_hash())})."
|
||||
)
|
||||
|
||||
# update definitions
|
||||
signedroot_bytes = bytes.fromhex(signedroot)
|
||||
for ser, proof in mt.get_proofs().items():
|
||||
definition = definitions_by_serialization[ser]
|
||||
def save_definition(
|
||||
path: pathlib.PurePath, keys: list[str], data: bytes, zip_file: zipfile.ZipFile
|
||||
):
|
||||
complete_path = path / ("_".join(keys) + ".dat")
|
||||
|
||||
try:
|
||||
if zip_file.getinfo(str(complete_path)):
|
||||
logging.warning(
|
||||
f'Definition "{complete_path}" already generated - attempt to generate another definition.'
|
||||
)
|
||||
except KeyError:
|
||||
pass
|
||||
|
||||
zip_file.writestr(str(complete_path), data)
|
||||
|
||||
def generate_token_def(
|
||||
token: Coin, base_path: pathlib.PurePath, zip_file: zipfile.ZipFile
|
||||
):
|
||||
if token["address"] is not None and token["chain_id"] is not None:
|
||||
# save token definition
|
||||
save_definition(
|
||||
base_path / "by_chain_id" / str(token["chain_id"]),
|
||||
["token", token["address"][2:].lower()],
|
||||
token["serialized"],
|
||||
zip_file,
|
||||
)
|
||||
|
||||
def generate_network_def(
|
||||
network: Coin, base_path: pathlib.PurePath, zip_file: zipfile.ZipFile
|
||||
):
|
||||
if network["chain_id"] is None:
|
||||
return
|
||||
|
||||
# create path for networks identified by chain and slip44 ids
|
||||
network_dir = base_path / "by_chain_id" / str(network["chain_id"])
|
||||
slip44_dir = base_path / "by_slip44" / str(network["slip44"])
|
||||
# save network definition
|
||||
save_definition(network_dir, ["network"], network["serialized"], zip_file)
|
||||
|
||||
# for slip44 == 60 save only Ethereum and for slip44 == 1 save only Goerli
|
||||
if network["slip44"] not in (60, 1) or network["chain_id"] in (1, 420):
|
||||
save_definition(slip44_dir, ["network"], network["serialized"], zip_file)
|
||||
|
||||
def add_proof_to_def(definition: dict) -> None:
|
||||
proof = proofs_dict[definition["serialized"]]
|
||||
# append number of hashes in proof
|
||||
definition["serialized"] += len(proof).to_bytes(1, "big")
|
||||
# append proof itself
|
||||
for p in proof:
|
||||
definition["serialized"] += p
|
||||
# append signed tree root hash
|
||||
definition["serialized"] += signedroot_bytes
|
||||
|
||||
# clear defs directory
|
||||
if outdir.exists():
|
||||
shutil.rmtree(outdir)
|
||||
outdir.mkdir(parents=True)
|
||||
# add proofs (if requested) and signed tree root hash, check serialized size of the definitions and add it to a zip
|
||||
signed_root_bytes = bytes.fromhex(signedroot)
|
||||
|
||||
# check serialized size of the definitions and generate a file for it
|
||||
# update definitions
|
||||
proofs_dict = mt.get_proofs()
|
||||
|
||||
base_path = pathlib.PurePath("definitions-latest")
|
||||
with zipfile.ZipFile(outfile, mode="w") as zip_file:
|
||||
for network in networks:
|
||||
if include_proof:
|
||||
add_proof_to_def(network)
|
||||
# append signed tree root hash
|
||||
network["serialized"] += signed_root_bytes
|
||||
|
||||
network_serialized_length = len(network["serialized"])
|
||||
if not include_proof:
|
||||
# consider size of the proofs that will be added later by user before sending to the device
|
||||
network_serialized_length += mt.get_tree_height() * 32
|
||||
|
||||
check, _ = check_bytes_size(
|
||||
network["serialized"],
|
||||
network_serialized_length,
|
||||
1024,
|
||||
f"network {network['name']} (chain_id={network['chain_id']})",
|
||||
False,
|
||||
)
|
||||
if check:
|
||||
generate_network_def(network)
|
||||
generate_network_def(network, base_path, zip_file)
|
||||
|
||||
for token in tokens:
|
||||
if include_proof:
|
||||
add_proof_to_def(token)
|
||||
# append signed tree root hash
|
||||
token["serialized"] += signed_root_bytes
|
||||
|
||||
token_serialized_length = len(token["serialized"])
|
||||
if not include_proof:
|
||||
# consider size of the proofs that will be added later by user before sending to the device
|
||||
token_serialized_length += mt.get_tree_height() * 32
|
||||
|
||||
check, _ = check_bytes_size(
|
||||
token["serialized"],
|
||||
token_serialized_length,
|
||||
1024,
|
||||
f"token {token['name']} (chain_id={token['chain_id']}, address={token['address']})",
|
||||
False,
|
||||
)
|
||||
if check:
|
||||
generate_token_def(token)
|
||||
generate_token_def(token, base_path, zip_file)
|
||||
|
||||
|
||||
if __name__ == "__main__":
|
||||
|
@ -1,92 +0,0 @@
|
||||
from typing import Optional
|
||||
|
||||
try:
|
||||
from trezor.crypto.hashlib import sha256
|
||||
except ImportError:
|
||||
from hashlib import sha256
|
||||
|
||||
|
||||
class Node:
|
||||
"""
|
||||
Single node of Merkle tree.
|
||||
"""
|
||||
|
||||
def __init__(
|
||||
self: "Node",
|
||||
*,
|
||||
left: Optional["Node"] = None,
|
||||
right: Optional["Node"] = None,
|
||||
raw_value: Optional[bytes] = None,
|
||||
) -> None:
|
||||
self.is_leaf = raw_value is not None
|
||||
self.raw_value = raw_value
|
||||
|
||||
if self.is_leaf and (left is not None or right is not None):
|
||||
raise ValueError(
|
||||
"Cannot use `raw_value` together with `left` and/or `right` value."
|
||||
)
|
||||
|
||||
self.hash = None
|
||||
self.left_child = left
|
||||
self.right_child = right
|
||||
self.proof_list: list[bytes] = []
|
||||
|
||||
def compute_hash(self) -> bytes:
|
||||
if not self.hash:
|
||||
if self.is_leaf:
|
||||
self.hash = sha256(b"\x00" + self.raw_value).digest()
|
||||
else:
|
||||
left_hash = self.left_child.compute_hash()
|
||||
right_hash = self.right_child.compute_hash()
|
||||
hash_a = min(left_hash, right_hash)
|
||||
hash_b = max(left_hash, right_hash)
|
||||
self.hash = sha256(b"\x01" + hash_a + hash_b).digest()
|
||||
|
||||
# distribute proof
|
||||
self.left_child.add_to_proof(right_hash)
|
||||
self.right_child.add_to_proof(left_hash)
|
||||
|
||||
return self.hash
|
||||
|
||||
def add_to_proof(self, proof: bytes) -> None:
|
||||
self.proof_list.append(proof)
|
||||
if not self.is_leaf:
|
||||
self.left_child.add_to_proof(proof)
|
||||
self.right_child.add_to_proof(proof)
|
||||
|
||||
|
||||
class MerkleTree:
|
||||
"""
|
||||
Simple Merkle tree that implements the building of Merkle tree itself and generate proofs
|
||||
for leaf nodes.
|
||||
"""
|
||||
|
||||
def __init__(self, values: list[bytes]) -> None:
|
||||
self.leaves = [Node(raw_value=v) for v in values]
|
||||
|
||||
# build the tree
|
||||
current_level = [n for n in self.leaves]
|
||||
while len(current_level) > 1:
|
||||
# build one level of the tree
|
||||
next_level = []
|
||||
while len(current_level) // 2:
|
||||
left_node = current_level.pop(0)
|
||||
right_node = current_level.pop(0)
|
||||
next_level.append(Node(left=left_node, right=right_node))
|
||||
|
||||
if len(current_level) == 1:
|
||||
# odd number of nodes on current level so last node will be "joined" on another level
|
||||
next_level.append(current_level.pop(0))
|
||||
|
||||
# switch levels and continue
|
||||
current_level = next_level
|
||||
|
||||
# set root and compute hash
|
||||
self.root_node = current_level[0]
|
||||
self.root_node.compute_hash()
|
||||
|
||||
def get_proofs(self) -> dict[bytes, list[bytes]]:
|
||||
return {n.raw_value: n.proof_list for n in self.leaves}
|
||||
|
||||
def get_root_hash(self) -> bytes:
|
||||
return self.root_node.hash
|
@ -16,7 +16,7 @@ if TYPE_CHECKING:
    DefType = TypeVar("DefType", EthereumNetworkInfo, EthereumTokenInfo)

DEFINITIONS_PUBLIC_KEY = b""
MIN_DATA_VERSION = 1
MIN_DATA_VERSION = 1  # TODO: update
FORMAT_VERSION = b"trzd1"

if __debug__:

@ -99,7 +99,7 @@ def get_reference_ethereum_network_info(chain_id: int | None = None, slip44: int
    if cid is not None:
        return NETWORKS.get(cid[0])

    return network if network else etworks.UNKNOWN_NETWORK
    return network if network else networks.UNKNOWN_NETWORK


def get_reference_ethereum_token_info(chain_id: int, token_address: str) -> messages.EthereumTokenInfo:
@ -11,3 +11,7 @@ Definitions are binary encoded and have a special format:
1. length of the Merkle tree proof - number of hashes in the proof (unsigned integer, 1 byte)
2. proof - individual hashes used to compute the Merkle tree root hash (plain bits, N*32 bytes)
3. signed Merkle tree root hash (plain bits, 64 bytes)

This complete format must be used when a definition is sent to the device, but we generate definitions
without parts 3.1 (Merkle tree proof length) and 3.2 (the proof itself). For more information, see
the [External definitions documentation](../ethereum-definitions.md#external-definitions).
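For clarity, here is a minimal sketch (not part of the repository) of how the three trailing parts described above can be appended to a stored blob before it is sent to the device; `payload`, `proof` and `signed_root` are illustrative names, not repository API.

```python
# Hedged sketch: assemble the complete binary definition described above.
# `payload` is the serialized definition without the proof parts, `proof` is a
# list of 32-byte sibling hashes, `signed_root` is the 64-byte signed root hash.
from typing import List


def complete_definition(payload: bytes, proof: List[bytes], signed_root: bytes) -> bytes:
    assert all(len(h) == 32 for h in proof)
    assert len(signed_root) == 64

    blob = payload
    blob += len(proof).to_bytes(1, "big")  # 3.1 proof length (1 byte)
    for sibling in proof:                  # 3.2 proof hashes (N*32 bytes)
        blob += sibling
    blob += signed_root                    # 3.3 signed Merkle tree root hash
    return blob
```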
@ -12,17 +12,28 @@ Location of these definitions is for:
* networks - [`networks.json`](https://github.com/trezor/trezor-firmware/blob/master/common/defs/ethereum/networks.json)
* tokens - [`tokens.json`](https://github.com/trezor/trezor-firmware/blob/master/common/defs/ethereum/tokens.json)

Built-in definitions are written by hand and are not subject to any generation described
below.
Sources for built-in definitions (namely the files `../../common/defs/ethereum/networks.json`
and `../../common/defs/ethereum/tokens.json`) are written by hand and are not subject
to any generation described below.

## External definitions

Generated binary blobs are first saved locally (by the script that generates them)
and then published online. By "external" is meant definitions not hardcoded in firmware.
and then published online. By "external" is meant all the definitions for which binary
blobs are generated - so essentially all definitions, because blobs are generated
for built-in definitions as well.

#### File structure
Saved definitions (binary blobs) are stored in a directory (e.g. `definitions-latest/`)
with a specific file structure:
Generated blobs are saved as a zip file. To save some space, the zipped definitions
do not contain the Merkle tree "proof part" (the proof length and the proof itself -
see the [definition binary format](communication/ethereum-definitions-binary-format.md)
for more details). The Merkle tree proof is the most repetitive part of the definitions
and can easily be computed when needed. The script
[`python/tools/eth_defs_unpack.py`](https://github.com/trezor/trezor-firmware/blob/master/python/tools/eth_defs_unpack.py)
unpacks the zip and completes all of the definitions.
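As a condensed sketch of what that unpack step amounts to, the snippet below uses the `get_all_completed_definitions_from_zip` helper added to trezorlib in this commit; the zip path and output directory are illustrative.

```python
# Minimal sketch of the unpack/complete step, mirroring python/tools/eth_defs_unpack.py.
import pathlib

from trezorlib import ethereum

defs_zip = pathlib.Path("definitions-latest.zip")
outdir = pathlib.Path(".")

# Returns {path inside zip -> completed definition (proof and signed root appended)}
completed = ethereum.get_all_completed_definitions_from_zip(defs_zip)
for path, definition in completed.items():
    if not definition:
        continue
    target = outdir / path
    target.parent.mkdir(parents=True, exist_ok=True)
    target.write_bytes(definition)
```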
#### Zip/directory file structure
Saved definitions (binary blobs) are stored in a zip file with a specific file
structure (the same as the structure of the completed, unpacked definitions):
````
definitions-latest/
├── by_chain_id/
@ -57,7 +68,7 @@ Notice that token definitions are only accessible by `CHAIN_ID` and `TOKEN_ADDRE
Generated binary definitions are available online at a [publicly accessible website](https://data.trezor.io/eth_definitions) # TODO: update url.

To get the desired definition (one at a time), the URL can be composed in multiple ways
and the structure is the same as described in the [file structure](#file-structure)
and the structure is the same as described in the [Zip/directory file structure](#zipdirectory-file-structure)
section. The base URL format is `https://data.trezor.io/eth_definitions/LOOKUP_TYPE/ID/NAME`
where:
* `LOOKUP_TYPE` is one of `by_chain_id` or `by_slip44`
@ -65,7 +76,7 @@ where:
* `NAME` is either:
  * `network.dat` for a network definition at the given chain ID or SLIP44 ID, or
  * `token_"TOKEN_ADDRESS".dat` for a token definition at the given chain ID and token address
    (see the [file structure](#file-structure) section on how to format the token address)
    (see the [Zip/directory file structure](#zipdirectory-file-structure) section on how to format the token address)

Definitions can also be downloaded in one request as a ZIP file at https://data.trezor.io/eth_definitions/definitions.zip # TODO: update url.
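As an illustration, a hedged sketch of composing such a URL and downloading a single network definition with `requests`; the base URL is still marked `# TODO: update url` above, so treat it as provisional.

```python
# Sketch: build a definition URL as LOOKUP_TYPE/ID/NAME and fetch the blob.
import requests

BASE = "https://data.trezor.io/eth_definitions/{lookup_type}/{id}/{name}"

# Network definition for chain_id 1 (Ethereum):
url = BASE.format(lookup_type="by_chain_id", id=1, name="network.dat")

resp = requests.get(url)
resp.raise_for_status()
network_definition = resp.content  # binary blob, see the binary-format document
```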
@ -73,7 +84,7 @@ Definitions could be also downloaded by one request in a ZIP file at https://dat

Automatic manipulation with the definitions is implemented in the `trezorctl` tool.
All Ethereum commands that work with definitions contain options
to automatically download online definitions or use locally stored definitions.
to automatically download online definition(s) or use locally stored definition(s).
For more info, see the `trezorctl` [documentation](https://github.com/trezor/trezor-firmware/blob/master/python/docs/README.rst).

@ -125,8 +136,9 @@ and the root hash is computed.
and the root hash is computed again. Then this hash is verified against the signed hash
from the previous step using the public key to ensure that nothing has changed. If everything
is ok, the binary encoded (see the
[definition binary format](communication/ethereum-definitions-binary-format.md) section)
definitions are generated to a directory with a specific [file structure](#file-structure).
[definition binary format](communication/ethereum-definitions-binary-format.md) document)
definitions are generated into a zip file with a specific file structure
(see the [Zip/directory file structure](#zipdirectory-file-structure) section).

### 3. Publish the definitions

@ -18,9 +18,7 @@ import json
|
||||
import pathlib
|
||||
import re
|
||||
import sys
|
||||
import tarfile
|
||||
from decimal import Decimal
|
||||
from io import BytesIO
|
||||
from typing import (
|
||||
NoReturn,
|
||||
TYPE_CHECKING,
|
||||
@ -73,6 +71,8 @@ ETHER_UNITS = {
|
||||
# So that we can import the web3 library only when really used and reuse the instance
|
||||
_WEB3_INSTANCE: Optional["web3.Web3"] = None
|
||||
|
||||
DEFS_ZIP_FILENAME = "definitions-latest.zip"
|
||||
|
||||
|
||||
def _print_eth_dependencies_and_die() -> NoReturn:
|
||||
click.echo("Ethereum requirements not installed.")
|
||||
@ -157,7 +157,7 @@ def _erc20_contract(token_address: str, to_address: str, amount: int) -> str:
|
||||
"outputs": [{"name": "", "type": "bool"}],
|
||||
}
|
||||
]
|
||||
contract = _get_web3().eth.contract(address=token_address, abi=min_abi) # type: ignore [Argument of type "str" cannot be assigned to parameter "address" of type "Address | ChecksumAddress | ENS" in function "contract"]
|
||||
contract = _get_web3().eth.contract(address=token_address, abi=min_abi)
|
||||
return contract.encodeABI("transfer", [to_address, amount])
|
||||
|
||||
|
||||
@ -170,7 +170,7 @@ def _format_access_list(
|
||||
|
||||
|
||||
def _get_ethereum_definitions(
|
||||
definitions_dir: Optional[pathlib.Path] = None,
|
||||
definitions_zip: Optional[pathlib.Path] = None,
|
||||
network_def_file: Optional[BinaryIO] = None,
|
||||
token_def_file: Optional[BinaryIO] = None,
|
||||
download_definitions: bool = False,
|
||||
@ -181,7 +181,7 @@ def _get_ethereum_definitions(
|
||||
count_of_options_used = sum(
|
||||
bool(o)
|
||||
for o in (
|
||||
definitions_dir,
|
||||
definitions_zip,
|
||||
(network_def_file or token_def_file),
|
||||
download_definitions,
|
||||
)
|
||||
@ -196,16 +196,16 @@ def _get_ethereum_definitions(
|
||||
slip44 = UH_(slip44_hardened)
|
||||
|
||||
defs = ethereum.messages.EthereumDefinitions()
|
||||
if definitions_dir is not None:
|
||||
if definitions_zip is not None:
|
||||
if chain_id is not None or slip44 is not None:
|
||||
defs.encoded_network = ethereum.get_definition_from_path(
|
||||
ethereum.get_network_definition_path(definitions_dir, chain_id, slip44)
|
||||
defs.encoded_network = ethereum.get_definition_from_zip(
|
||||
definitions_zip,
|
||||
ethereum.get_network_definition_path(chain_id, slip44),
|
||||
)
|
||||
if chain_id is not None and token_address is not None:
|
||||
defs.encoded_token = ethereum.get_definition_from_path(
|
||||
ethereum.get_token_definition_path(
|
||||
definitions_dir, chain_id, token_address
|
||||
)
|
||||
defs.encoded_token = ethereum.get_definition_from_zip(
|
||||
definitions_zip,
|
||||
ethereum.get_token_definition_path(chain_id, token_address),
|
||||
)
|
||||
elif network_def_file is not None or token_def_file is not None:
|
||||
if network_def_file is not None:
|
||||
@ -232,30 +232,30 @@ def _get_ethereum_definitions(
|
||||
# commands start here
|
||||
|
||||
|
||||
definitions_dir_option = click.option(
|
||||
"--definitions-dir",
|
||||
definitions_zip_option = click.option(
|
||||
"--definitions-zip",
|
||||
type=click.Path(
|
||||
exists=True, file_okay=False, resolve_path=True, path_type=pathlib.Path
|
||||
exists=True, dir_okay=False, resolve_path=True, path_type=pathlib.Path
|
||||
),
|
||||
help="Directory with stored definitions. Directory structure must be the same as the command "
|
||||
"`trezorctl ethereum download-definitions` outputs.it is when. Mutually exclusive with `--network-def`, `--token-def` "
|
||||
help="Zip file with stored definitions. Zip file could be obtained using command "
|
||||
"`trezorctl ethereum download-definitions`. Mutually exclusive with `--network-def`, `--token-def` "
|
||||
"and `--download-definitions`.",
|
||||
)
|
||||
network_def_option = click.option(
|
||||
"--network-def",
|
||||
type=click.File(mode="rb"),
|
||||
help="Binary file with network definition. Mutually exclusive with `--definitions-dir` and `--download-definitions`.",
|
||||
help="Binary file with network definition. Mutually exclusive with `--definitions-zip` and `--download-definitions`.",
|
||||
)
|
||||
token_def_options = click.option(
|
||||
"--token-def",
|
||||
type=click.File(mode="rb"),
|
||||
help="Binary file with token definition. Mutually exclusive with `--definitions-dir` and `--download-definitions`.",
|
||||
help="Binary file with token definition. Mutually exclusive with `--definitions-zip` and `--download-definitions`.",
|
||||
)
|
||||
download_definitions_option = click.option(
|
||||
"--download-definitions",
|
||||
is_flag=True,
|
||||
help="Automatically download required definitions from `data.trezor.io/definitions/???` and use them. "
|
||||
"Mutually exclusive with `--definitions-dir`, `--network-def` and `--token-def`.", # TODO: add link?, replace this ur with function used to download defs
|
||||
"Mutually exclusive with `--definitions-zip`, `--network-def` and `--token-def`.", # TODO: add link?, replace this ur with function used to download defs
|
||||
)
|
||||
|
||||
|
||||
@ -267,35 +267,32 @@ def cli() -> None:
|
||||
@cli.command()
|
||||
@click.option(
|
||||
"-o",
|
||||
"--outdir",
|
||||
type=click.Path(resolve_path=True, file_okay=False, path_type=pathlib.Path),
|
||||
default="./definitions-latest",
|
||||
"--outfile",
|
||||
type=click.Path(
|
||||
resolve_path=True, dir_okay=False, writable=True, path_type=pathlib.Path
|
||||
),
|
||||
default=f"./{DEFS_ZIP_FILENAME}",
|
||||
help="File path to use to save downloaded definitions. Existing file will be overwritten!",
|
||||
)
|
||||
@click.option("-u", "--unpack", is_flag=True)
|
||||
def download_definitions(outdir: pathlib.Path, unpack: bool) -> None:
|
||||
"""Download all Ethereum network and token definitions and save them."""
|
||||
archive_filename = "definitions.tar.gz"
|
||||
def download_definitions(outfile: pathlib.Path) -> None:
|
||||
"""Download all Ethereum network and token definitions stored in zip file
|
||||
and save them to `outfile`.
|
||||
"""
|
||||
|
||||
# TODO: change once we know the urls
|
||||
archived_definitions = ethereum.download_from_url(
|
||||
"https://data.trezor.io/eth_definitions/" + archive_filename
|
||||
"https://data.trezor.io/eth_definitions/" + DEFS_ZIP_FILENAME
|
||||
)
|
||||
|
||||
# unpack and/or save
|
||||
if unpack:
|
||||
# TODO: implement once we know archive format
|
||||
file_io = BytesIO(archived_definitions)
|
||||
with tarfile.open(fileobj=file_io, mode="r:gz") as tar:
|
||||
tar.extractall(outdir)
|
||||
else:
|
||||
with open(archive_filename, mode="wb+") as f:
|
||||
# save
|
||||
with open(outfile, mode="wb+") as f:
|
||||
f.write(archived_definitions)
|
||||
|
||||
|
||||
@cli.command()
|
||||
@click.option("-n", "--address", required=True, help=PATH_HELP)
|
||||
@click.option("-d", "--show-display", is_flag=True)
|
||||
@definitions_dir_option
|
||||
@definitions_zip_option
|
||||
@network_def_option
|
||||
@download_definitions_option
|
||||
@with_client
|
||||
@ -303,14 +300,14 @@ def get_address(
|
||||
client: "TrezorClient",
|
||||
address: str,
|
||||
show_display: bool,
|
||||
definitions_dir: pathlib.Path,
|
||||
definitions_zip: pathlib.Path,
|
||||
network_def: BinaryIO,
|
||||
download_definitions: bool,
|
||||
) -> str:
|
||||
"""Get Ethereum address in hex encoding."""
|
||||
address_n = tools.parse_path(address)
|
||||
defs = _get_ethereum_definitions(
|
||||
definitions_dir=definitions_dir,
|
||||
definitions_zip=definitions_zip,
|
||||
network_def_file=network_def,
|
||||
download_definitions=download_definitions,
|
||||
slip44_hardened=address_n[1],
|
||||
@ -391,7 +388,7 @@ def get_public_node(
|
||||
)
|
||||
@click.argument("to_address")
|
||||
@click.argument("amount", callback=_amount_to_int)
|
||||
@definitions_dir_option
|
||||
@definitions_zip_option
|
||||
@network_def_option
|
||||
@token_def_options
|
||||
@download_definitions_option
|
||||
@ -413,7 +410,7 @@ def sign_tx(
|
||||
max_priority_fee: Optional[int],
|
||||
access_list: List[ethereum.messages.EthereumAccessList],
|
||||
eip2718_type: Optional[int],
|
||||
definitions_dir: pathlib.Path,
|
||||
definitions_zip: pathlib.Path,
|
||||
network_def: BinaryIO,
|
||||
token_def: BinaryIO,
|
||||
download_definitions: bool,
|
||||
@ -457,7 +454,7 @@ def sign_tx(
|
||||
sys.exit(1)
|
||||
|
||||
defs = _get_ethereum_definitions(
|
||||
definitions_dir=definitions_dir,
|
||||
definitions_zip=definitions_zip,
|
||||
network_def_file=network_def,
|
||||
token_def_file=token_def,
|
||||
download_definitions=download_definitions,
|
||||
@ -476,7 +473,7 @@ def sign_tx(
|
||||
amount = 0
|
||||
# to_address has changed, reload definitions
|
||||
defs = _get_ethereum_definitions(
|
||||
definitions_dir=definitions_dir,
|
||||
definitions_zip=definitions_zip,
|
||||
network_def_file=network_def,
|
||||
token_def_file=token_def,
|
||||
download_definitions=download_definitions,
|
||||
@ -588,7 +585,7 @@ def sign_tx(
|
||||
@cli.command()
|
||||
@click.option("-n", "--address", required=True, help=PATH_HELP)
|
||||
@click.argument("message")
|
||||
@definitions_dir_option
|
||||
@definitions_zip_option
|
||||
@network_def_option
|
||||
@download_definitions_option
|
||||
@with_client
|
||||
@ -596,14 +593,14 @@ def sign_message(
|
||||
client: "TrezorClient",
|
||||
address: str,
|
||||
message: str,
|
||||
definitions_dir: pathlib.Path,
|
||||
definitions_zip: pathlib.Path,
|
||||
network_def: BinaryIO,
|
||||
download_definitions: bool,
|
||||
) -> Dict[str, str]:
|
||||
"""Sign message with Ethereum address."""
|
||||
address_n = tools.parse_path(address)
|
||||
defs = _get_ethereum_definitions(
|
||||
definitions_dir=definitions_dir,
|
||||
definitions_zip=definitions_zip,
|
||||
network_def_file=network_def,
|
||||
download_definitions=download_definitions,
|
||||
slip44_hardened=address_n[1],
|
||||
@ -625,7 +622,7 @@ def sign_message(
|
||||
help="Be compatible with Metamask's signTypedData_v4 implementation",
|
||||
)
|
||||
@click.argument("file", type=click.File("r"))
|
||||
@definitions_dir_option
|
||||
@definitions_zip_option
|
||||
@network_def_option
|
||||
@token_def_options
|
||||
@download_definitions_option
|
||||
@ -635,7 +632,7 @@ def sign_typed_data(
|
||||
address: str,
|
||||
metamask_v4_compat: bool,
|
||||
file: TextIO,
|
||||
definitions_dir: pathlib.Path,
|
||||
definitions_zip: pathlib.Path,
|
||||
network_def: BinaryIO,
|
||||
token_def: BinaryIO,
|
||||
download_definitions: bool,
|
||||
@ -649,7 +646,7 @@ def sign_typed_data(
|
||||
address_n = tools.parse_path(address)
|
||||
data = json.loads(file.read())
|
||||
defs = _get_ethereum_definitions(
|
||||
definitions_dir=definitions_dir,
|
||||
definitions_zip=definitions_zip,
|
||||
network_def_file=network_def,
|
||||
token_def_file=token_def,
|
||||
download_definitions=download_definitions,
|
||||
@ -689,7 +686,7 @@ def verify_message(
|
||||
@click.option("-n", "--address", required=True, help=PATH_HELP)
|
||||
@click.argument("domain_hash_hex")
|
||||
@click.argument("message_hash_hex")
|
||||
@definitions_dir_option
|
||||
@definitions_zip_option
|
||||
@network_def_option
|
||||
@download_definitions_option
|
||||
@with_client
|
||||
@ -698,7 +695,7 @@ def sign_typed_data_hash(
|
||||
address: str,
|
||||
domain_hash_hex: str,
|
||||
message_hash_hex: str,
|
||||
definitions_dir: pathlib.Path,
|
||||
definitions_zip: pathlib.Path,
|
||||
network_def: BinaryIO,
|
||||
download_definitions: bool,
|
||||
) -> Dict[str, str]:
|
||||
@ -713,7 +710,7 @@ def sign_typed_data_hash(
|
||||
domain_hash = ethereum.decode_hex(domain_hash_hex)
|
||||
message_hash = ethereum.decode_hex(message_hash_hex) if message_hash_hex else None
|
||||
defs = _get_ethereum_definitions(
|
||||
definitions_dir=definitions_dir,
|
||||
definitions_zip=definitions_zip,
|
||||
network_def_file=network_def,
|
||||
download_definitions=download_definitions,
|
||||
slip44_hardened=address_n[1],
|
||||
|
@ -16,11 +16,12 @@
|
||||
|
||||
import pathlib
|
||||
import re
|
||||
import zipfile
|
||||
from typing import TYPE_CHECKING, Any, AnyStr, Dict, List, Optional, Tuple
|
||||
|
||||
import requests
|
||||
|
||||
from . import exceptions, messages
|
||||
from . import exceptions, merkle_tree, messages
|
||||
from .tools import expect, prepare_message_bytes, session
|
||||
|
||||
if TYPE_CHECKING:
|
||||
@ -31,6 +32,7 @@ if TYPE_CHECKING:
|
||||
|
||||
# TODO: change once we know the urls
|
||||
DEFS_BASE_URL = "https://data.trezor.io/eth_definitions/{lookup_type}/{id}/{name}"
|
||||
DEFS_ZIP_TOPLEVEL_DIR = pathlib.PurePath("definitions-latest")
|
||||
DEFS_NETWORK_BY_CHAINID_LOOKUP_TYPE = "by_chain_id"
|
||||
DEFS_NETWORK_BY_SLIP44_LOOKUP_TYPE = "by_slip44"
|
||||
DEFS_NETWORK_URI_NAME = "network.dat"
|
||||
@ -201,10 +203,9 @@ def get_token_definition_url(chain_id: int = None, token_address: str = None) ->
|
||||
|
||||
|
||||
def get_network_definition_path(
|
||||
base_path: pathlib.Path,
|
||||
chain_id: Optional[int] = None,
|
||||
slip44: Optional[int] = None,
|
||||
) -> pathlib.Path:
|
||||
) -> pathlib.PurePath:
|
||||
if not ((chain_id is None) != (slip44 is None)): # not XOR
|
||||
raise ValueError(
|
||||
"Exactly one of chain_id or slip44 parameters are needed to construct network definition path."
|
||||
@ -212,14 +213,14 @@ def get_network_definition_path(
|
||||
|
||||
if chain_id is not None:
|
||||
return (
|
||||
base_path
|
||||
DEFS_ZIP_TOPLEVEL_DIR
|
||||
/ DEFS_NETWORK_BY_CHAINID_LOOKUP_TYPE
|
||||
/ str(chain_id)
|
||||
/ DEFS_NETWORK_URI_NAME
|
||||
)
|
||||
else:
|
||||
return (
|
||||
base_path
|
||||
DEFS_ZIP_TOPLEVEL_DIR
|
||||
/ DEFS_NETWORK_BY_SLIP44_LOOKUP_TYPE
|
||||
/ str(slip44)
|
||||
/ DEFS_NETWORK_URI_NAME
|
||||
@ -227,10 +228,9 @@ def get_network_definition_path(
|
||||
|
||||
|
||||
def get_token_definition_path(
|
||||
base_path: pathlib.Path,
|
||||
chain_id: int = None,
|
||||
token_address: str = None,
|
||||
) -> pathlib.Path:
|
||||
) -> pathlib.PurePath:
|
||||
if chain_id is None or token_address is None:
|
||||
raise ValueError(
|
||||
"Both chain_id and token_address parameters are needed to construct token definition path."
|
||||
@ -241,21 +241,62 @@ def get_token_definition_path(
|
||||
addr = addr[2:]
|
||||
|
||||
return (
|
||||
base_path
|
||||
DEFS_ZIP_TOPLEVEL_DIR
|
||||
/ DEFS_NETWORK_BY_CHAINID_LOOKUP_TYPE
|
||||
/ str(chain_id)
|
||||
/ DEFS_TOKEN_URI_NAME.format(hex_address=addr)
|
||||
)
|
||||
|
||||
|
||||
def get_definition_from_path(
|
||||
path: pathlib.Path,
|
||||
) -> Optional[bytes]:
|
||||
if not path.exists() or not path.is_file():
|
||||
return None
|
||||
def get_all_completed_definitions_from_zip(
|
||||
zip_file: pathlib.Path,
|
||||
) -> Dict[str, Optional[bytes]]:
|
||||
if not zip_file.exists() or not zip_file.is_file():
|
||||
return {}
|
||||
|
||||
with open(path, mode="rb") as f:
|
||||
return f.read()
|
||||
all_definitions_dict = {}
|
||||
mt_leaves = []
|
||||
signature = None
|
||||
|
||||
with zipfile.ZipFile(zip_file) as zf:
|
||||
names = zf.namelist()
|
||||
signature = zf.read(names[0])[-64:]
|
||||
|
||||
for name in names:
|
||||
all_definitions_dict[name] = zf.read(name)[:-64]
|
||||
if name.startswith(
|
||||
str(DEFS_ZIP_TOPLEVEL_DIR / DEFS_NETWORK_BY_CHAINID_LOOKUP_TYPE)
|
||||
):
|
||||
mt_leaves.append(all_definitions_dict[name])
|
||||
|
||||
# sort encoded definitions
|
||||
mt_leaves.sort()
|
||||
|
||||
# build Merkle tree
|
||||
mt = merkle_tree.MerkleTree(mt_leaves)
|
||||
proofs = mt.get_proofs()
|
||||
|
||||
# complete definitions
|
||||
for path, definition in all_definitions_dict.items():
|
||||
proof = proofs[definition]
|
||||
# append number of hashes in proof
|
||||
all_definitions_dict[path] += len(proof).to_bytes(1, "big")
|
||||
# append proof itself
|
||||
for p in proof:
|
||||
all_definitions_dict[path] += p
|
||||
# append signed tree root hash
|
||||
all_definitions_dict[path] += signature
|
||||
|
||||
return all_definitions_dict
|
||||
|
||||
|
||||
def get_definition_from_zip(
|
||||
zip_file: pathlib.Path,
|
||||
path_inside_zip: pathlib.PurePath,
|
||||
) -> Optional[bytes]:
|
||||
all_defs = get_all_completed_definitions_from_zip(zip_file)
|
||||
|
||||
return all_defs.get(str(path_inside_zip))
|
||||
|
||||
|
||||
# ====== Client functions ====== #
|
||||
|
python/src/trezorlib/merkle_tree.py (new executable file, 115 lines)
@ -0,0 +1,115 @@
|
||||
# This file is part of the Trezor project.
|
||||
#
|
||||
# Copyright (C) 2012-2022 SatoshiLabs and contributors
|
||||
#
|
||||
# This library is free software: you can redistribute it and/or modify
|
||||
# it under the terms of the GNU Lesser General Public License version 3
|
||||
# as published by the Free Software Foundation.
|
||||
#
|
||||
# This library is distributed in the hope that it will be useful,
|
||||
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
# GNU Lesser General Public License for more details.
|
||||
#
|
||||
# You should have received a copy of the License along with this library.
|
||||
# If not, see <https://www.gnu.org/licenses/lgpl-3.0.html>.
|
||||
|
||||
from hashlib import sha256
|
||||
from typing import Dict, List, Optional
|
||||
|
||||
|
||||
class Node:
|
||||
"""
|
||||
Single node of Merkle tree.
|
||||
"""
|
||||
|
||||
def __init__(
|
||||
self: "Node",
|
||||
*,
|
||||
left: Optional["Node"] = None,
|
||||
right: Optional["Node"] = None,
|
||||
raw_value: Optional[bytes] = None,
|
||||
) -> None:
|
||||
self.raw_value = raw_value
|
||||
|
||||
if self.raw_value and (left is not None or right is not None):
|
||||
raise ValueError(
|
||||
"Cannot use `raw_value` together with `left` and/or `right` value."
|
||||
)
|
||||
|
||||
if not self.raw_value and (left is None or right is None):
|
||||
raise ValueError(
|
||||
"`left` and `right` value must not be None when not using `raw_value`."
|
||||
)
|
||||
|
||||
self.hash = None
|
||||
self.left_child = left
|
||||
self.right_child = right
|
||||
self.proof_list: List[bytes] = []
|
||||
|
||||
def compute_hash(self) -> bytes:
|
||||
if not self.hash:
|
||||
if self.raw_value:
|
||||
self.hash = sha256(b"\x00" + self.raw_value).digest()
|
||||
else:
|
||||
left_hash = self.left_child.compute_hash() # type: ignore ["compute_hash" is not a known member of "None"]
|
||||
right_hash = self.right_child.compute_hash() # type: ignore ["compute_hash" is not a known member of "None"]
|
||||
hash_a = min(left_hash, right_hash)
|
||||
hash_b = max(left_hash, right_hash)
|
||||
self.hash = sha256(b"\x01" + hash_a + hash_b).digest()
|
||||
|
||||
# distribute proof
|
||||
self.left_child.add_to_proof(right_hash) # type: ignore ["add_to_proof" is not a known member of "None"]
|
||||
self.right_child.add_to_proof(left_hash) # type: ignore ["add_to_proof" is not a known member of "None"]
|
||||
|
||||
return self.hash
|
||||
|
||||
def add_to_proof(self, proof: bytes) -> None:
|
||||
self.proof_list.append(proof)
|
||||
if not self.raw_value:
|
||||
self.left_child.add_to_proof(proof) # type: ignore ["add_to_proof" is not a known member of "None"]
|
||||
self.right_child.add_to_proof(proof) # type: ignore ["add_to_proof" is not a known member of "None"]
|
||||
|
||||
|
||||
class MerkleTree:
|
||||
"""
|
||||
Simple Merkle tree that implements the building of Merkle tree itself and generate proofs
|
||||
for leaf nodes.
|
||||
"""
|
||||
|
||||
def __init__(self, values: List[bytes]) -> None:
|
||||
self.leaves = [Node(raw_value=v) for v in values]
|
||||
self.height = 0
|
||||
|
||||
# build the tree
|
||||
current_level = [n for n in self.leaves]
|
||||
while len(current_level) > 1:
|
||||
# build one level of the tree
|
||||
next_level = []
|
||||
while len(current_level) // 2:
|
||||
left_node = current_level.pop()
|
||||
right_node = current_level.pop()
|
||||
next_level.append(Node(left=left_node, right=right_node))
|
||||
|
||||
if len(current_level) == 1:
|
||||
# odd number of nodes on current level so last node will be "joined" on another level
|
||||
next_level.append(current_level.pop())
|
||||
|
||||
# switch levels and continue
|
||||
self.height += 1
|
||||
current_level = next_level
|
||||
|
||||
# set root and compute hash
|
||||
self.root_node = current_level[0]
|
||||
self.root_node.compute_hash()
|
||||
|
||||
def get_proofs(self) -> Dict[bytes, List[bytes]]:
|
||||
return {
|
||||
n.raw_value: n.proof_list for n in self.leaves if n.raw_value is not None
|
||||
}
|
||||
|
||||
def get_tree_height(self) -> int:
|
||||
return self.height
|
||||
|
||||
def get_root_hash(self) -> bytes:
|
||||
return self.root_node.hash # type: ignore [Expression of type "bytes | None" cannot be assigned to return type "bytes"]
|
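For reference, a minimal usage sketch of the `MerkleTree` class added above; it assumes a trezorlib build that already contains this new module, and the leaf values are arbitrary placeholders.

```python
# Usage sketch for trezorlib.merkle_tree.MerkleTree with made-up leaf values.
from binascii import hexlify

from trezorlib.merkle_tree import MerkleTree

leaves = [b"definition-a", b"definition-b", b"definition-c"]
tree = MerkleTree(sorted(leaves))  # the signing script sorts serialized definitions first

root = tree.get_root_hash()
proofs = tree.get_proofs()         # {leaf value -> list of sibling hashes}

print("root:", hexlify(root).decode())
print("tree height:", tree.get_tree_height())
print("proof length for first leaf:", len(proofs[leaves[0]]))
```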
python/tools/eth_defs_unpack.py (new executable file, 67 lines)
@ -0,0 +1,67 @@
|
||||
#!/usr/bin/env python3
|
||||
|
||||
# This file is part of the Trezor project.
|
||||
#
|
||||
# Copyright (C) 2012-2022 SatoshiLabs and contributors
|
||||
#
|
||||
# This library is free software: you can redistribute it and/or modify
|
||||
# it under the terms of the GNU Lesser General Public License version 3
|
||||
# as published by the Free Software Foundation.
|
||||
#
|
||||
# This library is distributed in the hope that it will be useful,
|
||||
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
# GNU Lesser General Public License for more details.
|
||||
#
|
||||
# You should have received a copy of the License along with this library.
|
||||
# If not, see <https://www.gnu.org/licenses/lgpl-3.0.html>.
|
||||
|
||||
import pathlib
|
||||
import click
|
||||
|
||||
from trezorlib import ethereum
|
||||
from trezorlib.cli import ethereum as ethereum_cli
|
||||
|
||||
|
||||
@click.command()
|
||||
@click.option(
|
||||
"-z", "--definitions-zip",
|
||||
type=click.Path(
|
||||
exists=True, dir_okay=False, resolve_path=True, path_type=pathlib.Path
|
||||
),
|
||||
default=f"./{ethereum_cli.DEFS_ZIP_FILENAME}",
|
||||
help="Zip file with stored definitions. Zip file could be obtained using command"
|
||||
"`trezorctl ethereum download-definitions`.",
|
||||
)
|
||||
@click.option(
|
||||
"-o",
|
||||
"--outdir",
|
||||
type=click.Path(
|
||||
resolve_path=True, file_okay=False, writable=True, path_type=pathlib.Path
|
||||
),
|
||||
default="./",
|
||||
help="Directory path where the definitions will be unpacked. Zip file contains top level"
|
||||
f"dir \"{ethereum.DEFS_ZIP_TOPLEVEL_DIR}\" and this will be unpacked to desired \"outdir\""
|
||||
"path. Any colliding directories will be overwritten!",
|
||||
)
|
||||
def unpack_definitions(
|
||||
definitions_zip: pathlib.Path,
|
||||
outdir: pathlib.Path,
|
||||
) -> None:
|
||||
"""Script that unpacks and completes (insert missing Merkle Tree proofs
|
||||
into the definitions) the Ethereum definitions (networks and tokens).
|
||||
"""
|
||||
all_defs = ethereum.get_all_completed_definitions_from_zip(definitions_zip)
|
||||
|
||||
for path, definition in all_defs.items():
|
||||
if not definition:
|
||||
continue
|
||||
|
||||
filepath = outdir / path
|
||||
filepath.parent.mkdir(parents=True, exist_ok=True)
|
||||
with open(filepath, mode="wb+") as f:
|
||||
f.write(definition)
|
||||
|
||||
|
||||
if __name__ == "__main__":
|
||||
unpack_definitions()
|
@ -35,11 +35,11 @@ def test_getaddress(client: Client, parameters, result):
|
||||
"slip44", encoded_network_slip44
|
||||
)
|
||||
|
||||
encoded_network = ethereum.get_definition_from_path(
|
||||
encoded_network = ethereum.get_definition_from_zip(
|
||||
COMMON_FIXTURES_DIR / "ethereum" / "definitions-latest.zip",
|
||||
ethereum.get_network_definition_path(
|
||||
base_path=COMMON_FIXTURES_DIR / "ethereum" / "definitions-latest",
|
||||
slip44=encoded_network_slip44,
|
||||
)
|
||||
),
|
||||
)
|
||||
assert (
|
||||
ethereum.get_address(client, address_n, encoded_network=encoded_network)
|
||||
|
@ -25,6 +25,8 @@ from ...common import COMMON_FIXTURES_DIR, parametrize_using_common_fixtures
|
||||
|
||||
SHOW_MORE = (143, 167)
|
||||
|
||||
DEFS_ZIP_FILE_PATH = COMMON_FIXTURES_DIR / "ethereum" / "definitions-latest.zip"
|
||||
|
||||
pytestmark = [pytest.mark.altcoin, pytest.mark.ethereum]
|
||||
|
||||
|
||||
@ -40,11 +42,11 @@ def test_ethereum_sign_typed_data(client: Client, parameters, result):
|
||||
)
|
||||
|
||||
defs = ethereum.messages.EthereumDefinitions(
|
||||
encoded_network=ethereum.get_definition_from_path(
|
||||
encoded_network=ethereum.get_definition_from_zip(
|
||||
DEFS_ZIP_FILE_PATH,
|
||||
ethereum.get_network_definition_path(
|
||||
base_path=COMMON_FIXTURES_DIR / "ethereum" / "definitions-latest",
|
||||
slip44=encoded_network_slip44,
|
||||
)
|
||||
),
|
||||
)
|
||||
)
|
||||
|
||||
@ -88,11 +90,11 @@ def test_ethereum_sign_typed_data_blind(client: Client, parameters, result):
|
||||
"slip44", encoded_network_slip44
|
||||
)
|
||||
|
||||
encoded_network = ethereum.get_definition_from_path(
|
||||
encoded_network = ethereum.get_definition_from_zip(
|
||||
DEFS_ZIP_FILE_PATH,
|
||||
ethereum.get_network_definition_path(
|
||||
base_path=COMMON_FIXTURES_DIR / "ethereum" / "definitions-latest",
|
||||
slip44=encoded_network_slip44,
|
||||
)
|
||||
),
|
||||
)
|
||||
ret = ethereum.sign_typed_data_hash(
|
||||
client,
|
||||
@ -207,11 +209,11 @@ def input_flow_cancel(client: Client):
|
||||
@pytest.mark.skip_t1
|
||||
def test_ethereum_sign_typed_data_show_more_button(client: Client):
|
||||
defs = ethereum.messages.EthereumDefinitions(
|
||||
encoded_network=ethereum.get_definition_from_path(
|
||||
encoded_network=ethereum.get_definition_from_zip(
|
||||
DEFS_ZIP_FILE_PATH,
|
||||
ethereum.get_network_definition_path(
|
||||
base_path=COMMON_FIXTURES_DIR / "ethereum" / "definitions-latest",
|
||||
slip44=60,
|
||||
)
|
||||
),
|
||||
)
|
||||
)
|
||||
|
||||
@ -230,11 +232,11 @@ def test_ethereum_sign_typed_data_show_more_button(client: Client):
|
||||
@pytest.mark.skip_t1
|
||||
def test_ethereum_sign_typed_data_cancel(client: Client):
|
||||
defs = ethereum.messages.EthereumDefinitions(
|
||||
encoded_network=ethereum.get_definition_from_path(
|
||||
encoded_network=ethereum.get_definition_from_zip(
|
||||
DEFS_ZIP_FILE_PATH,
|
||||
ethereum.get_network_definition_path(
|
||||
base_path=COMMON_FIXTURES_DIR / "ethereum" / "definitions-latest",
|
||||
slip44=60,
|
||||
)
|
||||
),
|
||||
)
|
||||
)
|
||||
|
||||
|
@ -35,11 +35,11 @@ def test_signmessage(client: Client, parameters, result):
|
||||
"slip44", encoded_network_slip44
|
||||
)
|
||||
|
||||
encoded_network = ethereum.get_definition_from_path(
|
||||
encoded_network = ethereum.get_definition_from_zip(
|
||||
COMMON_FIXTURES_DIR / "ethereum" / "definitions-latest.zip",
|
||||
ethereum.get_network_definition_path(
|
||||
base_path=COMMON_FIXTURES_DIR / "ethereum" / "definitions-latest",
|
||||
slip44=encoded_network_slip44,
|
||||
)
|
||||
),
|
||||
)
|
||||
res = ethereum.sign_message(client, address_n, parameters["msg"], encoded_network)
|
||||
assert res.address == result["address"]
|
||||
|
@ -29,6 +29,8 @@ TO_ADDR = "0x1d1c328764a41bda0492b66baa30c4a339ff85ef"
|
||||
SHOW_ALL = (143, 167)
|
||||
GO_BACK = (16, 220)
|
||||
|
||||
DEFS_ZIP_FILE_PATH = COMMON_FIXTURES_DIR / "ethereum" / "definitions-latest.zip"
|
||||
|
||||
pytestmark = [pytest.mark.altcoin, pytest.mark.ethereum]
|
||||
|
||||
|
||||
@ -37,25 +39,25 @@ def get_definitions(
|
||||
) -> messages.EthereumDefinitions:
|
||||
chain_id = parameters["chain_id"]
|
||||
token_chain_id = parameters["chain_id"]
|
||||
token_address = parameters["to_address"]
|
||||
token_address = parameters.get("to_address")
|
||||
|
||||
if "definitions" in parameters:
|
||||
chain_id = parameters["definitions"].get("chain_id", chain_id)
|
||||
token_chain_id = parameters["definitions"].get("token_chain_id", token_chain_id)
|
||||
token_address = parameters["definitions"].get("to_address", token_address)
|
||||
|
||||
encoded_network = ethereum.get_definition_from_path(
|
||||
encoded_network = ethereum.get_definition_from_zip(
|
||||
DEFS_ZIP_FILE_PATH,
|
||||
ethereum.get_network_definition_path(
|
||||
COMMON_FIXTURES_DIR / "ethereum" / "definitions-latest",
|
||||
chain_id,
|
||||
),
|
||||
)
|
||||
)
|
||||
encoded_token = ethereum.get_definition_from_path(
|
||||
encoded_token = ethereum.get_definition_from_zip(
|
||||
DEFS_ZIP_FILE_PATH,
|
||||
ethereum.get_token_definition_path(
|
||||
COMMON_FIXTURES_DIR / "ethereum" / "definitions-latest",
|
||||
token_chain_id,
|
||||
token_address,
|
||||
)
|
||||
),
|
||||
)
|
||||
|
||||
return messages.EthereumDefinitions(
|
||||
@ -91,7 +93,7 @@ def test_signtx(client: Client, parameters, result):
|
||||
assert sig_v == result["sig_v"]
|
||||
|
||||
|
||||
def test_signtx_missing_extern_definitions(client: Client):
|
||||
def test_signtx_missing_or_wrong_extern_definitions(client: Client):
|
||||
chain_id = 8 # Ubiq
|
||||
to_address = "0x20e3dd746ddf519b23ffbbb6da7a5d33ea6349d6" # Sphere
|
||||
|
||||
@ -124,31 +126,6 @@ def test_signtx_missing_extern_definitions(client: Client):
|
||||
"to_address": "",
|
||||
}
|
||||
), # missing network and token
|
||||
]
|
||||
|
||||
for definitions in definitions_vector:
|
||||
with pytest.raises(TrezorFailure, match=r"DataError:.*Forbidden key path"):
|
||||
sign_tx_call(definitions=definitions)
|
||||
|
||||
|
||||
def test_signtx_wrong_extern_definitions(client: Client):
|
||||
chain_id = 8 # Ubiq
|
||||
to_address = "0x20e3dd746ddf519b23ffbbb6da7a5d33ea6349d6" # Sphere
|
||||
|
||||
sign_tx_call = partial(
|
||||
ethereum.sign_tx,
|
||||
client,
|
||||
n=parse_path("m/44'/108'/0'/0/0"),
|
||||
nonce=0,
|
||||
gas_price=20,
|
||||
gas_limit=20,
|
||||
to=to_address,
|
||||
chain_id=chain_id,
|
||||
value=0,
|
||||
data=b"a9059cbb00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000",
|
||||
)
|
||||
|
||||
definitions_vector = [
|
||||
get_definitions(
|
||||
{
|
||||
"chain_id": chain_id,
|
||||
@ -164,13 +141,19 @@ def test_signtx_wrong_extern_definitions(client: Client):
|
||||
"to_address": "0xd0d6d6c5fe4a677d343cc433536bb717bae167dd",
|
||||
}
|
||||
), # wrong network and token
|
||||
get_definitions(
|
||||
{
|
||||
"chain_id": chain_id,
|
||||
"definitions": {
|
||||
"token_chain_id": 1,
|
||||
"to_address": "0xd0d6d6c5fe4a677d343cc433536bb717bae167dd",
|
||||
},
|
||||
}
|
||||
), # wrong token
|
||||
]
|
||||
|
||||
for definitions in definitions_vector:
|
||||
with pytest.raises(
|
||||
TrezorFailure,
|
||||
match=r"DataError:.*Network definition mismatch",
|
||||
):
|
||||
with pytest.raises(TrezorFailure, match=r"DataError"):
|
||||
sign_tx_call(definitions=definitions)
|
||||
|
||||
|
||||
@ -199,7 +182,7 @@ def test_signtx_eip1559(client: Client, parameters, result):
|
||||
assert sig_v == result["sig_v"]
|
||||
|
||||
|
||||
def test_signtx_eip1559_missing_extern_definitions(client: Client):
|
||||
def test_signtx_eip1559_missing_or_wrong_extern_definitions(client: Client):
|
||||
chain_id = 8 # Ubiq
|
||||
to_address = "0x20e3dd746ddf519b23ffbbb6da7a5d33ea6349d6" # Sphere
|
||||
|
||||
@ -233,32 +216,6 @@ def test_signtx_eip1559_missing_extern_definitions(client: Client):
|
||||
"to_address": "",
|
||||
}
|
||||
), # missing network and token
|
||||
]
|
||||
|
||||
for definitions in definitions_vector:
|
||||
with pytest.raises(TrezorFailure, match=r"DataError:.*Forbidden key path"):
|
||||
sign_tx_eip1559_call(definitions=definitions)
|
||||
|
||||
|
||||
def test_signtx_eip1559_wrong_extern_definitions(client: Client):
|
||||
chain_id = 8 # Ubiq
|
||||
to_address = "0x20e3dd746ddf519b23ffbbb6da7a5d33ea6349d6" # Sphere
|
||||
|
||||
sign_tx_eip1559_call = partial(
|
||||
ethereum.sign_tx_eip1559,
|
||||
client,
|
||||
n=parse_path("m/44'/108'/0'/0/0"),
|
||||
nonce=0,
|
||||
gas_limit=20,
|
||||
max_gas_fee=20,
|
||||
max_priority_fee=1,
|
||||
to=to_address,
|
||||
chain_id=chain_id,
|
||||
value=0,
|
||||
data=b"a9059cbb00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000",
|
||||
)
|
||||
|
||||
definitions_vector = [
|
||||
get_definitions(
|
||||
{
|
||||
"chain_id": chain_id,
|
||||
@ -274,13 +231,19 @@ def test_signtx_eip1559_wrong_extern_definitions(client: Client):
|
||||
"to_address": "0xd0d6d6c5fe4a677d343cc433536bb717bae167dd",
|
||||
}
|
||||
), # wrong network and token
|
||||
get_definitions(
|
||||
{
|
||||
"chain_id": chain_id,
|
||||
"definitions": {
|
||||
"token_chain_id": 1,
|
||||
"to_address": "0xd0d6d6c5fe4a677d343cc433536bb717bae167dd",
|
||||
},
|
||||
}
|
||||
), # wrong token
|
||||
]
|
||||
|
||||
for definitions in definitions_vector:
|
||||
with pytest.raises(
|
||||
TrezorFailure,
|
||||
match=r"DataError:.*Network definition mismatch",
|
||||
):
|
||||
with pytest.raises(TrezorFailure, match=r"DataError"):
|
||||
sign_tx_eip1559_call(definitions=definitions)
|
||||
|
||||
|
||||
|