Mirror of https://github.com/trezor/trezor-firmware.git
chore(core): add underscores to private constants to reduce code size

parent f1c7129d06
commit 49765830de
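
The rename is what delivers the size reduction: with MicroPython's `micropython.const()`, references to the constant are folded into the bytecode at compile time, and when the name begins with an underscore the compiler does not create a module-level global for it at all, so the assignment costs no bytecode or dictionary entry in the firmware image. Constants that are only used inside their own module can therefore be made private for free, which is what the hunks below do across the bitcoin, decred, zcash, cardano, tezos, SD-salt, DER, logging, UI and USB code (a few plain integers are wrapped in `const()` at the same time). A minimal sketch of the idiom follows; the names are illustrative and not taken from the firmware:

# Sketch of the MicroPython "private const" idiom (illustrative names only,
# not actual firmware constants).
from micropython import const

# Public constant: references are replaced with the literal at compile time,
# but the name is still stored as a module global so it can be imported.
PUBLIC_FLAG = const(1)

# Private constant: the leading underscore tells the compiler not to store
# the name in the module's globals at all, saving code size and RAM.
_PRIVATE_FLAG = const(2)


def combined() -> int:
    # Both references compile down to literals here.
    return PUBLIC_FLAG | _PRIVATE_FLAG


# On a MicroPython build, only the public name survives in the module dict:
print("PUBLIC_FLAG" in globals())    # True
print("_PRIVATE_FLAG" in globals())  # False
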
@@ -86,10 +86,10 @@ PATTERN_UNCHAINED_UNHARDENED = (
 PATTERN_UNCHAINED_DEPRECATED = "m/45'/coin_type'/account'/[0-1000000]/address_index"

 # SLIP-44 coin type for Bitcoin
-SLIP44_BITCOIN = const(0)
+_SLIP44_BITCOIN = const(0)

 # SLIP-44 coin type for all Testnet coins
-SLIP44_TESTNET = const(1)
+_SLIP44_TESTNET = const(1)


 def validate_path_against_script_type(
@@ -112,7 +112,7 @@ def validate_path_against_script_type(

     if script_type == InputScriptType.SPENDADDRESS and not multisig:
         patterns.append(PATTERN_BIP44)
-        if coin.slip44 == SLIP44_BITCOIN:
+        if coin.slip44 == _SLIP44_BITCOIN:
             patterns.append(PATTERN_GREENADDRESS_A)
             patterns.append(PATTERN_GREENADDRESS_B)

@@ -121,11 +121,11 @@ def validate_path_against_script_type(
         and multisig
     ):
         patterns.append(PATTERN_BIP48_RAW)
-        if coin.slip44 == SLIP44_BITCOIN or (
-            coin.fork_id is not None and coin.slip44 != SLIP44_TESTNET
+        if coin.slip44 == _SLIP44_BITCOIN or (
+            coin.fork_id is not None and coin.slip44 != _SLIP44_TESTNET
         ):
             patterns.append(PATTERN_BIP45)
-        if coin.slip44 == SLIP44_BITCOIN:
+        if coin.slip44 == _SLIP44_BITCOIN:
             patterns.append(PATTERN_GREENADDRESS_A)
             patterns.append(PATTERN_GREENADDRESS_B)
         if coin.coin_name in BITCOIN_NAMES:
@@ -137,7 +137,7 @@ def validate_path_against_script_type(
         patterns.append(PATTERN_BIP49)
         if multisig:
             patterns.append(PATTERN_BIP48_P2SHSEGWIT)
-        if coin.slip44 == SLIP44_BITCOIN:
+        if coin.slip44 == _SLIP44_BITCOIN:
             patterns.append(PATTERN_GREENADDRESS_A)
             patterns.append(PATTERN_GREENADDRESS_B)
         if coin.coin_name in BITCOIN_NAMES:
@@ -147,7 +147,7 @@ def validate_path_against_script_type(
         patterns.append(PATTERN_BIP84)
         if multisig:
             patterns.append(PATTERN_BIP48_SEGWIT)
-        if coin.slip44 == SLIP44_BITCOIN:
+        if coin.slip44 == _SLIP44_BITCOIN:
             patterns.append(PATTERN_GREENADDRESS_A)
             patterns.append(PATTERN_GREENADDRESS_B)

@@ -170,12 +170,12 @@ def get_schemas_for_coin(
     ]

     # patterns without coin_type field must be treated as if coin_type == 0
-    if coin.slip44 == SLIP44_BITCOIN or (
-        coin.fork_id is not None and coin.slip44 != SLIP44_TESTNET
+    if coin.slip44 == _SLIP44_BITCOIN or (
+        coin.fork_id is not None and coin.slip44 != _SLIP44_TESTNET
     ):
         patterns.append(PATTERN_BIP45)

-    if coin.slip44 == SLIP44_BITCOIN:
+    if coin.slip44 == _SLIP44_BITCOIN:
         patterns.extend(
             (
                 PATTERN_GREENADDRESS_A,
@@ -229,9 +229,9 @@ def get_schemas_from_patterns(
     # cannot allow spending any testnet coins from Bitcoin paths, because
    # otherwise an attacker could trick the user into spending BCH on a Bitcoin
     # path by signing a seemingly harmless BCH Testnet transaction.
-    if coin.fork_id is not None and coin.slip44 != SLIP44_TESTNET:
+    if coin.fork_id is not None and coin.slip44 != _SLIP44_TESTNET:
         schemas.extend(
-            PathSchema.parse(pattern, SLIP44_BITCOIN) for pattern in patterns
+            PathSchema.parse(pattern, _SLIP44_BITCOIN) for pattern in patterns
         )

     return schemas

@@ -17,10 +17,10 @@ from .approvers import BasicApprover
 from .bitcoin import Bitcoin
 from .progress import progress

-DECRED_SERIALIZE_FULL = const(0 << 16)
-DECRED_SERIALIZE_NO_WITNESS = const(1 << 16)
-DECRED_SERIALIZE_WITNESS_SIGNING = const(3 << 16)
-DECRED_SCRIPT_VERSION = const(0)
+_DECRED_SERIALIZE_FULL = const(0 << 16)
+_DECRED_SERIALIZE_NO_WITNESS = const(1 << 16)
+_DECRED_SERIALIZE_WITNESS_SIGNING = const(3 << 16)
+_DECRED_SCRIPT_VERSION = const(0)
 OUTPUT_SCRIPT_NULL_SSTXCHANGE = (
     b"\xBD\x76\xA9\x14\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\x88\xAC"
 )
@@ -158,7 +158,7 @@ class Decred(Bitcoin):
         write_compact_size(self.serialized_tx, self.tx_info.tx.inputs_count)

         writers.write_uint32(
-            self.h_prefix, self.tx_info.tx.version | DECRED_SERIALIZE_NO_WITNESS
+            self.h_prefix, self.tx_info.tx.version | _DECRED_SERIALIZE_NO_WITNESS
         )
         write_compact_size(self.h_prefix, self.tx_info.tx.inputs_count)

@@ -223,7 +223,7 @@ class Decred(Bitcoin):

         h_witness = self.create_hash_writer()
         writers.write_uint32(
-            h_witness, self.tx_info.tx.version | DECRED_SERIALIZE_WITNESS_SIGNING
+            h_witness, self.tx_info.tx.version | _DECRED_SERIALIZE_WITNESS_SIGNING
         )
         write_compact_size(h_witness, self.tx_info.tx.inputs_count)

@@ -314,7 +314,7 @@ class Decred(Bitcoin):
                 raise wire.DataError("Script version must be provided")
             writers.write_uint16(w, txo.decred_script_version)
         else:
-            writers.write_uint16(w, DECRED_SCRIPT_VERSION)
+            writers.write_uint16(w, _DECRED_SCRIPT_VERSION)
         writers.write_bytes_prefixed(w, script_pubkey)

     def process_sstx_commitment_owned(self, txo: TxOutput) -> bytearray:
@@ -381,9 +381,9 @@ class Decred(Bitcoin):
         # The upper 16 bits of the transaction version specify the serialization
         # format and the lower 16 bits specify the version number.
         if witness_marker:
-            version = tx.version | DECRED_SERIALIZE_FULL
+            version = tx.version | _DECRED_SERIALIZE_FULL
         else:
-            version = tx.version | DECRED_SERIALIZE_NO_WITNESS
+            version = tx.version | _DECRED_SERIALIZE_NO_WITNESS

         writers.write_uint32(w, version)

@@ -32,7 +32,7 @@ if TYPE_CHECKING:
     from ..common import SigHashType
     from ..writers import Writer

-OVERWINTERED = const(0x8000_0000)
+_OVERWINTERED = const(0x8000_0000)


 class Zip243SigHasher:
@@ -70,7 +70,7 @@ class Zip243SigHasher:
         zero_hash = b"\x00" * TX_HASH_SIZE

         # 1. nVersion | fOverwintered
-        write_uint32(h_preimage, tx.version | OVERWINTERED)
+        write_uint32(h_preimage, tx.version | _OVERWINTERED)
         # 2. nVersionGroupId
         write_uint32(h_preimage, tx.version_group_id)
         # 3. hashPrevouts
@@ -181,7 +181,7 @@ class ZcashV4(Bitcoinlike):
         if tx.version_group_id is None:
             raise wire.DataError("Version group ID is missing")
         # nVersion | fOverwintered
-        write_uint32(w, tx.version | OVERWINTERED)
+        write_uint32(w, tx.version | _OVERWINTERED)
         write_uint32(w, tx.version_group_id)  # nVersionGroupId

     def write_tx_footer(self, w: Writer, tx: SignTx | PrevTx) -> None:

@@ -2,21 +2,21 @@ from micropython import const

 from apps.common.paths import HARDENED, PathSchema

-SLIP44_ID = 1815
+_SLIP44_ID = const(1815)

-BYRON_ROOT = [44 | HARDENED, SLIP44_ID | HARDENED]
-SHELLEY_ROOT = [1852 | HARDENED, SLIP44_ID | HARDENED]
-MULTISIG_ROOT = [1854 | HARDENED, SLIP44_ID | HARDENED]
-MINTING_ROOT = [1855 | HARDENED, SLIP44_ID | HARDENED]
+BYRON_ROOT = [44 | HARDENED, _SLIP44_ID | HARDENED]
+SHELLEY_ROOT = [1852 | HARDENED, _SLIP44_ID | HARDENED]
+MULTISIG_ROOT = [1854 | HARDENED, _SLIP44_ID | HARDENED]
+MINTING_ROOT = [1855 | HARDENED, _SLIP44_ID | HARDENED]

 # fmt: off
-SCHEMA_PUBKEY = PathSchema.parse("m/[44,1852,1854]'/coin_type'/account'/*", SLIP44_ID)
+SCHEMA_PUBKEY = PathSchema.parse("m/[44,1852,1854]'/coin_type'/account'/*", _SLIP44_ID)
 # minting has a specific schema for key derivation - see CIP-1855
-SCHEMA_MINT = PathSchema.parse(f"m/1855'/coin_type'/[0-{HARDENED - 1}]'", SLIP44_ID)
-SCHEMA_PAYMENT = PathSchema.parse("m/[44,1852]'/coin_type'/account'/[0,1]/address_index", SLIP44_ID)
+SCHEMA_MINT = PathSchema.parse(f"m/1855'/coin_type'/[0-{HARDENED - 1}]'", _SLIP44_ID)
+SCHEMA_PAYMENT = PathSchema.parse("m/[44,1852]'/coin_type'/account'/[0,1]/address_index", _SLIP44_ID)
 # staking is only allowed on Shelley paths with suffix /2/0
-SCHEMA_STAKING = PathSchema.parse("m/1852'/coin_type'/account'/2/0", SLIP44_ID)
-SCHEMA_STAKING_ANY_ACCOUNT = PathSchema.parse(f"m/1852'/coin_type'/[0-{HARDENED - 1}]'/2/0", SLIP44_ID)
+SCHEMA_STAKING = PathSchema.parse("m/1852'/coin_type'/account'/2/0", _SLIP44_ID)
+SCHEMA_STAKING_ANY_ACCOUNT = PathSchema.parse(f"m/1852'/coin_type'/[0-{HARDENED - 1}]'/2/0", _SLIP44_ID)
 # fmt: on

 ACCOUNT_PATH_INDEX = const(2)

@@ -50,37 +50,37 @@ if TYPE_CHECKING:
         messages.CardanoTxItemAck | messages.CardanoTxWitnessResponse
     )

-MINTING_POLICY_ID_LENGTH = 28
-MAX_ASSET_NAME_LENGTH = 32
+_MINTING_POLICY_ID_LENGTH = const(28)
+_MAX_ASSET_NAME_LENGTH = const(32)

-TX_BODY_KEY_INPUTS = const(0)
-TX_BODY_KEY_OUTPUTS = const(1)
-TX_BODY_KEY_FEE = const(2)
-TX_BODY_KEY_TTL = const(3)
-TX_BODY_KEY_CERTIFICATES = const(4)
-TX_BODY_KEY_WITHDRAWALS = const(5)
-TX_BODY_KEY_AUXILIARY_DATA = const(7)
-TX_BODY_KEY_VALIDITY_INTERVAL_START = const(8)
-TX_BODY_KEY_MINT = const(9)
-TX_BODY_KEY_SCRIPT_DATA_HASH = const(11)
-TX_BODY_KEY_COLLATERAL_INPUTS = const(13)
-TX_BODY_KEY_REQUIRED_SIGNERS = const(14)
-TX_BODY_KEY_NETWORK_ID = const(15)
-TX_BODY_KEY_COLLATERAL_RETURN = const(16)
-TX_BODY_KEY_TOTAL_COLLATERAL = const(17)
-TX_BODY_KEY_REFERENCE_INPUTS = const(18)
+_TX_BODY_KEY_INPUTS = const(0)
+_TX_BODY_KEY_OUTPUTS = const(1)
+_TX_BODY_KEY_FEE = const(2)
+_TX_BODY_KEY_TTL = const(3)
+_TX_BODY_KEY_CERTIFICATES = const(4)
+_TX_BODY_KEY_WITHDRAWALS = const(5)
+_TX_BODY_KEY_AUXILIARY_DATA = const(7)
+_TX_BODY_KEY_VALIDITY_INTERVAL_START = const(8)
+_TX_BODY_KEY_MINT = const(9)
+_TX_BODY_KEY_SCRIPT_DATA_HASH = const(11)
+_TX_BODY_KEY_COLLATERAL_INPUTS = const(13)
+_TX_BODY_KEY_REQUIRED_SIGNERS = const(14)
+_TX_BODY_KEY_NETWORK_ID = const(15)
+_TX_BODY_KEY_COLLATERAL_RETURN = const(16)
+_TX_BODY_KEY_TOTAL_COLLATERAL = const(17)
+_TX_BODY_KEY_REFERENCE_INPUTS = const(18)

-BABBAGE_OUTPUT_KEY_ADDRESS = const(0)
-BABBAGE_OUTPUT_KEY_AMOUNT = const(1)
-BABBAGE_OUTPUT_KEY_DATUM_OPTION = const(2)
-BABBAGE_OUTPUT_KEY_REFERENCE_SCRIPT = const(3)
+_BABBAGE_OUTPUT_KEY_ADDRESS = const(0)
+_BABBAGE_OUTPUT_KEY_AMOUNT = const(1)
+_BABBAGE_OUTPUT_KEY_DATUM_OPTION = const(2)
+_BABBAGE_OUTPUT_KEY_REFERENCE_SCRIPT = const(3)

-DATUM_OPTION_KEY_HASH = const(0)
-DATUM_OPTION_KEY_INLINE = const(1)
+_DATUM_OPTION_KEY_HASH = const(0)
+_DATUM_OPTION_KEY_INLINE = const(1)

-POOL_REGISTRATION_CERTIFICATE_ITEMS_COUNT = 10
+_POOL_REGISTRATION_CERTIFICATE_ITEMS_COUNT = const(10)

-MAX_CHUNK_SIZE = 1024
+_MAX_CHUNK_SIZE = const(1024)


 class Signer:
@@ -154,30 +154,30 @@ class Signer:
         inputs_list: HashBuilderList[tuple[bytes, int]] = HashBuilderList(
             self.msg.inputs_count
         )
-        with self.tx_dict.add(TX_BODY_KEY_INPUTS, inputs_list):
+        with self.tx_dict.add(_TX_BODY_KEY_INPUTS, inputs_list):
             await self._process_inputs(inputs_list)

         outputs_list: HashBuilderList = HashBuilderList(self.msg.outputs_count)
-        with self.tx_dict.add(TX_BODY_KEY_OUTPUTS, outputs_list):
+        with self.tx_dict.add(_TX_BODY_KEY_OUTPUTS, outputs_list):
             await self._process_outputs(outputs_list)

-        self.tx_dict.add(TX_BODY_KEY_FEE, self.msg.fee)
+        self.tx_dict.add(_TX_BODY_KEY_FEE, self.msg.fee)

         if self.msg.ttl is not None:
-            self.tx_dict.add(TX_BODY_KEY_TTL, self.msg.ttl)
+            self.tx_dict.add(_TX_BODY_KEY_TTL, self.msg.ttl)

         if self.msg.certificates_count > 0:
             certificates_list: HashBuilderList = HashBuilderList(
                 self.msg.certificates_count
             )
-            with self.tx_dict.add(TX_BODY_KEY_CERTIFICATES, certificates_list):
+            with self.tx_dict.add(_TX_BODY_KEY_CERTIFICATES, certificates_list):
                 await self._process_certificates(certificates_list)

         if self.msg.withdrawals_count > 0:
             withdrawals_dict: HashBuilderDict[bytes, int] = HashBuilderDict(
                 self.msg.withdrawals_count, wire.ProcessError("Invalid withdrawal")
             )
-            with self.tx_dict.add(TX_BODY_KEY_WITHDRAWALS, withdrawals_dict):
+            with self.tx_dict.add(_TX_BODY_KEY_WITHDRAWALS, withdrawals_dict):
                 await self._process_withdrawals(withdrawals_dict)

         if self.msg.has_auxiliary_data:
@@ -185,7 +185,7 @@ class Signer:

         if self.msg.validity_interval_start is not None:
             self.tx_dict.add(
-                TX_BODY_KEY_VALIDITY_INTERVAL_START, self.msg.validity_interval_start
+                _TX_BODY_KEY_VALIDITY_INTERVAL_START, self.msg.validity_interval_start
             )

         if self.msg.minting_asset_groups_count > 0:
@@ -193,7 +193,7 @@ class Signer:
                 self.msg.minting_asset_groups_count,
                 wire.ProcessError("Invalid mint token bundle"),
             )
-            with self.tx_dict.add(TX_BODY_KEY_MINT, minting_dict):
+            with self.tx_dict.add(_TX_BODY_KEY_MINT, minting_dict):
                 await self._process_minting(minting_dict)

         if self.msg.script_data_hash is not None:
@@ -204,7 +204,7 @@ class Signer:
                 tuple[bytes, int]
             ] = HashBuilderList(self.msg.collateral_inputs_count)
             with self.tx_dict.add(
-                TX_BODY_KEY_COLLATERAL_INPUTS, collateral_inputs_list
+                _TX_BODY_KEY_COLLATERAL_INPUTS, collateral_inputs_list
             ):
                 await self._process_collateral_inputs(collateral_inputs_list)

@@ -212,23 +212,23 @@ class Signer:
             required_signers_list: HashBuilderList[bytes] = HashBuilderList(
                 self.msg.required_signers_count
             )
-            with self.tx_dict.add(TX_BODY_KEY_REQUIRED_SIGNERS, required_signers_list):
+            with self.tx_dict.add(_TX_BODY_KEY_REQUIRED_SIGNERS, required_signers_list):
                 await self._process_required_signers(required_signers_list)

         if self.msg.include_network_id:
-            self.tx_dict.add(TX_BODY_KEY_NETWORK_ID, self.msg.network_id)
+            self.tx_dict.add(_TX_BODY_KEY_NETWORK_ID, self.msg.network_id)

         if self.msg.has_collateral_return:
             await self._process_collateral_return()

         if self.msg.total_collateral is not None:
-            self.tx_dict.add(TX_BODY_KEY_TOTAL_COLLATERAL, self.msg.total_collateral)
+            self.tx_dict.add(_TX_BODY_KEY_TOTAL_COLLATERAL, self.msg.total_collateral)

         if self.msg.reference_inputs_count > 0:
             reference_inputs_list: HashBuilderList[tuple[bytes, int]] = HashBuilderList(
                 self.msg.reference_inputs_count
             )
-            with self.tx_dict.add(TX_BODY_KEY_REFERENCE_INPUTS, reference_inputs_list):
+            with self.tx_dict.add(_TX_BODY_KEY_REFERENCE_INPUTS, reference_inputs_list):
                 await self._process_reference_inputs(reference_inputs_list)

     def _validate_tx_init(self) -> None:
@@ -469,15 +469,15 @@ class Signer:
         Note that it is to be used also for outputs with no Plutus elements.
         """
         address = self._get_output_address(output)
-        output_dict.add(BABBAGE_OUTPUT_KEY_ADDRESS, address)
+        output_dict.add(_BABBAGE_OUTPUT_KEY_ADDRESS, address)

         if output.asset_groups_count == 0:
             # Only amount is added to the dict.
-            output_dict.add(BABBAGE_OUTPUT_KEY_AMOUNT, output.amount)
+            output_dict.add(_BABBAGE_OUTPUT_KEY_AMOUNT, output.amount)
         else:
             # [amount, asset_groups] is added to the dict.
             output_value_list: HashBuilderList = HashBuilderList(2)
-            with output_dict.add(BABBAGE_OUTPUT_KEY_AMOUNT, output_value_list):
+            with output_dict.add(_BABBAGE_OUTPUT_KEY_AMOUNT, output_value_list):
                 await self._process_output_value(output_value_list, output, should_show)

         if output.datum_hash is not None:
@@ -486,13 +486,13 @@ class Signer:
                     layout.confirm_datum_hash(self.ctx, output.datum_hash)
                 )
             output_dict.add(
-                BABBAGE_OUTPUT_KEY_DATUM_OPTION,
-                (DATUM_OPTION_KEY_HASH, output.datum_hash),
+                _BABBAGE_OUTPUT_KEY_DATUM_OPTION,
+                (_DATUM_OPTION_KEY_HASH, output.datum_hash),
             )
         elif output.inline_datum_size > 0:
             inline_datum_list: HashBuilderList = HashBuilderList(2)
-            with output_dict.add(BABBAGE_OUTPUT_KEY_DATUM_OPTION, inline_datum_list):
-                inline_datum_list.append(DATUM_OPTION_KEY_INLINE)
+            with output_dict.add(_BABBAGE_OUTPUT_KEY_DATUM_OPTION, inline_datum_list):
+                inline_datum_list.append(_DATUM_OPTION_KEY_INLINE)
                 inline_datum_cbor: HashBuilderEmbeddedCBOR = HashBuilderEmbeddedCBOR(
                     output.inline_datum_size
                 )
@@ -506,7 +506,7 @@ class Signer:
                 output.reference_script_size
             )
             with output_dict.add(
-                BABBAGE_OUTPUT_KEY_REFERENCE_SCRIPT, reference_script_cbor
+                _BABBAGE_OUTPUT_KEY_REFERENCE_SCRIPT, reference_script_cbor
             ):
                 await self._process_reference_script(
                     reference_script_cbor, output.reference_script_size, should_show
@@ -571,7 +571,7 @@ class Signer:
             else wire.ProcessError("Invalid token bundle in output")
         )

-        if len(asset_group.policy_id) != MINTING_POLICY_ID_LENGTH:
+        if len(asset_group.policy_id) != _MINTING_POLICY_ID_LENGTH:
             raise INVALID_TOKEN_BUNDLE
         if asset_group.tokens_count == 0:
             raise INVALID_TOKEN_BUNDLE
@@ -612,7 +612,7 @@ class Signer:
         if token.amount is None or token.mint_amount is not None:
             raise INVALID_TOKEN_BUNDLE

-        if len(token.asset_name_bytes) > MAX_ASSET_NAME_LENGTH:
+        if len(token.asset_name_bytes) > _MAX_ASSET_NAME_LENGTH:
             raise INVALID_TOKEN_BUNDLE

         # inline datum
@@ -686,7 +686,7 @@ class Signer:
                assert pool_parameters is not None  # _validate_certificate

                pool_items_list: HashBuilderList = HashBuilderList(
-                    POOL_REGISTRATION_CERTIFICATE_ITEMS_COUNT
+                    _POOL_REGISTRATION_CERTIFICATE_ITEMS_COUNT
                )
                with certificates_list.append(pool_items_list):
                    for item in certificates.cborize_pool_registration_init(
@@ -844,7 +844,7 @@ class Signer:
             self.msg.network_id,
             self.should_show_details,
         )
-        self.tx_dict.add(TX_BODY_KEY_AUXILIARY_DATA, auxiliary_data_hash)
+        self.tx_dict.add(_TX_BODY_KEY_AUXILIARY_DATA, auxiliary_data_hash)

         await self.ctx.call(auxiliary_data_supplement, messages.CardanoTxHostAck)

@@ -901,7 +901,7 @@ class Signer:
         await self._show_if_showing_details(
             layout.confirm_script_data_hash(self.ctx, self.msg.script_data_hash)
         )
-        self.tx_dict.add(TX_BODY_KEY_SCRIPT_DATA_HASH, self.msg.script_data_hash)
+        self.tx_dict.add(_TX_BODY_KEY_SCRIPT_DATA_HASH, self.msg.script_data_hash)

     def _validate_script_data_hash(self) -> None:
         assert self.msg.script_data_hash is not None
@@ -993,7 +993,7 @@ class Signer:
         output_items_count = 2
         if output.format == CardanoTxOutputSerializationFormat.ARRAY_LEGACY:
             output_list: HashBuilderList = HashBuilderList(output_items_count)
-            with self.tx_dict.add(TX_BODY_KEY_COLLATERAL_RETURN, output_list):
+            with self.tx_dict.add(_TX_BODY_KEY_COLLATERAL_RETURN, output_list):
                 await self._process_legacy_output(
                     output_list, output, should_show_tokens
                 )
@@ -1001,7 +1001,7 @@ class Signer:
             output_dict: HashBuilderDict[int, Any] = HashBuilderDict(
                 output_items_count, wire.ProcessError("Invalid collateral return")
             )
-            with self.tx_dict.add(TX_BODY_KEY_COLLATERAL_RETURN, output_dict):
+            with self.tx_dict.add(_TX_BODY_KEY_COLLATERAL_RETURN, output_dict):
                 await self._process_babbage_output(
                     output_dict, output, should_show_tokens
                 )
@@ -1190,7 +1190,7 @@ class Signer:

     def _get_chunks_count(self, data_size: int) -> int:
         assert data_size > 0
-        return (data_size - 1) // MAX_CHUNK_SIZE + 1
+        return (data_size - 1) // _MAX_CHUNK_SIZE + 1

     def _validate_chunk(
         self,
@@ -1199,9 +1199,9 @@ class Signer:
         chunks_count: int,
         error: wire.ProcessError,
     ) -> None:
-        if chunk_number < chunks_count - 1 and len(chunk_data) != MAX_CHUNK_SIZE:
+        if chunk_number < chunks_count - 1 and len(chunk_data) != _MAX_CHUNK_SIZE:
             raise error
-        if chunk_number == chunks_count - 1 and len(chunk_data) > MAX_CHUNK_SIZE:
+        if chunk_number == chunks_count - 1 and len(chunk_data) > _MAX_CHUNK_SIZE:
             raise error

     def _get_byron_witness(

@@ -62,14 +62,14 @@ PROPOSAL_HASH_SIZE = const(32)
 PUBLIC_KEY_HASH_SIZE = const(20)
 TAGGED_PUBKEY_HASH_SIZE = 1 + PUBLIC_KEY_HASH_SIZE
 CONTRACT_ID_SIZE = const(22)
-ED25519_PUBLIC_KEY_SIZE = const(32)
-SECP256K1_PUBLIC_KEY_SIZE = const(33)
-P256_PUBLIC_KEY_SIZE = const(33)
+_ED25519_PUBLIC_KEY_SIZE = const(32)
+_SECP256K1_PUBLIC_KEY_SIZE = const(33)
+_P256_PUBLIC_KEY_SIZE = const(33)

 PUBLIC_KEY_TAG_TO_SIZE = {
-    0: ED25519_PUBLIC_KEY_SIZE,
-    1: SECP256K1_PUBLIC_KEY_SIZE,
-    2: P256_PUBLIC_KEY_SIZE,
+    0: _ED25519_PUBLIC_KEY_SIZE,
+    1: _SECP256K1_PUBLIC_KEY_SIZE,
+    2: _P256_PUBLIC_KEY_SIZE,
 }

 OP_TAG_ENDORSEMENT = const(0)
@@ -84,7 +84,7 @@ OP_TAG_TRANSACTION = const(108)
 OP_TAG_ORIGINATION = const(109)
 OP_TAG_DELEGATION = const(110)

-EP_TAG_NAMED = const(255)
+_EP_TAG_NAMED = const(255)


 def base58_encode_check(payload: bytes, prefix: str | None = None) -> str:
@@ -127,7 +127,7 @@ def check_tx_params_size(params: bytes) -> None:
     try:
         r = BufferReader(params)
         tag = r.get()
-        if tag == EP_TAG_NAMED:
+        if tag == _EP_TAG_NAMED:
             n = r.get()
             r.read(n)
         elif tag > 4:

@@ -29,7 +29,7 @@ if TYPE_CHECKING:
     )
     from apps.bitcoin.keychain import Keychain

-OVERWINTERED = const(0x8000_0000)
+_OVERWINTERED = const(0x8000_0000)


 class Zcash(Bitcoinlike):
@@ -103,7 +103,7 @@ class Zcash(Bitcoinlike):
         assert tx.branch_id is not None  # checked in sanitize_*
         assert tx.expiry is not None

-        write_uint32_le(w, tx.version | OVERWINTERED)  # nVersion | fOverwintered
+        write_uint32_le(w, tx.version | _OVERWINTERED)  # nVersion | fOverwintered
         write_uint32_le(w, tx.version_group_id)  # nVersionGroupId
         write_uint32_le(w, tx.branch_id)  # nConsensusBranchId
         write_uint32_le(w, tx.lock_time)  # lock_time

@@ -13,7 +13,7 @@ if TYPE_CHECKING:

 SD_CARD_HOT_SWAPPABLE = False
 SD_SALT_LEN_BYTES = const(32)
-SD_SALT_AUTH_TAG_LEN_BYTES = const(16)
+_SD_SALT_AUTH_TAG_LEN_BYTES = const(16)


 class WrongSdCard(Exception):
@@ -28,7 +28,7 @@ def compute_auth_tag(salt: bytes, auth_key: bytes) -> bytes:
     from trezor.crypto import hmac

     digest = hmac(hmac.SHA256, auth_key, salt).digest()
-    return digest[:SD_SALT_AUTH_TAG_LEN_BYTES]
+    return digest[:_SD_SALT_AUTH_TAG_LEN_BYTES]


 def _get_device_dir() -> str:
@@ -46,7 +46,7 @@ def _load_salt(auth_key: bytes, path: str) -> bytearray | None:
     try:
         with io.fatfs.open(path, "r") as f:
             salt = bytearray(SD_SALT_LEN_BYTES)
-            stored_tag = bytearray(SD_SALT_AUTH_TAG_LEN_BYTES)
+            stored_tag = bytearray(_SD_SALT_AUTH_TAG_LEN_BYTES)
             f.read(salt)
             f.read(stored_tag)
     except io.fatfs.FatFSError:

@@ -7,7 +7,7 @@ if TYPE_CHECKING:
     from trezor.utils import Writer

 # Maximum length of a DER-encoded secp256k1 or secp256p1 signature.
-MAX_DER_SIGNATURE_LENGTH = const(72)
+_MAX_DER_SIGNATURE_LENGTH = const(72)


 def encode_length(l: int) -> bytes:
@@ -82,7 +82,7 @@ def read_int(r: BufferReader) -> memoryview:

 def encode_seq(seq: tuple[bytes, ...]) -> bytes:
     # Preallocate space for a signature, which is all that this function ever encodes.
-    buffer = empty_bytearray(MAX_DER_SIGNATURE_LENGTH)
+    buffer = empty_bytearray(_MAX_DER_SIGNATURE_LENGTH)
     buffer.append(0x30)
     for i in seq:
         write_int(buffer, i)

@@ -3,22 +3,21 @@ import utime
 from micropython import const
 from typing import Any

-NOTSET = const(0)
-DEBUG = const(10)
-INFO = const(20)
-WARNING = const(30)
-ERROR = const(40)
-CRITICAL = const(50)
+_DEBUG = const(10)
+_INFO = const(20)
+_WARNING = const(30)
+_ERROR = const(40)
+_CRITICAL = const(50)

 _leveldict = {
-    DEBUG: ("DEBUG", "32"),
-    INFO: ("INFO", "36"),
-    WARNING: ("WARNING", "33"),
-    ERROR: ("ERROR", "31"),
-    CRITICAL: ("CRITICAL", "1;31"),
+    _DEBUG: ("DEBUG", "32"),
+    _INFO: ("INFO", "36"),
+    _WARNING: ("WARNING", "33"),
+    _ERROR: ("ERROR", "31"),
+    _CRITICAL: ("CRITICAL", "1;31"),
 }

-level = DEBUG
+level = _DEBUG
 color = True


@@ -37,23 +36,23 @@ def _log(name: str, mlevel: int, msg: str, *args: Any) -> None:


 def debug(name: str, msg: str, *args: Any) -> None:
-    _log(name, DEBUG, msg, *args)
+    _log(name, _DEBUG, msg, *args)


 def info(name: str, msg: str, *args: Any) -> None:
-    _log(name, INFO, msg, *args)
+    _log(name, _INFO, msg, *args)


 def warning(name: str, msg: str, *args: Any) -> None:
-    _log(name, WARNING, msg, *args)
+    _log(name, _WARNING, msg, *args)


 def error(name: str, msg: str, *args: Any) -> None:
-    _log(name, ERROR, msg, *args)
+    _log(name, _ERROR, msg, *args)


 def critical(name: str, msg: str, *args: Any) -> None:
-    _log(name, CRITICAL, msg, *args)
+    _log(name, _CRITICAL, msg, *args)


 def exception(name: str, exc: BaseException) -> None:
@@ -63,13 +62,13 @@ def exception(name: str, exc: BaseException) -> None:
     if exc.__class__.__name__ == "Result":
         _log(
             name,
-            DEBUG,
+            _DEBUG,
             "ui.Result: %s",
             exc.value,  # type: ignore[Cannot access member "value" for type "BaseException"]
         )
     elif exc.__class__.__name__ == "Cancelled":
-        _log(name, DEBUG, "ui.Cancelled")
+        _log(name, _DEBUG, "ui.Cancelled")
     else:
-        _log(name, ERROR, "exception:")
+        _log(name, _ERROR, "exception:")
         # since mypy 0.770 we cannot override sys, so print_exception is unknown
         sys.print_exception(exc)  # type: ignore ["print_exception" is not a known member of module]

@@ -25,7 +25,7 @@ HEIGHT = Display.HEIGHT

 # viewport margins
 VIEWX = const(6)
-VIEWY = const(9)
+_VIEWY = const(9)

 # channel used to cancel layouts, see `Cancelled` exception
 layout_chan = loop.chan()
@@ -193,9 +193,9 @@ def grid(
     n_x: int = 3,  # number of rows in the table
     n_y: int = 5,  # number of columns in the table
     start_x: int = VIEWX,  # where the table starts on x-axis
-    start_y: int = VIEWY,  # where the table starts on y-axis
+    start_y: int = _VIEWY,  # where the table starts on y-axis
     end_x: int = (WIDTH - VIEWX),  # where the table ends on x-axis
-    end_y: int = (HEIGHT - VIEWY),  # where the table ends on y-axis
+    end_y: int = (HEIGHT - _VIEWY),  # where the table ends on y-axis
     cells_x: int = 1,  # number of cells to be merged into one in the direction of x-axis
     cells_y: int = 1,  # number of cells to be merged into one in the direction of y-axis
     spacing: int = 0,  # spacing size between cells

@@ -13,10 +13,10 @@ bus = io.USB(
 )

 UDP_PORT = 0
-WIRE_PORT_OFFSET = const(0)
-DEBUGLINK_PORT_OFFSET = const(1)
-WEBAUTHN_PORT_OFFSET = const(2)
-VCP_PORT_OFFSET = const(3)
+_WIRE_PORT_OFFSET = const(0)
+_DEBUGLINK_PORT_OFFSET = const(1)
+_WEBAUTHN_PORT_OFFSET = const(2)
+_VCP_PORT_OFFSET = const(3)

 if utils.EMULATOR:
     import uos
@@ -35,7 +35,7 @@ iface_wire = io.WebUSB(
     iface_num=id_wire,
     ep_in=0x81 + id_wire,
     ep_out=0x01 + id_wire,
-    emu_port=UDP_PORT + WIRE_PORT_OFFSET,
+    emu_port=UDP_PORT + _WIRE_PORT_OFFSET,
 )
 bus.add(iface_wire)

@@ -57,7 +57,7 @@ if __debug__ and ENABLE_IFACE_DEBUG:
         iface_num=id_debug,
         ep_in=0x81 + id_debug,
         ep_out=0x01 + id_debug,
-        emu_port=UDP_PORT + DEBUGLINK_PORT_OFFSET,
+        emu_port=UDP_PORT + _DEBUGLINK_PORT_OFFSET,
     )
     bus.add(iface_debug)

@@ -68,7 +68,7 @@ if not utils.BITCOIN_ONLY and ENABLE_IFACE_WEBAUTHN:
         iface_num=id_webauthn,
         ep_in=0x81 + id_webauthn,
         ep_out=0x01 + id_webauthn,
-        emu_port=UDP_PORT + WEBAUTHN_PORT_OFFSET,
+        emu_port=UDP_PORT + _WEBAUTHN_PORT_OFFSET,
         # fmt: off
         report_desc=bytes([
             0x06, 0xd0, 0xf1,  # USAGE_PAGE (FIDO Alliance)
@@ -102,6 +102,6 @@ if __debug__ and ENABLE_IFACE_VCP:
         ep_in=0x81 + id_vcp,
         ep_out=0x01 + id_vcp,
         ep_cmd=0x81 + id_vcp_data,
-        emu_port=UDP_PORT + VCP_PORT_OFFSET,
+        emu_port=UDP_PORT + _VCP_PORT_OFFSET,
     )
     bus.add(iface_vcp)
Loading…
Reference in New Issue
Block a user