Mirror of https://github.com/trezor/trezor-firmware.git (synced 2024-11-28 18:38:39 +00:00)

style(all): use f-strings for formatting

[no changelog]

This commit is contained in:
parent 32be2c09b9
commit 85ba7c12ba
@@ -16,7 +16,7 @@ class Device:
     def run_trezorctl(self, cmd: str, **kwargs):
         full_cmd = "trezorctl "
         full_cmd += cmd
-        self.log("[software/trezorctl] Running '{}'".format(full_cmd))
+        self.log(f"[software/trezorctl] Running '{full_cmd}'")
         return run(full_cmd, shell=True, check=True, **kwargs)

     def check_model(self, model=None):
@@ -26,11 +26,9 @@ class Device:
         self.run_trezorctl("get-features | grep version")
         lines = res.stdout.splitlines()
         if len(lines) != 1:
-            raise RuntimeError("{} trezors connected".format(len(lines)))
+            raise RuntimeError(f"{len(lines)} trezors connected")
         if model and model not in lines[0]:
-            raise RuntimeError(
-                "invalid trezor model connected (expected {})".format(model)
-            )
+            raise RuntimeError(f"invalid trezor model connected (expected {model})")
         return lines[0].split()[0]

     def reboot(self):
@@ -41,7 +39,7 @@ class Device:
         self.now()
         self.log("[hardware/usb] Turning power on...")
         run(
-            "uhubctl -l {} -p {} -a on".format(self.uhub_location, self.device_port),
+            f"uhubctl -l {self.uhub_location} -p {self.device_port} -a on",
             shell=True,
             check=True,
         )
@@ -51,9 +49,7 @@ class Device:
         self.now()
         self.log("[hardware/usb] Turning power off...")
         run(
-            "uhubctl -l {} -p {} -r 100 -a off".format(
-                self.uhub_location, self.device_port
-            ),
+            f"uhubctl -l {self.uhub_location} -p {self.device_port} -r 100 -a off",
            shell=True,
            check=True,
        )
@@ -65,9 +61,9 @@ class Device:
     @staticmethod
     def wait(seconds):
         Device.now()
-        Device.log("[software] Waiting for {} seconds...".format(seconds))
+        Device.log(f"[software] Waiting for {seconds} seconds...")
         time.sleep(seconds)

     @staticmethod
     def now():
-        Device.log("\n[timestamp] {}".format(datetime.datetime.now()))
+        Device.log(f"\n[timestamp] {datetime.datetime.now()}")
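The pattern repeated across these hunks is mechanical: every positional `{}` placeholder filled by `.format()` moves inline into an f-string. A minimal sketch of the before/after equivalence, using hypothetical stand-in values rather than a real Device instance:

# Hypothetical stand-ins for self.uhub_location / self.device_port.
uhub_location, device_port = "1-1", 2

cmd_old = "uhubctl -l {} -p {} -a on".format(uhub_location, device_port)
cmd_new = f"uhubctl -l {uhub_location} -p {device_port} -a on"

assert cmd_old == cmd_new == "uhubctl -l 1-1 -p 2 -a on"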
@@ -10,16 +10,14 @@ class TrezorOne(Device):

     def touch(self, location, action):
         self.now()
-        self.log(
-            "[hardware/trezor] Touching the {} button by {}...".format(location, action)
-        )
-        self.serial.write(("{} {}\n".format(location, action)).encode())
+        self.log(f"[hardware/trezor] Touching the {location} button by {action}...")
+        self.serial.write(f"{location} {action}\n".encode())

     def update_firmware(self, file=None):
         if file:
             unofficial = True
-            trezorctlcmd = "firmware-update -s -f {} &".format(file)
-            self.log("[software] Updating the firmware to {}".format(file))
+            trezorctlcmd = f"firmware-update -s -f {file} &"
+            self.log(f"[software] Updating the firmware to {file}")
         else:
             unofficial = False
             trezorctlcmd = "firmware-update &"
@@ -21,8 +21,8 @@ class TrezorT(Device):
         self.power_on()

         self.wait(5)
-        self.log("[software] Updating the firmware to {}".format(file))
-        self.run_trezorctl("firmware-update -s -f {}".format(file))
+        self.log(f"[software] Updating the firmware to {file}")
+        self.run_trezorctl(f"firmware-update -s -f {file}")

         # after firmware-update finishes wait for reboot
         self.wait(15)
@@ -10,7 +10,7 @@ class Message(object):
         self.fname = basename(fname)
         self.name = name
         if len(attrs) == 0:
-            raise ValueError("message '%s' has no attributes" % name)
+            raise ValueError(f"message {name} has no attributes")
         t = attrs[0][0]
         if t in ["start", "end", "auxstart", "auxend", "embed", "ignore"]:
             self.typ = t
@@ -19,19 +19,14 @@ class Message(object):
             self.typ = "normal"
             attrs = attrs
         else:
-            raise ValueError("wrong message type in message '%s'" % name)
+            raise ValueError(f"wrong message type in message {name}")
         self.next = []
         for a in attrs:
             if a[0] == "next":
                 self.next.append(a[1])

     def __repr__(self):
-        return '%s(type=%s, fname="%s", next=%s)' % (
-            self.name,
-            self.typ,
-            self.fname,
-            self.next,
-        )
+        return f'{self.name}(type={self.typ}, fname="{self.fname}", next={self.next})'


 def generate_messages(files):
@@ -73,7 +73,7 @@ def check_type(val, types, nullable=False, empty=False, regex=None, choice=None)

     # check type
     if not isinstance(val, types):
-        raise TypeError("Wrong type (expected: {})".format(types))
+        raise TypeError(f"Wrong type (expected: {types})")

     # check empty
     if isinstance(val, (list, dict)) and not empty and not val:
@@ -86,12 +86,12 @@ def check_type(val, types, nullable=False, empty=False, regex=None, choice=None)
         if types is not str:
             raise TypeError("Wrong type for regex check")
         if not re.search(regex, val):
-            raise ValueError("Value does not match regex {}".format(regex))
+            raise ValueError(f"Value does not match regex {regex}")

     # check choice
     if choice is not None and val not in choice:
         choice_str = ", ".join(choice)
-        raise ValueError("Value not allowed, use one of: {}".format(choice_str))
+        raise ValueError(f"Value not allowed, use one of: {choice_str}")


 def check_key(key, types, optional=False, **kwargs):
@@ -100,11 +100,11 @@ def check_key(key, types, optional=False, **kwargs):
             if optional:
                 return
             else:
-                raise KeyError("{}: Missing key".format(key))
+                raise KeyError(f"{key}: Missing key")
         try:
             check_type(coin[key], types, **kwargs)
         except Exception as e:
-            raise ValueError("{}: {}".format(key, e)) from e
+            raise ValueError(f"{key}: {e}") from e

     return do_check

@@ -216,7 +216,7 @@ def _load_btc_coins():
         coin.update(
             name=coin["coin_label"],
             shortcut=coin["coin_shortcut"],
-            key="bitcoin:{}".format(coin["coin_shortcut"]),
+            key=f"bitcoin:{coin['coin_shortcut']}",
             icon=str(file.with_suffix(".png")),
         )
         coins.append(coin)
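One detail worth noting in the key= conversions: the f-string is delimited by double quotes, so the dictionary subscript inside the braces switches to single quotes (re-using the same quote character inside an f-string only became legal in Python 3.12). A small self-contained check with a hypothetical coin record:

coin = {"coin_shortcut": "BTC"}  # hypothetical record

key_old = "bitcoin:{}".format(coin["coin_shortcut"])
key_new = f"bitcoin:{coin['coin_shortcut']}"

assert key_old == key_new == "bitcoin:BTC"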
@@ -280,7 +280,7 @@ def _load_erc20_tokens():
                 chain_id=network["chain_id"],
                 address_bytes=bytes.fromhex(token["address"][2:]),
                 shortcut=token["symbol"],
-                key="erc20:{}:{}".format(chain, token["symbol"]),
+                key=f"erc20:{chain}:{token['symbol']}",
             )
             tokens.append(token)

@@ -292,7 +292,7 @@ def _load_nem_mosaics():
     mosaics = load_json("nem/nem_mosaics.json")
     for mosaic in mosaics:
         shortcut = mosaic["ticker"].strip()
-        mosaic.update(shortcut=shortcut, key="nem:{}".format(shortcut))
+        mosaic.update(shortcut=shortcut, key=f"nem:{shortcut}")
     return mosaics


@@ -300,7 +300,7 @@ def _load_misc():
     """Loads miscellaneous networks from `misc/misc.json`"""
     others = load_json("misc/misc.json")
     for other in others:
-        other.update(key="misc:{}".format(other["shortcut"]))
+        other.update(key=f"misc:{other['shortcut']}")
     return others


@@ -544,7 +544,7 @@ def deduplicate_keys(all_coins):
         elif "chain_id" in coin:
             coin["key"] += ":" + str(coin["chain_id"])
         else:
-            coin["key"] += ":{}".format(i)
+            coin["key"] += f":{i}"
         coin["dup_key_nontoken"] = True


@@ -149,8 +149,8 @@ def highlight_key(coin, color):
     else:
         keylist[-1] = crayon(color, keylist[-1], bold=True)
     key = crayon(color, ":".join(keylist))
-    name = crayon(None, "({})".format(coin["name"]), dim=True)
-    return "{} {}".format(key, name)
+    name = crayon(None, f"({coin['name']})", dim=True)
+    return f"{key} {name}"


 def find_collisions(coins, field):
@@ -169,9 +169,7 @@ def check_eth(coins):
     check_passed = True
     chains = find_collisions(coins, "chain")
     for key, bucket in chains.items():
-        bucket_str = ", ".join(
-            "{} ({})".format(coin["key"], coin["name"]) for coin in bucket
-        )
+        bucket_str = ", ".join(f"{coin['key']} ({coin['name']})" for coin in bucket)
         chain_name_str = "colliding chain name " + crayon(None, key, bold=True) + ":"
         print_log(logging.ERROR, chain_name_str, bucket_str)
         check_passed = False
@@ -240,7 +238,7 @@ def check_btc(coins):
         else:
             # collision between some unsupported networks is OK
             level = logging.INFO
-        print_log(level, "{} {}:".format(prefix, key), collision_str(bucket))
+        print_log(level, f"{prefix} {key}:", collision_str(bucket))

     return failed

@@ -298,7 +296,7 @@ def check_dups(buckets, print_at_level=logging.WARNING):
             prefix = crayon("green", "*", bold=True) + prefix

         highlighted = highlight_key(coin, color)
-        return "{}{}".format(prefix, highlighted)
+        return f"{prefix}{highlighted}"

     check_passed = True

@@ -344,7 +342,7 @@ def check_dups(buckets, print_at_level=logging.WARNING):
         if symbol == "_override":
             print_log(level, "force-set duplicates:", dup_str)
         else:
-            print_log(level, "duplicate symbol {}:".format(symbol.upper()), dup_str)
+            print_log(level, f"duplicate symbol {symbol.upper()}:", dup_str)

     return check_passed

@@ -417,14 +415,12 @@ def check_key_uniformity(coins):
         keyset = set(coin.keys()) | IGNORE_NONUNIFORM_KEYS
         missing = ", ".join(reference_keyset - keyset)
         if missing:
-            print_log(
-                logging.ERROR, "coin {} has missing keys: {}".format(key, missing)
-            )
+            print_log(logging.ERROR, f"coin {key} has missing keys: {missing}")
         additional = ", ".join(keyset - reference_keyset)
         if additional:
             print_log(
                 logging.ERROR,
-                "coin {} has superfluous keys: {}".format(key, additional),
+                f"coin {key} has superfluous keys: {additional}",
             )

         return False
@@ -446,7 +442,7 @@ def check_segwit(coins):
                 print_log(
                     logging.ERROR,
                     coin["name"],
-                    "segwit is True => %s should be set" % field,
+                    f"segwit is True => {field} should be set",
                 )
                 return False
         else:
@@ -455,7 +451,7 @@ def check_segwit(coins):
                 print_log(
                     logging.ERROR,
                     coin["name"],
-                    "segwit is False => %s should NOT be set" % field,
+                    f"segwit is False => {field} should NOT be set",
                 )
                 return False
     return True
@@ -863,7 +859,7 @@ def render(paths, outfile, verbose, bitcoin_only):

     def do_render(src, dst):
         if verbose:
-            click.echo("Rendering {} => {}".format(src, dst))
+            click.echo(f"Rendering {src} => {dst}")
         render_file(src, dst, defs, support_info)

     # single in-out case
@@ -878,7 +874,7 @@ def render(paths, outfile, verbose, bitcoin_only):
     files = []
     for path in paths:
         if not os.path.exists(path):
-            click.echo("Path {} does not exist".format(path))
+            click.echo(f"Path {path} does not exist")
         elif os.path.isdir(path):
             files += glob.glob(os.path.join(path, "*.mako"))
         else:
@@ -887,7 +883,7 @@ def render(paths, outfile, verbose, bitcoin_only):
     # render each file
     for file in files:
         if not file.endswith(".mako"):
-            click.echo("File {} does not end with .mako".format(file))
+            click.echo(f"File {file} does not end with .mako")
         else:
             target = file[: -len(".mako")]
             with open(target, "w") as dst:
@@ -47,7 +47,7 @@ def init(api_key, refresh=None):
         first_100 = all_ids[:100]
         all_ids = all_ids[100:]
         time.sleep(2.2)
-        print("Fetching metadata, {} coins remaining...".format(len(all_ids)))
+        print(f"Fetching metadata, {len(all_ids)} coins remaining...")
         metadata = call(
             "cryptocurrency/info", api_key, params={"id": ",".join(first_100)}
         )
@@ -96,12 +96,12 @@ def main(filename, cost, skip, txsize, refresh, api_key, verbose):
         short = coin["coin_shortcut"]

         if any(re.search(s, coin["coin_name"]) is not None for s in skip):
-            logging.warning("{}:\tskipping because --skip matches".format(short))
+            logging.warning(f"{short}:\tskipping because --skip matches")
             continue

         price = marketcap.fiat_price(short)
         if price is None:
-            logging.error("{}:\tno price data, skipping".format(short))
+            logging.error(f"{short}:\tno price data, skipping")
             continue

         old_maxfee_kb = coin["maxfee_kb"]
@@ -111,14 +111,12 @@ def main(filename, cost, skip, txsize, refresh, api_key, verbose):
             with open(filename, "w") as fh:
                 json.dump(coin, fh, indent=2)
                 fh.write("\n")
-            logging.info(
-                "{}:\tupdated {} -> {}".format(short, old_maxfee_kb, new_maxfee_kb)
-            )
+            logging.info(f"{short}:\tupdated {old_maxfee_kb} -> {new_maxfee_kb}")
             delta = delta_percent(old_maxfee_kb, new_maxfee_kb)
             if delta > MAX_DELTA_PERCENT:
-                logging.warning("{}:\tprice has changed by {} %".format(short, delta))
+                logging.warning(f"{short}:\tprice has changed by {delta} %")
         else:
-            logging.info("{}:\tno change".format(short))
+            logging.info(f"{short}:\tno change")


 if __name__ == "__main__":
@@ -82,7 +82,7 @@ class AllocCounter:

         allocs_per_last_line = allocs_now - self.last_alloc_count
         self.count_last_line(allocs_per_last_line)
-        self.last_line = "{}:{}".format(frame.f_code.co_filename, frame.f_lineno)
+        self.last_line = f"{frame.f_code.co_filename}:{frame.f_lineno}"
         self.last_alloc_count = micropython.alloc_count()

     def dump_data(self, filename):
@@ -26,7 +26,7 @@ def generate(env):
     # replace "utils.BITCOIN_ONLY" with literal constant (True/False)
     # so the compiler can optimize out the things we don't want
     btc_only = env['bitcoin_only'] == '1'
-    interim = "%s.i" % target[:-4]  # replace .mpy with .i
+    interim = f"{target[:-4]}.i"  # replace .mpy with .i
     sed_scripts = " ".join([
         f"-e 's/utils\.BITCOIN_ONLY/{btc_only}/g'",
         "-e 's/if TYPE_CHECKING/if False/'",
@@ -7,7 +7,7 @@ def process(source, target):
     for line in source:
         for match in re_qstr.findall(line):
             name = match.replace("MP_QSTR_", "")
-            target.write("Q(%s)\n" % name)
+            target.write(f"Q({name})\n")


 if __name__ == "__main__":
@@ -81,7 +81,7 @@ async def get_address(
         pubnodes = [hd.node for hd in msg.multisig.pubkeys]
         multisig_index = multisig_pubkey_index(msg.multisig, node.public_key())

-        title = "Multisig %d of %d" % (msg.multisig.m, len(pubnodes))
+        title = f"Multisig {msg.multisig.m} of {len(pubnodes)}"
         await show_address(
             ctx,
             address=address_short,
@@ -35,7 +35,7 @@ def format_coin_amount(amount: int, coin: CoinInfo, amount_unit: AmountUnit) ->
         decimals -= 3
         shortcut = "m" + shortcut
     # we don't need to do anything for AmountUnit.BITCOIN
-    return "%s %s" % (format_amount(amount, decimals), shortcut)
+    return f"{format_amount(amount, decimals)} {shortcut}"


 async def confirm_output(
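The old tuple-style `%` formatting and the new f-string render identically. A quick check with a simplified stand-in for `format_amount` (an assumption for illustration, not the real firmware helper):

# Simplified stand-in for trezor.strings.format_amount, for illustration only.
def format_amount(amount: int, decimals: int) -> str:
    return f"{amount / 10 ** decimals:.{decimals}f}".rstrip("0").rstrip(".")

amount, decimals, shortcut = 123_456_000, 8, "BTC"  # hypothetical values

old = "%s %s" % (format_amount(amount, decimals), shortcut)
new = f"{format_amount(amount, decimals)} {shortcut}"
assert old == new == "1.23456 BTC"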
@@ -1,12 +1,15 @@
+from micropython import const
 from ustruct import unpack

 from trezor.strings import format_amount

+_OMNI_DECIMALS = const(8)
+
 currencies = {
-    1: ("OMNI", True),
-    2: ("tOMNI", True),
-    3: ("MAID", False),
-    31: ("USDT", True),
+    1: ("OMNI", _OMNI_DECIMALS),
+    2: ("tOMNI", _OMNI_DECIMALS),
+    3: ("MAID", 0),
+    31: ("USDT", _OMNI_DECIMALS),
 }


@@ -20,11 +23,8 @@ def parse(data: bytes) -> str:
     tx_version, tx_type = unpack(">HH", data[4:8])
     if tx_version == 0 and tx_type == 0 and len(data) == 20:  # OMNI simple send
         currency, amount = unpack(">IQ", data[8:20])
-        suffix, divisible = currencies.get(currency, ("UNKN", False))
-        return "Simple send of %s %s" % (
-            format_amount(amount, 8 if divisible else 0),
-            suffix,
-        )
+        suffix, decimals = currencies.get(currency, ("UNKN", 0))
+        return f"Simple send of {format_amount(amount, decimals)} {suffix}"
     else:
         # unknown OMNI transaction
         return "Unknown transaction"
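Beyond the f-string itself, this hunk also replaces the boolean "divisible" flag with a decimals value, so the `8 if divisible else 0` branch disappears. A self-contained sketch of the reworked lookup (plain Python stand-ins; `const()` and the firmware's `format_amount` are not used here):

_OMNI_DECIMALS = 8  # stand-in for const(8)

currencies = {
    1: ("OMNI", _OMNI_DECIMALS),
    2: ("tOMNI", _OMNI_DECIMALS),
    3: ("MAID", 0),
    31: ("USDT", _OMNI_DECIMALS),
}

def describe(currency: int, amount: int) -> str:
    suffix, decimals = currencies.get(currency, ("UNKN", 0))
    # stand-in for trezor.strings.format_amount(amount, decimals)
    formatted = f"{amount / 10 ** decimals:.{decimals}f}".rstrip("0").rstrip(".") if decimals else str(amount)
    return f"Simple send of {formatted} {suffix}"

assert describe(31, 150_000_000) == "Simple send of 1.5 USDT"
assert describe(3, 42) == "Simple send of 42 MAID"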
@@ -188,9 +188,9 @@ class Credential:
             return [(None, format_script_hash(self.script_hash))]
         elif self.pointer:
             return [
-                ("Block: %s" % self.pointer.block_index, None),
-                ("Transaction: %s" % self.pointer.tx_index, None),
-                ("Certificate: %s" % self.pointer.certificate_index, None),
+                (f"Block: {self.pointer.block_index}", None),
+                (f"Transaction: {self.pointer.tx_index}", None),
+                (f"Certificate: {self.pointer.certificate_index}", None),
             ]
         else:
             return []
@@ -12,11 +12,11 @@ MINTING_ROOT = [1855 | HARDENED, SLIP44_ID | HARDENED]
 # fmt: off
 SCHEMA_PUBKEY = PathSchema.parse("m/[44,1852,1854]'/coin_type'/account'/*", SLIP44_ID)
 # minting has a specific schema for key derivation - see CIP-1855
-SCHEMA_MINT = PathSchema.parse("m/1855'/coin_type'/[0-%s]'" % (HARDENED - 1), SLIP44_ID)
+SCHEMA_MINT = PathSchema.parse(f"m/1855'/coin_type'/[0-{HARDENED - 1}]'", SLIP44_ID)
 SCHEMA_PAYMENT = PathSchema.parse("m/[44,1852]'/coin_type'/account'/[0,1]/address_index", SLIP44_ID)
 # staking is only allowed on Shelley paths with suffix /2/0
-SCHEMA_STAKING = PathSchema.parse("m/[1852]'/coin_type'/account'/2/0", SLIP44_ID)
-SCHEMA_STAKING_ANY_ACCOUNT = PathSchema.parse("m/[1852]'/coin_type'/[0-%s]'/2/0" % (HARDENED - 1), SLIP44_ID)
+SCHEMA_STAKING = PathSchema.parse("m/1852'/coin_type'/account'/2/0", SLIP44_ID)
+SCHEMA_STAKING_ANY_ACCOUNT = PathSchema.parse(f"m/1852'/coin_type'/[0-{HARDENED - 1}]'/2/0", SLIP44_ID)
 # fmt: on

 ACCOUNT_PATH_INDEX = const(2)
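The `HARDENED - 1` substitutions are evaluated once at import time, so the f-string and the old `%`-template produce the same schema string. A short sketch of what the template expands to, assuming the usual BIP-32 hardening offset:

HARDENED = 0x8000_0000  # assumption: standard BIP-32 hardening flag

schema_mint = f"m/1855'/coin_type'/[0-{HARDENED - 1}]'"
assert schema_mint == "m/1855'/coin_type'/[0-2147483647]'"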
@@ -22,7 +22,7 @@ def variable_length_encode(number: int) -> bytes:
     https://en.wikipedia.org/wiki/Variable-length_quantity
     """
     if number < 0:
-        raise ValueError("Negative numbers not supported. Number supplied: %s" % number)
+        raise ValueError(f"Negative numbers not supported. Number supplied: {number}")

     encoded = [number & 0x7F]
     while number > 0x7F:
@@ -40,7 +40,7 @@ def format_account_number(path: list[int]) -> str:
     if len(path) <= ACCOUNT_PATH_INDEX:
         raise ValueError("Path is too short.")

-    return "#%d" % (unharden(path[ACCOUNT_PATH_INDEX]) + 1)
+    return f"#{unharden(path[ACCOUNT_PATH_INDEX]) + 1}"


 def format_optional_int(number: int | None) -> str:
@@ -83,7 +83,7 @@ CERTIFICATE_TYPE_NAMES = {


 def format_coin_amount(amount: int) -> str:
-    return "%s %s" % (format_amount(amount, 6), "ADA")
+    return f"{format_amount(amount, 6)} ADA"


 def is_printable_ascii_bytestring(bytestr: bytes) -> bool:
@@ -104,14 +104,14 @@ async def show_native_script(
     elif script.key_hash:
         script_type_name_suffix = "hash"

+    if indices_str:
+        script_heading = "Script " + indices_str
+    else:
+        script_heading = "Script"
+
     props: list[PropertyType] = [
         (
-            "Script%s - %s %s:"
-            % (
-                (" " + indices_str if indices_str else ""),
-                SCRIPT_TYPE_NAMES[script.type],
-                script_type_name_suffix,
-            ),
+            f"{script_heading} - {SCRIPT_TYPE_NAMES[script.type]} {script_type_name_suffix}:",
             None,
         )
     ]
@@ -126,8 +126,7 @@ async def show_native_script(
         assert script.required_signatures_count is not None  # validate_script
         props.append(
             (
-                "Requires %s out of %s signatures."
-                % (script.required_signatures_count, len(script.scripts)),
+                f"Requires {script.required_signatures_count} out of {len(script.scripts)} signatures.",
                 None,
             )
         )
@@ -144,7 +143,7 @@ async def show_native_script(
         CardanoNativeScriptType.N_OF_K,
     ):
         assert script.scripts  # validate_script
-        props.append(("Contains %i nested scripts." % len(script.scripts), None))
+        props.append((f"Contains {len(script.scripts)} nested scripts.", None))

     await confirm_properties(
         ctx,
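This hunk is slightly more than a mechanical conversion: the inline `(" " + indices_str if indices_str else "")` expression is hoisted into a `script_heading` variable before the f-string is built. A quick equivalence check with illustrative values:

SCRIPT_TYPE_NAME, suffix = "All of", "hash"  # illustrative values

for indices_str in ("", "1.2"):
    old = "Script%s - %s %s:" % (
        (" " + indices_str if indices_str else ""),
        SCRIPT_TYPE_NAME,
        suffix,
    )
    script_heading = "Script " + indices_str if indices_str else "Script"
    new = f"{script_heading} - {SCRIPT_TYPE_NAME} {suffix}:"
    assert old == new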
@@ -263,7 +262,7 @@ async def _show_credential(
     if is_change_output:
         title = "Confirm transaction"
     else:
-        title = "%s address" % ADDRESS_TYPE_NAMES[credential.address_type]
+        title = f"{ADDRESS_TYPE_NAMES[credential.address_type]} address"

     props: list[PropertyType] = []

@@ -279,8 +278,7 @@ async def _show_credential(
         credential_title = credential.get_title()
         props.append(
             (
-                "%s %s credential is a %s:"
-                % (address_usage, credential.type_name, credential_title),
+                f"{address_usage} {credential.type_name} credential is a {credential_title}:",
                 None,
             )
         )
@@ -295,8 +293,7 @@ async def _show_credential(
     if credential.is_no_staking:
         props.append(
             (
-                "%s address - no staking rewards."
-                % ADDRESS_TYPE_NAMES[credential.address_type],
+                f"{ADDRESS_TYPE_NAMES[credential.address_type]} address - no staking rewards.",
                 None,
             )
         )
@@ -350,7 +347,7 @@ async def confirm_witness_request(
         "confirm_total",
         title="Confirm transaction",
         data=address_n_to_str(witness_path),
-        description="Sign transaction with %s:" % path_title,
+        description=f"Sign transaction with {path_title}:",
         br_code=ButtonRequestType.Other,
     )

@@ -368,14 +365,10 @@ async def confirm_transaction(
     ]

     if is_network_id_verifiable:
-        props.append(
-            ("Network: %s" % protocol_magics.to_ui_string(protocol_magic), None)
-        )
+        props.append((f"Network: {protocol_magics.to_ui_string(protocol_magic)}", None))

-    props.append(
-        ("Valid since: %s" % format_optional_int(validity_interval_start), None)
-    )
-    props.append(("TTL: %s" % format_optional_int(ttl), None))
+    props.append((f"Valid since: {format_optional_int(validity_interval_start)}", None))
+    props.append((f"TTL: {format_optional_int(ttl)}", None))

     await confirm_properties(
         ctx,
@@ -401,7 +394,7 @@ async def confirm_certificate(
     if certificate.path:
         props.append(
             (
-                "for account %s:" % format_account_number(certificate.path),
+                f"for account {format_account_number(certificate.path)}:",
                 address_n_to_str(to_account_path(certificate.path)),
             ),
         )
@@ -428,7 +421,7 @@ async def confirm_stake_pool_parameters(
     margin_percentage = (
         100.0 * pool_parameters.margin_numerator / pool_parameters.margin_denominator
     )
-    percentage_formatted = ("%f" % margin_percentage).rstrip("0").rstrip(".")
+    percentage_formatted = str(float(margin_percentage)).rstrip("0").rstrip(".")
     await confirm_properties(
         ctx,
         "confirm_pool_registration",
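For a quick sense of the trailing-zero stripping around the margin percentage (the value below is an arbitrary example):

margin_percentage = 0.5  # arbitrary example value

old = ("%f" % margin_percentage).rstrip("0").rstrip(".")      # "0.500000" -> "0.5"
new = str(float(margin_percentage)).rstrip("0").rstrip(".")   # "0.5"      -> "0.5"
assert old == new == "0.5"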
@@ -559,7 +552,7 @@ async def confirm_withdrawal(
     if withdrawal.path:
         props.append(
             (
-                "for account %s:" % format_account_number(withdrawal.path),
+                f"for account {format_account_number(withdrawal.path)}:",
                 address_n_to_str(to_account_path(withdrawal.path)),
             )
         )
@@ -593,7 +586,7 @@ async def confirm_catalyst_registration(
             ("Catalyst voting key registration", None),
             ("Voting public key:", public_key),
             (
-                "Staking key for account %s:" % format_account_number(staking_path),
+                f"Staking key for account {format_account_number(staking_path)}:",
                 address_n_to_str(staking_path),
             ),
             ("Rewards go to:", reward_address),
@@ -630,8 +623,6 @@ async def confirm_token_minting(
     ctx: wire.Context, policy_id: bytes, token: CardanoToken
 ) -> None:
     assert token.mint_amount is not None  # _validate_token
-    is_minting = token.mint_amount >= 0
-
     await confirm_properties(
         ctx,
         "confirm_mint",
@@ -645,7 +636,7 @@ async def confirm_token_minting(
                 ),
             ),
             (
-                "Amount %s:" % ("minted" if is_minting else "burned"),
+                "Amount minted:" if token.mint_amount >= 0 else "Amount burned:",
                 format_amount(token.mint_amount, 0),
             ),
         ],
@@ -674,7 +665,7 @@ async def show_cardano_address(
     if not protocol_magics.is_mainnet(protocol_magic):
         network_name = protocol_magics.to_ui_string(protocol_magic)

-    title = "%s address" % ADDRESS_TYPE_NAMES[address_parameters.address_type]
+    title = f"{ADDRESS_TYPE_NAMES[address_parameters.address_type]} address"
     address_extra = None
     title_qr = title
     if address_parameters.address_type in (
@@ -1033,7 +1033,7 @@ async def _fail_or_warn_path(
     ctx: wire.Context, path: list[int], path_name: str
 ) -> None:
     if safety_checks.is_strict():
-        raise wire.DataError("Invalid %s" % path_name.lower())
+        raise wire.DataError(f"Invalid {path_name.lower()}")
     else:
         await show_warning_path(ctx, path, path_name)

@@ -1045,7 +1045,7 @@ def _fail_if_strict_and_unusual(
         return

     if Credential.payment_credential(address_parameters).is_unusual_path:
-        raise wire.DataError("Invalid %s" % CHANGE_OUTPUT_PATH_NAME.lower())
+        raise wire.DataError(f"Invalid {CHANGE_OUTPUT_PATH_NAME.lower()}")

     if Credential.stake_credential(address_parameters).is_unusual_path:
-        raise wire.DataError("Invalid %s" % CHANGE_OUTPUT_STAKING_PATH_NAME.lower())
+        raise wire.DataError(f"Invalid {CHANGE_OUTPUT_STAKING_PATH_NAME.lower()}")
@@ -58,7 +58,7 @@ def _header(typ: int, l: int) -> bytes:
     elif l < 2 ** 64:
         return struct.pack(">BQ", typ + 27, l)
     else:
-        raise NotImplementedError("Length %d not supported" % l)
+        raise NotImplementedError  # Length not supported


 def _cbor_encode(value: Value) -> Iterator[bytes]:
@@ -128,7 +128,7 @@ def _read_length(r: utils.BufferReader, aux: int) -> int:
     elif aux == _CBOR_UINT64_FOLLOWS:
         return readers.read_uint64_be(r)
     else:
-        raise NotImplementedError("Length %d not supported" % aux)
+        raise NotImplementedError  # Length not supported


 def _cbor_decode(r: utils.BufferReader) -> Value:
@@ -1830,4 +1830,4 @@ def by_name(name: str) -> CoinInfo:
             overwintered=True,
             confidential_assets=None,
         )
-    raise ValueError('Unknown coin name "%s"' % name)
+    raise ValueError  # Unknown coin name
@@ -159,4 +159,4 @@ def by_name(name: str) -> CoinInfo:
 % endfor
         )
 % endfor
-    raise ValueError('Unknown coin name "%s"' % name)
+    raise ValueError  # Unknown coin name
@@ -27,7 +27,7 @@ async def _request_from_user(ctx: wire.Context) -> str:
         passphrase = await _request_on_host(ctx)
     if len(passphrase.encode()) > _MAX_PASSPHRASE_LEN:
         raise wire.DataError(
-            "Maximum passphrase length is %d bytes" % _MAX_PASSPHRASE_LEN
+            f"Maximum passphrase length is {_MAX_PASSPHRASE_LEN} bytes"
         )

     return passphrase
@@ -30,4 +30,4 @@ def decode_message(message: bytes) -> str:
     try:
         return bytes(message).decode()
     except UnicodeError:
-        return "hex(%s)" % hexlify(message).decode()
+        return f"hex({hexlify(message).decode()})"
@@ -110,9 +110,7 @@ if __debug__:
         layout_change_chan.putters.clear()
         await ui.wait_until_layout_is_running()
         storage.watch_layout_changes = bool(msg.watch)
-        log.debug(
-            __name__, "Watch layout changes: {}".format(storage.watch_layout_changes)
-        )
+        log.debug(__name__, "Watch layout changes: %s", storage.watch_layout_changes)
         return Success()

     async def dispatch_DebugLinkDecision(
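The last hunk above is one place that does not become an f-string: the pre-rendered `.format()` call is replaced by a %-style template plus a separate argument, so interpolation can be deferred to the logger itself. A minimal stand-in logger showing the calling convention (the real `trezor.log` API is not reproduced here, and the module name is illustrative):

def debug(name: str, msg: str, *args) -> None:
    # Toy logger: interpolate only when the record is actually emitted.
    print(f"{name}: {msg % args if args else msg}")

watch_layout_changes = True
debug("apps.debug", "Watch layout changes: %s", watch_layout_changes)
# prints: apps.debug: Watch layout changes: True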
@@ -149,8 +149,8 @@ async def confirm_action_voteproducer(
         "confirm_voteproducer",
         title="Vote for producers",
         props=(
-            ("{:2d}. {}".format(wi + 1, helpers.eos_name_to_string(producer)), None)
-            for wi, producer in enumerate(msg.producers)
+            (f"{wi:2d}. {helpers.eos_name_to_string(producer)}", None)
+            for wi, producer in enumerate(msg.producers, 1)
         ),
         icon=ui.ICON_CONFIRM,
         br_code=ButtonRequestType.ConfirmOutput,
@@ -300,35 +300,35 @@ def authorization_fields(auth: EosAuthorization) -> list[tuple[str, str | None]]
     fields = []
     fields.append(("Threshold:", str(auth.threshold)))

-    for i, key in enumerate(auth.keys):
+    for i, key in enumerate(auth.keys, 1):
         _key = helpers.public_key_to_wif(bytes(key.key))
         _weight = str(key.weight)

-        header = "Key #{}:".format(i + 1)
-        w_header = "Key #{} Weight:".format(i + 1)
+        header = f"Key #{i}:"
+        w_header = f"Key #{i} Weight:"

         fields.append((header, _key))
         fields.append((w_header, _weight))

-    for i, account in enumerate(auth.accounts):
+    for i, account in enumerate(auth.accounts, 1):
         _account = helpers.eos_name_to_string(account.account.actor)
         _permission = helpers.eos_name_to_string(account.account.permission)

-        a_header = "Account #{}:".format(i + 1)
-        p_header = "Acc Permission #{}:".format(i + 1)
-        w_header = "Account #{} weight:".format(i + 1)
+        a_header = f"Account #{i}:"
+        p_header = f"Acc Permission #{i}:"
+        w_header = f"Account #{i} weight:"

         fields.append((a_header, _account))
         fields.append((p_header, _permission))
         fields.append((w_header, str(account.weight)))

-    for i, wait in enumerate(auth.waits):
+    for i, wait in enumerate(auth.waits, 1):
         _wait = str(wait.wait_sec)
         _weight = str(wait.weight)

-        header = "Delay #{}".format(i + 1)
-        w_header = "Delay #{} weight:".format(i + 1)
-        fields.append((header, "{} sec".format(_wait)))
+        header = f"Delay #{i}"
+        w_header = f"Delay #{i} weight:"
+        fields.append((header, f"{_wait} sec"))
         fields.append((w_header, _weight))

     return fields
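Alongside the f-strings, the repeated `i + 1` arithmetic is folded away by starting `enumerate()` at one. A quick check with illustrative data:

producers = ["alice", "bob"]  # illustrative data

old = ["{:2d}. {}".format(wi + 1, p) for wi, p in enumerate(producers)]
new = [f"{wi:2d}. {p}" for wi, p in enumerate(producers, 1)]
assert old == new == [" 1. alice", " 2. bob"]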
@@ -35,9 +35,9 @@ def eos_asset_to_string(asset: EosAsset) -> str:
             integer = "0"
         fraction = amount_digits[-precision:]

-        return "{}.{} {}".format(integer, fraction, symbol)
+        return f"{integer}.{fraction} {symbol}"
     else:
-        return "{} {}".format(amount_digits, symbol)
+        return f"{amount_digits} {symbol}"


 def public_key_to_wif(pub_key: bytes) -> str:
@@ -100,7 +100,7 @@ def require_confirm_data(ctx: Context, data: bytes, data_total: int) -> Awaitabl
         ctx,
         "confirm_data",
         title="Confirm data",
-        description="Size: %d bytes" % data_total,
+        description=f"Size: {data_total} bytes",
         data=data,
         br_code=ButtonRequestType.SignTx,
     )
@@ -121,4 +121,4 @@ def format_ethereum_amount(
         suffix = "Wei " + suffix
         decimals = 0

-    return "%s %s" % (format_amount(value, decimals), suffix)
+    return f"{format_amount(value, decimals)} {suffix}"
@@ -18,9 +18,7 @@ def validate_homescreen(homescreen: bytes) -> None:

     if len(homescreen) > storage.device.HOMESCREEN_MAXSIZE:
         raise wire.DataError(
-            "Homescreen is too large, maximum size is {} bytes".format(
-                storage.device.HOMESCREEN_MAXSIZE
-            )
+            f"Homescreen is too large, maximum size is {storage.device.HOMESCREEN_MAXSIZE} bytes"
         )

     try:
@@ -82,9 +82,7 @@ def _validate(msg: RecoveryDevice) -> None:
     # check that only allowed fields are set
     for key, value in msg.__dict__.items():
         if key not in DRY_RUN_ALLOWED_FIELDS and value is not None:
-            raise wire.ProcessError(
-                "Forbidden field set in dry-run: {}".format(key)
-            )
+            raise wire.ProcessError(f"Forbidden field set in dry-run: {key}")


 async def _continue_dialog(ctx: wire.Context, msg: RecoveryDevice) -> None:
@@ -187,11 +187,11 @@ async def _request_share_first_screen(
             await _request_share_next_screen(ctx)
         else:
             await layout.homescreen_dialog(
-                ctx, "Enter share", "Enter any share", "(%d words)" % word_count
+                ctx, "Enter share", "Enter any share", f"({word_count} words)"
             )
     else:  # BIP-39
         await layout.homescreen_dialog(
-            ctx, "Enter seed", "Enter recovery seed", "(%d words)" % word_count
+            ctx, "Enter seed", "Enter recovery seed", f"({word_count} words)"
         )

@@ -61,21 +61,15 @@ async def _show_confirmation_success(
         if group_index is None:
             subheader = "You have finished\nverifying your\nrecovery shares."
         else:
-            subheader = (
-                "You have finished\nverifying your\nrecovery shares\nfor group %s."
-                % (group_index + 1)
-            )
+            subheader = f"You have finished\nverifying your\nrecovery shares\nfor group {group_index + 1}."
         text = ""

     else:
         if group_index is None:
-            subheader = "Recovery share #%s\nchecked successfully." % (share_index + 1)
-            text = "Continue with share #%s." % (share_index + 2)
+            subheader = f"Recovery share #{share_index + 1}\nchecked successfully."
+            text = f"Continue with share #{share_index + 2}."
         else:
-            subheader = "Group %s - Share %s\nchecked successfully." % (
-                (group_index + 1),
-                (share_index + 1),
-            )
+            subheader = f"Group {group_index + 1} - Share {share_index + 1}\nchecked successfully."
             text = "Continue with the next\nshare."

     return await show_success(ctx, "success_recovery", text, subheader=subheader)
@@ -87,7 +81,7 @@ async def _show_confirmation_failure(
     if share_index is None:
         header = "Recovery seed"
     else:
-        header = "Recovery share #%s" % (share_index + 1)
+        header = f"Recovery share #{share_index + 1}"
     await show_warning(
         ctx,
         "warning_backup_check",
@@ -47,7 +47,7 @@ async def require_confirm_ecdh_session_key(
     proto = identity.proto.upper() if identity.proto else "identity"
     await confirm_address(
         ctx,
-        "Decrypt %s" % proto,
+        f"Decrypt {proto}",
         serialize_identity_without_proto(identity),
         description=None,
         icon=ui.ICON_DEFAULT,
@@ -26,9 +26,7 @@ if __debug__:
         diff = PREV_MEM - free
         log.debug(
             __name__,
-            "======= {} {} Diff: {} Free: {} Allocated: {}".format(
-                CUR_MES, x, diff, free, gc.mem_alloc()
-            ),
+            f"======= {CUR_MES} {x} Diff: {diff} Free: {free} Allocated: {gc.mem_alloc()}",
         )
         micropython.mem_info()
         gc.collect()
@@ -20,7 +20,7 @@ if False:


 def _format_amount(value):
-    return "%s XMR" % strings.format_amount(value, 12)
+    return f"{strings.format_amount(value, 12)} XMR"


 async def require_confirm_watchkey(ctx):
@@ -209,7 +209,7 @@ class LiveRefreshStep(ui.Component):
         p = (1000 * current // 8) % 1000
         ui.display.loader(p, True, 18, ui.WHITE, ui.BG)
         ui.display.text_center(
-            ui.WIDTH // 2, 145, "%d" % current, ui.NORMAL, ui.FG, ui.BG
+            ui.WIDTH // 2, 145, str(current), ui.NORMAL, ui.FG, ui.BG
         )


@@ -217,19 +217,19 @@ async def transaction_step(state: State, step: int, sub_step: int | None = None)
     if step == 0:
         info = ["Signing..."]
     elif step == state.STEP_INP:
-        info = ["Processing inputs", "%d/%d" % (sub_step + 1, state.input_count)]
+        info = ["Processing inputs", f"{sub_step + 1}/{state.input_count}"]
     elif step == state.STEP_PERM:
         info = ["Sorting..."]
     elif step == state.STEP_VINI:
-        info = ["Hashing inputs", "%d/%d" % (sub_step + 1, state.input_count)]
+        info = ["Hashing inputs", f"{sub_step + 1}/{state.input_count}"]
     elif step == state.STEP_ALL_IN:
         info = ["Processing..."]
     elif step == state.STEP_OUT:
-        info = ["Processing outputs", "%d/%d" % (sub_step + 1, state.output_count)]
+        info = ["Processing outputs", f"{sub_step + 1}/{state.output_count}"]
     elif step == state.STEP_ALL_OUT:
         info = ["Postprocessing..."]
     elif step == state.STEP_SIGN:
-        info = ["Signing inputs", "%d/%d" % (sub_step + 1, state.input_count)]
+        info = ["Signing inputs", f"{sub_step + 1}/{state.input_count}"]
     else:
         info = ["Processing..."]

@@ -41,8 +41,7 @@ async def set_input(
     # real_output denotes which output in outputs is the real one (ours)
     if src_entr.real_output >= len(src_entr.outputs):
         raise ValueError(
-            "real_output index %s bigger than output_keys.size() %s"
-            % (src_entr.real_output, len(src_entr.outputs))
+            f"real_output index {src_entr.real_output} bigger than output_keys.size() {len(src_entr.outputs)}"
         )
     state.summary_inputs_money += src_entr.amount

@@ -312,7 +312,7 @@ def _rsig_bp(state: State) -> bytes:

     rsig = _dump_rsig_bp(rsig)
     state.mem_trace(
-        "post-bp-ser, size: %s" % len(rsig) if __debug__ else None, collect=True
+        f"post-bp-ser, size: {len(rsig)}" if __debug__ else None, collect=True
     )

     # state cleanup
@@ -87,18 +87,12 @@ def _validate(state: State):
     # Fee test
     if state.fee != (state.summary_inputs_money - state.summary_outs_money):
         raise ValueError(
-            "Fee invalid %s vs %s, out: %s"
-            % (
-                state.fee,
-                state.summary_inputs_money - state.summary_outs_money,
-                state.summary_outs_money,
-            )
+            f"Fee invalid {state.fee} vs {state.summary_inputs_money - state.summary_outs_money}, out: {state.summary_outs_money}"
         )

     if state.summary_outs_money > state.summary_inputs_money:
         raise ValueError(
-            "Transaction inputs money (%s) less than outputs money (%s)"
-            % (state.summary_inputs_money, state.summary_outs_money)
+            f"Transaction inputs money ({state.summary_inputs_money}) less than outputs money ({state.summary_outs_money})"
         )

@@ -654,7 +654,7 @@ class KeyVPowers(KeyVBase):
                 else crypto.sc_mul_into(self.cur, self.cur, self.x)
             )
         else:
-            raise IndexError("Only linear scan allowed: %s, %s" % (prev, item))
+            raise IndexError(f"Only linear scan allowed: {prev}, {item}")

     def set_state(self, idx, val):
         self.last_idx = idx
@@ -40,7 +40,7 @@ def net_version(
     elif network_type == NetworkTypes.STAGENET:
         c_net = StageNet
     else:
-        raise ValueError("Unknown network type: %s" % network_type)
+        raise ValueError(f"Unknown network type: {network_type}")

     prefix = c_net.PUBLIC_ADDRESS_BASE58_PREFIX
     if is_subaddr:
@@ -106,7 +106,7 @@ class VariantType(XmrType):
             if isinstance(elem, ftype):
                 break
         else:
-            raise ValueError("Unrecognized variant: %s" % elem)
+            raise ValueError(f"Unrecognized variant: {elem}")

         dump_uint(writer, ftype.VARIANT_CODE, 1)
         ftype.dump(writer, elem)
@@ -120,7 +120,7 @@ class VariantType(XmrType):
                 fvalue = ftype.load(reader)
                 break
         else:
-            raise ValueError("Unknown tag: %s" % tag)
+            raise ValueError(f"Unknown tag: {tag}")
         return fvalue

     @classmethod
@@ -22,7 +22,7 @@ async def require_confirm_fee(ctx, action: str, fee: int):
         "confirm_fee",
         title="Confirm fee",
         content=action + "\n{}",
-        param="{} XEM".format(format_amount(fee, NEM_MAX_DIVISIBILITY)),
+        param=f"{format_amount(fee, NEM_MAX_DIVISIBILITY)} XEM",
         hide_continue=True,
         br_code=ButtonRequestType.ConfirmOutput,
     )
@@ -44,7 +44,7 @@ async def require_confirm_final(ctx, fee: int):
         "confirm_final",
         title="Final confirm",
         content="Sign this transaction\n{}\nfor network fee?",
-        param="and pay {} XEM".format(format_amount(fee, NEM_MAX_DIVISIBILITY)),
+        param=f"and pay {format_amount(fee, NEM_MAX_DIVISIBILITY)} XEM",
         hide_continue=True,
         hold=True,
     )
@@ -90,8 +90,8 @@ async def ask_transfer_mosaic(
         "confirm_mosaic_transfer",
         title="Confirm mosaic",
         props=[
-            ("Confirm transfer of", "%s raw units" % mosaic_quantity),
-            ("of", "%s.%s" % (mosaic.namespace, mosaic.mosaic)),
+            ("Confirm transfer of", f"{mosaic_quantity} raw units"),
+            ("of", f"{mosaic.namespace}.{mosaic.mosaic}"),
         ],
     )

@@ -139,7 +139,7 @@ async def _require_confirm_transfer(ctx, recipient, value):
     await confirm_output(
         ctx,
         recipient,
-        amount="Send {} XEM".format(format_amount(value, NEM_MAX_DIVISIBILITY)),
+        amount=f"Send {format_amount(value, NEM_MAX_DIVISIBILITY)} XEM",
         font_amount=ui.BOLD,
         title="Confirm transfer",
         to_str="\nto\n",
@@ -96,9 +96,9 @@ def _validate_common(common: NEMTransactionCommon, inner: bool = False):

 if err:
 if inner:
-raise ProcessError("No %s provided in inner transaction" % err)
+raise ProcessError(f"No {err} provided in inner transaction")
 else:
-raise ProcessError("No %s provided" % err)
+raise ProcessError(f"No {err} provided")

 if common.signer is not None:
 _validate_public_key(
@@ -108,9 +108,9 @@ def _validate_common(common: NEMTransactionCommon, inner: bool = False):

 def _validate_public_key(public_key: bytes, err_msg: str):
 if not public_key:
-raise ProcessError("%s (none provided)" % err_msg)
+raise ProcessError(f"{err_msg} (none provided)")
 if len(public_key) != NEM_PUBLIC_KEY_SIZE:
-raise ProcessError("%s (invalid length)" % err_msg)
+raise ProcessError(f"{err_msg} (invalid length)")


 def _validate_importance_transfer(importance_transfer: NEMImportanceTransfer):
@@ -114,7 +114,7 @@ async def confirm_manage_offer_op(ctx: Context, op: StellarManageOfferOp) -> Non
 text = "Delete"
 else:
 text = "Update"
-text += " #%d" % op.offer_id
+text += f" #{op.offer_id}"
 await _confirm_offer(ctx, text, op)


@@ -131,7 +131,7 @@ async def _confirm_offer(
 ("Selling:", format_amount(op.amount, op.selling_asset)),
 ("Buying:", format_asset(op.buying_asset)),
 (
-"Price per {}:".format(format_asset(op.buying_asset)),
+f"Price per {format_asset(op.buying_asset)}:",
 str(op.price_n / op.price_d),
 ),
 ),
@@ -279,6 +279,6 @@ async def confirm_asset_issuer(ctx: Context, asset: StellarAsset) -> None:
 ctx,
 "Confirm Issuer",
 asset.issuer,
-description="{} issuer:".format(asset.code),
+description=f"{asset.code} issuer:",
 br_type="confirm_asset_issuer",
 )
@@ -412,10 +412,9 @@ class U2fCredential(Credential):
 if app is not None:
 return app.label

-return "%s...%s" % (
-hexlify(self.rp_id_hash[:4]).decode(),
-hexlify(self.rp_id_hash[-4:]).decode(),
-)
+start = hexlify(self.rp_id_hash[:4]).decode()
+end = hexlify(self.rp_id_hash[-4:]).decode()
+return f"{start}...{end}"

 @staticmethod
 def from_key_handle(key_handle: bytes, rp_id_hash: bytes) -> "U2fCredential":
@@ -334,7 +334,7 @@ def resp_cmd_authenticate(siglen: int) -> dict:
 def overlay_struct(buf: bytearray, desc: dict) -> Any:
 desc_size = uctypes.sizeof(desc, uctypes.BIG_ENDIAN) # type: ignore
 if desc_size > len(buf):
-raise ValueError("desc is too big (%d > %d)" % (desc_size, len(buf)))
+raise ValueError(f"desc is too big ({desc_size} > {len(buf)})")
 return uctypes.struct(uctypes.addressof(buf), desc, uctypes.BIG_ENDIAN)

@@ -68,7 +68,7 @@ class DataCache:
 ...

 def get(self, key: int, default: T | None = None) -> bytes | T | None: # noqa: F811
-utils.ensure(key < len(self.fields), "failed to load key %d" % key)
+utils.ensure(key < len(self.fields), f"failed to load key {key}")
 if self.data[key][0] != 1:
 return default
 return bytes(self.data[key][1:])
@@ -31,7 +31,7 @@ def compute_auth_tag(salt: bytes, auth_key: bytes) -> bytes:


 def _get_device_dir() -> str:
-return "/trezor/device_{}".format(storage.device.get_device_id().lower())
+return f"/trezor/device_{storage.device.get_device_id().lower()}"


 def _get_salt_path(new: bool = False) -> str:
@@ -213,9 +213,7 @@ def split_ems(

 if group_threshold > len(groups):
 raise ValueError(
-"The requested group threshold ({}) must not exceed the number of groups ({}).".format(
-group_threshold, len(groups)
-)
+f"The requested group threshold ({group_threshold}) must not exceed the number of groups ({len(groups)})."
 )

 if any(
@@ -273,17 +271,13 @@ def recover_ems(mnemonics: list[str]) -> tuple[int, int, bytes]:

 if len(groups) != group_threshold:
 raise MnemonicError(
-"Wrong number of mnemonic groups. Expected {} groups, but {} were provided.".format(
-group_threshold, len(groups)
-)
+f"Wrong number of mnemonic groups. Expected {group_threshold} groups, but {len(groups)} were provided."
 )

 for group_index, group in groups.items():
 if len(group[1]) != group[0]: # group[0] is threshold
 raise MnemonicError(
-"Wrong number of mnemonics. Expected {} mnemonics, but {} were provided.".format(
-group[0], len(group[1])
-)
+f"Wrong number of mnemonics. Expected {group[0]} mnemonics, but {len(group[1])} were provided."
 )

 group_shares = [
@@ -302,9 +296,7 @@ def decode_mnemonic(mnemonic: str) -> Share:

 if len(mnemonic_data) < _MIN_MNEMONIC_LENGTH_WORDS:
 raise MnemonicError(
-"Invalid mnemonic length. The length of each mnemonic must be at least {} words.".format(
-_MIN_MNEMONIC_LENGTH_WORDS
-)
+f"Invalid mnemonic length. The length of each mnemonic must be at least {_MIN_MNEMONIC_LENGTH_WORDS} words."
 )

 padding_len = (_RADIX_BITS * (len(mnemonic_data) - _METADATA_LENGTH_WORDS)) % 16
@@ -487,21 +479,17 @@ def _split_secret(
 ) -> list[tuple[int, bytes]]:
 if threshold < 1:
 raise ValueError(
-"The requested threshold ({}) must be a positive integer.".format(threshold)
+f"The requested threshold ({threshold}) must be a positive integer."
 )

 if threshold > share_count:
 raise ValueError(
-"The requested threshold ({}) must not exceed the number of shares ({}).".format(
-threshold, share_count
-)
+f"The requested threshold ({threshold}) must not exceed the number of shares ({share_count})."
 )

 if share_count > MAX_SHARE_COUNT:
 raise ValueError(
-"The requested number of shares ({}) must not exceed {}.".format(
-share_count, MAX_SHARE_COUNT
-)
+f"The requested number of shares ({share_count}) must not exceed {MAX_SHARE_COUNT}."
 )

 # If the threshold is 1, then the digest of the shared secret is not used.
@@ -615,9 +603,7 @@ def _decode_mnemonics(

 if len(identifiers) != 1 or len(iteration_exponents) != 1:
 raise MnemonicError(
-"Invalid set of mnemonics. All mnemonics must begin with the same {} words.".format(
-_ID_EXP_LENGTH_WORDS
-)
+f"Invalid set of mnemonics. All mnemonics must begin with the same {_ID_EXP_LENGTH_WORDS} words."
 )

 if len(group_thresholds) != 1:
@@ -231,7 +231,7 @@ class sleep(Syscall):
 Example:

 >>> planned = await loop.sleep(1000) # sleep for 1s
->>> print('missed by %d ms', utime.ticks_diff(utime.ticks_ms(), planned))
+>>> print(f"missed by {utime.ticks_diff(utime.ticks_ms(), planned)} ms")
 """

 def __init__(self, delay_ms: int) -> None:
@@ -34,7 +34,7 @@ def show_pin_timeout(seconds: int, progress: int, message: str) -> bool:
 elif seconds == 1:
 remaining = "1 second left"
 else:
-remaining = "%d seconds left" % seconds
+remaining = f"{seconds} seconds left"
 ui.display.bar(0, ui.HEIGHT - 42, ui.WIDTH, 25, ui.BG)
 ui.display.text_center(
 ui.WIDTH // 2, ui.HEIGHT - 22, remaining, ui.BOLD, ui.FG, ui.BG
@@ -5,11 +5,7 @@ def format_amount(amount: int, decimals: int) -> str:
 else:
 sign = ""
 d = pow(10, decimals)
-s = (
-("%s%d.%0*d" % (sign, amount // d, decimals, amount % d))
-.rstrip("0")
-.rstrip(".")
-)
+s = f"{sign}{amount // d}.{amount % d:0{decimals}}".rstrip("0").rstrip(".")
 return s


@@ -251,4 +251,4 @@ class Button(ui.Component):
 if __debug__:

 def read_content(self) -> list[str]:
-return ["<Button: {}>".format(self.text)]
+return [f"<Button: {self.text}>"]
@@ -59,11 +59,11 @@ class Slip39NumInput(ui.Component):
 first_line_text = "Only one share will"
 second_line_text = "be created."
 else:
-first_line_text = "%s people or locations" % count
+first_line_text = f"{count} people or locations"
 second_line_text = "will each hold one share."
 else:
 first_line_text = "Set the total number of"
-second_line_text = "shares in Group %s." % (self.group_id + 1)
+second_line_text = f"shares in Group {self.group_id + 1}."
 ui.display.bar(0, 110, ui.WIDTH, 52, ui.BG)
 ui.display.text(12, 130, first_line_text, ui.NORMAL, ui.FG, ui.BG)
 ui.display.text(12, 156, second_line_text, ui.NORMAL, ui.FG, ui.BG)
@@ -73,12 +73,12 @@ class Slip39NumInput(ui.Component):
 if count == 1:
 second_line_text = "1 share."
 elif count == self.input.max_count:
-second_line_text = "all %s of the shares." % count
+second_line_text = f"all {count} of the shares."
 else:
-second_line_text = "any %s of the shares." % count
+second_line_text = f"any {count} of the shares."
 else:
 first_line_text = "The required number of "
-second_line_text = "shares to form Group %s." % (self.group_id + 1)
+second_line_text = f"shares to form Group {self.group_id + 1}."
 ui.display.bar(0, 110, ui.WIDTH, 52, ui.BG)
 ui.display.text(12, 130, first_line_text, ui.NORMAL, ui.FG, ui.BG)
 ui.display.text(12, 156, second_line_text, ui.NORMAL, ui.FG, ui.BG)
@@ -127,12 +127,10 @@ class MnemonicWordSelect(ui.Layout):
 if share_index is None:
 self.text: ui.Component = Text("Check seed")
 elif group_index is None:
-self.text = Text("Check share #%s" % (share_index + 1))
+self.text = Text(f"Check share #{share_index + 1}")
 else:
-self.text = Text(
-"Check G%s - Share %s" % ((group_index + 1), (share_index + 1))
-)
-self.text.normal("Select word %d of %d:" % (word_index + 1, count))
+self.text = Text(f"Check G{group_index + 1} - Share {share_index + 1}")
+self.text.normal(f"Select word {word_index + 1} of {count}:")

 def dispatch(self, event: int, x: int, y: int) -> None:
 for btn in self.buttons:
@@ -19,7 +19,8 @@ async def button_request(
 br_type: str,
 code: ButtonRequestType = ButtonRequestType.Other,
 ) -> None:
-log.debug(__name__, "ButtonRequest.type={}".format(br_type))
+if __debug__:
+log.debug(__name__, "ButtonRequest.type=%s", br_type)
 workflow.close_others()
 await ctx.call(ButtonRequest(code=code), ButtonAck)

@@ -248,7 +248,7 @@ def _show_address(
 network: str | None = None,
 extra: str | None = None,
 ) -> ui.Layout:
-para = [(ui.NORMAL, "%s network" % network)] if network is not None else []
+para = [(ui.NORMAL, f"{network} network")] if network is not None else []
 if extra is not None:
 para.append((ui.BOLD, extra))
 para.extend(
@@ -340,7 +340,7 @@ async def show_address(
 if is_multisig:
 for i, xpub in enumerate(xpubs):
 cancel = "Next" if i < len(xpubs) - 1 else "Address"
-title_xpub = "XPUB #%d" % (i + 1)
+title_xpub = f"XPUB #{i + 1}"
 title_xpub += " (yours)" if i == multisig_index else " (cosigner)"
 if is_confirmed(
 await interact(
@@ -864,7 +864,7 @@ async def confirm_coinjoin(
 text = Text("Authorize CoinJoin", ui.ICON_RECOVERY, new_lines=False)
 if fee_per_anonymity is not None:
 text.normal("Fee per anonymity set:\n")
-text.bold("{} %\n".format(fee_per_anonymity))
+text.bold(f"{fee_per_anonymity} %\n")
 text.normal("Maximum total fees:\n")
 text.bold(total_fee)
 await raise_if_cancelled(
@@ -876,7 +876,7 @@ async def confirm_coinjoin(
 async def confirm_sign_identity(
 ctx: wire.GenericContext, proto: str, identity: str, challenge_visual: str | None
 ) -> None:
-text = Text("Sign %s" % proto, new_lines=False)
+text = Text(f"Sign {proto}", new_lines=False)
 if challenge_visual:
 text.normal(challenge_visual)
 text.br()
@@ -890,7 +890,7 @@ async def confirm_signverify(
 ctx: wire.GenericContext, coin: str, message: str, address: str | None = None
 ) -> None:
 if address:
-header = "Verify {} message".format(coin)
+header = f"Verify {coin} message"
 font = ui.MONO
 br_type = "verify_message"

@@ -901,7 +901,7 @@ async def confirm_signverify(
 interact(ctx, Confirm(text), br_type, ButtonRequestType.Other)
 )
 else:
-header = "Sign {} message".format(coin)
+header = f"Sign {coin} message"
 font = ui.NORMAL
 br_type = "sign_message"

@@ -963,7 +963,7 @@ async def request_pin_on_device(
 elif attempts_remaining == 1:
 subprompt = "This is your last attempt"
 else:
-subprompt = "%s attempts remaining" % attempts_remaining
+subprompt = f"{attempts_remaining} attempts remaining"

 dialog = pin.PinDialog(prompt, subprompt, allow_cancel)
 while True:
@@ -34,7 +34,7 @@ async def confirm_total_ripple(
 ) -> None:
 title = "Confirm sending"
 text = Text(title, ui.ICON_SEND, ui.GREEN, new_lines=False)
-text.bold("{} XRP\n".format(amount))
+text.bold(f"{amount} XRP\n")
 text.normal("to\n")
 text.mono(*chunks_intersperse(address, MONO_ADDR_PER_LINE))

@@ -41,10 +41,10 @@ async def request_word(
 ) -> str:
 if is_slip39:
 keyboard: Slip39Keyboard | Bip39Keyboard = Slip39Keyboard(
-"Type word %s of %s:" % (word_index + 1, word_count)
+f"Type word {word_index + 1} of {word_count}:"
 )
 else:
-keyboard = Bip39Keyboard("Type word %s of %s:" % (word_index + 1, word_count))
+keyboard = Bip39Keyboard(f"Type word {word_index + 1} of {word_count}:")

 word: str = await ctx.wait(keyboard)
 return word
@@ -93,9 +93,9 @@ async def show_group_share_success(
 ) -> None:
 text = Text("Success", ui.ICON_CONFIRM)
 text.bold("You have entered")
-text.bold("Share %s" % (share_index + 1))
+text.bold(f"Share {share_index + 1}")
 text.normal("from")
-text.bold("Group %s" % (group_index + 1))
+text.bold(f"Group {group_index + 1}")

 await raise_if_cancelled(
 interact(
@@ -32,9 +32,9 @@ async def show_share_words(
 if share_index is None:
 header_title = "Recovery seed"
 elif group_index is None:
-header_title = "Recovery share #%s" % (share_index + 1)
+header_title = f"Recovery share #{share_index + 1}"
 else:
-header_title = "Group %s - Share %s" % ((group_index + 1), (share_index + 1))
+header_title = f"Group {group_index + 1} - Share {share_index + 1}"
 header_icon = ui.ICON_RESET
 pages: list[ui.Component] = [] # ui page components
 shares_words_check = [] # check we display correct data
@@ -42,10 +42,10 @@ async def show_share_words(
 # first page
 text = Text(header_title, header_icon)
 text.bold("Write down these")
-text.bold("%s words:" % len(share_words))
+text.bold(f"{len(share_words)} words:")
 text.br_half()
 for index, word in first:
-text.mono("%s. %s" % (index + 1, word))
+text.mono(f"{index + 1}. {word}")
 shares_words_check.append(word)
 pages.append(text)

@@ -53,17 +53,17 @@ async def show_share_words(
 for chunk in chunks:
 text = Text(header_title, header_icon)
 for index, word in chunk:
-text.mono("%s. %s" % (index + 1, word))
+text.mono(f"{index + 1}. {word}")
 shares_words_check.append(word)
 pages.append(text)

 # last page
 text = Text(header_title, header_icon)
 for index, word in last:
-text.mono("%s. %s" % (index + 1, word))
+text.mono(f"{index + 1}. {word}")
 shares_words_check.append(word)
 text.br_half()
-text.bold("I wrote down all %s" % len(share_words))
+text.bold(f"I wrote down all {len(share_words)}")
 text.bold("words in order.")
 pages.append(text)

@@ -208,23 +208,23 @@ async def slip39_prompt_threshold(
 text = "The threshold sets the number of shares "
 if group_id is None:
 text += "needed to recover your wallet. "
-text += "Set it to %s and you will need " % count
+text += f"Set it to {count} and you will need "
 if num_of_shares == 1:
 text += "1 share."
 elif num_of_shares == count:
-text += "all %s of your %s shares." % (count, num_of_shares)
+text += f"all {count} of your {num_of_shares} shares."
 else:
-text += "any %s of your %s shares." % (count, num_of_shares)
+text += f"any {count} of your {num_of_shares} shares."
 else:
 text += "needed to form a group. "
-text += "Set it to %s and you will " % count
+text += f"Set it to {count} and you will "
 if num_of_shares == 1:
 text += "need 1 share "
 elif num_of_shares == count:
-text += "need all %s of %s shares " % (count, num_of_shares)
+text += f"need all {count} of {num_of_shares} shares "
 else:
-text += "need any %s of %s shares " % (count, num_of_shares)
-text += "to form Group %s." % (group_id + 1)
+text += f"need any {count} of {num_of_shares} shares "
+text += f"to form Group {group_id + 1}."
 info = InfoConfirm(text)
 await info

@@ -276,7 +276,7 @@ async def slip39_prompt_number_of_shares(
 "Next you will choose "
 "the threshold number of "
 "shares needed to form "
-"Group %s." % (group_id + 1)
+f"Group {group_id + 1}."
 )
 await info

@@ -93,9 +93,9 @@ def presize_module(modname: str, size: int) -> None:
 """
 module = sys.modules[modname]
 for i in range(size):
-setattr(module, "___PRESIZE_MODULE_%d" % i, None)
+setattr(module, f"___PRESIZE_MODULE_{i}", None)
 for i in range(size):
-delattr(module, "___PRESIZE_MODULE_%d" % i)
+delattr(module, f"___PRESIZE_MODULE_{i}")


 if __debug__:
@@ -103,7 +103,7 @@ if __debug__:
 def mem_dump(filename: str) -> None:
 from micropython import mem_info

-print("### sysmodules (%d):" % len(sys.modules))
+print(f"### sysmodules ({len(sys.modules)}):")
 for mod in sys.modules:
 print("*", mod)
 if EMULATOR:
@@ -321,7 +321,7 @@ def obj_repr(o: object) -> str:
 d = {attr: getattr(o, attr, None) for attr in o.__slots__}
 else:
 d = o.__dict__
-return "<%s: %s>" % (o.__class__.__name__, d)
+return f"<{o.__class__.__name__}: {d}>"


 def truncate_utf8(string: str, max_bytes: int) -> str:
@@ -373,14 +373,14 @@ if __debug__:
 yield " " + subline
 elif val and isinstance(val, list) and type(val[0]) == type(msg):
 # non-empty list of protobuf messages
-yield " {}: [".format(key)
+yield f" {key}: ["
 for subval in val:
 sublines = dump_protobuf_lines(subval)
 for subline in sublines:
 yield " " + subline
 yield " ]"
 else:
-yield " {}: {!r}".format(key, val)
+yield f" {key}: {repr(val)}"

 yield "}"

@@ -297,7 +297,7 @@ async def _handle_single_message(
 try:
 msg_type = protobuf.type_for_wire(msg.type).MESSAGE_NAME
 except Exception:
-msg_type = "%d - unknown message type" % msg.type
+msg_type = f"{msg.type} - unknown message type"
 log.debug(
 __name__,
 "%s:%x receive: <%s>",
@@ -364,7 +364,7 @@ async def _handle_single_message(
 # - something canceled the workflow from the outside
 if __debug__:
 if isinstance(exc, ActionCancelled):
-log.debug(__name__, "cancelled: {}".format(exc.message))
+log.debug(__name__, "cancelled: %s", exc.message)
 elif isinstance(exc, loop.TaskClosed):
 log.debug(__name__, "cancelled: loop task was closed")
 else:
@@ -72,7 +72,7 @@ def test_touch(v):
 touch = True
 else:
 if io.poll([io.TOUCH], r, 10000) and r[0] == io.TOUCH and r[1][0] == io.TOUCH_END:
-print('OK %d %d' % (r[1][1], r[1][2]))
+print(f'OK {r[1][1]} {r[1][2]}')
 break
 if utime.ticks_us() > deadline:
 print('ERROR TIMEOUT')
@@ -26,17 +26,13 @@ class TestPathSchemas(unittest.TestCase):
 def assertMatch(self, schema, path):
 self.assertTrue(
 schema.match(path),
-"Expected schema {!r} to match path {}".format(
-schema, address_n_to_str(path)
-),
+f"Expected schema {repr(schema)} to match path {address_n_to_str(path)}",
 )

 def assertMismatch(self, schema, path):
 self.assertFalse(
 schema.match(path),
-"Expected schema {!r} to not match path {}".format(
-schema, address_n_to_str(path)
-),
+f"Expected schema {repr(schema)} to not match path {address_n_to_str(path)}",
 )

 def assertEqualSchema(self, schema_a, schema_b):
@@ -48,7 +44,7 @@ class TestPathSchemas(unittest.TestCase):
 ensure(
 all(is_equal(a, b) for a, b in zip(schema_a.schema, schema_b.schema))
 and is_equal(schema_a.trailing_components, schema_b.trailing_components),
-"Schemas differ:\nA = {!r}\nB = {!r}".format(schema_a, schema_b),
+f"Schemas differ:\nA = {repr(schema_a)}\nB = {repr(schema_b)}"
 )

 def test_always_never_matching(self):
@@ -14,90 +14,90 @@ class TestTrezorIoFatfs(unittest.TestCase):
 sdcard.power_off()

 def _filename(self, suffix=""):
-return "FILE%s.TXT" % suffix
+return f"FILE{suffix}.TXT"

 def _dirname(self, suffix=""):
-return "TREZOR%s" % suffix
+return f"TREZOR{suffix}"

 def test_basic(self):
 # test just the stuff in setup and teardown
 pass

 def test_mkdir(self):
-fatfs.mkdir("/%s" % self._dirname())
-s = fatfs.stat("/%s" % self._dirname())
+fatfs.mkdir(f"/{self._dirname()}")
+s = fatfs.stat(f"/{self._dirname()}")
 self.assertEqual(s, (0, "---d-", self._dirname()))

 def test_listdir(self):
-fatfs.mkdir("/%s" % self._dirname())
-with fatfs.open("/%s" % self._filename(), "w") as f:
+fatfs.mkdir(f"/{self._dirname()}")
+with fatfs.open(f"{self._filename()}", "w") as f:
 f.write(bytearray(b"test"))
-with fatfs.open("/%s/%s" % (self._dirname(), self._filename("2")), "w") as f:
+with fatfs.open(f"/{self._dirname()}/{self._filename('2')}", "w") as f:
 f.write(bytearray(b"testtest"))
 l = [e for e in fatfs.listdir("/")]
 self.assertEqual(
 l, [(0, "---d-", self._dirname()), (4, "----a", self._filename())]
 )
-l = [e for e in fatfs.listdir("/%s" % self._dirname())]
+l = [e for e in fatfs.listdir(f"/{self._dirname()}")]
 self.assertEqual(l, [(8, "----a", self._filename("2"))])

 def test_unlink(self):
-fatfs.mkdir("/%s" % self._dirname())
-with fatfs.open("/%s" % self._filename(), "w") as f:
+fatfs.mkdir(f"/{self._dirname()}")
+with fatfs.open(f"/{self._filename()}", "w") as f:
 f.write(bytearray(b"test"))
-s = fatfs.stat("/%s" % self._dirname())
+s = fatfs.stat(f"/{self._dirname()}")
 self.assertEqual(s, (0, "---d-", self._dirname()))
-s = fatfs.stat("/%s" % self._filename())
+s = fatfs.stat(f"/{self._filename()}")
 self.assertEqual(s, (4, "----a", self._filename()))
-fatfs.unlink("/%s" % self._dirname())
-fatfs.unlink("/%s" % self._filename())
+fatfs.unlink(f"/{self._dirname()}")
+fatfs.unlink(f"/{self._filename()}")
 with self.assertRaises(fatfs.FatFSError):
-fatfs.stat("/%s" % self._dirname())
+fatfs.stat(f"/{self._dirname()}")
 with self.assertRaises(fatfs.FatFSError):
-self.assertRaises(fatfs.stat("/%s" % self._filename()))
+self.assertRaises(fatfs.stat(f"/{self._filename()}"))

 def test_rename(self):
-fatfs.mkdir("/%s" % self._dirname())
-with fatfs.open("/%s" % self._filename(), "w") as f:
+fatfs.mkdir(f"/{self._dirname()}")
+with fatfs.open(f"/{self._filename()}", "w") as f:
 f.write(bytearray(b"test"))
-s = fatfs.stat("/%s" % self._dirname())
+s = fatfs.stat(f"/{self._dirname()}")
 self.assertEqual(s, (0, "---d-", self._dirname()))
-s = fatfs.stat("/%s" % self._filename())
+s = fatfs.stat(f"/{self._filename()}")
 self.assertEqual(s, (4, "----a", self._filename()))
-fatfs.rename("/%s" % self._dirname(), "/%s" % self._dirname("2"))
-fatfs.rename("/%s" % self._filename(), "/%s" % self._filename("2"))
+fatfs.rename(f"/{self._dirname()}", f"/{self._dirname('2')}")
+fatfs.rename(f"/{self._filename()}", f"/{self._filename('2')}")
 with self.assertRaises(fatfs.FatFSError):
-fatfs.stat("/%s" % self._dirname())
+fatfs.stat(f"/{self._dirname()}")
 with self.assertRaises(fatfs.FatFSError):
-self.assertRaises(fatfs.stat("/%s" % self._filename()))
+self.assertRaises(fatfs.stat(f"/{self._filename()}"))
-s = fatfs.stat("/%s" % self._dirname("2"))
+s = fatfs.stat(f"/{self._dirname('2')}")
 self.assertEqual(s, (0, "---d-", self._dirname("2")))
-s = fatfs.stat("/%s" % self._filename("2"))
+s = fatfs.stat(f"/{self._filename('2')}")
 self.assertEqual(s, (4, "----a", self._filename("2")))

 def test_open_rw(self):
-with fatfs.open("/%s" % self._filename(), "w") as f:
+with fatfs.open(f"/{self._filename()}", "w") as f:
 f.write(bytearray(b"test"))
-with fatfs.open("/%s" % self._filename(), "r") as f:
+with fatfs.open(f"/{self._filename()}", "r") as f:
 b = bytearray(100)
 r = f.read(b)
 self.assertEqual(r, 4)
 self.assertEqual(bytes(b[:4]), b"test")

 def test_open_a(self):
-with fatfs.open("/%s" % self._filename(), "w") as f:
+with fatfs.open(f"/{self._filename()}", "w") as f:
 f.write(bytearray(b"test" * 200))
-with fatfs.open("/%s" % self._filename(), "a") as f:
+with fatfs.open(f"{self._filename()}", "a") as f:
 f.seek(800)
 f.write(bytearray(b"TEST" * 200))
-with fatfs.open("/%s" % self._filename(), "r") as f:
+with fatfs.open(f"/{self._filename()}", "r") as f:
 b = bytearray(2000)
 r = f.read(b)
 self.assertEqual(r, 1600)
 self.assertEqual(bytes(b[:1600]), b"test" * 200 + b"TEST" * 200)

 def test_seek(self):
-with fatfs.open("/%s" % self._filename(), "w+") as f:
+with fatfs.open(f"/{self._filename()}", "w+") as f:
 f.write(bytearray(b"test" * 10))
 f.seek(2)
 b = bytearray(8)
@@ -106,23 +106,23 @@ class TestTrezorIoFatfs(unittest.TestCase):
 self.assertEqual(bytes(b[:8]), b"sttestte")

 def test_truncate(self):
-with fatfs.open("/%s" % self._filename(), "w") as f:
+with fatfs.open(f"/{self._filename()}", "w") as f:
 f.write(bytearray(b"test" * 100))
-s = fatfs.stat("/%s" % self._filename())
+s = fatfs.stat(f"/{self._filename()}")
 self.assertEqual(s, (400, "----a", self._filename()))
-with fatfs.open("/%s" % self._filename(), "a") as f:
+with fatfs.open(f"/{self._filename()}", "a") as f:
 f.seek(111)
 f.truncate()
-s = fatfs.stat("/%s" % self._filename())
+s = fatfs.stat(f"/{self._filename()}")
 self.assertEqual(s, (111, "----a", self._filename()))


 class TestTrezorIoFatfsLfn(TestTrezorIoFatfs):
 def _filename(self, suffix=""):
-return "reallylongfilename%s.textfile" % suffix
+return f"reallylongfilename{suffix}.textfile"

 def _dirname(self, suffix=""):
-return "reallylongdirname%s" % suffix
+return f"reallylongdirname{suffix}"


 class TestTrezorIoFatfsMounting(unittest.TestCase):
@@ -18,7 +18,7 @@ class AssertRaisesContext:

 def __exit__(self, exc_type, exc_value, tb):
 if exc_type is None:
-ensure(False, "%r not raised" % self.expected)
+ensure(False, f"{repr(self.expected)} not raised")
 if issubclass(exc_type, self.expected):
 self.value = exc_value
 return True
@@ -32,7 +32,7 @@ class TestCase:

 def assertEqual(self, x, y, msg=''):
 if not msg:
-msg = "%r vs (expected) %r" % (x, y)
+msg = f"{repr(x)} vs (expected) {repr(y)}"

 if x.__class__ == y.__class__ and x.__class__.__name__ == "Msg":
 self.assertMessageEqual(x, y)
@@ -41,7 +41,7 @@ class TestCase:

 def assertNotEqual(self, x, y, msg=''):
 if not msg:
-msg = "%r not expected to be equal %r" % (x, y)
+msg = f"{repr(x)} not expected to be equal {repr(y)}"
 ensure(x != y, msg)

 def assertAlmostEqual(self, x, y, places=None, msg='', delta=None):
@@ -54,14 +54,14 @@ class TestCase:
 if abs(x - y) <= delta:
 return
 if not msg:
-msg = '%r != %r within %r delta' % (x, y, delta)
+msg = f'{repr(x)} != {repr(y)} within {repr(delta)} delta'
 else:
 if places is None:
 places = 7
 if round(abs(y - x), places) == 0:
 return
 if not msg:
-msg = '%r != %r within %r places' % (x, y, places)
+msg = f'{repr(x)} != {repr(y)} within {repr(places)} places'

 ensure(False, msg)

@@ -73,50 +73,50 @@ class TestCase:
 if not (x == y) and abs(x - y) > delta:
 return
 if not msg:
-msg = '%r == %r within %r delta' % (x, y, delta)
+msg = f'{repr(x)} == {repr(y)} within {repr(delta)} delta'
 else:
 if places is None:
 places = 7
 if not (x == y) and round(abs(y - x), places) != 0:
 return
 if not msg:
-msg = '%r == %r within %r places' % (x, y, places)
+msg = f'{repr(x)} == {repr(y)} within {repr(places)} places'

 ensure(False, msg)

 def assertIs(self, x, y, msg=''):
 if not msg:
-msg = "%r is not %r" % (x, y)
+msg = f"{repr(x)} is not {repr(y)}"
 ensure(x is y, msg)

 def assertIsNot(self, x, y, msg=''):
 if not msg:
-msg = "%r is %r" % (x, y)
+msg = f"{repr(x)} is {repr(y)}"
 ensure(x is not y, msg)

 def assertIsNone(self, x, msg=''):
 if not msg:
-msg = "%r is not None" % x
+msg = f"{repr(x)} is not None"
 ensure(x is None, msg)

 def assertIsNotNone(self, x, msg=''):
 if not msg:
-msg = "%r is None" % x
+msg = f"{repr(x)} is None"
 ensure(x is not None, msg)

 def assertTrue(self, x, msg=''):
 if not msg:
-msg = "Expected %r to be True" % x
+msg = f"Expected {repr(x)} to be True"
 ensure(x, msg)

 def assertFalse(self, x, msg=''):
 if not msg:
-msg = "Expected %r to be False" % x
+msg = f"Expected {repr(x)} to be False"
 ensure(not x, msg)

 def assertIn(self, x, y, msg=''):
 if not msg:
-msg = "Expected %r to be in %r" % (x, y)
+msg = f"Expected {repr(x)} to be in {repr(y)}"
 ensure(x in y, msg)

 def assertIsInstance(self, x, y, msg=''):
@@ -132,7 +132,7 @@ class TestCase:
 return
 raise
 else:
-ensure(False, "%r not raised" % exc)
+ensure(False, f"{repr(exc)} not raised")

 def assertListEqual(self, x, y, msg=''):
 if len(x) != len(y):
@@ -160,7 +160,7 @@ class TestCase:
 self.assertEqual(
 x.MESSAGE_NAME,
 y.MESSAGE_NAME,
-"Expected {}, found {}".format(x.MESSAGE_NAME, y.MESSAGE_NAME)
+f"Expected {x.MESSAGE_NAME}, found {y.MESSAGE_NAME}"
 )
 xdict = x.__dict__
 ydict = y.__dict__
@@ -169,7 +169,7 @@ class TestCase:
 self.assertEqual(
 xdict[key],
 ydict[key],
-"At {}.{} expected {}, found {}".format(x.MESSAGE_NAME, key, xdict[key], ydict[key])
+f"At {x.MESSAGE_NAME}.{key} expected {xdict[key]}, found {ydict[key]}"
 )

@@ -236,9 +236,9 @@ def run_class(c, test_result):
 test_result.testsRun += 1
 retval = m()
 if isinstance(retval, generator_type):
-raise RuntimeError("{} must not be a generator (it is async, uses yield or await).".format(name))
+raise RuntimeError(f"{name} must not be a generator (it is async, uses yield or await).")
 elif retval is not None:
-raise RuntimeError("{} should not return a result.".format(name))
+raise RuntimeError(f"{name} should not return a result.")
 finally:
 tear_down()
 print(" ok")
@@ -268,14 +268,14 @@ def main(module="__main__"):
 suite.addTest(c)
 runner = TestRunner()
 result = runner.run(suite)
-msg = "Ran %d tests" % result.testsRun
+msg = f"Ran {result.testsRun} tests"
 result_strs = []
 if result.skippedNum > 0:
-result_strs.append("{} skipped".format(result.skippedNum))
+result_strs.append(f"{result.skippedNum} skipped")
 if result.failuresNum > 0:
-result_strs.append("{} failed".format(result.failuresNum))
+result_strs.append(f"{result.failuresNum} failed")
 if result.errorsNum > 0:
-result_strs.append("{} errored".format(result.errorsNum))
+result_strs.append(f"{result.errorsNum} errored")
 if result_strs:
 msg += " (" + ", ".join(result_strs) + ")"
 print(msg)
@@ -48,7 +48,7 @@ def annotate(obj, filename):
 if obj.type == "total":
 alloc_str = lambda line: str(line["total_allocs"])
 else:
-alloc_str = lambda line: "{:.2f}".format(line["avg_allocs"])
+alloc_str = lambda line: f"{line['avg_allocs']:.2f}"

 filedata = obj.data[filename]

@@ -83,10 +83,10 @@ def _list(obj, sort_by="avg_allocs", reverse=False):
 def list(obj, reverse):
 if obj.type == "total":
 field = "total_allocs"
-format_num = lambda l: "{}".format(l[2])
+format_num = lambda l: f"{l[2]}"
 else:
 field = "avg_allocs"
-format_num = lambda l: "{:.2f}".format(l[1])
+format_num = lambda l: f"{l[1]:.2f}"

 file_sums = _list(obj, field, reverse)

@@ -112,7 +112,7 @@ class HtmlTable:
 self.f.write("<tr>")
 for td in tds:
 if isinstance(td, tuple):
-self.f.write("<td {}><tt>{}</tt></td>".format(td[0], td[1]))
+self.f.write(f"<td {td[0]}><tt>{td[1]}</tt></td>")
 else:
 self.f.write(f"<td><tt>{td}</tt></td>")
 self.f.write("</tr>")
@@ -129,9 +129,7 @@ def html(obj, htmldir):
 with open(f"{htmldir}/index.html", "w") as f:
 f.write("<html>")
 f.write(
-"<h3>Total allocations: {}</h3>".format(
-sum(total_sum for _, _, total_sum in file_sums)
-)
+f"<h3>Total allocations: {sum(total_sum for _, _, total_sum in file_sums)}</h3>"
 )
 with HtmlTable(f) as table:
 table.tr((style_right, "avg"), (style_right, "total"), "")
@@ -68,8 +68,8 @@ def store_to_file(dest, parts):
 if os.path.isdir(os.path.join(dest, package)):
 if not line.strip():
 continue
-print("Package exists: {}".format(package))
-print("You should set 'package:' in {}".format(line.strip()))
+print(f"Package exists: {package}")
+print(f"You should set 'package:' in {line.strip()}")
 sys.exit(1)

 if not os.path.exists(filepath):
@@ -50,7 +50,7 @@ def parse_privkey_args(privkey_data: List[str]) -> Tuple[int, List[bytes]]:
 privkeys.append(bytes.fromhex(key_hex))
 sigmask |= 1 << (int(idx) - 1)
 except ValueError:
-click.echo("Could not parse key: {}".format(key))
+click.echo(f"Could not parse key: {key}")
 click.echo("Keys must be in the format: <key index>:<hex-encoded key>")
 raise click.ClickException("Unrecognized key format.")
 return sigmask, privkeys
@@ -59,30 +59,28 @@ def parse_privkey_args(privkey_data: List[str]) -> Tuple[int, List[bytes]]:
 def process_remote_signers(fw, addrs: List[str]) -> Tuple[int, List[bytes]]:
 if len(addrs) < fw.sigs_required:
 raise click.ClickException(
-"Not enough signers (need at least {})".format(fw.sigs_required)
+f"Not enough signers (need at least {fw.sigs_required})"
 )

 digest = fw.digest()
 name = fw.NAME

 def mkproxy(addr):
-return Pyro4.Proxy("PYRO:keyctl@{}:{}".format(addr, PORT))
+return Pyro4.Proxy(f"PYRO:keyctl@{addr}:{PORT}")

 sigmask = 0
 pks, Rs = [], []
 for addr in addrs:
-click.echo("Connecting to {}...".format(addr))
+click.echo(f"Connecting to {addr}...")
 with mkproxy(addr) as proxy:
 pk, R = proxy.get_commit(name, digest)
 if pk not in fw.public_keys:
 raise click.ClickException(
-"Signer at {} commits with unknown public key {}".format(addr, pk.hex())
+f"Signer at {addr} commits with unknown public key {pk.hex()}"
 )
 idx = fw.public_keys.index(pk)
 click.echo(
-"Signer at {} commits with public key #{}: {}".format(
-addr, idx + 1, pk.hex()
-)
+f"Signer at {addr} commits with public key #{idx + 1}: {pk.hex()}"
 )
 sigmask |= 1 << idx
 pks.append(pk)
@@ -95,7 +93,7 @@ def process_remote_signers(fw, addrs: List[str]) -> Tuple[int, List[bytes]]:
 # collect signatures
 sigs = []
 for addr in addrs:
-click.echo("Waiting for {} to sign... ".format(addr), nl=False)
+click.echo(f"Waiting for {addr} to sign... ", nl=False)
 with mkproxy(addr) as proxy:
 sig = proxy.get_signature(name, digest, global_R, global_pk)
 sigs.append(sig)
@@ -259,8 +257,8 @@ def cli(
 if remote:
 if Pyro4 is None:
 raise click.ClickException("Please install Pyro4 for remote signing.")
-click.echo(fw.format())
-click.echo("Signing with {} remote participants.".format(len(remote)))
+click.echo(fw)
+click.echo(f"Signing with {len(remote)} remote participants.")
 sigmask, signature = process_remote_signers(fw, remote)

 if signature:
@@ -17,13 +17,13 @@ class HIDInterface:

 def __uhid_read(self, length):
 data = os.read(self.file_descriptor, length)
-logger.log_raw("{} >".format(HIDInterface.uhid_device), data.hex())
+logger.log_raw(f"{HIDInterface.uhid_device} >", data.hex())
 return data

 def __uhid_write(self, data):
 bytes_written = os.write(self.file_descriptor, data)
 assert bytes_written == len(data)
-logger.log_raw("{} <".format(HIDInterface.uhid_device), data.hex())
+logger.log_raw(f"{HIDInterface.uhid_device} <", data.hex())

 def create_device(self):
 name = b"Virtual Trezor"
@@ -61,33 +61,31 @@ class HIDInterface:
 self.__uhid_write(buf)
 logger.log_uhid_event(
 "UHID_CREATE2",
-"name='{}' phys='{}' uniq=0x{} rd_size={} bus=0x{:04x} vendor=0x{:04x} product=0x{:04x} version=0x{:04x} country=0x{:04x} rd_data=0x{}".format(
-name.decode("ascii"),
-phys.decode("ascii"),
-uniq.hex(),
-len(rd_data),
-bus,
-vendor,
-product,
-version,
-country,
-rd_data.hex(),
-),
+f"name='{name.decode()}' "
+f"phys='{phys.decode()}' "
+f"uniq=0x{uniq.hex()} "
+f"rd_size={len(rd_data)} "
+f"bus=0x{bus:04x} "
+f"vendor=0x{vendor:04x} "
+f"product=0x{product:04x} "
+f"version=0x{version:04x} "
+f"country=0x{country:04x} "
+f"rd_data=0x{rd_data.hex()}",
 )

 def write_data(self, data):
 buf = uhid.create_input2_event(data)
 self.__uhid_write(buf)
 logger.log_uhid_event(
-"UHID_INPUT2", "data=0x{} size={}".format(data.hex(), len(data))
+"UHID_INPUT2", f"data=0x{data.hex()} size={len(data)}"
 )
-logger.log_hid_packet("DEVICE_OUTPUT", "0x{}".format(data.hex()))
+logger.log_hid_packet("DEVICE_OUTPUT", f"0x{data.hex()}")

 def process_event(self):
 ev_type, request = uhid.parse_event(self.__uhid_read(uhid.EVENT_LENGTH))
 if ev_type == uhid.EVENT_TYPE_START:
 dev_flags, = request
-logger.log_uhid_event("UHID_START", "dev_flags=0b{:08b}".format(dev_flags))
+logger.log_uhid_event("UHID_START", f"dev_flags=0b{dev_flags:08b}")
 elif ev_type == uhid.EVENT_TYPE_STOP:
 logger.log_uhid_event("UHID_STOP")
 elif ev_type == uhid.EVENT_TYPE_OPEN:
@ -98,12 +96,12 @@ class HIDInterface:
|
|||||||
data, size, rtype = request
|
data, size, rtype = request
|
||||||
logger.log_uhid_event(
|
logger.log_uhid_event(
|
||||||
"UHID_OUTPUT",
|
"UHID_OUTPUT",
|
||||||
"data=0x{} size={} rtype={}".format(data.hex(), size, rtype),
|
f"data=0x{data.hex()} size={size} rtype={rtype}",
|
||||||
)
|
)
|
||||||
logger.log_hid_packet("DEVICE_INPUT", "0x{}".format(data[1:].hex()))
|
logger.log_hid_packet("DEVICE_INPUT", f"0x{data[1:].hex()}")
|
||||||
return data[1:]
|
return data[1:]
|
||||||
else:
|
else:
|
||||||
logger.log_uhid_event(
|
logger.log_uhid_event(
|
||||||
"UNKNOWN_EVENT",
|
"UNKNOWN_EVENT",
|
||||||
"ev_type={} request=0x{}".format(ev_type, request.hex()),
|
f"ev_type={ev_type} request=0x{request.hex()}",
|
||||||
)
|
)
|
||||||
|
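A quick sketch (illustrative values only, not from the commit) showing that format specs such as :08b and :04x carry over unchanged when .format() or %-formatting is rewritten as an f-string:

    dev_flags = 5
    assert "dev_flags=0b{:08b}".format(dev_flags) == f"dev_flags=0b{dev_flags:08b}"  # ...0b00000101
    assert "bus=0x%04x" % 3 == f"bus=0x{3:04x}"                                      # bus=0x0003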
@ -10,7 +10,7 @@ def __get_timestamp():
|
|||||||
|
|
||||||
def __log_message(message):
|
def __log_message(message):
|
||||||
if log_timestamps == True:
|
if log_timestamps == True:
|
||||||
print("{}\t{}".format(__get_timestamp(), message))
|
print(f"{__get_timestamp()}\t{message}")
|
||||||
else:
|
else:
|
||||||
print(message)
|
print(message)
|
||||||
|
|
||||||
@ -18,16 +18,16 @@ def __log_message(message):
|
|||||||
def log_uhid_event(event_name, params=None):
|
def log_uhid_event(event_name, params=None):
|
||||||
if log_level == "uhid-event":
|
if log_level == "uhid-event":
|
||||||
if params:
|
if params:
|
||||||
__log_message("{}\t{}".format(event_name, params))
|
__log_message(f"{event_name}\t{params}")
|
||||||
else:
|
else:
|
||||||
__log_message(event_name)
|
__log_message(event_name)
|
||||||
|
|
||||||
|
|
||||||
def log_hid_packet(packet_name, payload):
|
def log_hid_packet(packet_name, payload):
|
||||||
if log_level == "hid-packet":
|
if log_level == "hid-packet":
|
||||||
__log_message("{}\t{}".format(packet_name, payload))
|
__log_message(f"{packet_name}\t{payload}")
|
||||||
|
|
||||||
|
|
||||||
def log_raw(direction, payload):
|
def log_raw(direction, payload):
|
||||||
if log_level == "raw":
|
if log_level == "raw":
|
||||||
__log_message("{}\t{}".format(direction, payload))
|
__log_message(f"{direction}\t{payload}")
|
||||||
|
@ -22,18 +22,14 @@ class UDPInterface:
|
|||||||
)
|
)
|
||||||
assert bytes_sent == len(data)
|
assert bytes_sent == len(data)
|
||||||
logger.log_raw(
|
logger.log_raw(
|
||||||
"{}:{} < {}:{}".format(
|
f"{self.destination_ip}:{self.destination_port} < {self.bind_ip}:{self.bind_port}",
|
||||||
self.destination_ip, self.destination_port, self.bind_ip, self.bind_port
|
|
||||||
),
|
|
||||||
data.hex(),
|
data.hex(),
|
||||||
)
|
)
|
||||||
|
|
||||||
def read(self, length):
|
def read(self, length):
|
||||||
data, address = self.socket.recvfrom(length)
|
data, address = self.socket.recvfrom(length)
|
||||||
logger.log_raw(
|
logger.log_raw(
|
||||||
"{}:{} < {}:{}".format(
|
f"{self.bind_ip}:{self.bind_port} < {address[0]}:{address[1]}",
|
||||||
self.bind_ip, self.bind_port, address[0], address[1]
|
|
||||||
),
|
|
||||||
data.hex(),
|
data.hex(),
|
||||||
)
|
)
|
||||||
return data
|
return data
|
||||||
|
@ -11,10 +11,10 @@ EVENT_TYPE_INTPUT2 = 12
|
|||||||
DATA_MAX = 4096
|
DATA_MAX = 4096
|
||||||
EVENT_LENGTH = 4380
|
EVENT_LENGTH = 4380
|
||||||
|
|
||||||
INPUT2_REQ_FMT = "< H {}s".format(DATA_MAX)
|
INPUT2_REQ_FMT = f"< H {DATA_MAX}s"
|
||||||
CREATE2_REQ_FMT = "< 128s 64s 64s H H L L L L {}s".format(DATA_MAX)
|
CREATE2_REQ_FMT = f"< 128s 64s 64s H H L L L L {DATA_MAX}s"
|
||||||
START_REQ_FMT = "< Q"
|
START_REQ_FMT = "< Q"
|
||||||
OUTPUT_REQ_FMT = "< {}s H B".format(DATA_MAX)
|
OUTPUT_REQ_FMT = f"< {DATA_MAX}s H B"
|
||||||
|
|
||||||
|
|
||||||
def pack_event(ev_type, request):
|
def pack_event(ev_type, request):
|
||||||
|
@ -6,6 +6,6 @@ combined = bl + 32768 * b"\xff" + fw
|
|||||||
|
|
||||||
open("combined.bin", "wb").write(combined)
|
open("combined.bin", "wb").write(combined)
|
||||||
|
|
||||||
print("bootloader : %d bytes" % len(bl))
|
print(f"bootloader : {len(bl)} bytes")
|
||||||
print("firmware : %d bytes" % len(fw))
|
print(f"firmware : {len(fw)} bytes")
|
||||||
print("combined : %d bytes" % len(combined))
|
print(f"combined : {len(combined)} bytes")
|
||||||
|
@ -9,8 +9,7 @@ fn = sys.argv[1]
|
|||||||
fs = os.stat(fn).st_size
|
fs = os.stat(fn).st_size
|
||||||
if fs > MAXSIZE:
|
if fs > MAXSIZE:
|
||||||
raise Exception(
|
raise Exception(
|
||||||
"bootloader has to be smaller than %d bytes (current size is %d)"
|
f"bootloader has to be smaller than {MAXSIZE} bytes (current size is {fs})"
|
||||||
% (MAXSIZE, fs)
|
|
||||||
)
|
)
|
||||||
with open(fn, "ab") as f:
|
with open(fn, "ab") as f:
|
||||||
f.write(b"\x00" * (TOTALSIZE - fs))
|
f.write(b"\x00" * (TOTALSIZE - fs))
|
||||||
|
@ -126,7 +126,7 @@ def check_signatures(data):
|
|||||||
signature = data[SIGNATURES_START + 64 * x : SIGNATURES_START + 64 * x + 64]
|
signature = data[SIGNATURES_START + 64 * x : SIGNATURES_START + 64 * x + 64]
|
||||||
|
|
||||||
if indexes[x] == 0:
|
if indexes[x] == 0:
|
||||||
print("Slot #%d" % (x + 1), "is empty")
|
print(f"Slot #{x + 1}", "is empty")
|
||||||
else:
|
else:
|
||||||
pk = pubkeys[indexes[x]]
|
pk = pubkeys[indexes[x]]
|
||||||
verify = ecdsa.VerifyingKey.from_string(
|
verify = ecdsa.VerifyingKey.from_string(
|
||||||
@ -139,13 +139,13 @@ def check_signatures(data):
|
|||||||
verify.verify(signature, to_sign, hashfunc=hashlib.sha256)
|
verify.verify(signature, to_sign, hashfunc=hashlib.sha256)
|
||||||
|
|
||||||
if indexes[x] in used:
|
if indexes[x] in used:
|
||||||
print("Slot #%d signature: DUPLICATE" % (x + 1), signature.hex())
|
print(f"Slot #{x + 1} signature: DUPLICATE", signature.hex())
|
||||||
else:
|
else:
|
||||||
used.append(indexes[x])
|
used.append(indexes[x])
|
||||||
print("Slot #%d signature: VALID" % (x + 1), signature.hex())
|
print(f"Slot #{x + 1} signature: VALID", signature.hex())
|
||||||
|
|
||||||
except Exception:
|
except Exception:
|
||||||
print("Slot #%d signature: INVALID" % (x + 1), signature.hex())
|
print(f"Slot #{x + 1} signature: INVALID", signature.hex())
|
||||||
|
|
||||||
|
|
||||||
def modify(data, slot, index, signature):
|
def modify(data, slot, index, signature):
|
||||||
@ -160,7 +160,7 @@ def modify(data, slot, index, signature):
|
|||||||
def sign(data, is_pem):
|
def sign(data, is_pem):
|
||||||
# Ask for index and private key and signs the firmware
|
# Ask for index and private key and signs the firmware
|
||||||
|
|
||||||
slot = int(input("Enter signature slot (1-%d): " % SLOTS))
|
slot = int(input(f"Enter signature slot (1-{SLOTS}): "))
|
||||||
if slot < 1 or slot > SLOTS:
|
if slot < 1 or slot > SLOTS:
|
||||||
raise Exception("Invalid slot")
|
raise Exception("Invalid slot")
|
||||||
|
|
||||||
@ -236,7 +236,7 @@ def main(args):
|
|||||||
|
|
||||||
data = update_hashes_in_header(data)
|
data = update_hashes_in_header(data)
|
||||||
|
|
||||||
print("Firmware size %d bytes" % len(data))
|
print(f"Firmware size {len(data)} bytes")
|
||||||
|
|
||||||
check_size(data)
|
check_size(data)
|
||||||
check_signatures(data)
|
check_signatures(data)
|
||||||
|
@ -15,8 +15,8 @@ bl_hash = ", ".join("0x%02x" % x for x in bytearray(bh))
|
|||||||
bl_data = ", ".join("0x%02x" % x for x in bytearray(data))
|
bl_data = ", ".join("0x%02x" % x for x in bytearray(data))
|
||||||
|
|
||||||
with open("bl_data.h", "wt") as f:
|
with open("bl_data.h", "wt") as f:
|
||||||
f.write("static const uint8_t bl_hash[32] = {%s};\n" % bl_hash)
|
f.write(f"static const uint8_t bl_hash[32] = {{{bl_hash}}};\n")
|
||||||
f.write("static const uint8_t bl_data[32768] = {%s};\n" % bl_data)
|
f.write(f"static const uint8_t bl_data[32768] = {{{bl_data}}};\n")
|
||||||
|
|
||||||
# make sure the last item listed in known_bootloader function
|
# make sure the last item listed in known_bootloader function
|
||||||
# is our bootloader
|
# is our bootloader
|
||||||
|
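A small sketch (hypothetical contents) of the brace escaping used in the bl_data.h writer above: inside an f-string, doubled braces emit literal '{' and '}', which the old %-format string could write directly.

    bl_hash = "0xde, 0xad"   # hypothetical contents of the joined hash bytes
    line = f"static const uint8_t bl_hash[32] = {{{bl_hash}}};\n"
    # line == 'static const uint8_t bl_hash[32] = {0xde, 0xad};\n'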
@ -58,26 +58,26 @@ def handle_message(fh, fl, skipped, message):
|
|||||||
no_fsm = options.Extensions[wire_no_fsm]
|
no_fsm = options.Extensions[wire_no_fsm]
|
||||||
|
|
||||||
if getattr(options, "deprecated", None):
|
if getattr(options, "deprecated", None):
|
||||||
fh.write("\t// Message %s is deprecated\n" % short_name)
|
fh.write(f"\t// Message {short_name} is deprecated\n")
|
||||||
return
|
return
|
||||||
if bootloader:
|
if bootloader:
|
||||||
fh.write("\t// Message %s is used in bootloader mode only\n" % short_name)
|
fh.write(f"\t// Message {short_name} is used in bootloader mode only\n")
|
||||||
return
|
return
|
||||||
if no_fsm:
|
if no_fsm:
|
||||||
fh.write("\t// Message %s is not used in FSM\n" % short_name)
|
fh.write(f"\t// Message {short_name} is not used in FSM\n")
|
||||||
return
|
return
|
||||||
|
|
||||||
if direction == "i":
|
if direction == "i":
|
||||||
process_func = "(void (*)(const void *))fsm_msg%s" % short_name
|
process_func = f"(void (*)(const void *))fsm_msg{short_name}"
|
||||||
else:
|
else:
|
||||||
process_func = "0"
|
process_func = "0"
|
||||||
|
|
||||||
fh.write(
|
fh.write(
|
||||||
TEMPLATE.format(
|
TEMPLATE.format(
|
||||||
type="'%c'," % interface,
|
type=f"'{interface}',",
|
||||||
dir="'%c'," % direction,
|
dir=f"'{direction}',",
|
||||||
msg_id="MessageType_%s," % name,
|
msg_id=f"MessageType_{name},",
|
||||||
fields="%s_fields," % short_name,
|
fields=f"{short_name}_fields,",
|
||||||
process_func=process_func,
|
process_func=process_func,
|
||||||
)
|
)
|
||||||
)
|
)
|
||||||
@ -97,14 +97,12 @@ def handle_message(fh, fl, skipped, message):
|
|||||||
|
|
||||||
if encoded_size:
|
if encoded_size:
|
||||||
fl.write(
|
fl.write(
|
||||||
'_Static_assert(%s >= sizeof(%s_size), "msg buffer too small");\n'
|
f'_Static_assert({encoded_size} >= sizeof({short_name}_size), "msg buffer too small");\n'
|
||||||
% (encoded_size, short_name)
|
|
||||||
)
|
)
|
||||||
|
|
||||||
if decoded_size:
|
if decoded_size:
|
||||||
fl.write(
|
fl.write(
|
||||||
'_Static_assert(%s >= sizeof(%s), "msg buffer too small");\n'
|
f'_Static_assert({decoded_size} >= sizeof({short_name}), "msg buffer too small");\n'
|
||||||
% (decoded_size, short_name)
|
|
||||||
)
|
)
|
||||||
|
|
||||||
|
|
||||||
@ -130,7 +128,7 @@ for extension in (wire_in, wire_out, wire_debug_in, wire_debug_out):
|
|||||||
fh.write("\n#if DEBUG_LINK\n")
|
fh.write("\n#if DEBUG_LINK\n")
|
||||||
fl.write("\n#if DEBUG_LINK\n")
|
fl.write("\n#if DEBUG_LINK\n")
|
||||||
|
|
||||||
fh.write("\n\t// {label}\n\n".format(label=LABELS[extension]))
|
fh.write(f"\n\t// {LABELS[extension]}\n\n")
|
||||||
|
|
||||||
for message in messages[extension]:
|
for message in messages[extension]:
|
||||||
if message.name in SPECIAL_DEBUG_MESSAGES:
|
if message.name in SPECIAL_DEBUG_MESSAGES:
|
||||||
|
@ -25,11 +25,11 @@ for fn in sorted(glob.glob("*.png")):
|
|||||||
name = os.path.splitext(fn)[0]
|
name = os.path.splitext(fn)[0]
|
||||||
w, h = im.size
|
w, h = im.size
|
||||||
if w % 8 != 0:
|
if w % 8 != 0:
|
||||||
raise Exception("Width must be divisible by 8! (%s is %dx%d)" % (fn, w, h))
|
raise Exception(f"Width must be divisible by 8! ({fn} is {w}x{h})")
|
||||||
img = list(im.getdata())
|
img = list(im.getdata())
|
||||||
hdrs.append("extern const BITMAP bmp_%s;\n" % name)
|
hdrs.append(f"extern const BITMAP bmp_{name};\n")
|
||||||
imgs.append("const BITMAP bmp_%s = {%d, %d, bmp_%s_data};\n" % (name, w, h, name))
|
imgs.append(f"const BITMAP bmp_{name} = {{{w}, {h}, bmp_{name}_data}};\n")
|
||||||
data.append("const uint8_t bmp_%s_data[] = { %s};\n" % (name, encode_pixels(img)))
|
data.append(f"const uint8_t bmp_{name}_data[] = {{ {encode_pixels(img)}}};\n")
|
||||||
cnt += 1
|
cnt += 1
|
||||||
|
|
||||||
with open("../bitmaps.c", "wt") as f:
|
with open("../bitmaps.c", "wt") as f:
|
||||||
|
@ -93,11 +93,10 @@ with open("handlers.c", "wt") as f:
|
|||||||
f.write('#include "layout.h"\n')
|
f.write('#include "layout.h"\n')
|
||||||
f.write('#include "oled.h"\n\n')
|
f.write('#include "oled.h"\n\n')
|
||||||
for i in handlers:
|
for i in handlers:
|
||||||
f.write("void __attribute__((noreturn)) %s(void)\n" % i)
|
f.write(f"void __attribute__((noreturn)) {i}(void)\n")
|
||||||
f.write("{\n")
|
f.write("{\n")
|
||||||
f.write(
|
f.write(
|
||||||
'\tlayoutDialog(DIALOG_ICON_ERROR, NULL, NULL, NULL, "Encountered", NULL, "%s", NULL, "Please restart", "the device.");\n'
|
f'\tlayoutDialog(DIALOG_ICON_ERROR, NULL, NULL, NULL, "Encountered", NULL, "{i.upper()}", NULL, "Please restart", "the device.");\n'
|
||||||
% i.upper()
|
|
||||||
)
|
)
|
||||||
f.write("\tfor (;;) {} // loop forever\n")
|
f.write("\tfor (;;) {} // loop forever\n")
|
||||||
f.write("}\n\n")
|
f.write("}\n\n")
|
||||||
|
@ -37,5 +37,4 @@ if __name__ == "__main__":
|
|||||||
print("Filename :", args.file.name)
|
print("Filename :", args.file.name)
|
||||||
print("Fingerprint :", fingerprint)
|
print("Fingerprint :", fingerprint)
|
||||||
|
|
||||||
print("Size : {} bytes (out of {} maximum)"
|
print(f"Size : {size} bytes (out of {args.max_size} maximum)")
|
||||||
.format(size, args.max_size))
|
|
||||||
|
@ -28,4 +28,4 @@ while True:
|
|||||||
except Exception:
|
except Exception:
|
||||||
time.sleep(0.05)
|
time.sleep(0.05)
|
||||||
end = time.monotonic()
|
end = time.monotonic()
|
||||||
print("waited for {:.3f}s".format(end - start))
|
print(f"waited for {end - start:.3f}s")
|
||||||
|
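A tiny sketch (illustrative timing, not from the commit) of combining an arbitrary expression with a precision spec inside an f-string, as the waiting loop above does:

    import time

    start = time.monotonic()
    time.sleep(0.05)
    end = time.monotonic()
    print(f"waited for {end - start:.3f}s")   # e.g. 'waited for 0.050s'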
@ -52,7 +52,7 @@ setup(
|
|||||||
author_email="info@trezor.io",
|
author_email="info@trezor.io",
|
||||||
license="LGPLv3",
|
license="LGPLv3",
|
||||||
description="Python library for communicating with Trezor Hardware Wallet",
|
description="Python library for communicating with Trezor Hardware Wallet",
|
||||||
long_description="{}\n\n{}".format(read("README.md"), read("CHANGELOG.md")),
|
long_description=read("README.md") + "\n\n" + read("CHANGELOG.md"),
|
||||||
long_description_content_type="text/markdown",
|
long_description_content_type="text/markdown",
|
||||||
url="https://github.com/trezor/trezor-firmware/tree/master/python",
|
url="https://github.com/trezor/trezor-firmware/tree/master/python",
|
||||||
packages=find_packages("src"),
|
packages=find_packages("src"),
|
||||||
|
@ -47,13 +47,11 @@ class Emulator:
|
|||||||
storage=None,
|
storage=None,
|
||||||
headless=False,
|
headless=False,
|
||||||
debug=True,
|
debug=True,
|
||||||
extra_args=()
|
extra_args=(),
|
||||||
):
|
):
|
||||||
self.executable = Path(executable).resolve()
|
self.executable = Path(executable).resolve()
|
||||||
if not executable.exists():
|
if not executable.exists():
|
||||||
raise ValueError(
|
raise ValueError(f"emulator executable not found: {self.executable}")
|
||||||
"emulator executable not found: {}".format(self.executable)
|
|
||||||
)
|
|
||||||
|
|
||||||
self.profile_dir = Path(profile_dir).resolve()
|
self.profile_dir = Path(profile_dir).resolve()
|
||||||
if not self.profile_dir.exists():
|
if not self.profile_dir.exists():
|
||||||
@ -87,7 +85,7 @@ class Emulator:
|
|||||||
return os.environ.copy()
|
return os.environ.copy()
|
||||||
|
|
||||||
def _get_transport(self):
|
def _get_transport(self):
|
||||||
return UdpTransport("127.0.0.1:{}".format(self.port))
|
return UdpTransport(f"127.0.0.1:{self.port}")
|
||||||
|
|
||||||
def wait_until_ready(self, timeout=EMULATOR_WAIT_TIME):
|
def wait_until_ready(self, timeout=EMULATOR_WAIT_TIME):
|
||||||
transport = self._get_transport()
|
transport = self._get_transport()
|
||||||
@ -109,7 +107,7 @@ class Emulator:
|
|||||||
finally:
|
finally:
|
||||||
transport.close()
|
transport.close()
|
||||||
|
|
||||||
LOG.info("Emulator ready after {:.3f} seconds".format(time.monotonic() - start))
|
LOG.info(f"Emulator ready after {time.monotonic() - start:.3f} seconds")
|
||||||
|
|
||||||
def wait(self, timeout=None):
|
def wait(self, timeout=None):
|
||||||
ret = self.process.wait(timeout=timeout)
|
ret = self.process.wait(timeout=timeout)
|
||||||
@ -149,9 +147,7 @@ class Emulator:
|
|||||||
self.wait_until_ready()
|
self.wait_until_ready()
|
||||||
except TimeoutError:
|
except TimeoutError:
|
||||||
# Assuming that after the default 60-second timeout, the process is stuck
|
# Assuming that after the default 60-second timeout, the process is stuck
|
||||||
LOG.warning(
|
LOG.warning(f"Emulator did not come up after {EMULATOR_WAIT_TIME} seconds")
|
||||||
"Emulator did not come up after {} seconds".format(EMULATOR_WAIT_TIME)
|
|
||||||
)
|
|
||||||
self.process.kill()
|
self.process.kill()
|
||||||
raise
|
raise
|
||||||
|
|
||||||
@ -175,7 +171,7 @@ class Emulator:
|
|||||||
try:
|
try:
|
||||||
self.process.wait(EMULATOR_WAIT_TIME)
|
self.process.wait(EMULATOR_WAIT_TIME)
|
||||||
end = time.monotonic()
|
end = time.monotonic()
|
||||||
LOG.info("Emulator shut down after {:.3f} seconds".format(end - start))
|
LOG.info(f"Emulator shut down after {end - start:.3f} seconds")
|
||||||
except subprocess.TimeoutExpired:
|
except subprocess.TimeoutExpired:
|
||||||
LOG.info("Emulator seems stuck. Sending kill signal.")
|
LOG.info("Emulator seems stuck. Sending kill signal.")
|
||||||
self.process.kill()
|
self.process.kill()
|
||||||
@ -210,7 +206,7 @@ class CoreEmulator(Emulator):
|
|||||||
sdcard=None,
|
sdcard=None,
|
||||||
disable_animation=True,
|
disable_animation=True,
|
||||||
heap_size="20M",
|
heap_size="20M",
|
||||||
**kwargs
|
**kwargs,
|
||||||
):
|
):
|
||||||
super().__init__(*args, **kwargs)
|
super().__init__(*args, **kwargs)
|
||||||
if workdir is not None:
|
if workdir is not None:
|
||||||
@ -244,7 +240,7 @@ class CoreEmulator(Emulator):
|
|||||||
def make_args(self):
|
def make_args(self):
|
||||||
pyopt = "-O0" if self.debug else "-O1"
|
pyopt = "-O0" if self.debug else "-O1"
|
||||||
return (
|
return (
|
||||||
[pyopt, "-X", "heapsize={}".format(self.heap_size)]
|
[pyopt, "-X", f"heapsize={self.heap_size}"]
|
||||||
+ self.main_args
|
+ self.main_args
|
||||||
+ self.extra_args
|
+ self.extra_args
|
||||||
)
|
)
|
||||||
|
@ -137,7 +137,7 @@ def _format_container(
|
|||||||
output = repr(value)
|
output = repr(value)
|
||||||
else:
|
else:
|
||||||
output = value.hex()
|
output = value.hex()
|
||||||
return "{} bytes {}{}".format(length, output, suffix)
|
return f"{length} bytes {output}{suffix}"
|
||||||
|
|
||||||
if isinstance(value, Enum):
|
if isinstance(value, Enum):
|
||||||
return str(value)
|
return str(value)
|
||||||
@ -152,7 +152,7 @@ def _format_version(version: c.Container) -> str:
|
|||||||
str(version[k]) for k in ("major", "minor", "patch") if k in version
|
str(version[k]) for k in ("major", "minor", "patch") if k in version
|
||||||
)
|
)
|
||||||
if "build" in version:
|
if "build" in version:
|
||||||
version_str += " build {}".format(version.build)
|
version_str += f" build {version.build}"
|
||||||
return version_str
|
return version_str
|
||||||
|
|
||||||
|
|
||||||
@ -212,7 +212,7 @@ class VendorHeader(SignableImage):
|
|||||||
vhash = compute_vhash(vh)
|
vhash = compute_vhash(vh)
|
||||||
output = [
|
output = [
|
||||||
"Vendor Header " + _format_container(vh),
|
"Vendor Header " + _format_container(vh),
|
||||||
"Pubkey bundle hash: {}".format(vhash.hex()),
|
f"Pubkey bundle hash: {vhash.hex()}",
|
||||||
]
|
]
|
||||||
else:
|
else:
|
||||||
output = [
|
output = [
|
||||||
@ -226,13 +226,11 @@ class VendorHeader(SignableImage):
|
|||||||
fingerprint = firmware.header_digest(vh)
|
fingerprint = firmware.header_digest(vh)
|
||||||
|
|
||||||
if not terse:
|
if not terse:
|
||||||
output.append(
|
output.append(f"Fingerprint: {click.style(fingerprint.hex(), bold=True)}")
|
||||||
"Fingerprint: {}".format(click.style(fingerprint.hex(), bold=True))
|
|
||||||
)
|
|
||||||
|
|
||||||
sig_status = self.check_signature()
|
sig_status = self.check_signature()
|
||||||
sym = SYM_OK if sig_status.is_ok() else SYM_FAIL
|
sym = SYM_OK if sig_status.is_ok() else SYM_FAIL
|
||||||
output.append("{} Signature is {}".format(sym, sig_status.value))
|
output.append(f"{sym} Signature is {sig_status.value}")
|
||||||
|
|
||||||
return "\n".join(output)
|
return "\n".join(output)
|
||||||
|
|
||||||
@ -278,7 +276,7 @@ class BinImage(SignableImage):
|
|||||||
hashes_out = []
|
hashes_out = []
|
||||||
for expected, actual in zip(self.header.hashes, self.code_hashes):
|
for expected, actual in zip(self.header.hashes, self.code_hashes):
|
||||||
status = SYM_OK if expected == actual else SYM_FAIL
|
status = SYM_OK if expected == actual else SYM_FAIL
|
||||||
hashes_out.append(LiteralStr("{} {}".format(status, expected.hex())))
|
hashes_out.append(LiteralStr(f"{status} {expected.hex()}"))
|
||||||
|
|
||||||
if all(all_zero(h) for h in self.header.hashes):
|
if all(all_zero(h) for h in self.header.hashes):
|
||||||
hash_status = Status.MISSING
|
hash_status = Status.MISSING
|
||||||
@ -294,10 +292,8 @@ class BinImage(SignableImage):
|
|||||||
|
|
||||||
output = [
|
output = [
|
||||||
"Firmware Header " + _format_container(header_out),
|
"Firmware Header " + _format_container(header_out),
|
||||||
"Fingerprint: {}".format(click.style(self.digest().hex(), bold=True)),
|
f"Fingerprint: {click.style(self.digest().hex(), bold=True)}",
|
||||||
"{} Signature is {}, hashes are {}".format(
|
f"{all_ok} Signature is {sig_status.value}, hashes are {hash_status.value}",
|
||||||
all_ok, sig_status.value, hash_status.value
|
|
||||||
),
|
|
||||||
]
|
]
|
||||||
|
|
||||||
return "\n".join(output)
|
return "\n".join(output)
|
||||||
|
@ -271,7 +271,7 @@ def sign_tx(
|
|||||||
idx = res.serialized.signature_index
|
idx = res.serialized.signature_index
|
||||||
sig = res.serialized.signature
|
sig = res.serialized.signature
|
||||||
if signatures[idx] is not None:
|
if signatures[idx] is not None:
|
||||||
raise ValueError("Signature for index %d already filled" % idx)
|
raise ValueError(f"Signature for index {idx} already filled")
|
||||||
signatures[idx] = sig
|
signatures[idx] = sig
|
||||||
|
|
||||||
if res.request_type == R.TXFINISHED:
|
if res.request_type == R.TXFINISHED:
|
||||||
|
@ -78,7 +78,7 @@ class TrezorConnection:
|
|||||||
except Exception:
|
except Exception:
|
||||||
click.echo("Failed to find a Trezor device.")
|
click.echo("Failed to find a Trezor device.")
|
||||||
if self.path is not None:
|
if self.path is not None:
|
||||||
click.echo("Using path: {}".format(self.path))
|
click.echo(f"Using path: {self.path}")
|
||||||
sys.exit(1)
|
sys.exit(1)
|
||||||
|
|
||||||
try:
|
try:
|
||||||
|
@ -58,12 +58,12 @@ def send_bytes(obj, message_name_or_type, hex_data):
|
|||||||
response_type, response_data = transport.read()
|
response_type, response_data = transport.read()
|
||||||
transport.end_session()
|
transport.end_session()
|
||||||
|
|
||||||
click.echo("Response type: {}".format(response_type))
|
click.echo(f"Response type: {response_type}")
|
||||||
click.echo("Response data: {}".format(response_data.hex()))
|
click.echo(f"Response data: {response_data.hex()}")
|
||||||
|
|
||||||
try:
|
try:
|
||||||
msg = mapping.decode(response_type, response_data)
|
msg = mapping.decode(response_type, response_data)
|
||||||
click.echo("Parsed message:")
|
click.echo("Parsed message:")
|
||||||
click.echo(protobuf.format_message(msg))
|
click.echo(protobuf.format_message(msg))
|
||||||
except Exception as e:
|
except Exception as e:
|
||||||
click.echo("Could not parse response: {}".format(e))
|
click.echo(f"Could not parse response: {e}")
|
||||||
|
@ -37,7 +37,7 @@ def get_public_key(client, address, show_display):
|
|||||||
"""Get Eos public key in base58 encoding."""
|
"""Get Eos public key in base58 encoding."""
|
||||||
address_n = tools.parse_path(address)
|
address_n = tools.parse_path(address)
|
||||||
res = eos.get_public_key(client, address_n, show_display)
|
res = eos.get_public_key(client, address_n, show_display)
|
||||||
return "WIF: {}\nRaw: {}".format(res.wif_public_key, res.raw_public_key.hex())
|
return f"WIF: {res.wif_public_key}\nRaw: {res.raw_public_key.hex()}"
|
||||||
|
|
||||||
|
|
||||||
@cli.command()
|
@cli.command()
|
||||||
|
@ -303,7 +303,7 @@ def sign_tx(
|
|||||||
"to": to_address,
|
"to": to_address,
|
||||||
"from": from_address,
|
"from": from_address,
|
||||||
"value": amount,
|
"value": amount,
|
||||||
"data": "0x%s" % data.hex(),
|
"data": f"0x{data.hex()}",
|
||||||
}
|
}
|
||||||
)
|
)
|
||||||
|
|
||||||
@ -361,16 +361,17 @@ def sign_tx(
|
|||||||
transaction = rlp.encode(
|
transaction = rlp.encode(
|
||||||
(tx_type, nonce, gas_price, gas_limit, to, amount, data) + sig
|
(tx_type, nonce, gas_price, gas_limit, to, amount, data) + sig
|
||||||
)
|
)
|
||||||
tx_hex = "0x%s%s" % (
|
if eip2718_type is not None:
|
||||||
str(eip2718_type).zfill(2) if eip2718_type is not None else "",
|
eip2718_prefix = f"{eip2718_type:02x}"
|
||||||
transaction.hex(),
|
else:
|
||||||
)
|
eip2718_prefix = ""
|
||||||
|
tx_hex = f"0x{eip2718_prefix}{transaction.hex()}"
|
||||||
|
|
||||||
if publish:
|
if publish:
|
||||||
tx_hash = w3.eth.sendRawTransaction(tx_hex).hex()
|
tx_hash = w3.eth.sendRawTransaction(tx_hex).hex()
|
||||||
return "Transaction published with ID: %s" % tx_hash
|
return f"Transaction published with ID: {tx_hash}"
|
||||||
else:
|
else:
|
||||||
return "Signed raw transaction:\n%s" % tx_hex
|
return f"Signed raw transaction:\n{tx_hex}"
|
||||||
|
|
||||||
|
|
||||||
@cli.command()
|
@cli.command()
|
||||||
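A side-by-side sketch (sample value, not from the commit) of the EIP-2718 prefix change above: the old code zero-padded the type id in decimal, while the new f-string renders it as two hex digits; the two agree for the common type ids 1 and 2.

    eip2718_type = 2
    str(eip2718_type).zfill(2)   # '02'  (decimal, zero-padded)
    f"{eip2718_type:02x}"        # '02'  (hexadecimal)
    # they diverge only for ids above 9, e.g. 16 -> '16' vs '10'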
@ -384,7 +385,7 @@ def sign_message(client, address, message):
|
|||||||
output = {
|
output = {
|
||||||
"message": message,
|
"message": message,
|
||||||
"address": ret.address,
|
"address": ret.address,
|
||||||
"signature": "0x%s" % ret.signature.hex(),
|
"signature": f"0x{ret.signature.hex()}",
|
||||||
}
|
}
|
||||||
return output
|
return output
|
||||||
|
|
||||||
|
@ -41,30 +41,30 @@ def credentials_list(client):
|
|||||||
creds = fido.list_credentials(client)
|
creds = fido.list_credentials(client)
|
||||||
for cred in creds:
|
for cred in creds:
|
||||||
click.echo("")
|
click.echo("")
|
||||||
click.echo("WebAuthn credential at index {}:".format(cred.index))
|
click.echo(f"WebAuthn credential at index {cred.index}:")
|
||||||
if cred.rp_id is not None:
|
if cred.rp_id is not None:
|
||||||
click.echo(" Relying party ID: {}".format(cred.rp_id))
|
click.echo(f" Relying party ID: {cred.rp_id}")
|
||||||
if cred.rp_name is not None:
|
if cred.rp_name is not None:
|
||||||
click.echo(" Relying party name: {}".format(cred.rp_name))
|
click.echo(f" Relying party name: {cred.rp_name}")
|
||||||
if cred.user_id is not None:
|
if cred.user_id is not None:
|
||||||
click.echo(" User ID: {}".format(cred.user_id.hex()))
|
click.echo(f" User ID: {cred.user_id.hex()}")
|
||||||
if cred.user_name is not None:
|
if cred.user_name is not None:
|
||||||
click.echo(" User name: {}".format(cred.user_name))
|
click.echo(f" User name: {cred.user_name}")
|
||||||
if cred.user_display_name is not None:
|
if cred.user_display_name is not None:
|
||||||
click.echo(" User display name: {}".format(cred.user_display_name))
|
click.echo(f" User display name: {cred.user_display_name}")
|
||||||
if cred.creation_time is not None:
|
if cred.creation_time is not None:
|
||||||
click.echo(" Creation time: {}".format(cred.creation_time))
|
click.echo(f" Creation time: {cred.creation_time}")
|
||||||
if cred.hmac_secret is not None:
|
if cred.hmac_secret is not None:
|
||||||
click.echo(" hmac-secret enabled: {}".format(cred.hmac_secret))
|
click.echo(f" hmac-secret enabled: {cred.hmac_secret}")
|
||||||
if cred.use_sign_count is not None:
|
if cred.use_sign_count is not None:
|
||||||
click.echo(" Use signature counter: {}".format(cred.use_sign_count))
|
click.echo(f" Use signature counter: {cred.use_sign_count}")
|
||||||
if cred.algorithm is not None:
|
if cred.algorithm is not None:
|
||||||
algorithm = ALGORITHM_NAME.get(cred.algorithm, cred.algorithm)
|
algorithm = ALGORITHM_NAME.get(cred.algorithm, cred.algorithm)
|
||||||
click.echo(" Algorithm: {}".format(algorithm))
|
click.echo(f" Algorithm: {algorithm}")
|
||||||
if cred.curve is not None:
|
if cred.curve is not None:
|
||||||
curve = CURVE_NAME.get(cred.curve, cred.curve)
|
curve = CURVE_NAME.get(cred.curve, cred.curve)
|
||||||
click.echo(" Curve: {}".format(curve))
|
click.echo(f" Curve: {curve}")
|
||||||
click.echo(" Credential ID: {}".format(cred.id.hex()))
|
click.echo(f" Credential ID: {cred.id.hex()}")
|
||||||
|
|
||||||
if not creds:
|
if not creds:
|
||||||
click.echo("There are no resident credentials stored on the device.")
|
click.echo("There are no resident credentials stored on the device.")
|
||||||
|
@ -58,8 +58,6 @@ def sign_tx(client, address, file, broadcast):
|
|||||||
payload = {"data": transaction.data.hex(), "signature": transaction.signature.hex()}
|
payload = {"data": transaction.data.hex(), "signature": transaction.signature.hex()}
|
||||||
|
|
||||||
if broadcast:
|
if broadcast:
|
||||||
return requests.post(
|
return requests.post(f"{broadcast}/transaction/announce", json=payload).json()
|
||||||
"{}/transaction/announce".format(broadcast), json=payload
|
|
||||||
).json()
|
|
||||||
else:
|
else:
|
||||||
return payload
|
return payload
|
||||||
|
@ -165,7 +165,7 @@ def cli(ctx, path, verbose, is_json, passphrase_on_host, session_id):
|
|||||||
try:
|
try:
|
||||||
session_id = bytes.fromhex(session_id)
|
session_id = bytes.fromhex(session_id)
|
||||||
except ValueError:
|
except ValueError:
|
||||||
raise click.ClickException("Not a valid session id: {}".format(session_id))
|
raise click.ClickException(f"Not a valid session id: {session_id}")
|
||||||
|
|
||||||
ctx.obj = TrezorConnection(path, session_id, passphrase_on_host)
|
ctx.obj = TrezorConnection(path, session_id, passphrase_on_host)
|
||||||
|
|
||||||
@ -185,9 +185,9 @@ def print_result(res, is_json, **kwargs):
|
|||||||
for k, v in res.items():
|
for k, v in res.items():
|
||||||
if isinstance(v, dict):
|
if isinstance(v, dict):
|
||||||
for kk, vv in v.items():
|
for kk, vv in v.items():
|
||||||
click.echo("%s.%s: %s" % (k, kk, vv))
|
click.echo(f"{k}.{kk}: {vv}")
|
||||||
else:
|
else:
|
||||||
click.echo("%s: %s" % (k, v))
|
click.echo(f"{k}: {v}")
|
||||||
elif isinstance(res, protobuf.MessageType):
|
elif isinstance(res, protobuf.MessageType):
|
||||||
click.echo(protobuf.format_message(res))
|
click.echo(protobuf.format_message(res))
|
||||||
elif res is not None:
|
elif res is not None:
|
||||||
@ -197,10 +197,10 @@ def print_result(res, is_json, **kwargs):
|
|||||||
def format_device_name(features):
|
def format_device_name(features):
|
||||||
model = features.model or "1"
|
model = features.model or "1"
|
||||||
if features.bootloader_mode:
|
if features.bootloader_mode:
|
||||||
return "Trezor {} bootloader".format(model)
|
return f"Trezor {model} bootloader"
|
||||||
|
|
||||||
label = features.label or "(unnamed)"
|
label = features.label or "(unnamed)"
|
||||||
return "{} [Trezor {}, {}]".format(label, model, features.device_id)
|
return f"{label} [Trezor {model}, {features.device_id}]"
|
||||||
|
|
||||||
|
|
||||||
#
|
#
|
||||||
@ -217,7 +217,7 @@ def list_devices(no_resolve):
|
|||||||
|
|
||||||
for transport in enumerate_devices():
|
for transport in enumerate_devices():
|
||||||
client = TrezorClient(transport, ui=ui.ClickUI())
|
client = TrezorClient(transport, ui=ui.ClickUI())
|
||||||
click.echo("{} - {}".format(transport, format_device_name(client.features)))
|
click.echo(f"{transport} - {format_device_name(client.features)}")
|
||||||
client.end_session()
|
client.end_session()
|
||||||
|
|
||||||
|
|
||||||
@ -308,14 +308,14 @@ def wait_for_emulator(obj, timeout):
|
|||||||
path = obj.path
|
path = obj.path
|
||||||
if path:
|
if path:
|
||||||
if not path.startswith("udp:"):
|
if not path.startswith("udp:"):
|
||||||
raise click.ClickException("You must use UDP path, not {}".format(path))
|
raise click.ClickException(f"You must use UDP path, not {path}")
|
||||||
path = path.replace("udp:", "")
|
path = path.replace("udp:", "")
|
||||||
|
|
||||||
start = time.monotonic()
|
start = time.monotonic()
|
||||||
UdpTransport(path).wait_until_ready(timeout)
|
UdpTransport(path).wait_until_ready(timeout)
|
||||||
end = time.monotonic()
|
end = time.monotonic()
|
||||||
|
|
||||||
LOG.info("Waited for {:.3f} seconds".format(end - start))
|
LOG.info(f"Waited for {end - start:.3f} seconds")
|
||||||
|
|
||||||
|
|
||||||
#
|
#
|
||||||
|
@ -89,7 +89,7 @@ class TrezorClient:
|
|||||||
ui,
|
ui,
|
||||||
session_id=None,
|
session_id=None,
|
||||||
):
|
):
|
||||||
LOG.info("creating client instance for device: {}".format(transport.get_path()))
|
LOG.info(f"creating client instance for device: {transport.get_path()}")
|
||||||
self.transport = transport
|
self.transport = transport
|
||||||
self.ui = ui
|
self.ui = ui
|
||||||
self.session_counter = 0
|
self.session_counter = 0
|
||||||
@ -118,15 +118,13 @@ class TrezorClient:
|
|||||||
def _raw_write(self, msg):
|
def _raw_write(self, msg):
|
||||||
__tracebackhide__ = True # for pytest # pylint: disable=W0612
|
__tracebackhide__ = True # for pytest # pylint: disable=W0612
|
||||||
LOG.debug(
|
LOG.debug(
|
||||||
"sending message: {}".format(msg.__class__.__name__),
|
f"sending message: {msg.__class__.__name__}",
|
||||||
extra={"protobuf": msg},
|
extra={"protobuf": msg},
|
||||||
)
|
)
|
||||||
msg_type, msg_bytes = mapping.encode(msg)
|
msg_type, msg_bytes = mapping.encode(msg)
|
||||||
LOG.log(
|
LOG.log(
|
||||||
DUMP_BYTES,
|
DUMP_BYTES,
|
||||||
"encoded as type {} ({} bytes): {}".format(
|
f"encoded as type {msg_type} ({len(msg_bytes)} bytes): {msg_bytes.hex()}",
|
||||||
msg_type, len(msg_bytes), msg_bytes.hex()
|
|
||||||
),
|
|
||||||
)
|
)
|
||||||
self.transport.write(msg_type, msg_bytes)
|
self.transport.write(msg_type, msg_bytes)
|
||||||
|
|
||||||
@ -135,13 +133,11 @@ class TrezorClient:
|
|||||||
msg_type, msg_bytes = self.transport.read()
|
msg_type, msg_bytes = self.transport.read()
|
||||||
LOG.log(
|
LOG.log(
|
||||||
DUMP_BYTES,
|
DUMP_BYTES,
|
||||||
"received type {} ({} bytes): {}".format(
|
f"received type {msg_type} ({len(msg_bytes)} bytes): {msg_bytes.hex()}",
|
||||||
msg_type, len(msg_bytes), msg_bytes.hex()
|
|
||||||
),
|
|
||||||
)
|
)
|
||||||
msg = mapping.decode(msg_type, msg_bytes)
|
msg = mapping.decode(msg_type, msg_bytes)
|
||||||
LOG.debug(
|
LOG.debug(
|
||||||
"received message: {}".format(msg.__class__.__name__),
|
f"received message: {msg.__class__.__name__}",
|
||||||
extra={"protobuf": msg},
|
extra={"protobuf": msg},
|
||||||
)
|
)
|
||||||
return msg
|
return msg
|
||||||
|
@ -53,15 +53,13 @@ class DebugLink:
|
|||||||
|
|
||||||
def _call(self, msg, nowait=False):
|
def _call(self, msg, nowait=False):
|
||||||
LOG.debug(
|
LOG.debug(
|
||||||
"sending message: {}".format(msg.__class__.__name__),
|
f"sending message: {msg.__class__.__name__}",
|
||||||
extra={"protobuf": msg},
|
extra={"protobuf": msg},
|
||||||
)
|
)
|
||||||
msg_type, msg_bytes = mapping.encode(msg)
|
msg_type, msg_bytes = mapping.encode(msg)
|
||||||
LOG.log(
|
LOG.log(
|
||||||
DUMP_BYTES,
|
DUMP_BYTES,
|
||||||
"encoded as type {} ({} bytes): {}".format(
|
f"encoded as type {msg_type} ({len(msg_bytes)} bytes): {msg_bytes.hex()}",
|
||||||
msg_type, len(msg_bytes), msg_bytes.hex()
|
|
||||||
),
|
|
||||||
)
|
)
|
||||||
self.transport.write(msg_type, msg_bytes)
|
self.transport.write(msg_type, msg_bytes)
|
||||||
if nowait:
|
if nowait:
|
||||||
@ -70,13 +68,11 @@ class DebugLink:
|
|||||||
ret_type, ret_bytes = self.transport.read()
|
ret_type, ret_bytes = self.transport.read()
|
||||||
LOG.log(
|
LOG.log(
|
||||||
DUMP_BYTES,
|
DUMP_BYTES,
|
||||||
"received type {} ({} bytes): {}".format(
|
f"received type {msg_type} ({len(msg_bytes)} bytes): {msg_bytes.hex()}",
|
||||||
msg_type, len(msg_bytes), msg_bytes.hex()
|
|
||||||
),
|
|
||||||
)
|
)
|
||||||
msg = mapping.decode(ret_type, ret_bytes)
|
msg = mapping.decode(ret_type, ret_bytes)
|
||||||
LOG.debug(
|
LOG.debug(
|
||||||
"received message: {}".format(msg.__class__.__name__),
|
f"received message: {msg.__class__.__name__}",
|
||||||
extra={"protobuf": msg},
|
extra={"protobuf": msg},
|
||||||
)
|
)
|
||||||
return msg
|
return msg
|
||||||
@ -328,15 +324,15 @@ class MessageFilter:
|
|||||||
field_str = textwrap.indent(field_str, " ").lstrip()
|
field_str = textwrap.indent(field_str, " ").lstrip()
|
||||||
fields.append((field.name, field_str))
|
fields.append((field.name, field_str))
|
||||||
|
|
||||||
pairs = ["{}={}".format(k, v) for k, v in fields]
|
pairs = [f"{k}={v}" for k, v in fields]
|
||||||
oneline_str = ", ".join(pairs)
|
oneline_str = ", ".join(pairs)
|
||||||
if len(oneline_str) < maxwidth:
|
if len(oneline_str) < maxwidth:
|
||||||
return "{}({})".format(self.message_type.__name__, oneline_str)
|
return f"{self.message_type.__name__}({oneline_str})"
|
||||||
else:
|
else:
|
||||||
item = []
|
item = []
|
||||||
item.append("{}(".format(self.message_type.__name__))
|
item.append(f"{self.message_type.__name__}(")
|
||||||
for pair in pairs:
|
for pair in pairs:
|
||||||
item.append(" {}".format(pair))
|
item.append(f" {pair}")
|
||||||
item.append(")")
|
item.append(")")
|
||||||
return "\n".join(item)
|
return "\n".join(item)
|
||||||
|
|
||||||
@ -567,7 +563,7 @@ class TrezorClientDebugLink(TrezorClient):
|
|||||||
for i in range(start_at, stop_at):
|
for i in range(start_at, stop_at):
|
||||||
exp = expected[i]
|
exp = expected[i]
|
||||||
prefix = " " if i != current else ">>> "
|
prefix = " " if i != current else ">>> "
|
||||||
output.append(textwrap.indent(exp.format(), prefix))
|
output.append(textwrap.indent(exp, prefix))
|
||||||
if stop_at < len(expected):
|
if stop_at < len(expected):
|
||||||
omitted = len(expected) - stop_at
|
omitted = len(expected) - stop_at
|
||||||
output.append(f" (...{omitted} following responses omitted)")
|
output.append(f" (...{omitted} following responses omitted)")
|
||||||
|
@ -344,7 +344,7 @@ def sign_tx(client, address, transaction, chain_id):
|
|||||||
|
|
||||||
if not isinstance(response, messages.EosSignedTx):
|
if not isinstance(response, messages.EosSignedTx):
|
||||||
raise exceptions.TrezorException(
|
raise exceptions.TrezorException(
|
||||||
"Unexpected message: {}".format(response.__class__.__name__)
|
f"Unexpected message: {response.__class__.__name__}"
|
||||||
)
|
)
|
||||||
|
|
||||||
return response
|
return response
|
||||||
|
@ -35,7 +35,7 @@ class TrezorFailure(TrezorException):
|
|||||||
if not name.startswith("_")
|
if not name.startswith("_")
|
||||||
}
|
}
|
||||||
if self.message is not None:
|
if self.message is not None:
|
||||||
return "{}: {}".format(types[self.code], self.message)
|
return f"{types[self.code]}: {self.message}"
|
||||||
else:
|
else:
|
||||||
return types[self.failure.code]
|
return types[self.failure.code]
|
||||||
|
|
||||||
|
@ -304,9 +304,7 @@ def check_sig_v1(
|
|||||||
|
|
||||||
if len(distinct_key_indexes) < len(key_indexes):
|
if len(distinct_key_indexes) < len(key_indexes):
|
||||||
raise InvalidSignatureError(
|
raise InvalidSignatureError(
|
||||||
"Not enough distinct signatures (found {}, need {})".format(
|
f"Not enough distinct signatures (found {len(distinct_key_indexes)}, need {len(key_indexes)})"
|
||||||
len(distinct_key_indexes), len(key_indexes)
|
|
||||||
)
|
|
||||||
)
|
)
|
||||||
|
|
||||||
for i in range(len(key_indexes)):
|
for i in range(len(key_indexes)):
|
||||||
@ -315,14 +313,14 @@ def check_sig_v1(
|
|||||||
|
|
||||||
if key_idx >= len(V1_BOOTLOADER_KEYS):
|
if key_idx >= len(V1_BOOTLOADER_KEYS):
|
||||||
# unknown pubkey
|
# unknown pubkey
|
||||||
raise InvalidSignatureError("Unknown key in slot {}".format(i))
|
raise InvalidSignatureError(f"Unknown key in slot {i}")
|
||||||
|
|
||||||
pubkey = V1_BOOTLOADER_KEYS[key_idx][1:]
|
pubkey = V1_BOOTLOADER_KEYS[key_idx][1:]
|
||||||
verify = ecdsa.VerifyingKey.from_string(pubkey, curve=ecdsa.curves.SECP256k1)
|
verify = ecdsa.VerifyingKey.from_string(pubkey, curve=ecdsa.curves.SECP256k1)
|
||||||
try:
|
try:
|
||||||
verify.verify_digest(signature, digest)
|
verify.verify_digest(signature, digest)
|
||||||
except ecdsa.BadSignatureError as e:
|
except ecdsa.BadSignatureError as e:
|
||||||
raise InvalidSignatureError("Invalid signature in slot {}".format(i)) from e
|
raise InvalidSignatureError(f"Invalid signature in slot {i}") from e
|
||||||
|
|
||||||
|
|
||||||
def header_digest(header: c.Container, hash_function: Callable = blake2s) -> bytes:
|
def header_digest(header: c.Container, hash_function: Callable = blake2s) -> bytes:
|
||||||
@ -498,7 +496,7 @@ def update(client, data):
|
|||||||
if isinstance(resp, messages.Success):
|
if isinstance(resp, messages.Success):
|
||||||
return
|
return
|
||||||
else:
|
else:
|
||||||
raise RuntimeError("Unexpected result %s" % resp)
|
raise RuntimeError(f"Unexpected result {resp}")
|
||||||
|
|
||||||
# TREZORv2 method
|
# TREZORv2 method
|
||||||
while isinstance(resp, messages.FirmwareRequest):
|
while isinstance(resp, messages.FirmwareRequest):
|
||||||
@ -509,4 +507,4 @@ def update(client, data):
|
|||||||
if isinstance(resp, messages.Success):
|
if isinstance(resp, messages.Success):
|
||||||
return
|
return
|
||||||
else:
|
else:
|
||||||
raise RuntimeError("Unexpected message %s" % resp)
|
raise RuntimeError(f"Unexpected message {resp}")
|
||||||
|
@ -39,7 +39,7 @@ class PrettyProtobufFormatter(logging.Formatter):
|
|||||||
)
|
)
|
||||||
if hasattr(record, "protobuf"):
|
if hasattr(record, "protobuf"):
|
||||||
if type(record.protobuf) in OMITTED_MESSAGES:
|
if type(record.protobuf) in OMITTED_MESSAGES:
|
||||||
message += " ({} bytes)".format(record.protobuf.ByteSize())
|
message += f" ({record.protobuf.ByteSize()} bytes)"
|
||||||
else:
|
else:
|
||||||
message += "\n" + protobuf.format_message(record.protobuf)
|
message += "\n" + protobuf.format_message(record.protobuf)
|
||||||
return message
|
return message
|
||||||
|
@ -42,8 +42,7 @@ def build_map():
|
|||||||
def register_message(msg_class):
|
def register_message(msg_class):
|
||||||
if msg_class.MESSAGE_WIRE_TYPE in map_type_to_class:
|
if msg_class.MESSAGE_WIRE_TYPE in map_type_to_class:
|
||||||
raise Exception(
|
raise Exception(
|
||||||
"Message for wire type %s is already registered by %s"
|
f"Message for wire type {msg_class.MESSAGE_WIRE_TYPE} is already registered by {get_class(msg_class.MESSAGE_WIRE_TYPE)}"
|
||||||
% (msg_class.MESSAGE_WIRE_TYPE, get_class(msg_class.MESSAGE_WIRE_TYPE))
|
|
||||||
)
|
)
|
||||||
|
|
||||||
map_class_to_type[msg_class] = msg_class.MESSAGE_WIRE_TYPE
|
map_class_to_type[msg_class] = msg_class.MESSAGE_WIRE_TYPE
|
||||||
|
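A short sketch (hypothetical stand-ins) of the pattern in the last hunk: an f-string replacement field may hold any expression, including a function call such as get_class(...), so the lookup no longer has to be computed up front as an argument to the % operator:

    def get_class(wire_type):    # hypothetical stand-in
        return f"Msg{wire_type}"

    wire_type = 42
    print(f"Message for wire type {wire_type} is already registered by {get_class(wire_type)}")
    # Message for wire type 42 is already registered by Msg42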
Some files were not shown because too many files have changed in this diff.