2018-07-20 14:49:45 +00:00
|
|
|
#!/usr/bin/env python3
|
2022-05-23 14:19:19 +00:00
|
|
|
from __future__ import annotations
|
|
|
|
|
2019-04-18 14:27:27 +00:00
|
|
|
import json
|
2018-07-20 14:49:45 +00:00
|
|
|
import logging
|
2019-04-18 14:27:27 +00:00
|
|
|
import os
|
|
|
|
import re
|
|
|
|
from collections import OrderedDict, defaultdict
|
2020-11-11 10:25:33 +00:00
|
|
|
from pathlib import Path
|
2022-05-23 14:19:19 +00:00
|
|
|
from typing import Any, Callable, Iterable, Iterator, Literal, TypedDict, cast
|
2018-07-20 14:49:45 +00:00
|
|
|
|
|
|
|
try:
|
|
|
|
import requests
|
|
|
|
except ImportError:
|
|
|
|
requests = None
|
|
|
|
|
|
|
|
log = logging.getLogger(__name__)
|
|
|
|
|
2022-01-28 15:02:17 +00:00
|
|
|
# Repository root: this file lives one directory below it.
ROOT = Path(__file__).resolve().parent.parent

# Directory with coin/token/FIDO definitions. Overridable through the
# DEFS_DIR environment variable; defaults to <repo root>/defs.
# (walrus avoids reading the environment variable twice and guarantees
# the Path() argument is a non-empty str)
if _defs_dir_env := os.environ.get("DEFS_DIR"):
    DEFS_DIR = Path(_defs_dir_env).resolve()
else:
    DEFS_DIR = ROOT / "defs"
|
2018-07-20 14:49:45 +00:00
|
|
|
|
|
|
|
|
2022-05-23 14:19:19 +00:00
|
|
|
class SupportItemBool(TypedDict):
    """One section of `support.json` whose values are plain booleans,
    keyed by coin key (used for the connect/suite sections of `SupportData`)."""

    supported: dict[str, bool]
    unsupported: dict[str, bool]
|
|
|
|
|
|
|
|
|
|
|
|
class SupportItemVersion(TypedDict):
    """One section of `support.json` whose values are strings (usually
    firmware versions), keyed by coin key (used for trezor1/trezor2)."""

    supported: dict[str, str]
    unsupported: dict[str, str]
|
|
|
|
|
|
|
|
|
|
|
|
class SupportData(TypedDict):
    """Shape of the raw `support.json` file: one section per "device"."""

    connect: SupportItemBool
    suite: SupportItemBool
    trezor1: SupportItemVersion
    trezor2: SupportItemVersion
|
|
|
|
|
|
|
|
|
|
|
|
class SupportInfoItem(TypedDict):
    """Resolved support status of a single coin, one entry per device,
    as produced by `support_info_single`."""

    connect: bool
    suite: bool
    # False means unsupported; a str is the value from support.json
    # (usually a firmware version).
    trezor1: Literal[False] | str
    trezor2: Literal[False] | str
|
|
|
|
|
|
|
|
|
|
|
|
# Coin key -> resolved support status.
SupportInfo = dict[str, SupportInfoItem]

# Wallet name -> wallet URL, same format as in `wallets.json`.
WalletItems = dict[str, str]
# Coin key -> wallets applicable to that coin.
WalletInfo = dict[str, WalletItems]
|
|
|
|
|
2022-05-23 14:19:19 +00:00
|
|
|
|
|
|
|
class Coin(TypedDict):
    """Typed view of a single coin/network/token definition.

    All known fields are declared here, but which ones are actually present
    on a given coin depends on its category (BTC-like, ETH network, ERC20
    token, NEM mosaic, misc) and on the processing stage.
    """

    # Necessary fields for BTC - from BTC_CHECKS
    coin_name: str
    coin_shortcut: str
    coin_label: str
    website: str
    github: str
    maintainer: str
    curve_name: str
    address_type: int
    address_type_p2sh: int
    maxfee_kb: int
    minfee_kb: int
    hash_genesis_block: str
    xprv_magic: int
    xpub_magic: int
    xpub_magic_segwit_p2sh: int
    xpub_magic_segwit_native: int
    slip44: int
    segwit: bool
    decred: bool
    fork_id: int
    force_bip143: bool
    default_fee_b: dict[str, int]
    dust_limit: int
    blocktime_seconds: int
    signed_message_header: str
    uri_prefix: str
    min_address_length: int
    max_address_length: int
    bech32_prefix: str
    cashaddr_prefix: str

    # Other fields optionally coming from JSON
    links: dict[str, str]
    wallet: WalletItems
    curve: str
    decimals: int

    # Mandatory fields added later in coin.update()
    name: str
    shortcut: str
    key: str
    icon: str

    # Special ETH fields
    chain: str
    chain_id: str
    rskip60: bool
    url: str

    # Special erc20 fields
    symbol: str
    address: str
    address_bytes: bytes
    dup_key_nontoken: bool
    deprecation: dict[str, str]

    # Special NEM fields
    ticker: str

    # Fields that are being created
    unsupported: bool
    duplicate: bool
    support: SupportInfoItem

    # Backend-oriented fields
    blockchain_link: dict[str, Any]
    blockbook: list[str]
    bitcore: list[str]
|
|
|
|
|
|
|
|
|
|
|
|
# Flat list of coin definitions.
Coins = list[Coin]
# Coins grouped under a string key (e.g. buckets of duplicate symbols).
CoinBuckets = dict[str, Coins]
|
|
|
|
|
|
|
|
|
|
|
|
class FidoApp(TypedDict):
    """One FIDO/WebAuthn app definition from `fido/*.json`
    (missing fields are defaulted by `_load_fido_apps`)."""

    name: str
    webauthn: list[str]
    u2f: list[dict[str, str]]
    use_sign_count: bool
    use_self_attestation: bool
    no_icon: bool

    # Added by _load_fido_apps:
    key: str
    icon: str


FidoApps = list[FidoApp]
|
|
|
|
|
|
|
|
|
|
|
|
def load_json(*path: str | Path) -> Any:
    """Load and parse a JSON file.

    A single `Path` argument is used as-is; anything else is joined as
    path components relative to DEFS_DIR. Key order of JSON objects is
    preserved via OrderedDict.
    """
    single = path[0] if len(path) == 1 else None
    target = single if isinstance(single, Path) else Path(DEFS_DIR, *path)
    return json.loads(target.read_text(), object_pairs_hook=OrderedDict)
|
2018-07-20 14:49:45 +00:00
|
|
|
|
|
|
|
|
2018-07-30 12:25:53 +00:00
|
|
|
# ====== CoinsInfo ======
|
|
|
|
|
|
|
|
|
2022-05-23 14:19:19 +00:00
|
|
|
class CoinsInfo(dict[str, Coins]):
    """Collection of information about all known kinds of coins.

    It contains the following lists:
    `bitcoin` for btc-like coins,
    `eth` for ethereum networks,
    `erc20` for ERC20 tokens,
    `nem` for NEM mosaics,
    `misc` for other networks.

    Accessible as a dict or by attribute: `info["misc"] == info.misc`
    """

    def as_list(self) -> Coins:
        """Flatten all categories into a single list of coins."""
        # A flattening comprehension instead of sum(..., []), which copies
        # the accumulator on every step and is accidentally quadratic.
        return [coin for bucket in self.values() for coin in bucket]

    def as_dict(self) -> dict[str, Coin]:
        """Return all coins indexed by their unique `key` field."""
        return {coin["key"]: coin for coin in self.as_list()}

    def __getattr__(self, attr: str) -> Coins:
        # Attribute access falls through to dict lookup, so `info.misc`
        # works; unknown names raise AttributeError as usual.
        if attr in self:
            return self[attr]
        else:
            raise AttributeError(attr)
|
|
|
|
|
|
|
|
|
2018-07-20 14:49:45 +00:00
|
|
|
# ====== coin validation ======
|
|
|
|
|
|
|
|
|
2022-05-23 14:19:19 +00:00
|
|
|
def check_type(
    val: Any,
    types: type | tuple[type, ...],
    nullable: bool = False,
    empty: bool = False,
    regex: str | None = None,
    choice: list[str] | None = None,
) -> None:
    """Validate a single value against a set of constraints.

    Raises TypeError for a wrong type and ValueError for a constraint
    violation; returns None when everything checks out.
    """
    # None is acceptable only when explicitly allowed.
    if val is None:
        if not nullable:
            raise ValueError("Missing required value")
        return

    if not isinstance(val, types):
        raise TypeError(f"Wrong type (expected: {types})")

    # Unless `empty` is allowed, reject empty strings and collections.
    if not empty and not val:
        if isinstance(val, (list, dict)):
            raise ValueError("Empty collection")
        if isinstance(val, str):
            raise ValueError("Empty string")

    # Regex constraints only make sense for string values.
    if regex is not None:
        if types is not str:
            raise TypeError("Wrong type for regex check")
        assert isinstance(val, str)
        if re.search(regex, val) is None:
            raise ValueError(f"Value does not match regex {regex}")

    # Closed set of allowed values.
    if choice is not None and val not in choice:
        choice_str = ", ".join(choice)
        raise ValueError(f"Value not allowed, use one of: {choice_str}")
|
2018-07-20 14:49:45 +00:00
|
|
|
|
|
|
|
|
2022-05-23 14:19:19 +00:00
|
|
|
def check_key(
    key: str, types: type | tuple[type, ...], optional: bool = False, **kwargs: Any
) -> Callable[[Coin], None]:
    """Build a validator that applies `check_type` to `coin[key]`.

    A missing key raises KeyError unless `optional` is set; any failure
    from `check_type` is re-raised as ValueError prefixed with the key.
    """

    def do_check(coin: Coin) -> None:
        if key not in coin:
            if not optional:
                raise KeyError(f"{key}: Missing key")
            return
        try:
            check_type(coin[key], types, **kwargs)
        except Exception as e:
            raise ValueError(f"{key}: {e}") from e

    return do_check
|
|
|
|
|
|
|
|
|
2018-07-23 14:33:00 +00:00
|
|
|
# Per-field validators for BTC-like coin definitions; run by `validate_btc`.
BTC_CHECKS = [
    check_key("coin_name", str, regex=r"^[A-Z]"),
    check_key("coin_shortcut", str, regex=r"^t?[A-Z]{3,}$"),
    check_key("coin_label", str, regex=r"^x?[A-Z]"),
    check_key("website", str, regex=r"^https://.*[^/]$"),
    check_key("github", str, regex=r"^https://git(hu|la)b.com/.*[^/]$"),
    check_key("maintainer", str),
    check_key(
        "curve_name",
        str,
        choice=[
            "secp256k1",
            "secp256k1_decred",
            "secp256k1_groestl",
            "secp256k1_smart",
        ],
    ),
    check_key("address_type", int),
    check_key("address_type_p2sh", int),
    check_key("maxfee_kb", int),
    check_key("minfee_kb", int),
    check_key("hash_genesis_block", str, regex=r"^[0-9a-f]{64}$"),
    check_key("xprv_magic", int),
    check_key("xpub_magic", int),
    check_key("xpub_magic_segwit_p2sh", int, nullable=True),
    check_key("xpub_magic_segwit_native", int, nullable=True),
    check_key("slip44", int),
    check_key("segwit", bool),
    check_key("decred", bool),
    check_key("fork_id", int, nullable=True),
    check_key("force_bip143", bool),
    check_key("default_fee_b", dict),
    check_key("dust_limit", int),
    check_key("blocktime_seconds", int),
    check_key("signed_message_header", str),
    check_key("uri_prefix", str, regex=r"^[a-z-\.\+]+$"),
    check_key("min_address_length", int),
    check_key("max_address_length", int),
    check_key("bech32_prefix", str, regex=r"^[a-z-\.\+]+$", nullable=True),
    check_key("cashaddr_prefix", str, regex=r"^[a-z-\.\+]+$", nullable=True),
]
|
|
|
|
|
|
|
|
|
2022-05-23 14:19:19 +00:00
|
|
|
def validate_btc(coin: Coin) -> list[str]:
    """Run all consistency checks on a BTC-like coin.

    Returns a list of error messages; an empty list means the coin is valid.
    """
    errors: list[str] = []

    # Per-field checks first.
    for check in BTC_CHECKS:
        try:
            check(coin)
        except Exception as e:
            errors.append(str(e))

    # Cross-field checks follow.

    # All defined xpub/xprv magic numbers must be mutually distinct.
    magic_keys = (
        "xprv_magic",
        "xpub_magic",
        "xpub_magic_segwit_p2sh",
        "xpub_magic_segwit_native",
    )
    magics: list[int] = [coin[k] for k in magic_keys if coin[k] is not None]
    if len(set(magics)) != len(magics):
        errors.append("XPUB/XPRV magic numbers must be unique")

    if coin["address_type"] == coin["address_type_p2sh"]:
        errors.append("address_type must be distinct from address_type_p2sh")

    if coin["maxfee_kb"] < coin["minfee_kb"]:
        errors.append("max fee must not be smaller than min fee")

    if coin["max_address_length"] < coin["min_address_length"]:
        errors.append("max address length must not be smaller than min address length")

    if "testnet" in coin["coin_name"].lower() and coin["slip44"] != 1:
        errors.append("testnet coins must use slip44 coin type 1")

    # Segwit-related fields must be present exactly when segwit is enabled.
    if coin["segwit"]:
        if coin["bech32_prefix"] is None:
            errors.append("bech32_prefix must be defined for segwit-enabled coin")
        if coin["xpub_magic_segwit_p2sh"] is None:
            errors.append(
                "xpub_magic_segwit_p2sh must be defined for segwit-enabled coin"
            )
    else:
        if coin["bech32_prefix"] is not None:
            errors.append("bech32_prefix must not be defined for segwit-disabled coin")
        if coin["xpub_magic_segwit_p2sh"] is not None:
            errors.append(
                "xpub_magic_segwit_p2sh must not be defined for segwit-disabled coin"
            )

    return errors
|
|
|
|
|
|
|
|
|
|
|
|
# ======= Coin json loaders =======
|
|
|
|
|
|
|
|
|
2022-05-23 14:19:19 +00:00
|
|
|
def _load_btc_coins() -> Coins:
    """Load btc-like coins from `bitcoin/*.json`."""
    result: Coins = []
    for json_file in DEFS_DIR.glob("bitcoin/*.json"):
        coin: Coin = load_json(json_file)
        shortcut = coin["coin_shortcut"]
        # Derive the generic fields from the BTC-specific ones.
        coin.update(
            name=coin["coin_label"],
            shortcut=shortcut,
            key=f"bitcoin:{shortcut}",
            icon=str(json_file.with_suffix(".png")),
        )
        result.append(coin)

    return result
|
|
|
|
|
|
|
|
|
2022-05-23 14:19:19 +00:00
|
|
|
def _load_ethereum_networks() -> Coins:
    """Load ethereum network definitions from the `ethereum/chains` data."""
    chains_path = DEFS_DIR / "ethereum" / "chains" / "_data" / "chains"

    def eip155_number(path: Path) -> int:
        # file names look like "eip155-<chain id>.json"
        return int(path.stem.replace("eip155-", ""))

    networks: Coins = []
    for chain_file in sorted(chains_path.glob("eip155-*.json"), key=eip155_number):
        chain_data = load_json(chain_file)
        shortcut = chain_data["nativeCurrency"]["symbol"]
        name = chain_data["name"]
        title = chain_data.get("title", "")
        is_testnet = "testnet" in name.lower() or "testnet" in title.lower()

        # Testnets always use SLIP-44 coin type 1; others take the declared
        # value, defaulting to 60 (Ethereum).
        slip44 = 1 if is_testnet else chain_data.get("slip44", 60)

        # Prefix testnet symbols with "t" if not already done.
        if is_testnet and not shortcut.lower().startswith("t"):
            shortcut = "t" + shortcut

        rskip60 = shortcut in ("RBTC", "TRBTC")

        # strip out bullcrap in network naming
        if "mainnet" in name.lower():
            name = re.sub(r" mainnet.*$", "", name, flags=re.IGNORECASE)

        network = dict(
            chain=chain_data["shortName"],
            chain_id=chain_data["chainId"],
            slip44=slip44,
            shortcut=shortcut,
            name=name,
            rskip60=rskip60,
            url=chain_data["infoURL"],
            key=f"eth:{shortcut}",
        )
        networks.append(cast(Coin, network))

    return networks
|
|
|
|
|
|
|
|
|
2022-05-23 14:19:19 +00:00
|
|
|
def _load_erc20_tokens() -> Coins:
    """Load ERC20 tokens from `ethereum/tokens` submodule."""
    tokens: Coins = []
    for network in _load_ethereum_networks():
        chain = network["chain"]
        chain_path = DEFS_DIR / "ethereum" / "tokens" / "tokens" / chain
        for token_file in sorted(chain_path.glob("*.json")):
            token: Coin = load_json(token_file)
            symbol = token["symbol"]
            token.update(
                chain=chain,
                chain_id=network["chain_id"],
                # contract address without the leading "0x"
                address_bytes=bytes.fromhex(token["address"][2:]),
                shortcut=symbol,
                key=f"erc20:{chain}:{symbol}",
            )
            tokens.append(token)

    return tokens
|
|
|
|
|
|
|
|
|
2022-05-23 14:19:19 +00:00
|
|
|
def _load_nem_mosaics() -> Coins:
    """Load NEM mosaics from `nem/nem_mosaics.json`."""
    mosaics: Coins = load_json("nem/nem_mosaics.json")
    for mosaic in mosaics:
        ticker = mosaic["ticker"].strip()
        mosaic.update(shortcut=ticker, key=f"nem:{ticker}")
    return mosaics
|
|
|
|
|
|
|
|
|
2022-05-23 14:19:19 +00:00
|
|
|
def _load_misc() -> Coins:
    """Load miscellaneous networks from `misc/misc.json`."""
    coins: Coins = load_json("misc/misc.json")
    for coin in coins:
        coin.update(key=f"misc:{coin['shortcut']}")
    return coins
|
|
|
|
|
|
|
|
|
2022-05-23 14:19:19 +00:00
|
|
|
def _load_fido_apps() -> FidoApps:
    """Load FIDO app definitions from `fido/*.json`."""
    apps: FidoApps = []
    for app_file in sorted(DEFS_DIR.glob("fido/*.json")):
        app = load_json(app_file)
        # Fill in defaults for fields the JSON may omit.
        app.setdefault("use_sign_count", None)
        app.setdefault("use_self_attestation", None)
        app.setdefault("u2f", [])
        app.setdefault("webauthn", [])

        # Icon is the sibling PNG, if it exists.
        icon_file = app_file.with_suffix(".png")
        icon_path = str(icon_file) if icon_file.exists() else None

        app.update(key=app_file.stem.lower(), icon=icon_path)
        apps.append(app)

    return apps
|
|
|
|
|
|
|
|
|
2018-07-20 14:49:45 +00:00
|
|
|
# ====== support info ======
|
|
|
|
|
2020-09-02 13:05:26 +00:00
|
|
|
# Per-model firmware release metadata; `{}` is filled with "1" or "2".
RELEASES_URL = "https://data.trezor.io/firmware/{}/releases.json"
# Devices for which a missing entry in support.json means "not supported".
MISSING_SUPPORT_MEANS_NO = ("connect", "suite")
# Devices whose support values are version strings rather than booleans.
VERSIONED_SUPPORT_INFO = ("trezor1", "trezor2")
|
2018-07-20 14:49:45 +00:00
|
|
|
|
|
|
|
|
2022-05-23 14:19:19 +00:00
|
|
|
def get_support_data() -> SupportData:
    """Read the raw support matrix from `support.json`."""
    data: SupportData = load_json("support.json")
    return data
|
|
|
|
|
|
|
|
|
2022-05-23 14:19:19 +00:00
|
|
|
def latest_releases() -> dict[str, Any]:
    """Fetch the newest released firmware version for Trezor 1 and 2.

    Requires the optional `requests` dependency; raises RuntimeError when
    it is not installed.
    """
    if not requests:
        raise RuntimeError("requests library is required for getting release info")

    latest: dict[str, Any] = {}
    for model in ("1", "2"):
        releases = requests.get(RELEASES_URL.format(model)).json()
        # versions are lists in the JSON; compare them as tuples
        latest["trezor" + model] = max(tuple(r["version"]) for r in releases)
    return latest
|
|
|
|
|
|
|
|
|
2022-05-23 14:19:19 +00:00
|
|
|
def is_token(coin: Coin) -> bool:
    """ERC20 tokens are exactly the coins whose key carries the `erc20:` prefix."""
    token_prefix = "erc20:"
    return coin["key"].startswith(token_prefix)
|
|
|
|
|
|
|
|
|
2022-05-23 14:19:19 +00:00
|
|
|
def support_info_single(support_data: SupportData, coin: Coin) -> SupportInfoItem:
    """Extract a support dict from `support.json` data.

    Returns a dict of support values for each "device", i.e., `support.json`
    top-level key.

    The support value for each device is determined in order of priority:
    * if the coin has an entry in `unsupported`, its support is `False`
    * if the coin has an entry in `supported` its support is that entry
      (usually a version string, or `True` for connect/suite)
    * if the coin doesn't have an entry, its support status is `None`
    """
    key = coin["key"]
    result = {}
    for device, values in support_data.items():
        assert isinstance(values, dict)
        support_value: Any
        if key in values["unsupported"]:
            # an explicit "unsupported" entry always wins
            support_value = False
        elif key in values["supported"]:
            support_value = values["supported"][key]
        else:
            # no entry at all: connect/suite default to "no", others to unknown
            support_value = False if device in MISSING_SUPPORT_MEANS_NO else None
        result[device] = support_value

    return cast(SupportInfoItem, result)
|
2018-07-20 14:49:45 +00:00
|
|
|
|
|
|
|
|
2022-05-23 14:19:19 +00:00
|
|
|
def support_info(coins: Iterable[Coin] | CoinsInfo | dict[str, Coin]) -> SupportInfo:
    """Generate Trezor support information.

    Takes a collection of coins and generates a support-info entry for each.
    The support-info is a dict with keys based on `support.json` keys.
    These are usually: "trezor1", "trezor2", "connect" and "suite".

    The `coins` argument can be a `CoinsInfo` object, a list or a dict of
    coin items.

    Support information is taken from `support.json`.
    """
    if isinstance(coins, CoinsInfo):
        coins = coins.as_list()
    elif isinstance(coins, dict):
        coins = coins.values()

    support_data = get_support_data()
    return {coin["key"]: support_info_single(support_data, coin) for coin in coins}
|
2018-07-20 14:49:45 +00:00
|
|
|
|
|
|
|
|
2022-05-24 18:28:48 +00:00
|
|
|
# ====== wallet info ======
|
|
|
|
|
|
|
|
# Wallet-name -> URL entries merged into coins by `wallet_info_single`.
WALLET_SUITE = {"Trezor Suite": "https://suite.trezor.io"}
WALLET_NEM = {"Nano Wallet": "https://nemplatform.com/wallets/#desktop"}
# Third-party wallets used for ETH/ERC20 coins without Suite support.
WALLETS_ETH_3RDPARTY = {
    "MyEtherWallet": "https://www.myetherwallet.com",
    "MyCrypto": "https://mycrypto.com",
}
|
|
|
|
|
|
|
|
|
|
|
|
def get_wallet_data() -> WalletInfo:
    """Read the raw wallet overrides from `wallets.json`."""
    data: WalletInfo = load_json("wallets.json")
    return data
|
|
|
|
|
|
|
|
|
|
|
|
def _suite_support(coin: Coin, support: SupportInfoItem) -> bool:
|
|
|
|
"""Check the "suite" support property.
|
|
|
|
If set, check that at least one of the backends run on trezor.io.
|
|
|
|
If yes, assume we support the coin in our wallet.
|
|
|
|
Otherwise it's probably working with a custom backend, which means don't
|
|
|
|
link to our wallet.
|
|
|
|
"""
|
|
|
|
if not support["suite"]:
|
|
|
|
return False
|
|
|
|
return any(".trezor.io" in url for url in coin["blockbook"])
|
|
|
|
|
|
|
|
|
|
|
|
def wallet_info_single(
    support_data: SupportInfo,
    eth_networks_support_data: SupportInfo,
    wallet_data: WalletInfo,
    coin: Coin,
) -> WalletItems:
    """Assemble the dict of all wallets applicable to one coin."""
    key = coin["key"]
    collected: WalletItems = {}

    # Start with wallets listed on the coin itself
    # (usually not there, only for the `misc` category)
    collected.update(coin.get("wallet", {}))

    # Each coin category has different further logic
    if key.startswith("bitcoin:"):
        if _suite_support(coin, support_data[key]):
            collected.update(WALLET_SUITE)
    elif key.startswith("eth:"):
        if support_data[key]["suite"]:
            collected.update(WALLET_SUITE)
        else:
            collected.update(WALLETS_ETH_3RDPARTY)
    elif key.startswith("erc20:"):
        # Tokens take suite-support from their parent chain.
        if eth_networks_support_data[coin["chain"]]["suite"]:
            collected.update(WALLET_SUITE)
        else:
            collected.update(WALLETS_ETH_3RDPARTY)
    elif key.startswith("nem:"):
        collected.update(WALLET_NEM)
    elif key.startswith("misc:"):
        pass  # no special logic here
    else:
        raise ValueError(f"Unknown coin category: {key}")

    # Entries from `wallets.json` come last so they can override anything above
    # (for example with `"MyEtherWallet": null` we delete the MyEtherWallet from the coin)
    collected.update(wallet_data.get(key, {}))

    # Drop wallets that were disabled (falsy URL) by the previous step
    return {name: url for name, url in collected.items() if url}
|
|
|
|
|
|
|
|
|
|
|
|
def wallet_info(coins: Iterable[Coin] | CoinsInfo | dict[str, Coin]) -> WalletInfo:
    """Generate Trezor wallet information.

    Takes a collection of coins and generates a WalletItems entry for each.
    The WalletItems is a dict with keys being the names of the wallets and
    values being the URLs to those - same format as in `wallets.json`.

    The `coins` argument can be a `CoinsInfo` object, a list or a dict of
    coin items.

    Wallet information is taken from `wallets.json`.
    """
    if isinstance(coins, CoinsInfo):
        coins = coins.as_list()
    elif isinstance(coins, dict):
        coins = list(coins.values())
    else:
        # `coins` is iterated three times below (support_info, the eth
        # network scan, and the final loop). Materialize it so that a
        # one-shot iterable (e.g. a generator) is not silently exhausted
        # after the first pass, which would yield an empty result.
        coins = list(coins)

    support_data = support_info(coins)
    wallet_data = get_wallet_data()

    # Needed to find out suitable wallets for all the erc20 coins (Suite vs 3rd party)
    eth_networks = [coin for coin in coins if coin["key"].startswith("eth:")]
    eth_networks_support_data = {
        n["chain"]: support_data[n["key"]] for n in eth_networks
    }

    wallet: WalletInfo = {}
    for coin in coins:
        wallet[coin["key"]] = wallet_info_single(
            support_data, eth_networks_support_data, wallet_data, coin
        )

    return wallet
|
|
|
|
|
|
|
|
|
2018-07-20 14:49:45 +00:00
|
|
|
# ====== data cleanup functions ======
|
|
|
|
|
|
|
|
|
2022-05-23 14:19:19 +00:00
|
|
|
def _ensure_mandatory_values(coins: Coins) -> None:
|
2018-07-23 14:33:00 +00:00
|
|
|
"""Checks that every coin has the mandatory fields: name, shortcut, key"""
|
2018-07-20 14:49:45 +00:00
|
|
|
for coin in coins:
|
|
|
|
if not all(coin.get(k) for k in ("name", "shortcut", "key")):
|
|
|
|
raise ValueError(coin)
|
|
|
|
|
|
|
|
|
2022-05-23 14:19:19 +00:00
|
|
|
def symbol_from_shortcut(shortcut: str) -> tuple[str, str]:
    """Split a shortcut into (symbol, suffix) at the first space.

    E.g. "BTL (Battle)" -> ("BTL", "(Battle)"); a plain symbol gets an
    empty suffix.
    """
    symbol, _, suffix = shortcut.partition(" ")
    return symbol, suffix
|
|
|
|
|
|
|
|
|
2022-05-23 14:19:19 +00:00
|
|
|
def mark_duplicate_shortcuts(coins: Coins) -> CoinBuckets:
    """Find coins with identical symbols and set their `duplicate` field.

    "Symbol" here means the first part of `shortcut` (separated by space),
    so, e.g., "BTL (Battle)" and "BTL (Bitlle)" have the same symbol "BTL".

    Returns a dict of _buckets_ indexed by the duplicated symbol; every coin
    in a returned bucket has its "duplicate" property set to True. Explicit
    exceptions are handled later by `apply_duplicity_overrides`.
    """
    by_symbol: CoinBuckets = defaultdict(list)
    for coin in coins:
        symbol = symbol_from_shortcut(coin["shortcut"].lower())[0]
        by_symbol[symbol].append(coin)

    # Keep only symbols shared by more than one coin.
    duplicates = {symbol: bucket for symbol, bucket in by_symbol.items() if len(bucket) > 1}

    # mark duplicate symbols
    for bucket in duplicates.values():
        for coin in bucket:
            coin["duplicate"] = True

    return duplicates
|
|
|
|
|
2018-08-15 17:22:29 +00:00
|
|
|
|
2022-05-23 14:19:19 +00:00
|
|
|
def apply_duplicity_overrides(coins: Coins) -> Coins:
    """Apply `duplicity_overrides.json` to the coins' `duplicate` flags.

    Returns the list of coins that were explicitly forced to duplicate
    status (override value `true`).
    """
    overrides = load_json("duplicity_overrides.json")
    forced_duplicates: Coins = []
    for coin in coins:
        override_value = overrides.get(coin["key"])
        if override_value is True:
            forced_duplicates.append(coin)
        # Any explicit override (true or false) replaces the computed flag.
        if override_value is not None:
            coin["duplicate"] = override_value

    return forced_duplicates
|
2018-07-20 14:49:45 +00:00
|
|
|
|
|
|
|
|
2022-05-23 14:19:19 +00:00
|
|
|
def deduplicate_erc20(buckets: CoinBuckets, networks: Coins) -> None:
    """Apply further processing to ERC20 duplicate buckets.

    This function works on results of `mark_duplicate_shortcuts`.

    Buckets that contain at least one non-token are ignored - symbol collisions
    with non-tokens always apply.

    Otherwise the following rules are applied:

    1. If _all tokens_ in the bucket have shortcuts with distinct suffixes, e.g.,
    `CAT (BitClave)` and `CAT (Blockcat)`, the bucket is cleared - all are considered
    non-duplicate.

    (If even one token in the bucket _does not_ have a distinct suffix, e.g.,
    `MIT` and `MIT (Mychatcoin)`, this rule does not apply and ALL tokens in the bucket
    are still considered duplicate.)

    2. If there is only one "main" token in the bucket, the bucket is cleared.
    That means that all other tokens must either be on testnets, or they must be marked
    as deprecated, with a deprecation pointing to the "main" token.
    """

    # chains whose name marks them as testnets
    testnet_networks = {n["chain"] for n in networks if "Testnet" in n["name"]}

    def clear_bucket(bucket: Coins) -> None:
        # allow all coins, except those that are explicitly marked through overrides
        for coin in bucket:
            coin["duplicate"] = False

    for bucket in buckets.values():
        # Only check buckets that contain purely ERC20 tokens. Collision with
        # a non-token is always forbidden.
        if not all(is_token(c) for c in bucket):
            continue

        # rule 1: distinct, non-empty suffixes for every token
        splits = (symbol_from_shortcut(coin["shortcut"]) for coin in bucket)
        suffixes = {suffix for _, suffix in splits}
        # if 1. all suffixes are distinct and 2. none of them are empty
        if len(suffixes) == len(bucket) and all(suffixes):
            clear_bucket(bucket)
            continue

        # protected categories:
        testnets = [coin for coin in bucket if coin["chain"] in testnet_networks]
        # tokens deprecated in favor of another token in the same bucket
        deprecated_by_same = [
            coin
            for coin in bucket
            if "deprecation" in coin
            and any(
                other["address"] == coin["deprecation"]["new_address"]
                for other in bucket
            )
        ]
        # rule 2: at most one "main" token may remain outside the
        # protected categories
        remaining = [
            coin
            for coin in bucket
            if coin not in testnets and coin not in deprecated_by_same
        ]
        if len(remaining) <= 1:
            # rename deprecated tokens out of the way and clear the bucket
            for coin in deprecated_by_same:
                deprecated_symbol = "[deprecated] " + coin["symbol"]
                coin["shortcut"] = coin["symbol"] = deprecated_symbol
                coin["key"] += ":deprecated"
            clear_bucket(bucket)
|
|
|
|
|
|
|
|
|
2022-05-23 14:19:19 +00:00
|
|
|
def deduplicate_keys(all_coins: Coins) -> None:
    """Make every coin key unique by appending a disambiguating suffix.

    Coins sharing a key are grouped; within each group of two or more,
    tokens get the first 4 hex chars of their contract address, networks
    get their chain_id, and anything else gets its index in the group plus
    a "dup_key_nontoken" marker.
    """
    grouped: CoinBuckets = defaultdict(list)
    for coin in all_coins:
        grouped[coin["key"]].append(coin)

    for group in grouped.values():
        if len(group) < 2:
            # key is already unique
            continue
        for index, coin in enumerate(group):
            if is_token(coin):
                # first 4 hex chars of the contract address (past the "0x")
                coin["key"] += ":" + coin["address"][2:6].lower()
            elif "chain_id" in coin:
                coin["key"] += ":" + str(coin["chain_id"])
            else:
                # no natural discriminator - fall back to position in group
                coin["key"] += f":{index}"
                coin["dup_key_nontoken"] = True
|
|
|
|
|
|
|
|
|
2022-05-23 14:19:19 +00:00
|
|
|
def fill_blockchain_links(all_coins: CoinsInfo) -> None:
    """Attach blockchain_link info from blockchain_link.json to every coin.

    Each coin gets a "blockchain_link" entry (or None when unknown) and a
    "blockbook" entry - the link's url(s) for blockbook-type links, an
    empty list otherwise.
    """
    links = load_json("blockchain_link.json")
    for coin_group in all_coins.values():
        for coin in coin_group:
            entry = links.get(coin["key"])
            coin["blockchain_link"] = entry
            if entry and entry["type"] == "blockbook":
                coin["blockbook"] = entry["url"]
            else:
                coin["blockbook"] = []
|
|
|
|
|
|
|
|
|
2022-05-23 14:19:19 +00:00
|
|
|
def _btc_sort_key(coin: Coin) -> str:
|
2019-08-21 09:05:19 +00:00
|
|
|
if coin["name"] in ("Bitcoin", "Testnet", "Regtest"):
|
2018-07-20 14:49:45 +00:00
|
|
|
return "000000" + coin["name"]
|
|
|
|
else:
|
|
|
|
return coin["name"]
|
|
|
|
|
|
|
|
|
2022-05-23 14:19:19 +00:00
|
|
|
def collect_coin_info() -> CoinsInfo:
    """Return all definitions as a dict organized by coin type.

    `coins` for btc-like coins,
    `eth` for ethereum networks,
    `erc20` for ERC20 tokens,
    `nem` for NEM mosaics,
    `misc` for other networks.
    """
    loaded = {
        "bitcoin": _load_btc_coins(),
        "eth": _load_ethereum_networks(),
        "erc20": _load_erc20_tokens(),
        "nem": _load_nem_mosaics(),
        "misc": _load_misc(),
    }
    all_coins = CoinsInfo(**loaded)

    # every group must carry the mandatory fields before further processing
    for coin_group in all_coins.values():
        _ensure_mandatory_values(coin_group)

    fill_blockchain_links(all_coins)

    return all_coins
|
|
|
|
|
|
|
|
|
2022-05-23 14:19:19 +00:00
|
|
|
def sort_coin_infos(all_coins: CoinsInfo) -> None:
    """Sort each coin group in place, using a per-group ordering."""
    for group_name, coins in all_coins.items():
        if group_name == "nem":
            # NEM mosaics keep their definition order
            continue
        if group_name == "bitcoin":
            coins.sort(key=_btc_sort_key)
        elif group_name == "eth":
            # ethereum networks are ordered by chain_id
            coins.sort(key=lambda c: c["chain_id"])
        else:
            # everything else sorts case-insensitively by key
            coins.sort(key=lambda c: c["key"].upper())
|
|
|
|
|
2018-08-24 13:20:25 +00:00
|
|
|
|
2022-05-23 14:19:19 +00:00
|
|
|
def coin_info_with_duplicates() -> tuple[CoinsInfo, CoinBuckets]:
    """Collects coin info, detects duplicates but does not remove them.

    Returns the CoinsInfo object and duplicate buckets.
    """
    all_coins = collect_coin_info()
    coin_list = all_coins.as_list()
    # generate duplicity buckets based on shortcuts
    # (reuse coin_list instead of calling all_coins.as_list() a second time)
    buckets = mark_duplicate_shortcuts(coin_list)
    # apply further processing to ERC20 tokens, generate deprecations etc.
    deduplicate_erc20(buckets, all_coins.eth)
    # ensure the whole list has unique keys (taking into account changes from deduplicate_erc20)
    deduplicate_keys(coin_list)
    # apply duplicity overrides
    buckets["_override"] = apply_duplicity_overrides(coin_list)
    sort_coin_infos(all_coins)

    return all_coins, buckets
|
|
|
|
|
2018-07-20 14:49:45 +00:00
|
|
|
|
2022-05-23 14:19:19 +00:00
|
|
|
def coin_info() -> CoinsInfo:
    """Collects coin info, fills out support info and returns the result.

    Does not auto-delete duplicates. This should now be based on support info.
    """
    all_coins, _buckets = coin_info_with_duplicates()
    # duplicate ERC20 tokens used to be filtered out here:
    # all_coins["erc20"] = [
    #     coin for coin in all_coins["erc20"] if not coin.get("duplicate")
    # ]
    return all_coins
|
2018-08-23 11:05:41 +00:00
|
|
|
|
|
|
|
|
2022-05-23 14:19:19 +00:00
|
|
|
def fido_info() -> FidoApps:
    """Returns info about known FIDO/U2F apps."""
    apps = _load_fido_apps()
    return apps
|
|
|
|
|
|
|
|
|
2022-05-23 14:19:19 +00:00
|
|
|
def search(coins: CoinsInfo | Coins, keyword: str) -> Iterator[Any]:
    """Yield coins matching *keyword*, case-insensitively.

    A coin matches when the keyword equals its key, shortcut or symbol,
    or appears as a substring of its name or its shortcut suffix.
    """
    needle = keyword.lower()
    if isinstance(coins, CoinsInfo):
        coins = coins.as_list()

    def matches(coin: Any) -> bool:
        shortcut = coin["shortcut"].lower()
        symbol, suffix = symbol_from_shortcut(shortcut)
        if needle in (coin["key"].lower(), shortcut, symbol):
            # exact match on key, shortcut, or symbol
            return True
        return needle in coin["name"].lower() or needle in suffix

    for coin in coins:
        if matches(coin):
            yield coin