2018-07-20 14:49:45 +00:00
|
|
|
#!/usr/bin/env python3
|
2019-04-18 14:27:27 +00:00
|
|
|
import json
|
2018-07-20 14:49:45 +00:00
|
|
|
import logging
|
2019-04-18 14:27:27 +00:00
|
|
|
import os
|
|
|
|
import re
|
|
|
|
from collections import OrderedDict, defaultdict
|
2020-11-11 10:25:33 +00:00
|
|
|
from pathlib import Path
|
2018-07-20 14:49:45 +00:00
|
|
|
|
|
|
|
# `requests` is optional: it is only needed by `latest_releases`, which
# raises a RuntimeError when the library is unavailable.
try:
    import requests
except ImportError:
    requests = None


log = logging.getLogger(__name__)
|
|
|
|
|
2020-11-11 10:25:33 +00:00
|
|
|
# Repository root: one level above the directory containing this file.
ROOT = (Path(__file__).parent / "..").resolve()

# Directory with coin definitions. Can be overridden via the DEFS_DIR
# environment variable; defaults to <repo root>/defs.
if os.environ.get("DEFS_DIR"):
    DEFS_DIR = Path(os.environ.get("DEFS_DIR")).resolve()
else:
    DEFS_DIR = ROOT / "defs"
|
2018-07-20 14:49:45 +00:00
|
|
|
|
|
|
|
|
|
|
|
def load_json(*path):
    """Convenience function to load a JSON file from DEFS_DIR.

    Accepts either a single `Path` (used as-is) or any number of path
    segments, which are joined relative to DEFS_DIR. Key order of JSON
    objects is preserved via OrderedDict.
    """
    if len(path) == 1 and isinstance(path[0], Path):
        json_file = path[0]
    else:
        json_file = Path(DEFS_DIR, *path)

    contents = json_file.read_text()
    return json.loads(contents, object_pairs_hook=OrderedDict)
|
2018-07-20 14:49:45 +00:00
|
|
|
|
|
|
|
|
2018-07-30 12:25:53 +00:00
|
|
|
# ====== CoinsInfo ======
|
|
|
|
|
|
|
|
|
|
|
|
class CoinsInfo(dict):
    """Collection of information about all known kinds of coins.

    It contains the following lists:
    `bitcoin` for btc-like coins,
    `eth` for ethereum networks,
    `erc20` for ERC20 tokens,
    `nem` for NEM mosaics,
    `misc` for other networks.

    Accessible as a dict or by attribute: `info["misc"] == info.misc`
    """

    def as_list(self):
        """Flatten all categories into a single list of coins."""
        flat = []
        for category in self.values():
            flat.extend(category)
        return flat

    def as_dict(self):
        """Return all coins as a single dict indexed by coin key."""
        return dict((coin["key"], coin) for coin in self.as_list())

    def __getattr__(self, attr):
        # attribute access falls back to dict lookup
        try:
            return self[attr]
        except KeyError:
            raise AttributeError(attr)
|
|
|
|
|
|
|
|
|
2018-07-20 14:49:45 +00:00
|
|
|
# ====== coin validation ======
|
|
|
|
|
|
|
|
|
|
|
|
def check_type(val, types, nullable=False, empty=False, regex=None, choice=None):
    """Validate a single value.

    Checks, in order:
    * `None` is allowed only when `nullable` is set,
    * `val` must be an instance of `types`,
    * empty strings / collections are rejected unless `empty` is set,
    * string values may be required to match `regex`,
    * the value may be restricted to one of `choice`.

    Raises TypeError or ValueError on the first failed check.
    """
    # nullability
    if val is None:
        if not nullable:
            raise ValueError("Missing required value")
        return

    # type check
    if not isinstance(val, types):
        raise TypeError("Wrong type (expected: {})".format(types))

    # emptiness
    if not empty:
        if isinstance(val, (list, dict)) and not val:
            raise ValueError("Empty collection")
        if isinstance(val, str) and not val:
            raise ValueError("Empty string")

    # regex match (meaningful for strings only)
    if regex is not None:
        if types is not str:
            raise TypeError("Wrong type for regex check")
        if not re.search(regex, val):
            raise ValueError("Value does not match regex {}".format(regex))

    # choice restriction
    if choice is not None and val not in choice:
        choice_str = ", ".join(choice)
        raise ValueError("Value not allowed, use one of: {}".format(choice_str))
|
2018-07-20 14:49:45 +00:00
|
|
|
|
|
|
|
|
2018-07-24 11:54:03 +00:00
|
|
|
def check_key(key, types, optional=False, **kwargs):
    """Build a checker function that validates `coin[key]` via `check_type`.

    The returned callable raises KeyError when a non-optional key is missing
    and re-raises validation failures as ValueError annotated with the key.
    """

    def do_check(coin):
        if key not in coin:
            if not optional:
                raise KeyError("{}: Missing key".format(key))
            return
        try:
            check_type(coin[key], types, **kwargs)
        except Exception as e:
            raise ValueError("{}: {}".format(key, e)) from e

    return do_check
|
|
|
|
|
|
|
|
|
2018-07-23 14:33:00 +00:00
|
|
|
# Validators applied to every btc-like coin definition by `validate_btc`.
# Each entry is a callable produced by `check_key` that raises on bad data.
BTC_CHECKS = [
    check_key("coin_name", str, regex=r"^[A-Z]"),
    check_key("coin_shortcut", str, regex=r"^t?[A-Z]{3,}$"),
    check_key("coin_label", str, regex=r"^[A-Z]"),
    check_key("website", str, regex=r"^https://.*[^/]$"),
    check_key("github", str, regex=r"^https://git(hu|la)b.com/.*[^/]$"),
    check_key("maintainer", str),
    check_key(
        "curve_name",
        str,
        choice=[
            "secp256k1",
            "secp256k1_decred",
            "secp256k1_groestl",
            "secp256k1_smart",
        ],
    ),
    check_key("address_type", int),
    check_key("address_type_p2sh", int),
    check_key("maxfee_kb", int),
    check_key("minfee_kb", int),
    check_key("hash_genesis_block", str, regex=r"^[0-9a-f]{64}$"),
    check_key("xprv_magic", int),
    check_key("xpub_magic", int),
    check_key("xpub_magic_segwit_p2sh", int, nullable=True),
    check_key("xpub_magic_segwit_native", int, nullable=True),
    check_key("slip44", int),
    check_key("segwit", bool),
    check_key("decred", bool),
    check_key("fork_id", int, nullable=True),
    check_key("force_bip143", bool),
    check_key("default_fee_b", dict),
    check_key("dust_limit", int),
    check_key("blocktime_seconds", int),
    check_key("signed_message_header", str),
    check_key("uri_prefix", str, regex=r"^[a-z-\.\+]+$"),
    check_key("min_address_length", int),
    check_key("max_address_length", int),
    check_key("bech32_prefix", str, regex=r"^[a-z-\.\+]+$", nullable=True),
    check_key("cashaddr_prefix", str, regex=r"^[a-z-\.\+]+$", nullable=True),
]
|
|
|
|
|
|
|
|
|
2018-07-23 14:33:00 +00:00
|
|
|
def validate_btc(coin):
    """Run all btc-like validation rules on `coin`.

    Returns a list of error messages; an empty list means the coin is valid.
    """
    errors = []
    for check in BTC_CHECKS:
        try:
            check(coin)
        except Exception as e:
            errors.append(str(e))

    # each of the xpub/xprv magic numbers must be unique
    # therefore length of list == length of set of unique values
    magic_keys = (
        "xprv_magic",
        "xpub_magic",
        "xpub_magic_segwit_p2sh",
        "xpub_magic_segwit_native",
    )
    magics = [coin[k] for k in magic_keys if coin[k] is not None]
    if len(set(magics)) != len(magics):
        errors.append("XPUB/XPRV magic numbers must be unique")

    if coin["address_type"] == coin["address_type_p2sh"]:
        errors.append("address_type must be distinct from address_type_p2sh")

    if coin["maxfee_kb"] < coin["minfee_kb"]:
        errors.append("max fee must not be smaller than min fee")

    if coin["max_address_length"] < coin["min_address_length"]:
        errors.append("max address length must not be smaller than min address length")

    if "testnet" in coin["coin_name"].lower() and coin["slip44"] != 1:
        errors.append("testnet coins must use slip44 coin type 1")

    # segwit-enabled coins must define the segwit fields, disabled ones must not
    if coin["segwit"]:
        if coin["bech32_prefix"] is None:
            errors.append("bech32_prefix must be defined for segwit-enabled coin")
        if coin["xpub_magic_segwit_p2sh"] is None:
            errors.append(
                "xpub_magic_segwit_p2sh must be defined for segwit-enabled coin"
            )
    else:
        if coin["bech32_prefix"] is not None:
            errors.append("bech32_prefix must not be defined for segwit-disabled coin")
        if coin["xpub_magic_segwit_p2sh"] is not None:
            errors.append(
                "xpub_magic_segwit_p2sh must not be defined for segwit-disabled coin"
            )

    return errors
|
|
|
|
|
|
|
|
|
|
|
|
# ======= Coin json loaders =======
|
|
|
|
|
|
|
|
|
2018-07-23 14:33:00 +00:00
|
|
|
def _load_btc_coins():
    """Load btc-like coins from `bitcoin/*.json`"""
    coins = []
    for json_file in DEFS_DIR.glob("bitcoin/*.json"):
        coin = load_json(json_file)
        # fill in the fields shared by all coin kinds; the icon is the PNG
        # file sitting next to the JSON definition
        coin.update(
            name=coin["coin_label"],
            shortcut=coin["coin_shortcut"],
            key="bitcoin:{}".format(coin["coin_shortcut"]),
            icon=str(json_file.with_suffix(".png")),
        )
        coins.append(coin)

    return coins
|
|
|
|
|
|
|
|
|
2018-07-23 14:33:00 +00:00
|
|
|
def _load_ethereum_networks():
    """Load ethereum networks from the `ethereum/chains` submodule.

    Reads the per-chain `eip155-*.json` definitions under
    `ethereum/chains/_data/chains` (ordered by chain id) and converts them
    to our network format.
    """
    chains_path = DEFS_DIR / "ethereum" / "chains" / "_data" / "chains"
    networks = []
    for chain in sorted(
        chains_path.glob("eip155-*.json"),
        key=lambda x: int(x.stem.replace("eip155-", "")),
    ):
        chain_data = load_json(chain)
        shortcut = chain_data["nativeCurrency"]["symbol"]
        name = chain_data["name"]
        is_testnet = "testnet" in name.lower()
        # testnets must use SLIP-44 coin type 1; otherwise default to 60 (ETH)
        if is_testnet:
            slip44 = 1
        else:
            slip44 = chain_data.get("slip44", 60)

        # testnet symbols are conventionally prefixed with "t"
        if is_testnet and not shortcut.lower().startswith("t"):
            shortcut = "t" + shortcut

        # RSK chains use the RSKIP-60 address checksum scheme
        rskip60 = shortcut in ("RBTC", "TRBTC")

        # strip out bullcrap in network naming
        if "mainnet" in name.lower():
            name = re.sub(r" mainnet.*$", "", name, flags=re.IGNORECASE)

        network = dict(
            chain=chain_data["shortName"],
            chain_id=chain_data["chainId"],
            slip44=slip44,
            shortcut=shortcut,
            name=name,
            rskip60=rskip60,
            url=chain_data["infoURL"],
            key=f"eth:{shortcut}",
        )
        networks.append(network)

    return networks
|
|
|
|
|
|
|
|
|
2018-07-23 14:33:00 +00:00
|
|
|
def _load_erc20_tokens():
    """Load ERC20 tokens from `ethereum/tokens` submodule."""
    tokens = []
    for network in _load_ethereum_networks():
        chain = network["chain"]
        chain_path = DEFS_DIR / "ethereum" / "tokens" / "tokens" / chain
        for token_file in sorted(chain_path.glob("*.json")):
            token = load_json(token_file)
            # annotate the raw token definition with network information
            token.update(
                chain=chain,
                chain_id=network["chain_id"],
                address_bytes=bytes.fromhex(token["address"][2:]),
                shortcut=token["symbol"],
                key="erc20:{}:{}".format(chain, token["symbol"]),
            )
            tokens.append(token)

    return tokens
|
|
|
|
|
|
|
|
|
2018-07-23 14:33:00 +00:00
|
|
|
def _load_nem_mosaics():
    """Loads NEM mosaics from `nem/nem_mosaics.json`"""
    mosaics = load_json("nem/nem_mosaics.json")
    for mosaic in mosaics:
        # the ticker may contain stray whitespace in the source data
        ticker = mosaic["ticker"].strip()
        mosaic.update(shortcut=ticker, key="nem:{}".format(ticker))
    return mosaics
|
|
|
|
|
|
|
|
|
2018-07-23 14:33:00 +00:00
|
|
|
def _load_misc():
    """Loads miscellaneous networks from `misc/misc.json`"""
    networks = load_json("misc/misc.json")
    for network in networks:
        network.update(key="misc:{}".format(network["shortcut"]))
    return networks
|
|
|
|
|
|
|
|
|
2019-11-26 17:11:09 +00:00
|
|
|
def _load_fido_apps():
    """Load FIDO apps from `fido/*.json`"""
    apps = []
    for app_file in sorted(DEFS_DIR.glob("fido/*.json")):
        app = load_json(app_file)
        # fill in defaults for optional fields
        app.setdefault("use_sign_count", None)
        app.setdefault("use_self_attestation", None)
        app.setdefault("u2f", [])
        app.setdefault("webauthn", [])

        # the icon, if present, sits next to the JSON definition
        icon_file = app_file.with_suffix(".png")
        icon_path = str(icon_file) if icon_file.exists() else None

        app.update(key=app_file.stem.lower(), icon=icon_path)
        apps.append(app)

    return apps
|
|
|
|
|
|
|
|
|
2018-07-20 14:49:45 +00:00
|
|
|
# ====== support info ======
|
|
|
|
|
2020-09-02 13:05:26 +00:00
|
|
|
# URL template for released firmware metadata; {} is the device major version.
RELEASES_URL = "https://data.trezor.io/firmware/{}/releases.json"
# For these support.json keys a missing entry means "not supported"
# (see `support_info_single`).
MISSING_SUPPORT_MEANS_NO = ("connect", "suite")
# These support.json keys carry versioned support information.
VERSIONED_SUPPORT_INFO = ("trezor1", "trezor2")
|
2018-07-20 14:49:45 +00:00
|
|
|
|
|
|
|
|
2018-07-24 11:54:03 +00:00
|
|
|
def get_support_data():
    """Get raw support data from `support.json`.

    Returns the parsed JSON content; see `support_info_single` for how the
    per-device structure is interpreted.
    """
    return load_json("support.json")
|
|
|
|
|
|
|
|
|
2018-07-20 14:49:45 +00:00
|
|
|
def latest_releases():
    """Get latest released firmware versions for Trezor 1 and 2.

    Fetches the release metadata over HTTP and returns a dict mapping
    "trezor1"/"trezor2" to the highest version tuple found.

    Raises RuntimeError when the optional `requests` library is missing.
    """
    if not requests:
        raise RuntimeError("requests library is required for getting release info")

    latest = {}
    for v in ("1", "2"):
        # a timeout prevents hanging indefinitely on a stalled connection
        releases = requests.get(RELEASES_URL.format(v), timeout=30).json()
        latest["trezor" + v] = max(tuple(r["version"]) for r in releases)
    return latest
|
|
|
|
|
|
|
|
|
2018-08-23 11:05:41 +00:00
|
|
|
def is_token(coin):
    """Return True when `coin` is an ERC20 token (key starts with "erc20:")."""
    key = coin["key"]
    return key.startswith("erc20:")
|
|
|
|
|
|
|
|
|
2018-08-15 17:22:29 +00:00
|
|
|
def support_info_single(support_data, coin):
    """Extract a support dict from `support.json` data.

    Returns a dict of support values for each "device", i.e., `support.json`
    top-level key.

    The support value for each device is determined in order of priority:
    * if the coin has an entry in `unsupported`, its support is `False`
    * if the coin has an entry in `supported` its support is that entry
      (usually a version string, or `True` for connect/suite)
    * missing entries for devices in MISSING_SUPPORT_MEANS_NO mean `False`
    * unlisted duplicate ERC20 tokens are unsupported (`False`); other
      tokens are presumed "soon"
    * otherwise support is `None` (unknown)
    """
    key = coin["key"]
    is_duplicate = coin.get("duplicate")
    support_info = {}
    for device, values in support_data.items():
        if key in values["unsupported"]:
            support_value = False
        elif key in values["supported"]:
            support_value = values["supported"][key]
        elif device in MISSING_SUPPORT_MEANS_NO:
            support_value = False
        elif is_token(coin):
            # duplicate token that is not explicitly listed is unsupported;
            # otherwise implicitly supported in next release
            support_value = False if is_duplicate else "soon"
        else:
            support_value = None
        support_info[device] = support_value

    return support_info
|
2018-07-20 14:49:45 +00:00
|
|
|
|
|
|
|
|
2018-08-15 17:22:29 +00:00
|
|
|
def support_info(coins):
    """Generate Trezor support information.

    Takes a collection of coins and generates a support-info entry for each.
    The support-info is a dict with keys based on `support.json` keys.
    These are usually: "trezor1", "trezor2", "connect" and "suite".

    The `coins` argument can be a `CoinsInfo` object, a list or a dict of
    coin items.

    Support information is taken from `support.json`.
    """
    if isinstance(coins, CoinsInfo):
        coins = coins.as_list()
    elif isinstance(coins, dict):
        coins = coins.values()

    support_data = get_support_data()
    return {
        coin["key"]: support_info_single(support_data, coin) for coin in coins
    }
|
2018-07-20 14:49:45 +00:00
|
|
|
|
|
|
|
|
|
|
|
# ====== data cleanup functions ======
|
|
|
|
|
|
|
|
|
2018-07-23 14:33:00 +00:00
|
|
|
def _ensure_mandatory_values(coins):
|
|
|
|
"""Checks that every coin has the mandatory fields: name, shortcut, key"""
|
2018-07-20 14:49:45 +00:00
|
|
|
for coin in coins:
|
|
|
|
if not all(coin.get(k) for k in ("name", "shortcut", "key")):
|
|
|
|
raise ValueError(coin)
|
|
|
|
|
|
|
|
|
2018-08-23 11:05:41 +00:00
|
|
|
def symbol_from_shortcut(shortcut):
    """Split a shortcut into (symbol, suffix) at the first space.

    E.g., "BTL (Battle)" -> ("BTL", "(Battle)"). The suffix is "" when the
    shortcut has no space.
    """
    symbol, _, suffix = shortcut.partition(" ")
    return symbol, suffix
|
|
|
|
|
|
|
|
|
2018-08-15 17:22:29 +00:00
|
|
|
def mark_duplicate_shortcuts(coins):
    """Finds coins with identical symbols and sets their `duplicate` field.

    "Symbol" here means the first part of `shortcut` (separated by space),
    so, e.g., "BTL (Battle)" and "BTL (Bitlle)" have the same symbol "BTL".

    The result of this function is a dictionary of _buckets_, each of which is
    indexed by the duplicated symbol, or `_override`. The `_override` bucket will
    contain all coins that are set to `true` in `duplicity_overrides.json`.

    Each coin in every bucket will have its "duplicate" property set to True, unless
    it's explicitly marked as `false` in `duplicity_overrides.json`.
    """
    by_symbol = defaultdict(list)
    for coin in coins:
        # compare symbols case-insensitively
        symbol, _ = symbol_from_shortcut(coin["shortcut"].lower())
        by_symbol[symbol].append(coin)

    # keep only symbols that actually collide, and mark every member
    buckets = {
        symbol: bucket for symbol, bucket in by_symbol.items() if len(bucket) > 1
    }
    for bucket in buckets.values():
        for coin in bucket:
            coin["duplicate"] = True

    return buckets
|
|
|
|
|
2018-08-15 17:22:29 +00:00
|
|
|
|
2021-02-25 15:11:23 +00:00
|
|
|
def apply_duplicity_overrides(coins):
    """Apply `duplicity_overrides.json` to the coins' `duplicate` fields.

    Returns the list of coins whose override is explicitly `true`
    (the `_override` bucket).
    """
    overrides = load_json("duplicity_overrides.json")
    override_bucket = []
    for coin in coins:
        value = overrides.get(coin["key"])
        if value is True:
            override_bucket.append(coin)
        # an explicit true/false override wins over detected duplicity
        if value is not None:
            coin["duplicate"] = value

    return override_bucket
|
2018-07-20 14:49:45 +00:00
|
|
|
|
|
|
|
|
2018-11-29 14:42:49 +00:00
|
|
|
def deduplicate_erc20(buckets, networks):
    """Apply further processing to ERC20 duplicate buckets.

    This function works on results of `mark_duplicate_shortcuts`.

    Buckets that contain at least one non-token are ignored - symbol collisions
    with non-tokens always apply.

    Otherwise the following rules are applied:

    1. If _all tokens_ in the bucket have shortcuts with distinct suffixes, e.g.,
    `CAT (BitClave)` and `CAT (Blockcat)`, the bucket is cleared - all are considered
    non-duplicate.

    (If even one token in the bucket _does not_ have a distinct suffix, e.g.,
    `MIT` and `MIT (Mychatcoin)`, this rule does not apply and ALL tokens in the bucket
    are still considered duplicate.)

    2. If there is only one "main" token in the bucket, the bucket is cleared.
    That means that all other tokens must either be on testnets, or they must be marked
    as deprecated, with a deprecation pointing to the "main" token.
    """

    # chains whose network name contains "Testnet"
    testnet_networks = {n["chain"] for n in networks if "Testnet" in n["name"]}

    def clear_bucket(bucket):
        # allow all coins, except those that are explicitly marked through overrides
        for coin in bucket:
            coin["duplicate"] = False

    for bucket in buckets.values():
        # Only check buckets that contain purely ERC20 tokens. Collision with
        # a non-token is always forbidden.
        if not all(is_token(c) for c in bucket):
            continue

        splits = (symbol_from_shortcut(coin["shortcut"]) for coin in bucket)
        suffixes = {suffix for _, suffix in splits}
        # if 1. all suffixes are distinct and 2. none of them are empty
        if len(suffixes) == len(bucket) and all(suffixes):
            clear_bucket(bucket)
            continue

        # protected categories:
        # tokens living on a testnet chain
        testnets = [coin for coin in bucket if coin["chain"] in testnet_networks]
        # tokens whose deprecation entry points to another token in this bucket
        deprecated_by_same = [
            coin
            for coin in bucket
            if "deprecation" in coin
            and any(
                other["address"] == coin["deprecation"]["new_address"]
                for other in bucket
            )
        ]
        remaining = [
            coin
            for coin in bucket
            if coin not in testnets and coin not in deprecated_by_same
        ]
        # rule 2: at most one "main" token left -> the bucket is not a conflict
        if len(remaining) <= 1:
            # rename deprecated tokens so they cannot be confused with the main one
            for coin in deprecated_by_same:
                deprecated_symbol = "[deprecated] " + coin["symbol"]
                coin["shortcut"] = coin["symbol"] = deprecated_symbol
                coin["key"] += ":deprecated"
            clear_bucket(bucket)
|
|
|
|
|
|
|
|
|
|
|
|
def deduplicate_keys(all_coins):
    """Make coin keys unique by appending a disambiguating suffix.

    Tokens get the first 4 hex chars of their address, eth networks their
    chain_id, everything else a positional index. Non-token duplicates are
    additionally flagged with `dup_key_nontoken`.
    """
    by_key = defaultdict(list)
    for coin in all_coins:
        by_key[coin["key"]].append(coin)

    for colliding in by_key.values():
        if len(colliding) <= 1:
            continue
        for i, coin in enumerate(colliding):
            if is_token(coin):
                coin["key"] += ":" + coin["address"][2:6].lower()  # first 4 hex chars
            elif "chain_id" in coin:
                coin["key"] += ":" + str(coin["chain_id"])
            else:
                coin["key"] += ":{}".format(i)
                coin["dup_key_nontoken"] = True
|
|
|
|
|
|
|
|
|
2020-11-11 10:11:20 +00:00
|
|
|
def fill_blockchain_links(all_coins):
    """Attach `blockchain_link`/`blockbook` info from `blockchain_link.json`."""
    blockchain_links = load_json("blockchain_link.json")
    for coins in all_coins.values():
        for coin in coins:
            link = blockchain_links.get(coin["key"])
            coin["blockchain_link"] = link
            # only blockbook-type links expose a backend URL list here
            is_blockbook = bool(link) and link["type"] == "blockbook"
            coin["blockbook"] = link["url"] if is_blockbook else []
|
|
|
|
|
|
|
|
|
2018-07-20 14:49:45 +00:00
|
|
|
def _btc_sort_key(coin):
|
2019-08-21 09:05:19 +00:00
|
|
|
if coin["name"] in ("Bitcoin", "Testnet", "Regtest"):
|
2018-07-20 14:49:45 +00:00
|
|
|
return "000000" + coin["name"]
|
|
|
|
else:
|
|
|
|
return coin["name"]
|
|
|
|
|
|
|
|
|
2018-08-24 13:20:25 +00:00
|
|
|
def collect_coin_info():
    """Returns all definitions as a CoinsInfo dict organized by coin type.

    `bitcoin` for btc-like coins,
    `eth` for ethereum networks,
    `erc20` for ERC20 tokens,
    `nem` for NEM mosaics,
    `misc` for other networks.

    Also validates mandatory fields and attaches blockchain-link info.
    """
    all_coins = CoinsInfo(
        bitcoin=_load_btc_coins(),
        eth=_load_ethereum_networks(),
        erc20=_load_erc20_tokens(),
        nem=_load_nem_mosaics(),
        misc=_load_misc(),
    )

    # every coin must have a name, shortcut and key
    for coins in all_coins.values():
        _ensure_mandatory_values(coins)

    fill_blockchain_links(all_coins)

    return all_coins
|
|
|
|
|
|
|
|
|
|
|
|
def sort_coin_infos(all_coins):
    """Sort each coin category in place by its conventional ordering.

    Bitcoin-likes put Bitcoin variants first, eth networks sort by chain_id,
    NEM mosaics keep their original order, everything else sorts by key.
    """
    for category, coins in all_coins.items():
        if category == "bitcoin":
            coins.sort(key=_btc_sort_key)
        elif category == "nem":
            # do not sort nem
            pass
        elif category == "eth":
            # sort ethereum networks by chain_id
            coins.sort(key=lambda c: c["chain_id"])
        else:
            coins.sort(key=lambda c: c["key"].upper())
|
|
|
|
|
2018-08-24 13:20:25 +00:00
|
|
|
|
|
|
|
def coin_info_with_duplicates():
    """Collects coin info, detects duplicates but does not remove them.

    Returns the CoinsInfo object and duplicate buckets.
    """
    all_coins = collect_coin_info()
    coin_list = all_coins.as_list()
    # generate duplicity buckets based on shortcuts
    # (reuse coin_list instead of flattening all_coins a second time)
    buckets = mark_duplicate_shortcuts(coin_list)
    # apply further processing to ERC20 tokens, generate deprecations etc.
    deduplicate_erc20(buckets, all_coins.eth)
    # ensure the whole list has unique keys (taking into account changes from deduplicate_erc20)
    deduplicate_keys(coin_list)
    # apply duplicity overrides
    buckets["_override"] = apply_duplicity_overrides(coin_list)
    sort_coin_infos(all_coins)

    return all_coins, buckets
|
|
|
|
|
2018-07-20 14:49:45 +00:00
|
|
|
|
2018-08-24 13:20:25 +00:00
|
|
|
def coin_info():
    """Collects coin info, fills out support info and returns the result.

    Does not auto-delete duplicates. This should now be based on support info.
    """
    all_coins, _ = coin_info_with_duplicates()
    return all_coins
|
2018-08-23 11:05:41 +00:00
|
|
|
|
|
|
|
|
2019-11-26 17:11:09 +00:00
|
|
|
def fido_info():
    """Returns info about known FIDO/U2F apps, loaded from `fido/*.json`."""
    return _load_fido_apps()
|
|
|
|
|
|
|
|
|
2018-08-23 11:05:41 +00:00
|
|
|
def search(coins, keyword):
    """Yield coins matching `keyword`.

    A coin matches when the lowercased keyword equals its key, shortcut or
    symbol, or is a substring of its name or symbol suffix.
    """
    kwl = keyword.lower()
    if isinstance(coins, CoinsInfo):
        coins = coins.as_list()

    for coin in coins:
        key = coin["key"].lower()
        name = coin["name"].lower()
        shortcut = coin["shortcut"].lower()
        symbol, suffix = symbol_from_shortcut(shortcut)
        matched = (
            kwl == key
            or kwl in name
            or kwl == shortcut
            or kwl == symbol
            or kwl in suffix
        )
        if matched:
            yield coin
|