mirror of https://github.com/trezor/trezor-firmware.git synced 2024-12-21 13:58:08 +00:00

Merge pull request #173 from trezor/matejcik/coin-defs

unified coin data management
Authored by matejcik on 2018-08-27 14:45:53 +02:00, committed by GitHub
Commit d37abab57d
98 changed files with 5707 additions and 2338 deletions

.gitignore vendored

@@ -1,3 +1,4 @@
/.vscode
__pycache__/
defs/coins/tools/coins.json
defs/coins/tools/coindefs.json

.travis.yml

@@ -9,16 +9,17 @@ cache:
- $HOME/.cache/pip
python:
- "3.5"
- "3.6"
install:
- pip install ed25519 Pillow demjson
- pip install --no-deps trezor
- pip install demjson
- pip install -r tools/requirements.txt
script:
- jsonlint defs/*.json
- jsonlint defs/*/*.json
- python tools/build_coins.py --defs
- python tools/cointool.py check
- python tools/support.py check --ignore-tokens --ignore-missing
notifications:
webhooks:

[39 image files changed (before/after previews omitted; file sizes unchanged)]
defs/coins Symbolic link

@@ -0,0 +1 @@
bitcoin

Deleted file

@@ -1 +0,0 @@
../../../tools/build_coins.py

File diff suppressed because it is too large.

defs/coins_details.override.json Normal file

@@ -0,0 +1,69 @@
{
"nem:DIMTOK": {
"coinmarketcap_alias": "dimcoin"
},
"erc20:eth:GNT": {
"coinmarketcap_alias": "golem-network-tokens"
},
"bitcoin:BCH": {
"wallet": {
"Electron Cash": "https://electroncash.org"
}
},
"bitcoin:BTC": {
"wallet": {
"Electrum": "https://electrum.org"
}
},
"bitcoin:BTCP": {
"wallet": {
"BTCP Electrum": "https://github.com/BTCPrivate/electrum-btcp"
}
},
"bitcoin:BTG": {
"wallet": {
"ElectrumG": "https://github.com/BTCGPU/electrum"
}
},
"bitcoin:DASH": {
"wallet": {
"Dash Electrum": "https://electrum.dash.org"
}
},
"bitcoin:FJC": {
"wallet": {
"Electrum-FJC": "http://www.fujicoin.org/downloads.php"
}
},
"bitcoin:GRS": {
"wallet": {
"Electrum-GRS": "https://www.groestlcoin.org/groestlcoin-electrum-wallet/"
}
},
"bitcoin:LTC": {
"wallet": {
"Electrum-LTC": "https://electrum-ltc.org"
}
},
"bitcoin:MONA": {
"wallet": {
"Electrum-MONA": "https://electrum-mona.org"
}
},
"bitcoin:NMC": {
"wallet": {
"Electrum-NMC": "https://github.com/namecoin/electrum-nmc"
}
},
"bitcoin:VIA": {
"wallet": {
"Vialectrum": "https://vialectrum.org"
}
},
"bitcoin:XZC": {
"wallet": {
"Electrum-XZC": "https://github.com/zcoinofficial/electrum-xzc",
"Znode Tool": "https://github.com/yura-pakhuchiy/znode-tool"
}
}
}

defs/duplicity_overrides.json Normal file

@@ -0,0 +1,6 @@
{
"erc20:eth:BTL (Battle)": true,
"erc20:eth:BTL (Bitlle)": true,
"erc20:eth:LINK Platform": true,
"erc20:eth:NXX": false
}

defs/ethereum/networks.json

@@ -18,7 +18,7 @@
{
"chain_id": 3,
"slip44": 1,
"shortcut": "tETH",
"shortcut": "tROP",
"name": "Ethereum Testnet Ropsten",
"rskip60": false,
"url": "https://www.ethereum.org"
@@ -26,7 +26,7 @@
{
"chain_id": 4,
"slip44": 1,
"shortcut": "tETH",
"shortcut": "tRIN",
"name": "Ethereum Testnet Rinkeby",
"rskip60": false,
"url": "https://www.ethereum.org"
@@ -35,7 +35,7 @@
"chain_id": 8,
"slip44": 108,
"shortcut": "UBQ",
"name": "UBIQ",
"name": "Ubiq",
"rskip60": false,
"url": "https://ubiqsmart.com"
},
@@ -74,7 +74,7 @@
{
"chain_id": 42,
"slip44": 1,
"shortcut": "tETH",
"shortcut": "tKOV",
"name": "Ethereum Testnet Kovan",
"rskip60": false,
"url": "https://www.ethereum.org"

defs/ethereum/tokens (submodule)

@@ -1 +1 @@
Subproject commit 0b7f34d48de1376f39e10adbeb5a0191cdffd73c
Subproject commit 44826a687a155bc0bb5e121370a07f5a95689d09

defs/misc/misc.json Normal file

@@ -0,0 +1,54 @@
[
{
"name": "Lisk",
"shortcut": "LSK",
"links": {
"Homepage": "https://lisk.io/"
}
},
{
"name": "Stellar",
"notes": "In development",
"shortcut": "XLM",
"links": {
"Homepage": "https://www.stellar.org"
}
},
{
"name": "Monero",
"shortcut": "XMR",
"notes": "In development",
"links": {
"Homepage": "https://getmonero.org"
}
},
{
"name": "Ripple",
"shortcut": "XRP",
"notes": "In development",
"links": {
"Homepage": "https://ripple.com"
}
},
{
"name": "Cardano",
"shortcut": "ADA",
"links": {
"Homepage": "https://www.cardano.org"
}
},
{
"name": "Tezos",
"shortcut": "XTZ",
"links": {
"Homepage": "https://tezos.com"
}
},
{
"name": "Ontology",
"shortcut": "ONT",
"links": {
"Homepage": "https://ont.io"
}
}
]

defs/nem/nem_mosaics.json

@@ -1,14 +1,20 @@
[
{
"name": "XEM",
"ticker": " XEM",
"name": "NEM",
"ticker": "XEM",
"namespace": "nem",
"mosaic": "xem",
"divisibility": 6
"divisibility": 6,
"links": {
"Homepage": "https://nem.io"
},
"wallet": {
"Nano Wallet": "https://nem.io/downloads/"
}
},
{
"name": "DIMCOIN",
"ticker": " DIM",
"ticker": "DIM",
"namespace": "dim",
"mosaic": "coin",
"divisibility": 6,
@@ -18,41 +24,65 @@
"levy_mosaic": "coin",
"networks": [
104
]
],
"links": {
"Homepage": "https://www.dimcoin.io"
},
"wallet": {
"Nano Wallet": "https://nem.io/downloads/"
}
},
{
"name": "DIM TOKEN",
"ticker": " DIMTOK",
"ticker": "DIMTOK",
"namespace": "dim",
"mosaic": "token",
"divisibility": 6,
"networks": [
104
]
],
"links": {
"Homepage": "https://www.dimcoin.io"
},
"wallet": {
"Nano Wallet": "https://nem.io/downloads/"
}
},
{
"name": "Breeze Token",
"ticker": " BREEZE",
"ticker": "BREEZE",
"namespace": "breeze",
"mosaic": "breeze-token",
"divisibility": 0,
"networks": [
104
]
],
"links": {
"Homepage": "https://breeze.chat"
},
"wallet": {
"Nano Wallet": "https://nem.io/downloads/"
}
},
{
"name": "PacNEM Game Credits",
"ticker": " PAC:HRT",
"ticker": "PAC:HRT",
"namespace": "pacnem",
"mosaic": "heart",
"divisibility": 0,
"networks": [
104
]
],
"links": {
"Homepage": "https://pacnem.com"
},
"wallet": {
"Nano Wallet": "https://nem.io/downloads/"
}
},
{
"name": "PacNEM Score Tokens",
"ticker": " PAC:CHS",
"ticker": "PAC:CHS",
"namespace": "pacnem",
"mosaic": "cheese",
"divisibility": 6,
@@ -62,6 +92,12 @@
"levy_mosaic": "xem",
"networks": [
104
]
],
"links": {
"Homepage": "https://pacnem.com"
},
"wallet": {
"Nano Wallet": "https://nem.io/downloads/"
}
}
]

File diff suppressed because it is too large.

tools/README.md Normal file

@@ -0,0 +1,61 @@
# Tools directory
### `coin_info.py`
Central module that extracts information from the JSON files in the `defs/` directory.
Its most important function is `get_all()`.
### `cointool.py`
Code and data generator. Has the following functions:
* __`check`__: runs validations. Currently, that is:
* schema validity in `defs/coins/*.json`
* availability of bitcore/blockbook backends
* icon format
* __`coins_json`__: generates `coins.json` for use in python-trezor, connect
and wallet. By default it outputs to the current directory.
* __`coindefs`__: generates `coindefs.json`, intended future format for sending
coin definitions to Trezor.
* __`render`__: for every `filename.ext.mako` passed (or for all templates found in a directory),
renders the Mako template with coin definitions and stores the result as the corresponding
`filename.ext`. This is used to generate code in trezor-mcu and trezor-core.
### `coins_details.py`
Regenerates `defs/coins_details.json`, which is a catalog of coins for https://trezor.io/coins.
All information is generated from coin definitions in `defs/`, support info is
taken either from `support.json`, or assumed (see `coin_info.support_info()`).
If needed, any value can be overridden in `coins_details.override.json`.
### `support.py`
Support info management. Ensures `support.json` is in the proper format. Has the
following subcommands:
* __`check`__: checks validity of support data. Ensures that there are no
orphaned keys in `support.json`, that no known coins are missing support
info, etc.
* __`show <keyword>`__: searches the coin database, matching by key (`coin:BTC`),
name ("Bitcoin") or shortcut / ticker symbol ("BTC"). Displays all matching coins
and their support info, if any.
* __`set <key> [symbol=value]...`__: updates support info for coin key (`coin:BTC`,
can be found with `support.py show`). Basic `symbol`s are: `trezor1 trezor2
connect webwallet`. Anything else is considered a link name:
`"Electrum=https://electrum.org"`
Allowed `value`s are `yes`, `no`, `soon`, `planned`, URLs and firmware version
numbers. Empty value (`trezor1=`) clears the respective symbol.
### `requirements.txt`
List of Python requirements for all tools in `pip` format. Set up your environment with
`pip3 install -r requirements.txt`, or `pipenv install -r requirements.txt`.
Python 3.6 or higher is required.
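
A minimal usage sketch tying these tools together (illustrative only: it relies solely on functions defined in `tools/coin_info.py` below, and assumes `tools/` is on the Python path so that `DEFS_DIR` resolves to `defs/`):

import coin_info

defs = coin_info.coin_info()            # CoinsInfo with .bitcoin, .eth, .erc20, .nem, .misc
support = coin_info.support_info(defs)  # maps coin key -> {"trezor1": ..., "trezor2": ..., ...}

for coin in defs.bitcoin:
    key = coin["key"]                   # e.g. "bitcoin:BTC"
    print(key, coin["name"], support[key].get("trezor2"))

# keyword lookup across all coin groups (matches key, name or shortcut)
for coin in coin_info.search(defs, "bitcoin"):
    print(coin["key"], coin["name"])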

tools/build_coins.py

@@ -1,268 +0,0 @@
#!/usr/bin/env python3
# This script generates coins.json files from the definitions in defs/
#
# - `./build_coins.py` generates a big file with everything
# - `./build_coins.py XXX` generates a file with coins supported by XXX
# for example: `./build_coins.py webwallet` or `./build_coins.py trezor1`
# - `./build_coins.py XXX --defs` also adds protobuf definitions with TOIF icon
#
# generated file is coins.json in current directory,
# and coindefs.json if --def is enabled
import json
import glob
import re
import os
import sys
BUILD_DEFS = '--defs' in sys.argv
if BUILD_DEFS:
from binascii import unhexlify
from hashlib import sha256
import ed25519
from PIL import Image
from trezorlib.protobuf import dump_message
from coindef import CoinDef
BUILD_DEFS = True
TEST_BACKEND = '--test-backend' in sys.argv
if TEST_BACKEND:
import requests
def check_type(val, types, nullable=False, empty=False, regex=None, choice=None): # noqa:E501
# check nullable
if nullable and val is None:
return True
# check empty
try:
if not empty and len(val) == 0:
return False
except TypeError:
pass
# check regex
if regex is not None:
if types is not str:
return False
m = re.match(regex, val)
if not m:
return False
# check choice
if choice is not None:
if val not in choice:
return False
# check type
if isinstance(types, list):
return True in [isinstance(val, t) for t in types]
else:
return isinstance(val, types)
def get_hash_genesis_block(api):
r = requests.get(api + '/api/block-index/0')
j = r.json()
return j['blockHash']
def validate_coin(coin):
assert check_type(coin['coin_name'], str, regex=r'^[A-Z]')
assert check_type(coin['coin_shortcut'], str, regex=r'^[A-Zt][A-Z][A-Z]+$')
assert check_type(coin['coin_label'], str, regex=r'^[A-Z]')
assert check_type(coin['website'], str, regex=r'^http.*[^/]$')
assert check_type(coin['github'], str, regex=r'^https://github.com/.*[^/]$') # noqa:E501
assert check_type(coin['maintainer'], str)
assert check_type(coin['curve_name'], str, choice=['secp256k1', 'secp256k1_decred', 'secp256k1_groestl']) # noqa:E501
assert check_type(coin['address_type'], int)
assert check_type(coin['address_type_p2sh'], int)
assert coin['address_type'] != coin['address_type_p2sh']
assert check_type(coin['maxfee_kb'], int)
assert check_type(coin['minfee_kb'], int)
assert coin['maxfee_kb'] >= coin['minfee_kb']
assert check_type(coin['hash_genesis_block'], str, regex=r'^[0-9a-f]{64}$')
assert check_type(coin['xprv_magic'], int)
assert check_type(coin['xpub_magic'], int)
assert check_type(coin['xpub_magic_segwit_p2sh'], int, nullable=True)
assert check_type(coin['xpub_magic_segwit_native'], int, nullable=True)
assert coin['xprv_magic'] != coin['xpub_magic']
assert coin['xprv_magic'] != coin['xpub_magic_segwit_p2sh']
assert coin['xprv_magic'] != coin['xpub_magic_segwit_native']
assert coin['xpub_magic'] != coin['xpub_magic_segwit_p2sh']
assert coin['xpub_magic'] != coin['xpub_magic_segwit_native']
assert coin['xpub_magic_segwit_p2sh'] is None or coin['xpub_magic_segwit_native'] is None or coin['xpub_magic_segwit_p2sh'] != coin['xpub_magic_segwit_native'] # noqa:E501
assert check_type(coin['slip44'], int)
assert check_type(coin['segwit'], bool)
assert check_type(coin['decred'], bool)
assert check_type(coin['fork_id'], int, nullable=True)
assert check_type(coin['force_bip143'], bool)
assert check_type(coin['bip115'], bool)
assert check_type(coin['version_group_id'], int, nullable=True)
assert check_type(coin['default_fee_b'], dict)
assert check_type(coin['dust_limit'], int)
assert check_type(coin['blocktime_seconds'], int)
assert check_type(coin['signed_message_header'], str)
assert check_type(coin['uri_prefix'], str, regex=r'^[a-z]+$')
assert check_type(coin['min_address_length'], int)
assert check_type(coin['max_address_length'], int)
assert coin['max_address_length'] >= coin['min_address_length']
assert check_type(coin['bech32_prefix'], str, regex=r'^[a-z]+$', nullable=True)
assert check_type(coin['cashaddr_prefix'], str, regex=r'^[a-z]+$', nullable=True)
assert check_type(coin['bitcore'], list, empty=True)
for bc in coin['bitcore']:
assert not bc.endswith('/')
if TEST_BACKEND:
assert get_hash_genesis_block(bc) == coin['hash_genesis_block']
assert check_type(coin['blockbook'], list, empty=True)
for bb in coin['blockbook']:
assert not bb.endswith('/')
if TEST_BACKEND:
assert get_hash_genesis_block(bb) == coin['hash_genesis_block']
def validate_icon(icon):
assert icon.size == (96, 96)
assert icon.mode == 'RGBA'
class Writer:
def __init__(self):
self.buf = bytearray()
def write(self, buf):
self.buf.extend(buf)
def serialize(coin, icon):
c = dict(coin)
c['signed_message_header'] = c['signed_message_header'].encode()
c['hash_genesis_block'] = unhexlify(c['hash_genesis_block'])
c['icon'] = icon
msg = CoinDef(**c)
w = Writer()
dump_message(w, msg)
return bytes(w.buf)
def sign(data):
h = sha256(data).digest()
sign_key = ed25519.SigningKey(b'A' * 32)
return sign_key.sign(h)
# conversion copied from trezor-core/tools/png2toi
# TODO: extract into common module in python-trezor
def convert_icon(icon):
import struct
import zlib
w, h = 32, 32
icon = icon.resize((w, h), Image.LANCZOS)
# remove alpha channel, replace with black
bg = Image.new('RGBA', icon.size, (0, 0, 0, 255))
icon = Image.alpha_composite(bg, icon)
# process pixels
pix = icon.load()
data = bytes()
for j in range(h):
for i in range(w):
r, g, b, _ = pix[i, j]
c = ((r & 0xF8) << 8) | ((g & 0xFC) << 3) | ((b & 0xF8) >> 3)
data += struct.pack('>H', c)
z = zlib.compressobj(level=9, wbits=10)
zdata = z.compress(data) + z.flush()
zdata = zdata[2:-4] # strip header and checksum
return zdata
def process_json(fn):
print(os.path.basename(fn), end=' ... ')
j = json.load(open(fn))
validate_coin(j)
if BUILD_DEFS:
i = Image.open(fn.replace('.json', '.png'))
validate_icon(i)
ser = serialize(j, convert_icon(i))
sig = sign(ser)
definition = (sig + ser).hex()
print('OK')
return j, definition
else:
print('OK')
return j, None
def process(for_device=None):
scriptdir = os.path.dirname(os.path.realpath(__file__))
support_json = json.load(open(scriptdir + '/../defs/support.json'))
if for_device is not None:
support_list = support_json[for_device].keys()
else:
support_list = None
coins = {}
defs = {}
for fn in glob.glob(scriptdir + '/../defs/coins/*.json'):
c, d = process_json(fn)
n = c['coin_name']
c['support'] = {}
for s in support_json.keys():
c['support'][s] = support_json[s][n] if n in support_json[s] else None # noqa:E501
if support_list is None or n in support_list:
coins[n] = c
defs[n] = d
return (coins, defs)
if __name__ == '__main__':
if len(sys.argv) > 1 and not sys.argv[1].startswith('-'):
for_device = sys.argv[1]
else:
for_device = None
(coins, defs) = process(for_device)
json.dump(coins, open('coins.json', 'w'), indent=4, sort_keys=True)
if BUILD_DEFS:
json.dump(defs, open('coindefs.json', 'w'), indent=4, sort_keys=True)
# check for colliding address versions
at_p2pkh = {}
at_p2sh = {}
slip44 = {}
for n, c in coins.items():
s = c['slip44']
if s not in slip44:
slip44[s] = []
if not(n.endswith('Testnet') and s == 1):
slip44[s].append(n)
if c['cashaddr_prefix']: # skip cashaddr currencies
continue
a1, a2 = c['address_type'], c['address_type_p2sh']
if a1 not in at_p2pkh:
at_p2pkh[a1] = []
if a2 not in at_p2sh:
at_p2sh[a2] = []
at_p2pkh[a1].append(n)
at_p2sh[a2].append(n)
print()
print('Colliding address_types for P2PKH:')
for k, v in at_p2pkh.items():
if len(v) >= 2:
print('-', k, ':', ','.join(v))
print()
print('Colliding address_types for P2SH:')
for k, v in at_p2sh.items():
if len(v) >= 2:
print('-', k, ':', ','.join(v))
print()
print('Colliding SLIP44 constants:')
for k, v in slip44.items():
if len(v) >= 2:
print('-', k, ':', ','.join(v))

tools/coin_info.py Executable file

@@ -0,0 +1,515 @@
#!/usr/bin/env python3
from binascii import unhexlify
from collections import defaultdict, OrderedDict
import re
import os
import json
import glob
import logging
try:
import requests
except ImportError:
requests = None
log = logging.getLogger(__name__)
DEFS_DIR = os.path.abspath(
os.environ.get("DEFS_DIR") or os.path.join(os.path.dirname(__file__), "..", "defs")
)
def load_json(*path):
"""Convenience function to load a JSON file from DEFS_DIR."""
if len(path) == 1 and path[0].startswith("/"):
filename = path[0]
else:
filename = os.path.join(DEFS_DIR, *path)
with open(filename) as f:
return json.load(f, object_pairs_hook=OrderedDict)
# ====== CoinsInfo ======
class CoinsInfo(dict):
"""Collection of information about all known kinds of coins.
It contains the following lists:
`bitcoin` for btc-like coins,
`eth` for ethereum networks,
`erc20` for ERC20 tokens,
`nem` for NEM mosaics,
`misc` for other networks.
Accessible as a dict or by attribute: `info["misc"] == info.misc`
"""
def as_list(self):
return sum(self.values(), [])
def as_dict(self):
return {coin["key"]: coin for coin in self.as_list()}
def __getattr__(self, attr):
if attr in self:
return self[attr]
else:
raise AttributeError(attr)
# ====== coin validation ======
def check_type(val, types, nullable=False, empty=False, regex=None, choice=None):
# check nullable
if val is None:
if nullable:
return
else:
raise ValueError("Missing required value")
# check type
if not isinstance(val, types):
raise TypeError(f"Wrong type (expected: {types})")
# check empty
if isinstance(val, (list, dict)) and not empty and not val:
raise ValueError("Empty collection")
# check regex
if regex is not None:
if types is not str:
raise TypeError("Wrong type for regex check")
if not re.search(regex, val):
raise ValueError(f"Value does not match regex {regex}")
# check choice
if choice is not None and val not in choice:
choice_str = ", ".join(choice)
raise ValueError(f"Value not allowed, use one of: {choice_str}")
def check_key(key, types, optional=False, **kwargs):
def do_check(coin):
if key not in coin:
if optional:
return
else:
raise KeyError(f"{key}: Missing key")
try:
check_type(coin[key], types, **kwargs)
except Exception as e:
raise ValueError(f"{key}: {e}") from e
return do_check
BTC_CHECKS = [
check_key("coin_name", str, regex=r"^[A-Z]"),
check_key("coin_shortcut", str, regex=r"^t?[A-Z]{3,}$"),
check_key("coin_label", str, regex=r"^[A-Z]"),
check_key("website", str, regex=r"^http.*[^/]$"),
check_key("github", str, regex=r"^https://github.com/.*[^/]$"),
check_key("maintainer", str),
check_key(
"curve_name", str, choice=["secp256k1", "secp256k1_decred", "secp256k1_groestl"]
),
check_key("address_type", int),
check_key("address_type_p2sh", int),
check_key("maxfee_kb", int),
check_key("minfee_kb", int),
check_key("hash_genesis_block", str, regex=r"^[0-9a-f]{64}$"),
check_key("xprv_magic", int),
check_key("xpub_magic", int),
check_key("xpub_magic_segwit_p2sh", int, nullable=True),
check_key("xpub_magic_segwit_native", int, nullable=True),
check_key("slip44", int),
check_key("segwit", bool),
check_key("decred", bool),
check_key("fork_id", int, nullable=True),
check_key("force_bip143", bool),
check_key("bip115", bool),
check_key("version_group_id", int, nullable=True),
check_key("default_fee_b", dict),
check_key("dust_limit", int),
check_key("blocktime_seconds", int),
check_key("signed_message_header", str),
check_key("uri_prefix", str, regex=r"^[a-z]+$"),
check_key("min_address_length", int),
check_key("max_address_length", int),
check_key("bech32_prefix", str, regex=r"^[a-z]+$", nullable=True),
check_key("cashaddr_prefix", str, regex=r"^[a-z]+$", nullable=True),
check_key("bitcore", list, empty=True),
check_key("blockbook", list, empty=True),
]
def validate_btc(coin):
errors = []
for check in BTC_CHECKS:
try:
check(coin)
except Exception as e:
errors.append(str(e))
magics = [
coin[k]
for k in (
"xprv_magic",
"xpub_magic",
"xpub_magic_segwit_p2sh",
"xpub_magic_segwit_native",
)
if coin[k] is not None
]
# each of those must be unique
# therefore length of list == length of set of unique values
if len(magics) != len(set(magics)):
errors.append("XPUB/XPRV magic numbers must be unique")
if coin["address_type"] == coin["address_type_p2sh"]:
errors.append("address_type must be distinct from address_type_p2sh")
if not coin["maxfee_kb"] >= coin["minfee_kb"]:
errors.append("max fee must not be smaller than min fee")
if not coin["max_address_length"] >= coin["min_address_length"]:
errors.append("max address length must not be smaller than min address length")
for bc in coin["bitcore"] + coin["blockbook"]:
if bc.endswith("/"):
errors.append("make sure URLs don't end with '/'")
return errors
# ======= Coin json loaders =======
def _load_btc_coins():
"""Load btc-like coins from `coins/*.json`"""
coins = []
for filename in glob.glob(os.path.join(DEFS_DIR, "coins", "*.json")):
coin = load_json(filename)
coin.update(
name=coin["coin_name"],
shortcut=coin["coin_shortcut"],
key="bitcoin:{}".format(coin["coin_shortcut"]),
icon=filename.replace(".json", ".png"),
)
coins.append(coin)
return coins
def _load_ethereum_networks():
"""Load ethereum networks from `ethereum/networks.json`"""
networks = load_json("ethereum", "networks.json")
for network in networks:
network.update(key="eth:{}".format(network["shortcut"]))
return networks
def _load_erc20_tokens():
"""Load ERC20 tokens from `ethereum/tokens` submodule."""
networks = _load_ethereum_networks()
tokens = []
for network in networks:
if network["name"].startswith("Ethereum Testnet "):
idx = len("Ethereum Testnet ")
chain = network["name"][idx : idx + 3]
else:
chain = network["shortcut"]
chain = chain.lower()
if not chain:
continue
chain_path = os.path.join(DEFS_DIR, "ethereum", "tokens", "tokens", chain)
for filename in glob.glob(os.path.join(chain_path, "*.json")):
token = load_json(filename)
token.update(
chain=chain,
chain_id=network["chain_id"],
address_bytes=unhexlify(token["address"][2:]),
shortcut=token["symbol"],
key="erc20:{}:{}".format(chain, token["symbol"]),
)
tokens.append(token)
return tokens
def _load_nem_mosaics():
"""Loads NEM mosaics from `nem/nem_mosaics.json`"""
mosaics = load_json("nem", "nem_mosaics.json")
for mosaic in mosaics:
shortcut = mosaic["ticker"].strip()
mosaic.update(shortcut=shortcut, key="nem:{}".format(shortcut))
return mosaics
def _load_misc():
"""Loads miscellaneous networks from `misc/misc.json`"""
others = load_json("misc/misc.json")
for other in others:
other.update(key="misc:{}".format(other["shortcut"]))
return others
# ====== support info ======
RELEASES_URL = "https://wallet.trezor.io/data/firmware/{}/releases.json"
MISSING_SUPPORT_MEANS_NO = ("connect", "webwallet")
VERSIONED_SUPPORT_INFO = ("trezor1", "trezor2")
def get_support_data():
"""Get raw support data from `support.json`."""
return load_json("support.json")
def latest_releases():
"""Get latest released firmware versions for Trezor 1 and 2"""
if not requests:
raise RuntimeError("requests library is required for getting release info")
latest = {}
for v in ("1", "2"):
releases = requests.get(RELEASES_URL.format(v)).json()
latest["trezor" + v] = max(tuple(r["version"]) for r in releases)
return latest
def is_token(coin):
return coin["key"].startswith("erc20:")
def support_info_single(support_data, coin):
"""Extract a support dict from `support.json` data.
Returns a dict of support values for each "device", i.e., `support.json`
top-level key.
The support value for each device is determined in order of priority:
* if the coin is a duplicate ERC20 token, all support values are `None`
* if the coin has an entry in `unsupported`, its support is `None`
* if the coin has an entry in `supported` its support is that entry
(usually a version string, or `True` for connect/webwallet)
* otherwise support is presumed "soon"
"""
support_info = {}
key = coin["key"]
dup = coin.get("duplicate")
for device, values in support_data.items():
if dup and is_token(coin):
support_value = None
elif key in values["unsupported"]:
support_value = None
elif key in values["supported"]:
support_value = values["supported"][key]
elif device in MISSING_SUPPORT_MEANS_NO:
support_value = None
else:
support_value = "soon"
support_info[device] = support_value
return support_info
def support_info(coins):
"""Generate Trezor support information.
Takes a collection of coins and generates a support-info entry for each.
The support-info is a dict with keys based on `support.json` keys.
These are usually: "trezor1", "trezor2", "connect" and "webwallet".
The `coins` argument can be a `CoinsInfo` object, a list or a dict of
coin items.
Support information is taken from `support.json`.
"""
if isinstance(coins, CoinsInfo):
coins = coins.as_list()
elif isinstance(coins, dict):
coins = coins.values()
support_data = get_support_data()
support = {}
for coin in coins:
support[coin["key"]] = support_info_single(support_data, coin)
return support
# ====== data cleanup functions ======
def _ensure_mandatory_values(coins):
"""Checks that every coin has the mandatory fields: name, shortcut, key"""
for coin in coins:
if not all(coin.get(k) for k in ("name", "shortcut", "key")):
raise ValueError(coin)
def symbol_from_shortcut(shortcut):
symsplit = shortcut.split(" ", maxsplit=1)
return symsplit[0], symsplit[1] if len(symsplit) > 1 else ""
def mark_duplicate_shortcuts(coins):
"""Finds coins with identical `shortcut`s.
Updates their keys and sets a `duplicate` field.
The logic is a little crazy.
The result of this function is a dictionary of _buckets_, each of which is
indexed by the duplicated symbol, or `_override`. The `_override` bucket will
contain all coins that are set to `true` in `duplicity_overrides.json`. These
will _always_ be marked as duplicate (and later possibly deleted if they're ERC20).
The rest will disambiguate based on the full shortcut.
(i.e., when `shortcut` is `BTL (Battle)`, the `symbol` is just `BTL`).
If _all tokens_ in the bucket have shortcuts with distinct suffixes, e.g.,
`CAT (BitClave)` and `CAT (Blockcat)`, we DO NOT mark them as duplicate.
These will then be supported and included in outputs.
If even one token in the bucket _does not_ have a distinct suffix, e.g.,
`MIT` and `MIT (Mychatcoin)`, the whole bucket is marked as duplicate.
If a token is set to `false` in `duplicity_overrides.json`, it will NOT
be marked as duplicate in this step, even if it is part of a "bad" bucket.
"""
dup_symbols = defaultdict(list)
dup_keys = defaultdict(list)
def dups_only(dups):
return {k: v for k, v in dups.items() if len(v) > 1}
for coin in coins:
symbol, _ = symbol_from_shortcut(coin["shortcut"])
dup_symbols[symbol].append(coin)
dup_keys[coin["key"]].append(coin)
dup_symbols = dups_only(dup_symbols)
dup_keys = dups_only(dup_keys)
# first deduplicate keys so that we can identify overrides
for values in dup_keys.values():
for i, coin in enumerate(values):
coin["key"] += f":{i}"
# load overrides and put them into their own bucket
overrides = load_json("duplicity_overrides.json")
override_bucket = []
for coin in coins:
if overrides.get(coin["key"], False):
coin["duplicate"] = True
override_bucket.append(coin)
# mark duplicate symbols
for values in dup_symbols.values():
splits = (symbol_from_shortcut(coin["shortcut"]) for coin in values)
suffixes = {suffix for _, suffix in splits}
# if 1. all suffixes are distinct and 2. none of them are empty
if len(suffixes) == len(values) and all(suffixes):
# Allow the whole bucket.
# For all intents and purposes these should be considered non-dups
# So we won't mark them as dups here
# But they still have their own bucket, and also overrides can
# explicitly mark them as duplicate one step before, in which case
# they *still* keep duplicate status (and possibly are deleted).
continue
for coin in values:
# allow overrides to skip this; if not listed in overrides, assume True
is_dup = overrides.get(coin["key"], True)
if is_dup:
coin["duplicate"] = True
# again: still in dups, but not marked as duplicate and not deleted
dup_symbols["_override"] = override_bucket
return dup_symbols
def _btc_sort_key(coin):
if coin["name"] in ("Bitcoin", "Testnet"):
return "000000" + coin["name"]
else:
return coin["name"]
def collect_coin_info():
"""Returns all definition as dict organized by coin type.
`coins` for btc-like coins,
`eth` for ethereum networks,
`erc20` for ERC20 tokens,
`nem` for NEM mosaics,
`misc` for other networks.
Automatically removes duplicate symbols from the result.
"""
all_coins = CoinsInfo(
bitcoin=_load_btc_coins(),
eth=_load_ethereum_networks(),
erc20=_load_erc20_tokens(),
nem=_load_nem_mosaics(),
misc=_load_misc(),
)
for k, coins in all_coins.items():
if k == "bitcoin":
coins.sort(key=_btc_sort_key)
elif k == "nem":
# do not sort nem
pass
elif k == "eth":
# sort ethereum networks by chain_id
coins.sort(key=lambda c: c["chain_id"])
else:
coins.sort(key=lambda c: c["key"].upper())
_ensure_mandatory_values(coins)
return all_coins
def coin_info_with_duplicates():
"""Collects coin info, detects duplicates but does not remove them.
Returns the CoinsInfo object and duplicate buckets.
"""
all_coins = collect_coin_info()
buckets = mark_duplicate_shortcuts(all_coins.as_list())
return all_coins, buckets
def coin_info():
"""Collects coin info, marks and prunes duplicate ERC20 symbols, fills out support
info and returns the result.
"""
all_coins, _ = coin_info_with_duplicates()
all_coins["erc20"] = [
coin for coin in all_coins["erc20"] if not coin.get("duplicate")
]
return all_coins
def search(coins, keyword):
kwl = keyword.lower()
if isinstance(coins, CoinsInfo):
coins = coins.as_list()
for coin in coins:
key = coin["key"].lower()
name = coin["name"].lower()
shortcut = coin["shortcut"].lower()
symbol, suffix = symbol_from_shortcut(shortcut)
if (
kwl == key
or kwl in name
or kwl == shortcut
or kwl == symbol
or kwl in suffix
):
yield coin
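
As a concrete illustration of the bucket logic documented in mark_duplicate_shortcuts() above, here is a small sketch; the coin dicts are made up for illustration, and the call still reads defs/duplicity_overrides.json, so DEFS_DIR must point at a checkout of defs/:

import coin_info

coins = [
    # distinct, non-empty suffixes: the whole bucket stays unmarked
    {"key": "erc20:eth:CAT (BitClave)", "name": "BitClave", "shortcut": "CAT (BitClave)"},
    {"key": "erc20:eth:CAT (Blockcat)", "name": "Blockcat", "shortcut": "CAT (Blockcat)"},
    # one entry has no suffix: every coin in this bucket gets marked duplicate
    {"key": "erc20:eth:MIT", "name": "MIT token", "shortcut": "MIT"},
    {"key": "erc20:eth:MIT (Mychatcoin)", "name": "Mychatcoin", "shortcut": "MIT (Mychatcoin)"},
]

buckets = coin_info.mark_duplicate_shortcuts(coins)
print(sorted(buckets))                      # ['CAT', 'MIT', '_override']
print([c.get("duplicate") for c in coins])  # [None, None, True, True]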

tools/coins_details.py

@@ -1,358 +1,330 @@
#!/usr/bin/env python3
"""Fetch information about coins and tokens supported by Trezor and update it in coins_details.json."""
import os
import time
import json
import logging
import requests
import ethereum_tokens_gen
import build_coins
from distutils.version import LooseVersion
import sys
import coin_info
T1_LATEST='1.6.2'
T2_LATEST='2.0.7'
import click
COINS = {}
LOG = logging.getLogger(__name__)
def coinmarketcap_init():
global COINS
OPTIONAL_KEYS = ("links", "notes", "wallet")
ALLOWED_SUPPORT_STATUS = ("yes", "no", "planned", "soon")
OVERRIDES = coin_info.load_json("coins_details.override.json")
VERSIONS = coin_info.latest_releases()
COINMAKETCAP_CACHE = os.path.join(os.path.dirname(__file__), "coinmarketcap.json")
COINMARKETCAP_API_BASE = "https://pro-api.coinmarketcap.com/v1/"
MARKET_CAPS = {}
def coinmarketcap_call(endpoint, api_key, params=None):
url = COINMARKETCAP_API_BASE + endpoint
r = requests.get(url, params=params, headers={"X-CMC_PRO_API_KEY": api_key})
r.raise_for_status()
return r.json()
def coinmarketcap_init(api_key, refresh=None):
global MARKET_CAPS
force_refresh = refresh is True
disable_refresh = refresh is False
try:
COINS = json.load(open('coinmarketcap.json', 'r'))
except FileNotFoundError:
pass
else:
if COINS["1"]["last_updated"] > time.time() - 3600:
print("Using local cache of coinmarketcap")
return
mtime = os.path.getmtime(COINMAKETCAP_CACHE)
cache_is_fresh = mtime > time.time() - 3600
if disable_refresh or (cache_is_fresh and not force_refresh):
print("Using cached market cap data")
with open(COINMAKETCAP_CACHE) as f:
coinmarketcap_data = json.load(f)
else:
print("Fetching market cap data")
coinmarketcap_data = coinmarketcap_call(
"cryptocurrency/listings/latest",
api_key,
params={"limit": 5000, "convert": "USD"},
)
except Exception as e:
raise RuntimeError("market cap data unavailable") from e
print("Updating coins from coinmarketcap")
total = None
COINS = {}
coin_data = {}
for coin in coinmarketcap_data["data"]:
slug = coin["slug"]
market_cap = coin["quote"]["USD"]["market_cap"]
if market_cap is not None:
coin_data[slug] = int(market_cap)
while total is None or len(COINS) < total:
url = 'https://api.coinmarketcap.com/v2/ticker/?start=%d&convert=USD&limit=100' % (len(COINS))
data = requests.get(url).json()
COINS.update(data['data'])
if total is None:
total = data['metadata']['num_cryptocurrencies']
MARKET_CAPS = coin_data
print("Fetched %d of %d coins" % (len(COINS), total))
time.sleep(1)
with open(COINMAKETCAP_CACHE, "w") as f:
json.dump(coinmarketcap_data, f)
json.dump(COINS, open('coinmarketcap.json', 'w'), sort_keys=True, indent=4)
return coin_data
def coinmarketcap_info(shortcut):
global COINS
shortcut = shortcut.replace(' ', '-').lower()
for _id in COINS:
coin = COINS[_id]
# print(shortcut, coin['website_slug'])
if shortcut == coin['website_slug']:
# print(coin)
return coin
def marketcap(coin):
cap = None
if "coinmarketcap_alias" in coin:
cap = MARKET_CAPS.get(coin["coinmarketcap_alias"])
if cap is None:
slug = coin["name"].replace(" ", "-").lower()
cap = MARKET_CAPS.get(slug)
if cap is None:
cap = MARKET_CAPS.get(coin["shortcut"].lower())
return cap
def update_marketcap(obj, shortcut):
def update_marketcaps(coins):
for coin in coins.values():
coin["marketcap_usd"] = marketcap(coin) or 0
def summary(coins, api_key):
t1_coins = 0
t2_coins = 0
supported_marketcap = 0
for coin in coins.values():
if coin.get("hidden"):
continue
t1_enabled = coin["t1_enabled"] == "yes"
t2_enabled = coin["t2_enabled"] == "yes"
if t1_enabled:
t1_coins += 1
if t2_enabled:
t2_coins += 1
if t1_enabled or t2_enabled:
supported_marketcap += coin.get("marketcap_usd", 0)
total_marketcap = None
try:
obj['marketcap_usd'] = int(float(coinmarketcap_info(shortcut)['quotes']['USD']['market_cap']))
except:
pass
# print("Marketcap info not found for", shortcut)
def coinmarketcap_global():
url = 'https://api.coinmarketcap.com/v2/global'
ret = requests.get(url)
data = ret.json()
return data
def set_default(obj, key, default_value):
obj[key] = obj.setdefault(key, default_value)
def update_info(details):
details['info']['updated_at'] = int(time.time())
details['info']['updated_at_readable'] = time.asctime()
details['info']['t1_coins'] = len([True for _, c in details['coins'].items() if c.get('t1_enabled') == 'yes' and not c.get('hidden', False)])
details['info']['t2_coins'] = len([True for _, c in details['coins'].items() if c.get('t2_enabled') == 'yes' and not c.get('hidden', False)])
try:
details['info']['total_marketcap_usd'] = int(coinmarketcap_global()['data']['quotes']['USD']['total_market_cap'])
ret = coinmarketcap_call("global-metrics/quotes/latest", api_key)
total_marketcap = int(ret["data"]["quote"]["USD"]["total_market_cap"])
except:
pass
marketcap = 0
for k, c in details['coins'].items():
if c['t1_enabled'] == 'yes' or c['t2_enabled'] == 'yes':
marketcap += details['coins'][k].setdefault('marketcap_usd', 0)
details['info']['marketcap_usd'] = marketcap
return dict(
updated_at=int(time.time()),
updated_at_readable=time.asctime(),
t1_coins=t1_coins,
t2_coins=t2_coins,
marketcap_usd=supported_marketcap,
total_marketcap_usd=total_marketcap,
)
def check_unsupported(details, prefix, supported):
for k in details['coins'].keys():
if not k.startswith(prefix):
continue
if k not in supported:
print("%s not supported by Trezor? (Possible manual entry)" % k)
def _is_supported(support, trezor_version):
version = VERSIONS[trezor_version]
nominal = support.get(trezor_version)
if nominal is None:
return "no"
elif isinstance(nominal, bool):
return "yes" if nominal else "no"
try:
nominal_version = tuple(map(int, nominal.split(".")))
return "yes" if nominal_version <= version else "soon"
except ValueError:
return nominal
def update_coins(details):
(coins, _) = build_coins.process(None)
firmware = json.load(open('../defs/support.json', 'r'))
supported = []
for key, coin in coins.items():
t1_enabled = 'no'
if key in firmware['trezor1']:
if LooseVersion(firmware['trezor1'][key]) <= LooseVersion(T1_LATEST):
t1_enabled = 'yes'
else:
t1_enabled = 'soon'
t2_enabled = 'no'
if key in firmware['trezor2']:
if LooseVersion(firmware['trezor2'][key]) <= LooseVersion(T2_LATEST):
t2_enabled = 'yes'
else:
t2_enabled = 'soon'
# print("Updating", coin['coin_label'], coin['coin_shortcut'])
key = "coin:%s" % coin['coin_shortcut']
supported.append(key)
out = details['coins'].setdefault(key, {})
out['type'] = 'coin'
out['t1_enabled'] = t1_enabled
out['t2_enabled'] = t2_enabled
set_default(out, 'shortcut', coin['coin_shortcut'])
set_default(out, 'name', coin['coin_label'])
set_default(out, 'links', {})
set_default(out['links'], 'Homepage', coin['website'])
set_default(out['links'], 'Github', coin['github'])
update_marketcap(out, coin.get('coinmarketcap_alias', coin['coin_label']))
check_unsupported(details, 'coin:', supported)
def _webwallet_support(coin, support):
"""Check the "webwallet" support property.
If set, check that at least one of the backends run on trezor.io.
If yes, assume we support the coin in our wallet.
Otherwise it's probably working with a custom backend, which means don't
link to our wallet.
"""
if not support.get("webwallet"):
return False
return any(".trezor.io" in url for url in coin["blockbook"] + coin["bitcore"])
def update_erc20(details):
networks = [x[0] for x in ethereum_tokens_gen.networks]
def update_simple(coins, support_info, type):
res = {}
for coin in coins:
key = coin["key"]
support = support_info[key]
LATEST_T1 = 'https://raw.githubusercontent.com/trezor/trezor-mcu/v%s/firmware/ethereum_tokens.c' % T1_LATEST
LATEST_T2 = 'https://raw.githubusercontent.com/trezor/trezor-core/v%s/src/apps/ethereum/tokens.py' % T2_LATEST
details = dict(
name=coin["name"],
shortcut=coin["shortcut"],
type=type,
t1_enabled=_is_supported(support, "trezor1"),
t2_enabled=_is_supported(support, "trezor2"),
)
for k in OPTIONAL_KEYS:
if k in coin:
details[k] = coin[k]
tokens = ethereum_tokens_gen.get_tokens()
tokens_t1 = requests.get(LATEST_T1).text
tokens_t2 = requests.get(LATEST_T2).text
res[key] = details
supported = []
for t in tokens:
# print('Updating', t['symbol'])
if t['chain'] not in networks:
print('Skipping, %s is disabled' % t['chain'])
continue
key = "erc20:%s:%s" % (t['chain'], t['symbol'])
supported.append(key)
out = details['coins'].setdefault(key, {})
out['type'] = 'erc20'
out['network'] = t['chain']
out['address'] = t['address']
set_default(out, 'shortcut', t['symbol'])
set_default(out, 'name', t['name'])
set_default(out, 'links', {})
set_default(out, 'wallet', {})
if "\" %s\"" % t['symbol'] in tokens_t1:
out['t1_enabled'] = 'yes'
else:
out['t1_enabled'] = 'soon'
if "'%s'" % t['symbol'] in tokens_t2:
out['t2_enabled'] = 'yes'
else:
out['t2_enabled'] = 'soon'
out['wallet']['MyCrypto'] = 'https://mycrypto.com'
out['wallet']['MyEtherWallet'] = 'https://www.myetherwallet.com'
if t['website']:
out['links']['Homepage'] = t['website']
if t.get('social', {}).get('github', None):
out['links']['Github'] = t['social']['github']
update_marketcap(out, out.get('coinmarketcap_alias', t['symbol']))
check_unsupported(details, 'erc20:', supported)
return res
def update_ethereum(details):
out = details['coins'].setdefault('coin2:ETH', {})
out['type'] = 'coin'
set_default(out, 'shortcut', 'ETH')
set_default(out, 'name', 'Ethereum')
set_default(out, 't1_enabled', 'yes')
set_default(out, 't2_enabled', 'yes')
update_marketcap(out, 'ethereum')
def update_bitcoin(coins, support_info):
res = update_simple(coins, support_info, "coin")
for coin in coins:
key = coin["key"]
support = support_info[key]
details = dict(
name=coin["coin_label"],
links=dict(Homepage=coin["website"], Github=coin["github"]),
wallet={},
)
if _webwallet_support(coin, support):
details["wallet"]["Trezor"] = "https://wallet.trezor.io"
out = details['coins'].setdefault('coin2:ETC', {})
out['type'] = 'coin'
set_default(out, 'shortcut', 'ETC')
set_default(out, 'name', 'Ethereum Classic')
set_default(out, 't1_enabled', 'yes')
set_default(out, 't2_enabled', 'yes')
update_marketcap(out, 'ethereum-classic')
res[key].update(details)
out = details['coins'].setdefault('coin2:RSK', {})
out['type'] = 'coin'
set_default(out, 'shortcut', 'RSK')
set_default(out, 'name', 'Rootstock')
set_default(out, 't1_enabled', 'yes')
set_default(out, 't2_enabled', 'yes')
update_marketcap(out, 'rootstock')
out = details['coins'].setdefault('coin2:EXP', {})
out['type'] = 'coin'
set_default(out, 'shortcut', 'EXP')
set_default(out, 'name', 'Expanse')
set_default(out, 't1_enabled', 'yes')
set_default(out, 't2_enabled', 'yes')
update_marketcap(out, 'expanse')
out = details['coins'].setdefault('coin2:UBQ', {})
out['type'] = 'coin'
set_default(out, 'shortcut', 'UBQ')
set_default(out, 'name', 'Ubiq')
set_default(out, 't1_enabled', 'yes')
set_default(out, 't2_enabled', 'yes')
update_marketcap(out, 'ubiq')
out = details['coins'].setdefault('coin2:ELLA', {})
out['type'] = 'coin'
set_default(out, 'shortcut', 'ELLA')
set_default(out, 'name', 'Ellaism')
set_default(out, 't1_enabled', 'yes')
set_default(out, 't2_enabled', 'yes')
update_marketcap(out, 'ellaism')
out = details['coins'].setdefault('coin2:EGEM', {})
out['type'] = 'coin'
set_default(out, 'shortcut', 'EGEM')
set_default(out, 'name', 'EtherGem')
set_default(out, 't1_enabled', 'yes')
set_default(out, 't2_enabled', 'yes')
update_marketcap(out, 'egem')
out = details['coins'].setdefault('coin2:ETSC', {})
out['type'] = 'coin'
set_default(out, 'shortcut', 'ETSC')
set_default(out, 'name', 'Ethereum Social')
set_default(out, 't1_enabled', 'yes')
set_default(out, 't2_enabled', 'yes')
update_marketcap(out, 'etsc')
out = details['coins'].setdefault('coin2:EOSC', {})
out['type'] = 'coin'
set_default(out, 'shortcut', 'EOSC')
set_default(out, 'name', 'EOS Classic')
set_default(out, 't1_enabled', 'yes')
set_default(out, 't2_enabled', 'yes')
update_marketcap(out, 'eosc')
out = details['coins'].setdefault('coin2:ADA', {})
out['type'] = 'coin'
set_default(out, 'shortcut', 'ADA')
set_default(out, 'name', 'Cardano')
set_default(out, 't1_enabled', 'no')
set_default(out, 't2_enabled', 'soon')
update_marketcap(out, 'cardano')
out = details['coins'].setdefault('coin2:XTZ', {})
out['type'] = 'coin'
set_default(out, 'shortcut', 'XTZ')
set_default(out, 'name', 'Tezos')
set_default(out, 't1_enabled', 'no')
set_default(out, 't2_enabled', 'soon')
update_marketcap(out, 'tezos')
return res
def update_mosaics(details):
d = json.load(open('../defs/nem/nem_mosaics.json'))
supported = []
for mosaic in d:
# print('Updating', mosaic['name'], mosaic['ticker'])
def update_erc20(coins, support_info):
# TODO skip disabled networks?
res = update_simple(coins, support_info, "erc20")
for coin in coins:
key = coin["key"]
details = dict(
network=coin["chain"],
address=coin["address"],
shortcut=coin["shortcut"],
links={},
wallet=dict(
MyCrypto="https://mycrypto.com",
MyEtherWallet="https://www.myetherwallet.com",
),
)
if coin.get("website"):
details["links"]["Homepage"] = coin["website"]
if coin.get("social", {}).get("github"):
details["links"]["Github"] = coin["social"]["github"]
key = "mosaic:%s" % mosaic['ticker'].strip()
supported.append(key)
out = details['coins'].setdefault(key, {})
out['type'] = 'mosaic'
set_default(out, 'shortcut', mosaic['ticker'].strip())
set_default(out, 'name', mosaic['name'])
set_default(out, 't1_enabled', 'yes')
set_default(out, 't2_enabled', 'yes')
res[key].update(details)
update_marketcap(out, out.get('coinmarketcap_alias', out['name']))
check_unsupported(details, 'mosaic:', supported)
return res
def check_missing_details(details):
for k in details['coins'].keys():
coin = details['coins'][k]
def update_ethereum_networks(coins, support_info):
res = update_simple(coins, support_info, "coin")
for coin in coins:
key = coin["key"]
details = dict(
wallet=dict(
MyCrypto="https://mycrypto.com",
MyEtherWallet="https://www.myetherwallet.com",
),
links=dict(Homepage=coin.get("url")),
)
res[key].update(details)
return res
def check_missing_data(coins):
for k, coin in coins.items():
hide = False
if 'links' not in coin:
print("%s: Missing links" % k)
hide = True
if 'Homepage' not in coin.get('links', {}):
if "Homepage" not in coin.get("links", {}):
print("%s: Missing homepage" % k)
hide = True
if coin['t1_enabled'] not in ('yes', 'no', 'planned', 'soon'):
if coin["t1_enabled"] not in ALLOWED_SUPPORT_STATUS:
print("%s: Unknown t1_enabled" % k)
hide = True
if coin['t2_enabled'] not in ('yes', 'no', 'planned', 'soon'):
if coin["t2_enabled"] not in ALLOWED_SUPPORT_STATUS:
print("%s: Unknown t2_enabled" % k)
hide = True
if 'Trezor' in coin.get('wallet', {}) and coin['wallet']['Trezor'] != 'https://wallet.trezor.io':
if (
"Trezor" in coin.get("wallet", {})
and coin["wallet"]["Trezor"] != "https://wallet.trezor.io"
):
print("%s: Strange URL for Trezor Wallet" % k)
hide = True
if len(coin.get('wallet', {})) == 0:
if len(coin.get("wallet", {})) == 0:
print("%s: Missing wallet" % k)
if "Testnet" in coin["name"]:
print("%s: Hiding testnet" % k)
hide = True
if hide:
if coin.get('hidden') != 1:
if coin.get("hidden") != 1:
print("%s: HIDING COIN!" % k)
# If any of important detail is missing, hide coin from list
coin['hidden'] = 1
coin["hidden"] = 1
if not hide and coin.get('hidden'):
if not hide and coin.get("hidden"):
print("%s: Details are OK, but coin is still hidden" % k)
for k in details['coins'].keys():
if details['coins'][k].get('hidden') == 1:
# summary of hidden coins
for k, coin in coins.items():
if coin.get("hidden") == 1:
print("%s: Coin is hidden" % k)
if __name__ == '__main__':
try:
details = json.load(open('../defs/coins_details.json', 'r'))
except FileNotFoundError:
details = {'coins': {}, 'info': {}}
def apply_overrides(coins):
for key, override in OVERRIDES.items():
if key not in coins:
LOG.warning(f"override without coin: {key}")
continue
coinmarketcap_init()
update_coins(details)
update_erc20(details)
update_ethereum(details)
update_mosaics(details)
update_info(details)
check_missing_details(details)
def recursive_update(orig, new):
if isinstance(new, dict) and isinstance(orig, dict):
for k, v in new.items():
orig[k] = recursive_update(orig.get(k), v)
return orig
else:
return new
print(json.dumps(details['info'], sort_keys=True, indent=4))
json.dump(details, open('../defs/coins_details.json', 'w'), sort_keys=True, indent=4)
coin = coins[key]
recursive_update(coin, override)
@click.command()
# fmt: off
@click.option("-r", "--refresh", "refresh", flag_value=True, help="Force refresh market cap info")
@click.option("-R", "--no-refresh", "refresh", flag_value=False, help="Force use cached market cap info")
@click.option("-A", "--api-key", required=True, envvar="COINMARKETCAP_API_KEY", help="Coinmarketcap API key")
# fmt: on
def main(refresh, api_key):
# setup logging
root = logging.getLogger()
root.setLevel(logging.DEBUG)
handler = logging.StreamHandler(sys.stdout)
handler.setLevel(logging.DEBUG)
root.addHandler(handler)
coinmarketcap_init(api_key, refresh=refresh)
defs = coin_info.coin_info()
support_info = coin_info.support_info(defs)
coins = {}
coins.update(update_bitcoin(defs.bitcoin, support_info))
coins.update(update_erc20(defs.erc20, support_info))
coins.update(update_ethereum_networks(defs.eth, support_info))
coins.update(update_simple(defs.nem, support_info, "mosaic"))
coins.update(update_simple(defs.misc, support_info, "coin"))
apply_overrides(coins)
update_marketcaps(coins)
check_missing_data(coins)
info = summary(coins, api_key)
details = dict(coins=coins, info=info)
print(json.dumps(info, sort_keys=True, indent=4))
with open(os.path.join(coin_info.DEFS_DIR, "coins_details.json"), "w") as f:
json.dump(details, f, sort_keys=True, indent=4)
if __name__ == "__main__":
main()
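
For orientation, one entry of the regenerated defs/coins_details.json ends up with roughly the following shape after update_bitcoin(), apply_overrides() and update_marketcaps() have run. This is a hypothetical example; the keys follow the code above, but the concrete values are illustrative only:

# illustrative only -- not actual output of the script
{
    "bitcoin:BTC": {
        "type": "coin",
        "name": "Bitcoin",
        "shortcut": "BTC",
        "t1_enabled": "yes",   # "yes" / "soon" / "no", as returned by _is_supported()
        "t2_enabled": "yes",
        "links": {"Homepage": "https://bitcoin.org", "Github": "https://github.com/bitcoin/bitcoin"},
        "wallet": {"Trezor": "https://wallet.trezor.io"},
        "marketcap_usd": 0,    # from the coinmarketcap cache; 0 if unknown
    }
}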

tools/cointool.py Executable file

@@ -0,0 +1,648 @@
#!/usr/bin/env python3
import io
import json
import logging
import re
import sys
import os
import glob
import binascii
import struct
import zlib
from collections import defaultdict
from hashlib import sha256
import click
import coin_info
from coindef import CoinDef
try:
import termcolor
except ImportError:
termcolor = None
try:
import mako
import mako.template
from munch import Munch
CAN_RENDER = True
except ImportError:
CAN_RENDER = False
try:
import requests
except ImportError:
requests = None
try:
import ed25519
from PIL import Image
from trezorlib import protobuf
CAN_BUILD_DEFS = True
except ImportError:
CAN_BUILD_DEFS = False
# ======= Crayon colors ======
USE_COLORS = False
def crayon(color, string, bold=False, dim=False):
if not termcolor or not USE_COLORS:
return string
else:
if bold:
attrs = ["bold"]
elif dim:
attrs = ["dark"]
else:
attrs = []
return termcolor.colored(string, color, attrs=attrs)
def print_log(level, *args, **kwargs):
prefix = logging.getLevelName(level)
if level == logging.DEBUG:
prefix = crayon("blue", prefix, bold=False)
elif level == logging.INFO:
prefix = crayon("blue", prefix, bold=True)
elif level == logging.WARNING:
prefix = crayon("red", prefix, bold=False)
elif level == logging.ERROR:
prefix = crayon("red", prefix, bold=True)
print(prefix, *args, **kwargs)
# ======= Mako management ======
def c_str_filter(b):
if b is None:
return "NULL"
def hexescape(c):
return r"\x{:02x}".format(c)
if isinstance(b, bytes):
return '"' + "".join(map(hexescape, b)) + '"'
else:
return json.dumps(b)
def black_repr_filter(val):
if isinstance(val, str):
if '"' in val:
return repr(val)
else:
return c_str_filter(val)
elif isinstance(val, bytes):
return "b" + c_str_filter(val)
else:
return repr(val)
def ascii_filter(s):
return re.sub("[^ -\x7e]", "_", s)
def make_support_filter(support_info):
def supported_on(device, coins):
for coin in coins:
if support_info[coin.key].get(device):
yield coin
return supported_on
MAKO_FILTERS = {
"c_str": c_str_filter,
"ascii": ascii_filter,
"black_repr": black_repr_filter,
}
def render_file(src, dst, coins, support_info):
"""Renders `src` template into `dst`.
`src` is a filename, `dst` is an open file object.
"""
template = mako.template.Template(filename=src)
result = template.render(
support_info=support_info,
supported_on=make_support_filter(support_info),
**coins,
**MAKO_FILTERS,
)
dst.write(result)
# ====== validation functions ======
def highlight_key(coin, color):
"""Return a colorful string where the SYMBOL part is bold."""
keylist = coin["key"].split(":")
if keylist[-1].isdigit():
keylist[-2] = crayon(color, keylist[-2], bold=True)
else:
keylist[-1] = crayon(color, keylist[-1], bold=True)
key = crayon(color, ":".join(keylist))
name = crayon(None, f"({coin['name']})", dim=True)
return f"{key} {name}"
def find_address_collisions(coins, field):
"""Detects collisions in a given field. Returns buckets of colliding coins."""
collisions = defaultdict(list)
for coin in coins:
value = coin[field]
collisions[value].append(coin)
return {k: v for k, v in collisions.items() if len(v) > 1}
def check_btc(coins):
check_passed = True
support_infos = coin_info.support_info(coins)
# validate individual coin data
for coin in coins:
errors = coin_info.validate_btc(coin)
if errors:
check_passed = False
print_log(logging.ERROR, "invalid definition for", coin["name"])
print("\n".join(errors))
def collision_str(bucket):
"""Generate a colorful string out of a bucket of colliding coins."""
coin_strings = []
for coin in bucket:
name = coin["name"]
prefix = ""
if name.endswith("Testnet"):
color = "green"
elif name == "Bitcoin":
color = "red"
elif coin.get("unsupported"):
color = "grey"
prefix = crayon("blue", "(X)", bold=True)
else:
color = "blue"
hl = highlight_key(coin, color)
coin_strings.append(prefix + hl)
return ", ".join(coin_strings)
def print_collision_buckets(buckets, prefix):
"""Intelligently print collision buckets.
For each bucket, if there are any collision with a mainnet, print it.
If the collision is with unsupported networks or testnets, it's just INFO.
If the collision is with supported mainnets, it's WARNING.
If the collision with any supported network includes Bitcoin, it's an ERROR.
"""
failed = False
for key, bucket in buckets.items():
mainnets = [c for c in bucket if not c["name"].endswith("Testnet")]
have_bitcoin = False
for coin in mainnets:
if coin["name"] == "Bitcoin":
have_bitcoin = True
if all(v is None for k,v in support_infos[coin["key"]].items()):
coin["unsupported"] = True
supported_mainnets = [c for c in mainnets if not c.get("unsupported")]
supported_networks = [c for c in bucket if not c.get("unsupported")]
if len(mainnets) > 1:
if have_bitcoin and len(supported_networks) > 1:
# ANY collision with Bitcoin is bad
level = logging.ERROR
failed = True
elif len(supported_mainnets) > 1:
# collision between supported networks is still pretty bad
level = logging.WARNING
else:
# collision between some unsupported networks is OK
level = logging.INFO
print_log(level, f"prefix {key}:", collision_str(bucket))
return failed
# slip44 collisions
print("Checking SLIP44 prefix collisions...")
slip44 = find_address_collisions(coins, "slip44")
if print_collision_buckets(slip44, "key"):
check_passed = False
# only check address_type on coins that don't use cashaddr
nocashaddr = [coin for coin in coins if not coin.get("cashaddr_prefix")]
print("Checking address_type collisions...")
address_type = find_address_collisions(nocashaddr, "address_type")
if print_collision_buckets(address_type, "address type"):
check_passed = False
print("Checking address_type_p2sh collisions...")
address_type_p2sh = find_address_collisions(nocashaddr, "address_type_p2sh")
# we ignore failed checks on P2SH prefixes, because collisions there are common and unavoidable
print_collision_buckets(address_type_p2sh, "address type")
return check_passed
def check_dups(buckets, print_at_level=logging.ERROR):
"""Analyze and pretty-print results of `coin_info.mark_duplicate_shortcuts`.
`print_at_level` can be one of logging levels.
The results are buckets of colliding symbols.
If the collision is only between ERC20 tokens, it's DEBUG.
If the collision includes one non-token, it's INFO.
If the collision includes more than one non-token, it's ERROR and is always printed.
"""
def coin_str(coin):
"""Colorize coins. Tokens are cyan, nontokens are red. Coins that are NOT
marked duplicate get a green asterisk.
"""
if coin_info.is_token(coin):
color = "cyan"
else:
color = "red"
highlighted = highlight_key(coin, color)
if not coin.get("duplicate"):
prefix = crayon("green", "*", bold=True)
else:
prefix = ""
return f"{prefix}{highlighted}"
check_passed = True
for symbol in sorted(buckets.keys()):
bucket = buckets[symbol]
if not bucket:
continue
nontokens = [coin for coin in bucket if not coin_info.is_token(coin)]
# string generation
dup_str = ", ".join(coin_str(coin) for coin in bucket)
if not nontokens:
level = logging.DEBUG
elif len(nontokens) == 1:
level = logging.INFO
else:
level = logging.ERROR
check_passed = False
# deciding whether to print
if level < print_at_level:
continue
if symbol == "_override":
print_log(level, "force-set duplicates:", dup_str)
else:
print_log(level, f"duplicate symbol {symbol}:", dup_str)
return check_passed
def check_backends(coins):
check_passed = True
for coin in coins:
genesis_block = coin.get("hash_genesis_block")
if not genesis_block:
continue
backends = coin.get("blockbook", []) + coin.get("bitcore", [])
for backend in backends:
print("checking", backend, "... ", end="", flush=True)
try:
j = requests.get(backend + "/api/block-index/0").json()
if j["blockHash"] != genesis_block:
raise RuntimeError("genesis block mismatch")
except Exception as e:
print(e)
check_passed = False
else:
print("OK")
return check_passed
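# The loop above expects a blockbook/bitcore block-index endpoint. Illustrative exchange
# (the URL placeholder is an assumption; the hash is Bitcoin's actual genesis block):
#   GET <backend>/api/block-index/0
#   -> {"blockHash": "000000000019d6689c085ae165831e934ff763ae46a2a6c172b3f1b60a8ce26f"}
# which must match the coin's "hash_genesis_block" field.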
def check_icons(coins):
check_passed = True
for coin in coins:
key = coin["key"]
icon_file = coin.get("icon")
if not icon_file:
print(key, ": missing icon")
check_passed = False
continue
try:
icon = Image.open(icon_file)
except Exception:
print(key, ": failed to open icon file", icon_file)
check_passed = False
continue
if icon.size != (96, 96) or icon.mode != "RGBA":
print(key, ": bad icon format (must be RGBA 96x96)")
check_passed = False
return check_passed
IGNORE_NONUNIFORM_KEYS = frozenset(("unsupported", "duplicate", "notes"))
def check_key_uniformity(coins):
keysets = defaultdict(list)
for coin in coins:
keyset = frozenset(coin.keys()) | IGNORE_NONUNIFORM_KEYS
keysets[keyset].append(coin)
if len(keysets) <= 1:
return True
buckets = list(keysets.values())
buckets.sort(key=lambda x: len(x))
majority = buckets[-1]
rest = sum(buckets[:-1], [])
reference_keyset = set(majority[0].keys())
for coin in rest:
key = coin["key"]
keyset = set(coin.keys())
missing = ", ".join(reference_keyset - keyset)
if missing:
print_log(logging.ERROR, f"coin {key} has missing keys: {missing}")
additional = ", ".join(keyset - reference_keyset)
if additional:
print_log(logging.ERROR, f"coin {key} has superfluous keys: {additional}")
return False
# ====== coindefs generators ======
def convert_icon(icon):
"""Convert PIL icon to TOIF format"""
# TODO: move this to python-trezor at some point
DIM = 32
icon = icon.resize((DIM, DIM), Image.LANCZOS)
# remove alpha channel, replace with black
bg = Image.new("RGBA", icon.size, (0, 0, 0, 255))
icon = Image.alpha_composite(bg, icon)
# process pixels
pix = icon.load()
data = bytes()
for y in range(DIM):
for x in range(DIM):
r, g, b, _ = pix[x, y]
c = ((r & 0xF8) << 8) | ((g & 0xFC) << 3) | ((b & 0xF8) >> 3)
data += struct.pack(">H", c)
z = zlib.compressobj(level=9, wbits=10)
zdata = z.compress(data) + z.flush()
zdata = zdata[2:-4] # strip header and checksum
return zdata
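# Minimal inspection sketch (hypothetical helper, not used by the tool): since the zlib
# header and checksum were stripped above, the payload is a raw deflate stream and can be
# decompressed with a negative wbits value.
def _toif_pixel_data(zdata, dim=32):
    """Recover the raw big-endian RGB565 pixel bytes from convert_icon() output."""
    raw = zlib.decompress(zdata, -10)  # raw deflate, 2**10 window, matching the compressor
    assert len(raw) == dim * dim * 2  # two bytes per pixel
    return raw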
def coindef_from_dict(coin):
proto = CoinDef()
for fname, _, fflags in CoinDef.FIELDS.values():
val = coin.get(fname)
if val is None and fflags & protobuf.FLAG_REPEATED:
val = []
elif fname == "signed_message_header":
val = val.encode("utf-8")
elif fname == "hash_genesis_block":
val = binascii.unhexlify(val)
setattr(proto, fname, val)
return proto
def serialize_coindef(proto, icon):
proto.icon = icon
buf = io.BytesIO()
protobuf.dump_message(buf, proto)
return buf.getvalue()
def sign(data):
h = sha256(data).digest()
sign_key = ed25519.SigningKey(b"A" * 32)
return sign_key.sign(h)
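# Verification sketch for the `coindefs` command below (hypothetical counterpart of sign();
# assumes the same all-"A" development key and the sig + ser layout used when serializing):
def verify(blob):
    """Split a decoded coindefs.json entry into signature and payload, then verify it."""
    sig, ser = blob[:64], blob[64:]
    verifying_key = ed25519.SigningKey(b"A" * 32).get_verifying_key()
    verifying_key.verify(sig, sha256(ser).digest())  # raises ed25519.BadSignatureError
    return ser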
# ====== click command handlers ======
@click.group()
@click.option(
"--colors/--no-colors",
"-c/-C",
default=sys.stdout.isatty(),
help="Force colored output on/off",
)
def cli(colors):
global USE_COLORS
USE_COLORS = colors
@cli.command()
# fmt: off
@click.option("--backend/--no-backend", "-b", default=False, help="Check blockbook/bitcore responses")
@click.option("--icons/--no-icons", default=True, help="Check icon files")
@click.option("-d", "--show-duplicates", type=click.Choice(("all", "nontoken", "errors")),
default="errors", help="How much information about duplicate shortcuts should be shown.")
# fmt: on
def check(backend, icons, show_duplicates):
"""Validate coin definitions.
Checks that every btc-like coin is properly filled out, and reports duplicate symbols,
missing or invalid icons, failing backend responses, and non-uniform key information --
i.e., coins of the same type that do not have the same fields in their JSON data.
Uniformity check ignores NEM mosaics and ERC20 tokens, where non-uniformity is
expected.
The `--show-duplicates` option can be set to:
- all: all shortcut collisions are shown, including colliding ERC20 tokens
- nontoken: only collisions that affect non-ERC20 coins are shown
- errors: only collisions between non-ERC20 coins are shown. This is the default,
as a collision between two or more non-ERC20 coins is an error.
In the output, duplicate ERC20 tokens are shown in cyan and duplicate non-tokens
in red. An asterisk (*) next to a coin means that even though its symbol collides,
it is not marked as a duplicate and is still included in the results.
The collision detection checks that SLIP44 numbers don't collide between different
mainnets (testnet collisions are allowed) and that `address_type` doesn't collide
with Bitcoin (other collisions are reported as warnings). `address_type_p2sh`
is also checked, but collisions there are common and can't realistically be
avoided, so they are not treated as errors.
In the collision checks, Bitcoin is shown in red, other mainnets in blue,
testnets in green and unsupported networks in gray, marked with `(X)` for
non-colored output.
"""
if backend and requests is None:
raise click.ClickException("You must install requests for backend check")
if icons and not CAN_BUILD_DEFS:
raise click.ClickException("Missing requirements for icon check")
defs, buckets = coin_info.coin_info_with_duplicates()
all_checks_passed = True
print("Checking BTC-like coins...")
if not check_btc(defs.bitcoin):
all_checks_passed = False
if show_duplicates == "all":
dup_level = logging.DEBUG
elif show_duplicates == "nontoken":
dup_level = logging.INFO
else:
dup_level = logging.ERROR
print("Checking unexpected duplicates...")
if not check_dups(buckets, dup_level):
all_checks_passed = False
if icons:
print("Checking icon files...")
if not check_icons(defs.bitcoin):
all_checks_passed = False
if backend:
print("Checking backend responses...")
if not check_backends(defs.bitcoin):
all_checks_passed = False
print("Checking key uniformity...")
for cointype, coinlist in defs.items():
if cointype in ("erc20", "nem"):
continue
if not check_key_uniformity(coinlist):
all_checks_passed = False
if not all_checks_passed:
print("Some checks failed.")
sys.exit(1)
else:
print("Everything is OK.")
@cli.command()
@click.option("-o", "--outfile", type=click.File(mode="w"), default="./coins.json")
def coins_json(outfile):
"""Generate coins.json for consumption in python-trezor and Connect/Wallet"""
coins = coin_info.coin_info().bitcoin
support_info = coin_info.support_info(coins)
by_name = {}
for coin in coins:
coin["support"] = support_info[coin["key"]]
by_name[coin["name"]] = coin
with outfile:
json.dump(by_name, outfile, indent=4, sort_keys=True)
outfile.write("\n")
@cli.command()
@click.option("-o", "--outfile", type=click.File(mode="w"), default="./coindefs.json")
def coindefs(outfile):
"""Generate signed coin definitions for python-trezor and others
This is currently unused but should enable us to add new coins without having to
update firmware.
"""
coins = coin_info.coin_info().bitcoin
coindefs = {}
for coin in coins:
key = coin["key"]
icon = Image.open(coin["icon"])
ser = serialize_coindef(coindef_from_dict(coin), convert_icon(icon))
sig = sign(ser)
definition = binascii.hexlify(sig + ser).decode("ascii")
coindefs[key] = definition
with outfile:
json.dump(coindefs, outfile, indent=4, sort_keys=True)
outfile.write("\n")
@cli.command()
# fmt: off
@click.argument("paths", metavar="[path]...", nargs=-1)
@click.option("-o", "--outfile", type=click.File("w"), help="Alternate output file")
@click.option("-v", "--verbose", is_flag=True, help="Print rendered file names")
# fmt: on
def render(paths, outfile, verbose):
"""Generate source code from Mako templates.
For every "foo.bar.mako" filename passed, runs the template and
saves the result as "foo.bar". For every directory name passed,
processes all ".mako" files found in that directory.
If `-o` is specified, renders a single file into the specified outfile.
If no arguments are given, processes the current directory.
"""
if not CAN_RENDER:
raise click.ClickException("Please install 'mako' and 'munch'")
if outfile and (len(paths) != 1 or not os.path.isfile(paths[0])):
raise click.ClickException("Option -o can only be used with single input file")
# prepare defs
defs = coin_info.coin_info()
support_info = coin_info.support_info(defs)
# munch dicts - make them attribute-accessible
for key, value in defs.items():
defs[key] = [Munch(coin) for coin in value]
for key, value in support_info.items():
support_info[key] = Munch(value)
def do_render(src, dst):
if verbose:
click.echo("Rendering {} => {}".format(src, dst))
render_file(src, dst, defs, support_info)
# single in-out case
if outfile:
do_render(paths[0], outfile)
return
# find files in directories
if not paths:
paths = ["."]
files = []
for path in paths:
if not os.path.exists(path):
click.echo("Path {} does not exist".format(path))
elif os.path.isdir(path):
files += glob.glob(os.path.join(path, "*.mako"))
else:
files.append(path)
# render each file
for file in files:
if not file.endswith(".mako"):
click.echo("File {} does not end with .mako".format(file))
else:
target = file[: -len(".mako")]
with open(target, "w") as dst:
do_render(file, dst)
if __name__ == "__main__":
cli()

View File

@ -1,77 +0,0 @@
#!/usr/bin/env python3
import sys
import os
import json
import re
def get_tokens():
tokens = []
for s, i in networks:
try:
files = os.scandir('../defs/ethereum/tokens/tokens/%s' % s)
except FileNotFoundError:
continue
for f in files:
if not f.path.endswith('.json'):
continue
# print('Processing', f.path)
data = json.load(open(f.path, 'r'))
data['chain'] = s
data['chain_id'] = i
tokens.append(data)
return tokens
def print_tokens(tokens, python=False):
count = 0
for t in sorted(tokens, key=lambda x: x['chain'] + x['symbol'].upper()):
address, name, symbol, decimal, chain, chain_id = t['address'], t['name'], t['symbol'], int(t['decimals']), t['chain'], t['chain_id'] # noqa:E501
address = '\\x'.join([address[i:i + 2] for i in range(0, len(address), 2)])[2:].lower() # noqa:E501
ascii_only = re.match(r'^[ -\x7F]+$', symbol) is not None
if not ascii_only: # skip Unicode symbols, they are stupid
continue
name = name.strip()
count += 1
if python:
print(" (%d, b'%s', '%s', %d), # %s / %s" % (chain_id, address, symbol, decimal, chain, name)) # noqa:E501
else:
print('\t{%2d, "%s", " %s", %d}, // %s / %s' % (chain_id, address, symbol, decimal, chain, name)) # noqa:E501
return count
# disabled are networks with no tokens defined in ethereum-lists/tokens repo
networks = [
('ella', 64),
('etc', 61),
('eth', 1),
('kov', 42),
('rin', 4),
('rop', 3),
('ubq', 8),
]
def generate_c(tokens):
count = print_tokens(tokens)
print('-' * 32)
print('#define TOKENS_COUNT %d' % count)
def generate_python(tokens):
print('tokens = [')
print_tokens(tokens, python=True)
print(']')
if __name__ == '__main__':
tokens = get_tokens()
if len(sys.argv) > 1 and sys.argv[1] == '--python':
generate_python(tokens)
else:
generate_c(tokens)

16
tools/requirements.txt Normal file
View File

@ -0,0 +1,16 @@
click>=6
# for `cointool.py coindefs` and checking icons:
ed25519>=1.4
Pillow>=5.2.0
trezor>=0.10
# for checking backends, generating coins_details.json
requests>=2.19
# for rendering templates:
Mako>=1.0.7
munch>=2.3.2
# for pretty colors in checks
termcolor>=0.1.2

497
tools/support.py Executable file
View File

@ -0,0 +1,497 @@
#!/usr/bin/env python3
import re
import os
import subprocess
import sys
import click
import coin_info
import json
SUPPORT_INFO = coin_info.get_support_data()
VERSION_RE = re.compile(r"\d+\.\d+\.\d+")
ERC20_DUPLICATE_KEY = "(AUTO) duplicate key"
def write_support_info():
with open(os.path.join(coin_info.DEFS_DIR, "support.json"), "w") as f:
json.dump(SUPPORT_INFO, f, indent=2, sort_keys=True)
f.write("\n")
def support_dicts(device):
return SUPPORT_INFO[device]["supported"], SUPPORT_INFO[device]["unsupported"]
def all_support_dicts():
for device in SUPPORT_INFO:
yield (device, *support_dicts(device))
def clear_support(device, key):
supported, unsupported = support_dicts(device)
supported.pop(key, None)
unsupported.pop(key, None)
def set_supported(device, key, value):
clear_support(device, key)
supported, _ = support_dicts(device)
supported[key] = value
def set_unsupported(device, key, value):
clear_support(device, key)
_, unsupported = support_dicts(device)
unsupported[key] = value
def print_support(coin):
def support_value(where, key, missing_means_no=False):
if "supported" in where and key in where["supported"]:
val = where["supported"][key]
if val is True:
return "YES"
elif val == "soon":
return "SOON"
elif VERSION_RE.match(val):
return f"YES since {val}"
else:
return f"BAD VALUE {val}"
elif "unsupported" in where and key in where["unsupported"]:
val = where["unsupported"][key]
return f"NO (reason: {val})"
elif missing_means_no:
return "NO"
else:
return "support info missing"
key, name, shortcut = coin["key"], coin["name"], coin["shortcut"]
print(f"{key} - {name} ({shortcut})")
if coin.get("duplicate"):
if coin_info.is_token(coin):
print(" * DUPLICATE SYMBOL (no support)")
return
else:
print(" * DUPLICATE SYMBOL")
for dev, where in SUPPORT_INFO.items():
missing_means_no = dev in coin_info.MISSING_SUPPORT_MEANS_NO
print(" *", dev, ":", support_value(where, key, missing_means_no))
# ====== validation functions ====== #
def check_support_values():
def _check_value_version_soon(val):
if not isinstance(val, str):
raise ValueError(f"non-str value: {val}")
is_version = VERSION_RE.match(val)
is_soon = val == "soon"
if not (is_version or is_soon):
raise ValueError(f"expected version or 'soon', found '{val}'")
errors = []
for device, values in SUPPORT_INFO.items():
supported = values.get("supported")
if not isinstance(supported, dict):
errors.append(f"Missing 'supported' dict for {device}")
else:
for key, value in supported.items():
try:
if device in coin_info.VERSIONED_SUPPORT_INFO:
_check_value_version_soon(value)
else:
if value is not True:
raise ValueError(f"only allowed is True, but found {value}")
except Exception as e:
errors.append(f"{device}.supported.{key}: {e}")
unsupported = values.get("unsupported")
if not isinstance(unsupported, dict):
errors.append(f"Missing 'supported' dict for {device}")
else:
for key, value in unsupported.items():
if not isinstance(value, str) or not value:
errors.append(f"{device}.unsupported.{key}: missing reason")
return errors
def find_unsupported_coins(coins_dict):
result = {}
for device in coin_info.VERSIONED_SUPPORT_INFO:
supported, unsupported = support_dicts(device)
support_set = set(supported.keys())
support_set.update(unsupported.keys())
result[device] = []
for key, coin in coins_dict.items():
if coin.get("duplicate") and coin_info.is_token(coin):
continue
if key not in support_set:
result[device].append(coin)
return result
def find_orphaned_support_keys(coins_dict):
orphans = set()
for _, supported, unsupported in all_support_dicts():
orphans.update(key for key in supported if key not in coins_dict)
orphans.update(key for key in unsupported if key not in coins_dict)
return orphans
def find_supported_duplicate_tokens(coins_dict):
result = []
for _, supported, _ in all_support_dicts():
for key in supported:
if not key.startswith("erc20:"):
continue
if coins_dict.get(key, {}).get("duplicate"):
result.append(key)
return result
def process_erc20(coins_dict):
"""Make sure that:
* orphaned ERC20 support info is cleared out
* duplicate ERC20 tokens are not listed as supported
* non-duplicate ERC20 tokens are cleared out from the unsupported list
"""
erc20_dict = {
key: coin.get("duplicate", False)
for key, coin in coins_dict.items()
if coin_info.is_token(coin)
}
for device, supported, unsupported in all_support_dicts():
nondups = set()
dups = set(key for key, value in erc20_dict.items() if value)
for key in supported:
if key not in erc20_dict:
continue
if not erc20_dict[key]:
dups.discard(key)
for key in unsupported:
if key not in erc20_dict:
continue
# ignore dups that are unsupported now
dups.discard(key)
if not erc20_dict[key] and unsupported[key] == ERC20_DUPLICATE_KEY:
# remove duplicate status
nondups.add(key)
for key in dups:
if device in coin_info.MISSING_SUPPORT_MEANS_NO:
clear_support(device, key)
else:
print(f"ERC20 on {device}: adding duplicate {key}")
set_unsupported(device, key, ERC20_DUPLICATE_KEY)
for key in nondups:
print(f"ERC20 on {device}: clearing non-duplicate {key}")
clear_support(device, key)
@click.group()
def cli():
pass
@cli.command()
@click.option("-n", "--dry-run", is_flag=True, help="Do not write changes")
def fix(dry_run):
"""Fix expected problems.
Prunes orphaned keys and ensures that ERC20 duplicate info matches support info.
"""
all_coins, _ = coin_info.coin_info_with_duplicates()
coins_dict = all_coins.as_dict()
orphaned = find_orphaned_support_keys(coins_dict)
for orphan in orphaned:
print(f"pruning orphan {orphan}")
for device in SUPPORT_INFO:
clear_support(device, orphan)
process_erc20(coins_dict)
if not dry_run:
write_support_info()
@cli.command()
# fmt: off
@click.option("-t", "--ignore-tokens", is_flag=True, help="Ignore unsupported ERC20 tokens")
@click.option("-m", "--ignore-missing", is_flag=True, help="Do not fail on missing supportinfo")
# fmt: on
def check(ignore_tokens, ignore_missing):
"""Check validity of support information.
Ensures that `support.json` data is well formed, there are no keys without
corresponding coins, and there are no coins without corresponding keys.
If `--ignore-tokens` is specified, the check will ignore ERC20 tokens
without support info. This is useful because there are usually a lot of ERC20
tokens.
If `--ignore-missing` is specified, the check will display coins with missing
support info, but will not fail when missing coins are found. This is
useful in Travis.
"""
all_coins, _ = coin_info.coin_info_with_duplicates()
coins_dict = all_coins.as_dict()
checks_ok = True
errors = check_support_values()
if errors:
for error in errors:
print(error)
checks_ok = False
orphaned = find_orphaned_support_keys(coins_dict)
for orphan in orphaned:
print(f"orphaned key {orphan}")
checks_ok = False
missing = find_unsupported_coins(coins_dict)
for device, values in missing.items():
if ignore_tokens:
values = [coin for coin in values if not coin_info.is_token(coin)]
if values:
if not ignore_missing:
checks_ok = False
print(f"Device {device} has missing support infos:")
for coin in values:
print(f"{coin['key']} - {coin['name']}")
supported_dups = find_supported_duplicate_tokens(coins_dict)
# find_supported_duplicate_tokens() returns keys, so look up the coin dict first
for key in supported_dups:
coin = coins_dict[key]
checks_ok = False
print(f"Token {key} ({coin['name']}) is duplicate but supported")
if not checks_ok:
print("Some checks have failed")
sys.exit(1)
@cli.command()
# fmt: off
@click.argument("device")
@click.option("-r", "--version", help="Set explicit version string (default: guess from latest release)")
@click.option("--git-tag/--no-git-tag", "-g", default=False, help="Create a corresponding Git tag")
@click.option("--release-soon/--no-release-soon", default=True, help="Release coins marked 'soon'")
@click.option("--release-missing/--no-release-missing", default=True, help="Release coins with missing support info")
@click.option("-n", "--dry-run", is_flag=True, help="Do not write changes")
@click.option("-s", "--soon", is_flag=True, help="Only set missing support-infos to be released 'soon'")
@click.option("-f", "--force", is_flag=True, help="Proceed even with bad version/device info")
# fmt: on
@click.pass_context
def release(
ctx,
device: str,
version,
git_tag,
release_soon,
release_missing,
dry_run,
soon,
force,
):
"""Release a new Trezor firmware.
Update support infos so that all coins have a clear support status.
By default, marks duplicate tokens as unsupported, and sets all coins that either
have no support info or are marked "soon" to the released firmware version.
Optionally tags the repository with the given version.
`device` can be "1", "2", or a string matching a `support.json` key. The version
is autodetected by downloading a list of the latest releases and incrementing the
micro version by one, or you can specify it explicitly with `--version`.
"""
# check condition(s)
if soon and git_tag:
raise click.ClickException("Cannot git-tag a 'soon' revision")
# process `device`
if device.isnumeric():
device = f"trezor{device}"
if not force and device not in coin_info.VERSIONED_SUPPORT_INFO:
raise click.ClickException(
f"Non-releasable device {device} (support info is not versioned). "
"Use --force to proceed anyway."
)
if not soon:
# guess `version` if not given
if not version:
versions = coin_info.latest_releases()
latest_version = versions.get(device)
if latest_version is None:
raise click.ClickException(
"Failed to guess version. "
"Please use --version to specify it explicitly."
)
else:
latest_version = list(latest_version)
latest_version[-1] += 1
version = ".".join(str(n) for n in latest_version)
# process `version`
try:
version_numbers = list(map(int, version.split(".")))
expected_device = f"trezor{version_numbers[0]}"
if not force and device != expected_device:
raise click.ClickException(
f"Device {device} should not be version {version}. "
"Use --force to proceed anyway."
)
except ValueError as e:
if not force:
raise click.ClickException(
f"Failed to parse '{version}' as a version. "
"Use --force to proceed anyway."
) from e
if soon:
version = "soon"
print(f"Moving {device} missing infos to 'soon'")
else:
print(f"Releasing {device} firmware version {version}")
defs, _ = coin_info.coin_info_with_duplicates()
coins_dict = defs.as_dict()
# Invoke data fixup as dry-run. That will modify data internally but won't write
# changes. We will write changes at the end based on our own `dry_run` value.
print("Fixing up data...")
ctx.invoke(fix, dry_run=True)
# process missing (not listed) supportinfos
if release_missing:
missing_list = find_unsupported_coins(coins_dict)[device]
for coin in missing_list:
key = coin["key"]
# we should have no unprocessed dup tokens at this point
assert not (coin.get("duplicate") and coin_info.is_token(coin))
print(f"Adding missing {key} ({coin['name']})")
set_supported(device, key, version)
# if we're releasing, process coins marked "soon"
# (`not soon` because `soon` means set release version to "soon")
if not soon and release_soon:
supported, _ = support_dicts(device)
soon_list = [
coins_dict[key]
for key, val in supported.items()
if val == "soon" and key in coins_dict
]
for coin in soon_list:
key = coin["key"]
print(f"Adding soon-released {key} ({coin['name']})")
set_supported(device, key, version)
tagname = f"{device}-{version}"
if git_tag:
if dry_run:
print(f"Would tag current commit with {tagname}")
else:
print(f"Tagging current commit with {tagname}")
subprocess.check_call(["git", "tag", tagname])
if not dry_run:
write_support_info()
else:
print("No changes written")
@cli.command()
@click.argument("keyword", nargs=-1, required=True)
def show(keyword):
"""Show support status of specified coins.
Keywords match against key, name or shortcut (ticker symbol) of coin.
"""
defs, _ = coin_info.coin_info_with_duplicates()
for kw in keyword:
for coin in coin_info.search(defs, kw):
print_support(coin)
@cli.command(name="set")
# fmt: off
@click.argument("key", required=True)
@click.argument("entries", nargs=-1, required=True, metavar="entry=value [entry=value]...")
@click.option("-r", "--reason", help="Reason for not supporting")
# fmt: on
def set_support_value(key, entries, reason):
"""Set a support info variable.
Examples:
support.py set coin:BTC trezor1=soon trezor2=2.0.7 webwallet=yes connect=no
support.py set coin:LTC trezor1=yes connect=
Setting a variable to "yes", "true" or "1" sets support to true.
Setting a variable to "no", "false" or "0" sets support to false.
(or null, in case of trezor1/2)
Setting variable to empty ("trezor1=") will set to null, or clear the entry.
Setting to "soon", "planned", "2.1.1" etc. will set the literal string.
Entries that are always present:
trezor1 trezor2 webwallet connect
Entries with other names will be inserted into "others". This is a good place
to store links to 3rd party software, such as Electrum forks or claim tools.
"""
defs, _ = coin_info.coin_info_with_duplicates()
coins = defs.as_dict()
if key not in coins:
click.echo(f"Failed to find key {key}")
click.echo("Use 'support.py show' to search for the right one.")
sys.exit(1)
if coins[key].get("duplicate") and coin_info.is_token(coins[key]):
shortcut = coins[key]["shortcut"]
click.echo(f"Note: shortcut {shortcut} is a duplicate.")
click.echo(f"Coin will NOT be listed regardless of support.json status.")
for entry in entries:
try:
device, value = entry.split("=", maxsplit=1)
except ValueError:
click.echo(f"Invalid entry: {entry}")
sys.exit(2)
if device not in SUPPORT_INFO:
raise click.ClickException(f"unknown device: {device}")
if value in ("yes", "true", "1"):
set_supported(device, key, True)
elif value in ("no", "false", "0"):
if device in coin_info.MISSING_SUPPORT_MEANS_NO:
click.echo("Setting explicitly unsupported for {device}.")
click.echo("Perhaps you meant removing support, i.e., '{device}=' ?")
if not reason:
reason = click.prompt(f"Enter reason for not supporting on {device}:")
set_unsupported(device, key, reason)
elif value == "":
clear_support(device, key)
else:
# arbitrary string
set_supported(device, key, value)
print_support(coins[key])
write_support_info()
if __name__ == "__main__":
cli()