
build: replace local copy of pb2py / build_protobuf with pb2py from trezor-common

matejcik 2018-07-03 13:44:01 +02:00
parent 6f23331dfc
commit ef736e837c
4 changed files with 10 additions and 295 deletions

setup.py

@@ -8,6 +8,7 @@ import tempfile
 from setuptools import setup, Command, find_packages
 from setuptools.command.build_py import build_py
 from setuptools.command.develop import develop
+from distutils.errors import DistutilsError
 
 install_requires = [
     'setuptools>=19.0',
@@ -45,7 +46,7 @@ class PrebuildCommand(Command):
         # check for existence of the submodule directory
         common_defs = os.path.join(TREZOR_COMMON, 'defs')
         if not os.path.exists(common_defs):
-            raise Exception('trezor-common submodule seems to be missing.\n' +
+            raise DistutilsError('trezor-common submodule seems to be missing.\n' +
                            'Use "git submodule update --init" to retrieve it.')
 
         # generate and copy coins.json to the tree
@@ -56,11 +57,14 @@
 
         # regenerate messages
         try:
-            subprocess.check_call([os.path.join(CWD, 'tools', 'build_protobuf'), '--no-core'])
+            proto_srcs = [os.path.join(TREZOR_COMMON, "protob", name + ".proto") for name in ("messages", "types")]
+            subprocess.check_call([
+                os.path.join(TREZOR_COMMON, "protob", "pb2py"),
+                "-o", os.path.join(CWD, "trezorlib", "messages"),
+                "-P", "..protobuf",
+            ] + proto_srcs)
         except Exception as e:
-            print(e)
-            print("Generating protobuf failed. Maybe you don't have 'protoc', or maybe you are on Windows?")
-            print("Using pre-generated files.")
+            raise DistutilsError("Generating protobuf failed. Make sure you have 'protoc' in your PATH.") from e
 
 
 def _patch_prebuild(cls):
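For reference, the regeneration step that the prebuild command now performs can be reproduced by hand. The sketch below is an illustration, not code from this commit: it mirrors the new setup.py call and assumes it is run from the repository root with the vendor/trezor-common submodule checked out and 'protoc' available on PATH (the new error message above suggests pb2py still needs it).

# Minimal sketch (not part of this commit): regenerate messages by hand,
# mirroring the new setup.py call above.
# Assumes: run from the repo root, vendor/trezor-common submodule checked out,
# and 'protoc' available on PATH.
import os
import subprocess

CWD = os.path.abspath(os.path.dirname(__file__))
TREZOR_COMMON = os.path.join(CWD, 'vendor', 'trezor-common')

# the two .proto definitions that make up the wire protocol
proto_srcs = [os.path.join(TREZOR_COMMON, 'protob', name + '.proto')
              for name in ('messages', 'types')]

subprocess.check_call([
    os.path.join(TREZOR_COMMON, 'protob', 'pb2py'),
    '-o', os.path.join(CWD, 'trezorlib', 'messages'),  # output package, as in setup.py above
    '-P', '..protobuf',                                # import path used inside generated files
] + proto_srcs)

Compared with the removed tools/build_protobuf below, no protoc invocation appears here at all; driving protoc is presumably left to the pb2py shipped in trezor-common.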

tools/build_protobuf

@@ -1,72 +0,0 @@
#!/bin/bash
set -e

IS_CORE=""
if [ "$1" == "--core" ]; then
    shift
    IS_CORE=yes
elif [ "$1" == "--no-core" ]; then
    shift
elif echo $PWD | grep -q "trezor-core"; then
    IS_CORE=yes
fi

if [ -n "$1" ]; then
    OUTDIR=`readlink -f "$1"`
fi

cd "$(dirname "$0")"

# set up paths
INDEX="__init__.py"
GENPATH="${OUTDIR:-../trezorlib/messages}"
PROTO_PATH="../vendor/trezor-common/protob"
PROTO_FILES="types messages"

# set up temporary directory & cleanup
TMPDIR=$(mktemp -d)
function cleanup {
    rm -r $TMPDIR
}
trap cleanup EXIT

# set up pb2 outdir
PB2_OUT="$TMPDIR/pb2"
mkdir -p "$PB2_OUT"

# compile .proto files to python2 modules using google protobuf library
for file in $PROTO_FILES; do
    protoc --python_out="$PB2_OUT" -I/usr/include -I"$PROTO_PATH" "$PROTO_PATH/$file.proto"
done

if [ -n "$IS_CORE" ]; then
    # generate for micropython
    PB2PY_OPTS="-m"
else
    # create index (__init__.py)
    echo "# Automatically generated by pb2py" > $TMPDIR/$INDEX
    echo >> $TMPDIR/$INDEX
    PB2PY_OPTS="-l $TMPDIR/$INDEX"
fi

# convert google protobuf library to trezor's internal format
for file in $PROTO_FILES; do
    ./pb2py $PB2PY_OPTS -P "trezorlib.protobuf" -p "$PB2_OUT" "$file" "$TMPDIR"
done

if [ -n "$IS_CORE" ]; then
    cp "$TMPDIR/MessageType.py" "$TMPDIR/wire_types.py"
fi

# ensure $GENPATH exists and is empty of messages
mkdir -p "$GENPATH"
# only remove messages - there could possibly be other files not starting with capital letter
rm -f "$GENPATH"/[A-Z]*.py

# move generated files to the destination
# (this assumes $INDEX is *.py, otherwise we'd have to add $INDEX separately)
mv "$TMPDIR"/*.py "$GENPATH"

# the exit trap handles removing the tmp directory
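The removed script's non-core path is essentially a two-stage pipeline: protoc compiles the .proto files into Google protobuf Python modules, and the local pb2py then converts those into trezor-style classes. A rough, hypothetical Python equivalent of that old flow (never part of the repository; index generation and the final move into trezorlib/messages are left out) would be:

# Hypothetical sketch of the old two-stage flow (non-core path), for comparison
# with the single pb2py call that replaces it. Assumes it runs from tools/,
# like build_protobuf did after its cd "$(dirname "$0")".
import os
import subprocess
import tempfile

PROTO_PATH = '../vendor/trezor-common/protob'
PROTO_FILES = ('types', 'messages')

with tempfile.TemporaryDirectory() as tmpdir:
    pb2_out = os.path.join(tmpdir, 'pb2')
    os.makedirs(pb2_out)

    # stage 1: .proto -> google protobuf python modules
    for name in PROTO_FILES:
        subprocess.check_call([
            'protoc', '--python_out=' + pb2_out,
            '-I/usr/include', '-I' + PROTO_PATH,
            os.path.join(PROTO_PATH, name + '.proto'),
        ])

    # stage 2: google modules -> trezor-style classes via the local pb2py
    for name in PROTO_FILES:
        subprocess.check_call([
            './pb2py', '-P', 'trezorlib.protobuf', '-p', pb2_out,
            name, tmpdir,
        ])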

tools/pb2py

@@ -1,217 +0,0 @@
#!/usr/bin/env python3
# Converts Google's protobuf python definitions of TREZOR wire messages
# to plain-python objects as used in TREZOR Core and python-trezor

import argparse
import importlib
import logging
import os
import sys
from collections import namedtuple

ProtoField = namedtuple('ProtoField', 'name, number, proto_type, py_type, repeated, required, orig')


def parse_field(number, field):
    FIELD_TYPES = {
        field.TYPE_UINT64: ('p.UVarintType', 'int'),
        field.TYPE_UINT32: ('p.UVarintType', 'int'),
        field.TYPE_ENUM: ('p.UVarintType', 'int'),
        field.TYPE_SINT32: ('p.SVarintType', 'int'),
        field.TYPE_SINT64: ('p.SVarintType', 'int'),
        field.TYPE_STRING: ('p.UnicodeType', 'str'),
        field.TYPE_BOOL: ('p.BoolType', 'bool'),
        field.TYPE_BYTES: ('p.BytesType', 'bytes'),
    }
    repeated = (field.label == field.LABEL_REPEATED)
    required = (field.label == field.LABEL_REQUIRED)

    if field.type == field.TYPE_MESSAGE:
        proto_type = py_type = field.message_type.name
    else:
        try:
            proto_type, py_type = FIELD_TYPES[field.type]
        except KeyError:
            raise ValueError("Unknown field type %d for field %s" % (field.type, field.name)) from None

    if repeated:
        py_type = "List[%s]" % py_type

    return ProtoField(
        name=field.name,
        number=number,
        proto_type=proto_type,
        py_type=py_type,
        repeated=repeated,
        required=required,
        orig=field,
    )


def import_pb2(name):
    return importlib.import_module("%s_pb2" % name)


def create_message_import(name):
    return "from .%s import %s" % (name, name)


def remove_from_start(s, prefix):
    if s.startswith(prefix):
        return s[len(prefix):]
    else:
        return s


def process_message_imports(descriptor):
    imports = set()
    for field in descriptor.fields:
        if field.type == field.TYPE_MESSAGE:
            imports.add(field.message_type.name)

    for name in sorted(imports):
        yield create_message_import(name)


def create_init_method(fields):
    yield "    def __init__("
    yield "        self,"
    for field in fields[:-1]:
        yield "        %s: %s = None," % (field.name, field.py_type)
    # the last field must not have a trailing comma
    yield "        %s: %s = None" % (fields[-1].name, fields[-1].py_type)
    yield "    ) -> None:"

    for field in fields:
        if field.repeated:
            yield "        self.{0} = {0} if {0} is not None else []".format(field.name)
        else:
            yield "        self.{0} = {0}".format(field.name)


def process_message(descriptor, protobuf_module, msg_id, is_upy):
    logging.debug("Processing message %s", descriptor.name)

    if is_upy:
        yield "import protobuf as p"
    else:
        yield "from .. import protobuf as p"

    fields = list(parse_field(number, field)
                  for number, field
                  in descriptor.fields_by_number.items())

    if any(field.repeated for field in fields):
        yield "if __debug__:"
        yield "    try:"
        yield "        from typing import List"
        yield "    except ImportError:"
        yield "        List = None"

    yield from process_message_imports(descriptor)

    yield ""
    yield ""
    yield "class %s(p.MessageType):" % descriptor.name

    if msg_id is not None:
        yield "    MESSAGE_WIRE_TYPE = %d" % msg_id

    if fields:
        yield "    FIELDS = {"
        for field in fields:
            comments = []
            if field.required:
                comments.append('required')
            if field.orig.has_default_value:
                comments.append("default=%s" % repr(field.orig.default_value))

            if comments:
                comment = "  # %s" % ' '.join(comments)
            else:
                comment = ''

            if field.repeated:
                flags = 'p.FLAG_REPEATED'
            else:
                flags = '0'

            yield "        %d: ('%s', %s, %s),%s" % (field.number, field.name, field.proto_type, flags, comment)

        yield "    }"
        yield ""
        yield from create_init_method(fields)

    if not fields and not msg_id:
        yield "    pass"


def process_enum(descriptor, is_upy):
    logging.debug("Processing enum %s", descriptor.name)

    for name, value in descriptor.values_by_name.items():
        # Remove type name from the beginning of the constant
        # For example "PinMatrixRequestType_Current" -> "Current"
        enum_prefix = descriptor.name
        name = remove_from_start(name, "%s_" % enum_prefix)

        # If the type ends with *Type but the constants use the type name without *Type, remove that too :)
        # For example "ButtonRequestType & ButtonRequest_Other" => "Other"
        if enum_prefix.endswith("Type"):
            enum_prefix, _ = enum_prefix.rsplit("Type", 1)
            name = remove_from_start(name, "%s_" % enum_prefix)

        yield "%s = %s" % (name, value.number)


def process_file(descriptor, protobuf_module, genpath, modlist, is_upy):
    logging.info("Processing module %s", descriptor.name)

    msg_types = import_pb2('messages').MessageType

    for name, message_descriptor in sorted(descriptor.message_types_by_name.items()):
        # Find message type for given class
        try:
            msg_id = msg_types.Value("MessageType_%s" % name)
        except ValueError:
            msg_id = None

        out = process_message(message_descriptor, protobuf_module, msg_id, is_upy)
        write_to_file(genpath, name, out)
        if modlist:
            modlist.write(create_message_import(name) + "\n")

    for name, enum_descriptor in descriptor.enum_types_by_name.items():
        out = process_enum(enum_descriptor, is_upy)
        write_to_file(genpath, name, out)
        if modlist:
            modlist.write("from . import %s\n" % name)


def write_to_file(genpath, t, out):
    # Write generated sourcecode to given file
    with open(os.path.join(genpath, "%s.py" % t), 'w') as f:
        f.write("# Automatically generated by pb2py\n")

        for line in out:
            f.write(line + "\n")


if __name__ == '__main__':
    logging.basicConfig(level=logging.DEBUG)

    parser = argparse.ArgumentParser()
    parser.add_argument('module', help="Name of module to generate")
    parser.add_argument('genpath', help="Directory for generated source code")
    parser.add_argument('-P', '--protobuf-module', default="protobuf", help="Name of protobuf module")
    parser.add_argument('-l', '--modlist', type=argparse.FileType('a'), help="Generate list of modules")
    parser.add_argument('-p', '--protopath', type=str, help="Path to search for pregenerated Google's python sources")
    parser.add_argument('-m', '--micropython', action='store_true', help="Use micropython-favoured source code")
    args = parser.parse_args()

    if args.protopath:
        sys.path.append(args.protopath)

    # This must be done after sys.path.append
    module = import_pb2(args.module)

    process_file(module.DESCRIPTOR, args.protobuf_module, args.genpath, args.modlist, args.micropython)
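To make the templates above concrete, this is the kind of module the removed pb2py emits for a hypothetical message; the name Example, the wire type 99 and the repeated uint32 field address_n are made up for illustration and not taken from trezor-common:

# Automatically generated by pb2py
from .. import protobuf as p
if __debug__:
    try:
        from typing import List
    except ImportError:
        List = None


class Example(p.MessageType):
    MESSAGE_WIRE_TYPE = 99
    FIELDS = {
        1: ('address_n', p.UVarintType, p.FLAG_REPEATED),
    }

    def __init__(
        self,
        address_n: List[int] = None
    ) -> None:
        self.address_n = address_n if address_n is not None else []

Enums are emitted separately by process_enum as flat NAME = number constants in a module of their own.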

vendor/trezor-common

@@ -1 +1 @@
-Subproject commit e5df1abfbe752da9232893715c496b3a1a9bde7a
+Subproject commit 6575418de93d2ca5fd67dd306f19ee736ebfd7b7