fix merge conflicts with updated python-trezor

pull/25/head
matejcik 6 years ago
commit 2375a8f516

@@ -1,32 +1,32 @@
 #!/bin/bash
-CURDIR=$(pwd)
-PB2DIR=$CURDIR/pb2
-OUTDIR=../trezorlib/messages
-INDEX=$OUTDIR/__init__.py
+set -e
 
-rm -f $OUTDIR/[A-Z]*.py
-mkdir -p $OUTDIR
-mkdir -p $PB2DIR
-touch $PB2DIR/__init__.py
-rm -f $INDEX
-echo '# Automatically generated by pb2py' >> $INDEX
-echo 'from __future__ import absolute_import' >> $INDEX
-echo '' >> $INDEX
+cd "$(dirname "$0")"
 
-for i in types messages ; do
+GENPATH="../trezorlib/messages"
+INDEX="$GENPATH/__init__.py"
+PROTO_PATH="../../trezor-common/protob"
+PROTO_FILES="types messages"
+PB2_OUT="pb2"
+
+rm -f "$GENPATH/[A-Z]*.py"
+mkdir -p "$GENPATH"
+
+cat > "$INDEX" << EOF
+# Automatically generated by pb2py
+EOF
+
+mkdir -p "$PB2_OUT"
+
+for file in $PROTO_FILES; do
     # Compile .proto files to python2 modules using google protobuf library
-    cd $CURDIR/../../trezor-common/protob
-    protoc --python_out=$PB2DIR -I/usr/include -I. $i.proto
+    protoc --python_out="$PB2_OUT" -I"$PROTO_PATH" "$file.proto"
 done
 
-# hack to make output python 3 compatible
-sed -i 's/^import types_pb2/from . import types_pb2/g' $CURDIR/pb2/messages_pb2.py
-
-for i in types messages ; do
+for file in $PROTO_FILES; do
     # Convert google protobuf library to trezor's internal format
-    cd $CURDIR
-    ./pb2py -p $CURDIR -l $INDEX $i $OUTDIR
+    ./pb2py -P "trezorlib.protobuf" -p "$PB2_OUT" -l "$INDEX" "$file" "$GENPATH"
 done
 
-rm -rf $PB2DIR
+rm -rf "$PB2_OUT"
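
Note on the regenerated output: in the new script "$INDEX" is passed to pb2py as the module list (-l), so after a run the generated trezorlib/messages/__init__.py should consist of the heredoc header plus one import line per generated module, roughly like the sketch below. The message and enum names here are only examples, not the real listing.

# Automatically generated by pb2py
from .Initialize import Initialize    # one line per generated message class
from .Features import Features
from . import FailureType             # enum modules are imported whole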

@@ -3,72 +3,78 @@
 # to plain-python objects as used in TREZOR Core and python-trezor
 
 import sys
+import importlib
 import os
 import argparse
-from google.protobuf.internal.enum_type_wrapper import EnumTypeWrapper
 
 
-def process_type(t, cls, msg_id, indexfile, is_upy):
-    print(" * type %s" % t)
-
-    imports = []
-    out = ["", "", "class %s(p.MessageType):" % t, ]
-
-    if cls.DESCRIPTOR.fields_by_name:
-        out.append("    FIELDS = {")
-    elif msg_id is None:
-        out.append("    pass")
-
-    for v in sorted(cls.DESCRIPTOR.fields_by_name.values(), key=lambda x: x.number):
-        number = v.number
-        fieldname = v.name
-        type = None
-        repeated = v.label == 3
-        required = v.label == 2
-
-        # print v.has_default_value, v.default_value
-
-        if v.type in (4, 13, 14):
-            # TYPE_UINT64 = 4
-            # TYPE_UINT32 = 13
-            # TYPE_ENUM = 14
-            type = 'p.UVarintType'
-
-        elif v.type in (17,):
-            # TYPE_SINT32 = 17
-            type = 'p.Sint32Type'
-
-        elif v.type in (18,):
-            # TYPE_SINT64 = 18
-            type = 'p.Sint64Type'
-
-        elif v.type == 9:
-            # TYPE_STRING = 9
-            type = 'p.UnicodeType'
-
-        elif v.type == 8:
-            # TYPE_BOOL = 8
-            type = 'p.BoolType'
-
-        elif v.type == 12:
-            # TYPE_BYTES = 12
-            type = 'p.BytesType'
-
-        elif v.type == 11:
-            # TYPE_MESSAGE = 1
-            type = v.message_type.name
-            imports.append("from .%s import %s" %
-                           (v.message_type.name, v.message_type.name))
-
+def import_pb2(name):
+    return importlib.import_module("%s_pb2" % name)
+
+
+def create_message_import(name):
+    return "from .%s import %s" % (name, name)
+
+
+def create_const(name, value, is_upy):
+    if is_upy:
+        return "%s = const(%s)" % (name, value)
+    else:
+        return "%s = %s" % (name, value)
+
+
+def remove_from_start(s, prefix):
+    if s.startswith(prefix):
+        return s[len(prefix):]
+    else:
+        return s
+
+
+def process_message(descriptor, protobuf_module, msg_id, indexfile, is_upy):
+    print(" * type %s" % descriptor.name)
+
+    imports = []
+    out = ["", "", "class %s(p.MessageType):" % descriptor.name, ]
+
+    if descriptor.fields_by_number:
+        out.append("    FIELDS = {")
+    elif msg_id is None:
+        out.append("    pass")
+
+    for number, field in descriptor.fields_by_number.items():
+        field_name = field.name
+        field_type = None
+        repeated = (field.label == field.LABEL_REPEATED)
+        required = (field.label == field.LABEL_REQUIRED)
+
+        types = {
+            field.TYPE_UINT64: 'p.UVarintType',
+            field.TYPE_UINT32: 'p.UVarintType',
+            field.TYPE_ENUM: 'p.UVarintType',
+            field.TYPE_SINT32: 'p.Sint32Type',
+            field.TYPE_SINT64: 'p.Sint64Type',
+            field.TYPE_STRING: 'p.UnicodeType',
+            field.TYPE_BOOL: 'p.BoolType',
+            field.TYPE_BYTES: 'p.BytesType'
+        }
+
+        if field.type == field.TYPE_MESSAGE:
+            field_type = field.message_type.name
+            imports.append(create_message_import(field_type))
         else:
-            raise Exception("Unknown field type %s for field %s" %
-                            (v.type, fieldname))
+            try:
+                field_type = types[field.type]
+            except KeyError:
+                raise ValueError("Unknown field type %d for field %s" % (field.type, field_name))
 
+        comments = []
         if required:
-            comment = ' # required'
-        elif v.has_default_value:
-            comment = ' # default=%s' % repr(v.default_value)
+            comments.append('required')
+        if field.has_default_value:
+            comments.append("default=%s" % repr(field.default_value))
+
+        if comments:
+            comment = " # %s" % ' '.join(comments)
         else:
             comment = ''
@@ -78,27 +84,15 @@ def process_type(t, cls, msg_id, indexfile, is_upy):
             flags = '0'
 
         out.append("        %d: ('%s', %s, %s),%s" %
-                   (number, fieldname, type, flags, comment))
+                   (number, field_name, field_type, flags, comment))
 
-        # print fieldname, number, type, repeated, comment
-        # print v.__dict__
-        # print v.CPPTYPE_STRING
-        # print v.LABEL_REPEATED
-        # print v.enum_type
-        # v.has_default_value, v.default_value
-        # v.label == 3 # repeated
-        # print v.number
-
-    if cls.DESCRIPTOR.fields_by_name:
+    if descriptor.fields_by_name:
         out.append("    }")
 
     if msg_id is not None:
         out.append("    MESSAGE_WIRE_TYPE = %d" % msg_id)
         if indexfile is not None:
-            if is_upy:
-                indexfile.write("%s = const(%d)\n" % (t, msg_id))
-            else:
-                indexfile.write("%s = %d\n" % (t, msg_id))
+            indexfile.write(create_const(t, msg_id, is_upy))
 
     # Remove duplicate imports
     imports = sorted(list(set(imports)))
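
For context, the FIELDS format emitted by the two hunks above produces one Python module per message, roughly like this hand-written sketch. The message name, field names and the wire type value are invented for illustration, and 0 is the flags placeholder from the flags = '0' context line.

from .. import protobuf as p


class ExampleMessage(p.MessageType):
    FIELDS = {
        1: ('session_id', p.UVarintType, 0),  # required
        2: ('label', p.UnicodeType, 0),
    }
    MESSAGE_WIRE_TYPE = 99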
@@ -106,73 +100,60 @@ def process_type(t, cls, msg_id, indexfile, is_upy):
     if is_upy:
         imports = ['import protobuf as p'] + imports
     else:
-        imports = ['from __future__ import absolute_import',
-                   'from .. import protobuf as p'] + imports
+        imports = ['from .. import protobuf as p'] + imports
 
     return imports + out
 
 
-def process_enum(t, cls, is_upy):
+def process_enum(descriptor, is_upy):
     out = []
 
     if is_upy:
         out += ("from micropython import const", "")
 
-    print(" * enum %s" % t)
+    print(" * enum %s" % descriptor.name)
 
-    for k, v in cls.items():
+    for name, value in descriptor.values_by_name.items():
         # Remove type name from the beginning of the constant
         # For example "PinMatrixRequestType_Current" -> "Current"
-        if k.startswith("%s_" % t):
-            k = k.replace("%s_" % t, '')
+        enum_prefix = descriptor.name
+        name = remove_from_start(name, "%s_" % enum_prefix)
 
         # If type ends with *Type, but constant use type name without *Type, remove it too :)
         # For example "ButtonRequestType & ButtonRequest_Other" => "Other"
-        if t.endswith("Type") and k.startswith("%s_" % t.replace("Type", '')):
-            k = k.replace("%s_" % t.replace("Type", ''), '')
+        if enum_prefix.endswith("Type"):
+            enum_prefix, _ = enum_prefix.rsplit("Type", 1)
+            name = remove_from_start(name, "%s_" % enum_prefix)
 
-        if is_upy:
-            out.append("%s = const(%s)" % (k, v))
-        else:
-            out.append("%s = %s" % (k, v))
+        out.append(create_const(name, value.number, is_upy))
 
     return out
 
 
-def find_msg_type(msg_types, t):
-    for k, v in msg_types:
-        msg_name = k.replace('MessageType_', '')
-        if msg_name == t:
-            return v
-
-
-def process_module(mod, genpath, indexfile, modlist, is_upy):
+def process_file(descriptor, protobuf_module, genpath, indexfile, modlist, is_upy):
 
-    print("Processing module %s" % mod.__name__)
-
-    types = dict([(name, cls)
-                  for name, cls in mod.__dict__.items() if isinstance(cls, type)])
+    print("Processing module %s" % descriptor.name)
 
-    msg_types = __import__('pb2', globals(), locals(), [
-                           'messages_pb2', ]).messages_pb2.MessageType.items()
+    msg_types = import_pb2('messages').MessageType
 
-    for t, cls in sorted(types.items()):
+    for name, message_descriptor in descriptor.message_types_by_name.items():
         # Find message type for given class
-        msg_id = find_msg_type(msg_types, t)
+        try:
+            msg_id = msg_types.Value("MessageType_%s" % name)
+        except ValueError:
+            msg_id = None
 
-        out = process_type(t, cls, msg_id, indexfile, is_upy)
-        write_to_file(genpath, t, out)
+        out = process_message(message_descriptor, protobuf_module, msg_id, indexfile, is_upy)
+        write_to_file(genpath, name, out)
 
         if modlist:
-            modlist.write("from .%s import *\n" % t)
+            modlist.write(create_message_import(name) + "\n")
 
-    enums = dict([(name, cls) for name, cls in mod.__dict__.items()
-                  if isinstance(cls, EnumTypeWrapper)])
-
-    for t, cls in enums.items():
-        out = process_enum(t, cls, is_upy)
-        write_to_file(genpath, t, out)
+    for name, enum_descriptor in descriptor.enum_types_by_name.items():
+        out = process_enum(enum_descriptor, is_upy)
+        write_to_file(genpath, name, out)
 
         if modlist:
-            modlist.write("from . import %s\n" % t)
+            modlist.write("from . import %s\n" % name)
 
 
 def write_to_file(genpath, t, out):
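
The enum prefix stripping above can be checked in isolation; this standalone sketch copies remove_from_start from the diff and walks through the two examples mentioned in the comments:

def remove_from_start(s, prefix):
    # same helper as defined earlier in this diff
    if s.startswith(prefix):
        return s[len(prefix):]
    return s


# First pass strips the full enum name:
assert remove_from_start("PinMatrixRequestType_Current", "PinMatrixRequestType_") == "Current"

# For *Type enums, a second pass with the "Type" suffix removed handles names
# such as ButtonRequest_Other inside ButtonRequestType:
prefix, _ = "ButtonRequestType".rsplit("Type", 1)
assert remove_from_start("ButtonRequest_Other", "%s_" % prefix) == "Other"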
@@ -188,29 +169,19 @@ def write_to_file(genpath, t, out):
 if __name__ == '__main__':
     parser = argparse.ArgumentParser()
-    parser.add_argument('modulename', type=str, help="Name of module to generate")
-    parser.add_argument('genpath', type=str, help="Directory for generated source code")
-    parser.add_argument('-i', '--indexfile', type=str, help="[optional] Generate index file of wire types")
-    parser.add_argument('-l', '--modlist', type=str, help="[optional] Generate list of modules")
-    parser.add_argument('-p', '--protopath', type=str, help="[optional] Path to search for pregenerated Google's python sources")
+    parser.add_argument('module', help="Name of module to generate")
+    parser.add_argument('genpath', help="Directory for generated source code")
+    parser.add_argument('-P', '--protobuf-module', default="protobuf", help="Name of protobuf module")
+    parser.add_argument('-i', '--indexfile', type=argparse.FileType('a'), help="Generate index file of wire types")
+    parser.add_argument('-l', '--modlist', type=argparse.FileType('a'), help="Generate list of modules")
+    parser.add_argument('-p', '--protopath', type=str, help="Path to search for pregenerated Google's python sources")
     parser.add_argument('-m', '--micropython', action='store_true', help="Use micropython-favoured source code")
     args = parser.parse_args()
 
-    if args.indexfile:
-        indexfile = open(args.indexfile, 'a')
-    else:
-        indexfile = None
-
-    if args.modlist:
-        modlist = open(args.modlist, 'a')
-    else:
-        modlist = None
-
     if args.protopath:
         sys.path.append(args.protopath)
 
-    # Dynamically load module from argv[1]
-    tmp = __import__('pb2', globals(), locals(), ['%s_pb2' % args.modulename])
-    mod = getattr(tmp, "%s_pb2" % args.modulename)
+    # This must be done after sys.path.append
+    module = import_pb2(args.module)
 
-    process_module(mod, args.genpath, indexfile, modlist, args.micropython)
+    process_file(module.DESCRIPTOR, args.protobuf_module, args.genpath, args.indexfile, args.modlist, args.micropython)
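
Since the pregenerated *_pb2 modules are now found purely through sys.path plus importlib (the old pb2/__init__.py package shim is gone), the loading step boils down to something like this sketch; the "pb2" path and the "messages" module name follow the shell script above, but treat them as placeholders:

import importlib
import sys

sys.path.append("pb2")  # directory where protoc wrote messages_pb2.py / types_pb2.py
module = importlib.import_module("messages_pb2")

# process_file() iterates over these descriptors:
print(list(module.DESCRIPTOR.message_types_by_name))
print(list(module.DESCRIPTOR.enum_types_by_name))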
