#!/usr/bin/env python3
# Converts Google's protobuf python definitions of Trezor wire messages
# to plain-python objects as used in Trezor Core and python-trezor
import argparse
import importlib
import logging
import os
import shutil
import subprocess
import sys
import tempfile
from collections import defaultdict, namedtuple
import attr
import construct as c
from google.protobuf import descriptor_pb2


AUTO_HEADER = "# Automatically generated by pb2py\n"

# fmt: off
FIELD_TYPES = {
    descriptor_pb2.FieldDescriptorProto.TYPE_UINT64: ('p.UVarintType', 'int'),
    descriptor_pb2.FieldDescriptorProto.TYPE_UINT32: ('p.UVarintType', 'int'),
    # descriptor_pb2.FieldDescriptorProto.TYPE_ENUM: ('p.UVarintType', 'int'),
    descriptor_pb2.FieldDescriptorProto.TYPE_SINT32: ('p.SVarintType', 'int'),
    descriptor_pb2.FieldDescriptorProto.TYPE_SINT64: ('p.SVarintType', 'int'),
    descriptor_pb2.FieldDescriptorProto.TYPE_STRING: ('p.UnicodeType', 'str'),
    descriptor_pb2.FieldDescriptorProto.TYPE_BOOL: ('p.BoolType', 'bool'),
    descriptor_pb2.FieldDescriptorProto.TYPE_BYTES: ('p.BytesType', 'bytes'),
}
# fmt: on
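
# Each FIELD_TYPES entry pairs two strings: the runtime type expression that is
# written into the generated get_fields() table, and the Python type used in
# the annotations of the generated __init__.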


ListOfSimpleValues = c.GreedyRange(
    c.Struct(
        "key" / c.VarInt,
        "value" / c.VarInt,
    )
)


def parse_protobuf_simple(data):
    """Micro-parse protobuf-encoded data.

    Assume every field is of type 0 (varint), and parse to a dict of fieldnum: value.
    """
    return {v.key >> 3: v.value for v in ListOfSimpleValues.parse(data)}
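
# Illustrative example: the key of each varint field is encoded as
# (field_number << 3) | wire_type, so b"\x08\x2a" is field number 1, wire
# type 0, value 42, and:
#     parse_protobuf_simple(b"\x08\x2a") == {1: 42}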


PROTOC = shutil.which("protoc")
if not PROTOC:
    print("protoc command not found")
    sys.exit(1)

PROTOC_PREFIX = os.path.dirname(os.path.dirname(PROTOC))
PROTOC_INCLUDE = os.path.join(PROTOC_PREFIX, "include")


@attr.s
class ProtoField:
    """Description of a single protobuf field, as needed by the generator."""

    name = attr.ib()
    number = attr.ib()
    orig = attr.ib()
    repeated = attr.ib()
    required = attr.ib()
    experimental = attr.ib()
    type_name = attr.ib()
    proto_type = attr.ib()
    py_type = attr.ib()
    default_value = attr.ib()

    @property
    def optional(self):
        return not self.required and not self.repeated

    @classmethod
    def from_field(cls, descriptor, field):
        repeated = field.label == field.LABEL_REPEATED
        required = field.label == field.LABEL_REQUIRED
        experimental = bool(descriptor._get_extension(field, "experimental"))
        # ignore package path
        type_name = field.type_name.rsplit(".")[-1]

        if field.type == field.TYPE_MESSAGE:
            proto_type = py_type = type_name
        elif field.type == field.TYPE_ENUM:
            value_dict = descriptor.enum_types[type_name]
            valuestr = ", ".join(str(v) for v in value_dict.values())
            proto_type = 'p.EnumType("{}", ({},))'.format(type_name, valuestr)
            py_type = "EnumType" + type_name
        else:
            try:
                proto_type, py_type = FIELD_TYPES[field.type]
            except KeyError:
                raise ValueError(
                    "Unknown field type {} for field {}".format(field.type, field.name)
                ) from None

        if not field.HasField("default_value"):
            default_value = None
        elif field.type == field.TYPE_ENUM:
            default_value = str(descriptor.enum_types[type_name][field.default_value])
        elif field.type == field.TYPE_STRING:
            default_value = f'"{field.default_value}"'
        elif field.type == field.TYPE_BYTES:
            default_value = f'b"{field.default_value}"'
        elif field.type == field.TYPE_BOOL:
            default_value = "True" if field.default_value == "true" else "False"
        else:
            default_value = field.default_value

        return cls(
            name=field.name,
            number=field.number,
            orig=field,
            repeated=repeated,
            required=required,
            experimental=experimental,
            type_name=type_name,
            proto_type=proto_type,
            py_type=py_type,
            default_value=default_value,
        )


def protoc(files, additional_includes=()):
    """Compile code with protoc and return the data."""
    include_dirs = set()
    include_dirs.add(PROTOC_INCLUDE)
    include_dirs.update(additional_includes)

    for file in files:
        dirname = os.path.dirname(file) or "."
        include_dirs.add(dirname)
    protoc_includes = ["-I" + dir for dir in include_dirs if dir]

    # Note that we could avoid creating temp files if protoc let us write to stdout
    # directly. This is currently only possible on Unix, by passing /dev/stdout as
    # the file name. Since there's no direct Windows equivalent, not counting
    # being creative with named pipes, special-casing this is not worth the effort.
    with tempfile.TemporaryDirectory() as tmpdir:
        outfile = os.path.join(tmpdir, "DESCRIPTOR_SET")
        subprocess.check_call(
            [PROTOC, "--descriptor_set_out={}".format(outfile)]
            + protoc_includes
            + files
        )
        with open(outfile, "rb") as f:
            return f.read()
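
# For example, protoc(["messages.proto"], ["/usr/local/include"]) would run
# roughly "protoc -I/usr/local/include -I<...> --descriptor_set_out=<tmpfile>
# messages.proto" and return the serialized FileDescriptorSet bytes
# (paths here are illustrative).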


def strip_leader(s, prefix):
    """Remove given prefix from underscored name."""
    leader = prefix + "_"
    if s.startswith(leader):
        return s[len(leader) :]
    else:
        return s
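
# e.g. strip_leader("MessageType_Initialize", "MessageType") -> "Initialize";
# a name that does not start with the prefix is returned unchanged.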


def import_statement_from_path(path):
    # separate leading dots
    dot_prefix = ""
    while path.startswith("."):
        dot_prefix += "."
        path = path[1:]

    # split on remaining dots
    split_path = path.rsplit(".", maxsplit=1)
    leader, import_name = split_path[:-1], split_path[-1]

    if leader:
        from_part = dot_prefix + leader[0]
    elif dot_prefix:
        from_part = dot_prefix
    else:
        from_part = ""

    if from_part:
        return "from {} import {}".format(from_part, import_name)
    else:
        return "import {}".format(import_name)


class Descriptor:
    def __init__(self, data, message_type="MessageType", import_path="protobuf"):
        self.descriptor = descriptor_pb2.FileDescriptorSet()
        self.descriptor.ParseFromString(data)

        self.files = self.descriptor.file
        logging.debug("found {} files".format(len(self.files)))

        # find messages and enums
        self.messages = []
        self.enums = []
        self.enum_types = defaultdict(dict)
        self.extensions = {}

        for file in self.files:
            self.messages += file.message_type
            self.enums += file.enum_type
            for message in file.message_type:
                self._nested_types_from_message(message)
            for extension in file.extension:
                self.extensions[extension.name] = extension.number

        if not self.messages and not self.enums:
            raise RuntimeError("No messages and no enums found.")

        self.message_types = self.find_message_types(message_type)
        self.protobuf_import = import_statement_from_path(import_path)

        self.out_dir = None

    def _get_extension(self, something, extension_name, default=None):
        # There doesn't seem to be a sane way to access extensions on a descriptor
        # via the google.protobuf API.
        # We do have access to descriptors of the extensions...
        extension_num = self.extensions[extension_name]
        # ...and the "options" descriptor _does_ include the extension data. But while
        # the API provides access to unknown fields, it hides the extensions.
        # What we do is re-encode the options descriptor...
        options_bytes = something.options.SerializeToString()
        # ...and re-parse it as a dict of uvarints...
        simple_values = parse_protobuf_simple(options_bytes)
        # ...and extract the value corresponding to the extension we care about.
        return simple_values.get(extension_num, default)
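
    # For instance, when the .proto files extend google.protobuf.MessageOptions
    # with an "optional uint32 wire_type" extension, _get_extension(message,
    # "wire_type") recovers that message's wire ID from the re-parsed options.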

    def _nested_types_from_message(self, message):
        self.messages += message.nested_type
        self.enums += message.enum_type
        for nested in message.nested_type:
            self._nested_types_from_message(nested)

    def find_message_types(self, message_type):
        message_types = {}
        try:
            message_type_enum = next(e for e in self.enums if e.name == message_type)
            for value in message_type_enum.value:
                name = strip_leader(value.name, message_type)
                message_types[name] = value.number

        except StopIteration:
            # No message type found. Oh well.
            logging.warning(
                "Message IDs not found under '{}'".format(message_type)
            )

        return message_types

    def create_message_import(self, name):
        return "from .{0} import {0}".format(name)

    def process_subtype_imports(self, fields):
        imports = set(
            field.proto_type
            for field in fields
            if field.orig.type == field.orig.TYPE_MESSAGE
        )

        if len(imports) > 0:
            yield ""  # make isort happy
        for name in sorted(imports):
            yield self.create_message_import(name)

    def create_init_method(self, fields):
        required_fields = [f for f in fields if f.required]
        repeated_fields = [f for f in fields if f.repeated]
        optional_fields = [f for f in fields if f.optional]

        # please keep the yields aligned
        # fmt: off
        yield "    def __init__("
        yield "        self,"
        yield "        *,"
        for field in required_fields:
            yield f"        {field.name}: {field.py_type},"
        for field in repeated_fields:
            yield f"        {field.name}: Optional[List[{field.py_type}]] = None,"
        for field in optional_fields:
            if field.default_value is None:
                yield f"        {field.name}: Optional[{field.py_type}] = None,"
            else:
                yield f"        {field.name}: {field.py_type} = {field.default_value},"
        yield "    ) -> None:"

        for field in repeated_fields:
            yield f"        self.{field.name} = {field.name} if {field.name} is not None else []"
        for field in required_fields + optional_fields:
            yield f"        self.{field.name} = {field.name}"
        # fmt: on
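
    # For a hypothetical message with a required int field "index", a repeated
    # int field "address_n", and an optional bool field "show_display", the
    # yielded code would look roughly like:
    #
    #     def __init__(
    #         self,
    #         *,
    #         index: int,
    #         address_n: Optional[List[int]] = None,
    #         show_display: Optional[bool] = None,
    #     ) -> None:
    #         self.address_n = address_n if address_n is not None else []
    #         self.index = index
    #         self.show_display = show_display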

    def create_fields_method(self, fields):
        # fmt: off
        yield "    @classmethod"
        yield "    def get_fields(cls) -> Dict:"
        yield "        return {"
        for field in fields:
            comments = []
            if field.default_value is not None:
                comments.append(f"default={field.orig.default_value}")

            if comments:
                comment = "  # " + " ".join(comments)
            else:
                comment = ""

            if field.repeated:
                if field.default_value is not None:
                    raise ValueError("Repeated fields can't have default values.")
                if field.experimental:
                    raise ValueError("Repeated experimental fields are currently not supported.")
                flags = "p.FLAG_REPEATED"
            elif field.required:
                if field.default_value is not None:
                    raise ValueError("Required fields can't have default values.")
                if field.experimental:
                    raise ValueError("Required fields can't be experimental.")
                flags = "p.FLAG_REQUIRED"
            elif field.experimental:
                if field.default_value is not None:
                    raise ValueError("Experimental fields can't have default values.")
                flags = "p.FLAG_EXPERIMENTAL"
            else:
                flags = field.default_value

            yield "            {num}: ('{name}', {type}, {flags}),{comment}".format(
                num=field.number,
                name=field.name,
                type=field.proto_type,
                flags=flags,
                comment=comment,
            )

        yield "        }"
        # fmt: on
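
    # For the same hypothetical message, the yielded classmethod would look
    # roughly like:
    #
    #     @classmethod
    #     def get_fields(cls) -> Dict:
    #         return {
    #             1: ('index', p.UVarintType, p.FLAG_REQUIRED),
    #             2: ('address_n', p.UVarintType, p.FLAG_REPEATED),
    #             3: ('show_display', p.BoolType, None),
    #         }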

    def process_message(self, message, include_deprecated=False):
        logging.debug("Processing message {}".format(message.name))

        msg_id = self._get_extension(message, "wire_type")
        if msg_id is None:
            msg_id = self.message_types.get(message.name)

        unstable = self._get_extension(message, "unstable")

        # "from .. import protobuf as p"
        yield self.protobuf_import + " as p"

        fields = [ProtoField.from_field(self, field) for field in message.field]
        if not include_deprecated:
            fields = [field for field in fields if not field.orig.options.deprecated]

        yield from self.process_subtype_imports(fields)

        yield ""
        yield "if __debug__:"
        yield "    try:"
        yield "        from typing import Dict, List, Optional  # noqa: F401"
        yield "        from typing_extensions import Literal  # noqa: F401"

        all_enums = [field for field in fields if field.type_name in self.enum_types]
        for field in all_enums:
            allowed_values = self.enum_types[field.type_name].values()
            valuestr = ", ".join(str(v) for v in sorted(allowed_values))
            yield "        {} = Literal[{}]".format(field.py_type, valuestr)

        yield "    except ImportError:"
        yield "        pass"

        yield ""
        yield ""
        yield "class {}(p.MessageType):".format(message.name)

        if msg_id is not None:
            yield "    MESSAGE_WIRE_TYPE = {}".format(msg_id)

        if unstable is not None:
            yield "    UNSTABLE = True"

        if fields:
            yield ""
            yield from self.create_init_method(fields)
            yield ""
            yield from self.create_fields_method(fields)

        if not fields and not msg_id:
            yield "    pass"

    def process_enum(self, enum):
        logging.debug("Processing enum {}".format(enum.name))

        # file header
        yield "if __debug__:"
        yield "    try:"
        yield "        from typing_extensions import Literal  # noqa: F401"
        yield "    except ImportError:"
        yield "        pass"
        yield ""

        for value in enum.value:
            # Remove the type name from the beginning of the constant.
            # For example "PinMatrixRequestType_Current" -> "Current"
            enum_prefix = enum.name
            name = value.name
            name = strip_leader(name, enum_prefix)

            # If the type name ends with "Type" but the constants use the name
            # without "Type", strip that shorter prefix as well.
            # For example "ButtonRequestType" & "ButtonRequest_Other" -> "Other"
            if enum_prefix.endswith("Type"):
                enum_prefix, _ = enum_prefix.rsplit("Type", 1)
                name = strip_leader(name, enum_prefix)

            self.enum_types[enum.name][value.name] = value.number
            yield f"{name}: Literal[{value.number}] = {value.number}"

    def process_messages(self, messages, include_deprecated=False):
        for message in sorted(messages, key=lambda m: m.name):
            self.write_to_file(
                message.name, self.process_message(message, include_deprecated)
            )

    def process_enums(self, enums):
        for enum in sorted(enums, key=lambda e: e.name):
            self.write_to_file(enum.name, self.process_enum(enum))

    def write_to_file(self, name, out):
        # Write generated source code to the given file
        logging.debug("Writing file {}.py".format(name))
        with open(os.path.join(self.out_dir, name + ".py"), "w") as f:
            f.write(AUTO_HEADER)
            f.write("# fmt: off\n")
            f.write("# isort:skip_file\n")
            for line in out:
                f.write(line + "\n")

    def write_init_py(self):
        filename = os.path.join(self.out_dir, "__init__.py")
        with open(filename, "w") as init_py:
            init_py.write(AUTO_HEADER)
            init_py.write("# fmt: off\n")
            init_py.write("# isort:skip_file\n\n")
            for message in sorted(self.messages, key=lambda m: m.name):
                init_py.write(self.create_message_import(message.name) + "\n")
            for enum in sorted(self.enums, key=lambda e: e.name):
                init_py.write("from . import {}\n".format(enum.name))

    def write_classes(self, out_dir, init_py=True, include_deprecated=False):
        self.out_dir = out_dir
        self.process_enums(self.enums)
        self.process_messages(self.messages, include_deprecated)
        if init_py:
            self.write_init_py()


if __name__ == "__main__":
    parser = argparse.ArgumentParser()
    # fmt: off
    parser.add_argument("proto", nargs="+", help="Protobuf definition files")
    parser.add_argument("-o", "--out-dir", help="Directory for generated source code")
    parser.add_argument("-P", "--protobuf-module", default="protobuf", help="Name of protobuf module")
    parser.add_argument("-l", "--no-init-py", action="store_true", help="Do not generate __init__.py with list of modules")
    parser.add_argument("--message-type", default="MessageType", help="Name of enum with message IDs")
    parser.add_argument("-I", "--protoc-include", action="append", help="protoc include path")
    parser.add_argument("-v", "--verbose", action="store_true", help="Print debug messages")
    parser.add_argument("-d", "--include-deprecated", action="store_true", help="Include deprecated fields")
    # fmt: on
    args = parser.parse_args()

    if args.verbose:
        logging.basicConfig(level=logging.DEBUG)

    protoc_includes = args.protoc_include or (os.environ.get("PROTOC_INCLUDE"),)
    descriptor_proto = protoc(args.proto, protoc_includes)
    descriptor = Descriptor(descriptor_proto, args.message_type, args.protobuf_module)

    with tempfile.TemporaryDirectory() as tmpdir:
        descriptor.write_classes(tmpdir, not args.no_init_py, args.include_deprecated)

        # Remove stale files generated by a previous run; they are recognized
        # by the AUTO_HEADER on their first line.
        for filename in os.listdir(args.out_dir):
            pathname = os.path.join(args.out_dir, filename)
            try:
                with open(pathname, "r") as f:
                    if next(f, None) == AUTO_HEADER:
                        os.unlink(pathname)
            except Exception:
                pass

        # Copy over the freshly generated files.
        for filename in os.listdir(tmpdir):
            src = os.path.join(tmpdir, filename)
            shutil.copy(src, args.out_dir)
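
# Typical invocation (illustrative; adjust paths and module name to your tree):
#     ./pb2py -o trezorlib/messages -P "..protobuf" messages.proto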