'''Generate header file for nanopb from a ProtoBuf FileDescriptorSet.'''
-import google.protobuf.descriptor_pb2 as descriptor
-import nanopb_pb2
+# Wrap the imports so a missing dependency produces an actionable hint
+# instead of a raw traceback.  Catch only ImportError: a bare 'except'
+# would also swallow KeyboardInterrupt/SystemExit and mislabel any
+# unrelated failure inside the imported module as a missing package.
+try:
+    import google.protobuf.descriptor_pb2 as descriptor
+except ImportError:
+    print
+    print "*************************************************************"
+    print "*** Could not import the Google protobuf Python libraries ***"
+    print "*** Try installing package 'python-protobuf' or similar. ***"
+    print "*************************************************************"
+    print
+    raise
+
+try:
+    import nanopb_pb2
+except ImportError:
+    print
+    print "***************************************************************"
+    print "*** Could not import the precompiled nanopb_pb2.py. ***"
+    print "*** Run 'make' in the 'generator' folder to update the file.***"
+    print "***************************************************************"
+    print
+    raise
+
+
import os.path
# Values are tuple (c type, pb ltype)
result += '\n &%s_default}' % (self.struct_name + self.name)
return result
+
+    def needs_32bit_pb_field_t(self):
+        '''Determine if this field needs 32bit pb_field_t structure to compile properly.
+        Returns True, False or a C-expression for assert.'''
+        # Tag number and declared max_size are known at generation time,
+        # so an overflow of the 8-bit pb_field_t slots can be decided here.
+        if self.tag > 255 or self.max_size > 255:
+            return True
+
+        # A submessage's size is only known to the C compiler; return a
+        # C expression string so the caller can emit a compile-time
+        # STATIC_ASSERT instead of deciding now.  For arrays, measure one
+        # element ('%s[0]') rather than the whole array member.
+        if self.ltype == 'PB_LTYPE_SUBMESSAGE':
+            if self.htype == 'PB_HTYPE_ARRAY':
+                return 'pb_membersize(%s, %s[0]) > 255' % (self.struct_name, self.name)
+            else:
+                return 'pb_membersize(%s, %s) > 255' % (self.struct_name, self.name)
+
+        return False
class Message:
def __init__(self, names, desc):
'''
for k, v in data.items():
v.discard(k) # Ignore self dependencies
- extra_items_in_deps = reduce(set.union, data.values()) - set(data.keys())
+ extra_items_in_deps = reduce(set.union, data.values(), set()) - set(data.keys())
data.update(dict([(item, set()) for item in extra_items_in_deps]))
while True:
ordered = set(item for item,dep in data.items() if not dep)
if msgname in message_by_name:
yield message_by_name[msgname]
-def generate_header(headername, enums, messages):
+def generate_header(dependencies, headername, enums, messages):
'''Generate content for a header file.
Generates strings, which should be concatenated and stored to file.
'''
yield '#define _PB_%s_\n' % symbol
yield '#include <pb.h>\n\n'
+ for dependency in dependencies:
+ noext = os.path.splitext(dependency)[0]
+ yield '#include "%s.pb.h"\n' % noext
+ yield '\n'
+
yield '/* Enum definitions */\n'
for enum in enums:
yield str(enum) + '\n\n'
for msg in messages:
yield msg.fields_declaration() + '\n'
+ count_required_fields = lambda m: len([f for f in msg.fields if f.htype == 'PB_HTYPE_REQUIRED'])
+ largest_msg = max(messages, key = count_required_fields)
+ largest_count = count_required_fields(largest_msg)
+ if largest_count > 64:
+ yield '\n/* Check that missing required fields will be properly detected */\n'
+ yield '#if PB_MAX_REQUIRED_FIELDS < %d\n' % largest_count
+ yield '#warning Properly detecting missing required fields in %s requires \\\n' % largest_msg.name
+ yield ' setting PB_MAX_REQUIRED_FIELDS to %d or more.\n' % largest_count
+ yield '#endif\n'
+
+ worst = False
+ worst_field = ''
+ for msg in messages:
+ for field in msg.fields:
+ status = field.needs_32bit_pb_field_t()
+ if status == True:
+ worst = True
+ worst_field = str(field.struct_name) + '.' + str(field.name)
+ elif status != False:
+ if worst == False:
+ worst = status
+ elif worst != True:
+ worst += ' || ' + status
+
+ if worst != False:
+ yield '\n/* Check that field information fits in pb_field_t */\n'
+ yield '#ifndef PB_MANY_FIELDS\n'
+ if worst == True:
+ yield '#error Field descriptor for %s is too large. Define PB_MANY_FIELDS to fix this.\n' % worst_field
+ else:
+ yield 'STATIC_ASSERT(!(%s), YOU_MUST_DEFINE_PB_MANY_FIELDS)\n' % worst
+ yield '#endif\n'
+
+ # End of header
yield '\n#endif\n'
def generate_source(headername, enums, messages):
print "Writing to " + headername + " and " + sourcename
+ # List of .proto files that should not be included in the C header file
+ # even if they are mentioned in the source .proto.
+ excludes = ['nanopb.proto', 'google/protobuf/descriptor.proto']
+ dependencies = [d for d in fdesc.file[0].dependency if d not in excludes]
+
header = open(headername, 'w')
- for part in generate_header(headerbasename, enums, messages):
+ for part in generate_header(dependencies, headerbasename, enums, messages):
header.write(part)
source = open(sourcename, 'w')