self.values = [(self.names + x.name, x.number) for x in desc.value]
def __str__(self):
    """Render this enum as a C 'typedef enum' declaration string."""
    # Tag the enum with its own name (_Name) so it can be forward-referenced.
    members = ',\n'.join([" %s = %d" % pair for pair in self.values])
    return ('typedef enum _%s {\n' % self.names
            + members
            + '\n} %s;' % self.names)
prev_field_name is the name of the previous field or None.
'''
result = ' {%d, ' % self.tag
- result += self.htype
+ result += '(pb_type_t) ((int) ' + self.htype
if self.ltype is not None:
- result += ' | ' + self.ltype
- result += ',\n'
+ result += ' | (int) ' + self.ltype
+ result += '),\n'
if prev_field_name is None:
result += ' offsetof(%s, %s),' % (self.struct_name, self.name)
result += '\n &%s_default}' % (self.struct_name + self.name)
return result
+
def largest_field_value(self):
    '''Determine if this field needs 16bit or 32bit pb_field_t structure to compile properly.
    Returns numeric value or a C-expression for assert.'''
    # Non-submessage fields: the width requirement is just the largest
    # numeric property baked into the descriptor.
    if self.ltype != 'PB_LTYPE_SUBMESSAGE':
        return max(self.tag, self.max_size, self.max_count)

    # Submessage sizes are only known to the C compiler, so hand back a
    # pb_membersize() expression for a compile-time STATIC_ASSERT.
    if self.htype == 'PB_HTYPE_ARRAY':
        return 'pb_membersize(%s, %s[0])' % (self.struct_name, self.name)
    return 'pb_membersize(%s, %s)' % (self.struct_name, self.name)
+
class Message:
def __init__(self, names, desc):
return [str(field.ctype) for field in self.fields]
def __str__(self):
    """Render this message as a C 'typedef struct' declaration string."""
    # Tag the struct with its own name (_Name) so self/forward references work.
    body = '\n'.join([str(f) for f in self.ordered_fields])
    return ('typedef struct _%s {\n' % self.name
            + body
            + '\n} %s;' % self.name)
'''
for k, v in data.items():
v.discard(k) # Ignore self dependencies
- extra_items_in_deps = reduce(set.union, data.values()) - set(data.keys())
+ extra_items_in_deps = reduce(set.union, data.values(), set()) - set(data.keys())
data.update(dict([(item, set()) for item in extra_items_in_deps]))
while True:
ordered = set(item for item,dep in data.items() if not dep)
for msg in messages:
yield msg.fields_declaration() + '\n'
+ if messages:
+ count_required_fields = lambda m: len([f for f in msg.fields if f.htype == 'PB_HTYPE_REQUIRED'])
+ largest_msg = max(messages, key = count_required_fields)
+ largest_count = count_required_fields(largest_msg)
+ if largest_count > 64:
+ yield '\n/* Check that missing required fields will be properly detected */\n'
+ yield '#if PB_MAX_REQUIRED_FIELDS < %d\n' % largest_count
+ yield '#error Properly detecting missing required fields in %s requires \\\n' % largest_msg.name
+ yield ' setting PB_MAX_REQUIRED_FIELDS to %d or more.\n' % largest_count
+ yield '#endif\n'
+
+ worst = 0
+ worst_field = ''
+ checks = []
+ for msg in messages:
+ for field in msg.fields:
+ status = field.largest_field_value()
+ if isinstance(status, (str, unicode)):
+ checks.append(status)
+ elif status > worst:
+ worst = status
+ worst_field = str(field.struct_name) + '.' + str(field.name)
+
+ if worst > 255 or checks:
+ yield '\n/* Check that field information fits in pb_field_t */\n'
+
+ if worst < 65536:
+ yield '#if !defined(PB_FIELD_16BIT) && !defined(PB_FIELD_32BIT)\n'
+ if worst > 255:
+ yield '#error Field descriptor for %s is too large. Define PB_FIELD_16BIT to fix this.\n' % worst_field
+ else:
+ assertion = ' && '.join(str(c) + ' < 256' for c in checks)
+ yield 'STATIC_ASSERT((%s), YOU_MUST_DEFINE_PB_FIELD_16BIT)\n' % assertion
+ yield '#endif\n\n'
+
+ if worst > 65535 or checks:
+ yield '#if !defined(PB_FIELD_32BIT)\n'
+ if worst > 65535:
+ yield '#error Field descriptor for %s is too large. Define PB_FIELD_32BIT to fix this.\n' % worst_field
+ else:
+ assertion = ' && '.join(str(c) + ' < 65536' for c in checks)
+ yield 'STATIC_ASSERT((%s), YOU_MUST_DEFINE_PB_FIELD_32BIT)\n' % assertion
+ yield '#endif\n'
+
+ # End of header
yield '\n#endif\n'
def generate_source(headername, enums, messages):
# List of .proto files that should not be included in the C header file
# even if they are mentioned in the source .proto.
- excludes = ['nanopb.proto']
+ excludes = ['nanopb.proto', 'google/protobuf/descriptor.proto']
dependencies = [d for d in fdesc.file[0].dependency if d not in excludes]
header = open(headername, 'w')