-
Notifications
You must be signed in to change notification settings - Fork 25
/
protobin_to_proto.py
executable file
·396 lines (317 loc) · 13.7 KB
/
protobin_to_proto.py
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
158
159
160
161
162
163
164
165
166
167
168
169
170
171
172
173
174
175
176
177
178
179
180
181
182
183
184
185
186
187
188
189
190
191
192
193
194
195
196
197
198
199
200
201
202
203
204
205
206
207
208
209
210
211
212
213
214
215
216
217
218
219
220
221
222
223
224
225
226
227
228
229
230
231
232
233
234
235
236
237
238
239
240
241
242
243
244
245
246
247
248
249
250
251
252
253
254
255
256
257
258
259
260
261
262
263
264
265
266
267
268
269
270
271
272
273
274
275
276
277
278
279
280
281
282
283
284
285
286
287
288
289
290
291
292
293
294
295
296
297
298
299
300
301
302
303
304
305
306
307
308
309
310
311
312
313
314
315
316
317
318
319
320
321
322
323
324
325
326
327
328
329
330
331
332
333
334
335
336
337
338
339
340
341
342
343
344
345
346
347
348
349
350
351
352
353
354
355
356
357
358
359
360
361
362
363
364
365
366
367
368
369
370
371
372
373
374
375
376
377
378
379
380
381
382
383
384
385
386
387
388
389
390
391
392
393
394
395
396
#!/usr/bin/env python
import argparse
import os
import sys
import google.protobuf.descriptor_pb2 as pb2
from google.protobuf.internal.decoder import _DecodeVarint
from google.protobuf.message import DecodeError
import zlib
def is_valid_path(filepath):
    """Return True when *filepath* looks like a plausible .proto file path.

    Accepts ``str`` or ``bytes`` (decoded as UTF-8).  A path is valid when
    every character (case-insensitively) belongs to a conservative whitelist
    of letters, digits and common path punctuation.

    Returns False for undecodable bytes and — fix — for any non-str,
    non-bytes input (the original let ``str(x, "utf8")`` raise TypeError).
    """
    whitelist = set("abcdefghijklmnopqrstuvwxyz0123456789-_/$,.[]()")
    if not isinstance(filepath, str):
        try:
            filepath = str(filepath, "utf8")
        except (UnicodeDecodeError, TypeError):
            # Invalid UTF-8, or not a bytes-like object at all.
            return False
    return all(char in whitelist for char in filepath.lower())
def discover_fd_proto_fields():
    """Reflectively gather every field number FileDescriptorProto defines.

    The generated pb2 class exposes one ``*_FIELD_NUMBER`` constant per
    field; their values are the valid wire-format field numbers.
    """
    return [
        value
        for key, value in pb2.FileDescriptorProto.__dict__.items()
        if key.endswith("FIELD_NUMBER")
    ]
class FileDescriptorWalker:
    """Walks a raw protobuf wire stream field-by-field to approximate how
    many bytes at the start of ``data`` belong to one serialized
    FileDescriptorProto.

    The stream has no length prefix, so the walk stops at the first field
    number FileDescriptorProto does not define, or when a single-occurrence
    field (name/package) appears a second time — both taken as signs that
    the bytes of a *next* message have begun.
    """

    # All valid FileDescriptorProto field numbers, discovered reflectively.
    KNOWN_FIELD_NUMBERS = discover_fd_proto_fields()

    def __init__(self, data: bytearray):
        # Candidate bytes; the message is assumed to start at offset 0.
        self.data = data
        # Fields that legally occur at most once per FileDescriptorProto.
        self.no_double_fields = [pb2.FileDescriptorProto.NAME_FIELD_NUMBER,
                                 pb2.FileDescriptorProto.PACKAGE_FIELD_NUMBER]
        # Field numbers encountered so far during the walk.
        self.seen_fields = []
        # Byte length of the walked prefix; set by approximate_size().
        self._size = 0

    def approximate_size(self) -> None:
        """Scan tag/payload pairs from offset 0 and record in ``self._size``
        the offset where the walk terminated."""
        end = False
        offset = 0
        while not end and offset < len(self.data):
            # A tag varint encodes (field_number << 3) | wire_type.
            # NOTE(review): the 0xfffffff8 mask truncates field numbers
            # above 2^29 — harmless here since known field numbers are small.
            value, offset = _DecodeVarint(self.data, offset)
            field = (value & 0xfffffff8) >> 3
            wire_type = value & 0x7  # last 3 bits
            # If we have seen certain fields already, we might have entered a next encoded protobuffer.
            if (field not in self.KNOWN_FIELD_NUMBERS) or \
                    (field in self.no_double_fields and field in self.seen_fields):
                end = True
                break
            self.seen_fields.append(field)
            # Varint
            if wire_type == 0:
                _, offset = _DecodeVarint(self.data, offset)
            # 64-bit
            elif wire_type == 1:
                offset += 8
            # Length-delimited (~string/bytes)
            elif wire_type == 2:
                value, offset = _DecodeVarint(self.data, offset)
                offset += value
            # Groups - deprecated; start/end markers carry no payload beyond
            # the tag itself, which was already consumed above.
            elif wire_type == 3 or wire_type == 4:
                continue
            # 32-bit
            elif wire_type == 5:
                offset += 4
            else:
                end = True
        self._size = offset

    def get_size(self) -> int:
        # Result of the most recent approximate_size() call (0 if never run).
        return self._size
class ProtobinDecompiler:
    """Finds FileDescriptorProto messages hidden inside arbitrary binary
    blobs — either raw wire-encoded or gzip-compressed — and decompiles
    each one back into .proto source text."""

    # FieldDescriptorProto.Label enum value -> .proto label keyword.
    label_map = {
        pb2.FieldDescriptorProto.LABEL_OPTIONAL: "optional",
        pb2.FieldDescriptorProto.LABEL_REQUIRED: "required",
        pb2.FieldDescriptorProto.LABEL_REPEATED: "repeated"
    }
    # FieldDescriptorProto.Type enum value -> .proto scalar type name.
    # TYPE_MESSAGE and TYPE_ENUM are deliberately absent: those fields carry
    # their type name in field.type_name instead (see decompile_field).
    type_map = {
        pb2.FieldDescriptorProto.TYPE_DOUBLE: "double",
        pb2.FieldDescriptorProto.TYPE_FLOAT: "float",
        pb2.FieldDescriptorProto.TYPE_INT64: "int64",
        pb2.FieldDescriptorProto.TYPE_UINT64: "uint64",
        pb2.FieldDescriptorProto.TYPE_INT32: "int32",
        pb2.FieldDescriptorProto.TYPE_FIXED64: "fixed64",
        pb2.FieldDescriptorProto.TYPE_FIXED32: "fixed32",
        pb2.FieldDescriptorProto.TYPE_BOOL: "bool",
        pb2.FieldDescriptorProto.TYPE_STRING: "string",
        pb2.FieldDescriptorProto.TYPE_BYTES: "bytes",
        pb2.FieldDescriptorProto.TYPE_UINT32: "uint32",
        pb2.FieldDescriptorProto.TYPE_SFIXED32: "sfixed32",
        pb2.FieldDescriptorProto.TYPE_SFIXED64: "sfixed64",
        pb2.FieldDescriptorProto.TYPE_SINT32: "sint32",
        pb2.FieldDescriptorProto.TYPE_SINT64: "sint64"
    }

    def __init__(self):
        self.out = None          # stream the current .proto text is written to
        self.indent_level = 0    # current block nesting depth for output

    def decompile(self, file, out_dir=".", stdout=False):
        """Scan *file* for embedded descriptors and emit one .proto per hit.

        file    -- binary file object; read fully and closed here.
        out_dir -- directory .proto files are written under (unused when
                   stdout is True).
        stdout  -- when True, write all decompiled text to sys.stdout
                   instead of creating files.
        """
        data = file.read()
        file.close()
        # Collect all hidden FileDescriptor protobuffer objects
        descriptors = []
        # Discover wire-encoded FileDescriptorProto's
        print("Checking for wire-encoded proto files..")
        descriptors.extend(self.discover_encoded_file_descriptor(data))
        print("")
        # Discover GZipped FileDescriptorProto's
        print("Checking for GZIPPED proto files..")
        descriptors.extend(self.discover_gzipped_file_descriptor(data))
        print("")
        for descriptor in descriptors:
            descriptor_name = descriptor.name
            if stdout:
                self.out = sys.stdout
            else:
                out_file_name = os.path.join(out_dir, descriptor_name)
                out_full_dir = os.path.dirname(out_file_name)
                # Fix: dirname is "" for bare filenames, and os.makedirs("")
                # raises FileNotFoundError — only create a directory when
                # one is actually present in the path.
                if out_full_dir and not os.path.exists(out_full_dir):
                    os.makedirs(out_full_dir)
                self.out = open(out_file_name, "w")
                print(out_file_name)
            self.indent_level = 0
            try:
                self.decompile_file_descriptor(descriptor)
            finally:
                # Fix: close per-descriptor file handles (never stdout),
                # which the original leaked.
                if not stdout:
                    self.out.close()

    def discover_encoded_file_descriptor(self, data):
        """Find raw wire-encoded FileDescriptorProto messages in *data*.

        Strategy: every FileDescriptorProto begins with its filename
        (field 1, a length-delimited string ending in ".proto").  For each
        ".proto" occurrence, backtrack to find the length varint that
        exactly covers the filename, then parse forward from the tag byte
        just before it.  Returns a list of parsed descriptors.
        """
        descriptors = []
        proto_bytes = ".proto".encode()
        proto_namespace = ".protobuf".encode()
        offset = 0
        while offset < len(data):
            try:
                p = data.index(proto_bytes, offset)
                # Next iteration must start after p!
                offset = p + 1
                # Skip ".protobuf" namespace strings masquerading as filenames.
                if data[p:p + len(proto_namespace)] == proto_namespace:
                    continue
                # Backtrack_range is allowed to flow back into previous message
                for diff in range(150):
                    try:
                        varint_pos = p - diff
                        # Fix: a negative position would silently index from
                        # the END of the buffer; backtracking further is
                        # pointless once we pass the start of the data.
                        if varint_pos < 0:
                            break
                        value, str_start_idx = _DecodeVarint(data, varint_pos)
                        pathlength = p + len(proto_bytes) - str_start_idx
                        filepath = data[str_start_idx:str_start_idx + pathlength]
                        # Hard constraints:
                        # pathlength is never allowed to be less than zero!
                        # filepath MUST always contain valid characters!
                        #
                        # Result should match the length of the entire filepath string
                        if pathlength < 0 or \
                                not is_valid_path(filepath) or \
                                value != pathlength:
                            continue
                        # Locate the index of the Tag which indicates the filename field.
                        # This is limited to 1 byte since we know FileDescriptorProto has less than 2^4 fields
                        proto_start_offset = varint_pos - 1
                        proto_stream = data[proto_start_offset:]
                        proto_walker = FileDescriptorWalker(proto_stream)
                        proto_walker.approximate_size()
                        approx_size = proto_walker.get_size()
                        # The walker may overshoot; shrink the slice until
                        # protobuf accepts it as a complete message.
                        for adj_size in range(approx_size, 0, -1):
                            try:
                                candidate = proto_stream[:adj_size]
                                descriptor = pb2.FileDescriptorProto.FromString(candidate)
                                # Unnamed proto's are malformed and we don't want them!
                                if len(descriptor.name) > 0:
                                    print("HIT `%s`" % descriptor.name)
                                    descriptors.append(descriptor)
                                    break
                            except DecodeError:
                                pass
                        break
                    except DecodeError:
                        pass
            except ValueError:
                # End of file reached
                break
        return descriptors

    def discover_gzipped_file_descriptor(self, data):
        """Find gzip-compressed FileDescriptorProto messages in *data*.

        Scans for gzip magic headers (deflate method), decompresses each
        candidate stream incrementally, and tries to parse the result as a
        FileDescriptorProto.  Returns a list of parsed descriptors.
        """
        descriptors = []
        # Magic string / ID
        # Including 'deflate' compression method
        gzip_header = bytearray.fromhex('1f8b08')
        offset = 0
        while offset < len(data):
            try:
                p = data.index(gzip_header, offset)
                # Next iteration must start after p!
                offset = p + 1
                # Setup new decompression system
                decompressed_data = bytearray()
                # MAX_WBITS | 32 enables automatic gzip/zlib header detection.
                d = zlib.decompressobj(zlib.MAX_WBITS | 32)
                inner_offset = p
                try:
                    while not d.eof:
                        # Slice data per 64 bytes
                        chunk = data[inner_offset:inner_offset + 64]
                        # Fix: a stream truncated at end-of-data never sets
                        # d.eof — the original looped forever feeding b"".
                        if not chunk:
                            break
                        inner_offset += len(chunk)
                        decompressed_data.extend(d.decompress(chunk))
                except zlib.error:
                    # Invalid compression block encountered
                    continue
                # Decompressed data should be exact!
                try:
                    # Bytearray MUST be converted to a bytestring
                    proto_data = bytes(decompressed_data)
                    descriptor = pb2.FileDescriptorProto.FromString(proto_data)
                    # Unnamed proto's are malformed and we don't want them!
                    if len(descriptor.name) > 0:
                        print("HIT `%s`" % descriptor.name)
                        descriptors.append(descriptor)
                except DecodeError:
                    pass
                # Test remaining data and calculate next offset
                remaining_data = d.unused_data
                offset = inner_offset - len(remaining_data)
            except ValueError:
                # End of file reached
                break
        return descriptors

    def decompile_file_descriptor(self, descriptor):
        """Write one complete .proto source file for *descriptor* to self.out."""
        # Write meta information of proto file, this is equivalent to a header
        if descriptor.HasField("syntax"):
            self.write("syntax = \"%s\";\n" % descriptor.syntax)
        else:
            # Descriptors without an explicit syntax field default to proto2.
            self.write("syntax = \"proto2\";\n")
        if descriptor.HasField("package"):
            self.write("package %s;\n" % descriptor.package)
        self.write("\n// Proto extractor compiled unit - https://github.com/HearthSim/proto-extractor\n\n")
        for dep in descriptor.dependency:
            self.write("import \"%s\";\n" % dep)
        self.write("\n")
        # enumerations
        for enum in descriptor.enum_type:
            self.decompile_enum_type(enum)
        # messages
        for msg in descriptor.message_type:
            self.write("\n")
            self.decompile_message_type(msg)
        # services
        for service in descriptor.service:
            self.write("\n")
            self.decompile_service(service)

    def decompile_message_type(self, msg):
        """Recursively write a message definition (nested types first)."""
        self.write("message %s {\n" % msg.name)
        self.indent_level += 1
        # deserialize nested messages
        for nested_msg in msg.nested_type:
            self.decompile_message_type(nested_msg)
        # deserialize nested enumerations
        for nested_enum in msg.enum_type:
            self.decompile_enum_type(nested_enum)
        # deserialize fields
        for field in msg.field:
            self.decompile_field(field)
        # extension ranges; 0x20000000 (2^29) is protobuf's exclusive upper
        # bound on field numbers, rendered as the keyword "max".
        for ext_range in msg.extension_range:
            end_name = ext_range.end
            if end_name == 0x20000000:
                end_name = "max"
            self.write("extensions %s to %s;\n" % (ext_range.start, end_name))
        # extensions
        for extension in msg.extension:
            self.decompile_extension(extension)
        self.indent_level -= 1
        self.write("}\n")

    def decompile_extension(self, extension):
        """Write an `extend` block for a single extension field."""
        self.write("extend %s {\n" % extension.extendee)
        self.indent_level += 1
        self.decompile_field(extension)
        self.indent_level -= 1
        self.write("}\n")

    def decompile_field(self, field):
        """Write one field declaration: label, type, name, number, default."""
        # type name is either another message/enum or a standard scalar type
        if field.type in (pb2.FieldDescriptorProto.TYPE_MESSAGE,
                          pb2.FieldDescriptorProto.TYPE_ENUM):
            type_name = field.type_name
        else:
            type_name = self.type_map[field.type]
        # build basic field string with label name
        field_str = "%s %s %s = %d" % (self.label_map[field.label], type_name,
                                       field.name, field.number)
        # add default value if set
        if field.HasField("default_value"):
            def_val = field.default_value
            # string default values have to be put in quotes
            if field.type == pb2.FieldDescriptorProto.TYPE_STRING:
                def_val = "\"%s\"" % def_val
            field_str += " [default = %s]" % def_val
        field_str += ";\n"
        self.write(field_str)

    def decompile_enum_type(self, enum):
        """Write an enum definition with all of its values."""
        self.write("enum %s {\n" % enum.name)
        self.indent_level += 1
        # deserialize enum values
        for value in enum.value:
            self.write("%s = %d;\n" % (value.name, value.number))
        self.indent_level -= 1
        self.write("}\n")

    def decompile_service(self, service):
        """Write a service definition with all of its rpc methods."""
        self.write("service %s {\n" % service.name)
        self.indent_level += 1
        for method in service.method:
            self.decompile_method(method)
        self.indent_level -= 1
        self.write("}\n")

    def decompile_method(self, method):
        """Write a single rpc method declaration."""
        self.write("rpc %s (%s) returns (%s);\n" %
                   (method.name, method.input_type, method.output_type))

    def write(self, text):
        """Write *text* to the current output, prefixed by indent_level tabs."""
        self.out.write("\t" * self.indent_level)
        self.out.write(text)
if __name__ == "__main__":
    app = ProtobinDecompiler()
    parser = argparse.ArgumentParser()
    # Fix: dropped the bogus `default=sys.stdout` — with nargs="+" the
    # positional is required, so that (type-incompatible) default was dead.
    parser.add_argument(
        "infiles", nargs="+", type=argparse.FileType("rb"),
        help="binary file(s) to scan for embedded proto definitions")
    parser.add_argument("-o", dest="outdir", help="output directory")
    args = parser.parse_args()
    # Without an output directory, decompiled text is written to stdout.
    stdout = args.outdir is None
    for file in args.infiles:
        app.decompile(file, args.outdir, stdout)