#!/usr/bin/env python
# Merge or print the coverage data collected by asan's coverage.
# Input files are sequences of 4-byte integers.
# We need to merge these integers into a set and then
# either print them (as hex) or dump them into another file.
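# Illustrative invocations (the file names below are examples only; the real
# interface is documented in Usage()):
#   sancov.py print a.out.1234.sancov                     # PCs as hex, one per line
#   sancov.py merge a.out.*.sancov > a.out.merged.sancov  # binary merge to stdout
#   sancov.py rawunpack a.out.1234.sancov.raw             # expects a.out.1234.sancov.map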
import array
import bisect
import glob
import os.path
import struct
import subprocess
import sys

prog_name = ""

def Usage():
  sys.stderr.write(
    "Usage: \n" + \
    " " + prog_name + " merge FILE [FILE...] > OUTPUT\n" \
    " " + prog_name + " print FILE [FILE...]\n" \
    " " + prog_name + " unpack FILE [FILE...]\n" \
    " " + prog_name + " rawunpack FILE [FILE ...]\n" \
    " " + prog_name + " missing BINARY < LIST_OF_PCS\n" \
    "\n")
  exit(1)

def CheckBits(bits):
  if bits != 32 and bits != 64:
    raise Exception("Wrong bitness: %d" % bits)

def TypeCodeForBits(bits):
  CheckBits(bits)
  return 'L' if bits == 64 else 'I'

def TypeCodeForStruct(bits):
  CheckBits(bits)
  return 'Q' if bits == 64 else 'I'
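
# Note: 'L' (array module) is a platform-sized C unsigned long, whereas 'Q'
# (struct module) is always 8 bytes, so the two helpers are not interchangeable
# even though both map 64-bit data to an unsigned integer type code.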

kMagic32SecondHalf = 0xFFFFFF32
kMagic64SecondHalf = 0xFFFFFF64
kMagicFirstHalf = 0xC0BFFFFF
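
# Together these halves form the 8-byte file magic: written as two 32-bit words
# in native byte order, the header reads as 0xC0BFFFFFFFFFFF64 for 64-bit data
# and 0xC0BFFFFFFFFFFF32 for 32-bit data (see MagicForBits and
# ReadMagicAndReturnBitness below).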

def MagicForBits(bits):
  CheckBits(bits)
  if sys.byteorder == 'little':
    return [kMagic64SecondHalf if bits == 64 else kMagic32SecondHalf, kMagicFirstHalf]
  else:
    return [kMagicFirstHalf, kMagic64SecondHalf if bits == 64 else kMagic32SecondHalf]

def ReadMagicAndReturnBitness(f, path):
  magic_bytes = f.read(8)
  magic_words = struct.unpack('II', magic_bytes)
  bits = 0
  idx = 1 if sys.byteorder == 'little' else 0
  if magic_words[idx] == kMagicFirstHalf:
    if magic_words[1-idx] == kMagic64SecondHalf:
      bits = 64
    elif magic_words[1-idx] == kMagic32SecondHalf:
      bits = 32
  if bits == 0:
    raise Exception('Bad magic word in %s' % path)
  return bits

def ReadOneFile(path):
  with open(path, mode="rb") as f:
    f.seek(0, 2)
    size = f.tell()
    f.seek(0, 0)
    if size < 8:
      raise Exception('File %s is short (< 8 bytes)' % path)
    bits = ReadMagicAndReturnBitness(f, path)
    size -= 8
    w = size * 8 // bits
    s = struct.unpack_from(TypeCodeForStruct(bits) * (w), f.read(size))
  sys.stderr.write(
    "%s: read %d %d-bit PCs from %s\n" % (prog_name, w, bits, path))
  return s
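
# Layout of a .sancov file, as read above: the 8-byte magic followed by N PCs
# of bits/8 bytes each. For example (illustrative), a 64-bit file holding
# three PCs is 8 + 3 * 8 = 32 bytes long.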

def Merge(files):
  s = set()
  for f in files:
    s = s.union(set(ReadOneFile(f)))
  sys.stderr.write(
    "%s: %d files merged; %d PCs total\n" % (prog_name, len(files), len(s))
  )
  return sorted(s)

def PrintFiles(files):
  if len(files) > 1:
    s = Merge(files)
  else:  # If there is just one file, print the PCs in order.
    s = ReadOneFile(files[0])
    sys.stderr.write("%s: 1 file merged; %d PCs total\n" % (prog_name, len(s)))
  for i in s:
    print("0x%x" % i)

def MergeAndPrint(files):
  if sys.stdout.isatty():
    # Refuse to dump binary data to a terminal.
    Usage()
  s = Merge(files)
  bits = 32
  if max(s) > 0xFFFFFFFF:
    bits = 64
  # On Python 3 write raw bytes via sys.stdout.buffer; Python 2's stdout
  # accepts them directly.
  stdout_buf = getattr(sys.stdout, 'buffer', sys.stdout)
  array.array('I', MagicForBits(bits)).tofile(stdout_buf)
  a = struct.pack(TypeCodeForStruct(bits) * len(s), *s)
  stdout_buf.write(a)
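
# A "packed" coverage file, as consumed by UnpackOneFile below, is a sequence
# of records, each laid out as:
#   int32  pid
#   uint32 module_length
#   uint32 blob_size
#   module_length bytes of module path, then blob_size bytes of coverage data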

def UnpackOneFile(path):
  with open(path, mode="rb") as f:
    sys.stderr.write("%s: unpacking %s\n" % (prog_name, path))
    while True:
      header = f.read(12)
      if not header: return
      if len(header) < 12:
        break
      pid, module_length, blob_size = struct.unpack('iII', header)
      module = f.read(module_length).decode('utf-8')
      blob = f.read(blob_size)
      assert(len(module) == module_length)
      assert(len(blob) == blob_size)
      extracted_file = "%s.%d.sancov" % (module, pid)
      sys.stderr.write("%s: extracting %s\n" % (prog_name, extracted_file))
      # The packed file may contain multiple blobs for the same pid/module
      # pair. Append to the end of the file instead of overwriting.
      with open(extracted_file, 'ab') as f2:
        f2.write(blob)
  # We only get here if a record header was truncated; report the failure.
  raise Exception('Error reading file %s' % path)

def Unpack(files):
  for f in files:
    UnpackOneFile(f)
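
# UnpackOneRawFile below expects a text .sancov.map file next to the
# .sancov.raw file: the first line is the bitness (32 or 64), and every
# following line is "start end base module_path" with hex addresses, e.g.
# (illustrative values):
#   64
#   7f2d10000000 7f2d10200000 7f2d10000000 /usr/lib/libfoo.so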

def UnpackOneRawFile(path, map_path):
  mem_map = []
  with open(map_path, mode="rt") as f_map:
    sys.stderr.write("%s: reading map %s\n" % (prog_name, map_path))
    bits = int(f_map.readline())
    if bits != 32 and bits != 64:
      raise Exception('Wrong bits size in the map')
    for line in f_map:
      parts = line.rstrip().split()
      mem_map.append((int(parts[0], 16),
                      int(parts[1], 16),
                      int(parts[2], 16),
                      ' '.join(parts[3:])))
  mem_map.sort(key=lambda m: m[0])
  mem_map_keys = [m[0] for m in mem_map]

  with open(path, mode="rb") as f:
    sys.stderr.write("%s: unpacking %s\n" % (prog_name, path))

    f.seek(0, 2)
    size = f.tell()
    f.seek(0, 0)
    pcs = struct.unpack_from(TypeCodeForStruct(bits) * (size * 8 // bits), f.read(size))
    mem_map_pcs = [[] for i in range(0, len(mem_map))]

    for pc in pcs:
      if pc == 0: continue
      # Find the mapping with the largest start address that is <= pc.
      map_idx = bisect.bisect(mem_map_keys, pc) - 1
      (start, end, base, module_path) = mem_map[map_idx]
      assert pc >= start
      if pc >= end:
        sys.stderr.write("warning: %s: pc %x outside of any known mapping\n" % (prog_name, pc))
        continue
      # Store the PC as an offset relative to the module's load base.
      mem_map_pcs[map_idx].append(pc - base)

    for ((start, end, base, module_path), pc_list) in zip(mem_map, mem_map_pcs):
      if len(pc_list) == 0: continue
      assert path.endswith('.sancov.raw')
      dst_path = module_path + '.' + os.path.basename(path)[:-4]
      sys.stderr.write("%s: writing %d PCs to %s\n" % (prog_name, len(pc_list), dst_path))
      sorted_pc_list = sorted(pc_list)
      pc_buffer = struct.pack(TypeCodeForStruct(bits) * len(pc_list), *sorted_pc_list)
      with open(dst_path, 'ab+') as f2:
        array.array('I', MagicForBits(bits)).tofile(f2)
        f2.seek(0, 2)
        f2.write(pc_buffer)

def RawUnpack(files):
  for f in files:
    if not f.endswith('.sancov.raw'):
      raise Exception('Unexpected raw file name %s' % f)
    f_map = f[:-3] + 'map'
    UnpackOneRawFile(f, f_map)
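
# GetInstrumentedPCs below scrapes objdump output; a matching line looks
# roughly like this (illustrative disassembly, not taken from a real binary):
#   4004f6:  e8 85 ff ff ff   callq  400480 <__sanitizer_cov@plt>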

def GetInstrumentedPCs(binary):
  # This looks scary, but all it does is extract all offsets where we call:
  # - __sanitizer_cov(), __sanitizer_cov_with_check() or
  #   __sanitizer_cov_trace_pc_guard(),
  # - with call or callq,
  # - directly or via PLT.
  cmd = "objdump -d %s | " \
        "grep '^\s\+[0-9a-f]\+:.*\scall\(q\|\)\s\+[0-9a-f]\+ <__sanitizer_cov\(_with_check\|\|_trace_pc_guard\)\(@plt\|\)>' | " \
        "grep '^\s\+[0-9a-f]\+' -o" % binary
  proc = subprocess.Popen(cmd, stdin=subprocess.PIPE, stdout=subprocess.PIPE,
                          shell=True)
  proc.stdin.close()
  # The PCs we get from objdump are off by 4 bytes, as they point to the
  # beginning of the callq instruction. Empirically this is true on x86 and
  # x86_64.
  return set(int(line.strip(), 16) + 4 for line in proc.stdout)

def PrintMissing(binary):
  if not os.path.isfile(binary):
    raise Exception('File not found: %s' % binary)
  instrumented = GetInstrumentedPCs(binary)
  sys.stderr.write("%s: found %d instrumented PCs in %s\n" % (prog_name,
                                                              len(instrumented),
                                                              binary))
  covered = set(int(line, 16) for line in sys.stdin)
  sys.stderr.write("%s: read %d PCs from stdin\n" % (prog_name, len(covered)))
  missing = instrumented - covered
  sys.stderr.write("%s: %d PCs missing from coverage\n" % (prog_name, len(missing)))
  if len(missing) > len(instrumented) - len(covered):
    sys.stderr.write(
      "%s: WARNING: stdin contains PCs not found in binary\n" % prog_name
    )
  for pc in sorted(missing):
    print("0x%x" % pc)
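
# Typical "missing" pipeline (illustrative): feed the hex PC list produced by
# "print" into "missing" for the same binary, e.g.
#   sancov.py print a.out.1234.sancov | sancov.py missing a.out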

if __name__ == '__main__':
  prog_name = sys.argv[0]
  if len(sys.argv) <= 2:
    Usage()

  if sys.argv[1] == "missing":
    if len(sys.argv) != 3:
      Usage()
    PrintMissing(sys.argv[2])
    exit(0)

  file_list = []
  for f in sys.argv[2:]:
    file_list += glob.glob(f)
  if not file_list:
    Usage()

  if sys.argv[1] == "print":
    PrintFiles(file_list)
  elif sys.argv[1] == "merge":
    MergeAndPrint(file_list)
  elif sys.argv[1] == "unpack":
    Unpack(file_list)
  elif sys.argv[1] == "rawunpack":
    RawUnpack(file_list)
  else:
    Usage()