forked from OSchip/llvm-project
Added gdb-remote auxv tests.
First batch of auxv-related tests from the llgs branch. Includes helpers for unescaping gdb-remote binary-escaped data, converting binary data from the inferior's endianness to integral values, and so on. The tests are expected to be skipped on debugserver, since it does not support auxv, and they are written to skip themselves on any platform that does not advertise the feature in its qSupported response. (llgs is listed as XFAIL because upstream llgs does not yet implement qSupported, so the support check cannot succeed there.)

llvm-svn: 211105
This commit is contained in:
parent e35705675f
commit 8d7ab8c617
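The commit message notes that the tests key off the qSupported response rather than the platform. As a rough, hand-written illustration (the reply string below is made up apart from the qXfer:auxv:read+ entry, and the parsing is a standalone restatement, not the test suite's parse_qSupported_response helper), the feature check boils down to this:

# Illustration only: a stub that supports reading the auxv object advertises
# "qXfer:auxv:read+" in its qSupported reply.  Feature entries end in '+',
# '-', or '?', or carry a value after '='.
reply = "PacketSize=20000;QStartNoAckMode+;qXfer:auxv:read+"

features = {}
for entry in reply.split(";"):
    if entry and entry[-1] in "+-?":
        features[entry[:-1]] = entry[-1]
    elif "=" in entry:
        name, _, value = entry.partition("=")
        features[name] = value

# Mirrors the check in has_auxv_support(): present and marked '+'.
assert features.get("qXfer:auxv:read") == "+"

When the entry is absent or not "+", the tests call skipTest, which is what makes them safe to run against debugserver.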
@@ -0,0 +1,150 @@
import unittest2

import gdbremote_testcase
from lldbtest import *

class TestGdbRemoteAuxvSupport(gdbremote_testcase.GdbRemoteTestCaseBase):

    AUXV_SUPPORT_FEATURE_NAME = "qXfer:auxv:read"

    def has_auxv_support(self, inferior_args=None):
        procs = self.prep_debug_monitor_and_inferior(inferior_args=inferior_args)
        self.add_qSupported_packets()

        context = self.expect_gdbremote_sequence()
        self.assertIsNotNone(context)

        features = self.parse_qSupported_response(context)
        return self.AUXV_SUPPORT_FEATURE_NAME in features and features[self.AUXV_SUPPORT_FEATURE_NAME] == "+"

    def get_raw_auxv_data(self, inferior_args=None):
        # Start up llgs and the inferior, and check for auxv support.
        if not self.has_auxv_support(inferior_args=inferior_args):
            self.skipTest("auxv data not supported")

        # Grab the pointer size for the target.  We'll assume that is
        # equivalent to an unsigned long on the target.  Auxv is specified in
        # terms of pairs of unsigned longs.
        self.reset_test_sequence()
        self.add_process_info_collection_packets()

        context = self.expect_gdbremote_sequence()
        self.assertIsNotNone(context)

        proc_info = self.parse_process_info_response(context)
        self.assertIsNotNone(proc_info)
        self.assertTrue("ptrsize" in proc_info)
        word_size = int(proc_info["ptrsize"])

        OFFSET = 0
        LENGTH = 0x400

        # Grab the auxv data.
        self.reset_test_sequence()
        self.test_sequence.add_log_lines([
            "read packet: $qXfer:auxv:read::{:x},{:x}:#00".format(OFFSET, LENGTH),
            {"direction": "send", "regex": r"^\$([^E])(.*)#[0-9a-fA-F]{2}$", "capture": {1: "response_type", 2: "content_raw"}}
        ], True)

        context = self.expect_gdbremote_sequence()
        self.assertIsNotNone(context)

        # Ensure we end up with all auxv data in one packet.
        # FIXME: don't assume it all comes back in one packet.
        self.assertEquals(context.get("response_type"), "l")

        # Decode the binary data.
        content_raw = context.get("content_raw")
        self.assertIsNotNone(content_raw)
        return (word_size, self.decode_gdbremote_binary(content_raw))

    def supports_auxv(self, inferior_args=None):
        # When non-auxv platforms support llgs, skip the test on platforms
        # that don't support auxv.
        self.assertTrue(self.has_auxv_support(inferior_args=inferior_args))

    #
    # We skip the "supports_auxv" test on debugserver.  The rest of the tests
    # skip themselves when the support flag is not present in the qSupported
    # response, so the debugserver test bits stay in place in case debugserver
    # one day gains auxv support, at which point they will stop being skipped.
    #

    @llgs_test
    @dwarf_test
    @unittest2.expectedFailure()
    def test_supports_auxv_llgs_dwarf(self):
        self.init_llgs_test()
        self.buildDwarf()
        self.set_inferior_startup_launch()
        self.supports_auxv()

    def auxv_data_is_correct_size(self):
        (word_size, auxv_data) = self.get_raw_auxv_data(inferior_args=["sleep:1"])
        self.assertIsNotNone(auxv_data)

        # Ensure auxv data is a multiple of 2*word_size (there should be two
        # unsigned long fields per auxv entry).
        self.assertEquals(len(auxv_data) % (2 * word_size), 0)
        # print "auxv contains {} entries".format(len(auxv_data) / (2*word_size))

    @debugserver_test
    @dsym_test
    def test_auxv_data_is_correct_size_debugserver_dsym(self):
        self.init_debugserver_test()
        self.buildDsym()
        self.set_inferior_startup_launch()
        self.auxv_data_is_correct_size()

    @llgs_test
    @dwarf_test
    @unittest2.expectedFailure()
    def test_auxv_data_is_correct_size_llgs_dwarf(self):
        self.init_llgs_test()
        self.buildDwarf()
        self.set_inferior_startup_launch()
        self.auxv_data_is_correct_size()

    def auxv_keys_look_valid(self):
        (word_size, auxv_data) = self.get_raw_auxv_data(inferior_args=["sleep:1"])
        self.assertIsNotNone(auxv_data)

        # Grab the endianness.
        self.reset_test_sequence()
        self.add_process_info_collection_packets()
        context = self.expect_gdbremote_sequence()
        self.assertIsNotNone(context)

        process_info = self.parse_process_info_response(context)
        self.assertIsNotNone(process_info)
        endian = process_info.get("endian")
        self.assertIsNotNone(endian)

        auxv_dict = self.build_auxv_dict(endian, word_size, auxv_data)
        self.assertIsNotNone(auxv_dict)

        # Verify the keys look reasonable.
        for auxv_key in auxv_dict:
            self.assertTrue(auxv_key >= 1)
            self.assertTrue(auxv_key <= 1000)
        # print "auxv dict: {}".format(auxv_dict)

    @debugserver_test
    @dsym_test
    def test_auxv_keys_look_valid_debugserver_dsym(self):
        self.init_debugserver_test()
        self.buildDsym()
        self.set_inferior_startup_launch()
        self.auxv_keys_look_valid()

    @llgs_test
    @dwarf_test
    @unittest2.expectedFailure()
    def test_auxv_keys_look_valid_llgs_dwarf(self):
        self.init_llgs_test()
        self.buildDwarf()
        self.set_inferior_startup_launch()
        self.auxv_keys_look_valid()


if __name__ == '__main__':
    unittest2.main()
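For context on the packet traffic this test drives: qXfer:auxv:read takes a hex offset and length, and the first character of the reply distinguishes a final chunk ("l") from one with more to follow ("m"). The transcript below is hand-written for illustration; the single page-size-style entry, the 64-bit little-endian layout, and the "#00" checksum placeholder are assumptions, not captured llgs or debugserver output.

import re

# Request built the same way the test does; the test writes a literal "#00"
# rather than a computed checksum.
OFFSET, LENGTH = 0, 0x400
request = "$qXfer:auxv:read::{:x},{:x}:#00".format(OFFSET, LENGTH)
assert request == "$qXfer:auxv:read::0,400:#00"

# Hypothetical single-chunk reply: 'l', then one (key=6, value=0x1000) pair
# and the terminating (0, 0) pair, all as 64-bit little-endian words.
reply = ("$l"
         + "\x06\x00\x00\x00\x00\x00\x00\x00"   # key   = 6
         + "\x00\x10\x00\x00\x00\x00\x00\x00"   # value = 0x1000
         + "\x00" * 16                          # (0, 0) terminator
         + "#00")

# Same regex the test registers for the reply.
match = re.match(r"^\$([^E])(.*)#[0-9a-fA-F]{2}$", reply)
assert match.group(1) == "l"     # all auxv data arrived in one chunk
content_raw = match.group(2)     # still binary-escaped at this point
assert len(content_raw) == 32

The test asserts the "l" response type (with a FIXME acknowledging that a larger auxv blob could legitimately arrive as several "m" chunks) before handing content_raw to decode_gdbremote_binary(), added in the next hunk.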
@@ -578,4 +578,55 @@ class GdbRemoteTestCaseBase(TestBase):
                return reg_info
        return None

    def decode_gdbremote_binary(self, encoded_bytes):
        decoded_bytes = ""
        i = 0
        while i < len(encoded_bytes):
            if encoded_bytes[i] == "}":
                # Handle escaped char.
                self.assertTrue(i + 1 < len(encoded_bytes))
                decoded_bytes += chr(ord(encoded_bytes[i+1]) ^ 0x20)
                i += 2
            elif encoded_bytes[i] == "*":
                # Handle run length encoding.
                self.assertTrue(len(decoded_bytes) > 0)
                self.assertTrue(i + 1 < len(encoded_bytes))
                repeat_count = ord(encoded_bytes[i+1]) - 29
                decoded_bytes += decoded_bytes[-1] * repeat_count
                i += 2
            else:
                decoded_bytes += encoded_bytes[i]
                i += 1
        return decoded_bytes

    def build_auxv_dict(self, endian, word_size, auxv_data):
        self.assertIsNotNone(endian)
        self.assertIsNotNone(word_size)
        self.assertIsNotNone(auxv_data)

        auxv_dict = {}

        while len(auxv_data) > 0:
            # Chop off the key.
            raw_key = auxv_data[:word_size]
            auxv_data = auxv_data[word_size:]

            # Chop off the value.
            raw_value = auxv_data[:word_size]
            auxv_data = auxv_data[word_size:]

            # Convert the raw text from target endianness.
            key = unpack_endian_binary_string(endian, raw_key)
            value = unpack_endian_binary_string(endian, raw_value)

            # Handle the terminating entry.
            if key == 0:
                self.assertEquals(value, 0)
                return auxv_dict

            # The key should not already be present.
            self.assertFalse(key in auxv_dict)
            auxv_dict[key] = value

        self.fail("should not reach here - implies required double zero entry not found")
        return auxv_dict
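To make the (key, value) layout that build_auxv_dict walks concrete, here is a standalone sketch using struct rather than the helpers above; the single key-6 entry and the 64-bit little-endian word size are assumptions chosen for the example.

import struct

# Hypothetical decoded (already unescaped) auxv blob: one entry with key 6
# and value 0x1000, followed by the required double-zero terminator.
word_size = 8
blob = struct.pack("<4Q", 6, 0x1000, 0, 0)

auxv = {}
for off in range(0, len(blob), 2 * word_size):
    key, value = struct.unpack_from("<2Q", blob, off)
    if key == 0:
        # build_auxv_dict additionally asserts value == 0 here.
        break
    auxv[key] = value

print(auxv)   # {6: 4096}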
@@ -309,6 +309,30 @@ def parse_threadinfo_response(response_packet):
    # Return list of thread ids
    return [int(thread_id_hex, 16) for thread_id_hex in response_packet.split(",") if len(thread_id_hex) > 0]

def unpack_endian_binary_string(endian, value_string):
    """Unpack a gdb-remote binary (post-unescaping, i.e. not escaped) response to an unsigned int, given the endianness of the inferior."""
    if not endian:
        raise Exception("endian cannot be None")
    if not value_string or len(value_string) < 1:
        raise Exception("value_string cannot be None or empty")

    if endian == 'little':
        value = 0
        i = 0
        while len(value_string) > 0:
            value += (ord(value_string[0]) << i)
            value_string = value_string[1:]
            i += 8
        return value
    elif endian == 'big':
        value = 0
        while len(value_string) > 0:
            value = (value << 8) + ord(value_string[0])
            value_string = value_string[1:]
        return value
    else:
        # pdp endianness is also valid; parsing for it can be added once it is needed.
        raise Exception("unsupported endian:{}".format(endian))

def unpack_register_hex_unsigned(endian, value_string):
    """Unpack a gdb-remote $p-style response to an unsigned int, given the endianness of the inferior."""
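As a quick sanity check on the little-endian branch of unpack_endian_binary_string (a standalone restatement of the same loop, not an import of lldbgdbserverutils), the bytes 00 10 00 00 00 00 00 00 accumulate to 0x1000:

# Each byte is shifted into place 8 bits at a time, least significant first.
value = 0
shift = 0
for ch in "\x00\x10\x00\x00\x00\x00\x00\x00":
    value += ord(ch) << shift
    shift += 8
assert value == 0x1000   # same result unpack_endian_binary_string('little', ...) would return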
@@ -316,7 +340,7 @@ def unpack_register_hex_unsigned(endian, value_string):
        raise Exception("endian cannot be None")
    if not value_string or len(value_string) < 1:
        raise Exception("value_string cannot be None or empty")

    if endian == 'little':
        value = 0
        i = 0
@@ -623,10 +647,10 @@ class MatchRemoteOutputEntry(GdbRemoteEntryBase):
        self._regex_mode = regex_mode
        self._capture = capture
        self._matched = False

        if not self._regex:
            raise Exception("regex cannot be None")

        if not self._regex_mode in ["match", "search"]:
            raise Exception("unsupported regex mode \"{}\": must be \"match\" or \"search\"".format(self._regex_mode))
@@ -654,7 +678,7 @@ class MatchRemoteOutputEntry(GdbRemoteEntryBase):
        # If we don't have any content yet, we don't match.
        if len(accumulated_output) < 1:
            return context

        # Check if we match
        if self._regex_mode == "match":
            match = self._regex.match(accumulated_output)
@@ -662,16 +686,16 @@ class MatchRemoteOutputEntry(GdbRemoteEntryBase):
            match = self._regex.search(accumulated_output)
        else:
            raise Exception("Unexpected regex mode: {}".format(self._regex_mode))

        # If we don't match, wait to try again after next $O content, or time out.
        if not match:
            # print "re pattern \"{}\" did not match against \"{}\"".format(self._regex.pattern, accumulated_output)
            return context

        # We do match.
        self._matched = True
        # print "re pattern \"{}\" matched against \"{}\"".format(self._regex.pattern, accumulated_output)

        # Collect up any captures into the context.
        if self._capture:
            # Handle captures.