Added gdb-remote auxv test for chunked reads.
Verifies that a sum of offset,length auxv reads matches a single large read, and that the auxv data extracted from them match. llvm-svn: 211127
commit 518867327f (parent aa60209311)
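Background for the diff below: the gdb-remote stub exposes the inferior's auxiliary vector through qXfer:auxv:read packets that take a hex offset and length; a reply beginning with 'm' means more data remains, while 'l' marks the last chunk. The new test asserts that stitching those chunks together yields exactly the same bytes as one large read. A minimal standalone sketch of that invariant follows (fake_qxfer_read and read_in_chunks are illustrative helpers only, not part of the LLDB test suite):

    def fake_qxfer_read(buf, offset, length):
        # Model a qXfer-style reply: 'm' = more data remains, 'l' = final chunk.
        chunk = buf[offset:offset + length]
        response_type = "l" if offset + length >= len(buf) else "m"
        return response_type, chunk

    def read_in_chunks(buf, chunk_length):
        # Accumulate offset,length reads until the 'l' (last) reply arrives.
        offset, collected = 0, b""
        while True:
            response_type, chunk = fake_qxfer_read(buf, offset, chunk_length)
            collected += chunk
            offset += chunk_length
            if response_type == "l":
                return collected

    auxv_blob = bytes(range(64))                        # stand-in for raw auxv data
    assert read_in_chunks(auxv_blob, 16) == auxv_blob   # chunked == single large read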
@@ -145,6 +145,54 @@ class TestGdbRemoteAuxvSupport(gdbremote_testcase.GdbRemoteTestCaseBase):
        self.set_inferior_startup_launch()
        self.auxv_keys_look_valid()

    def auxv_chunked_reads_work(self):
        # Verify that multiple smaller offset,length reads of auxv data
        # return the same data as a single larger read.

        # Grab the auxv data with a single large read here.
        (word_size, auxv_data) = self.get_raw_auxv_data(inferior_args=["sleep:1"])
        self.assertIsNotNone(auxv_data)

        # Grab endian.
        self.reset_test_sequence()
        self.add_process_info_collection_packets()
        context = self.expect_gdbremote_sequence()
        self.assertIsNotNone(context)

        process_info = self.parse_process_info_response(context)
        self.assertIsNotNone(process_info)
        endian = process_info.get("endian")
        self.assertIsNotNone(endian)

        auxv_dict = self.build_auxv_dict(endian, word_size, auxv_data)
        self.assertIsNotNone(auxv_dict)

        iterated_auxv_data = self.read_binary_data_in_chunks("qXfer:auxv:read::", 2*word_size)
        self.assertIsNotNone(iterated_auxv_data)

        auxv_dict_iterated = self.build_auxv_dict(endian, word_size, iterated_auxv_data)
        self.assertIsNotNone(auxv_dict_iterated)

        # Verify both types of data collection returned same content.
        self.assertEquals(auxv_dict_iterated, auxv_dict)

    @debugserver_test
    @dsym_test
    def test_auxv_chunked_reads_work_debugserver_dsym(self):
        self.init_debugserver_test()
        self.buildDsym()
        self.set_inferior_startup_launch()
        self.auxv_chunked_reads_work()

    @llgs_test
    @dwarf_test
    @unittest2.expectedFailure()
    def test_auxv_chunked_reads_work_llgs_dwarf(self):
        self.init_llgs_test()
        self.buildDwarf()
        self.set_inferior_startup_launch()
        self.auxv_chunked_reads_work()


if __name__ == '__main__':
    unittest2.main()
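The equality check at the end of auxv_chunked_reads_work compares dictionaries rather than raw bytes, since build_auxv_dict decodes the auxv blob into key/value pairs. For readers unfamiliar with the format: on ELF targets the auxiliary vector is a flat array of machine-word (key, value) pairs terminated by an AT_NULL (0) key. A rough sketch of such a decoder is shown below; it is an illustration under that assumption, not the suite's actual build_auxv_dict implementation:

    import struct

    def parse_auxv(data, word_size, byte_order="little"):
        # Decode raw auxv bytes into {key: value}; assumes ELF-style layout of
        # consecutive (key, value) machine words, terminated by an AT_NULL key.
        fmt = ("<" if byte_order == "little" else ">") + ("Q" if word_size == 8 else "I")
        auxv = {}
        for off in range(0, len(data) - 2 * word_size + 1, 2 * word_size):
            key = struct.unpack_from(fmt, data, off)[0]
            value = struct.unpack_from(fmt, data, off + word_size)[0]
            if key == 0:        # AT_NULL marks the end of the vector
                break
            auxv[key] = value
        return auxv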
@@ -630,3 +630,37 @@ class GdbRemoteTestCaseBase(TestBase):
        self.fail("should not reach here - implies required double zero entry not found")
        return auxv_dict

    def read_binary_data_in_chunks(self, command_prefix, chunk_length):
        """Collect command_prefix{offset:x},{chunk_length:x} until a single 'l' or 'l' with data is returned."""
        offset = 0
        done = False
        decoded_data = ""

        while not done:
            # Grab the next iteration of data.
            self.reset_test_sequence()
            self.test_sequence.add_log_lines([
                "read packet: ${}{:x},{:x}:#00".format(command_prefix, offset, chunk_length),
                {"direction":"send", "regex":r"^\$([^E])(.*)#[0-9a-fA-F]{2}$", "capture":{1:"response_type", 2:"content_raw"} }
                ], True)

            context = self.expect_gdbremote_sequence()
            self.assertIsNotNone(context)

            response_type = context.get("response_type")
            self.assertIsNotNone(response_type)
            self.assertTrue(response_type in ["l", "m"])

            # Move offset along.
            offset += chunk_length

            # Figure out if we're done. We're done if the response type is l.
            done = response_type == "l"

            # Decode binary data.
            content_raw = context.get("content_raw")
            if content_raw and len(content_raw) > 0:
                self.assertIsNotNone(content_raw)
                decoded_data += self.decode_gdbremote_binary(content_raw)
        return decoded_data
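For reference, the packet traffic this helper drives looks roughly like the following on a 64-bit target (chunk_length = 2*word_size = 0x10; checksums and the binary-escaped payload bytes are elided here):

    read packet: $qXfer:auxv:read::0,10:#00
    send packet: $m<16 binary-escaped auxv bytes>#xx
    read packet: $qXfer:auxv:read::10,10:#00
    send packet: $l<remaining binary-escaped auxv bytes>#xx

Each 'm' reply means more data remains; the 'l' reply ends the loop, after which every chunk has been unescaped by decode_gdbremote_binary and appended to decoded_data.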