[test] Persist packets between expect_gdbremote_sequence invocations

Summary:
Some tests (Hc_then_Csignal_signals_correct_thread, at least) were sending a "continue" packet in
one expect_gdbremote_sequence invocation, and "expecting" the stop-reply in another call. This
posed a problem, because the packets were not persisted between the two invocations, and if
the stub was exceptionally fast to respond, the packet would be received in the first invocation
(where it would be ignored) and then the second invocation would fail because it could not find
the packet.

Since doing matching in two invocations seems like a reasonable use of the packet pump, instead
of fixing the test, I make sure the packet_pump supports this usage by making the list of
captured packets persistent.

Reviewers: tfiala

Subscribers: lldb-commits

Differential Revision: http://reviews.llvm.org/D18140

llvm-svn: 263629
This commit is contained in:
Pavel Labath 2016-03-16 09:44:49 +00:00
parent 39aab4d606
commit 78fc483980
3 changed files with 59 additions and 48 deletions

View File

@ -78,6 +78,7 @@ class GdbRemoteTestCaseBase(TestBase):
self.setUpBaseLogging()
self._remote_server_log_file = None
self.debug_monitor_extra_args = []
self._pump_queues = socket_packet_pump.PumpQueues()
if self.isVerboseLoggingRequested():
# If requested, full logs go to a log file
@ -109,6 +110,8 @@ class GdbRemoteTestCaseBase(TestBase):
self.stub_hostname = "localhost"
def tearDown(self):
self._pump_queues.verify_queues_empty()
if self._remote_server_log_file is not None:
lldb.remote_platform.Get(lldb.SBFileSpec(self._remote_server_log_file),
lldb.SBFileSpec(self.getLocalServerLogFile()))
@ -629,7 +632,8 @@ class GdbRemoteTestCaseBase(TestBase):
def expect_gdbremote_sequence(self, timeout_seconds=None):
if not timeout_seconds:
timeout_seconds = self._TIMEOUT_SECONDS
return expect_lldb_gdbserver_replay(self, self.sock, self.test_sequence, timeout_seconds, self.logger)
return expect_lldb_gdbserver_replay(self, self.sock, self.test_sequence,
self._pump_queues, timeout_seconds, self.logger)
_KNOWN_REGINFO_KEYS = [
"name",

View File

@ -154,6 +154,7 @@ def expect_lldb_gdbserver_replay(
asserter,
sock,
test_sequence,
pump_queues,
timeout_seconds,
logger=None):
"""Replay socket communication with lldb-gdbserver and verify responses.
@ -193,7 +194,7 @@ def expect_lldb_gdbserver_replay(
return {}
context = {"O_count":0, "O_content":""}
with socket_packet_pump.SocketPacketPump(sock, logger) as pump:
with socket_packet_pump.SocketPacketPump(sock, pump_queues, logger) as pump:
# Grab the first sequence entry.
sequence_entry = test_sequence.entries.pop(0)
@ -220,14 +221,14 @@ def expect_lldb_gdbserver_replay(
if sequence_entry.is_output_matcher():
try:
# Grab next entry from the output queue.
content = pump.output_queue().get(True, timeout_seconds)
content = pump_queues.output_queue().get(True, timeout_seconds)
except queue.Empty:
if logger:
logger.warning("timeout waiting for stub output (accumulated output:{})".format(pump.get_accumulated_output()))
raise Exception("timed out while waiting for output match (accumulated output: {})".format(pump.get_accumulated_output()))
else:
try:
content = pump.packet_queue().get(True, timeout_seconds)
content = pump_queues.packet_queue().get(True, timeout_seconds)
except queue.Empty:
if logger:
logger.warning("timeout waiting for packet match (receive buffer: {})".format(pump.get_receive_buffer()))

View File

@ -26,46 +26,18 @@ def _dump_queue(the_queue):
print(codecs.encode(the_queue.get(True), "string_escape"))
print("\n")
class SocketPacketPump(object):
"""A threaded packet reader that partitions packets into two streams.
All incoming $O packet content is accumulated with the current accumulation
state put into the OutputQueue.
All other incoming packets are placed in the packet queue.
A select thread can be started and stopped, and runs to place packet
content into the two queues.
"""
_GDB_REMOTE_PACKET_REGEX = re.compile(r'^\$([^\#]*)#[0-9a-fA-F]{2}')
def __init__(self, pump_socket, logger=None):
if not pump_socket:
raise Exception("pump_socket cannot be None")
class PumpQueues(object):
def __init__(self):
self._output_queue = queue.Queue()
self._packet_queue = queue.Queue()
self._thread = None
self._stop_thread = False
self._socket = pump_socket
self._logger = logger
self._receive_buffer = ""
self._accumulated_output = ""
def __enter__(self):
"""Support the python 'with' statement.
def output_queue(self):
return self._output_queue
Start the pump thread."""
self.start_pump_thread()
return self
def __exit__(self, exit_type, value, the_traceback):
"""Support the python 'with' statement.
Shut down the pump thread."""
self.stop_pump_thread()
def packet_queue(self):
return self._packet_queue
def verify_queues_empty(self):
# Warn if there is any content left in any of the queues.
# That would represent unmatched packets.
if not self.output_queue().empty():
@ -80,6 +52,46 @@ class SocketPacketPump(object):
print("from here:")
traceback.print_stack()
class SocketPacketPump(object):
"""A threaded packet reader that partitions packets into two streams.
All incoming $O packet content is accumulated with the current accumulation
state put into the OutputQueue.
All other incoming packets are placed in the packet queue.
A select thread can be started and stopped, and runs to place packet
content into the two queues.
"""
_GDB_REMOTE_PACKET_REGEX = re.compile(r'^\$([^\#]*)#[0-9a-fA-F]{2}')
def __init__(self, pump_socket, pump_queues, logger=None):
if not pump_socket:
raise Exception("pump_socket cannot be None")
self._thread = None
self._stop_thread = False
self._socket = pump_socket
self._logger = logger
self._receive_buffer = ""
self._accumulated_output = ""
self._pump_queues = pump_queues
def __enter__(self):
"""Support the python 'with' statement.
Start the pump thread."""
self.start_pump_thread()
return self
def __exit__(self, exit_type, value, the_traceback):
"""Support the python 'with' statement.
Shut down the pump thread."""
self.stop_pump_thread()
def start_pump_thread(self):
if self._thread:
raise Exception("pump thread is already running")
@ -92,12 +104,6 @@ class SocketPacketPump(object):
if self._thread:
self._thread.join()
def output_queue(self):
return self._output_queue
def packet_queue(self):
return self._packet_queue
def _process_new_bytes(self, new_bytes):
if not new_bytes:
return
@ -114,7 +120,7 @@ class SocketPacketPump(object):
has_more = False
# handle '+' ack
elif self._receive_buffer[0] == "+":
self._packet_queue.put("+")
self._pump_queues.packet_queue().put("+")
self._receive_buffer = self._receive_buffer[1:]
if self._logger:
self._logger.debug(
@ -132,10 +138,10 @@ class SocketPacketPump(object):
if new_output_content:
# This was an $O packet with new content.
self._accumulated_output += new_output_content
self._output_queue.put(self._accumulated_output)
self._pump_queues.output_queue().put(self._accumulated_output)
else:
# Any packet other than $O.
self._packet_queue.put(packet_match.group(0))
self._pump_queues.packet_queue().put(packet_match.group(0))
# Remove the parsed packet from the receive
# buffer.