[lldb] Use a time-based timeout in IRInterpreter

At the moment the IRInterpreter will stop interpreting an expression
after a hardcoded 4096 instructions. After it reaches the limit it will
stop interpreting and leave the process in whatever state it was when
the instruction limit was reached.

This patch changes the instruction limit to a timeout and uses the
user-specified expression timeout value for this. The main motivation is
to allow users on targets where we can't use the JIT to run more
complicated expressions if they really want to (which they can do now by
just increasing the timeout).

The time-based approach also seems much more meaningful than the
arbitrary (and very low) instruction limit. 4096 instructions can be
interpreted in a few microseconds on some setups but might take much
longer if we have a slow connection to the target. I don't think any
user actually cares about the number of instructions that are executed
but only about the time they are willing to wait for a result.

Based off an original patch by Raphael Isemann.

Differential revision: https://reviews.llvm.org/D102762
This commit is contained in:
Jonas Devlieghere 2023-08-01 10:18:13 -07:00
parent e93a8137d3
commit 59237bb52c
No known key found for this signature in database
GPG Key ID: 49CC0BD90FDEED4D
5 changed files with 68 additions and 16 deletions

View File

@ -11,6 +11,7 @@
#include "lldb/Utility/ConstString.h"
#include "lldb/Utility/Stream.h"
#include "lldb/Utility/Timeout.h"
#include "lldb/lldb-public.h"
#include "llvm/ADT/ArrayRef.h"
#include "llvm/Pass.h"
@ -44,7 +45,8 @@ public:
lldb_private::Status &error,
lldb::addr_t stack_frame_bottom,
lldb::addr_t stack_frame_top,
lldb_private::ExecutionContext &exe_ctx);
lldb_private::ExecutionContext &exe_ctx,
lldb_private::Timeout<std::micro> timeout);
private:
static bool supportsFunction(llvm::Function &llvm_function,

View File

@ -346,9 +346,16 @@ public:
m_use_dynamic = dynamic;
}
const Timeout<std::micro> &GetTimeout() const { return m_timeout; }
/// Return the expression timeout. The assert documents the invariant that a
/// timeout is always present and positive (SetTimeout below refuses to store
/// anything else).
const Timeout<std::micro> &GetTimeout() const {
assert(m_timeout && m_timeout->count() > 0);
return m_timeout;
}
void SetTimeout(const Timeout<std::micro> &timeout) { m_timeout = timeout; }
void SetTimeout(const Timeout<std::micro> &timeout) {
// Disallow setting a zero (or empty) timeout: only store the value when it
// is present and strictly positive; otherwise keep the previous timeout.
if (timeout && timeout->count() > 0)
m_timeout = timeout;
}
const Timeout<std::micro> &GetOneThreadTimeout() const {
return m_one_thread_timeout;

View File

@ -470,7 +470,8 @@ static const char *memory_allocation_error =
"Interpreter couldn't allocate memory";
static const char *memory_write_error = "Interpreter couldn't write to memory";
static const char *memory_read_error = "Interpreter couldn't read from memory";
static const char *infinite_loop_error = "Interpreter ran for too many cycles";
static const char *timeout_error =
"Reached timeout while interpreting expression";
static const char *too_many_functions_error =
"Interpreter doesn't handle modules with multiple function bodies.";
@ -683,7 +684,8 @@ bool IRInterpreter::Interpret(llvm::Module &module, llvm::Function &function,
lldb_private::Status &error,
lldb::addr_t stack_frame_bottom,
lldb::addr_t stack_frame_top,
lldb_private::ExecutionContext &exe_ctx) {
lldb_private::ExecutionContext &exe_ctx,
lldb_private::Timeout<std::micro> timeout) {
lldb_private::Log *log(GetLog(LLDBLog::Expressions));
if (log) {
@ -722,11 +724,23 @@ bool IRInterpreter::Interpret(llvm::Module &module, llvm::Function &function,
frame.MakeArgument(&*ai, ptr);
}
uint32_t num_insts = 0;
frame.Jump(&function.front());
while (frame.m_ii != frame.m_ie && (++num_insts < 4096)) {
using clock = std::chrono::steady_clock;
// Compute the time at which the timeout has been exceeded.
std::optional<clock::time_point> end_time;
if (timeout && timeout->count() > 0)
end_time = clock::now() + *timeout;
while (frame.m_ii != frame.m_ie) {
// Timeout reached: stop interpreting.
if (end_time && clock::now() >= *end_time) {
error.SetErrorToGenericError();
error.SetErrorString(timeout_error);
return false;
}
const Instruction *inst = &*frame.m_ii;
LLDB_LOGF(log, "Interpreting %s", PrintValue(inst).c_str());
@ -1571,11 +1585,5 @@ bool IRInterpreter::Interpret(llvm::Module &module, llvm::Function &function,
++frame.m_ii;
}
if (num_insts >= 4096) {
error.SetErrorToGenericError();
error.SetErrorString(infinite_loop_error);
return false;
}
return false;
}

View File

@ -120,7 +120,7 @@ LLVMUserExpression::DoExecute(DiagnosticManager &diagnostic_manager,
IRInterpreter::Interpret(*module, *function, args, *m_execution_unit_sp,
interpreter_error, function_stack_bottom,
function_stack_top, exe_ctx);
function_stack_top, exe_ctx, options.GetTimeout());
if (!interpreter_error.Success()) {
diagnostic_manager.Printf(eDiagnosticSeverityError,
@ -233,7 +233,7 @@ LLVMUserExpression::DoExecute(DiagnosticManager &diagnostic_manager,
eDiagnosticSeverityError,
"Couldn't complete execution; the thread "
"on which the expression was being run: 0x%" PRIx64
" exited during its execution.",
" exited during its execution.",
expr_thread_id);
return execution_result;
} else if (execution_result != lldb::eExpressionCompleted) {

View File

@ -12,6 +12,41 @@ from lldbsuite.test import lldbutil
class IRInterpreterTestCase(TestBase):
NO_DEBUG_INFO_TESTCASE = True
def time_expression(self, expr, options):
    """Evaluate expr with the given options and return a tuple of the
    evaluation result and the wall-clock seconds the evaluation took."""
    begin = time.time()
    result = self.target.EvaluateExpression(expr, options)
    elapsed = time.time() - begin
    return result, elapsed
def test_interpreter_timeout(self):
"""Test the timeout logic in the IRInterpreter."""
self.build()
self.target = self.dbg.CreateTarget(self.getBuildArtifact("a.out"))
self.assertTrue(self.target, VALID_TARGET)
# A non-trivial infinite loop: the decrement undoes the increment, so the
# loop condition never becomes false. The trailing "1" gives the
# expression a result value.
inf_loop = "for (unsigned i = 0; i < 100; ++i) --i; 1"
# Error string produced by the IRInterpreter when it hits the timeout.
timeout_error = "Reached timeout while interpreting expression"
options = lldb.SBExpressionOptions()
# This is an IRInterpreter specific test, so disable the JIT.
options.SetAllowJIT(False)
# A zero timeout is rejected by SetTimeout, so the default timeout applies
# (presumably 500ms -- confirm against EvaluateExpressionOptions). The
# infinite loop must still be cut off by that default.
options.SetTimeoutInMicroSeconds(0)
res, duration_sec = self.time_expression(inf_loop, options)
self.assertIn(timeout_error, str(res.GetError()))
# Depending on the machine load the expression might take quite some
# time, so give the time a generous upper bound.
self.assertLess(duration_sec, 15)
# Try a simple one second timeout.
options.SetTimeoutInMicroSeconds(1000000)
res, duration_sec = self.time_expression(inf_loop, options)
self.assertIn(timeout_error, str(res.GetError()))
# The interpreter must run for at least the requested second before
# giving up; again allow generous slack for slow machines.
self.assertGreaterEqual(duration_sec, 1)
self.assertLess(duration_sec, 30)
def setUp(self):
# Call super's setUp().
TestBase.setUp(self)