mirror of
https://github.com/capstone-engine/llvm-capstone.git
synced 2024-11-30 00:51:02 +00:00
[ObjC][ARC] Use operand bundle 'clang.arc.rv' instead of explicitly
emitting retainRV or claimRV calls in the IR
This reapplies 3fe3946d9a
without the
changes made to lib/IR/AutoUpgrade.cpp, which were violating layering.
Original commit message:
Background:
This patch makes changes to the front-end and middle-end that are
needed to fix a longstanding problem where llvm breaks ARC's autorelease
optimization (see the link below) by separating calls from the marker
instructions or retainRV/claimRV calls. The backend changes are in
https://reviews.llvm.org/D92569.
https://clang.llvm.org/docs/AutomaticReferenceCounting.html#arc-runtime-objc-autoreleasereturnvalue
What this patch does to fix the problem:
- The front-end adds operand bundle "clang.arc.rv" to calls, which
indicates the call is implicitly followed by a marker instruction and
an implicit retainRV/claimRV call that consumes the call result. In
addition, it emits a call to @llvm.objc.clang.arc.noop.use, which
consumes the call result, to prevent the middle-end passes from changing
the return type of the called function. This is currently done only when
the target is arm64 and the optimization level is higher than -O0.
- ARC optimizer temporarily emits retainRV/claimRV calls after the calls
with the operand bundle in the IR and removes the inserted calls after
processing the function.
- ARC contract pass emits retainRV/claimRV calls after the call with the
operand bundle. It doesn't remove the operand bundle on the call since
the backend needs it to emit the marker instruction. The retainRV and
claimRV calls are emitted late in the pipeline to prevent optimization
passes from transforming the IR in a way that makes it harder for the
ARC middle-end passes to figure out the def-use relationship between
the call and the retainRV/claimRV calls (which is the cause of
PR31925).
- The function inliner removes an autoreleaseRV call in the callee if
nothing in the callee prevents it from being paired up with the
retainRV/claimRV call in the caller. It then inserts a release call if
the call is annotated with claimRV since autoreleaseRV+claimRV is
equivalent to a release. If it cannot find an autoreleaseRV call, it
tries to transfer the operand bundle to a function call in the callee.
This is important since ARC optimizer can remove the autoreleaseRV
returning the callee result, which makes it impossible to pair it up
with the retainRV/claimRV call in the caller. If that fails, it simply
emits a retain call in the IR if the implicit call is a call to
retainRV and does nothing if it's a call to claimRV.
Future work:
- Use the operand bundle on x86-64.
- Fix the auto upgrader to convert call+retainRV/claimRV pairs into
calls annotated with the operand bundles.
rdar://71443534
Differential Revision: https://reviews.llvm.org/D92808
This commit is contained in:
parent
8a7f5ad0fd
commit
4a64d8fe39
@ -23,6 +23,7 @@
|
||||
#include "clang/Basic/Diagnostic.h"
|
||||
#include "clang/CodeGen/CGFunctionInfo.h"
|
||||
#include "llvm/ADT/STLExtras.h"
|
||||
#include "llvm/Analysis/ObjCARCUtil.h"
|
||||
#include "llvm/BinaryFormat/MachO.h"
|
||||
#include "llvm/IR/DataLayout.h"
|
||||
#include "llvm/IR/InlineAsm.h"
|
||||
@ -2078,6 +2079,15 @@ void CodeGenFunction::EmitARCIntrinsicUse(ArrayRef<llvm::Value*> values) {
|
||||
EmitNounwindRuntimeCall(fn, values);
|
||||
}
|
||||
|
||||
/// Emit a call to "clang.arc.noop.use", which consumes the result of a call
|
||||
/// that has operand bundle "clang.arc.rv".
|
||||
void CodeGenFunction::EmitARCNoopIntrinsicUse(ArrayRef<llvm::Value *> values) {
|
||||
llvm::Function *&fn = CGM.getObjCEntrypoints().clang_arc_noop_use;
|
||||
if (!fn)
|
||||
fn = CGM.getIntrinsic(llvm::Intrinsic::objc_clang_arc_noop_use);
|
||||
EmitNounwindRuntimeCall(fn, values);
|
||||
}
|
||||
|
||||
static void setARCRuntimeFunctionLinkage(CodeGenModule &CGM, llvm::Value *RTF) {
|
||||
if (auto *F = dyn_cast<llvm::Function>(RTF)) {
|
||||
// If the target runtime doesn't naturally support ARC, emit weak
|
||||
@ -2304,10 +2314,11 @@ static void emitAutoreleasedReturnValueMarker(CodeGenFunction &CGF) {
|
||||
// with this marker yet, so leave a breadcrumb for the ARC
|
||||
// optimizer to pick up.
|
||||
} else {
|
||||
const char *markerKey = "clang.arc.retainAutoreleasedReturnValueMarker";
|
||||
if (!CGF.CGM.getModule().getModuleFlag(markerKey)) {
|
||||
const char *retainRVMarkerKey = llvm::objcarc::getRVMarkerModuleFlagStr();
|
||||
if (!CGF.CGM.getModule().getModuleFlag(retainRVMarkerKey)) {
|
||||
auto *str = llvm::MDString::get(CGF.getLLVMContext(), assembly);
|
||||
CGF.CGM.getModule().addModuleFlag(llvm::Module::Error, markerKey, str);
|
||||
CGF.CGM.getModule().addModuleFlag(llvm::Module::Error,
|
||||
retainRVMarkerKey, str);
|
||||
}
|
||||
}
|
||||
}
|
||||
@ -2317,6 +2328,46 @@ static void emitAutoreleasedReturnValueMarker(CodeGenFunction &CGF) {
|
||||
CGF.Builder.CreateCall(marker, None, CGF.getBundlesForFunclet(marker));
|
||||
}
|
||||
|
||||
static llvm::Value *emitOptimizedARCReturnCall(llvm::Value *value,
|
||||
bool IsRetainRV,
|
||||
CodeGenFunction &CGF) {
|
||||
emitAutoreleasedReturnValueMarker(CGF);
|
||||
|
||||
// Add operand bundle "clang.arc.rv" to the call instead of emitting retainRV
|
||||
// or claimRV calls in the IR. We currently do this only when the optimization
|
||||
// level isn't -O0 since global-isel, which is currently run at -O0, doesn't
|
||||
// know about the operand bundle.
|
||||
|
||||
// FIXME: Do this when the target isn't aarch64.
|
||||
if (CGF.CGM.getCodeGenOpts().OptimizationLevel > 0 &&
|
||||
CGF.CGM.getTarget().getTriple().isAArch64()) {
|
||||
llvm::Value *bundleArgs[] = {llvm::ConstantInt::get(
|
||||
CGF.Int64Ty, llvm::objcarc::getRVOperandBundleEnum(IsRetainRV))};
|
||||
SmallVector<llvm::OperandBundleDef, 1> bundles;
|
||||
bundles.emplace_back("clang.arc.rv", bundleArgs);
|
||||
auto *oldCall = cast<llvm::CallBase>(value);
|
||||
llvm::CallBase *newCall = llvm::CallBase::Create(oldCall, bundles, oldCall);
|
||||
newCall->copyMetadata(*oldCall);
|
||||
oldCall->replaceAllUsesWith(newCall);
|
||||
oldCall->eraseFromParent();
|
||||
CGF.EmitARCNoopIntrinsicUse(newCall);
|
||||
return newCall;
|
||||
}
|
||||
|
||||
bool isNoTail =
|
||||
CGF.CGM.getTargetCodeGenInfo().markARCOptimizedReturnCallsAsNoTail();
|
||||
llvm::CallInst::TailCallKind tailKind =
|
||||
isNoTail ? llvm::CallInst::TCK_NoTail : llvm::CallInst::TCK_None;
|
||||
ObjCEntrypoints &EPs = CGF.CGM.getObjCEntrypoints();
|
||||
llvm::Function *&EP = IsRetainRV
|
||||
? EPs.objc_retainAutoreleasedReturnValue
|
||||
: EPs.objc_unsafeClaimAutoreleasedReturnValue;
|
||||
llvm::Intrinsic::ID IID =
|
||||
IsRetainRV ? llvm::Intrinsic::objc_retainAutoreleasedReturnValue
|
||||
: llvm::Intrinsic::objc_unsafeClaimAutoreleasedReturnValue;
|
||||
return emitARCValueOperation(CGF, value, nullptr, EP, IID, tailKind);
|
||||
}
|
||||
|
||||
/// Retain the given object which is the result of a function call.
|
||||
/// call i8* \@objc_retainAutoreleasedReturnValue(i8* %value)
|
||||
///
|
||||
@ -2324,15 +2375,7 @@ static void emitAutoreleasedReturnValueMarker(CodeGenFunction &CGF) {
|
||||
/// call with completely different semantics.
|
||||
llvm::Value *
|
||||
CodeGenFunction::EmitARCRetainAutoreleasedReturnValue(llvm::Value *value) {
|
||||
emitAutoreleasedReturnValueMarker(*this);
|
||||
llvm::CallInst::TailCallKind tailKind =
|
||||
CGM.getTargetCodeGenInfo().markARCOptimizedReturnCallsAsNoTail()
|
||||
? llvm::CallInst::TCK_NoTail
|
||||
: llvm::CallInst::TCK_None;
|
||||
return emitARCValueOperation(
|
||||
*this, value, nullptr,
|
||||
CGM.getObjCEntrypoints().objc_retainAutoreleasedReturnValue,
|
||||
llvm::Intrinsic::objc_retainAutoreleasedReturnValue, tailKind);
|
||||
return emitOptimizedARCReturnCall(value, true, *this);
|
||||
}
|
||||
|
||||
/// Claim a possibly-autoreleased return value at +0. This is only
|
||||
@ -2344,15 +2387,7 @@ CodeGenFunction::EmitARCRetainAutoreleasedReturnValue(llvm::Value *value) {
|
||||
/// call i8* \@objc_unsafeClaimAutoreleasedReturnValue(i8* %value)
|
||||
llvm::Value *
|
||||
CodeGenFunction::EmitARCUnsafeClaimAutoreleasedReturnValue(llvm::Value *value) {
|
||||
emitAutoreleasedReturnValueMarker(*this);
|
||||
llvm::CallInst::TailCallKind tailKind =
|
||||
CGM.getTargetCodeGenInfo().markARCOptimizedReturnCallsAsNoTail()
|
||||
? llvm::CallInst::TCK_NoTail
|
||||
: llvm::CallInst::TCK_None;
|
||||
return emitARCValueOperation(
|
||||
*this, value, nullptr,
|
||||
CGM.getObjCEntrypoints().objc_unsafeClaimAutoreleasedReturnValue,
|
||||
llvm::Intrinsic::objc_unsafeClaimAutoreleasedReturnValue, tailKind);
|
||||
return emitOptimizedARCReturnCall(value, false, *this);
|
||||
}
|
||||
|
||||
/// Release the given object.
|
||||
|
@ -4202,6 +4202,8 @@ public:
|
||||
|
||||
void EmitARCIntrinsicUse(ArrayRef<llvm::Value*> values);
|
||||
|
||||
void EmitARCNoopIntrinsicUse(ArrayRef<llvm::Value *> values);
|
||||
|
||||
static Destroyer destroyARCStrongImprecise;
|
||||
static Destroyer destroyARCStrongPrecise;
|
||||
static Destroyer destroyARCWeak;
|
||||
|
@ -210,6 +210,9 @@ struct ObjCEntrypoints {
|
||||
|
||||
/// void clang.arc.use(...);
|
||||
llvm::Function *clang_arc_use;
|
||||
|
||||
/// void clang.arc.noop.use(...);
|
||||
llvm::Function *clang_arc_noop_use;
|
||||
};
|
||||
|
||||
/// This class records statistics on instrumentation based profiling.
|
||||
|
177
clang/test/CodeGenObjC/arc-rv-attr.m
Normal file
177
clang/test/CodeGenObjC/arc-rv-attr.m
Normal file
@ -0,0 +1,177 @@
|
||||
// RUN: %clang_cc1 -triple arm64-apple-ios9 -fobjc-runtime=ios-9.0 -fobjc-arc -O -disable-llvm-passes -emit-llvm -o - %s | FileCheck %s -check-prefix=CHECK
|
||||
|
||||
@class A;
|
||||
|
||||
A *makeA(void);
|
||||
|
||||
void test_assign() {
|
||||
__unsafe_unretained id x;
|
||||
x = makeA();
|
||||
}
|
||||
// CHECK-LABEL: define{{.*}} void @test_assign()
|
||||
// CHECK: [[X:%.*]] = alloca i8*
|
||||
// CHECK: [[T0:%.*]] = call [[A:.*]]* @makeA() [ "clang.arc.rv"(i64 1) ]
|
||||
// CHECK-NEXT: call void (...) @llvm.objc.clang.arc.noop.use({{.*}} [[T0]])
|
||||
// CHECK-NEXT: [[T1:%.*]] = bitcast [[A]]* [[T0]] to i8*
|
||||
// CHECK-NEXT: store i8* [[T1]], i8** [[X]]
|
||||
// CHECK-NEXT: bitcast
|
||||
// CHECK-NEXT: lifetime.end
|
||||
// CHECK-NEXT: ret void
|
||||
|
||||
void test_assign_assign() {
|
||||
__unsafe_unretained id x, y;
|
||||
x = y = makeA();
|
||||
}
|
||||
// CHECK-LABEL: define{{.*}} void @test_assign_assign()
|
||||
// CHECK: [[X:%.*]] = alloca i8*
|
||||
// CHECK: [[Y:%.*]] = alloca i8*
|
||||
// CHECK: [[T0:%.*]] = call [[A]]* @makeA() [ "clang.arc.rv"(i64 1) ]
|
||||
// CHECK-NEXT: call void (...) @llvm.objc.clang.arc.noop.use({{.*}} [[T0]])
|
||||
// CHECK-NEXT: [[T1:%.*]] = bitcast [[A]]* [[T0]] to i8*
|
||||
// CHECK-NEXT: store i8* [[T1]], i8** [[Y]]
|
||||
// CHECK-NEXT: store i8* [[T1]], i8** [[X]]
|
||||
// CHECK-NEXT: bitcast
|
||||
// CHECK-NEXT: lifetime.end
|
||||
// CHECK-NEXT: bitcast
|
||||
// CHECK-NEXT: lifetime.end
|
||||
// CHECK-NEXT: ret void
|
||||
|
||||
void test_strong_assign_assign() {
|
||||
__strong id x;
|
||||
__unsafe_unretained id y;
|
||||
x = y = makeA();
|
||||
}
|
||||
// CHECK-LABEL: define{{.*}} void @test_strong_assign_assign()
|
||||
// CHECK: [[X:%.*]] = alloca i8*
|
||||
// CHECK: [[Y:%.*]] = alloca i8*
|
||||
// CHECK: [[T0:%.*]] = call [[A]]* @makeA() [ "clang.arc.rv"(i64 0) ]
|
||||
// CHECK-NEXT: call void (...) @llvm.objc.clang.arc.noop.use({{.*}} [[T0]])
|
||||
// CHECK-NEXT: [[T1:%.*]] = bitcast [[A]]* [[T0]] to i8*
|
||||
// CHECK-NEXT: store i8* [[T1]], i8** [[Y]]
|
||||
// CHECK-NEXT: [[OLD:%.*]] = load i8*, i8** [[X]]
|
||||
// CHECK-NEXT: store i8* [[T1]], i8** [[X]]
|
||||
// CHECK-NEXT: call void @llvm.objc.release(i8* [[OLD]]
|
||||
// CHECK-NEXT: bitcast
|
||||
// CHECK-NEXT: lifetime.end
|
||||
// CHECK-NEXT: [[T0:%.*]] = load i8*, i8** [[X]]
|
||||
// CHECK-NEXT: call void @llvm.objc.release(i8* [[T0]])
|
||||
// CHECK-NEXT: bitcast
|
||||
// CHECK-NEXT: lifetime.end
|
||||
// CHECK-NEXT: ret void
|
||||
|
||||
void test_assign_strong_assign() {
|
||||
__unsafe_unretained id x;
|
||||
__strong id y;
|
||||
x = y = makeA();
|
||||
}
|
||||
// CHECK-LABEL: define{{.*}} void @test_assign_strong_assign()
|
||||
// CHECK: [[X:%.*]] = alloca i8*
|
||||
// CHECK: [[Y:%.*]] = alloca i8*
|
||||
// CHECK: [[T0:%.*]] = call [[A]]* @makeA() [ "clang.arc.rv"(i64 0) ]
|
||||
// CHECK-NEXT: call void (...) @llvm.objc.clang.arc.noop.use({{.*}} [[T0]])
|
||||
// CHECK-NEXT: [[T1:%.*]] = bitcast [[A]]* [[T0]] to i8*
|
||||
// CHECK-NEXT: [[OLD:%.*]] = load i8*, i8** [[Y]]
|
||||
// CHECK-NEXT: store i8* [[T1]], i8** [[Y]]
|
||||
// CHECK-NEXT: call void @llvm.objc.release(i8* [[OLD]]
|
||||
// CHECK-NEXT: store i8* [[T1]], i8** [[X]]
|
||||
// CHECK-NEXT: [[T0:%.*]] = load i8*, i8** [[Y]]
|
||||
// CHECK-NEXT: call void @llvm.objc.release(i8* [[T0]])
|
||||
// CHECK-NEXT: bitcast
|
||||
// CHECK-NEXT: lifetime.end
|
||||
// CHECK-NEXT: bitcast
|
||||
// CHECK-NEXT: lifetime.end
|
||||
// CHECK-NEXT: ret void
|
||||
|
||||
void test_init() {
|
||||
__unsafe_unretained id x = makeA();
|
||||
}
|
||||
// CHECK-LABEL: define{{.*}} void @test_init()
|
||||
// CHECK: [[X:%.*]] = alloca i8*
|
||||
// CHECK: [[T0:%.*]] = call [[A]]* @makeA() [ "clang.arc.rv"(i64 1) ]
|
||||
// CHECK-NEXT: call void (...) @llvm.objc.clang.arc.noop.use({{.*}} [[T0]])
|
||||
// CHECK-NEXT: [[T1:%.*]] = bitcast [[A]]* [[T0]] to i8*
|
||||
// CHECK-NEXT: store i8* [[T1]], i8** [[X]]
|
||||
// CHECK-NEXT: bitcast
|
||||
// CHECK-NEXT: lifetime.end
|
||||
// CHECK-NEXT: ret void
|
||||
|
||||
void test_init_assignment() {
|
||||
__unsafe_unretained id x;
|
||||
__unsafe_unretained id y = x = makeA();
|
||||
}
|
||||
// CHECK-LABEL: define{{.*}} void @test_init_assignment()
|
||||
// CHECK: [[X:%.*]] = alloca i8*
|
||||
// CHECK: [[Y:%.*]] = alloca i8*
|
||||
// CHECK: [[T0:%.*]] = call [[A]]* @makeA() [ "clang.arc.rv"(i64 1) ]
|
||||
// CHECK-NEXT: call void (...) @llvm.objc.clang.arc.noop.use({{.*}} [[T0]])
|
||||
// CHECK-NEXT: [[T1:%.*]] = bitcast [[A]]* [[T0]] to i8*
|
||||
// CHECK-NEXT: store i8* [[T1]], i8** [[X]]
|
||||
// CHECK-NEXT: store i8* [[T1]], i8** [[Y]]
|
||||
// CHECK-NEXT: bitcast
|
||||
// CHECK-NEXT: lifetime.end
|
||||
// CHECK-NEXT: bitcast
|
||||
// CHECK-NEXT: lifetime.end
|
||||
// CHECK-NEXT: ret void
|
||||
|
||||
void test_strong_init_assignment() {
|
||||
__unsafe_unretained id x;
|
||||
__strong id y = x = makeA();
|
||||
}
|
||||
// CHECK-LABEL: define{{.*}} void @test_strong_init_assignment()
|
||||
// CHECK: [[X:%.*]] = alloca i8*
|
||||
// CHECK: [[Y:%.*]] = alloca i8*
|
||||
// CHECK: [[T0:%.*]] = call [[A]]* @makeA() [ "clang.arc.rv"(i64 0) ]
|
||||
// CHECK-NEXT: call void (...) @llvm.objc.clang.arc.noop.use({{.*}} [[T0]])
|
||||
// CHECK-NEXT: [[T1:%.*]] = bitcast [[A]]* [[T0]] to i8*
|
||||
// CHECK-NEXT: store i8* [[T1]], i8** [[X]]
|
||||
// CHECK-NEXT: store i8* [[T1]], i8** [[Y]]
|
||||
// CHECK-NEXT: [[T0:%.*]] = load i8*, i8** [[Y]]
|
||||
// CHECK-NEXT: call void @llvm.objc.release(i8* [[T0]])
|
||||
// CHECK-NEXT: bitcast
|
||||
// CHECK-NEXT: lifetime.end
|
||||
// CHECK-NEXT: bitcast
|
||||
// CHECK-NEXT: lifetime.end
|
||||
// CHECK-NEXT: ret void
|
||||
|
||||
void test_init_strong_assignment() {
|
||||
__strong id x;
|
||||
__unsafe_unretained id y = x = makeA();
|
||||
}
|
||||
// CHECK-LABEL: define{{.*}} void @test_init_strong_assignment()
|
||||
// CHECK: [[X:%.*]] = alloca i8*
|
||||
// CHECK: [[Y:%.*]] = alloca i8*
|
||||
// CHECK: [[T0:%.*]] = call [[A]]* @makeA() [ "clang.arc.rv"(i64 0) ]
|
||||
// CHECK-NEXT: call void (...) @llvm.objc.clang.arc.noop.use({{.*}} [[T0]])
|
||||
// CHECK-NEXT: [[T1:%.*]] = bitcast [[A]]* [[T0]] to i8*
|
||||
// CHECK-NEXT: [[OLD:%.*]] = load i8*, i8** [[X]]
|
||||
// CHECK-NEXT: store i8* [[T1]], i8** [[X]]
|
||||
// CHECK-NEXT: call void @llvm.objc.release(i8* [[OLD]])
|
||||
// CHECK-NEXT: store i8* [[T1]], i8** [[Y]]
|
||||
// CHECK-NEXT: bitcast
|
||||
// CHECK-NEXT: lifetime.end
|
||||
// CHECK-NEXT: [[T0:%.*]] = load i8*, i8** [[X]]
|
||||
// CHECK-NEXT: call void @llvm.objc.release(i8* [[T0]])
|
||||
// CHECK-NEXT: bitcast
|
||||
// CHECK-NEXT: lifetime.end
|
||||
// CHECK-NEXT: ret void
|
||||
|
||||
void test_ignored() {
|
||||
makeA();
|
||||
}
|
||||
// CHECK-LABEL: define{{.*}} void @test_ignored()
|
||||
// CHECK: [[T0:%.*]] = call [[A]]* @makeA() [ "clang.arc.rv"(i64 1) ]
|
||||
// CHECK-NEXT: call void (...) @llvm.objc.clang.arc.noop.use({{.*}} [[T0]])
|
||||
// CHECK-NEXT: ret void
|
||||
|
||||
void test_cast_to_void() {
|
||||
(void) makeA();
|
||||
}
|
||||
// CHECK-LABEL: define{{.*}} void @test_cast_to_void()
|
||||
// CHECK: [[T0:%.*]] = call [[A]]* @makeA() [ "clang.arc.rv"(i64 1) ]
|
||||
// CHECK-NEXT: call void (...) @llvm.objc.clang.arc.noop.use({{.*}} [[T0]])
|
||||
// CHECK-NEXT: ret void
|
||||
|
||||
// This is always at the end of the module.
|
||||
|
||||
// CHECK-OPTIMIZED: !llvm.module.flags = !{!0,
|
||||
// CHECK-OPTIMIZED: !0 = !{i32 1, !"clang.arc.retainAutoreleasedReturnValueMarker", !"mov{{.*}}marker for objc_retainAutoreleaseReturnValue"}
|
@ -4,11 +4,10 @@
|
||||
// Make sure it works on x86-32.
|
||||
// RUN: %clang_cc1 -triple i386-apple-darwin11 -fobjc-runtime=macosx-fragile-10.11 -fobjc-arc -emit-llvm -o - %s | FileCheck %s -check-prefix=CHECK -check-prefix=CHECK-UNOPTIMIZED -check-prefix=CHECK-MARKED -check-prefix=CALL
|
||||
|
||||
// Make sure it works on ARM.
|
||||
// RUN: %clang_cc1 -triple arm64-apple-ios9 -fobjc-runtime=ios-9.0 -fobjc-arc -emit-llvm -o - %s | FileCheck %s -check-prefix=CHECK -check-prefix=CHECK-UNOPTIMIZED -check-prefix=CHECK-MARKED -check-prefix=CALL
|
||||
// RUN: %clang_cc1 -triple arm64-apple-ios9 -fobjc-runtime=ios-9.0 -fobjc-arc -O -disable-llvm-passes -emit-llvm -o - %s | FileCheck %s -check-prefix=CHECK -check-prefix=CHECK-OPTIMIZED -check-prefix=CALL
|
||||
|
||||
// Make sure it works on ARM64.
|
||||
// RUN: %clang_cc1 -triple arm64-apple-ios9 -fobjc-runtime=ios-9.0 -fobjc-arc -emit-llvm -o - %s | FileCheck %s -check-prefix=CHECK -check-prefix=CHECK-UNOPTIMIZED -check-prefix=CHECK-MARKED -check-prefix=CALL
|
||||
|
||||
// Make sure it works on ARM.
|
||||
// RUN: %clang_cc1 -triple armv7-apple-ios9 -fobjc-runtime=ios-9.0 -fobjc-arc -emit-llvm -o - %s | FileCheck %s -check-prefix=CHECK -check-prefix=CHECK-UNOPTIMIZED -check-prefix=CHECK-MARKED -check-prefix=CALL
|
||||
// RUN: %clang_cc1 -triple armv7-apple-ios9 -fobjc-runtime=ios-9.0 -fobjc-arc -O -disable-llvm-passes -emit-llvm -o - %s | FileCheck %s -check-prefix=CHECK -check-prefix=CHECK-OPTIMIZED -check-prefix=CALL
|
||||
|
||||
|
@ -2325,6 +2325,18 @@ When lowered, any relocated value will be recorded in the corresponding
|
||||
:ref:`stackmap entry <statepoint-stackmap-format>`. See the intrinsic description
|
||||
for further details.
|
||||
|
||||
ObjC ARC RetainRV/ClaimRV Operand Bundles
|
||||
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
|
||||
|
||||
A ``"clang.arc.rv"`` operand bundle on a call indicates the call is implicitly
|
||||
followed by a marker instruction and a call to an ObjC runtime function that
|
||||
uses the result of the call. If the argument passed to the operand bundle is 0,
|
||||
``@objc_retainAutoreleasedReturnValue`` is called. If 1 is passed,
|
||||
``@objc_unsafeClaimAutoreleasedReturnValue`` is called.
|
||||
|
||||
The operand bundle is needed to ensure the call is immediately followed by the
|
||||
marker instruction or the ObjC runtime call in the final output.
|
||||
|
||||
.. _moduleasm:
|
||||
|
||||
Module-Level Inline Assembly
|
||||
|
48
llvm/include/llvm/Analysis/ObjCARCUtil.h
Normal file
48
llvm/include/llvm/Analysis/ObjCARCUtil.h
Normal file
@ -0,0 +1,48 @@
|
||||
//===- ObjCARCUtil.h - ObjC ARC Utility Functions ---------------*- C++ -*-===//
|
||||
//
|
||||
// Part of the LLVM Project, under the Apache License v2.0 with LLVM Exceptions.
|
||||
// See https://llvm.org/LICENSE.txt for license information.
|
||||
// SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception
|
||||
//
|
||||
//===----------------------------------------------------------------------===//
|
||||
/// \file
|
||||
/// This file defines ARC utility functions which are used by various parts of
|
||||
/// the compiler.
|
||||
///
|
||||
//===----------------------------------------------------------------------===//
|
||||
|
||||
#ifndef LLVM_LIB_ANALYSIS_OBJCARCUTIL_H
|
||||
#define LLVM_LIB_ANALYSIS_OBJCARCUTIL_H
|
||||
|
||||
#include "llvm/IR/InstrTypes.h"
|
||||
#include "llvm/IR/LLVMContext.h"
|
||||
|
||||
namespace llvm {
|
||||
namespace objcarc {
|
||||
|
||||
static inline const char *getRVMarkerModuleFlagStr() {
|
||||
return "clang.arc.retainAutoreleasedReturnValueMarker";
|
||||
}
|
||||
|
||||
enum RVOperandBundle : unsigned { RVOB_Retain, RVOB_Claim };
|
||||
|
||||
static RVOperandBundle getRVOperandBundleEnum(bool IsRetain) {
|
||||
return IsRetain ? RVOB_Retain : RVOB_Claim;
|
||||
}
|
||||
|
||||
static inline bool hasRVOpBundle(const CallBase *CB, bool IsRetain) {
|
||||
auto B = CB->getOperandBundle(LLVMContext::OB_clang_arc_rv);
|
||||
if (!B.hasValue())
|
||||
return false;
|
||||
return cast<ConstantInt>(B->Inputs[0])->getZExtValue() ==
|
||||
getRVOperandBundleEnum(IsRetain);
|
||||
}
|
||||
|
||||
static inline bool hasRVOpBundle(const CallBase *CB) {
|
||||
return CB->getOperandBundle(LLVMContext::OB_clang_arc_rv).hasValue();
|
||||
}
|
||||
|
||||
} // end namespace objcarc
|
||||
} // end namespace llvm
|
||||
|
||||
#endif
|
@ -1214,6 +1214,15 @@ public:
|
||||
static CallBase *Create(CallBase *CB, ArrayRef<OperandBundleDef> Bundles,
|
||||
Instruction *InsertPt = nullptr);
|
||||
|
||||
/// Create a clone of \p CB with operand bundle \p OB added.
|
||||
static CallBase *addOperandBundle(CallBase *CB, uint32_t ID,
|
||||
OperandBundleDef OB,
|
||||
Instruction *InsertPt = nullptr);
|
||||
|
||||
/// Create a clone of \p CB with operand bundle \p ID removed.
|
||||
static CallBase *removeOperandBundle(CallBase *CB, uint32_t ID,
|
||||
Instruction *InsertPt = nullptr);
|
||||
|
||||
static bool classof(const Instruction *I) {
|
||||
return I->getOpcode() == Instruction::Call ||
|
||||
I->getOpcode() == Instruction::Invoke ||
|
||||
|
@ -446,6 +446,9 @@ def int_objc_storeWeak : Intrinsic<[llvm_ptr_ty],
|
||||
llvm_ptr_ty]>;
|
||||
def int_objc_clang_arc_use : Intrinsic<[],
|
||||
[llvm_vararg_ty]>;
|
||||
def int_objc_clang_arc_noop_use : DefaultAttrsIntrinsic<[],
|
||||
[llvm_vararg_ty],
|
||||
[IntrInaccessibleMemOnly]>;
|
||||
def int_objc_unsafeClaimAutoreleasedReturnValue : Intrinsic<[llvm_ptr_ty],
|
||||
[llvm_ptr_ty]>;
|
||||
def int_objc_retainedObject : Intrinsic<[llvm_ptr_ty],
|
||||
|
@ -93,6 +93,7 @@ public:
|
||||
OB_cfguardtarget = 3, // "cfguardtarget"
|
||||
OB_preallocated = 4, // "preallocated"
|
||||
OB_gc_live = 5, // "gc-live"
|
||||
OB_clang_arc_rv = 6, // "clang.arc.rv"
|
||||
};
|
||||
|
||||
/// getMDKindID - Return a unique non-zero ID for the specified metadata kind.
|
||||
|
@ -140,6 +140,7 @@ ARCInstKind llvm::objcarc::GetFunctionClass(const Function *F) {
|
||||
return ARCInstKind::User;
|
||||
case Intrinsic::objc_sync_exit:
|
||||
return ARCInstKind::User;
|
||||
case Intrinsic::objc_clang_arc_noop_use:
|
||||
case Intrinsic::objc_arc_annotation_topdown_bbstart:
|
||||
case Intrinsic::objc_arc_annotation_topdown_bbend:
|
||||
case Intrinsic::objc_arc_annotation_bottomup_bbstart:
|
||||
|
@ -2782,11 +2782,10 @@ void SelectionDAGBuilder::visitInvoke(const InvokeInst &I) {
|
||||
|
||||
// Deopt bundles are lowered in LowerCallSiteWithDeoptBundle, and we don't
|
||||
// have to do anything here to lower funclet bundles.
|
||||
assert(!I.hasOperandBundlesOtherThan({LLVMContext::OB_deopt,
|
||||
LLVMContext::OB_gc_transition,
|
||||
LLVMContext::OB_gc_live,
|
||||
LLVMContext::OB_funclet,
|
||||
LLVMContext::OB_cfguardtarget}) &&
|
||||
assert(!I.hasOperandBundlesOtherThan(
|
||||
{LLVMContext::OB_deopt, LLVMContext::OB_gc_transition,
|
||||
LLVMContext::OB_gc_live, LLVMContext::OB_funclet,
|
||||
LLVMContext::OB_cfguardtarget, LLVMContext::OB_clang_arc_rv}) &&
|
||||
"Cannot lower invokes with arbitrary operand bundles yet!");
|
||||
|
||||
const Value *Callee(I.getCalledOperand());
|
||||
@ -7873,7 +7872,8 @@ void SelectionDAGBuilder::visitCall(const CallInst &I) {
|
||||
// CFGuardTarget bundles are lowered in LowerCallTo.
|
||||
assert(!I.hasOperandBundlesOtherThan(
|
||||
{LLVMContext::OB_deopt, LLVMContext::OB_funclet,
|
||||
LLVMContext::OB_cfguardtarget, LLVMContext::OB_preallocated}) &&
|
||||
LLVMContext::OB_cfguardtarget, LLVMContext::OB_preallocated,
|
||||
LLVMContext::OB_clang_arc_rv}) &&
|
||||
"Cannot lower calls with arbitrary operand bundles!");
|
||||
|
||||
SDValue Callee = getValue(I.getCalledOperand());
|
||||
|
@ -424,6 +424,35 @@ CallBase::BundleOpInfo &CallBase::getBundleOpInfoForOperand(unsigned OpIdx) {
|
||||
return *Current;
|
||||
}
|
||||
|
||||
CallBase *CallBase::addOperandBundle(CallBase *CB, uint32_t ID,
|
||||
OperandBundleDef OB,
|
||||
Instruction *InsertPt) {
|
||||
if (CB->getOperandBundle(ID))
|
||||
return CB;
|
||||
|
||||
SmallVector<OperandBundleDef, 1> Bundles;
|
||||
CB->getOperandBundlesAsDefs(Bundles);
|
||||
Bundles.push_back(OB);
|
||||
return Create(CB, Bundles, InsertPt);
|
||||
}
|
||||
|
||||
CallBase *CallBase::removeOperandBundle(CallBase *CB, uint32_t ID,
|
||||
Instruction *InsertPt) {
|
||||
SmallVector<OperandBundleDef, 1> Bundles;
|
||||
bool CreateNew = false;
|
||||
|
||||
for (unsigned I = 0, E = CB->getNumOperandBundles(); I != E; ++I) {
|
||||
auto Bundle = CB->getOperandBundleAt(I);
|
||||
if (Bundle.getTagID() == ID) {
|
||||
CreateNew = true;
|
||||
continue;
|
||||
}
|
||||
Bundles.emplace_back(Bundle);
|
||||
}
|
||||
|
||||
return CreateNew ? Create(CB, Bundles, InsertPt) : CB;
|
||||
}
|
||||
|
||||
//===----------------------------------------------------------------------===//
|
||||
// CallInst Implementation
|
||||
//===----------------------------------------------------------------------===//
|
||||
|
@ -78,6 +78,11 @@ LLVMContext::LLVMContext() : pImpl(new LLVMContextImpl(*this)) {
|
||||
"gc-transition operand bundle id drifted!");
|
||||
(void)GCLiveEntry;
|
||||
|
||||
auto *ClangARCRVEntry = pImpl->getOrInsertBundleTag("clang.arc.rv");
|
||||
assert(ClangARCRVEntry->second == LLVMContext::OB_clang_arc_rv &&
|
||||
"clang.arc.rv operand bundle id drifted!");
|
||||
(void)ClangARCRVEntry;
|
||||
|
||||
SyncScope::ID SingleThreadSSID =
|
||||
pImpl->getOrInsertSyncScopeID("singlethread");
|
||||
assert(SingleThreadSSID == SyncScope::SingleThread &&
|
||||
|
@ -29,6 +29,7 @@
|
||||
#include "llvm/ADT/StringRef.h"
|
||||
#include "llvm/ADT/Triple.h"
|
||||
#include "llvm/ADT/Twine.h"
|
||||
#include "llvm/Analysis/ObjCARCUtil.h"
|
||||
#include "llvm/Analysis/VectorUtils.h"
|
||||
#include "llvm/CodeGen/CallingConvLower.h"
|
||||
#include "llvm/CodeGen/MachineBasicBlock.h"
|
||||
@ -5653,11 +5654,11 @@ AArch64TargetLowering::LowerCall(CallLoweringInfo &CLI,
|
||||
}
|
||||
|
||||
unsigned CallOpc = AArch64ISD::CALL;
|
||||
// Calls marked with "rv_marker" are special. They should be expanded to the
|
||||
// call, directly followed by a special marker sequence. Use the CALL_RVMARKER
|
||||
// to do that.
|
||||
if (CLI.CB && CLI.CB->hasRetAttr("rv_marker")) {
|
||||
assert(!IsTailCall && "tail calls cannot be marked with rv_marker");
|
||||
// Calls with operand bundle "clang.arc.rv" are special. They should be
|
||||
// expanded to the call, directly followed by a special marker sequence. Use
|
||||
// the CALL_RVMARKER to do that.
|
||||
if (CLI.CB && objcarc::hasRVOpBundle(CLI.CB)) {
|
||||
assert(!IsTailCall && "tail calls cannot be marked with clang.arc.rv");
|
||||
CallOpc = AArch64ISD::CALL_RVMARKER;
|
||||
}
|
||||
|
||||
|
@ -42,6 +42,7 @@ enum class ARCRuntimeEntryPointKind {
|
||||
Autorelease,
|
||||
StoreStrong,
|
||||
RetainRV,
|
||||
ClaimRV,
|
||||
RetainAutorelease,
|
||||
RetainAutoreleaseRV,
|
||||
};
|
||||
@ -61,6 +62,7 @@ public:
|
||||
Autorelease = nullptr;
|
||||
StoreStrong = nullptr;
|
||||
RetainRV = nullptr;
|
||||
ClaimRV = nullptr;
|
||||
RetainAutorelease = nullptr;
|
||||
RetainAutoreleaseRV = nullptr;
|
||||
}
|
||||
@ -85,6 +87,9 @@ public:
|
||||
case ARCRuntimeEntryPointKind::RetainRV:
|
||||
return getIntrinsicEntryPoint(RetainRV,
|
||||
Intrinsic::objc_retainAutoreleasedReturnValue);
|
||||
case ARCRuntimeEntryPointKind::ClaimRV:
|
||||
return getIntrinsicEntryPoint(
|
||||
ClaimRV, Intrinsic::objc_unsafeClaimAutoreleasedReturnValue);
|
||||
case ARCRuntimeEntryPointKind::RetainAutorelease:
|
||||
return getIntrinsicEntryPoint(RetainAutorelease,
|
||||
Intrinsic::objc_retainAutorelease);
|
||||
@ -121,6 +126,9 @@ private:
|
||||
/// Declaration for objc_retainAutoreleasedReturnValue().
|
||||
Function *RetainRV = nullptr;
|
||||
|
||||
/// Declaration for objc_unsafeClaimAutoreleasedReturnValue().
|
||||
Function *ClaimRV = nullptr;
|
||||
|
||||
/// Declaration for objc_retainAutorelease().
|
||||
Function *RetainAutorelease = nullptr;
|
||||
|
||||
|
@ -14,7 +14,12 @@
|
||||
|
||||
#include "ObjCARC.h"
|
||||
#include "llvm-c/Initialization.h"
|
||||
#include "llvm/Analysis/ObjCARCUtil.h"
|
||||
#include "llvm/IR/IRBuilder.h"
|
||||
#include "llvm/IR/InlineAsm.h"
|
||||
#include "llvm/IR/Instructions.h"
|
||||
#include "llvm/InitializePasses.h"
|
||||
#include "llvm/Transforms/Utils/BasicBlockUtils.h"
|
||||
|
||||
namespace llvm {
|
||||
class PassRegistry;
|
||||
@ -37,3 +42,91 @@ void llvm::initializeObjCARCOpts(PassRegistry &Registry) {
|
||||
void LLVMInitializeObjCARCOpts(LLVMPassRegistryRef R) {
|
||||
initializeObjCARCOpts(*unwrap(R));
|
||||
}
|
||||
|
||||
CallInst *objcarc::createCallInstWithColors(
|
||||
FunctionCallee Func, ArrayRef<Value *> Args, const Twine &NameStr,
|
||||
Instruction *InsertBefore,
|
||||
const DenseMap<BasicBlock *, ColorVector> &BlockColors) {
|
||||
FunctionType *FTy = Func.getFunctionType();
|
||||
Value *Callee = Func.getCallee();
|
||||
SmallVector<OperandBundleDef, 1> OpBundles;
|
||||
|
||||
if (!BlockColors.empty()) {
|
||||
const ColorVector &CV = BlockColors.find(InsertBefore->getParent())->second;
|
||||
assert(CV.size() == 1 && "non-unique color for block!");
|
||||
Instruction *EHPad = CV.front()->getFirstNonPHI();
|
||||
if (EHPad->isEHPad())
|
||||
OpBundles.emplace_back("funclet", EHPad);
|
||||
}
|
||||
|
||||
return CallInst::Create(FTy, Callee, Args, OpBundles, NameStr, InsertBefore);
|
||||
}
|
||||
|
||||
std::pair<bool, bool>
|
||||
BundledRetainClaimRVs::insertAfterInvokes(Function &F, DominatorTree *DT) {
|
||||
bool Changed = false, CFGChanged = false;
|
||||
|
||||
for (BasicBlock &BB : F) {
|
||||
auto *I = dyn_cast<InvokeInst>(BB.getTerminator());
|
||||
|
||||
if (!I)
|
||||
continue;
|
||||
|
||||
if (!objcarc::hasRVOpBundle(I))
|
||||
continue;
|
||||
|
||||
BasicBlock *DestBB = I->getNormalDest();
|
||||
|
||||
if (!DestBB->getSinglePredecessor()) {
|
||||
assert(I->getSuccessor(0) == DestBB &&
|
||||
"the normal dest is expected to be the first successor");
|
||||
DestBB = SplitCriticalEdge(I, 0, CriticalEdgeSplittingOptions(DT));
|
||||
CFGChanged = true;
|
||||
}
|
||||
|
||||
// We don't have to call insertRVCallWithColors since DestBB is the normal
|
||||
// destination of the invoke.
|
||||
insertRVCall(&*DestBB->getFirstInsertionPt(), I);
|
||||
Changed = true;
|
||||
}
|
||||
|
||||
return std::make_pair(Changed, CFGChanged);
|
||||
}
|
||||
|
||||
CallInst *BundledRetainClaimRVs::insertRVCall(Instruction *InsertPt,
|
||||
CallBase *AnnotatedCall) {
|
||||
DenseMap<BasicBlock *, ColorVector> BlockColors;
|
||||
return insertRVCallWithColors(InsertPt, AnnotatedCall, BlockColors);
|
||||
}
|
||||
|
||||
/// Insert, before \p InsertPt, the retainRV or claimRV runtime call that is
/// implied by the "clang.arc.rv" operand bundle on \p AnnotatedCall, passing
/// the annotated call's result as the argument. \p BlockColors supplies WinEH
/// funclet colors so the new call receives the correct "funclet" bundle.
CallInst *BundledRetainClaimRVs::insertRVCallWithColors(
    Instruction *InsertPt, CallBase *AnnotatedCall,
    const DenseMap<BasicBlock *, ColorVector> &BlockColors) {
  IRBuilder<> Builder(InsertPt);
  // The operand bundle's argument selects between retainRV and claimRV.
  bool IsRetainRV = objcarc::hasRVOpBundle(AnnotatedCall, true);
  Function *Func = EP.get(IsRetainRV ? ARCRuntimeEntryPointKind::RetainRV
                                     : ARCRuntimeEntryPointKind::ClaimRV);
  Type *ParamTy = Func->getArg(0)->getType();
  // Adjust the annotated call's result to the runtime function's parameter
  // type if necessary.
  Value *CallArg = Builder.CreateBitCast(AnnotatedCall, ParamTy);
  auto *Call =
      createCallInstWithColors(Func, CallArg, "", InsertPt, BlockColors);
  // Record the mapping so the destructor and eraseInst can later find the
  // annotated call this runtime call was inserted for.
  RVCalls[Call] = AnnotatedCall;
  return Call;
}
|
||||
|
||||
BundledRetainClaimRVs::~BundledRetainClaimRVs() {
  if (!ContractPass) {
    // The ARC optimizer only inserted these runtime calls temporarily to
    // expose the def-use relationship to its analyses; remove them again
    // before the pass finishes.
    for (const auto &Entry : RVCalls)
      EraseInstruction(Entry.first);
    RVCalls.clear();
    return;
  }

  // At this point, we know that the annotated calls can't be tail calls as
  // they are followed by marker instructions and retainRV/claimRV calls. Mark
  // them as notail, so that the backend knows these calls can't be tail
  // calls.
  for (const auto &Entry : RVCalls)
    if (auto *CI = dyn_cast<CallInst>(Entry.second))
      CI->setTailCallKind(CallInst::TCK_NoTail);

  RVCalls.clear();
}
|
||||
|
@ -22,7 +22,10 @@
|
||||
#ifndef LLVM_LIB_TRANSFORMS_OBJCARC_OBJCARC_H
|
||||
#define LLVM_LIB_TRANSFORMS_OBJCARC_OBJCARC_H
|
||||
|
||||
#include "ARCRuntimeEntryPoints.h"
|
||||
#include "llvm/Analysis/EHPersonalities.h"
|
||||
#include "llvm/Analysis/ObjCARCAnalysisUtils.h"
|
||||
#include "llvm/Analysis/ObjCARCUtil.h"
|
||||
#include "llvm/Transforms/Utils/Local.h"
|
||||
|
||||
namespace llvm {
|
||||
@ -87,6 +90,67 @@ void getEquivalentPHIs(PHINodeTy &PN, VectorTy &PHIList) {
|
||||
}
|
||||
}
|
||||
|
||||
/// Return the inline-asm marker string recorded as a module flag (the marker
/// the backend emits between a call and its retainRV/claimRV call), or null
/// if the module has no such flag.
static inline MDString *getRVInstMarker(Module &M) {
  return dyn_cast_or_null<MDString>(
      M.getModuleFlag(getRVMarkerModuleFlagStr()));
}
|
||||
|
||||
/// Create a call instruction with the correct funclet token. This should be
|
||||
/// called instead of calling CallInst::Create directly unless the call is
|
||||
/// going to be removed from the IR before WinEHPrepare.
|
||||
CallInst *createCallInstWithColors(
|
||||
FunctionCallee Func, ArrayRef<Value *> Args, const Twine &NameStr,
|
||||
Instruction *InsertBefore,
|
||||
const DenseMap<BasicBlock *, ColorVector> &BlockColors);
|
||||
|
||||
/// Tracks the retainRV/claimRV runtime calls that the ARC middle-end passes
/// insert after calls annotated with the "clang.arc.rv" operand bundle, and
/// performs the required cleanup when the pass finishes (see the destructor).
class BundledRetainClaimRVs {
public:
  // \p ContractPass is true when used by the ARC contract pass, which keeps
  // the inserted runtime calls in the IR; when false (the ARC optimizer),
  // the destructor removes them again.
  BundledRetainClaimRVs(ARCRuntimeEntryPoints &P, bool ContractPass)
      : EP(P), ContractPass(ContractPass) {}
  ~BundledRetainClaimRVs();

  /// Insert a retainRV/claimRV call to the normal destination blocks of invokes
  /// with operand bundle "clang.arc.rv". If the edge to the normal destination
  /// block is a critical edge, split it.
  std::pair<bool, bool> insertAfterInvokes(Function &F, DominatorTree *DT);

  /// Insert a retainRV/claimRV call.
  CallInst *insertRVCall(Instruction *InsertPt, CallBase *AnnotatedCall);

  /// Insert a retainRV/claimRV call with colors.
  CallInst *insertRVCallWithColors(
      Instruction *InsertPt, CallBase *AnnotatedCall,
      const DenseMap<BasicBlock *, ColorVector> &BlockColors);

  /// See if an instruction is a bundled retainRV/claimRV call.
  bool contains(const Instruction *I) const {
    if (auto *CI = dyn_cast<CallInst>(I))
      return RVCalls.count(CI);
    return false;
  }

  /// Remove a retainRV/claimRV call entirely. If \p CI is tracked here, also
  /// strip the "clang.arc.rv" operand bundle from the annotated call it was
  /// inserted for.
  void eraseInst(CallInst *CI) {
    auto It = RVCalls.find(CI);
    if (It != RVCalls.end()) {
      // Operand bundles are immutable, so removing one means rebuilding the
      // call: clone without the bundle, carry over metadata, redirect all
      // users, then delete the original annotated call.
      auto *NewCall = CallBase::removeOperandBundle(
          It->second, LLVMContext::OB_clang_arc_rv, It->second);
      NewCall->copyMetadata(*It->second);
      It->second->replaceAllUsesWith(NewCall);
      It->second->eraseFromParent();
      RVCalls.erase(It);
    }
    EraseInstruction(CI);
  }

private:
  /// A map of inserted retainRV/claimRV calls to annotated calls/invokes.
  DenseMap<CallInst *, CallBase *> RVCalls;

  /// Cache of declarations for the ARC runtime entry points
  /// (retainRV/claimRV etc.).
  ARCRuntimeEntryPoints &EP;
  /// True when used by the ARC contract pass; see the constructor comment.
  bool ContractPass;
};
|
||||
|
||||
} // end namespace objcarc
|
||||
} // end namespace llvm
|
||||
|
||||
|
@ -32,6 +32,7 @@
|
||||
#include "llvm/ADT/Statistic.h"
|
||||
#include "llvm/Analysis/AliasAnalysis.h"
|
||||
#include "llvm/Analysis/EHPersonalities.h"
|
||||
#include "llvm/Analysis/ObjCARCUtil.h"
|
||||
#include "llvm/IR/Dominators.h"
|
||||
#include "llvm/IR/InlineAsm.h"
|
||||
#include "llvm/IR/InstIterator.h"
|
||||
@ -63,13 +64,12 @@ namespace {
|
||||
|
||||
class ObjCARCContract {
|
||||
bool Changed;
|
||||
bool CFGChanged;
|
||||
AAResults *AA;
|
||||
DominatorTree *DT;
|
||||
ProvenanceAnalysis PA;
|
||||
ARCRuntimeEntryPoints EP;
|
||||
|
||||
/// A flag indicating whether this optimization pass should run.
|
||||
bool Run;
|
||||
BundledRetainClaimRVs *BundledInsts = nullptr;
|
||||
|
||||
/// The inline asm string to insert between calls and RetainRV calls to make
|
||||
/// the optimization work on targets which need it.
|
||||
@ -98,6 +98,7 @@ class ObjCARCContract {
|
||||
public:
|
||||
bool init(Module &M);
|
||||
bool run(Function &F, AAResults *AA, DominatorTree *DT);
|
||||
bool hasCFGChanged() const { return CFGChanged; }
|
||||
};
|
||||
|
||||
class ObjCARCContractLegacyPass : public FunctionPass {
|
||||
@ -304,32 +305,6 @@ findRetainForStoreStrongContraction(Value *New, StoreInst *Store,
|
||||
return Retain;
|
||||
}
|
||||
|
||||
/// Create a call instruction with the correct funclet token. Should be used
|
||||
/// instead of calling CallInst::Create directly.
|
||||
static CallInst *
|
||||
createCallInst(FunctionType *FTy, Value *Func, ArrayRef<Value *> Args,
|
||||
const Twine &NameStr, Instruction *InsertBefore,
|
||||
const DenseMap<BasicBlock *, ColorVector> &BlockColors) {
|
||||
SmallVector<OperandBundleDef, 1> OpBundles;
|
||||
if (!BlockColors.empty()) {
|
||||
const ColorVector &CV = BlockColors.find(InsertBefore->getParent())->second;
|
||||
assert(CV.size() == 1 && "non-unique color for block!");
|
||||
Instruction *EHPad = CV.front()->getFirstNonPHI();
|
||||
if (EHPad->isEHPad())
|
||||
OpBundles.emplace_back("funclet", EHPad);
|
||||
}
|
||||
|
||||
return CallInst::Create(FTy, Func, Args, OpBundles, NameStr, InsertBefore);
|
||||
}
|
||||
|
||||
static CallInst *
|
||||
createCallInst(FunctionCallee Func, ArrayRef<Value *> Args, const Twine &NameStr,
|
||||
Instruction *InsertBefore,
|
||||
const DenseMap<BasicBlock *, ColorVector> &BlockColors) {
|
||||
return createCallInst(Func.getFunctionType(), Func.getCallee(), Args, NameStr,
|
||||
InsertBefore, BlockColors);
|
||||
}
|
||||
|
||||
/// Attempt to merge an objc_release with a store, load, and objc_retain to form
|
||||
/// an objc_storeStrong. An objc_storeStrong:
|
||||
///
|
||||
@ -411,7 +386,8 @@ void ObjCARCContract::tryToContractReleaseIntoStoreStrong(
|
||||
if (Args[1]->getType() != I8X)
|
||||
Args[1] = new BitCastInst(Args[1], I8X, "", Store);
|
||||
Function *Decl = EP.get(ARCRuntimeEntryPointKind::StoreStrong);
|
||||
CallInst *StoreStrong = createCallInst(Decl, Args, "", Store, BlockColors);
|
||||
CallInst *StoreStrong =
|
||||
objcarc::createCallInstWithColors(Decl, Args, "", Store, BlockColors);
|
||||
StoreStrong->setDoesNotThrow();
|
||||
StoreStrong->setDebugLoc(Store->getDebugLoc());
|
||||
|
||||
@ -456,9 +432,14 @@ bool ObjCARCContract::tryToPeepholeInstruction(
|
||||
case ARCInstKind::RetainRV:
|
||||
case ARCInstKind::ClaimRV: {
|
||||
// If we're compiling for a target which needs a special inline-asm
|
||||
// marker to do the return value optimization, insert it now.
|
||||
// marker to do the return value optimization and the retainRV/claimRV call
|
||||
// wasn't bundled with a call, insert the marker now.
|
||||
if (!RVInstMarker)
|
||||
return false;
|
||||
|
||||
if (BundledInsts->contains(Inst))
|
||||
return false;
|
||||
|
||||
BasicBlock::iterator BBI = Inst->getIterator();
|
||||
BasicBlock *InstParent = Inst->getParent();
|
||||
|
||||
@ -486,7 +467,7 @@ bool ObjCARCContract::tryToPeepholeInstruction(
|
||||
RVInstMarker->getString(),
|
||||
/*Constraints=*/"", /*hasSideEffects=*/true);
|
||||
|
||||
createCallInst(IA, None, "", Inst, BlockColors);
|
||||
objcarc::createCallInstWithColors(IA, None, "", Inst, BlockColors);
|
||||
}
|
||||
decline_rv_optimization:
|
||||
return false;
|
||||
@ -525,6 +506,12 @@ bool ObjCARCContract::tryToPeepholeInstruction(
|
||||
Inst->eraseFromParent();
|
||||
return true;
|
||||
default:
|
||||
if (auto *CI = dyn_cast<CallInst>(Inst))
|
||||
if (CI->getIntrinsicID() == Intrinsic::objc_clang_arc_noop_use) {
|
||||
// Remove calls to @llvm.objc.clang.arc.noop.use(...).
|
||||
Changed = true;
|
||||
CI->eraseFromParent();
|
||||
}
|
||||
return true;
|
||||
}
|
||||
}
|
||||
@ -534,16 +521,10 @@ bool ObjCARCContract::tryToPeepholeInstruction(
|
||||
//===----------------------------------------------------------------------===//
|
||||
|
||||
bool ObjCARCContract::init(Module &M) {
|
||||
// If nothing in the Module uses ARC, don't do anything.
|
||||
Run = ModuleHasARC(M);
|
||||
if (!Run)
|
||||
return false;
|
||||
|
||||
EP.init(&M);
|
||||
|
||||
// Initialize RVInstMarker.
|
||||
const char *MarkerKey = "clang.arc.retainAutoreleasedReturnValueMarker";
|
||||
RVInstMarker = dyn_cast_or_null<MDString>(M.getModuleFlag(MarkerKey));
|
||||
RVInstMarker = getRVInstMarker(M);
|
||||
|
||||
return false;
|
||||
}
|
||||
@ -552,14 +533,16 @@ bool ObjCARCContract::run(Function &F, AAResults *A, DominatorTree *D) {
|
||||
if (!EnableARCOpts)
|
||||
return false;
|
||||
|
||||
// If nothing in the Module uses ARC, don't do anything.
|
||||
if (!Run)
|
||||
return false;
|
||||
|
||||
Changed = false;
|
||||
Changed = CFGChanged = false;
|
||||
AA = A;
|
||||
DT = D;
|
||||
PA.setAA(A);
|
||||
BundledRetainClaimRVs BRV(EP, true);
|
||||
BundledInsts = &BRV;
|
||||
|
||||
std::pair<bool, bool> R = BundledInsts->insertAfterInvokes(F, DT);
|
||||
Changed |= R.first;
|
||||
CFGChanged |= R.second;
|
||||
|
||||
DenseMap<BasicBlock *, ColorVector> BlockColors;
|
||||
if (F.hasPersonalityFn() &&
|
||||
@ -584,6 +567,13 @@ bool ObjCARCContract::run(Function &F, AAResults *A, DominatorTree *D) {
|
||||
|
||||
LLVM_DEBUG(dbgs() << "Visiting: " << *Inst << "\n");
|
||||
|
||||
if (auto *CI = dyn_cast<CallInst>(Inst))
|
||||
if (objcarc::hasRVOpBundle(CI)) {
|
||||
BundledInsts->insertRVCallWithColors(&*I, CI, BlockColors);
|
||||
--I;
|
||||
Changed = true;
|
||||
}
|
||||
|
||||
// First try to peephole Inst. If there is nothing further we can do in
|
||||
// terms of undoing objc-arc-expand, process the next inst.
|
||||
if (tryToPeepholeInstruction(F, Inst, I, TailOkForStoreStrongs,
|
||||
@ -733,7 +723,6 @@ INITIALIZE_PASS_END(ObjCARCContractLegacyPass, "objc-arc-contract",
|
||||
void ObjCARCContractLegacyPass::getAnalysisUsage(AnalysisUsage &AU) const {
|
||||
AU.addRequired<AAResultsWrapperPass>();
|
||||
AU.addRequired<DominatorTreeWrapperPass>();
|
||||
AU.setPreservesCFG();
|
||||
}
|
||||
|
||||
Pass *llvm::createObjCARCContractPass() {
|
||||
@ -757,9 +746,11 @@ PreservedAnalyses ObjCARCContractPass::run(Function &F,
|
||||
|
||||
bool Changed = OCAC.run(F, &AM.getResult<AAManager>(F),
|
||||
&AM.getResult<DominatorTreeAnalysis>(F));
|
||||
bool CFGChanged = OCAC.hasCFGChanged();
|
||||
if (Changed) {
|
||||
PreservedAnalyses PA;
|
||||
PA.preserveSet<CFGAnalyses>();
|
||||
if (!CFGChanged)
|
||||
PA.preserveSet<CFGAnalyses>();
|
||||
return PA;
|
||||
}
|
||||
return PreservedAnalyses::all();
|
||||
|
@ -41,6 +41,7 @@
|
||||
#include "llvm/Analysis/ObjCARCAliasAnalysis.h"
|
||||
#include "llvm/Analysis/ObjCARCAnalysisUtils.h"
|
||||
#include "llvm/Analysis/ObjCARCInstKind.h"
|
||||
#include "llvm/Analysis/ObjCARCUtil.h"
|
||||
#include "llvm/IR/BasicBlock.h"
|
||||
#include "llvm/IR/CFG.h"
|
||||
#include "llvm/IR/Constant.h"
|
||||
@ -483,6 +484,7 @@ namespace {
|
||||
/// The main ARC optimization pass.
|
||||
class ObjCARCOpt {
|
||||
bool Changed;
|
||||
bool CFGChanged;
|
||||
ProvenanceAnalysis PA;
|
||||
|
||||
/// A cache of references to runtime entry point constants.
|
||||
@ -492,8 +494,7 @@ class ObjCARCOpt {
|
||||
/// MDKind identifiers.
|
||||
ARCMDKindCache MDKindCache;
|
||||
|
||||
/// A flag indicating whether this optimization pass should run.
|
||||
bool Run;
|
||||
BundledRetainClaimRVs *BundledInsts = nullptr;
|
||||
|
||||
/// A flag indicating whether the optimization that removes or moves
|
||||
/// retain/release pairs should be performed.
|
||||
@ -573,6 +574,7 @@ class ObjCARCOpt {
|
||||
void init(Module &M);
|
||||
bool run(Function &F, AAResults &AA);
|
||||
void releaseMemory();
|
||||
bool hasCFGChanged() const { return CFGChanged; }
|
||||
};
|
||||
|
||||
/// The main ARC optimization pass.
|
||||
@ -610,8 +612,6 @@ Pass *llvm::createObjCARCOptPass() { return new ObjCARCOptLegacyPass(); }
|
||||
void ObjCARCOptLegacyPass::getAnalysisUsage(AnalysisUsage &AU) const {
|
||||
AU.addRequired<ObjCARCAAWrapperPass>();
|
||||
AU.addRequired<AAResultsWrapperPass>();
|
||||
// ARC optimization doesn't currently split critical edges.
|
||||
AU.setPreservesCFG();
|
||||
}
|
||||
|
||||
/// Turn objc_retainAutoreleasedReturnValue into objc_retain if the operand is
|
||||
@ -640,6 +640,9 @@ ObjCARCOpt::OptimizeRetainRVCall(Function &F, Instruction *RetainRV) {
|
||||
}
|
||||
}
|
||||
|
||||
assert(!BundledInsts->contains(RetainRV) &&
|
||||
"a bundled retainRV's argument should be a call");
|
||||
|
||||
// Turn it to a plain objc_retain.
|
||||
Changed = true;
|
||||
++NumPeeps;
|
||||
@ -661,6 +664,9 @@ bool ObjCARCOpt::OptimizeInlinedAutoreleaseRVCall(
|
||||
Function &F, DenseMap<BasicBlock *, ColorVector> &BlockColors,
|
||||
Instruction *Inst, const Value *&Arg, ARCInstKind Class,
|
||||
Instruction *AutoreleaseRV, const Value *&AutoreleaseRVArg) {
|
||||
if (BundledInsts->contains(Inst))
|
||||
return false;
|
||||
|
||||
// Must be in the same basic block.
|
||||
assert(Inst->getParent() == AutoreleaseRV->getParent());
|
||||
|
||||
@ -844,6 +850,12 @@ void ObjCARCOpt::OptimizeIndividualCalls(Function &F) {
|
||||
for (inst_iterator I = inst_begin(&F), E = inst_end(&F); I != E; ) {
|
||||
Instruction *Inst = &*I++;
|
||||
|
||||
if (auto *CI = dyn_cast<CallInst>(Inst))
|
||||
if (objcarc::hasRVOpBundle(CI)) {
|
||||
BundledInsts->insertRVCall(&*I, CI);
|
||||
Changed = true;
|
||||
}
|
||||
|
||||
ARCInstKind Class = GetBasicARCInstKind(Inst);
|
||||
|
||||
// Skip this loop if this instruction isn't itself an ARC intrinsic.
|
||||
@ -922,6 +934,11 @@ void ObjCARCOpt::OptimizeIndividualCallImpl(
|
||||
// We can delete this call if it takes an inert value.
|
||||
SmallPtrSet<Value *, 1> VisitedPhis;
|
||||
|
||||
if (BundledInsts->contains(Inst)) {
|
||||
UsedInThisFunction |= 1 << unsigned(Class);
|
||||
return;
|
||||
}
|
||||
|
||||
if (IsNoopOnGlobal(Class))
|
||||
if (isInertARCValue(Inst->getOperand(0), VisitedPhis)) {
|
||||
if (!Inst->getType()->isVoidTy())
|
||||
@ -1542,7 +1559,7 @@ ObjCARCOpt::VisitInstructionTopDown(Instruction *Inst,
|
||||
if (Ptr == Arg)
|
||||
continue; // Handled above.
|
||||
TopDownPtrState &S = MI->second;
|
||||
if (S.HandlePotentialAlterRefCount(Inst, Ptr, PA, Class))
|
||||
if (S.HandlePotentialAlterRefCount(Inst, Ptr, PA, Class, *BundledInsts))
|
||||
continue;
|
||||
|
||||
S.HandlePotentialUse(Inst, Ptr, PA, Class);
|
||||
@ -2343,7 +2360,7 @@ void ObjCARCOpt::OptimizeReturns(Function &F) {
|
||||
++NumRets;
|
||||
LLVM_DEBUG(dbgs() << "Erasing: " << *Retain << "\nErasing: " << *Autorelease
|
||||
<< "\n");
|
||||
EraseInstruction(Retain);
|
||||
BundledInsts->eraseInst(Retain);
|
||||
EraseInstruction(Autorelease);
|
||||
}
|
||||
}
|
||||
@ -2376,11 +2393,6 @@ void ObjCARCOpt::init(Module &M) {
|
||||
if (!EnableARCOpts)
|
||||
return;
|
||||
|
||||
// If nothing in the Module uses ARC, don't do anything.
|
||||
Run = ModuleHasARC(M);
|
||||
if (!Run)
|
||||
return;
|
||||
|
||||
// Intuitively, objc_retain and others are nocapture, however in practice
|
||||
// they are not, because they return their argument value. And objc_release
|
||||
// calls finalizers which can have arbitrary side effects.
|
||||
@ -2394,16 +2406,18 @@ bool ObjCARCOpt::run(Function &F, AAResults &AA) {
|
||||
if (!EnableARCOpts)
|
||||
return false;
|
||||
|
||||
// If nothing in the Module uses ARC, don't do anything.
|
||||
if (!Run)
|
||||
return false;
|
||||
|
||||
Changed = false;
|
||||
Changed = CFGChanged = false;
|
||||
BundledRetainClaimRVs BRV(EP, false);
|
||||
BundledInsts = &BRV;
|
||||
|
||||
LLVM_DEBUG(dbgs() << "<<< ObjCARCOpt: Visiting Function: " << F.getName()
|
||||
<< " >>>"
|
||||
"\n");
|
||||
|
||||
std::pair<bool, bool> R = BundledInsts->insertAfterInvokes(F, nullptr);
|
||||
Changed |= R.first;
|
||||
CFGChanged |= R.second;
|
||||
|
||||
PA.setAA(&AA);
|
||||
|
||||
#ifndef NDEBUG
|
||||
@ -2468,9 +2482,11 @@ PreservedAnalyses ObjCARCOptPass::run(Function &F,
|
||||
OCAO.init(*F.getParent());
|
||||
|
||||
bool Changed = OCAO.run(F, AM.getResult<AAManager>(F));
|
||||
bool CFGChanged = OCAO.hasCFGChanged();
|
||||
if (Changed) {
|
||||
PreservedAnalyses PA;
|
||||
PA.preserveSet<CFGAnalyses>();
|
||||
if (!CFGChanged)
|
||||
PA.preserveSet<CFGAnalyses>();
|
||||
return PA;
|
||||
}
|
||||
return PreservedAnalyses::all();
|
||||
|
@ -11,6 +11,7 @@
|
||||
#include "ObjCARC.h"
|
||||
#include "llvm/Analysis/ObjCARCAnalysisUtils.h"
|
||||
#include "llvm/Analysis/ObjCARCInstKind.h"
|
||||
#include "llvm/Analysis/ObjCARCUtil.h"
|
||||
#include "llvm/IR/BasicBlock.h"
|
||||
#include "llvm/IR/Instruction.h"
|
||||
#include "llvm/IR/Instructions.h"
|
||||
@ -280,6 +281,12 @@ void BottomUpPtrState::HandlePotentialUse(BasicBlock *BB, Instruction *Inst,
|
||||
InsertAfter = skipDebugIntrinsics(InsertAfter);
|
||||
|
||||
InsertReverseInsertPt(&*InsertAfter);
|
||||
|
||||
// Don't insert anything between a call/invoke with operand bundle
|
||||
// "clang.arc.rv" and the retainRV/claimRV call that uses the call result.
|
||||
if (auto *CB = dyn_cast<CallBase>(Inst))
|
||||
if (objcarc::hasRVOpBundle(CB))
|
||||
SetCFGHazardAfflicted(true);
|
||||
};
|
||||
|
||||
// Check for possible direct uses.
|
||||
@ -377,10 +384,9 @@ bool TopDownPtrState::MatchWithRelease(ARCMDKindCache &Cache,
|
||||
llvm_unreachable("Sequence unknown enum value");
|
||||
}
|
||||
|
||||
bool TopDownPtrState::HandlePotentialAlterRefCount(Instruction *Inst,
|
||||
const Value *Ptr,
|
||||
ProvenanceAnalysis &PA,
|
||||
ARCInstKind Class) {
|
||||
bool TopDownPtrState::HandlePotentialAlterRefCount(
|
||||
Instruction *Inst, const Value *Ptr, ProvenanceAnalysis &PA,
|
||||
ARCInstKind Class, const BundledRetainClaimRVs &BundledRVs) {
|
||||
// Check for possible releases. Treat clang.arc.use as a releasing instruction
|
||||
// to prevent sinking a retain past it.
|
||||
if (!CanDecrementRefCount(Inst, Ptr, PA, Class) &&
|
||||
@ -396,6 +402,11 @@ bool TopDownPtrState::HandlePotentialAlterRefCount(Instruction *Inst,
|
||||
assert(!HasReverseInsertPts());
|
||||
InsertReverseInsertPt(Inst);
|
||||
|
||||
// Don't insert anything between a call/invoke with operand bundle
|
||||
// "clang.arc.rv" and the retainRV/claimRV call that uses the call result.
|
||||
if (BundledRVs.contains(Inst))
|
||||
SetCFGHazardAfflicted(true);
|
||||
|
||||
// One call can't cause a transition from S_Retain to S_CanRelease
|
||||
// and S_CanRelease to S_Use. If we've made the first transition,
|
||||
// we're done.
|
||||
|
@ -31,6 +31,7 @@ class Value;
|
||||
namespace objcarc {
|
||||
|
||||
class ARCMDKindCache;
|
||||
class BundledRetainClaimRVs;
|
||||
class ProvenanceAnalysis;
|
||||
|
||||
/// \enum Sequence
|
||||
@ -202,7 +203,8 @@ struct TopDownPtrState : PtrState {
|
||||
ProvenanceAnalysis &PA, ARCInstKind Class);
|
||||
|
||||
bool HandlePotentialAlterRefCount(Instruction *Inst, const Value *Ptr,
|
||||
ProvenanceAnalysis &PA, ARCInstKind Class);
|
||||
ProvenanceAnalysis &PA, ARCInstKind Class,
|
||||
const BundledRetainClaimRVs &BundledRVs);
|
||||
};
|
||||
|
||||
} // end namespace objcarc
|
||||
|
@ -247,7 +247,9 @@ static bool markTails(Function &F, bool &AllCallsAreTailCalls,
|
||||
isa<PseudoProbeInst>(&I))
|
||||
continue;
|
||||
|
||||
bool IsNoTail = CI->isNoTailCall() || CI->hasOperandBundles();
|
||||
// Special-case operand bundle "clang.arc.rv".
|
||||
bool IsNoTail = CI->isNoTailCall() || CI->hasOperandBundlesOtherThan(
|
||||
LLVMContext::OB_clang_arc_rv);
|
||||
|
||||
if (!IsNoTail && CI->doesNotAccessMemory()) {
|
||||
// A call to a readnone function whose arguments are all things computed
|
||||
|
@ -27,8 +27,9 @@
|
||||
#include "llvm/Analysis/CaptureTracking.h"
|
||||
#include "llvm/Analysis/EHPersonalities.h"
|
||||
#include "llvm/Analysis/InstructionSimplify.h"
|
||||
#include "llvm/Analysis/ObjCARCAnalysisUtils.h"
|
||||
#include "llvm/Analysis/ObjCARCUtil.h"
|
||||
#include "llvm/Analysis/ProfileSummaryInfo.h"
|
||||
#include "llvm/Transforms/Utils/Local.h"
|
||||
#include "llvm/Analysis/ValueTracking.h"
|
||||
#include "llvm/Analysis/VectorUtils.h"
|
||||
#include "llvm/IR/Argument.h"
|
||||
@ -61,6 +62,7 @@
|
||||
#include "llvm/Support/ErrorHandling.h"
|
||||
#include "llvm/Transforms/Utils/AssumeBundleBuilder.h"
|
||||
#include "llvm/Transforms/Utils/Cloning.h"
|
||||
#include "llvm/Transforms/Utils/Local.h"
|
||||
#include "llvm/Transforms/Utils/ValueMapper.h"
|
||||
#include <algorithm>
|
||||
#include <cassert>
|
||||
@ -1650,6 +1652,98 @@ void llvm::updateProfileCallee(
|
||||
}
|
||||
}
|
||||
|
||||
/// An operand bundle "clang.arc.rv" on a call indicates the call result is
/// implicitly consumed by a call to retainRV or claimRV immediately after the
/// call. This function inlines the retainRV/claimRV calls.
///
/// There are three cases to consider:
///
/// 1. If there is a call to autoreleaseRV that takes a pointer to the returned
///    object in the callee return block, the autoreleaseRV call and the
///    retainRV/claimRV call in the caller cancel out. If the call in the caller
///    is a claimRV call, a call to objc_release is emitted.
///
/// 2. If there is a call in the callee return block that doesn't have operand
///    bundle "clang.arc.rv", the operand bundle on the original call is
///    transferred to the call in the callee.
///
/// 3. Otherwise, a call to objc_retain is inserted if the call in the caller is
///    a retainRV call.
static void
inlineRetainOrClaimRVCalls(CallBase &CB,
                           const SmallVectorImpl<ReturnInst *> &Returns) {
  Module *Mod = CB.getModule();
  // The bundle argument distinguishes retainRV from claimRV annotations.
  bool IsRetainRV = objcarc::hasRVOpBundle(&CB, true), IsClaimRV = !IsRetainRV;

  for (auto *RI : Returns) {
    Value *RetOpnd = objcarc::GetRCIdentityRoot(RI->getOperand(0));
    BasicBlock::reverse_iterator I = ++(RI->getIterator().getReverse());
    BasicBlock::reverse_iterator EI = RI->getParent()->rend();
    // Unless a cancelling autoreleaseRV or a transferable call is found
    // below, a retainRV annotation must be materialized as objc_retain.
    bool InsertRetainCall = IsRetainRV;
    IRBuilder<> Builder(RI->getContext());

    // Walk backwards through the basic block looking for either a matching
    // autoreleaseRV call or an unannotated call.
    for (; I != EI;) {
      auto CurI = I++;

      // Ignore casts.
      if (isa<CastInst>(*CurI))
        continue;

      if (auto *II = dyn_cast<IntrinsicInst>(&*CurI)) {
        // Match an unused autoreleaseRV whose operand has the same RC
        // identity as the returned value.
        if (II->getIntrinsicID() == Intrinsic::objc_autoreleaseReturnValue &&
            II->hasNUses(0) &&
            objcarc::GetRCIdentityRoot(II->getOperand(0)) == RetOpnd) {
          // If we've found a matching autoreleaseRV call:
          // - If the call is annotated with claimRV, insert a call to
          //   objc_release and erase the autoreleaseRV call.
          // - If the call is annotated with retainRV, just erase the
          //   autoreleaseRV call.
          if (IsClaimRV) {
            Builder.SetInsertPoint(II);
            Function *IFn =
                Intrinsic::getDeclaration(Mod, Intrinsic::objc_release);
            Value *BC =
                Builder.CreateBitCast(RetOpnd, IFn->getArg(0)->getType());
            Builder.CreateCall(IFn, BC, "");
          }
          II->eraseFromParent();
          InsertRetainCall = false;
        }
      } else if (auto *CI = dyn_cast<CallInst>(&*CurI)) {
        if (objcarc::GetRCIdentityRoot(CI) == RetOpnd &&
            !objcarc::hasRVOpBundle(CI)) {
          // If we've found an unannotated call that defines RetOpnd, add a
          // "clang.arc.rv" operand bundle. Bundles are immutable, so this
          // rebuilds the call, copies its metadata, and replaces it.
          Value *BundleArgs[] = {
              ConstantInt::get(Builder.getInt64Ty(),
                               objcarc::getRVOperandBundleEnum(IsRetainRV))};
          OperandBundleDef OB("clang.arc.rv", BundleArgs);
          auto *NewCall = CallBase::addOperandBundle(
              CI, LLVMContext::OB_clang_arc_rv, OB, CI);
          NewCall->copyMetadata(*CI);
          CI->replaceAllUsesWith(NewCall);
          CI->eraseFromParent();
          InsertRetainCall = false;
        }
      }

      // NOTE: only the first non-cast instruction before the return is
      // examined; anything else ends the search for this return block.
      break;
    }

    if (InsertRetainCall) {
      // The call has operand bundle "clang.arc.rv"="retain" and we've failed to
      // find a matching autoreleaseRV or an annotated call in the callee. Emit
      // a call to objc_retain.
      Builder.SetInsertPoint(RI);
      Function *IFn = Intrinsic::getDeclaration(Mod, Intrinsic::objc_retain);
      Value *BC = Builder.CreateBitCast(RetOpnd, IFn->getArg(0)->getType());
      Builder.CreateCall(IFn, BC, "");
    }
  }
}
|
||||
|
||||
/// This function inlines the called function into the basic block of the
|
||||
/// caller. This returns false if it is not possible to inline this call.
|
||||
/// The program is still in a well defined state if this occurs though.
|
||||
@ -1687,6 +1781,8 @@ llvm::InlineResult llvm::InlineFunction(CallBase &CB, InlineFunctionInfo &IFI,
|
||||
// ... and "funclet" operand bundles.
|
||||
if (Tag == LLVMContext::OB_funclet)
|
||||
continue;
|
||||
if (Tag == LLVMContext::OB_clang_arc_rv)
|
||||
continue;
|
||||
|
||||
return InlineResult::failure("unsupported operand bundle");
|
||||
}
|
||||
@ -1853,6 +1949,10 @@ llvm::InlineResult llvm::InlineFunction(CallBase &CB, InlineFunctionInfo &IFI,
|
||||
// Remember the first block that is newly cloned over.
|
||||
FirstNewBlock = LastBlock; ++FirstNewBlock;
|
||||
|
||||
// Insert retainRV/clainRV runtime calls.
|
||||
if (objcarc::hasRVOpBundle(&CB))
|
||||
inlineRetainOrClaimRVCalls(CB, Returns);
|
||||
|
||||
if (IFI.CallerBFI != nullptr && IFI.CalleeBFI != nullptr)
|
||||
// Update the BFI of blocks cloned into the caller.
|
||||
updateCallerBFI(OrigBB, VMap, IFI.CallerBFI, IFI.CalleeBFI,
|
||||
|
@ -9,6 +9,7 @@
|
||||
; CHECK-NEXT: <OPERAND_BUNDLE_TAG
|
||||
; CHECK-NEXT: <OPERAND_BUNDLE_TAG
|
||||
; CHECK-NEXT: <OPERAND_BUNDLE_TAG
|
||||
; CHECK-NEXT: <OPERAND_BUNDLE_TAG
|
||||
; CHECK-NEXT: </OPERAND_BUNDLE_TAGS_BLOCK
|
||||
|
||||
; CHECK: <FUNCTION_BLOCK
|
||||
|
@ -33,7 +33,7 @@ define dso_local i8* @rv_marker_1() {
|
||||
; GISEL-NOT: mov x29, x29
|
||||
;
|
||||
entry:
|
||||
%call = call "rv_marker" i8* @foo1()
|
||||
%call = call i8* @foo1() [ "clang.arc.rv"(i64 0) ]
|
||||
ret i8* %call
|
||||
}
|
||||
|
||||
@ -49,7 +49,7 @@ define dso_local void @rv_marker_2_select(i32 %c) {
|
||||
entry:
|
||||
%tobool.not = icmp eq i32 %c, 0
|
||||
%.sink = select i1 %tobool.not, i32 2, i32 1
|
||||
%call1 = call "rv_marker" i8* @foo0(i32 %.sink)
|
||||
%call1 = call i8* @foo0(i32 %.sink) [ "clang.arc.rv"(i64 0) ]
|
||||
tail call void @foo2(i8* %call1)
|
||||
ret void
|
||||
}
|
||||
@ -61,7 +61,7 @@ define dso_local void @rv_marker_3() personality i8* bitcast (i32 (...)* @__gxx_
|
||||
; SELDAG-NEXT: mov x29, x29
|
||||
;
|
||||
entry:
|
||||
%call = call "rv_marker" i8* @foo1()
|
||||
%call = call i8* @foo1() [ "clang.arc.rv"(i64 0) ]
|
||||
invoke void @objc_object(i8* %call) #5
|
||||
to label %invoke.cont unwind label %lpad
|
||||
|
||||
@ -87,7 +87,7 @@ entry:
|
||||
%s = alloca %struct.S, align 1
|
||||
%0 = getelementptr inbounds %struct.S, %struct.S* %s, i64 0, i32 0
|
||||
call void @llvm.lifetime.start.p0i8(i64 1, i8* nonnull %0) #2
|
||||
%call = invoke "rv_marker" i8* @foo1()
|
||||
%call = invoke i8* @foo1() [ "clang.arc.rv"(i64 0) ]
|
||||
to label %invoke.cont unwind label %lpad
|
||||
|
||||
invoke.cont: ; preds = %entry
|
||||
@ -127,7 +127,7 @@ define dso_local i8* @rv_marker_5_indirect_call() {
|
||||
;
|
||||
entry:
|
||||
%0 = load i8* ()*, i8* ()** @fptr, align 8
|
||||
%call = call "rv_marker" i8* %0()
|
||||
%call = call i8* %0() [ "clang.arc.rv"(i64 0) ]
|
||||
tail call void @foo2(i8* %call)
|
||||
ret i8* %call
|
||||
}
|
||||
@ -142,7 +142,7 @@ define dso_local void @rv_marker_multiarg(i64 %a, i64 %b, i64 %c) {
|
||||
; CHECK-NEXT: bl foo
|
||||
; SELDAG-NEXT: mov x29, x29
|
||||
; GISEL-NOT: mov x29, x29
|
||||
call "rv_marker" void @foo(i64 %c, i64 %b, i64 %a)
|
||||
call void @foo(i64 %c, i64 %b, i64 %a) [ "clang.arc.rv"(i64 0) ]
|
||||
ret void
|
||||
}
|
||||
|
||||
|
@ -1,4 +1,8 @@
|
||||
; RUN: opt < %s -deadargelim -S | not grep DEAD
|
||||
; RUN: opt < %s -deadargelim -S | FileCheck %s
|
||||
|
||||
@g0 = global i8 0, align 8
|
||||
|
||||
; CHECK-NOT: DEAD
|
||||
|
||||
; Dead arg only used by dead retval
|
||||
define internal i32 @test(i32 %DEADARG) {
|
||||
@ -16,3 +20,22 @@ define i32 @test3() {
|
||||
ret i32 %Y
|
||||
}
|
||||
|
||||
; The callee function's return type shouldn't be changed if the call result is
|
||||
; used.
|
||||
|
||||
; CHECK-LABEL: define internal i8* @callee4()
|
||||
|
||||
define internal i8* @callee4(i8* %a0) {
|
||||
ret i8* @g0;
|
||||
}
|
||||
|
||||
declare void @llvm.objc.clang.arc.noop.use(...)
|
||||
|
||||
; CHECK-LABEL: define i8* @test4(
|
||||
; CHECK: tail call i8* @callee4() [ "clang.arc.rv"(i64 0) ]
|
||||
|
||||
define i8* @test4() {
|
||||
%call = tail call i8* @callee4(i8* @g0) [ "clang.arc.rv"(i64 0) ]
|
||||
call void (...) @llvm.objc.clang.arc.noop.use(i8* %call)
|
||||
ret i8* @g0
|
||||
}
|
||||
|
175
llvm/test/Transforms/Inline/inline-retainRV-call.ll
Normal file
175
llvm/test/Transforms/Inline/inline-retainRV-call.ll
Normal file
@ -0,0 +1,175 @@
|
||||
; RUN: opt < %s -inline -S | FileCheck %s
|
||||
|
||||
@g0 = global i8* null, align 8
|
||||
declare i8* @foo0()
|
||||
|
||||
define i8* @callee0_autoreleaseRV() {
|
||||
%call = call i8* @foo0() [ "clang.arc.rv"(i64 0) ]
|
||||
%1 = tail call i8* @llvm.objc.autoreleaseReturnValue(i8* %call)
|
||||
ret i8* %call
|
||||
}
|
||||
|
||||
; CHECK-LABEL: define void @test0_autoreleaseRV(
|
||||
; CHECK: call i8* @foo0() [ "clang.arc.rv"(i64 0) ]
|
||||
|
||||
define void @test0_autoreleaseRV() {
|
||||
%call = call i8* @callee0_autoreleaseRV() [ "clang.arc.rv"(i64 0) ]
|
||||
ret void
|
||||
}
|
||||
|
||||
; CHECK-LABEL: define void @test0_claimRV_autoreleaseRV(
|
||||
; CHECK: %[[CALL:.*]] = call i8* @foo0() [ "clang.arc.rv"(i64 0) ]
|
||||
; CHECK: call void @llvm.objc.release(i8* %[[CALL]])
|
||||
; CHECK-NEXT: ret void
|
||||
|
||||
define void @test0_claimRV_autoreleaseRV() {
|
||||
%call = call i8* @callee0_autoreleaseRV() [ "clang.arc.rv"(i64 1) ]
|
||||
ret void
|
||||
}
|
||||
|
||||
; CHECK-LABEL: define void @test1_autoreleaseRV(
|
||||
; CHECK: invoke i8* @foo0() [ "clang.arc.rv"(i64 0) ]
|
||||
|
||||
define void @test1_autoreleaseRV() personality i8* bitcast (i32 (...)* @__gxx_personality_v0 to i8*) {
|
||||
entry:
|
||||
%call = invoke i8* @callee0_autoreleaseRV() [ "clang.arc.rv"(i64 0) ]
|
||||
to label %invoke.cont unwind label %lpad
|
||||
|
||||
invoke.cont:
|
||||
ret void
|
||||
|
||||
lpad:
|
||||
%0 = landingpad { i8*, i32 }
|
||||
cleanup
|
||||
resume { i8*, i32 } undef
|
||||
}
|
||||
|
||||
; CHECK-LABEL: define void @test1_claimRV_autoreleaseRV(
|
||||
; CHECK: %[[INVOKE:.*]] = invoke i8* @foo0() [ "clang.arc.rv"(i64 0) ]
|
||||
; CHECK: call void @llvm.objc.release(i8* %[[INVOKE]])
|
||||
; CHECK-NEXT: br
|
||||
|
||||
define void @test1_claimRV_autoreleaseRV() personality i8* bitcast (i32 (...)* @__gxx_personality_v0 to i8*) {
|
||||
entry:
|
||||
%call = invoke i8* @callee0_autoreleaseRV() [ "clang.arc.rv"(i64 1) ]
|
||||
to label %invoke.cont unwind label %lpad
|
||||
|
||||
invoke.cont:
|
||||
ret void
|
||||
|
||||
lpad:
|
||||
%0 = landingpad { i8*, i32 }
|
||||
cleanup
|
||||
resume { i8*, i32 } undef
|
||||
}
|
||||
|
||||
define i8* @callee1_no_autoreleaseRV() {
|
||||
%call = call i8* @foo0()
|
||||
ret i8* %call
|
||||
}
|
||||
|
||||
; CHECK-LABEL: define void @test2_no_autoreleaseRV(
|
||||
; CHECK: call i8* @foo0() [ "clang.arc.rv"(i64 0) ]
|
||||
; CHECK-NEXT: ret void
|
||||
|
||||
define void @test2_no_autoreleaseRV() {
|
||||
%call = call i8* @callee1_no_autoreleaseRV() [ "clang.arc.rv"(i64 0) ]
|
||||
ret void
|
||||
}
|
||||
|
||||
; CHECK-LABEL: define void @test2_claimRV_no_autoreleaseRV(
|
||||
; CHECK: call i8* @foo0() [ "clang.arc.rv"(i64 1) ]
|
||||
; CHECK-NEXT: ret void
|
||||
|
||||
define void @test2_claimRV_no_autoreleaseRV() {
|
||||
%call = call i8* @callee1_no_autoreleaseRV() [ "clang.arc.rv"(i64 1) ]
|
||||
ret void
|
||||
}
|
||||
|
||||
; CHECK-LABEL: define void @test3_no_autoreleaseRV(
|
||||
; CHECK: invoke i8* @foo0() [ "clang.arc.rv"(i64 0) ]
|
||||
|
||||
define void @test3_no_autoreleaseRV() personality i8* bitcast (i32 (...)* @__gxx_personality_v0 to i8*) {
|
||||
entry:
|
||||
%call = invoke i8* @callee1_no_autoreleaseRV() [ "clang.arc.rv"(i64 0) ]
|
||||
to label %invoke.cont unwind label %lpad
|
||||
|
||||
invoke.cont:
|
||||
ret void
|
||||
|
||||
lpad:
|
||||
%0 = landingpad { i8*, i32 }
|
||||
cleanup
|
||||
resume { i8*, i32 } undef
|
||||
}
|
||||
|
||||
define i8* @callee2_nocall() {
|
||||
%1 = load i8*, i8** @g0, align 8
|
||||
ret i8* %1
|
||||
}
|
||||
|
||||
; Check that a call to @llvm.objc.retain is inserted if there is no matching
|
||||
; autoreleaseRV call or a call.
|
||||
|
||||
; CHECK-LABEL: define void @test4_nocall(
|
||||
; CHECK: %[[V0:.*]] = load i8*, i8** @g0,
|
||||
; CHECK-NEXT: call i8* @llvm.objc.retain(i8* %[[V0]])
|
||||
; CHECK-NEXT: ret void
|
||||
|
||||
define void @test4_nocall() {
|
||||
%call = call i8* @callee2_nocall() [ "clang.arc.rv"(i64 0) ]
|
||||
ret void
|
||||
}
|
||||
|
||||
; CHECK-LABEL: define void @test4_claimRV_nocall(
|
||||
; CHECK: %[[V0:.*]] = load i8*, i8** @g0,
|
||||
; CHECK-NEXT: ret void
|
||||
|
||||
define void @test4_claimRV_nocall() {
|
||||
%call = call i8* @callee2_nocall() [ "clang.arc.rv"(i64 1) ]
|
||||
ret void
|
||||
}
|
||||
|
||||
; Check that a call to @llvm.objc.retain is inserted if call to @foo already has
|
||||
; the attribute. I'm not sure this will happen in practice.
|
||||
|
||||
define i8* @callee3_marker() {
|
||||
%1 = call i8* @foo0() [ "clang.arc.rv"(i64 0) ]
|
||||
ret i8* %1
|
||||
}
|
||||
|
||||
; CHECK-LABEL: define void @test5(
|
||||
; CHECK: %[[V0:.*]] = call i8* @foo0() [ "clang.arc.rv"(i64 0) ]
|
||||
; CHECK-NEXT: call i8* @llvm.objc.retain(i8* %[[V0]])
|
||||
; CHECK-NEXT: ret void
|
||||
|
||||
define void @test5() {
|
||||
%call = call i8* @callee3_marker() [ "clang.arc.rv"(i64 0) ]
|
||||
ret void
|
||||
}
|
||||
|
||||
; Don't pair up an autoreleaseRV in the callee and an retainRV in the caller
|
||||
; if there is an instruction between the ret instruction and the call to
|
||||
; autoreleaseRV that isn't a cast instruction.
|
||||
|
||||
define i8* @callee0_autoreleaseRV2() {
|
||||
%call = call i8* @foo0() [ "clang.arc.rv"(i64 0) ]
|
||||
%1 = tail call i8* @llvm.objc.autoreleaseReturnValue(i8* %call)
|
||||
store i8* null, i8** @g0
|
||||
ret i8* %call
|
||||
}
|
||||
|
||||
; CHECK-LABEL: define void @test6(
|
||||
; CHECK: %[[V0:.*]] = call i8* @foo0() [ "clang.arc.rv"(i64 0) ]
|
||||
; CHECK: call i8* @llvm.objc.autoreleaseReturnValue(i8* %[[V0]])
|
||||
; CHECK: store i8* null, i8** @g0, align 8
|
||||
; CHECK: call i8* @llvm.objc.retain(i8* %[[V0]])
|
||||
; CHECK-NEXT: ret void
|
||||
|
||||
define void @test6() {
|
||||
%call = call i8* @callee0_autoreleaseRV2() [ "clang.arc.rv"(i64 0) ]
|
||||
ret void
|
||||
}
|
||||
|
||||
declare i8* @llvm.objc.autoreleaseReturnValue(i8*)
|
||||
declare i32 @__gxx_personality_v0(...)
|
@ -10,6 +10,16 @@
|
||||
; }
|
||||
; }
|
||||
|
||||
; CHECK-LABEL: define void @"\01?g@@YAXXZ"()
|
||||
; CHECK-LABEL: catch
|
||||
; CHECK: call void asm sideeffect "movl{{.*}}%ebp, %ebp{{.*}}", ""() [ "funclet"(token %1) ]
|
||||
|
||||
; CHECK-LABEL: catch.1
|
||||
; CHECK: call void asm sideeffect "movl{{.*}}%ebp, %ebp{{.*}}", ""() [ "funclet"(token %1) ]
|
||||
|
||||
; CHECK-LABEL: invoke.cont
|
||||
; CHECK: call void asm sideeffect "movl{{.*}}%ebp, %ebp{{.*}}", ""(){{$}}
|
||||
|
||||
define void @"\01?g@@YAXXZ"() personality i8* bitcast (i32 (...)* @__CxxFrameHandler3 to i8*) {
|
||||
entry:
|
||||
%call = invoke i8* @"\01?f@@YAPAUobjc_object@@XZ"()
|
||||
@ -40,23 +50,41 @@ invoke.cont: ; preds = %entry
|
||||
ret void
|
||||
}
|
||||
|
||||
; CHECK-LABEL: define dso_local void @"?test_attr_claimRV@@YAXXZ"()
|
||||
; CHECK: %[[CALL4:.*]] = notail call i8* @"?noexcept_func@@YAPAUobjc_object@@XZ"() [ "clang.arc.rv"(i64 1) ]
|
||||
; CHECK: call i8* @llvm.objc.unsafeClaimAutoreleasedReturnValue(i8* %[[CALL4]])
|
||||
|
||||
; CHECK: %[[V1:.*]] = cleanuppad
|
||||
; CHECK: %[[CALL:.*]] = notail call i8* @"?noexcept_func@@YAPAUobjc_object@@XZ"() [ "funclet"(token %[[V1]]), "clang.arc.rv"(i64 1) ]
|
||||
; CHECK: call i8* @llvm.objc.unsafeClaimAutoreleasedReturnValue(i8* %[[CALL]]) [ "funclet"(token %[[V1]]) ]
|
||||
|
||||
define dso_local void @"?test_attr_claimRV@@YAXXZ"() local_unnamed_addr #0 personality i8* bitcast (i32 (...)* @__CxxFrameHandler3 to i8*) {
|
||||
entry:
|
||||
invoke void @"?foo@@YAXXZ"()
|
||||
to label %invoke.cont unwind label %ehcleanup
|
||||
|
||||
invoke.cont: ; preds = %entry
|
||||
%call.i4 = tail call i8* @"?noexcept_func@@YAPAUobjc_object@@XZ"() #2 [ "clang.arc.rv"(i64 1) ]
|
||||
ret void
|
||||
|
||||
ehcleanup: ; preds = %entry
|
||||
%0 = cleanuppad within none []
|
||||
%call.i = call i8* @"?noexcept_func@@YAPAUobjc_object@@XZ"() #2 [ "funclet"(token %0), "clang.arc.rv"(i64 1) ]
|
||||
cleanupret from %0 unwind to caller
|
||||
}
|
||||
|
||||
declare i8* @"\01?f@@YAPAUobjc_object@@XZ"()
|
||||
|
||||
declare i32 @__CxxFrameHandler3(...)
|
||||
|
||||
declare void @"?foo@@YAXXZ"()
|
||||
declare i8* @"?noexcept_func@@YAPAUobjc_object@@XZ"()
|
||||
|
||||
declare dllimport i8* @llvm.objc.retainAutoreleasedReturnValue(i8*)
|
||||
declare i8* @llvm.objc.unsafeClaimAutoreleasedReturnValue(i8*)
|
||||
|
||||
declare dllimport void @llvm.objc.release(i8*)
|
||||
|
||||
!llvm.module.flags = !{!0}
|
||||
|
||||
!0 = !{i32 1, !"clang.arc.retainAutoreleasedReturnValueMarker", !"movl\09%ebp, %ebp\09\09// marker for objc_retainAutoreleaseReturnValue"}
|
||||
|
||||
; CHECK-LABEL: catch
|
||||
; CHECK: call void asm sideeffect "movl{{.*}}%ebp, %ebp{{.*}}", ""() [ "funclet"(token %1) ]
|
||||
|
||||
; CHECK-LABEL: catch.1
|
||||
; CHECK: call void asm sideeffect "movl{{.*}}%ebp, %ebp{{.*}}", ""() [ "funclet"(token %1) ]
|
||||
|
||||
; CHECK-LABEL: invoke.cont
|
||||
; CHECK: call void asm sideeffect "movl{{.*}}%ebp, %ebp{{.*}}", ""(){{$}}
|
||||
|
63
llvm/test/Transforms/ObjCARC/contract-rv-attr.ll
Normal file
63
llvm/test/Transforms/ObjCARC/contract-rv-attr.ll
Normal file
@ -0,0 +1,63 @@
|
||||
; RUN: opt -objc-arc-contract -S < %s | FileCheck %s
|
||||
; RUN: opt -passes=objc-arc-contract -S < %s | FileCheck %s
|
||||
|
||||
; CHECK-LABEL: define void @test0() {
|
||||
; CHECK: %[[CALL:.*]] = notail call i8* @foo() [ "clang.arc.rv"(i64 0) ]
|
||||
; CHECK: call i8* @llvm.objc.retainAutoreleasedReturnValue(i8* %[[CALL]])
|
||||
|
||||
define void @test0() {
|
||||
%call1 = call i8* @foo() [ "clang.arc.rv"(i64 0) ]
|
||||
ret void
|
||||
}
|
||||
|
||||
; CHECK-LABEL: define void @test1() {
|
||||
; CHECK: %[[CALL:.*]] = notail call i8* @foo() [ "clang.arc.rv"(i64 1) ]
|
||||
; CHECK: call i8* @llvm.objc.unsafeClaimAutoreleasedReturnValue(i8* %[[CALL]])
|
||||
|
||||
define void @test1() {
|
||||
%call1 = call i8* @foo() [ "clang.arc.rv"(i64 1) ]
|
||||
ret void
|
||||
}
|
||||
|
||||
; CHECK-LABEL:define i8* @test2(
|
||||
; CHECK: %[[CALL1:.*]] = invoke i8* @foo() [ "clang.arc.rv"(i64 0) ]
|
||||
|
||||
; CHECK: %[[V0:.*]] = call i8* @llvm.objc.retainAutoreleasedReturnValue(i8* %[[CALL1]])
|
||||
; CHECK-NEXT: br
|
||||
|
||||
; CHECK: %[[CALL3:.*]] = invoke i8* @foo() [ "clang.arc.rv"(i64 0) ]
|
||||
|
||||
; CHECK: %[[V2:.*]] = call i8* @llvm.objc.retainAutoreleasedReturnValue(i8* %[[CALL3]])
|
||||
; CHECK-NEXT: br
|
||||
|
||||
; CHECK: %[[RETVAL:.*]] = phi i8* [ %[[V0]], {{.*}} ], [ %[[V2]], {{.*}} ]
|
||||
; CHECK: ret i8* %[[RETVAL]]
|
||||
|
||||
define i8* @test2(i1 zeroext %b) personality i8* bitcast (i32 (...)* @__gxx_personality_v0 to i8*) {
|
||||
entry:
|
||||
br i1 %b, label %if.then, label %if.end
|
||||
|
||||
if.then:
|
||||
%call1 = invoke i8* @foo() [ "clang.arc.rv"(i64 0) ]
|
||||
to label %cleanup unwind label %lpad
|
||||
|
||||
lpad:
|
||||
%0 = landingpad { i8*, i32 }
|
||||
cleanup
|
||||
resume { i8*, i32 } undef
|
||||
|
||||
if.end:
|
||||
%call3 = invoke i8* @foo() [ "clang.arc.rv"(i64 0) ]
|
||||
to label %cleanup unwind label %lpad
|
||||
|
||||
cleanup:
|
||||
%retval.0 = phi i8* [ %call1, %if.then ], [ %call3, %if.end ]
|
||||
ret i8* %retval.0
|
||||
}
|
||||
|
||||
declare i8* @foo()
|
||||
declare i32 @__gxx_personality_v0(...)
|
||||
|
||||
!llvm.module.flags = !{!0}
|
||||
|
||||
!0 = !{i32 1, !"clang.arc.retainAutoreleasedReturnValueMarker", !"mov\09fp, fp\09\09// marker for objc_retainAutoreleaseReturnValue"}
|
@ -227,7 +227,15 @@ define void @test13() {
|
||||
ret void
|
||||
}
|
||||
|
||||
; CHECK-LABEL: define void @test14(
|
||||
; CHECK-NOT: clang.arc.noop.use
|
||||
; CHECK: ret void
|
||||
define void @test14(i8* %a, i8* %b) {
|
||||
call void (...) @llvm.objc.clang.arc.noop.use(i8* %a, i8* %b) nounwind
|
||||
ret void
|
||||
}
|
||||
|
||||
declare void @llvm.objc.clang.arc.use(...) nounwind
|
||||
declare void @llvm.objc.clang.arc.noop.use(...) nounwind
|
||||
|
||||
; CHECK: attributes [[NUW]] = { nounwind }
|
||||
|
@ -8,8 +8,10 @@ declare void @llvm.objc.release(i8*)
|
||||
declare i8* @llvm.objc.autorelease(i8*)
|
||||
|
||||
declare void @llvm.objc.clang.arc.use(...)
|
||||
declare void @llvm.objc.clang.arc.noop.use(...)
|
||||
|
||||
declare void @test0_helper(i8*, i8**)
|
||||
declare void @can_release(i8*)
|
||||
|
||||
; Ensure that we honor clang.arc.use as a use and don't miscompile
|
||||
; the reduced test case from <rdar://13195034>.
|
||||
@ -108,6 +110,21 @@ entry:
|
||||
ret void
|
||||
}
|
||||
|
||||
; ARC optimizer should be able to safely remove the retain/release pair as the
|
||||
; call to @llvm.objc.clang.arc.noop.use is a no-op.
|
||||
|
||||
; CHECK-LABEL: define void @test_arc_noop_use(
|
||||
; CHECK-NEXT: call void @can_release(i8* %x)
|
||||
; CHECK-NEXT: call void (...) @llvm.objc.clang.arc.noop.use(
|
||||
; CHECK-NEXT: ret void
|
||||
|
||||
define void @test_arc_noop_use(i8** %out, i8* %x) {
|
||||
call i8* @llvm.objc.retain(i8* %x)
|
||||
call void @can_release(i8* %x)
|
||||
call void (...) @llvm.objc.clang.arc.noop.use(i8* %x)
|
||||
call void @llvm.objc.release(i8* %x), !clang.imprecise_release !0
|
||||
ret void
|
||||
}
|
||||
|
||||
!0 = !{}
|
||||
|
||||
|
@ -452,6 +452,29 @@ bb1:
|
||||
ret i8* %v3
|
||||
}
|
||||
|
||||
; Remove operand bundle "clang.arc.rv" and the autoreleaseRV call if the call
|
||||
; is a tail call.
|
||||
|
||||
; CHECK-LABEL: define i8* @test31(
|
||||
; CHECK: %[[CALL:.*]] = tail call i8* @returner()
|
||||
; CHECK: ret i8* %[[CALL]]
|
||||
|
||||
define i8* @test31() {
|
||||
%call = tail call i8* @returner() [ "clang.arc.rv"(i64 0) ]
|
||||
%1 = call i8* @llvm.objc.autoreleaseReturnValue(i8* %call)
|
||||
ret i8* %1
|
||||
}
|
||||
|
||||
; CHECK-LABEL: define i8* @test32(
|
||||
; CHECK: %[[CALL:.*]] = call i8* @returner() [ "clang.arc.rv"(i64 0) ]
|
||||
; CHECK: call i8* @llvm.objc.autoreleaseReturnValue(i8* %[[CALL]])
|
||||
|
||||
define i8* @test32() {
|
||||
%call = call i8* @returner() [ "clang.arc.rv"(i64 0) ]
|
||||
%1 = call i8* @llvm.objc.autoreleaseReturnValue(i8* %call)
|
||||
ret i8* %1
|
||||
}
|
||||
|
||||
!0 = !{}
|
||||
|
||||
; CHECK: attributes [[NUW]] = { nounwind }
|
||||
|
@ -55,3 +55,13 @@ catch:
|
||||
exit:
|
||||
ret void
|
||||
}
|
||||
|
||||
; CHECK-LABEL: @test_clang_arc_rv(
|
||||
; CHECK: tail call i8* @getObj(
|
||||
|
||||
declare i8* @getObj()
|
||||
|
||||
define i8* @test_clang_arc_rv() {
|
||||
%r = call i8* @getObj() [ "clang.arc.rv"(i64 0) ]
|
||||
ret i8* %r
|
||||
}
|
Loading…
Reference in New Issue
Block a user