e29dba6476
Currently, when edge weights are assigned to the edges created while lowering a switch statement, the weight on the edge to the default statement (let's call it the "default weight" here) is not considered. We need to distribute this weight properly, but without value profiling we have no idea how. In this patch, I applied the heuristic that the default weight is evenly distributed to the successors.

For example, take a switch statement with cases 1, 2, 3, 5, 10, 11, 20, where every edge from the switch to each successor has weight 10. If a binary search tree is built to test n < 10, its two out-edges will have weight 4x10 + 10/2 = 45 and 3x10 + 10/2 = 35 respectively (currently they are 40 and 30, since the default weight is not considered). Each distributed share (which is 5 here) is stored in its SwitchWorkListItem for further distribution.

There are some exceptions:

- For a jump table header which doesn't have any edge to the default statement, we don't distribute the default weight to it.
- For a bit test header which covers a contiguous range and hence has no edge to the default statement, we don't distribute the default weight to it.
- When the branch checks a single value or a contiguous range with no edge to the default statement, we don't distribute the default weight to it.

In all other cases, the default weight is evenly distributed to the successors.

Differential Revision: http://reviews.llvm.org/D12418

llvm-svn: 246522
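As a rough illustration of that arithmetic (this is not code from the patch; the Cluster struct, the sideWeight helper, and the hard-coded case list are assumptions made for this sketch, not LLVM APIs), a standalone C++ program reproducing the 45/35 split from the example might look like this:

// Sketch of the even-distribution heuristic described above. All names here
// (Cluster, sideWeight) are hypothetical and do not come from LLVM.
#include <cstdint>
#include <cstdio>
#include <vector>

// One case label together with the weight of its edge out of the switch.
struct Cluster { int Value; uint64_t Weight; };

// Sum the case weights on one side of the pivot and add that side's even
// share of the default weight (DefaultWeight / 2 for a two-way split).
static uint64_t sideWeight(const std::vector<Cluster> &Side, uint64_t DefaultShare) {
  uint64_t W = DefaultShare;
  for (const Cluster &C : Side)
    W += C.Weight;
  return W;
}

int main() {
  // Cases 1, 2, 3, 5, 10, 11, 20; every case edge has weight 10.
  std::vector<Cluster> Left  = {{1, 10}, {2, 10}, {3, 10}, {5, 10}};   // n < 10
  std::vector<Cluster> Right = {{10, 10}, {11, 10}, {20, 10}};         // n >= 10
  uint64_t DefaultWeight = 10;

  // The default weight is split evenly between the two out-edges of the test.
  uint64_t LeftW  = sideWeight(Left,  DefaultWeight / 2);   // 4x10 + 5 = 45
  uint64_t RightW = sideWeight(Right, DefaultWeight / 2);   // 3x10 + 5 = 35
  std::printf("n < 10: %llu, n >= 10: %llu\n",
              (unsigned long long)LeftW, (unsigned long long)RightW);
  return 0;
}

Compiled and run, this prints "n < 10: 45, n >= 10: 35", matching the weights described above.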
; RUN: llc < %s -mtriple=thumbv8 -arm-atomic-cfg-tidy=0 | FileCheck %s
; RUN: llc < %s -mtriple=thumbv7 -arm-atomic-cfg-tidy=0 -arm-restrict-it | FileCheck %s

; CHECK: it ne
; CHECK-NEXT: cmpne
; CHECK-NEXT: bne [[JUMPTARGET:.LBB[0-9]+_[0-9]+]]
; CHECK: cbz
; CHECK-NEXT: %if.else163
; CHECK-NEXT: mov.w
; CHECK-NEXT: b
; CHECK: [[JUMPTARGET]]:{{.*}}%if.else173
; CHECK-NEXT: mov.w
; CHECK-NEXT: pop
; CHECK-NEXT: %if.else145
; CHECK-NEXT: mov.w

%struct.hc = type { i32, i32, i32, i32 }

define i32 @t(i32 %type) optsize {
entry:
  br i1 undef, label %if.then, label %if.else

if.then:
  unreachable

if.else:
  br i1 undef, label %if.then15, label %if.else18

if.then15:
  unreachable

if.else18:
  switch i32 %type, label %if.else173 [
    i32 3, label %if.then115
    i32 1, label %if.then102
  ]

if.then102:
  br i1 undef, label %cond.true10.i, label %t.exit

cond.true10.i:
  br label %t.exit

t.exit:
  unreachable

if.then115:
  br i1 undef, label %if.else163, label %if.else145

if.else145:
  %call150 = call fastcc %struct.hc* @foo(%struct.hc* undef, i32 34865152) optsize
  br label %while.body172

if.else163:
  %call168 = call fastcc %struct.hc* @foo(%struct.hc* undef, i32 34078720) optsize
  br label %while.body172

while.body172:
  br label %while.body172

if.else173:
  ret i32 -1
}

declare hidden fastcc %struct.hc* @foo(%struct.hc* nocapture, i32) nounwind optsize