/*
 * Copyright (C) 2017 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#include "load_store_analysis.h"

namespace art {

// A cap for the number of heap locations to prevent pathological time/space consumption.
// The number of heap locations for most methods stays below this threshold.
constexpr size_t kMaxNumberOfHeapLocations = 32;

// Test if two integer ranges [l1,h1] and [l2,h2] overlap.
// Note that the ranges are inclusive on both ends.
//       l1|------|h1
//  l2|------|h2
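// For example, [0,3] and [2,5] overlap because max(0,2) = 2 <= min(3,5) = 3,
// while [0,3] and [4,7] do not because max(0,4) = 4 > min(3,7) = 3.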
static bool CanIntegerRangesOverlap(int64_t l1, int64_t h1, int64_t l2, int64_t h2) {
  return std::max(l1, l2) <= std::min(h1, h2);
}

static bool IsAddOrSub(const HInstruction* instruction) {
  return instruction->IsAdd() || instruction->IsSub();
}

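// Tests whether an access at [i +/- CONST] (covering `vector_length1` elements) and an
// access at [i] (covering `vector_length2` elements) may touch the same array element.
// For example, with scalar accesses [i+1] and [i], the compared ranges are [1,1] and
// [0,0], which do not overlap, so the accesses cannot alias; with vector accesses of
// length 4, [i+2] and [i] cover [2,5] and [0,3], which overlap, so they may alias.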
static bool CanBinaryOpAndIndexAlias(const HBinaryOperation* idx1,
                                     const size_t vector_length1,
                                     const HInstruction* idx2,
                                     const size_t vector_length2) {
  if (!IsAddOrSub(idx1)) {
    // We currently only support Add and Sub operations.
    return true;
  }
  if (idx1->AsBinaryOperation()->GetLeastConstantLeft() != idx2) {
    // Cannot analyze [i+CONST1] and [j].
    return true;
  }
  if (!idx1->GetConstantRight()->IsIntConstant()) {
    return true;
  }

  // Since 'i' is the same in [i+CONST] and [i],
  // further compare [CONST] and [0].
  int64_t l1 = idx1->IsAdd() ?
      idx1->GetConstantRight()->AsIntConstant()->GetValue() :
      -idx1->GetConstantRight()->AsIntConstant()->GetValue();
  int64_t l2 = 0;
  int64_t h1 = l1 + (vector_length1 - 1);
  int64_t h2 = l2 + (vector_length2 - 1);
  return CanIntegerRangesOverlap(l1, h1, l2, h2);
}

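// Tests whether accesses at [i + CONST1] and [i + CONST2] (each possibly covering several
// vector elements) may touch the same array element.
// For example, with scalar accesses [i+1] and [i-1], the compared ranges are [1,1] and
// [-1,-1], which do not overlap, so the accesses cannot alias; with vector accesses of
// length 4, the same indices cover [1,4] and [-1,2], which overlap, so they may alias.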
static bool CanBinaryOpsAlias(const HBinaryOperation* idx1,
                              const size_t vector_length1,
                              const HBinaryOperation* idx2,
                              const size_t vector_length2) {
  if (!IsAddOrSub(idx1) || !IsAddOrSub(idx2)) {
    // We currently only support Add and Sub operations.
    return true;
  }
  if (idx1->AsBinaryOperation()->GetLeastConstantLeft() !=
      idx2->AsBinaryOperation()->GetLeastConstantLeft()) {
    // Cannot analyze [i+CONST1] and [j+CONST2].
    return true;
  }
  if (!idx1->GetConstantRight()->IsIntConstant() ||
      !idx2->GetConstantRight()->IsIntConstant()) {
    return true;
  }

  // Since 'i' is the same in [i+CONST1] and [i+CONST2],
  // further compare [CONST1] and [CONST2].
  int64_t l1 = idx1->IsAdd() ?
      idx1->GetConstantRight()->AsIntConstant()->GetValue() :
      -idx1->GetConstantRight()->AsIntConstant()->GetValue();
  int64_t l2 = idx2->IsAdd() ?
      idx2->GetConstantRight()->AsIntConstant()->GetValue() :
      -idx2->GetConstantRight()->AsIntConstant()->GetValue();
  int64_t h1 = l1 + (vector_length1 - 1);
  int64_t h2 = l2 + (vector_length2 - 1);
  return CanIntegerRangesOverlap(l1, h1, l2, h2);
}

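// Some concrete outcomes for scalar accesses (vector_length == HeapLocation::kScalar):
//   a[i]   and a[i]    -> may alias (same index instruction).
//   a[5]   and a[7]    -> cannot alias (disjoint constant ranges).
//   a[i+1] and a[i]    -> cannot alias.
//   a[i+1] and a[i-1]  -> cannot alias.
//   a[i]   and a[j]    -> may alias (unrelated indices; conservative default).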
bool HeapLocationCollector::CanArrayElementsAlias(const HInstruction* idx1,
                                                  const size_t vector_length1,
                                                  const HInstruction* idx2,
                                                  const size_t vector_length2) const {
  DCHECK(idx1 != nullptr);
  DCHECK(idx2 != nullptr);
  DCHECK_GE(vector_length1, HeapLocation::kScalar);
  DCHECK_GE(vector_length2, HeapLocation::kScalar);

  // [i] and [i].
  if (idx1 == idx2) {
    return true;
  }

  // [CONST1] and [CONST2].
  if (idx1->IsIntConstant() && idx2->IsIntConstant()) {
    int64_t l1 = idx1->AsIntConstant()->GetValue();
    int64_t l2 = idx2->AsIntConstant()->GetValue();
    // To avoid any overflow in the following CONST+vector_length calculation,
    // use int64_t instead of int32_t.
    int64_t h1 = l1 + (vector_length1 - 1);
    int64_t h2 = l2 + (vector_length2 - 1);
    return CanIntegerRangesOverlap(l1, h1, l2, h2);
  }

  // [i+CONST] and [i].
  if (idx1->IsBinaryOperation() &&
      idx1->AsBinaryOperation()->GetConstantRight() != nullptr &&
      idx1->AsBinaryOperation()->GetLeastConstantLeft() == idx2) {
    return CanBinaryOpAndIndexAlias(idx1->AsBinaryOperation(),
                                    vector_length1,
                                    idx2,
                                    vector_length2);
  }

  // [i] and [i+CONST].
  if (idx2->IsBinaryOperation() &&
      idx2->AsBinaryOperation()->GetConstantRight() != nullptr &&
      idx2->AsBinaryOperation()->GetLeastConstantLeft() == idx1) {
    return CanBinaryOpAndIndexAlias(idx2->AsBinaryOperation(),
                                    vector_length2,
                                    idx1,
                                    vector_length1);
  }

  // [i+CONST1] and [i+CONST2].
  if (idx1->IsBinaryOperation() &&
      idx1->AsBinaryOperation()->GetConstantRight() != nullptr &&
      idx2->IsBinaryOperation() &&
      idx2->AsBinaryOperation()->GetConstantRight() != nullptr) {
    return CanBinaryOpsAlias(idx1->AsBinaryOperation(),
                             vector_length1,
                             idx2->AsBinaryOperation(),
                             vector_length2);
  }

  // By default, MAY alias.
  return true;
}

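// A minimal usage sketch (hypothetical wiring only; the pass is normally registered with
// the optimizing compiler's pass framework, and constructor arguments and query helpers
// may differ across versions):
//
//   LoadStoreAnalysis lsa(graph);
//   if (lsa.Run()) {
//     const HeapLocationCollector& collector = lsa.GetHeapLocationCollector();
//     // Later passes such as load-store elimination can then query the aliasing
//     // information, e.g. collector.MayAlias(loc1, loc2).
//   }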
bool LoadStoreAnalysis::Run() {
  for (HBasicBlock* block : graph_->GetReversePostOrder()) {
    heap_location_collector_.VisitBasicBlock(block);
  }

  if (heap_location_collector_.GetNumberOfHeapLocations() > kMaxNumberOfHeapLocations) {
    // Bail out if there are too many heap locations to deal with.
    heap_location_collector_.CleanUp();
    return false;
  }
  if (!heap_location_collector_.HasHeapStores()) {
    // Without heap stores, this pass would act mostly as GVN on heap accesses.
    heap_location_collector_.CleanUp();
    return false;
  }
  if (heap_location_collector_.HasVolatile() || heap_location_collector_.HasMonitorOps()) {
    // Don't do load/store elimination if the method has volatile field accesses or
    // monitor operations, for now.
    // TODO: do it right.
    heap_location_collector_.CleanUp();
    return false;
  }

  heap_location_collector_.BuildAliasingMatrix();
  return true;
}

}  // namespace art