gen_loadstore.cc revision 596bea8f6331fe8ad6eb456afa1f7dfe1b92e09c
1/*
2 * Copyright (C) 2011 The Android Open Source Project
3 *
4 * Licensed under the Apache License, Version 2.0 (the "License");
5 * you may not use this file except in compliance with the License.
6 * You may obtain a copy of the License at
7 *
8 *      http://www.apache.org/licenses/LICENSE-2.0
9 *
10 * Unless required by applicable law or agreed to in writing, software
11 * distributed under the License is distributed on an "AS IS" BASIS,
12 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 * See the License for the specific language governing permissions and
14 * limitations under the License.
15 */
16
17#include "dex/compiler_ir.h"
18#include "dex/compiler_internals.h"
19#include "dex/quick/mir_to_lir-inl.h"
20#include "invoke_type.h"
21
22namespace art {
23
24/* This file contains target-independent codegen and support. */
25
/*
 * Load an immediate value into a fixed or temp register.  Target
 * register is clobbered, and marked in_use.
 */
LIR* Mir2Lir::LoadConstant(RegStorage r_dest, int value) {
  if (IsTemp(r_dest)) {
    // Destination is one of our temps: take ownership before overwriting it
    // so the tracker doesn't believe a stale value is still live there.
    Clobber(r_dest);
    MarkInUse(r_dest);
  }
  return LoadConstantNoClobber(r_dest, value);
}
37
/*
 * Temporary workaround for Issue 7250540.  If we're loading a constant zero into a
 * promoted floating point register, also copy a zero into the int/ref identity of
 * that sreg.
 */
void Mir2Lir::Workaround7250540(RegLocation rl_dest, RegStorage zero_reg) {
  // Only relevant when the destination is a floating point value.
  if (rl_dest.fp) {
    int pmap_index = SRegToPMap(rl_dest.s_reg_low);
    // ...and only when its fp identity was promoted to a physical register.
    if (promotion_map_[pmap_index].fp_location == kLocPhysReg) {
      // Now, determine if this vreg is ever used as a reference.  If not, we're done.
      bool used_as_reference = false;
      int base_vreg = mir_graph_->SRegToVReg(rl_dest.s_reg_low);
      // Scan every SSA reg that maps back to the same Dalvik vreg.
      for (int i = 0; !used_as_reference && (i < mir_graph_->GetNumSSARegs()); i++) {
        if (mir_graph_->SRegToVReg(mir_graph_->reg_location_[i].s_reg_low) == base_vreg) {
          used_as_reference |= mir_graph_->reg_location_[i].ref;
        }
      }
      if (!used_as_reference) {
        return;
      }
      RegStorage temp_reg = zero_reg;
      if (!temp_reg.Valid()) {
        // Caller didn't supply a register holding zero; materialize one.
        temp_reg = AllocTemp();
        LoadConstant(temp_reg, 0);
      }
      if (promotion_map_[pmap_index].core_location == kLocPhysReg) {
        // Promoted - just copy in a zero
        OpRegCopy(RegStorage::Solo32(promotion_map_[pmap_index].core_reg), temp_reg);
      } else {
        // Lives in the frame, need to store.
        ScopedMemRefType mem_ref_type(this, ResourceMask::kDalvikReg);
        StoreBaseDisp(TargetReg(kSp), SRegOffset(rl_dest.s_reg_low), temp_reg, k32);
      }
      if (!zero_reg.Valid()) {
        // Only free the temp if we allocated it here.
        FreeTemp(temp_reg);
      }
    }
  }
}
77
78/*
79 * Load a Dalvik register into a physical register.  Take care when
80 * using this routine, as it doesn't perform any bookkeeping regarding
81 * register liveness.  That is the responsibility of the caller.
82 */
83void Mir2Lir::LoadValueDirect(RegLocation rl_src, RegStorage r_dest) {
84  rl_src = UpdateLoc(rl_src);
85  if (rl_src.location == kLocPhysReg) {
86    OpRegCopy(r_dest, rl_src.reg);
87  } else if (IsInexpensiveConstant(rl_src)) {
88    // On 64-bit targets, will sign extend.  Make sure constant reference is always NULL.
89    DCHECK(!rl_src.ref || (mir_graph_->ConstantValue(rl_src) == 0));
90    LoadConstantNoClobber(r_dest, mir_graph_->ConstantValue(rl_src));
91  } else {
92    DCHECK((rl_src.location == kLocDalvikFrame) ||
93           (rl_src.location == kLocCompilerTemp));
94    ScopedMemRefType mem_ref_type(this, ResourceMask::kDalvikReg);
95    if (rl_src.ref) {
96      LoadRefDisp(TargetReg(kSp), SRegOffset(rl_src.s_reg_low), r_dest);
97    } else {
98      Load32Disp(TargetReg(kSp), SRegOffset(rl_src.s_reg_low), r_dest);
99    }
100  }
101}
102
/*
 * Similar to LoadValueDirect, but clobbers and allocates the target
 * register.  Should be used when loading to a fixed register (for example,
 * loading arguments to an out of line call.
 */
void Mir2Lir::LoadValueDirectFixed(RegLocation rl_src, RegStorage r_dest) {
  // Take ownership of the fixed target register before filling it.
  Clobber(r_dest);
  MarkInUse(r_dest);
  LoadValueDirect(rl_src, r_dest);
}
113
114/*
115 * Load a Dalvik register pair into a physical register[s].  Take care when
116 * using this routine, as it doesn't perform any bookkeeping regarding
117 * register liveness.  That is the responsibility of the caller.
118 */
119void Mir2Lir::LoadValueDirectWide(RegLocation rl_src, RegStorage r_dest) {
120  rl_src = UpdateLocWide(rl_src);
121  if (rl_src.location == kLocPhysReg) {
122    OpRegCopyWide(r_dest, rl_src.reg);
123  } else if (IsInexpensiveConstant(rl_src)) {
124    LoadConstantWide(r_dest, mir_graph_->ConstantValueWide(rl_src));
125  } else {
126    DCHECK((rl_src.location == kLocDalvikFrame) ||
127           (rl_src.location == kLocCompilerTemp));
128    ScopedMemRefType mem_ref_type(this, ResourceMask::kDalvikReg);
129    LoadBaseDisp(TargetReg(kSp), SRegOffset(rl_src.s_reg_low), r_dest, k64);
130  }
131}
132
/*
 * Similar to LoadValueDirect, but clobbers and allocates the target
 * registers.  Should be used when loading to a fixed registers (for example,
 * loading arguments to an out of line call.
 */
void Mir2Lir::LoadValueDirectWideFixed(RegLocation rl_src, RegStorage r_dest) {
  // Take ownership of the fixed target register pair before filling it.
  Clobber(r_dest);
  MarkInUse(r_dest);
  LoadValueDirectWide(rl_src, r_dest);
}
143
144RegLocation Mir2Lir::LoadValue(RegLocation rl_src, RegisterClass op_kind) {
145  DCHECK(!rl_src.ref || op_kind == kRefReg);
146  rl_src = UpdateLoc(rl_src);
147  if (rl_src.location == kLocPhysReg) {
148    if (!RegClassMatches(op_kind, rl_src.reg)) {
149      // Wrong register class, realloc, copy and transfer ownership.
150      RegStorage new_reg = AllocTypedTemp(rl_src.fp, op_kind);
151      OpRegCopy(new_reg, rl_src.reg);
152      // Clobber the old reg.
153      Clobber(rl_src.reg);
154      // ...and mark the new one live.
155      rl_src.reg = new_reg;
156      MarkLive(rl_src);
157    }
158    return rl_src;
159  }
160
161  DCHECK_NE(rl_src.s_reg_low, INVALID_SREG);
162  rl_src.reg = AllocTypedTemp(rl_src.fp, op_kind);
163  LoadValueDirect(rl_src, rl_src.reg);
164  rl_src.location = kLocPhysReg;
165  MarkLive(rl_src);
166  return rl_src;
167}
168
169RegLocation Mir2Lir::LoadValue(RegLocation rl_src) {
170  return LoadValue(rl_src, LocToRegClass(rl_src));
171}
172
// Store rl_src to the narrow (non-wide) Dalvik location rl_dest, re-using the
// source register when safe, and flush to the home frame slot if the value is
// live-out of the current extended basic block.
void Mir2Lir::StoreValue(RegLocation rl_dest, RegLocation rl_src) {
  /*
   * Sanity checking - should never try to store to the same
   * ssa name during the compilation of a single instruction
   * without an intervening ClobberSReg().
   */
  if (kIsDebugBuild) {
    DCHECK((live_sreg_ == INVALID_SREG) ||
           (rl_dest.s_reg_low != live_sreg_));
    live_sreg_ = rl_dest.s_reg_low;
  }
  LIR* def_start;
  LIR* def_end;
  DCHECK(!rl_dest.wide);
  DCHECK(!rl_src.wide);
  rl_src = UpdateLoc(rl_src);
  rl_dest = UpdateLoc(rl_dest);
  if (rl_src.location == kLocPhysReg) {
    if (IsLive(rl_src.reg) ||
      IsPromoted(rl_src.reg) ||
      (rl_dest.location == kLocPhysReg)) {
      // Src is live/promoted or Dest has assigned reg.
      rl_dest = EvalLoc(rl_dest, kAnyReg, false);
      OpRegCopy(rl_dest.reg, rl_src.reg);
    } else {
      // Just re-assign the registers.  Dest gets Src's regs
      rl_dest.reg = rl_src.reg;
      Clobber(rl_src.reg);
    }
  } else {
    // Load Src either into promoted Dest or temps allocated for Dest
    rl_dest = EvalLoc(rl_dest, kAnyReg, false);
    LoadValueDirect(rl_src, rl_dest.reg);
  }

  // Dest is now live and dirty (until/if we flush it to home location)
  MarkLive(rl_dest);
  MarkDirty(rl_dest);


  ResetDefLoc(rl_dest);
  // Flush to the frame only if the value survives this block.
  if (IsDirty(rl_dest.reg) && LiveOut(rl_dest.s_reg_low)) {
    def_start = last_lir_insn_;
    if (rl_dest.ref) {
      // References go through the ref-store path under the heap-ref mask.
      ScopedMemRefType mem_ref_type(this, ResourceMask::kHeapRef);
      StoreRefDisp(TargetReg(kSp), SRegOffset(rl_dest.s_reg_low), rl_dest.reg);
    } else {
      ScopedMemRefType mem_ref_type(this, ResourceMask::kDalvikReg);
      Store32Disp(TargetReg(kSp), SRegOffset(rl_dest.s_reg_low), rl_dest.reg);
    }
    MarkClean(rl_dest);
    def_end = last_lir_insn_;
    if (!rl_dest.ref) {
      // Exclude references from store elimination
      MarkDef(rl_dest, def_start, def_end);
    }
  }
}
231
232RegLocation Mir2Lir::LoadValueWide(RegLocation rl_src, RegisterClass op_kind) {
233  DCHECK(rl_src.wide);
234  rl_src = UpdateLocWide(rl_src);
235  if (rl_src.location == kLocPhysReg) {
236    if (!RegClassMatches(op_kind, rl_src.reg)) {
237      // Wrong register class, realloc, copy and transfer ownership.
238      RegStorage new_regs = AllocTypedTempWide(rl_src.fp, op_kind);
239      OpRegCopyWide(new_regs, rl_src.reg);
240      // Clobber the old regs.
241      Clobber(rl_src.reg);
242      // ...and mark the new ones live.
243      rl_src.reg = new_regs;
244      MarkLive(rl_src);
245    }
246    return rl_src;
247  }
248
249  DCHECK_NE(rl_src.s_reg_low, INVALID_SREG);
250  DCHECK_NE(GetSRegHi(rl_src.s_reg_low), INVALID_SREG);
251  rl_src.reg = AllocTypedTempWide(rl_src.fp, op_kind);
252  LoadValueDirectWide(rl_src, rl_src.reg);
253  rl_src.location = kLocPhysReg;
254  MarkLive(rl_src);
255  return rl_src;
256}
257
// Wide (64-bit) counterpart of StoreValue: store rl_src to rl_dest, re-using
// the source registers when safe, and flush to the frame if live-out.
void Mir2Lir::StoreValueWide(RegLocation rl_dest, RegLocation rl_src) {
  /*
   * Sanity checking - should never try to store to the same
   * ssa name during the compilation of a single instruction
   * without an intervening ClobberSReg().
   */
  if (kIsDebugBuild) {
    DCHECK((live_sreg_ == INVALID_SREG) ||
           (rl_dest.s_reg_low != live_sreg_));
    live_sreg_ = rl_dest.s_reg_low;
  }
  LIR* def_start;
  LIR* def_end;
  DCHECK(rl_dest.wide);
  DCHECK(rl_src.wide);
  rl_src = UpdateLocWide(rl_src);
  rl_dest = UpdateLocWide(rl_dest);
  if (rl_src.location == kLocPhysReg) {
    if (IsLive(rl_src.reg) ||
        IsPromoted(rl_src.reg) ||
        (rl_dest.location == kLocPhysReg)) {
      /*
       * If src reg[s] are tied to the original Dalvik vreg via liveness or promotion, we
       * can't repurpose them.  Similarly, if the dest reg[s] are tied to Dalvik vregs via
       * promotion, we can't just re-assign.  In these cases, we have to copy.
       */
      rl_dest = EvalLoc(rl_dest, kAnyReg, false);
      OpRegCopyWide(rl_dest.reg, rl_src.reg);
    } else {
      // Just re-assign the registers.  Dest gets Src's regs
      rl_dest.reg = rl_src.reg;
      Clobber(rl_src.reg);
    }
  } else {
    // Load Src either into promoted Dest or temps allocated for Dest
    rl_dest = EvalLoc(rl_dest, kAnyReg, false);
    LoadValueDirectWide(rl_src, rl_dest.reg);
  }

  // Dest is now live and dirty (until/if we flush it to home location)
  MarkLive(rl_dest);
  MarkWide(rl_dest.reg);
  MarkDirty(rl_dest);

  ResetDefLocWide(rl_dest);
  // Flush to the frame only if either half of the pair survives this block.
  if (IsDirty(rl_dest.reg) && (LiveOut(rl_dest.s_reg_low) ||
      LiveOut(GetSRegHi(rl_dest.s_reg_low)))) {
    def_start = last_lir_insn_;
    // The two halves of a wide value must occupy adjacent Dalvik vregs.
    DCHECK_EQ((mir_graph_->SRegToVReg(rl_dest.s_reg_low)+1),
              mir_graph_->SRegToVReg(GetSRegHi(rl_dest.s_reg_low)));
    ScopedMemRefType mem_ref_type(this, ResourceMask::kDalvikReg);
    StoreBaseDisp(TargetReg(kSp), SRegOffset(rl_dest.s_reg_low), rl_dest.reg, k64);
    MarkClean(rl_dest);
    def_end = last_lir_insn_;
    MarkDefWide(rl_dest, def_start, def_end);
  }
}
315
316void Mir2Lir::StoreFinalValue(RegLocation rl_dest, RegLocation rl_src) {
317  DCHECK_EQ(rl_src.location, kLocPhysReg);
318
319  if (rl_dest.location == kLocPhysReg) {
320    OpRegCopy(rl_dest.reg, rl_src.reg);
321  } else {
322    // Just re-assign the register.  Dest gets Src's reg.
323    rl_dest.location = kLocPhysReg;
324    rl_dest.reg = rl_src.reg;
325    Clobber(rl_src.reg);
326  }
327
328  // Dest is now live and dirty (until/if we flush it to home location)
329  MarkLive(rl_dest);
330  MarkDirty(rl_dest);
331
332
333  ResetDefLoc(rl_dest);
334  if (IsDirty(rl_dest.reg) && LiveOut(rl_dest.s_reg_low)) {
335    LIR *def_start = last_lir_insn_;
336    ScopedMemRefType mem_ref_type(this, ResourceMask::kDalvikReg);
337    Store32Disp(TargetReg(kSp), SRegOffset(rl_dest.s_reg_low), rl_dest.reg);
338    MarkClean(rl_dest);
339    LIR *def_end = last_lir_insn_;
340    if (!rl_dest.ref) {
341      // Exclude references from store elimination
342      MarkDef(rl_dest, def_start, def_end);
343    }
344  }
345}
346
// Wide counterpart of StoreFinalValue: the 64-bit source must already be in
// physical register(s); transfer ownership when possible and flush if live-out.
void Mir2Lir::StoreFinalValueWide(RegLocation rl_dest, RegLocation rl_src) {
  DCHECK(rl_dest.wide);
  DCHECK(rl_src.wide);
  DCHECK_EQ(rl_src.location, kLocPhysReg);

  if (rl_dest.location == kLocPhysReg) {
    OpRegCopyWide(rl_dest.reg, rl_src.reg);
  } else {
    // Just re-assign the registers.  Dest gets Src's regs.
    rl_dest.location = kLocPhysReg;
    rl_dest.reg = rl_src.reg;
    Clobber(rl_src.reg);
  }

  // Dest is now live and dirty (until/if we flush it to home location).
  MarkLive(rl_dest);
  MarkWide(rl_dest.reg);
  MarkDirty(rl_dest);

  ResetDefLocWide(rl_dest);
  // Flush to the frame only if either half of the pair survives this block.
  if (IsDirty(rl_dest.reg) && (LiveOut(rl_dest.s_reg_low) ||
      LiveOut(GetSRegHi(rl_dest.s_reg_low)))) {
    LIR *def_start = last_lir_insn_;
    // The two halves of a wide value must occupy adjacent Dalvik vregs.
    DCHECK_EQ((mir_graph_->SRegToVReg(rl_dest.s_reg_low)+1),
              mir_graph_->SRegToVReg(GetSRegHi(rl_dest.s_reg_low)));
    ScopedMemRefType mem_ref_type(this, ResourceMask::kDalvikReg);
    StoreBaseDisp(TargetReg(kSp), SRegOffset(rl_dest.s_reg_low), rl_dest.reg, k64);
    MarkClean(rl_dest);
    LIR *def_end = last_lir_insn_;
    MarkDefWide(rl_dest, def_start, def_end);
  }
}
379
/* Utilities to load the current Method* */
// Load the current Method* into the fixed register r_tgt.
void Mir2Lir::LoadCurrMethodDirect(RegStorage r_tgt) {
  LoadValueDirectFixed(mir_graph_->GetMethodLoc(), r_tgt);
}
384
// Load the current Method* into a reference-class register and return its location.
RegLocation Mir2Lir::LoadCurrMethod() {
  return LoadValue(mir_graph_->GetMethodLoc(), kRefReg);
}
388
389RegLocation Mir2Lir::ForceTemp(RegLocation loc) {
390  DCHECK(!loc.wide);
391  DCHECK(loc.location == kLocPhysReg);
392  DCHECK(!loc.reg.IsFloat());
393  if (IsTemp(loc.reg)) {
394    Clobber(loc.reg);
395  } else {
396    RegStorage temp_low = AllocTemp();
397    OpRegCopy(temp_low, loc.reg);
398    loc.reg = temp_low;
399  }
400
401  // Ensure that this doesn't represent the original SR any more.
402  loc.s_reg_low = INVALID_SREG;
403  return loc;
404}
405
406RegLocation Mir2Lir::ForceTempWide(RegLocation loc) {
407  DCHECK(loc.wide);
408  DCHECK(loc.location == kLocPhysReg);
409  DCHECK(!loc.reg.IsFloat());
410
411  if (!loc.reg.IsPair()) {
412    if (IsTemp(loc.reg)) {
413      Clobber(loc.reg);
414    } else {
415      RegStorage temp = AllocTempWide();
416      OpRegCopy(temp, loc.reg);
417      loc.reg = temp;
418    }
419  } else {
420    if (IsTemp(loc.reg.GetLow())) {
421      Clobber(loc.reg.GetLow());
422    } else {
423      RegStorage temp_low = AllocTemp();
424      OpRegCopy(temp_low, loc.reg.GetLow());
425      loc.reg.SetLowReg(temp_low.GetReg());
426    }
427    if (IsTemp(loc.reg.GetHigh())) {
428      Clobber(loc.reg.GetHigh());
429    } else {
430      RegStorage temp_high = AllocTemp();
431      OpRegCopy(temp_high, loc.reg.GetHigh());
432      loc.reg.SetHighReg(temp_high.GetReg());
433    }
434  }
435
436  // Ensure that this doesn't represent the original SR any more.
437  loc.s_reg_low = INVALID_SREG;
438  return loc;
439}
440
441}  // namespace art
442