stack_map.h revision bdba92d48a01ab9c2d95159166ba12918df8d980
/*
 * Copyright (C) 2014 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#ifndef ART_RUNTIME_STACK_MAP_H_
#define ART_RUNTIME_STACK_MAP_H_

#include <limits>
#include <ostream>

#include "base/bit_vector.h"
#include "memory_region.h"
#include "utils.h"

namespace art {

// Size of a frame slot, in bytes.  This constant is a signed value,
// to please the compiler in arithmetic operations involving int32_t
// (signed) values.
static ssize_t constexpr kFrameSlotSize = 4;

// Size of Dex virtual registers.
static size_t constexpr kVRegSize = 4;

class CodeInfo;

/**
 * Classes in this file are wrappers around stack map information backed by a
 * MemoryRegion. As such, they read from and write to the region; they do not
 * have fields of their own.
 */

/**
 * Inline information for a specific PC. The information is of the form:
 * [inlining_depth, [method_dex reference]+]
 */
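// For illustration (an added example): with an inlining depth of 2, an
// InlineInfo region spans kFixedSize + 2 * SingleEntrySize() = 1 + 2 * 4
// = 9 bytes: one byte holding the depth, followed by two 4-byte method
// reference indices, one per inlined frame.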
class InlineInfo {
 public:
  explicit InlineInfo(MemoryRegion region) : region_(region) {}

  uint8_t GetDepth() const {
    return region_.LoadUnaligned<uint8_t>(kDepthOffset);
  }

  void SetDepth(uint8_t depth) {
    region_.StoreUnaligned<uint8_t>(kDepthOffset, depth);
  }

  uint32_t GetMethodReferenceIndexAtDepth(uint8_t depth) const {
    return region_.LoadUnaligned<uint32_t>(kFixedSize + depth * SingleEntrySize());
  }

  void SetMethodReferenceIndexAtDepth(uint8_t depth, uint32_t index) {
    region_.StoreUnaligned<uint32_t>(kFixedSize + depth * SingleEntrySize(), index);
  }

  static size_t SingleEntrySize() {
    return sizeof(uint32_t);
  }

 private:
  // TODO: Instead of plain types such as "uint8_t", introduce
  // typedefs (and document the memory layout of InlineInfo).
  static constexpr int kDepthOffset = 0;
  static constexpr int kFixedSize = kDepthOffset + sizeof(uint8_t);

  MemoryRegion region_;

  friend class CodeInfo;
  friend class StackMap;
  friend class StackMapStream;
};

// Dex register location container used by DexRegisterMap and StackMapStream.
class DexRegisterLocation {
 public:
  /*
   * The location kind used to populate the Dex register information in a
   * StackMapStream can either be:
   * - kNone: the register has no location yet, meaning it has not been set;
   * - kConstant: value holds the constant;
   * - kInStack: value holds the stack offset;
   * - kInRegister: value holds the physical register number;
   * - kInFpuRegister: value holds the physical register number.
   *
   * In addition, DexRegisterMap also uses these values:
   * - kInStackLargeOffset: value holds a "large" stack offset (greater than
   *   or equal to 128 bytes);
   * - kConstantLargeValue: value holds a "large" constant (negative, or
   *   greater than or equal to 32).
   */
  enum class Kind : uint8_t {
    // Short location kinds, for entries fitting on one byte (3 bits
    // for the kind, 5 bits for the value) in a DexRegisterMap.
    kNone = 0,                // 0b000
    kInStack = 1,             // 0b001
    kInRegister = 2,          // 0b010
    kInFpuRegister = 3,       // 0b011
    kConstant = 4,            // 0b100

    // Large location kinds, requiring a 5-byte encoding (1 byte for the
    // kind, 4 bytes for the value).

    // Stack location at a large offset, meaning that the offset value
    // divided by the stack frame slot size (4 bytes) cannot fit on a
    // 5-bit unsigned integer (i.e., this offset value is greater than
    // or equal to 2^5 * 4 = 128 bytes).
    kInStackLargeOffset = 5,  // 0b101

    // Large constant, that cannot fit on a 5-bit unsigned integer (i.e.,
    // negative, or greater than or equal to 2^5 = 32).
    kConstantLargeValue = 6,  // 0b110

    kLastLocationKind = kConstantLargeValue
  };
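  // Worked example (added illustration): a stack location at byte offset 28
  // is stored as slot 28 / kFrameSlotSize = 7, which fits in 5 bits, so the
  // one-byte kInStack encoding is used.  A stack location at byte offset 136
  // becomes slot 34, which does not fit in 5 bits, so the 5-byte
  // kInStackLargeOffset encoding is required.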

  static_assert(
      sizeof(Kind) == 1u,
      "art::DexRegisterLocation::Kind has a size different from one byte.");

  static const char* PrettyDescriptor(Kind kind) {
    switch (kind) {
      case Kind::kNone:
        return "none";
      case Kind::kInStack:
        return "in stack";
      case Kind::kInRegister:
        return "in register";
      case Kind::kInFpuRegister:
        return "in fpu register";
      case Kind::kConstant:
        return "as constant";
      case Kind::kInStackLargeOffset:
        return "in stack (large offset)";
      case Kind::kConstantLargeValue:
        return "as constant (large value)";
      default:
        UNREACHABLE();
    }
  }

  static bool IsShortLocationKind(Kind kind) {
    switch (kind) {
      case Kind::kNone:
      case Kind::kInStack:
      case Kind::kInRegister:
      case Kind::kInFpuRegister:
      case Kind::kConstant:
        return true;

      case Kind::kInStackLargeOffset:
      case Kind::kConstantLargeValue:
        return false;

      default:
        UNREACHABLE();
    }
  }

  // Convert `kind` to a "surface" kind, i.e. one that doesn't include
  // any value with a "large" qualifier.
  // TODO: Introduce another enum type for the surface kind?
  static Kind ConvertToSurfaceKind(Kind kind) {
    switch (kind) {
      case Kind::kNone:
      case Kind::kInStack:
      case Kind::kInRegister:
      case Kind::kInFpuRegister:
      case Kind::kConstant:
        return kind;

      case Kind::kInStackLargeOffset:
        return Kind::kInStack;

      case Kind::kConstantLargeValue:
        return Kind::kConstant;

      default:
        UNREACHABLE();
    }
  }

  DexRegisterLocation(Kind kind, int32_t value)
      : kind_(kind), value_(value) {}

  static DexRegisterLocation None() {
    return DexRegisterLocation(Kind::kNone, 0);
  }

  // Get the "surface" kind of the location, i.e., the one that doesn't
  // include any value with a "large" qualifier.
  Kind GetKind() const {
    return ConvertToSurfaceKind(kind_);
  }

  // Get the value of the location.
  int32_t GetValue() const { return value_; }

  // Get the actual kind of the location.
  Kind GetInternalKind() const { return kind_; }

  bool operator==(DexRegisterLocation other) const {
    return kind_ == other.kind_ && value_ == other.value_;
  }

  bool operator!=(DexRegisterLocation other) const {
    return !(*this == other);
  }

 private:
  Kind kind_;
  int32_t value_;
};

/**
 * Information on dex register values for a specific PC. The information is
 * of the form:
 * [live_bit_mask, DexRegisterLocation+].
 * DexRegisterLocations are either 1- or 5-byte wide (see art::DexRegisterLocation::Kind).
 */
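// Illustrative layout (an added example): for a method with 3 Dex registers
// where only v0 and v2 are live, the map starts with a one-byte live bit mask
// with bits 0 and 2 set, followed by the encoded location of v0 and then the
// encoded location of v2; dead registers get no location entry.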
class DexRegisterMap {
 public:
  explicit DexRegisterMap(MemoryRegion region) : region_(region) {}

  // Short (compressed) location, fitting on one byte.
  typedef uint8_t ShortLocation;

  static size_t LiveBitMaskSize(uint16_t number_of_dex_registers) {
    return RoundUp(number_of_dex_registers, kBitsPerByte) / kBitsPerByte;
  }

  void SetLiveBitMask(size_t offset,
                      uint16_t number_of_dex_registers,
                      const BitVector& live_dex_registers_mask) {
    for (uint16_t i = 0; i < number_of_dex_registers; i++) {
      region_.StoreBit(offset + i, live_dex_registers_mask.IsBitSet(i));
    }
  }

  void SetRegisterInfo(size_t offset, const DexRegisterLocation& dex_register_location) {
    DexRegisterLocation::Kind kind = ComputeCompressedKind(dex_register_location);
    int32_t value = dex_register_location.GetValue();
    if (DexRegisterLocation::IsShortLocationKind(kind)) {
      // Short location.  Compress the kind and the value into a single byte.
      if (kind == DexRegisterLocation::Kind::kInStack) {
        // Instead of storing stack offsets expressed in bytes for
        // short stack locations, store slot offsets.  A stack offset
        // is a multiple of 4 (kFrameSlotSize).  This means that by
        // dividing it by 4, we can fit values from the [0, 128)
        // interval in a short stack location, and not just values
        // from the [0, 32) interval.
        DCHECK_EQ(value % kFrameSlotSize, 0);
        value /= kFrameSlotSize;
      }
      DCHECK(IsUint<kValueBits>(value)) << value;
      region_.StoreUnaligned<ShortLocation>(offset, MakeShortLocation(kind, value));
    } else {
      // Large location.  Write the kind on one byte and the value
      // on 4 bytes.
      DCHECK(!IsUint<kValueBits>(value)) << value;
      if (kind == DexRegisterLocation::Kind::kInStackLargeOffset) {
        // Also divide large stack offsets by 4 for the sake of consistency.
        DCHECK_EQ(value % kFrameSlotSize, 0);
        value /= kFrameSlotSize;
      }
      // Data can be unaligned as the written Dex register locations can
      // either be 1-byte or 5-byte wide.  Use
      // art::MemoryRegion::StoreUnaligned instead of
      // art::MemoryRegion::Store to prevent unaligned word accesses on ARM.
      region_.StoreUnaligned<DexRegisterLocation::Kind>(offset, kind);
      region_.StoreUnaligned<int32_t>(offset + sizeof(DexRegisterLocation::Kind), value);
    }
  }
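  // Worked example (added illustration): encoding "in stack, byte offset 28"
  // first divides the offset by kFrameSlotSize, giving slot 7, then packs kind
  // and value into one byte with MakeShortLocation:
  //   (kInStack & 0b111) << 0  |  (7 & 0b11111) << 3  ==  0b00111001  ==  0x39.
  // GetLocationKindAndValue reverses this: the low 3 bits give kInStack, the
  // high 5 bits give 7, and the value is multiplied by 4 to return 28.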

  bool IsDexRegisterLive(uint16_t dex_register_index) const {
    size_t offset = kFixedSize;
    return region_.LoadBit(offset + dex_register_index);
  }

  static constexpr size_t kNoDexRegisterLocationOffset = -1;

  static size_t GetDexRegisterMapLocationsOffset(uint16_t number_of_dex_registers) {
    return kLiveBitMaskOffset + LiveBitMaskSize(number_of_dex_registers);
  }

  // Find the offset of the Dex register location number `dex_register_index`.
  size_t FindLocationOffset(uint16_t dex_register_index, uint16_t number_of_dex_registers) const {
    if (!IsDexRegisterLive(dex_register_index)) return kNoDexRegisterLocationOffset;
    size_t offset = GetDexRegisterMapLocationsOffset(number_of_dex_registers);
    // Skip the locations of the first `dex_register_index` Dex registers.
    for (uint16_t i = 0; i < dex_register_index; ++i) {
      if (IsDexRegisterLive(i)) {
        // Read the next byte and inspect its first 3 bits to decide
        // whether it is a short or a large location.
        DexRegisterLocation::Kind kind = ExtractKindAtOffset(offset);
        if (DexRegisterLocation::IsShortLocationKind(kind)) {
          // Short location.  Skip the current byte.
          offset += SingleShortEntrySize();
        } else {
          // Large location.  Skip the next 5 bytes.
          offset += SingleLargeEntrySize();
        }
      }
    }
    return offset;
  }

  // Get the surface kind.
  DexRegisterLocation::Kind GetLocationKind(uint16_t dex_register_index,
                                            uint16_t number_of_dex_registers) const {
    return IsDexRegisterLive(dex_register_index)
        ? DexRegisterLocation::ConvertToSurfaceKind(
              GetLocationInternalKind(dex_register_index, number_of_dex_registers))
        : DexRegisterLocation::Kind::kNone;
  }

  // Get the internal kind.
  DexRegisterLocation::Kind GetLocationInternalKind(uint16_t dex_register_index,
                                                    uint16_t number_of_dex_registers) const {
    return IsDexRegisterLive(dex_register_index)
        ? ExtractKindAtOffset(FindLocationOffset(dex_register_index, number_of_dex_registers))
        : DexRegisterLocation::Kind::kNone;
  }

  // TODO: Rename as GetDexRegisterLocation?
  DexRegisterLocation GetLocationKindAndValue(uint16_t dex_register_index,
                                              uint16_t number_of_dex_registers) const {
    if (!IsDexRegisterLive(dex_register_index)) {
      return DexRegisterLocation::None();
    }
    size_t offset = FindLocationOffset(dex_register_index, number_of_dex_registers);
    // Read the first byte and inspect its first 3 bits to get the location kind.
    ShortLocation first_byte = region_.LoadUnaligned<ShortLocation>(offset);
    DexRegisterLocation::Kind kind = ExtractKindFromShortLocation(first_byte);
    if (DexRegisterLocation::IsShortLocationKind(kind)) {
      // Short location.  Extract the value from the remaining 5 bits.
      int32_t value = ExtractValueFromShortLocation(first_byte);
      if (kind == DexRegisterLocation::Kind::kInStack) {
        // Convert the stack slot (short) offset to a byte offset value.
        value *= kFrameSlotSize;
      }
      return DexRegisterLocation(kind, value);
    } else {
      // Large location.  Read the next four bytes to get the value.
      int32_t value = region_.LoadUnaligned<int32_t>(offset + sizeof(DexRegisterLocation::Kind));
      if (kind == DexRegisterLocation::Kind::kInStackLargeOffset) {
        // Convert the stack slot (large) offset to a byte offset value.
        value *= kFrameSlotSize;
      }
      return DexRegisterLocation(kind, value);
    }
  }
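  // Illustrative use (an added sketch; `map`, `vreg` and `num_regs` are
  // hypothetical names for a populated map, a Dex register index and the
  // method's Dex register count):
  //   DexRegisterLocation loc = map.GetLocationKindAndValue(vreg, num_regs);
  //   if (loc.GetKind() == DexRegisterLocation::Kind::kInStack) {
  //     int32_t offset_in_bytes = loc.GetValue();
  //   }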

  int32_t GetStackOffsetInBytes(uint16_t dex_register_index,
                                uint16_t number_of_dex_registers) const {
    DexRegisterLocation location =
        GetLocationKindAndValue(dex_register_index, number_of_dex_registers);
    DCHECK(location.GetKind() == DexRegisterLocation::Kind::kInStack);
    // GetLocationKindAndValue returns the offset in bytes.
    return location.GetValue();
  }

  int32_t GetConstant(uint16_t dex_register_index, uint16_t number_of_dex_registers) const {
    DexRegisterLocation location =
        GetLocationKindAndValue(dex_register_index, number_of_dex_registers);
    DCHECK(location.GetKind() == DexRegisterLocation::Kind::kConstant);
    return location.GetValue();
  }

  int32_t GetMachineRegister(uint16_t dex_register_index, uint16_t number_of_dex_registers) const {
    DexRegisterLocation location =
        GetLocationKindAndValue(dex_register_index, number_of_dex_registers);
    DCHECK(location.GetInternalKind() == DexRegisterLocation::Kind::kInRegister
           || location.GetInternalKind() == DexRegisterLocation::Kind::kInFpuRegister)
        << DexRegisterLocation::PrettyDescriptor(location.GetInternalKind());
    return location.GetValue();
  }

  // Compute the compressed kind of `location`.
  static DexRegisterLocation::Kind ComputeCompressedKind(const DexRegisterLocation& location) {
    switch (location.GetInternalKind()) {
      case DexRegisterLocation::Kind::kNone:
        DCHECK_EQ(location.GetValue(), 0);
        return DexRegisterLocation::Kind::kNone;

      case DexRegisterLocation::Kind::kInRegister:
        DCHECK_GE(location.GetValue(), 0);
        DCHECK_LT(location.GetValue(), 1 << DexRegisterMap::kValueBits);
        return DexRegisterLocation::Kind::kInRegister;

      case DexRegisterLocation::Kind::kInFpuRegister:
        DCHECK_GE(location.GetValue(), 0);
        DCHECK_LT(location.GetValue(), 1 << DexRegisterMap::kValueBits);
        return DexRegisterLocation::Kind::kInFpuRegister;

      case DexRegisterLocation::Kind::kInStack:
        DCHECK_EQ(location.GetValue() % kFrameSlotSize, 0);
        return IsUint<DexRegisterMap::kValueBits>(location.GetValue() / kFrameSlotSize)
            ? DexRegisterLocation::Kind::kInStack
            : DexRegisterLocation::Kind::kInStackLargeOffset;

      case DexRegisterLocation::Kind::kConstant:
        return IsUint<DexRegisterMap::kValueBits>(location.GetValue())
            ? DexRegisterLocation::Kind::kConstant
            : DexRegisterLocation::Kind::kConstantLargeValue;

      default:
        LOG(FATAL) << "Unexpected location kind: "
                   << DexRegisterLocation::PrettyDescriptor(location.GetInternalKind());
        UNREACHABLE();
    }
  }

  // Can `location` be turned into a short location?
  static bool CanBeEncodedAsShortLocation(const DexRegisterLocation& location) {
    switch (location.GetInternalKind()) {
      case DexRegisterLocation::Kind::kNone:
      case DexRegisterLocation::Kind::kInRegister:
      case DexRegisterLocation::Kind::kInFpuRegister:
        return true;

      case DexRegisterLocation::Kind::kInStack:
        DCHECK_EQ(location.GetValue() % kFrameSlotSize, 0);
        return IsUint<kValueBits>(location.GetValue() / kFrameSlotSize);

      case DexRegisterLocation::Kind::kConstant:
        return IsUint<kValueBits>(location.GetValue());

      default:
        UNREACHABLE();
    }
  }

  static size_t EntrySize(const DexRegisterLocation& location) {
    return CanBeEncodedAsShortLocation(location)
        ? DexRegisterMap::SingleShortEntrySize()
        : DexRegisterMap::SingleLargeEntrySize();
  }

  static size_t SingleShortEntrySize() {
    return sizeof(ShortLocation);
  }

  static size_t SingleLargeEntrySize() {
    return sizeof(DexRegisterLocation::Kind) + sizeof(int32_t);
  }

  size_t Size() const {
    return region_.size();
  }

  static constexpr int kLiveBitMaskOffset = 0;
  static constexpr int kFixedSize = kLiveBitMaskOffset;

 private:
  // Width of the kind "field" in a short location, in bits.
  static constexpr size_t kKindBits = 3;
  // Width of the value "field" in a short location, in bits.
  static constexpr size_t kValueBits = 5;

  static constexpr uint8_t kKindMask = (1 << kKindBits) - 1;
  static constexpr int32_t kValueMask = (1 << kValueBits) - 1;
  static constexpr size_t kKindOffset = 0;
  static constexpr size_t kValueOffset = kKindBits;

  static ShortLocation MakeShortLocation(DexRegisterLocation::Kind kind, int32_t value) {
    DCHECK(IsUint<kKindBits>(static_cast<uint8_t>(kind))) << static_cast<uint8_t>(kind);
    DCHECK(IsUint<kValueBits>(value)) << value;
    return (static_cast<uint8_t>(kind) & kKindMask) << kKindOffset
        | (value & kValueMask) << kValueOffset;
  }

  static DexRegisterLocation::Kind ExtractKindFromShortLocation(ShortLocation location) {
    uint8_t kind = (location >> kKindOffset) & kKindMask;
    DCHECK_LE(kind, static_cast<uint8_t>(DexRegisterLocation::Kind::kLastLocationKind));
    // We do not encode kNone locations in the stack map.
    DCHECK_NE(kind, static_cast<uint8_t>(DexRegisterLocation::Kind::kNone));
    return static_cast<DexRegisterLocation::Kind>(kind);
  }

  static int32_t ExtractValueFromShortLocation(ShortLocation location) {
    return (location >> kValueOffset) & kValueMask;
  }

  // Extract a location kind from the byte at position `offset`.
  DexRegisterLocation::Kind ExtractKindAtOffset(size_t offset) const {
    ShortLocation first_byte = region_.LoadUnaligned<ShortLocation>(offset);
    return ExtractKindFromShortLocation(first_byte);
  }

  MemoryRegion region_;

  friend class CodeInfo;
  friend class StackMapStream;
};

/**
 * A Stack Map holds compilation information for a specific PC necessary for:
 * - Mapping it to a dex PC,
 * - Knowing which stack entries are objects,
 * - Knowing which registers hold objects,
 * - Knowing the inlining information,
 * - Knowing the values of dex registers.
 *
 * The information is of the form:
 * [register_mask, stack_mask, dex_pc, native_pc_offset, dex_register_map_offset, inlining_info_offset].
 *
 * Note that register_mask is fixed size, but stack_mask is variable size, depending on the
 * stack size of a method.
 */
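// Illustrative sizing (an added example, derived from the CodeInfo offset
// helpers below): with a 2-byte stack mask, inline info present, and all
// "small" (1-byte) field encodings, one entry occupies 4 (register_mask)
// + 2 (stack_mask) + 1 (dex_pc) + 1 (native_pc_offset)
// + 1 (dex_register_map_offset) + 1 (inlining_info_offset) = 10 bytes.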
class StackMap {
 public:
  explicit StackMap(MemoryRegion region) : region_(region) {}

  uint32_t GetDexPc(const CodeInfo& info) const;

  void SetDexPc(const CodeInfo& info, uint32_t dex_pc);

  uint32_t GetNativePcOffset(const CodeInfo& info) const;

  void SetNativePcOffset(const CodeInfo& info, uint32_t native_pc_offset);

  uint32_t GetDexRegisterMapOffset(const CodeInfo& info) const;

  void SetDexRegisterMapOffset(const CodeInfo& info, uint32_t offset);

  uint32_t GetInlineDescriptorOffset(const CodeInfo& info) const;

  void SetInlineDescriptorOffset(const CodeInfo& info, uint32_t offset);

  uint32_t GetRegisterMask(const CodeInfo& info) const;

  void SetRegisterMask(const CodeInfo& info, uint32_t mask);

  MemoryRegion GetStackMask(const CodeInfo& info) const;

  void SetStackMask(const CodeInfo& info, const BitVector& sp_map) {
    MemoryRegion region = GetStackMask(info);
    for (size_t i = 0; i < region.size_in_bits(); i++) {
      region.StoreBit(i, sp_map.IsBitSet(i));
    }
  }

  bool HasDexRegisterMap(const CodeInfo& info) const {
    return GetDexRegisterMapOffset(info) != kNoDexRegisterMap;
  }

  bool HasInlineInfo(const CodeInfo& info) const {
    return GetInlineDescriptorOffset(info) != kNoInlineInfo;
  }

  bool Equals(const StackMap& other) const {
    return region_.pointer() == other.region_.pointer()
       && region_.size() == other.region_.size();
  }

  static size_t ComputeStackMapSize(size_t stack_mask_size,
                                    bool has_inline_info,
                                    bool is_small_inline_info,
                                    bool is_small_dex_map,
                                    bool is_small_dex_pc,
                                    bool is_small_native_pc);

  static size_t ComputeStackMapSize(size_t stack_mask_size,
                                    size_t inline_info_size,
                                    size_t dex_register_map_size,
                                    size_t dex_pc_max,
                                    size_t native_pc_max);

  // TODO: Revisit this abstraction if we allow a 3-byte encoding.
  typedef uint8_t kSmallEncoding;
  typedef uint32_t kLargeEncoding;
  static constexpr size_t kBytesForSmallEncoding = sizeof(kSmallEncoding);
  static constexpr size_t kBitsForSmallEncoding = kBitsPerByte * kBytesForSmallEncoding;
  static constexpr size_t kBytesForLargeEncoding = sizeof(kLargeEncoding);
  static constexpr size_t kBitsForLargeEncoding = kBitsPerByte * kBytesForLargeEncoding;

  // Special (invalid) offset for the DexRegisterMapOffset field meaning
  // that there is no Dex register map for this stack map.
  static constexpr uint32_t kNoDexRegisterMap = -1;
  static constexpr uint32_t kNoDexRegisterMapSmallEncoding =
      std::numeric_limits<kSmallEncoding>::max();

  // Special (invalid) offset for the InlineDescriptorOffset field meaning
  // that there is no inline info for this stack map.
  static constexpr uint32_t kNoInlineInfo = -1;
  static constexpr uint32_t kNoInlineInfoSmallEncoding =
      std::numeric_limits<kSmallEncoding>::max();

  // Returns the number of bytes needed for an entry in the StackMap.
  static size_t NumberOfBytesForEntry(bool small_encoding) {
    return small_encoding ? kBytesForSmallEncoding : kBytesForLargeEncoding;
  }

 private:
  // TODO: Instead of plain types such as "uint32_t", introduce
  // typedefs (and document the memory layout of StackMap).
  static constexpr int kRegisterMaskOffset = 0;
  static constexpr int kFixedSize = kRegisterMaskOffset + sizeof(uint32_t);
  static constexpr int kStackMaskOffset = kFixedSize;

  MemoryRegion region_;

  friend class CodeInfo;
  friend class StackMapStream;
};


/**
 * Wrapper around all compiler information collected for a method.
 * The information is of the form:
 * [overall_size, encoding_info, number_of_stack_maps, stack_mask_size, StackMap+, DexRegisterMap+, InlineInfo*].
 */
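// Byte layout of the fixed CodeInfo header, as implied by the offset constants
// below (added as an illustration): overall_size at offset 0 (4 bytes),
// encoding_info at offset 4 (1 byte of flag bits), number_of_stack_maps at
// offset 5 (4 bytes), stack_mask_size at offset 9 (4 bytes); the stack maps
// themselves start at kFixedSize = 13.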
class CodeInfo {
 public:
  explicit CodeInfo(MemoryRegion region) : region_(region) {}

  explicit CodeInfo(const void* data) {
    uint32_t size = reinterpret_cast<const uint32_t*>(data)[0];
    region_ = MemoryRegion(const_cast<void*>(data), size);
  }

  void SetEncoding(size_t inline_info_size,
                   size_t dex_register_map_size,
                   size_t dex_pc_max,
                   size_t native_pc_max) {
    if (inline_info_size != 0) {
      region_.StoreBit(kHasInlineInfoBitOffset, 1);
      region_.StoreBit(kHasSmallInlineInfoBitOffset, IsUint<StackMap::kBitsForSmallEncoding>(
          // + 1 to also encode kNoInlineInfo: if an inline info offset
          // is at 0xFF, we want to overflow to a larger encoding, because it will
          // conflict with kNoInlineInfo.
          // The offset is relative to the dex register map. TODO: Change this.
          inline_info_size + dex_register_map_size + 1));
    } else {
      region_.StoreBit(kHasInlineInfoBitOffset, 0);
      region_.StoreBit(kHasSmallInlineInfoBitOffset, 0);
    }
    region_.StoreBit(kHasSmallDexRegisterMapBitOffset,
                     // + 1 to also encode kNoDexRegisterMap: if a dex register map offset
                     // is at 0xFF, we want to overflow to a larger encoding, because it will
                     // conflict with kNoDexRegisterMap.
                     IsUint<StackMap::kBitsForSmallEncoding>(dex_register_map_size + 1));
    region_.StoreBit(kHasSmallDexPcBitOffset, IsUint<StackMap::kBitsForSmallEncoding>(dex_pc_max));
    region_.StoreBit(kHasSmallNativePcBitOffset,
                     IsUint<StackMap::kBitsForSmallEncoding>(native_pc_max));
  }
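  // Added note on the "+ 1" checks above: with dex_register_map_size == 256,
  // a map stored at the end of the Dex register maps region could sit at
  // offset 0xFF, which is reserved for kNoDexRegisterMapSmallEncoding;
  // IsUint<8>(256 + 1) is false, so the 4-byte encoding is selected instead.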

  bool HasInlineInfo() const {
    return region_.LoadBit(kHasInlineInfoBitOffset);
  }

  bool HasSmallInlineInfo() const {
    return region_.LoadBit(kHasSmallInlineInfoBitOffset);
  }

  bool HasSmallDexRegisterMap() const {
    return region_.LoadBit(kHasSmallDexRegisterMapBitOffset);
  }

  bool HasSmallNativePc() const {
    return region_.LoadBit(kHasSmallNativePcBitOffset);
  }

  bool HasSmallDexPc() const {
    return region_.LoadBit(kHasSmallDexPcBitOffset);
  }

  size_t ComputeStackMapRegisterMaskOffset() const {
    return StackMap::kRegisterMaskOffset;
  }

  size_t ComputeStackMapStackMaskOffset() const {
    return StackMap::kStackMaskOffset;
  }

  size_t ComputeStackMapDexPcOffset() const {
    return ComputeStackMapStackMaskOffset() + GetStackMaskSize();
  }

  size_t ComputeStackMapNativePcOffset() const {
    return ComputeStackMapDexPcOffset()
        + (HasSmallDexPc() ? sizeof(uint8_t) : sizeof(uint32_t));
  }

  size_t ComputeStackMapDexRegisterMapOffset() const {
    return ComputeStackMapNativePcOffset()
        + (HasSmallNativePc() ? sizeof(uint8_t) : sizeof(uint32_t));
  }

  size_t ComputeStackMapInlineInfoOffset() const {
    CHECK(HasInlineInfo());
    return ComputeStackMapDexRegisterMapOffset()
        + (HasSmallDexRegisterMap() ? sizeof(uint8_t) : sizeof(uint32_t));
  }

  StackMap GetStackMapAt(size_t i) const {
    size_t size = StackMapSize();
    return StackMap(GetStackMaps().Subregion(i * size, size));
  }

  uint32_t GetOverallSize() const {
    return region_.LoadUnaligned<uint32_t>(kOverallSizeOffset);
  }

  void SetOverallSize(uint32_t size) {
    region_.StoreUnaligned<uint32_t>(kOverallSizeOffset, size);
  }

  uint32_t GetStackMaskSize() const {
    return region_.LoadUnaligned<uint32_t>(kStackMaskSizeOffset);
  }

  void SetStackMaskSize(uint32_t size) {
    region_.StoreUnaligned<uint32_t>(kStackMaskSizeOffset, size);
  }

  size_t GetNumberOfStackMaps() const {
    return region_.LoadUnaligned<uint32_t>(kNumberOfStackMapsOffset);
  }

  void SetNumberOfStackMaps(uint32_t number_of_stack_maps) {
    region_.StoreUnaligned<uint32_t>(kNumberOfStackMapsOffset, number_of_stack_maps);
  }

  // Get the size of one stack map of this CodeInfo object, in bytes.
  // All stack maps of a CodeInfo have the same size.
  size_t StackMapSize() const {
    return StackMap::ComputeStackMapSize(GetStackMaskSize(),
                                         HasInlineInfo(),
                                         HasSmallInlineInfo(),
                                         HasSmallDexRegisterMap(),
                                         HasSmallDexPc(),
                                         HasSmallNativePc());
  }

  // Get the size of all the stack maps of this CodeInfo object, in bytes.
  size_t StackMapsSize() const {
    return StackMapSize() * GetNumberOfStackMaps();
  }

  size_t GetDexRegisterMapsOffset() const {
    return CodeInfo::kFixedSize + StackMapsSize();
  }

  uint32_t GetStackMapsOffset() const {
    return kFixedSize;
  }

  DexRegisterMap GetDexRegisterMapOf(StackMap stack_map, uint32_t number_of_dex_registers) const {
    DCHECK(stack_map.HasDexRegisterMap(*this));
    uint32_t offset = stack_map.GetDexRegisterMapOffset(*this) + GetDexRegisterMapsOffset();
    size_t size = ComputeDexRegisterMapSize(offset, number_of_dex_registers);
    return DexRegisterMap(region_.Subregion(offset, size));
  }

  InlineInfo GetInlineInfoOf(StackMap stack_map) const {
    DCHECK(stack_map.HasInlineInfo(*this));
    uint32_t offset = stack_map.GetInlineDescriptorOffset(*this) + GetDexRegisterMapsOffset();
    uint8_t depth = region_.LoadUnaligned<uint8_t>(offset);
    return InlineInfo(region_.Subregion(offset,
        InlineInfo::kFixedSize + depth * InlineInfo::SingleEntrySize()));
  }

  StackMap GetStackMapForDexPc(uint32_t dex_pc) const {
    for (size_t i = 0, e = GetNumberOfStackMaps(); i < e; ++i) {
      StackMap stack_map = GetStackMapAt(i);
      if (stack_map.GetDexPc(*this) == dex_pc) {
        return stack_map;
      }
    }
    LOG(FATAL) << "Unreachable";
    UNREACHABLE();
  }

  StackMap GetStackMapForNativePcOffset(uint32_t native_pc_offset) const {
    // TODO: stack maps are sorted by native pc, we can do a binary search.
    for (size_t i = 0, e = GetNumberOfStackMaps(); i < e; ++i) {
      StackMap stack_map = GetStackMapAt(i);
      if (stack_map.GetNativePcOffset(*this) == native_pc_offset) {
        return stack_map;
      }
    }
    LOG(FATAL) << "Unreachable";
    UNREACHABLE();
  }
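  // Illustrative lookup sequence (an added sketch; `code_data`, `pc_offset`
  // and `num_vregs` are hypothetical names):
  //   CodeInfo code_info(code_data);
  //   StackMap map = code_info.GetStackMapForNativePcOffset(pc_offset);
  //   uint32_t dex_pc = map.GetDexPc(code_info);
  //   if (map.HasDexRegisterMap(code_info)) {
  //     DexRegisterMap vregs = code_info.GetDexRegisterMapOf(map, num_vregs);
  //   }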

  void Dump(std::ostream& os, uint16_t number_of_dex_registers) const;
  void DumpStackMapHeader(std::ostream& os, size_t stack_map_num) const;

 private:
  // TODO: Instead of plain types such as "uint32_t", introduce
  // typedefs (and document the memory layout of CodeInfo).
  static constexpr int kOverallSizeOffset = 0;
  static constexpr int kEncodingInfoOffset = kOverallSizeOffset + sizeof(uint32_t);
  static constexpr int kNumberOfStackMapsOffset = kEncodingInfoOffset + sizeof(uint8_t);
  static constexpr int kStackMaskSizeOffset = kNumberOfStackMapsOffset + sizeof(uint32_t);
  static constexpr int kFixedSize = kStackMaskSizeOffset + sizeof(uint32_t);

  static constexpr int kHasInlineInfoBitOffset = (kEncodingInfoOffset * kBitsPerByte);
  static constexpr int kHasSmallInlineInfoBitOffset = kHasInlineInfoBitOffset + 1;
  static constexpr int kHasSmallDexRegisterMapBitOffset = kHasSmallInlineInfoBitOffset + 1;
  static constexpr int kHasSmallDexPcBitOffset = kHasSmallDexRegisterMapBitOffset + 1;
  static constexpr int kHasSmallNativePcBitOffset = kHasSmallDexPcBitOffset + 1;

  MemoryRegion GetStackMaps() const {
    return region_.size() == 0
        ? MemoryRegion()
        : region_.Subregion(kFixedSize, StackMapsSize());
  }

  // Compute the size of a Dex register map starting at offset `origin` in
  // `region_` and containing `number_of_dex_registers` locations.
  size_t ComputeDexRegisterMapSize(uint32_t origin, uint32_t number_of_dex_registers) const {
    // TODO: Ideally, we would like to use art::DexRegisterMap::Size or
    // art::DexRegisterMap::FindLocationOffset, but the DexRegisterMap is not
    // yet built.  Try to factor common code.
    size_t offset =
        origin + DexRegisterMap::GetDexRegisterMapLocationsOffset(number_of_dex_registers);

    // Create a temporary DexRegisterMap to be able to call DexRegisterMap::IsDexRegisterLive.
    DexRegisterMap only_live_mask(MemoryRegion(region_.Subregion(origin, offset - origin)));

    // Walk over the locations of all `number_of_dex_registers` Dex registers
    // to find where the map ends.
    for (uint16_t i = 0; i < number_of_dex_registers; ++i) {
      if (only_live_mask.IsDexRegisterLive(i)) {
        // Read the next byte and inspect its first 3 bits to decide
        // whether it is a short or a large location.
        DexRegisterMap::ShortLocation first_byte =
            region_.LoadUnaligned<DexRegisterMap::ShortLocation>(offset);
        DexRegisterLocation::Kind kind =
            DexRegisterMap::ExtractKindFromShortLocation(first_byte);
        if (DexRegisterLocation::IsShortLocationKind(kind)) {
          // Short location.  Skip the current byte.
          offset += DexRegisterMap::SingleShortEntrySize();
        } else {
          // Large location.  Skip the next 5 bytes.
          offset += DexRegisterMap::SingleLargeEntrySize();
        }
      }
    }
    size_t size = offset - origin;
    return size;
  }

  MemoryRegion region_;
  friend class StackMapStream;
};

}  // namespace art

#endif  // ART_RUNTIME_STACK_MAP_H_