stack_map.h revision ede7bf8a5cef965974e7c1edcf46a41fbe50a49d
/*
 * Copyright (C) 2014 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#ifndef ART_RUNTIME_STACK_MAP_H_
#define ART_RUNTIME_STACK_MAP_H_

#include "base/bit_vector.h"
#include "memory_region.h"
#include "utils.h"

namespace art {

// Size of a frame slot, in bytes.  This constant is a signed value,
// to please the compiler in arithmetic operations involving int32_t
// (signed) values.
static ssize_t constexpr kFrameSlotSize = 4;

// Word alignment required on ARM, in bytes.
static constexpr size_t kWordAlignment = 4;

/**
 * Classes in this file are wrappers around the stack map information backed
 * by a MemoryRegion. As such, they read from and write to that region; they
 * do not have fields of their own.
 */

/**
 * Inline information for a specific PC. The information is of the form:
 * [inlining_depth, [method_dex reference]+]
 */
class InlineInfo {
 public:
  explicit InlineInfo(MemoryRegion region) : region_(region) {}

  uint8_t GetDepth() const {
    return region_.LoadUnaligned<uint8_t>(kDepthOffset);
  }

  void SetDepth(uint8_t depth) {
    region_.StoreUnaligned<uint8_t>(kDepthOffset, depth);
  }

  uint32_t GetMethodReferenceIndexAtDepth(uint8_t depth) const {
    return region_.LoadUnaligned<uint32_t>(kFixedSize + depth * SingleEntrySize());
  }

  void SetMethodReferenceIndexAtDepth(uint8_t depth, uint32_t index) {
    region_.StoreUnaligned<uint32_t>(kFixedSize + depth * SingleEntrySize(), index);
  }

  static size_t SingleEntrySize() {
    return sizeof(uint32_t);
  }

 private:
  // TODO: Instead of plain types such as "uint8_t", introduce
  // typedefs (and document the memory layout of InlineInfo).
  static constexpr int kDepthOffset = 0;
  static constexpr int kFixedSize = kDepthOffset + sizeof(uint8_t);

  MemoryRegion region_;

  friend class CodeInfo;
  friend class StackMap;
  friend class StackMapStream;
};

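// Illustrative layout sketch for InlineInfo (not authoritative; derived from
// the accessors above, with `code_info` and `stack_map` as hypothetical
// variables): for an inlining depth of 2, the region occupies
// kFixedSize + 2 * SingleEntrySize() = 1 + 2 * 4 = 9 bytes:
//
//   byte  0     : depth (here 2)
//   bytes 1 - 4 : method reference index at depth 0
//   bytes 5 - 8 : method reference index at depth 1
//
// A reader would typically loop over the depth, e.g.:
//
//   InlineInfo inline_info = code_info.GetInlineInfoOf(stack_map);
//   for (uint8_t d = 0; d < inline_info.GetDepth(); ++d) {
//     uint32_t method_index = inline_info.GetMethodReferenceIndexAtDepth(d);
//     // ... resolve `method_index` against the dex file ...
//   }
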
// Dex register location container used by DexRegisterMap and StackMapStream.
class DexRegisterLocation {
 public:
  /*
   * The location kind used to populate the Dex register information in a
   * StackMapStream can either be:
   * - kNone: the register has no location yet, meaning it has not been set;
   * - kConstant: value holds the constant;
   * - kInStack: value holds the stack offset;
   * - kInRegister: value holds the physical register number;
   * - kInFpuRegister: value holds the physical register number.
   *
   * In addition, DexRegisterMap also uses these values:
   * - kInStackLargeOffset: value holds a "large" stack offset (greater than
   *   or equal to 128 bytes);
   * - kConstantLargeValue: value holds a "large" constant (one that cannot
   *   be encoded as a 5-bit unsigned value, i.e. negative or greater than
   *   or equal to 32).
   */
  enum class Kind : uint8_t {
    // Short location kinds, for entries fitting on one byte (3 bits
    // for the kind, 5 bits for the value) in a DexRegisterMap.
    kNone = 0,                // 0b000
    kInStack = 1,             // 0b001
    kInRegister = 2,          // 0b010
    kInFpuRegister = 3,       // 0b011
    kConstant = 4,            // 0b100

    // Large location kinds, requiring a 5-byte encoding (1 byte for the
    // kind, 4 bytes for the value).

    // Stack location at a large offset, meaning that the offset value
    // divided by the stack frame slot size (4 bytes) cannot fit on a
    // 5-bit unsigned integer (i.e., this offset value is greater than
    // or equal to 2^5 * 4 = 128 bytes).
    kInStackLargeOffset = 5,  // 0b101

    // Large constant, whose value cannot fit on a 5-bit unsigned integer
    // (i.e., it is negative, or greater than or equal to 2^5 = 32).
    kConstantLargeValue = 6,  // 0b110

    kLastLocationKind = kConstantLargeValue
  };

  static_assert(
      sizeof(Kind) == 1u,
      "art::DexRegisterLocation::Kind has a size different from one byte.");

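  // Encoding sketch (derived from DexRegisterMap below, not a separate
  // specification): a short location packs the kind into the low 3 bits of a
  // byte and the value into the high 5 bits, e.g. kInRegister (0b010) with
  // value 7 is encoded as the single byte 0b00111010 (0x3a).  A large
  // location is the kind byte (kInStackLargeOffset or kConstantLargeValue)
  // followed by the 32-bit value.
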
  static const char* PrettyDescriptor(Kind kind) {
    switch (kind) {
      case Kind::kNone:
        return "none";
      case Kind::kInStack:
        return "in stack";
      case Kind::kInRegister:
        return "in register";
      case Kind::kInFpuRegister:
        return "in fpu register";
      case Kind::kConstant:
        return "as constant";
      case Kind::kInStackLargeOffset:
        return "in stack (large offset)";
      case Kind::kConstantLargeValue:
        return "as constant (large value)";
      default:
        UNREACHABLE();
    }
  }

  static bool IsShortLocationKind(Kind kind) {
    switch (kind) {
      case Kind::kNone:
      case Kind::kInStack:
      case Kind::kInRegister:
      case Kind::kInFpuRegister:
      case Kind::kConstant:
        return true;

      case Kind::kInStackLargeOffset:
      case Kind::kConstantLargeValue:
        return false;

      default:
        UNREACHABLE();
    }
  }

  // Convert `kind` to a "surface" kind, i.e. one that doesn't include
  // any value with a "large" qualifier.
  // TODO: Introduce another enum type for the surface kind?
  static Kind ConvertToSurfaceKind(Kind kind) {
    switch (kind) {
      case Kind::kNone:
      case Kind::kInStack:
      case Kind::kInRegister:
      case Kind::kInFpuRegister:
      case Kind::kConstant:
        return kind;

      case Kind::kInStackLargeOffset:
        return Kind::kInStack;

      case Kind::kConstantLargeValue:
        return Kind::kConstant;

      default:
        UNREACHABLE();
    }
  }

  DexRegisterLocation(Kind kind, int32_t value)
      : kind_(kind), value_(value) {}

  // Get the "surface" kind of the location, i.e., the one that doesn't
  // include any value with a "large" qualifier.
  Kind GetKind() const {
    return ConvertToSurfaceKind(kind_);
  }

  // Get the value of the location.
  int32_t GetValue() const { return value_; }

  // Get the actual kind of the location.
  Kind GetInternalKind() const { return kind_; }

 private:
  Kind kind_;
  int32_t value_;
};

/**
 * Information on dex register values for a specific PC. The information is
 * of the form:
 * [location_kind, register_value]+
 * with each entry encoded on either 1 or 5 bytes (see
 * art::DexRegisterLocation::Kind).
 */
class DexRegisterMap {
 public:
  explicit DexRegisterMap(MemoryRegion region) : region_(region) {}

  // Short (compressed) location, fitting on one byte.
  typedef uint8_t ShortLocation;

  void SetRegisterInfo(size_t offset, const DexRegisterLocation& dex_register_location) {
    DexRegisterLocation::Kind kind = ComputeCompressedKind(dex_register_location);
    int32_t value = dex_register_location.GetValue();
    if (DexRegisterLocation::IsShortLocationKind(kind)) {
      // Short location.  Compress the kind and the value as a single byte.
      if (kind == DexRegisterLocation::Kind::kInStack) {
        // Instead of storing stack offsets expressed in bytes for
        // short stack locations, store slot offsets.  A stack offset
        // is a multiple of 4 (kFrameSlotSize).  This means that by
        // dividing it by 4, we can fit values from the [0, 128)
        // interval in a short stack location, and not just values
        // from the [0, 32) interval.
        DCHECK_EQ(value % kFrameSlotSize, 0);
        value /= kFrameSlotSize;
      }
      DCHECK(IsUint<kValueBits>(value)) << value;
      region_.StoreUnaligned<ShortLocation>(offset, MakeShortLocation(kind, value));
    } else {
      // Large location.  Write the location on one byte and the value
      // on 4 bytes.
      DCHECK(!IsUint<kValueBits>(value)) << value;
      if (kind == DexRegisterLocation::Kind::kInStackLargeOffset) {
        // Also divide large stack offsets by 4 for the sake of consistency.
        DCHECK_EQ(value % kFrameSlotSize, 0);
        value /= kFrameSlotSize;
      }
      // Data can be unaligned as the written Dex register locations can
      // either be 1-byte or 5-byte wide.  Use
      // art::MemoryRegion::StoreUnaligned instead of
      // art::MemoryRegion::Store to prevent unaligned word accesses on ARM.
      region_.StoreUnaligned<DexRegisterLocation::Kind>(offset, kind);
      region_.StoreUnaligned<int32_t>(offset + sizeof(DexRegisterLocation::Kind), value);
    }
  }

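  // Worked example (a sketch derived from the code above, not an additional
  // format requirement): a dex register living at stack byte offset 68 is
  // compressed to slot 17 (68 / kFrameSlotSize), which fits on 5 bits, so it
  // is stored as the single byte MakeShortLocation(kInStack, 17), i.e.
  // 0b10001001 (0x89).  A register at byte offset 1024 compresses to slot
  // 256, which does not fit on 5 bits, so it is stored on 5 bytes: the kind
  // byte kInStackLargeOffset (0x05) followed by the int32_t value 256.
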
  // Find the offset of the Dex register location number `dex_register_index`.
  size_t FindLocationOffset(uint16_t dex_register_index) const {
    size_t offset = kFixedSize;
    // Skip the first `dex_register_index` entries.
    for (uint16_t i = 0; i < dex_register_index; ++i) {
      // Read the next byte and inspect its first 3 bits to decide
      // whether it is a short or a large location.
      DexRegisterLocation::Kind kind = ExtractKindAtOffset(offset);
      if (DexRegisterLocation::IsShortLocationKind(kind)) {
        // Short location.  Skip the current byte.
        offset += SingleShortEntrySize();
      } else {
        // Large location.  Skip the next 5 bytes.
        offset += SingleLargeEntrySize();
      }
    }
    return offset;
  }

  // Get the surface kind.
  DexRegisterLocation::Kind GetLocationKind(uint16_t dex_register_index) const {
    return DexRegisterLocation::ConvertToSurfaceKind(GetLocationInternalKind(dex_register_index));
  }

  // Get the internal kind.
  DexRegisterLocation::Kind GetLocationInternalKind(uint16_t dex_register_index) const {
    size_t offset = FindLocationOffset(dex_register_index);
    return ExtractKindAtOffset(offset);
  }

  // TODO: Rename as GetDexRegisterLocation?
  DexRegisterLocation GetLocationKindAndValue(uint16_t dex_register_index) const {
    size_t offset = FindLocationOffset(dex_register_index);
    // Read the first byte and inspect its first 3 bits to get the location kind.
    ShortLocation first_byte = region_.LoadUnaligned<ShortLocation>(offset);
    DexRegisterLocation::Kind kind = ExtractKindFromShortLocation(first_byte);
    if (DexRegisterLocation::IsShortLocationKind(kind)) {
      // Short location.  Extract the value from the remaining 5 bits.
      int32_t value = ExtractValueFromShortLocation(first_byte);
      if (kind == DexRegisterLocation::Kind::kInStack) {
        // Convert the stack slot (short) offset to a byte offset value.
        value *= kFrameSlotSize;
      }
      return DexRegisterLocation(kind, value);
    } else {
      // Large location.  Read the next four bytes to get the value.
      int32_t value = region_.LoadUnaligned<int32_t>(offset + sizeof(DexRegisterLocation::Kind));
      if (kind == DexRegisterLocation::Kind::kInStackLargeOffset) {
        // Convert the stack slot (large) offset to a byte offset value.
        value *= kFrameSlotSize;
      }
      return DexRegisterLocation(kind, value);
    }
  }

  int32_t GetStackOffsetInBytes(uint16_t dex_register_index) const {
    DexRegisterLocation location = GetLocationKindAndValue(dex_register_index);
    DCHECK(location.GetKind() == DexRegisterLocation::Kind::kInStack);
    // GetLocationKindAndValue returns the offset in bytes.
    return location.GetValue();
  }

  int32_t GetConstant(uint16_t dex_register_index) const {
    DexRegisterLocation location = GetLocationKindAndValue(dex_register_index);
    DCHECK(location.GetKind() == DexRegisterLocation::Kind::kConstant);
    return location.GetValue();
  }

  int32_t GetMachineRegister(uint16_t dex_register_index) const {
    DexRegisterLocation location = GetLocationKindAndValue(dex_register_index);
    DCHECK(location.GetInternalKind() == DexRegisterLocation::Kind::kInRegister
           || location.GetInternalKind() == DexRegisterLocation::Kind::kInFpuRegister)
        << DexRegisterLocation::PrettyDescriptor(location.GetInternalKind());
    return location.GetValue();
  }

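  // Usage sketch (illustrative only; `vreg` and the local variables are made
  // up): a caller can dispatch on the surface kind before using the typed
  // accessors above, e.g.:
  //
  //   switch (dex_register_map.GetLocationKind(vreg)) {
  //     case DexRegisterLocation::Kind::kInStack:
  //       offset = dex_register_map.GetStackOffsetInBytes(vreg);
  //       break;
  //     case DexRegisterLocation::Kind::kConstant:
  //       constant = dex_register_map.GetConstant(vreg);
  //       break;
  //     case DexRegisterLocation::Kind::kInRegister:
  //     case DexRegisterLocation::Kind::kInFpuRegister:
  //       reg = dex_register_map.GetMachineRegister(vreg);
  //       break;
  //     default:
  //       break;  // kNone: the register has no location.
  //   }
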
  // Compute the compressed kind of `location`.
  static DexRegisterLocation::Kind ComputeCompressedKind(const DexRegisterLocation& location) {
    switch (location.GetInternalKind()) {
      case DexRegisterLocation::Kind::kNone:
        DCHECK_EQ(location.GetValue(), 0);
        return DexRegisterLocation::Kind::kNone;

      case DexRegisterLocation::Kind::kInRegister:
        DCHECK_GE(location.GetValue(), 0);
        DCHECK_LT(location.GetValue(), 1 << DexRegisterMap::kValueBits);
        return DexRegisterLocation::Kind::kInRegister;

      case DexRegisterLocation::Kind::kInFpuRegister:
        DCHECK_GE(location.GetValue(), 0);
        DCHECK_LT(location.GetValue(), 1 << DexRegisterMap::kValueBits);
        return DexRegisterLocation::Kind::kInFpuRegister;

      case DexRegisterLocation::Kind::kInStack:
        DCHECK_EQ(location.GetValue() % kFrameSlotSize, 0);
        return IsUint<DexRegisterMap::kValueBits>(location.GetValue() / kFrameSlotSize)
            ? DexRegisterLocation::Kind::kInStack
            : DexRegisterLocation::Kind::kInStackLargeOffset;

      case DexRegisterLocation::Kind::kConstant:
        return IsUint<DexRegisterMap::kValueBits>(location.GetValue())
            ? DexRegisterLocation::Kind::kConstant
            : DexRegisterLocation::Kind::kConstantLargeValue;

      default:
        LOG(FATAL) << "Unexpected location kind "
                   << DexRegisterLocation::PrettyDescriptor(location.GetInternalKind());
        UNREACHABLE();
    }
  }

  // Can `location` be turned into a short location?
  static bool CanBeEncodedAsShortLocation(const DexRegisterLocation& location) {
    switch (location.GetInternalKind()) {
      case DexRegisterLocation::Kind::kNone:
      case DexRegisterLocation::Kind::kInRegister:
      case DexRegisterLocation::Kind::kInFpuRegister:
        return true;

      case DexRegisterLocation::Kind::kInStack:
        DCHECK_EQ(location.GetValue() % kFrameSlotSize, 0);
        return IsUint<kValueBits>(location.GetValue() / kFrameSlotSize);

      case DexRegisterLocation::Kind::kConstant:
        return IsUint<kValueBits>(location.GetValue());

      default:
        UNREACHABLE();
    }
  }

  static size_t EntrySize(const DexRegisterLocation& location) {
    return CanBeEncodedAsShortLocation(location)
        ? DexRegisterMap::SingleShortEntrySize()
        : DexRegisterMap::SingleLargeEntrySize();
  }

  static size_t SingleShortEntrySize() {
    return sizeof(ShortLocation);
  }

  static size_t SingleLargeEntrySize() {
    return sizeof(DexRegisterLocation::Kind) + sizeof(int32_t);
  }

  size_t Size() const {
    return region_.size();
  }

  static constexpr int kFixedSize = 0;

 private:
  // Width of the kind "field" in a short location, in bits.
  static constexpr size_t kKindBits = 3;
  // Width of the value "field" in a short location, in bits.
  static constexpr size_t kValueBits = 5;

  static constexpr uint8_t kKindMask = (1 << kKindBits) - 1;
  static constexpr int32_t kValueMask = (1 << kValueBits) - 1;
  static constexpr size_t kKindOffset = 0;
  static constexpr size_t kValueOffset = kKindBits;

  static ShortLocation MakeShortLocation(DexRegisterLocation::Kind kind, int32_t value) {
    DCHECK(IsUint<kKindBits>(static_cast<uint8_t>(kind))) << static_cast<uint8_t>(kind);
    DCHECK(IsUint<kValueBits>(value)) << value;
    return (static_cast<uint8_t>(kind) & kKindMask) << kKindOffset
        | (value & kValueMask) << kValueOffset;
  }

  static DexRegisterLocation::Kind ExtractKindFromShortLocation(ShortLocation location) {
    uint8_t kind = (location >> kKindOffset) & kKindMask;
    DCHECK_LE(kind, static_cast<uint8_t>(DexRegisterLocation::Kind::kLastLocationKind));
    return static_cast<DexRegisterLocation::Kind>(kind);
  }

  static int32_t ExtractValueFromShortLocation(ShortLocation location) {
    return (location >> kValueOffset) & kValueMask;
  }

  // Extract a location kind from the byte at position `offset`.
  DexRegisterLocation::Kind ExtractKindAtOffset(size_t offset) const {
    ShortLocation first_byte = region_.LoadUnaligned<ShortLocation>(offset);
    return ExtractKindFromShortLocation(first_byte);
  }

  MemoryRegion region_;

  friend class CodeInfo;
  friend class StackMapStream;
};

/**
 * A Stack Map holds compilation information for a specific PC necessary for:
 * - Mapping it to a dex PC,
 * - Knowing which stack entries are objects,
 * - Knowing which registers hold objects,
 * - Knowing the inlining information,
 * - Knowing the values of dex registers.
 *
 * The information is of the form:
 * [dex_pc, native_pc_offset, dex_register_map_offset, inlining_info_offset, register_mask, stack_mask].
 *
 * Note that register_mask is fixed size, but stack_mask is variable size, depending on the
 * stack size of a method.
 */
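// Byte layout sketch of the fixed-size part of a StackMap, derived from the
// kXxxOffset constants below (informational only):
//
//   bytes  0 -  3 : dex_pc
//   bytes  4 -  7 : native_pc_offset
//   bytes  8 - 11 : dex_register_map_offset
//   bytes 12 - 15 : inline_descriptor_offset
//   bytes 16 - 19 : register_mask
//   bytes 20 ...  : stack_mask (variable size)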
class StackMap {
 public:
  explicit StackMap(MemoryRegion region) : region_(region) {}

  uint32_t GetDexPc() const {
    return region_.LoadUnaligned<uint32_t>(kDexPcOffset);
  }

  void SetDexPc(uint32_t dex_pc) {
    region_.StoreUnaligned<uint32_t>(kDexPcOffset, dex_pc);
  }

  uint32_t GetNativePcOffset() const {
    return region_.LoadUnaligned<uint32_t>(kNativePcOffsetOffset);
  }

  void SetNativePcOffset(uint32_t native_pc_offset) {
    region_.StoreUnaligned<uint32_t>(kNativePcOffsetOffset, native_pc_offset);
  }

  uint32_t GetDexRegisterMapOffset() const {
    return region_.LoadUnaligned<uint32_t>(kDexRegisterMapOffsetOffset);
  }

  void SetDexRegisterMapOffset(uint32_t offset) {
    region_.StoreUnaligned<uint32_t>(kDexRegisterMapOffsetOffset, offset);
  }

  uint32_t GetInlineDescriptorOffset() const {
    return region_.LoadUnaligned<uint32_t>(kInlineDescriptorOffsetOffset);
  }

  void SetInlineDescriptorOffset(uint32_t offset) {
    region_.StoreUnaligned<uint32_t>(kInlineDescriptorOffsetOffset, offset);
  }

  uint32_t GetRegisterMask() const {
    return region_.LoadUnaligned<uint32_t>(kRegisterMaskOffset);
  }

  void SetRegisterMask(uint32_t mask) {
    region_.StoreUnaligned<uint32_t>(kRegisterMaskOffset, mask);
  }

  MemoryRegion GetStackMask() const {
    return region_.Subregion(kStackMaskOffset, StackMaskSize());
  }

  void SetStackMask(const BitVector& sp_map) {
    MemoryRegion region = GetStackMask();
    for (size_t i = 0; i < region.size_in_bits(); i++) {
      region.StoreBit(i, sp_map.IsBitSet(i));
    }
  }

  bool HasDexRegisterMap() const {
    return GetDexRegisterMapOffset() != kNoDexRegisterMap;
  }

  bool HasInlineInfo() const {
    return GetInlineDescriptorOffset() != kNoInlineInfo;
  }

  bool Equals(const StackMap& other) const {
    return region_.pointer() == other.region_.pointer()
       && region_.size() == other.region_.size();
  }

  static size_t ComputeStackMapSize(size_t stack_mask_size) {
    return StackMap::kFixedSize + stack_mask_size;
  }

  // Special (invalid) offset for the DexRegisterMapOffset field meaning
  // that there is no Dex register map for this stack map.
  static constexpr uint32_t kNoDexRegisterMap = -1;

  // Special (invalid) offset for the InlineDescriptorOffset field meaning
  // that there is no inline info for this stack map.
  static constexpr uint32_t kNoInlineInfo = -1;

 private:
  // TODO: Instead of plain types such as "uint32_t", introduce
  // typedefs (and document the memory layout of StackMap).
  static constexpr int kDexPcOffset = 0;
  static constexpr int kNativePcOffsetOffset = kDexPcOffset + sizeof(uint32_t);
  static constexpr int kDexRegisterMapOffsetOffset = kNativePcOffsetOffset + sizeof(uint32_t);
  static constexpr int kInlineDescriptorOffsetOffset =
      kDexRegisterMapOffsetOffset + sizeof(uint32_t);
  static constexpr int kRegisterMaskOffset = kInlineDescriptorOffsetOffset + sizeof(uint32_t);
  static constexpr int kFixedSize = kRegisterMaskOffset + sizeof(uint32_t);
  static constexpr int kStackMaskOffset = kFixedSize;

  size_t StackMaskSize() const { return region_.size() - kFixedSize; }

  MemoryRegion region_;

  friend class CodeInfo;
  friend class StackMapStream;
};


/**
 * Wrapper around all compiler information collected for a method.
 * The information is of the form:
 * [overall_size, number_of_stack_maps, stack_mask_size, StackMap+, DexRegisterInfo+, InlineInfo*].
 */
class CodeInfo {
 public:
  explicit CodeInfo(MemoryRegion region) : region_(region) {}

  explicit CodeInfo(const void* data) {
    uint32_t size = reinterpret_cast<const uint32_t*>(data)[0];
    region_ = MemoryRegion(const_cast<void*>(data), size);
  }

  StackMap GetStackMapAt(size_t i) const {
    size_t size = StackMapSize();
    return StackMap(GetStackMaps().Subregion(i * size, size));
  }

  uint32_t GetOverallSize() const {
    return region_.Load<uint32_t>(kOverallSizeOffset);
  }

  void SetOverallSize(uint32_t size) {
    region_.Store<uint32_t>(kOverallSizeOffset, size);
  }

  uint32_t GetStackMaskSize() const {
    return region_.Load<uint32_t>(kStackMaskSizeOffset);
  }

  void SetStackMaskSize(uint32_t size) {
    region_.Store<uint32_t>(kStackMaskSizeOffset, size);
  }

  size_t GetNumberOfStackMaps() const {
    return region_.Load<uint32_t>(kNumberOfStackMapsOffset);
  }

  void SetNumberOfStackMaps(uint32_t number_of_stack_maps) {
    region_.Store<uint32_t>(kNumberOfStackMapsOffset, number_of_stack_maps);
  }

  // Get the size of one stack map of this CodeInfo object, in bytes.
  // All stack maps of a CodeInfo have the same size.
  size_t StackMapSize() const {
    return StackMap::ComputeStackMapSize(GetStackMaskSize());
  }

  // Get the size of all the stack maps of this CodeInfo object, in bytes.
  size_t StackMapsSize() const {
    return StackMapSize() * GetNumberOfStackMaps();
  }

  uint32_t GetStackMapsOffset() const {
    return kFixedSize;
  }

  DexRegisterMap GetDexRegisterMapOf(StackMap stack_map, uint32_t number_of_dex_registers) const {
    DCHECK(stack_map.HasDexRegisterMap());
    uint32_t offset = stack_map.GetDexRegisterMapOffset();
    size_t size = ComputeDexRegisterMapSize(offset, number_of_dex_registers);
    return DexRegisterMap(region_.Subregion(offset, size));
  }

  InlineInfo GetInlineInfoOf(StackMap stack_map) const {
    DCHECK(stack_map.HasInlineInfo());
    uint32_t offset = stack_map.GetInlineDescriptorOffset();
    uint8_t depth = region_.Load<uint8_t>(offset);
    return InlineInfo(region_.Subregion(offset,
        InlineInfo::kFixedSize + depth * InlineInfo::SingleEntrySize()));
  }

  StackMap GetStackMapForDexPc(uint32_t dex_pc) const {
    for (size_t i = 0, e = GetNumberOfStackMaps(); i < e; ++i) {
      StackMap stack_map = GetStackMapAt(i);
      if (stack_map.GetDexPc() == dex_pc) {
        return stack_map;
      }
    }
    LOG(FATAL) << "Unreachable";
    UNREACHABLE();
  }

  StackMap GetStackMapForNativePcOffset(uint32_t native_pc_offset) const {
    // TODO: stack maps are sorted by native pc; we could do a binary search.
    for (size_t i = 0, e = GetNumberOfStackMaps(); i < e; ++i) {
      StackMap stack_map = GetStackMapAt(i);
      if (stack_map.GetNativePcOffset() == native_pc_offset) {
        return stack_map;
      }
    }
    LOG(FATAL) << "Unreachable";
    UNREACHABLE();
  }

 private:
  // TODO: Instead of plain types such as "uint32_t", introduce
  // typedefs (and document the memory layout of CodeInfo).
  static constexpr int kOverallSizeOffset = 0;
  static constexpr int kNumberOfStackMapsOffset = kOverallSizeOffset + sizeof(uint32_t);
  static constexpr int kStackMaskSizeOffset = kNumberOfStackMapsOffset + sizeof(uint32_t);
  static constexpr int kFixedSize = kStackMaskSizeOffset + sizeof(uint32_t);

  MemoryRegion GetStackMaps() const {
    return region_.size() == 0
        ? MemoryRegion()
        : region_.Subregion(kFixedSize, StackMapsSize());
  }

  // Compute the size of a Dex register map starting at offset `origin` in
  // `region_` and containing `number_of_dex_registers` locations.
  size_t ComputeDexRegisterMapSize(uint32_t origin, uint32_t number_of_dex_registers) const {
    // TODO: Ideally, we would like to use art::DexRegisterMap::Size or
    // art::DexRegisterMap::FindLocationOffset, but the DexRegisterMap is not
    // yet built.  Try to factor common code.
    size_t offset = origin + DexRegisterMap::kFixedSize;
    // Walk over all `number_of_dex_registers` entries.
    for (uint16_t i = 0; i < number_of_dex_registers; ++i) {
      // Read the next byte and inspect its first 3 bits to decide
      // whether it is a short or a large location.
      DexRegisterMap::ShortLocation first_byte =
          region_.LoadUnaligned<DexRegisterMap::ShortLocation>(offset);
      DexRegisterLocation::Kind kind =
          DexRegisterMap::ExtractKindFromShortLocation(first_byte);
      if (DexRegisterLocation::IsShortLocationKind(kind)) {
        // Short location.  Skip the current byte.
        offset += DexRegisterMap::SingleShortEntrySize();
      } else {
        // Large location.  Skip the next 5 bytes.
        offset += DexRegisterMap::SingleLargeEntrySize();
      }
    }
    size_t size = offset - origin;
    return size;
  }

  MemoryRegion region_;
  friend class StackMapStream;
};
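
// End-to-end usage sketch (illustrative only; `stack_map_data`,
// `native_pc_offset`, `number_of_dex_registers` and `vreg` are hypothetical
// inputs, and error handling is omitted): a stack walker holding a pointer
// to a method's stack map data could do:
//
//   CodeInfo code_info(stack_map_data);
//   StackMap stack_map = code_info.GetStackMapForNativePcOffset(native_pc_offset);
//   uint32_t dex_pc = stack_map.GetDexPc();
//   if (stack_map.HasDexRegisterMap()) {
//     DexRegisterMap map = code_info.GetDexRegisterMapOf(stack_map, number_of_dex_registers);
//     DexRegisterLocation location = map.GetLocationKindAndValue(vreg);
//     // ... interpret `location` according to its kind ...
//   }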

}  // namespace art

#endif  // ART_RUNTIME_STACK_MAP_H_