/*
 * Copyright (C) 2014 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#ifndef ART_RUNTIME_STACK_MAP_H_
#define ART_RUNTIME_STACK_MAP_H_

#include "base/bit_vector.h"
#include "memory_region.h"
#include "utils.h"

namespace art {

// Size of a frame slot, in bytes.  This constant is signed so that
// arithmetic involving int32_t (signed) values does not trigger
// signed/unsigned conversion warnings.
static ssize_t constexpr kFrameSlotSize = 4;

// Word alignment required on ARM, in bytes.
static constexpr size_t kWordAlignment = 4;

// Size of Dex virtual registers.
static size_t constexpr kVRegSize = 4;

class CodeInfo;

/**
 * The classes in this file are wrappers around stack map information backed
 * by a MemoryRegion. As such, they read from and write to that region directly;
 * they do not have fields of their own.
 */

/**
 * Inline information for a specific PC. The information is of the form:
 * [inlining_depth, [method_dex_reference_index]+]
 */
class InlineInfo {
 public:
  explicit InlineInfo(MemoryRegion region) : region_(region) {}

  uint8_t GetDepth() const {
    return region_.LoadUnaligned<uint8_t>(kDepthOffset);
  }

  void SetDepth(uint8_t depth) {
    region_.StoreUnaligned<uint8_t>(kDepthOffset, depth);
  }

  uint32_t GetMethodReferenceIndexAtDepth(uint8_t depth) const {
    return region_.LoadUnaligned<uint32_t>(kFixedSize + depth * SingleEntrySize());
  }

  void SetMethodReferenceIndexAtDepth(uint8_t depth, uint32_t index) {
    region_.StoreUnaligned<uint32_t>(kFixedSize + depth * SingleEntrySize(), index);
  }

  static size_t SingleEntrySize() {
    return sizeof(uint32_t);
  }

 private:
  // TODO: Instead of plain types such as "uint8_t", introduce
  // typedefs (and document the memory layout of InlineInfo).
  static constexpr int kDepthOffset = 0;
  static constexpr int kFixedSize = kDepthOffset + sizeof(uint8_t);

  MemoryRegion region_;

  friend class CodeInfo;
  friend class StackMap;
  friend class StackMapStream;
};
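
// A minimal usage sketch (illustrative only, not part of the runtime API):
// an InlineInfo is just a view over a MemoryRegion, so writing through it
// mutates the underlying bytes.  The buffer and the values below are
// made-up assumptions for the example.
//
//   uint8_t buffer[1 + 2 * sizeof(uint32_t)] = {};  // Depth byte + two method reference indices.
//   InlineInfo inline_info(MemoryRegion(buffer, sizeof(buffer)));
//   inline_info.SetDepth(2);
//   inline_info.SetMethodReferenceIndexAtDepth(0, 42u);    // Outermost inlined method.
//   inline_info.SetMethodReferenceIndexAtDepth(1, 1337u);  // Innermost inlined method.
//   DCHECK_EQ(inline_info.GetDepth(), 2u);
//   DCHECK_EQ(inline_info.GetMethodReferenceIndexAtDepth(1), 1337u);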

// Dex register location container used by DexRegisterMap and StackMapStream.
class DexRegisterLocation {
 public:
  /*
   * The location kind used to populate the Dex register information in a
   * StackMapStream can either be:
   * - kNone: the register has no location yet, meaning it has not been set;
   * - kConstant: value holds the constant;
   * - kInStack: value holds the stack offset;
   * - kInRegister: value holds the physical register number;
   * - kInFpuRegister: value holds the physical FPU register number.
   *
   * In addition, DexRegisterMap also uses these values:
   * - kInStackLargeOffset: value holds a "large" stack offset (greater than
   *   or equal to 128 bytes);
   * - kConstantLargeValue: value holds a "large" constant (one that does not
   *   fit in the 5-bit value field of a short location, i.e. a negative
   *   value or a value greater than or equal to 32).
   */
  enum class Kind : uint8_t {
    // Short location kinds, for entries fitting on one byte (3 bits
    // for the kind, 5 bits for the value) in a DexRegisterMap.
    kNone = 0,                // 0b000
    kInStack = 1,             // 0b001
    kInRegister = 2,          // 0b010
    kInFpuRegister = 3,       // 0b011
    kConstant = 4,            // 0b100

    // Large location kinds, requiring a 5-byte encoding (1 byte for the
    // kind, 4 bytes for the value).

    // Stack location at a large offset, meaning that the offset value
    // divided by the stack frame slot size (4 bytes) cannot fit on a
    // 5-bit unsigned integer (i.e., this offset value is greater than
    // or equal to 2^5 * 4 = 128 bytes).
    kInStackLargeOffset = 5,  // 0b101

    // Large constant, that cannot fit in the 5-bit unsigned value field
    // of a short location (i.e., a negative value, or a value greater
    // than or equal to 2^5 = 32).
    kConstantLargeValue = 6,  // 0b110

    kLastLocationKind = kConstantLargeValue
  };

  static_assert(
      sizeof(Kind) == 1u,
      "art::DexRegisterLocation::Kind has a size different from one byte.");

  static const char* PrettyDescriptor(Kind kind) {
    switch (kind) {
      case Kind::kNone:
        return "none";
      case Kind::kInStack:
        return "in stack";
      case Kind::kInRegister:
        return "in register";
      case Kind::kInFpuRegister:
        return "in fpu register";
      case Kind::kConstant:
        return "as constant";
      case Kind::kInStackLargeOffset:
        return "in stack (large offset)";
      case Kind::kConstantLargeValue:
        return "as constant (large value)";
      default:
        UNREACHABLE();
    }
  }

  static bool IsShortLocationKind(Kind kind) {
    switch (kind) {
      case Kind::kNone:
      case Kind::kInStack:
      case Kind::kInRegister:
      case Kind::kInFpuRegister:
      case Kind::kConstant:
        return true;

      case Kind::kInStackLargeOffset:
      case Kind::kConstantLargeValue:
        return false;

      default:
        UNREACHABLE();
    }
  }

  // Convert `kind` to a "surface" kind, i.e. one that doesn't include
  // any value with a "large" qualifier.
  // TODO: Introduce another enum type for the surface kind?
  static Kind ConvertToSurfaceKind(Kind kind) {
    switch (kind) {
      case Kind::kNone:
      case Kind::kInStack:
      case Kind::kInRegister:
      case Kind::kInFpuRegister:
      case Kind::kConstant:
        return kind;

      case Kind::kInStackLargeOffset:
        return Kind::kInStack;

      case Kind::kConstantLargeValue:
        return Kind::kConstant;

      default:
        UNREACHABLE();
    }
  }

  DexRegisterLocation(Kind kind, int32_t value)
      : kind_(kind), value_(value) {}

  static DexRegisterLocation None() {
    return DexRegisterLocation(Kind::kNone, 0);
  }

  // Get the "surface" kind of the location, i.e., the one that doesn't
  // include any value with a "large" qualifier.
  Kind GetKind() const {
    return ConvertToSurfaceKind(kind_);
  }

  // Get the value of the location.
  int32_t GetValue() const { return value_; }

  // Get the actual kind of the location.
  Kind GetInternalKind() const { return kind_; }

 private:
  Kind kind_;
  int32_t value_;
};
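
// A minimal sketch (illustrative only): the difference between the internal
// kind and the "surface" kind of a location.  The 512-byte offset is an
// arbitrary assumption, chosen to be too large for a short encoding.
//
//   DexRegisterLocation location(DexRegisterLocation::Kind::kInStackLargeOffset, 512);
//   // The internal kind keeps the "large" qualifier used by the encoder...
//   DCHECK(location.GetInternalKind() == DexRegisterLocation::Kind::kInStackLargeOffset);
//   // ...while the surface kind only says where the value lives.
//   DCHECK(location.GetKind() == DexRegisterLocation::Kind::kInStack);
//   DCHECK_EQ(location.GetValue(), 512);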

/**
 * Information on dex register values for a specific PC. The information is
 * of the form:
 * [live_bit_mask, DexRegisterLocation+].
 * DexRegisterLocations are either 1 or 5 bytes wide (see art::DexRegisterLocation::Kind).
 */
class DexRegisterMap {
 public:
  explicit DexRegisterMap(MemoryRegion region) : region_(region) {}

  // Short (compressed) location, fitting on one byte.
  typedef uint8_t ShortLocation;

  static size_t LiveBitMaskSize(uint16_t number_of_dex_registers) {
    return RoundUp(number_of_dex_registers, kBitsPerByte) / kBitsPerByte;
  }

  void SetLiveBitMask(size_t offset,
                      uint16_t number_of_dex_registers,
                      const BitVector& live_dex_registers_mask) {
    for (uint16_t i = 0; i < number_of_dex_registers; i++) {
      region_.StoreBit(offset + i, live_dex_registers_mask.IsBitSet(i));
    }
  }

  void SetRegisterInfo(size_t offset, const DexRegisterLocation& dex_register_location) {
    DexRegisterLocation::Kind kind = ComputeCompressedKind(dex_register_location);
    int32_t value = dex_register_location.GetValue();
    if (DexRegisterLocation::IsShortLocationKind(kind)) {
      // Short location.  Compress the kind and the value as a single byte.
      if (kind == DexRegisterLocation::Kind::kInStack) {
        // Instead of storing stack offsets expressed in bytes for
        // short stack locations, store slot offsets.  A stack offset
        // is a multiple of 4 (kFrameSlotSize).  This means that by
        // dividing it by 4, we can fit values from the [0, 128)
        // interval in a short stack location, and not just values
        // from the [0, 32) interval.
        DCHECK_EQ(value % kFrameSlotSize, 0);
        value /= kFrameSlotSize;
      }
      DCHECK(IsUint<kValueBits>(value)) << value;
      region_.StoreUnaligned<ShortLocation>(offset, MakeShortLocation(kind, value));
    } else {
      // Large location.  Write the kind on one byte and the value
      // on four bytes.
      DCHECK(!IsUint<kValueBits>(value)) << value;
      if (kind == DexRegisterLocation::Kind::kInStackLargeOffset) {
        // Also divide large stack offsets by 4 for the sake of consistency.
        DCHECK_EQ(value % kFrameSlotSize, 0);
        value /= kFrameSlotSize;
      }
      // Data can be unaligned, as the written Dex register locations can
      // be either 1 or 5 bytes wide.  Use
      // art::MemoryRegion::StoreUnaligned instead of
      // art::MemoryRegion::Store to prevent unaligned word accesses on ARM.
      region_.StoreUnaligned<DexRegisterLocation::Kind>(offset, kind);
      region_.StoreUnaligned<int32_t>(offset + sizeof(DexRegisterLocation::Kind), value);
    }
  }

  bool IsDexRegisterLive(uint16_t dex_register_index) const {
    size_t offset = kFixedSize;
    return region_.LoadBit(offset + dex_register_index);
  }

  static constexpr size_t kNoDexRegisterLocationOffset = -1;

  static size_t GetDexRegisterMapLocationsOffset(uint16_t number_of_dex_registers) {
    return kLiveBitMaskOffset + LiveBitMaskSize(number_of_dex_registers);
  }

  // Find the offset of the Dex register location number `dex_register_index`.
  size_t FindLocationOffset(uint16_t dex_register_index, uint16_t number_of_dex_registers) const {
    if (!IsDexRegisterLive(dex_register_index)) return kNoDexRegisterLocationOffset;
    size_t offset = GetDexRegisterMapLocationsOffset(number_of_dex_registers);
    // Skip the location entries of the first `dex_register_index` Dex
    // registers (only live registers have an entry).
    for (uint16_t i = 0; i < dex_register_index; ++i) {
      if (IsDexRegisterLive(i)) {
        // Read the next byte and inspect its low 3 bits to decide
        // whether it is a short or a large location.
        DexRegisterLocation::Kind kind = ExtractKindAtOffset(offset);
        if (DexRegisterLocation::IsShortLocationKind(kind)) {
          // Short location.  Skip the current byte.
          offset += SingleShortEntrySize();
        } else {
          // Large location.  Skip the next 5 bytes.
          offset += SingleLargeEntrySize();
        }
      }
    }
    return offset;
  }

  // Get the surface kind.
  DexRegisterLocation::Kind GetLocationKind(uint16_t dex_register_index,
                                            uint16_t number_of_dex_registers) const {
    return IsDexRegisterLive(dex_register_index)
        ? DexRegisterLocation::ConvertToSurfaceKind(
              GetLocationInternalKind(dex_register_index, number_of_dex_registers))
        : DexRegisterLocation::Kind::kNone;
  }

  // Get the internal kind.
  DexRegisterLocation::Kind GetLocationInternalKind(uint16_t dex_register_index,
                                                    uint16_t number_of_dex_registers) const {
    return IsDexRegisterLive(dex_register_index)
        ? ExtractKindAtOffset(FindLocationOffset(dex_register_index, number_of_dex_registers))
        : DexRegisterLocation::Kind::kNone;
  }

  // TODO: Rename as GetDexRegisterLocation?
  DexRegisterLocation GetLocationKindAndValue(uint16_t dex_register_index,
                                              uint16_t number_of_dex_registers) const {
    if (!IsDexRegisterLive(dex_register_index)) {
      return DexRegisterLocation::None();
    }
    size_t offset = FindLocationOffset(dex_register_index, number_of_dex_registers);
    // Read the first byte and inspect its low 3 bits to get the location kind.
    ShortLocation first_byte = region_.LoadUnaligned<ShortLocation>(offset);
    DexRegisterLocation::Kind kind = ExtractKindFromShortLocation(first_byte);
    if (DexRegisterLocation::IsShortLocationKind(kind)) {
      // Short location.  Extract the value from the remaining 5 bits.
      int32_t value = ExtractValueFromShortLocation(first_byte);
      if (kind == DexRegisterLocation::Kind::kInStack) {
        // Convert the stack slot (short) offset to a byte offset value.
        value *= kFrameSlotSize;
      }
      return DexRegisterLocation(kind, value);
    } else {
      // Large location.  Read the next four bytes to get the value.
      int32_t value = region_.LoadUnaligned<int32_t>(offset + sizeof(DexRegisterLocation::Kind));
      if (kind == DexRegisterLocation::Kind::kInStackLargeOffset) {
        // Convert the stack slot (large) offset to a byte offset value.
        value *= kFrameSlotSize;
      }
      return DexRegisterLocation(kind, value);
    }
  }

  int32_t GetStackOffsetInBytes(uint16_t dex_register_index,
                                uint16_t number_of_dex_registers) const {
    DexRegisterLocation location =
        GetLocationKindAndValue(dex_register_index, number_of_dex_registers);
    DCHECK(location.GetKind() == DexRegisterLocation::Kind::kInStack);
    // GetLocationKindAndValue returns the offset in bytes.
    return location.GetValue();
  }

  int32_t GetConstant(uint16_t dex_register_index, uint16_t number_of_dex_registers) const {
    DexRegisterLocation location =
        GetLocationKindAndValue(dex_register_index, number_of_dex_registers);
    DCHECK(location.GetKind() == DexRegisterLocation::Kind::kConstant);
    return location.GetValue();
  }

  int32_t GetMachineRegister(uint16_t dex_register_index, uint16_t number_of_dex_registers) const {
    DexRegisterLocation location =
        GetLocationKindAndValue(dex_register_index, number_of_dex_registers);
    DCHECK(location.GetInternalKind() == DexRegisterLocation::Kind::kInRegister
           || location.GetInternalKind() == DexRegisterLocation::Kind::kInFpuRegister)
        << DexRegisterLocation::PrettyDescriptor(location.GetInternalKind());
    return location.GetValue();
  }

  // Compute the compressed kind of `location`.
  static DexRegisterLocation::Kind ComputeCompressedKind(const DexRegisterLocation& location) {
    switch (location.GetInternalKind()) {
      case DexRegisterLocation::Kind::kNone:
        DCHECK_EQ(location.GetValue(), 0);
        return DexRegisterLocation::Kind::kNone;

      case DexRegisterLocation::Kind::kInRegister:
        DCHECK_GE(location.GetValue(), 0);
        DCHECK_LT(location.GetValue(), 1 << DexRegisterMap::kValueBits);
        return DexRegisterLocation::Kind::kInRegister;

      case DexRegisterLocation::Kind::kInFpuRegister:
        DCHECK_GE(location.GetValue(), 0);
        DCHECK_LT(location.GetValue(), 1 << DexRegisterMap::kValueBits);
        return DexRegisterLocation::Kind::kInFpuRegister;

      case DexRegisterLocation::Kind::kInStack:
        DCHECK_EQ(location.GetValue() % kFrameSlotSize, 0);
        return IsUint<DexRegisterMap::kValueBits>(location.GetValue() / kFrameSlotSize)
            ? DexRegisterLocation::Kind::kInStack
            : DexRegisterLocation::Kind::kInStackLargeOffset;

      case DexRegisterLocation::Kind::kConstant:
        return IsUint<DexRegisterMap::kValueBits>(location.GetValue())
            ? DexRegisterLocation::Kind::kConstant
            : DexRegisterLocation::Kind::kConstantLargeValue;

      default:
        LOG(FATAL) << "Unexpected location kind"
                   << DexRegisterLocation::PrettyDescriptor(location.GetInternalKind());
        UNREACHABLE();
    }
  }

  // Can `location` be turned into a short location?
  static bool CanBeEncodedAsShortLocation(const DexRegisterLocation& location) {
    switch (location.GetInternalKind()) {
      case DexRegisterLocation::Kind::kNone:
      case DexRegisterLocation::Kind::kInRegister:
      case DexRegisterLocation::Kind::kInFpuRegister:
        return true;

      case DexRegisterLocation::Kind::kInStack:
        DCHECK_EQ(location.GetValue() % kFrameSlotSize, 0);
        return IsUint<kValueBits>(location.GetValue() / kFrameSlotSize);

      case DexRegisterLocation::Kind::kConstant:
        return IsUint<kValueBits>(location.GetValue());

      default:
        UNREACHABLE();
    }
  }

  static size_t EntrySize(const DexRegisterLocation& location) {
    return CanBeEncodedAsShortLocation(location)
        ? DexRegisterMap::SingleShortEntrySize()
        : DexRegisterMap::SingleLargeEntrySize();
  }

  static size_t SingleShortEntrySize() {
    return sizeof(ShortLocation);
  }

  static size_t SingleLargeEntrySize() {
    return sizeof(DexRegisterLocation::Kind) + sizeof(int32_t);
  }

  size_t Size() const {
    return region_.size();
  }

  static constexpr int kLiveBitMaskOffset = 0;
  static constexpr int kFixedSize = kLiveBitMaskOffset;

 private:
  // Width of the kind "field" in a short location, in bits.
  static constexpr size_t kKindBits = 3;
  // Width of the value "field" in a short location, in bits.
  static constexpr size_t kValueBits = 5;

  static constexpr uint8_t kKindMask = (1 << kKindBits) - 1;
  static constexpr int32_t kValueMask = (1 << kValueBits) - 1;
  static constexpr size_t kKindOffset = 0;
  static constexpr size_t kValueOffset = kKindBits;

  static ShortLocation MakeShortLocation(DexRegisterLocation::Kind kind, int32_t value) {
    DCHECK(IsUint<kKindBits>(static_cast<uint8_t>(kind))) << static_cast<uint8_t>(kind);
    DCHECK(IsUint<kValueBits>(value)) << value;
    return (static_cast<uint8_t>(kind) & kKindMask) << kKindOffset
        | (value & kValueMask) << kValueOffset;
  }

  static DexRegisterLocation::Kind ExtractKindFromShortLocation(ShortLocation location) {
    uint8_t kind = (location >> kKindOffset) & kKindMask;
    DCHECK_LE(kind, static_cast<uint8_t>(DexRegisterLocation::Kind::kLastLocationKind));
    // We do not encode kNone locations in the stack map.
    DCHECK_NE(kind, static_cast<uint8_t>(DexRegisterLocation::Kind::kNone));
    return static_cast<DexRegisterLocation::Kind>(kind);
  }

  static int32_t ExtractValueFromShortLocation(ShortLocation location) {
    return (location >> kValueOffset) & kValueMask;
  }

  // Extract a location kind from the byte at position `offset`.
  DexRegisterLocation::Kind ExtractKindAtOffset(size_t offset) const {
    ShortLocation first_byte = region_.LoadUnaligned<ShortLocation>(offset);
    return ExtractKindFromShortLocation(first_byte);
  }

  MemoryRegion region_;

  friend class CodeInfo;
  friend class StackMapStream;
};
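
// A worked example of the short (one-byte) stack location encoding used by
// SetRegisterInfo and GetLocationKindAndValue above (illustrative only; the
// 92-byte offset is an arbitrary assumption that is a multiple of
// kFrameSlotSize and, once divided by 4, fits in the 5-bit value field):
//
//   Encoding:  kind   = kInStack                  = 0b001
//              value  = 92 / kFrameSlotSize = 23  = 0b10111
//              byte   = (value << 3) | kind       = 0b10111001 = 0xB9
//
//   Decoding:  kind   = byte & 0b111              -> kInStack
//              value  = (byte >> 3) & 0b11111     = 23
//              offset = value * kFrameSlotSize    = 92 bytes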

/**
 * A Stack Map holds compilation information for a specific PC necessary for:
 * - Mapping it to a dex PC,
 * - Knowing which stack entries are objects,
 * - Knowing which registers hold objects,
 * - Knowing the inlining information,
 * - Knowing the values of dex registers.
 *
 * The information is of the form:
 * [register_mask, stack_mask, dex_pc, native_pc_offset, dex_register_map_offset, inlining_info_offset].
 *
 * Note that register_mask is fixed size, but stack_mask is variable size, depending on the
 * stack size of a method.
 */
class StackMap {
 public:
  explicit StackMap(MemoryRegion region) : region_(region) {}

  uint32_t GetDexPc(const CodeInfo& info) const;

  void SetDexPc(const CodeInfo& info, uint32_t dex_pc);

  uint32_t GetNativePcOffset(const CodeInfo& info) const;

  void SetNativePcOffset(const CodeInfo& info, uint32_t native_pc_offset);

  uint32_t GetDexRegisterMapOffset(const CodeInfo& info) const;

  void SetDexRegisterMapOffset(const CodeInfo& info, uint32_t offset);

  uint32_t GetInlineDescriptorOffset(const CodeInfo& info) const;

  void SetInlineDescriptorOffset(const CodeInfo& info, uint32_t offset);

  uint32_t GetRegisterMask(const CodeInfo& info) const;

  void SetRegisterMask(const CodeInfo& info, uint32_t mask);

  MemoryRegion GetStackMask(const CodeInfo& info) const;

  void SetStackMask(const CodeInfo& info, const BitVector& sp_map) {
    MemoryRegion region = GetStackMask(info);
    for (size_t i = 0; i < region.size_in_bits(); i++) {
      region.StoreBit(i, sp_map.IsBitSet(i));
    }
  }

  bool HasDexRegisterMap(const CodeInfo& info) const {
    return GetDexRegisterMapOffset(info) != kNoDexRegisterMap;
  }

  bool HasInlineInfo(const CodeInfo& info) const {
    return GetInlineDescriptorOffset(info) != kNoInlineInfo;
  }

  bool Equals(const StackMap& other) const {
    return region_.pointer() == other.region_.pointer()
       && region_.size() == other.region_.size();
  }

  static size_t ComputeStackMapSize(size_t stack_mask_size,
                                    bool has_inline_info,
                                    bool is_small_inline_info,
                                    bool is_small_dex_map,
                                    bool is_small_dex_pc,
                                    bool is_small_native_pc);

  static size_t ComputeStackMapSize(size_t stack_mask_size,
                                    size_t inline_info_size,
                                    size_t dex_register_map_size,
                                    size_t dex_pc_max,
                                    size_t native_pc_max);

  // TODO: Revisit this abstraction if we allow a 3-byte encoding.
  typedef uint8_t kSmallEncoding;
  typedef uint32_t kLargeEncoding;
  static constexpr size_t kBytesForSmallEncoding = sizeof(kSmallEncoding);
  static constexpr size_t kBitsForSmallEncoding = kBitsPerByte * kBytesForSmallEncoding;
  static constexpr size_t kBytesForLargeEncoding = sizeof(kLargeEncoding);
  static constexpr size_t kBitsForLargeEncoding = kBitsPerByte * kBytesForLargeEncoding;

  // Special (invalid) offset for the DexRegisterMapOffset field meaning
  // that there is no Dex register map for this stack map.
  static constexpr uint32_t kNoDexRegisterMap = -1;
  static constexpr uint32_t kNoDexRegisterMapSmallEncoding =
      std::numeric_limits<kSmallEncoding>::max();

  // Special (invalid) offset for the InlineDescriptorOffset field meaning
  // that there is no inline info for this stack map.
  static constexpr uint32_t kNoInlineInfo = -1;
  static constexpr uint32_t kNoInlineInfoSmallEncoding =
      std::numeric_limits<kSmallEncoding>::max();

  // Returns the number of bytes needed for an entry in the StackMap.
  static size_t NumberOfBytesForEntry(bool small_encoding) {
    return small_encoding ? kBytesForSmallEncoding : kBytesForLargeEncoding;
  }

 private:
  // TODO: Instead of plain types such as "uint32_t", introduce
  // typedefs (and document the memory layout of StackMap).
  static constexpr int kRegisterMaskOffset = 0;
  static constexpr int kFixedSize = kRegisterMaskOffset + sizeof(uint32_t);
  static constexpr int kStackMaskOffset = kFixedSize;

  MemoryRegion region_;

  friend class CodeInfo;
  friend class StackMapStream;
};
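
// A minimal sketch (illustrative only) of how one StackMap entry is filled in
// through its setters.  `info` is assumed to be an already encoded CodeInfo
// and `stack_map` one of its entries (e.g. obtained via GetStackMapAt); the
// dex pc, native pc offset and register mask values are arbitrary.
//
//   stack_map.SetDexPc(info, 3u);
//   stack_map.SetNativePcOffset(info, 64u);
//   stack_map.SetRegisterMask(info, (1u << 5) | (1u << 6));  // Two registers hold objects.
//   stack_map.SetDexRegisterMapOffset(info, StackMap::kNoDexRegisterMap);
//   stack_map.SetInlineDescriptorOffset(info, StackMap::kNoInlineInfo);
//   // After which HasDexRegisterMap(info) and HasInlineInfo(info) report false.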


/**
 * Wrapper around all compiler information collected for a method.
 * The information is of the form:
 * [overall_size, encoding_info, number_of_stack_maps, stack_mask_size, StackMap+,
 *  DexRegisterMap+, InlineInfo*].
 */
class CodeInfo {
 public:
  explicit CodeInfo(MemoryRegion region) : region_(region) {}

  explicit CodeInfo(const void* data) {
    uint32_t size = reinterpret_cast<const uint32_t*>(data)[0];
    region_ = MemoryRegion(const_cast<void*>(data), size);
  }

  void SetEncoding(size_t inline_info_size,
                   size_t dex_register_map_size,
                   size_t dex_pc_max,
                   size_t native_pc_max) {
    if (inline_info_size != 0) {
      region_.StoreBit(kHasInlineInfoBitOffset, 1);
      region_.StoreBit(kHasSmallInlineInfoBitOffset, IsUint<StackMap::kBitsForSmallEncoding>(
          // + 1 to also encode kNoInlineInfo: if an inline info offset
          // is at 0xFF, we want to overflow to a larger encoding, because it will
          // conflict with kNoInlineInfo.
          // The offset is relative to the dex register map. TODO: Change this.
          inline_info_size + dex_register_map_size + 1));
    } else {
      region_.StoreBit(kHasInlineInfoBitOffset, 0);
      region_.StoreBit(kHasSmallInlineInfoBitOffset, 0);
    }
    region_.StoreBit(kHasSmallDexRegisterMapBitOffset,
                     // + 1 to also encode kNoDexRegisterMap: if a dex register map offset
                     // is at 0xFF, we want to overflow to a larger encoding, because it will
                     // conflict with kNoDexRegisterMap.
                     IsUint<StackMap::kBitsForSmallEncoding>(dex_register_map_size + 1));
    region_.StoreBit(kHasSmallDexPcBitOffset, IsUint<StackMap::kBitsForSmallEncoding>(dex_pc_max));
    region_.StoreBit(kHasSmallNativePcBitOffset,
                     IsUint<StackMap::kBitsForSmallEncoding>(native_pc_max));
  }

  bool HasInlineInfo() const {
    return region_.LoadBit(kHasInlineInfoBitOffset);
  }

  bool HasSmallInlineInfo() const {
    return region_.LoadBit(kHasSmallInlineInfoBitOffset);
  }

  bool HasSmallDexRegisterMap() const {
    return region_.LoadBit(kHasSmallDexRegisterMapBitOffset);
  }

  bool HasSmallNativePc() const {
    return region_.LoadBit(kHasSmallNativePcBitOffset);
  }

  bool HasSmallDexPc() const {
    return region_.LoadBit(kHasSmallDexPcBitOffset);
  }

  size_t ComputeStackMapRegisterMaskOffset() const {
    return StackMap::kRegisterMaskOffset;
  }

  size_t ComputeStackMapStackMaskOffset() const {
    return StackMap::kStackMaskOffset;
  }

  size_t ComputeStackMapDexPcOffset() const {
    return ComputeStackMapStackMaskOffset() + GetStackMaskSize();
  }

  size_t ComputeStackMapNativePcOffset() const {
    return ComputeStackMapDexPcOffset()
        + (HasSmallDexPc() ? sizeof(uint8_t) : sizeof(uint32_t));
  }

  size_t ComputeStackMapDexRegisterMapOffset() const {
    return ComputeStackMapNativePcOffset()
        + (HasSmallNativePc() ? sizeof(uint8_t) : sizeof(uint32_t));
  }

  size_t ComputeStackMapInlineInfoOffset() const {
    CHECK(HasInlineInfo());
    return ComputeStackMapDexRegisterMapOffset()
        + (HasSmallDexRegisterMap() ? sizeof(uint8_t) : sizeof(uint32_t));
  }

  StackMap GetStackMapAt(size_t i) const {
    size_t size = StackMapSize();
    return StackMap(GetStackMaps().Subregion(i * size, size));
  }

  uint32_t GetOverallSize() const {
    return region_.LoadUnaligned<uint32_t>(kOverallSizeOffset);
  }

  void SetOverallSize(uint32_t size) {
    region_.StoreUnaligned<uint32_t>(kOverallSizeOffset, size);
  }

  uint32_t GetStackMaskSize() const {
    return region_.LoadUnaligned<uint32_t>(kStackMaskSizeOffset);
  }

  void SetStackMaskSize(uint32_t size) {
    region_.StoreUnaligned<uint32_t>(kStackMaskSizeOffset, size);
  }

  size_t GetNumberOfStackMaps() const {
    return region_.LoadUnaligned<uint32_t>(kNumberOfStackMapsOffset);
  }

  void SetNumberOfStackMaps(uint32_t number_of_stack_maps) {
    region_.StoreUnaligned<uint32_t>(kNumberOfStackMapsOffset, number_of_stack_maps);
  }

  // Get the size of one stack map of this CodeInfo object, in bytes.
  // All stack maps of a CodeInfo have the same size.
  size_t StackMapSize() const {
    return StackMap::ComputeStackMapSize(GetStackMaskSize(),
                                         HasInlineInfo(),
                                         HasSmallInlineInfo(),
                                         HasSmallDexRegisterMap(),
                                         HasSmallDexPc(),
                                         HasSmallNativePc());
  }

  // Get the size of all the stack maps of this CodeInfo object, in bytes.
  size_t StackMapsSize() const {
    return StackMapSize() * GetNumberOfStackMaps();
  }

  size_t GetDexRegisterMapsOffset() const {
    return CodeInfo::kFixedSize + StackMapsSize();
  }

  uint32_t GetStackMapsOffset() const {
    return kFixedSize;
  }

  DexRegisterMap GetDexRegisterMapOf(StackMap stack_map, uint32_t number_of_dex_registers) const {
    DCHECK(stack_map.HasDexRegisterMap(*this));
    uint32_t offset = stack_map.GetDexRegisterMapOffset(*this) + GetDexRegisterMapsOffset();
    size_t size = ComputeDexRegisterMapSize(offset, number_of_dex_registers);
    return DexRegisterMap(region_.Subregion(offset, size));
  }

  InlineInfo GetInlineInfoOf(StackMap stack_map) const {
    DCHECK(stack_map.HasInlineInfo(*this));
    uint32_t offset = stack_map.GetInlineDescriptorOffset(*this) + GetDexRegisterMapsOffset();
    uint8_t depth = region_.LoadUnaligned<uint8_t>(offset);
    return InlineInfo(region_.Subregion(offset,
        InlineInfo::kFixedSize + depth * InlineInfo::SingleEntrySize()));
  }

  StackMap GetStackMapForDexPc(uint32_t dex_pc) const {
    for (size_t i = 0, e = GetNumberOfStackMaps(); i < e; ++i) {
      StackMap stack_map = GetStackMapAt(i);
      if (stack_map.GetDexPc(*this) == dex_pc) {
        return stack_map;
      }
    }
    LOG(FATAL) << "Unreachable";
    UNREACHABLE();
  }

  StackMap GetStackMapForNativePcOffset(uint32_t native_pc_offset) const {
    // TODO: stack maps are sorted by native pc, we can do a binary search.
    for (size_t i = 0, e = GetNumberOfStackMaps(); i < e; ++i) {
      StackMap stack_map = GetStackMapAt(i);
      if (stack_map.GetNativePcOffset(*this) == native_pc_offset) {
        return stack_map;
      }
    }
    LOG(FATAL) << "Unreachable";
    UNREACHABLE();
  }

  void Dump(std::ostream& os, uint16_t number_of_dex_registers) const;
  void DumpStackMapHeader(std::ostream& os, size_t stack_map_num) const;

 private:
  // TODO: Instead of plain types such as "uint32_t", introduce
  // typedefs (and document the memory layout of CodeInfo).
  static constexpr int kOverallSizeOffset = 0;
  static constexpr int kEncodingInfoOffset = kOverallSizeOffset + sizeof(uint32_t);
  static constexpr int kNumberOfStackMapsOffset = kEncodingInfoOffset + sizeof(uint8_t);
  static constexpr int kStackMaskSizeOffset = kNumberOfStackMapsOffset + sizeof(uint32_t);
  static constexpr int kFixedSize = kStackMaskSizeOffset + sizeof(uint32_t);

  static constexpr int kHasInlineInfoBitOffset = (kEncodingInfoOffset * kBitsPerByte);
  static constexpr int kHasSmallInlineInfoBitOffset = kHasInlineInfoBitOffset + 1;
  static constexpr int kHasSmallDexRegisterMapBitOffset = kHasSmallInlineInfoBitOffset + 1;
  static constexpr int kHasSmallDexPcBitOffset = kHasSmallDexRegisterMapBitOffset + 1;
  static constexpr int kHasSmallNativePcBitOffset = kHasSmallDexPcBitOffset + 1;

  MemoryRegion GetStackMaps() const {
    return region_.size() == 0
        ? MemoryRegion()
        : region_.Subregion(kFixedSize, StackMapsSize());
  }

  // Compute the size of a Dex register map starting at offset `origin` in
  // `region_` and containing `number_of_dex_registers` locations.
  size_t ComputeDexRegisterMapSize(uint32_t origin, uint32_t number_of_dex_registers) const {
    // TODO: Ideally, we would like to use art::DexRegisterMap::Size or
    // art::DexRegisterMap::FindLocationOffset, but the DexRegisterMap is not
    // yet built.  Try to factor common code.
    size_t offset =
        origin + DexRegisterMap::GetDexRegisterMapLocationsOffset(number_of_dex_registers);

    // Create a temporary DexRegisterMap to be able to call DexRegisterMap::IsDexRegisterLive.
    DexRegisterMap only_live_mask(MemoryRegion(region_.Subregion(origin, offset - origin)));

    // Add the size of the location entry of each live Dex register.
    for (uint16_t i = 0; i < number_of_dex_registers; ++i) {
      if (only_live_mask.IsDexRegisterLive(i)) {
        // Read the next byte and inspect its low 3 bits to decide
        // whether it is a short or a large location.
        DexRegisterMap::ShortLocation first_byte =
            region_.LoadUnaligned<DexRegisterMap::ShortLocation>(offset);
        DexRegisterLocation::Kind kind =
            DexRegisterMap::ExtractKindFromShortLocation(first_byte);
        if (DexRegisterLocation::IsShortLocationKind(kind)) {
          // Short location.  Skip the current byte.
          offset += DexRegisterMap::SingleShortEntrySize();
        } else {
          // Large location.  Skip the next 5 bytes.
          offset += DexRegisterMap::SingleLargeEntrySize();
        }
      }
    }
    size_t size = offset - origin;
    return size;
  }

  MemoryRegion region_;
  friend class StackMapStream;
};
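
// A minimal sketch (illustrative only) of the typical read path for this
// encoding, e.g. when unwinding or deoptimizing.  `code_info_data`, `dex_pc`
// and `number_of_dex_registers` are assumed inputs describing the method
// being inspected; Dex register 0 is assumed to be live at that dex pc.
//
//   CodeInfo code_info(code_info_data);
//   StackMap stack_map = code_info.GetStackMapForDexPc(dex_pc);
//   if (stack_map.HasDexRegisterMap(code_info)) {
//     DexRegisterMap dex_register_map =
//         code_info.GetDexRegisterMapOf(stack_map, number_of_dex_registers);
//     DexRegisterLocation location =
//         dex_register_map.GetLocationKindAndValue(0u, number_of_dex_registers);
//     switch (location.GetKind()) {
//       case DexRegisterLocation::Kind::kInStack:
//         // location.GetValue() is a stack offset in bytes.
//         break;
//       case DexRegisterLocation::Kind::kInRegister:
//       case DexRegisterLocation::Kind::kInFpuRegister:
//         // location.GetValue() is a machine register number.
//         break;
//       case DexRegisterLocation::Kind::kConstant:
//         // location.GetValue() is the constant itself.
//         break;
//       default:
//         break;  // kNone: the register has no location at this point.
//     }
//   }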

}  // namespace art

#endif  // ART_RUNTIME_STACK_MAP_H_