Support InlineInfo in StackMap.

Replace StackMapStream::AddInlineInfoEntry with paired
BeginInlineInfoEntry/EndInlineInfoEntry calls that record, per inlined
frame, the method index, dex pc, and a dedicated dex register map.
AddDexRegisterEntry no longer takes an explicit register index; the
stream tracks the current register and routes entries to either the
outer stack map or the open inline frame. The code generator walks
nested HEnvironments through a new recursive EmitEnvironment helper,
emitting parent environments before inlined ones.
Change-Id: I9956091775cedc609fdae7dec1433fcb8858a477
diff --git a/compiler/optimizing/code_generator.cc b/compiler/optimizing/code_generator.cc
index 0e776b3..4805cee 100644
--- a/compiler/optimizing/code_generator.cc
+++ b/compiler/optimizing/code_generator.cc
@@ -645,23 +645,34 @@
}
}
+ uint32_t outer_dex_pc = dex_pc;
+ uint32_t outer_environment_size = 0;
+ uint32_t inlining_depth = 0;
+ if (instruction != nullptr) {
+ for (HEnvironment* environment = instruction->GetEnvironment();
+ environment != nullptr;
+ environment = environment->GetParent()) {
+ outer_dex_pc = environment->GetDexPc();
+ outer_environment_size = environment->Size();
+ if (environment != instruction->GetEnvironment()) {
+ inlining_depth++;
+ }
+ }
+ }
+
// Collect PC infos for the mapping table.
struct PcInfo pc_info;
- pc_info.dex_pc = dex_pc;
+ pc_info.dex_pc = outer_dex_pc;
pc_info.native_pc = GetAssembler()->CodeSize();
pc_infos_.Add(pc_info);
- uint32_t inlining_depth = 0;
-
if (instruction == nullptr) {
// For stack overflow checks.
- stack_map_stream_.BeginStackMapEntry(dex_pc, pc_info.native_pc, 0, 0, 0, inlining_depth);
+ stack_map_stream_.BeginStackMapEntry(pc_info.dex_pc, pc_info.native_pc, 0, 0, 0, 0);
stack_map_stream_.EndStackMapEntry();
return;
}
LocationSummary* locations = instruction->GetLocations();
- HEnvironment* environment = instruction->GetEnvironment();
- size_t environment_size = instruction->EnvironmentSize();
uint32_t register_mask = locations->GetRegisterMask();
if (locations->OnlyCallsOnSlowPath()) {
@@ -674,23 +685,32 @@
}
// The register mask must be a subset of callee-save registers.
DCHECK_EQ(register_mask & core_callee_save_mask_, register_mask);
- stack_map_stream_.BeginStackMapEntry(dex_pc,
+ stack_map_stream_.BeginStackMapEntry(pc_info.dex_pc,
pc_info.native_pc,
register_mask,
locations->GetStackMask(),
- environment_size,
+ outer_environment_size,
inlining_depth);
- if (environment != nullptr) {
- // TODO: Handle parent environment.
- DCHECK(environment->GetParent() == nullptr);
- DCHECK_EQ(environment->GetDexPc(), dex_pc);
+
+ EmitEnvironment(instruction->GetEnvironment(), slow_path);
+ stack_map_stream_.EndStackMapEntry();
+}
+
+void CodeGenerator::EmitEnvironment(HEnvironment* environment, SlowPathCode* slow_path) {
+ if (environment == nullptr) return;
+
+ if (environment->GetParent() != nullptr) {
+ // We emit the parent environment first.
+ EmitEnvironment(environment->GetParent(), slow_path);
+ stack_map_stream_.BeginInlineInfoEntry(
+ environment->GetMethodIdx(), environment->GetDexPc(), environment->Size());
}
// Walk over the environment, and record the location of dex registers.
- for (size_t i = 0; i < environment_size; ++i) {
+ for (size_t i = 0, environment_size = environment->Size(); i < environment_size; ++i) {
HInstruction* current = environment->GetInstructionAt(i);
if (current == nullptr) {
- stack_map_stream_.AddDexRegisterEntry(i, DexRegisterLocation::Kind::kNone, 0);
+ stack_map_stream_.AddDexRegisterEntry(DexRegisterLocation::Kind::kNone, 0);
continue;
}
@@ -701,41 +721,44 @@
if (current->IsLongConstant()) {
int64_t value = current->AsLongConstant()->GetValue();
stack_map_stream_.AddDexRegisterEntry(
- i, DexRegisterLocation::Kind::kConstant, Low32Bits(value));
+ DexRegisterLocation::Kind::kConstant, Low32Bits(value));
stack_map_stream_.AddDexRegisterEntry(
- ++i, DexRegisterLocation::Kind::kConstant, High32Bits(value));
+ DexRegisterLocation::Kind::kConstant, High32Bits(value));
+ ++i;
DCHECK_LT(i, environment_size);
} else if (current->IsDoubleConstant()) {
int64_t value = bit_cast<int64_t, double>(current->AsDoubleConstant()->GetValue());
stack_map_stream_.AddDexRegisterEntry(
- i, DexRegisterLocation::Kind::kConstant, Low32Bits(value));
+ DexRegisterLocation::Kind::kConstant, Low32Bits(value));
stack_map_stream_.AddDexRegisterEntry(
- ++i, DexRegisterLocation::Kind::kConstant, High32Bits(value));
+ DexRegisterLocation::Kind::kConstant, High32Bits(value));
+ ++i;
DCHECK_LT(i, environment_size);
} else if (current->IsIntConstant()) {
int32_t value = current->AsIntConstant()->GetValue();
- stack_map_stream_.AddDexRegisterEntry(i, DexRegisterLocation::Kind::kConstant, value);
+ stack_map_stream_.AddDexRegisterEntry(DexRegisterLocation::Kind::kConstant, value);
} else if (current->IsNullConstant()) {
- stack_map_stream_.AddDexRegisterEntry(i, DexRegisterLocation::Kind::kConstant, 0);
+ stack_map_stream_.AddDexRegisterEntry(DexRegisterLocation::Kind::kConstant, 0);
} else {
DCHECK(current->IsFloatConstant()) << current->DebugName();
int32_t value = bit_cast<int32_t, float>(current->AsFloatConstant()->GetValue());
- stack_map_stream_.AddDexRegisterEntry(i, DexRegisterLocation::Kind::kConstant, value);
+ stack_map_stream_.AddDexRegisterEntry(DexRegisterLocation::Kind::kConstant, value);
}
break;
}
case Location::kStackSlot: {
stack_map_stream_.AddDexRegisterEntry(
- i, DexRegisterLocation::Kind::kInStack, location.GetStackIndex());
+ DexRegisterLocation::Kind::kInStack, location.GetStackIndex());
break;
}
case Location::kDoubleStackSlot: {
stack_map_stream_.AddDexRegisterEntry(
- i, DexRegisterLocation::Kind::kInStack, location.GetStackIndex());
+ DexRegisterLocation::Kind::kInStack, location.GetStackIndex());
stack_map_stream_.AddDexRegisterEntry(
- ++i, DexRegisterLocation::Kind::kInStack, location.GetHighStackIndex(kVRegSize));
+ DexRegisterLocation::Kind::kInStack, location.GetHighStackIndex(kVRegSize));
+ ++i;
DCHECK_LT(i, environment_size);
break;
}
@@ -744,16 +767,18 @@
int id = location.reg();
if (slow_path != nullptr && slow_path->IsCoreRegisterSaved(id)) {
uint32_t offset = slow_path->GetStackOffsetOfCoreRegister(id);
- stack_map_stream_.AddDexRegisterEntry(i, DexRegisterLocation::Kind::kInStack, offset);
+ stack_map_stream_.AddDexRegisterEntry(DexRegisterLocation::Kind::kInStack, offset);
if (current->GetType() == Primitive::kPrimLong) {
stack_map_stream_.AddDexRegisterEntry(
- ++i, DexRegisterLocation::Kind::kInStack, offset + kVRegSize);
+ DexRegisterLocation::Kind::kInStack, offset + kVRegSize);
+ ++i;
DCHECK_LT(i, environment_size);
}
} else {
- stack_map_stream_.AddDexRegisterEntry(i, DexRegisterLocation::Kind::kInRegister, id);
+ stack_map_stream_.AddDexRegisterEntry(DexRegisterLocation::Kind::kInRegister, id);
if (current->GetType() == Primitive::kPrimLong) {
- stack_map_stream_.AddDexRegisterEntry(++i, DexRegisterLocation::Kind::kInRegister, id);
+ stack_map_stream_.AddDexRegisterEntry(DexRegisterLocation::Kind::kInRegister, id);
+ ++i;
DCHECK_LT(i, environment_size);
}
}
@@ -764,17 +789,18 @@
int id = location.reg();
if (slow_path != nullptr && slow_path->IsFpuRegisterSaved(id)) {
uint32_t offset = slow_path->GetStackOffsetOfFpuRegister(id);
- stack_map_stream_.AddDexRegisterEntry(i, DexRegisterLocation::Kind::kInStack, offset);
+ stack_map_stream_.AddDexRegisterEntry(DexRegisterLocation::Kind::kInStack, offset);
if (current->GetType() == Primitive::kPrimDouble) {
stack_map_stream_.AddDexRegisterEntry(
- ++i, DexRegisterLocation::Kind::kInStack, offset + kVRegSize);
+ DexRegisterLocation::Kind::kInStack, offset + kVRegSize);
+ ++i;
DCHECK_LT(i, environment_size);
}
} else {
- stack_map_stream_.AddDexRegisterEntry(i, DexRegisterLocation::Kind::kInFpuRegister, id);
+ stack_map_stream_.AddDexRegisterEntry(DexRegisterLocation::Kind::kInFpuRegister, id);
if (current->GetType() == Primitive::kPrimDouble) {
- stack_map_stream_.AddDexRegisterEntry(
- ++i, DexRegisterLocation::Kind::kInFpuRegister, id);
+ stack_map_stream_.AddDexRegisterEntry(DexRegisterLocation::Kind::kInFpuRegister, id);
+ ++i;
DCHECK_LT(i, environment_size);
}
}
@@ -786,16 +812,17 @@
int high = location.high();
if (slow_path != nullptr && slow_path->IsFpuRegisterSaved(low)) {
uint32_t offset = slow_path->GetStackOffsetOfFpuRegister(low);
- stack_map_stream_.AddDexRegisterEntry(i, DexRegisterLocation::Kind::kInStack, offset);
+ stack_map_stream_.AddDexRegisterEntry(DexRegisterLocation::Kind::kInStack, offset);
} else {
- stack_map_stream_.AddDexRegisterEntry(i, DexRegisterLocation::Kind::kInFpuRegister, low);
+ stack_map_stream_.AddDexRegisterEntry(DexRegisterLocation::Kind::kInFpuRegister, low);
}
if (slow_path != nullptr && slow_path->IsFpuRegisterSaved(high)) {
uint32_t offset = slow_path->GetStackOffsetOfFpuRegister(high);
- stack_map_stream_.AddDexRegisterEntry(++i, DexRegisterLocation::Kind::kInStack, offset);
+ stack_map_stream_.AddDexRegisterEntry(DexRegisterLocation::Kind::kInStack, offset);
+ ++i;
} else {
- stack_map_stream_.AddDexRegisterEntry(
- ++i, DexRegisterLocation::Kind::kInFpuRegister, high);
+ stack_map_stream_.AddDexRegisterEntry(DexRegisterLocation::Kind::kInFpuRegister, high);
+ ++i;
}
DCHECK_LT(i, environment_size);
break;
@@ -806,23 +833,23 @@
int high = location.high();
if (slow_path != nullptr && slow_path->IsCoreRegisterSaved(low)) {
uint32_t offset = slow_path->GetStackOffsetOfCoreRegister(low);
- stack_map_stream_.AddDexRegisterEntry(i, DexRegisterLocation::Kind::kInStack, offset);
+ stack_map_stream_.AddDexRegisterEntry(DexRegisterLocation::Kind::kInStack, offset);
} else {
- stack_map_stream_.AddDexRegisterEntry(i, DexRegisterLocation::Kind::kInRegister, low);
+ stack_map_stream_.AddDexRegisterEntry(DexRegisterLocation::Kind::kInRegister, low);
}
if (slow_path != nullptr && slow_path->IsCoreRegisterSaved(high)) {
uint32_t offset = slow_path->GetStackOffsetOfCoreRegister(high);
- stack_map_stream_.AddDexRegisterEntry(++i, DexRegisterLocation::Kind::kInStack, offset);
+ stack_map_stream_.AddDexRegisterEntry(DexRegisterLocation::Kind::kInStack, offset);
} else {
- stack_map_stream_.AddDexRegisterEntry(
- ++i, DexRegisterLocation::Kind::kInRegister, high);
+ stack_map_stream_.AddDexRegisterEntry(DexRegisterLocation::Kind::kInRegister, high);
}
+ ++i;
DCHECK_LT(i, environment_size);
break;
}
case Location::kInvalid: {
- stack_map_stream_.AddDexRegisterEntry(i, DexRegisterLocation::Kind::kNone, 0);
+ stack_map_stream_.AddDexRegisterEntry(DexRegisterLocation::Kind::kNone, 0);
break;
}
@@ -830,7 +857,10 @@
LOG(FATAL) << "Unexpected kind " << location.GetKind();
}
}
- stack_map_stream_.EndStackMapEntry();
+
+ if (environment->GetParent() != nullptr) {
+ stack_map_stream_.EndInlineInfoEntry();
+ }
}
bool CodeGenerator::CanMoveNullCheckToUser(HNullCheck* null_check) {
diff --git a/compiler/optimizing/code_generator.h b/compiler/optimizing/code_generator.h
index bdbd571..a902760 100644
--- a/compiler/optimizing/code_generator.h
+++ b/compiler/optimizing/code_generator.h
@@ -436,6 +436,7 @@
size_t GetStackOffsetOfSavedRegister(size_t index);
void CompileInternal(CodeAllocator* allocator, bool is_baseline);
void BlockIfInRegister(Location location, bool is_out = false) const;
+ void EmitEnvironment(HEnvironment* environment, SlowPathCode* slow_path);
HGraph* const graph_;
const CompilerOptions& compiler_options_;
diff --git a/compiler/optimizing/stack_map_stream.cc b/compiler/optimizing/stack_map_stream.cc
index 8344fc3..89035a3 100644
--- a/compiler/optimizing/stack_map_stream.cc
+++ b/compiler/optimizing/stack_map_stream.cc
@@ -13,7 +13,6 @@
* See the License for the specific language governing permissions and
* limitations under the License.
*/
-
#include "stack_map_stream.h"
namespace art {
@@ -52,6 +51,7 @@
dex_pc_max_ = std::max(dex_pc_max_, dex_pc);
native_pc_offset_max_ = std::max(native_pc_offset_max_, native_pc_offset);
register_mask_max_ = std::max(register_mask_max_, register_mask);
+ current_dex_register_ = 0;
}
void StackMapStream::EndStackMapEntry() {
@@ -60,11 +60,7 @@
current_entry_ = StackMapEntry();
}
-void StackMapStream::AddDexRegisterEntry(uint16_t dex_register,
- DexRegisterLocation::Kind kind,
- int32_t value) {
- DCHECK_LT(dex_register, current_entry_.num_dex_registers);
-
+void StackMapStream::AddDexRegisterEntry(DexRegisterLocation::Kind kind, int32_t value) {
if (kind != DexRegisterLocation::Kind::kNone) {
// Ensure we only use non-compressed location kind at this stage.
DCHECK(DexRegisterLocation::IsShortLocationKind(kind))
@@ -87,18 +83,47 @@
location_catalog_entries_indices_.Insert(std::make_pair(location, index));
}
- current_entry_.live_dex_registers_mask->SetBit(dex_register);
- current_entry_.dex_register_map_hash +=
- (1 << (dex_register % (sizeof(current_entry_.dex_register_map_hash) * kBitsPerByte)));
- current_entry_.dex_register_map_hash += static_cast<uint32_t>(value);
- current_entry_.dex_register_map_hash += static_cast<uint32_t>(kind);
+ if (in_inline_frame_) {
+ // TODO: Support sharing DexRegisterMap across InlineInfo.
+ DCHECK_LT(current_dex_register_, current_inline_info_.num_dex_registers);
+ current_inline_info_.live_dex_registers_mask->SetBit(current_dex_register_);
+ } else {
+ DCHECK_LT(current_dex_register_, current_entry_.num_dex_registers);
+ current_entry_.live_dex_registers_mask->SetBit(current_dex_register_);
+ current_entry_.dex_register_map_hash += (1 <<
+ (current_dex_register_ % (sizeof(current_entry_.dex_register_map_hash) * kBitsPerByte)));
+ current_entry_.dex_register_map_hash += static_cast<uint32_t>(value);
+ current_entry_.dex_register_map_hash += static_cast<uint32_t>(kind);
+ }
}
+ current_dex_register_++;
}
-void StackMapStream::AddInlineInfoEntry(uint32_t method_index) {
- InlineInfoEntry entry;
- entry.method_index = method_index;
- inline_infos_.Add(entry);
+void StackMapStream::BeginInlineInfoEntry(uint32_t method_index,
+ uint32_t dex_pc,
+ uint32_t num_dex_registers) {
+ DCHECK(!in_inline_frame_);
+ in_inline_frame_ = true;
+ current_inline_info_.method_index = method_index;
+ current_inline_info_.dex_pc = dex_pc;
+ current_inline_info_.num_dex_registers = num_dex_registers;
+ current_inline_info_.dex_register_locations_start_index = dex_register_locations_.Size();
+ if (num_dex_registers != 0) {
+ current_inline_info_.live_dex_registers_mask =
+ new (allocator_) ArenaBitVector(allocator_, num_dex_registers, true);
+ } else {
+ current_inline_info_.live_dex_registers_mask = nullptr;
+ }
+ current_dex_register_ = 0;
+}
+
+void StackMapStream::EndInlineInfoEntry() {
+ DCHECK(in_inline_frame_);
+ DCHECK_EQ(current_dex_register_, current_inline_info_.num_dex_registers)
+ << "Inline information contains less registers than expected";
+ in_inline_frame_ = false;
+ inline_infos_.Add(current_inline_info_);
+ current_inline_info_ = InlineInfoEntry();
}
size_t StackMapStream::PrepareForFillIn() {
@@ -142,17 +167,18 @@
return size;
}
-size_t StackMapStream::ComputeDexRegisterMapSize(const StackMapEntry& entry) const {
+size_t StackMapStream::ComputeDexRegisterMapSize(uint32_t num_dex_registers,
+ const BitVector& live_dex_registers_mask) const {
// Size of the map in bytes.
size_t size = DexRegisterMap::kFixedSize;
// Add the live bit mask for the Dex register liveness.
- size += DexRegisterMap::GetLiveBitMaskSize(entry.num_dex_registers);
+ size += DexRegisterMap::GetLiveBitMaskSize(num_dex_registers);
// Compute the size of the set of live Dex register entries.
size_t number_of_live_dex_registers = 0;
for (size_t dex_register_number = 0;
- dex_register_number < entry.num_dex_registers;
+ dex_register_number < num_dex_registers;
++dex_register_number) {
- if (entry.live_dex_registers_mask->IsBitSet(dex_register_number)) {
+ if (live_dex_registers_mask.IsBitSet(dex_register_number)) {
++number_of_live_dex_registers;
}
}
@@ -167,11 +193,18 @@
size_t StackMapStream::ComputeDexRegisterMapsSize() const {
size_t size = 0;
+ size_t inline_info_index = 0;
for (size_t i = 0; i < stack_maps_.Size(); ++i) {
StackMapEntry entry = stack_maps_.Get(i);
if (entry.same_dex_register_map_as_ == kNoSameDexMapFound) {
+ size += ComputeDexRegisterMapSize(entry.num_dex_registers, *entry.live_dex_registers_mask);
+ } else {
// Entries with the same dex map will have the same offset.
- size += ComputeDexRegisterMapSize(entry);
+ }
+ for (size_t j = 0; j < entry.inlining_depth; ++j) {
+ InlineInfoEntry inline_entry = inline_infos_.Get(inline_info_index++);
+ size += ComputeDexRegisterMapSize(inline_entry.num_dex_registers,
+ *inline_entry.live_dex_registers_mask);
}
}
return size;
@@ -247,34 +280,19 @@
.GetDexRegisterMapOffset(code_info));
} else {
// New dex registers maps should be added to the stack map.
- MemoryRegion register_region =
- dex_register_locations_region.Subregion(
- next_dex_register_map_offset,
- ComputeDexRegisterMapSize(entry));
+ MemoryRegion register_region = dex_register_locations_region.Subregion(
+ next_dex_register_map_offset,
+ ComputeDexRegisterMapSize(entry.num_dex_registers, *entry.live_dex_registers_mask));
next_dex_register_map_offset += register_region.size();
DexRegisterMap dex_register_map(register_region);
stack_map.SetDexRegisterMapOffset(
code_info, register_region.start() - dex_register_locations_region.start());
- // Set the live bit mask.
- dex_register_map.SetLiveBitMask(entry.num_dex_registers, *entry.live_dex_registers_mask);
-
- // Set the dex register location mapping data.
- for (size_t dex_register_number = 0, index_in_dex_register_locations = 0;
- dex_register_number < entry.num_dex_registers;
- ++dex_register_number) {
- if (entry.live_dex_registers_mask->IsBitSet(dex_register_number)) {
- size_t location_catalog_entry_index =
- dex_register_locations_.Get(entry.dex_register_locations_start_index
- + index_in_dex_register_locations);
- dex_register_map.SetLocationCatalogEntryIndex(
- index_in_dex_register_locations,
- location_catalog_entry_index,
- entry.num_dex_registers,
- location_catalog_entries_.Size());
- ++index_in_dex_register_locations;
- }
- }
+ // Set the dex register location.
+ FillInDexRegisterMap(dex_register_map,
+ entry.num_dex_registers,
+ *entry.live_dex_registers_mask,
+ entry.dex_register_locations_start_index);
}
}
@@ -291,9 +309,29 @@
code_info, inline_region.start() - dex_register_locations_region.start());
inline_info.SetDepth(entry.inlining_depth);
- for (size_t j = 0; j < entry.inlining_depth; ++j) {
- InlineInfoEntry inline_entry = inline_infos_.Get(j + entry.inline_infos_start_index);
- inline_info.SetMethodReferenceIndexAtDepth(j, inline_entry.method_index);
+ for (size_t depth = 0; depth < entry.inlining_depth; ++depth) {
+ InlineInfoEntry inline_entry = inline_infos_.Get(depth + entry.inline_infos_start_index);
+ inline_info.SetMethodIndexAtDepth(depth, inline_entry.method_index);
+ inline_info.SetDexPcAtDepth(depth, inline_entry.dex_pc);
+ if (inline_entry.num_dex_registers == 0) {
+ // No dex map available.
+ inline_info.SetDexRegisterMapOffsetAtDepth(depth, StackMap::kNoDexRegisterMap);
+ DCHECK(inline_entry.live_dex_registers_mask == nullptr);
+ } else {
+ MemoryRegion register_region = dex_register_locations_region.Subregion(
+ next_dex_register_map_offset,
+ ComputeDexRegisterMapSize(inline_entry.num_dex_registers,
+ *inline_entry.live_dex_registers_mask));
+ next_dex_register_map_offset += register_region.size();
+ DexRegisterMap dex_register_map(register_region);
+ inline_info.SetDexRegisterMapOffsetAtDepth(
+ depth, register_region.start() - dex_register_locations_region.start());
+
+ FillInDexRegisterMap(dex_register_map,
+ inline_entry.num_dex_registers,
+ *inline_entry.live_dex_registers_mask,
+ inline_entry.dex_register_locations_start_index);
+ }
}
} else {
if (inline_info_size_ != 0) {
@@ -303,6 +341,28 @@
}
}
+void StackMapStream::FillInDexRegisterMap(DexRegisterMap dex_register_map,
+ uint32_t num_dex_registers,
+ const BitVector& live_dex_registers_mask,
+ uint32_t start_index_in_dex_register_locations) const {
+ dex_register_map.SetLiveBitMask(num_dex_registers, live_dex_registers_mask);
+ // Set the dex register location mapping data.
+ for (size_t dex_register_number = 0, index_in_dex_register_locations = 0;
+ dex_register_number < num_dex_registers;
+ ++dex_register_number) {
+ if (live_dex_registers_mask.IsBitSet(dex_register_number)) {
+ size_t location_catalog_entry_index = dex_register_locations_.Get(
+ start_index_in_dex_register_locations + index_in_dex_register_locations);
+ dex_register_map.SetLocationCatalogEntryIndex(
+ index_in_dex_register_locations,
+ location_catalog_entry_index,
+ num_dex_registers,
+ location_catalog_entries_.Size());
+ ++index_in_dex_register_locations;
+ }
+ }
+}
+
size_t StackMapStream::FindEntryWithTheSameDexMap() {
size_t current_entry_index = stack_maps_.Size();
auto entries_it = dex_map_hash_to_stack_map_indices_.find(current_entry_.dex_register_map_hash);
diff --git a/compiler/optimizing/stack_map_stream.h b/compiler/optimizing/stack_map_stream.h
index 0c626be..4c03f9f 100644
--- a/compiler/optimizing/stack_map_stream.h
+++ b/compiler/optimizing/stack_map_stream.h
@@ -72,6 +72,7 @@
number_of_stack_maps_with_inline_info_(0),
dex_map_hash_to_stack_map_indices_(std::less<uint32_t>(), allocator->Adapter()),
current_entry_(),
+ current_inline_info_(),
stack_mask_size_(0),
inline_info_size_(0),
dex_register_maps_size_(0),
@@ -81,7 +82,9 @@
stack_maps_start_(0),
dex_register_maps_start_(0),
inline_infos_start_(0),
- needed_size_(0) {}
+ needed_size_(0),
+ current_dex_register_(0),
+ in_inline_frame_(false) {}
// See runtime/stack_map.h to know what these fields contain.
struct StackMapEntry {
@@ -99,7 +102,11 @@
};
struct InlineInfoEntry {
+ uint32_t dex_pc;
uint32_t method_index;
+ uint32_t num_dex_registers;
+ BitVector* live_dex_registers_mask;
+ size_t dex_register_locations_start_index;
};
void BeginStackMapEntry(uint32_t dex_pc,
@@ -110,11 +117,12 @@
uint8_t inlining_depth);
void EndStackMapEntry();
- void AddDexRegisterEntry(uint16_t dex_register,
- DexRegisterLocation::Kind kind,
- int32_t value);
+ void AddDexRegisterEntry(DexRegisterLocation::Kind kind, int32_t value);
- void AddInlineInfoEntry(uint32_t method_index);
+ void BeginInlineInfoEntry(uint32_t method_index,
+ uint32_t dex_pc,
+ uint32_t num_dex_registers);
+ void EndInlineInfoEntry();
// Prepares the stream to fill in a memory region. Must be called before FillIn.
// Returns the size (in bytes) needed to store this stream.
@@ -123,7 +131,8 @@
private:
size_t ComputeDexRegisterLocationCatalogSize() const;
- size_t ComputeDexRegisterMapSize(const StackMapEntry& entry) const;
+ size_t ComputeDexRegisterMapSize(uint32_t num_dex_registers,
+ const BitVector& live_dex_registers_mask) const;
size_t ComputeDexRegisterMapsSize() const;
size_t ComputeInlineInfoSize() const;
@@ -131,6 +140,10 @@
// or kNoSameDexMapFound if no such entry exists.
size_t FindEntryWithTheSameDexMap();
bool HaveTheSameDexMaps(const StackMapEntry& a, const StackMapEntry& b) const;
+ void FillInDexRegisterMap(DexRegisterMap dex_register_map,
+ uint32_t num_dex_registers,
+ const BitVector& live_dex_registers_mask,
+ uint32_t start_index_in_dex_register_locations) const;
ArenaAllocator* allocator_;
GrowableArray<StackMapEntry> stack_maps_;
@@ -155,6 +168,7 @@
ArenaSafeMap<uint32_t, GrowableArray<uint32_t>> dex_map_hash_to_stack_map_indices_;
StackMapEntry current_entry_;
+ InlineInfoEntry current_inline_info_;
size_t stack_mask_size_;
size_t inline_info_size_;
size_t dex_register_maps_size_;
@@ -165,6 +179,8 @@
size_t dex_register_maps_start_;
size_t inline_infos_start_;
size_t needed_size_;
+ uint32_t current_dex_register_;
+ bool in_inline_frame_;
static constexpr uint32_t kNoSameDexMapFound = -1;
diff --git a/compiler/optimizing/stack_map_test.cc b/compiler/optimizing/stack_map_test.cc
index 3291a77..e04fa98 100644
--- a/compiler/optimizing/stack_map_test.cc
+++ b/compiler/optimizing/stack_map_test.cc
@@ -41,8 +41,8 @@
ArenaBitVector sp_mask(&arena, 0, false);
size_t number_of_dex_registers = 2;
stream.BeginStackMapEntry(0, 64, 0x3, &sp_mask, number_of_dex_registers, 0);
- stream.AddDexRegisterEntry(0, Kind::kInStack, 0); // Short location.
- stream.AddDexRegisterEntry(1, Kind::kConstant, -2); // Short location.
+ stream.AddDexRegisterEntry(Kind::kInStack, 0); // Short location.
+ stream.AddDexRegisterEntry(Kind::kConstant, -2); // Short location.
stream.EndStackMapEntry();
size_t size = stream.PrepareForFillIn();
@@ -124,19 +124,22 @@
sp_mask1.SetBit(2);
sp_mask1.SetBit(4);
size_t number_of_dex_registers = 2;
+ size_t number_of_dex_registers_in_inline_info = 0;
stream.BeginStackMapEntry(0, 64, 0x3, &sp_mask1, number_of_dex_registers, 2);
- stream.AddDexRegisterEntry(0, Kind::kInStack, 0); // Short location.
- stream.AddDexRegisterEntry(1, Kind::kConstant, -2); // Large location.
- stream.AddInlineInfoEntry(42);
- stream.AddInlineInfoEntry(82);
+ stream.AddDexRegisterEntry(Kind::kInStack, 0); // Short location.
+ stream.AddDexRegisterEntry(Kind::kConstant, -2); // Large location.
+ stream.BeginInlineInfoEntry(82, 3, number_of_dex_registers_in_inline_info);
+ stream.EndInlineInfoEntry();
+ stream.BeginInlineInfoEntry(42, 2, number_of_dex_registers_in_inline_info);
+ stream.EndInlineInfoEntry();
stream.EndStackMapEntry();
ArenaBitVector sp_mask2(&arena, 0, true);
sp_mask2.SetBit(3);
sp_mask1.SetBit(8);
stream.BeginStackMapEntry(1, 128, 0xFF, &sp_mask2, number_of_dex_registers, 0);
- stream.AddDexRegisterEntry(0, Kind::kInRegister, 18); // Short location.
- stream.AddDexRegisterEntry(1, Kind::kInFpuRegister, 3); // Short location.
+ stream.AddDexRegisterEntry(Kind::kInRegister, 18); // Short location.
+ stream.AddDexRegisterEntry(Kind::kInFpuRegister, 3); // Short location.
stream.EndStackMapEntry();
size_t size = stream.PrepareForFillIn();
@@ -211,8 +214,10 @@
ASSERT_TRUE(stack_map.HasInlineInfo(code_info));
InlineInfo inline_info = code_info.GetInlineInfoOf(stack_map);
ASSERT_EQ(2u, inline_info.GetDepth());
- ASSERT_EQ(42u, inline_info.GetMethodReferenceIndexAtDepth(0));
- ASSERT_EQ(82u, inline_info.GetMethodReferenceIndexAtDepth(1));
+ ASSERT_EQ(82u, inline_info.GetMethodIndexAtDepth(0));
+ ASSERT_EQ(42u, inline_info.GetMethodIndexAtDepth(1));
+ ASSERT_EQ(3u, inline_info.GetDexPcAtDepth(0));
+ ASSERT_EQ(2u, inline_info.GetDexPcAtDepth(1));
}
// Second stack map.
@@ -277,8 +282,8 @@
ArenaBitVector sp_mask(&arena, 0, false);
uint32_t number_of_dex_registers = 2;
stream.BeginStackMapEntry(0, 64, 0x3, &sp_mask, number_of_dex_registers, 0);
- stream.AddDexRegisterEntry(0, Kind::kNone, 0); // No location.
- stream.AddDexRegisterEntry(1, Kind::kConstant, -2); // Large location.
+ stream.AddDexRegisterEntry(Kind::kNone, 0); // No location.
+ stream.AddDexRegisterEntry(Kind::kConstant, -2); // Large location.
stream.EndStackMapEntry();
size_t size = stream.PrepareForFillIn();
@@ -364,13 +369,13 @@
// as using a single value (in the whole CodeInfo object) would
// make this Dex register mapping data empty (see
// art::DexRegisterMap::SingleEntrySizeInBits).
- stream.AddDexRegisterEntry(i, Kind::kConstant, i % 2); // Short location.
+ stream.AddDexRegisterEntry(Kind::kConstant, i % 2); // Short location.
}
stream.EndStackMapEntry();
// Create the second stack map (and its Dex register map).
stream.BeginStackMapEntry(0, 64, 0x3, &sp_mask, number_of_dex_registers, 0);
for (uint32_t i = 0; i < number_of_dex_registers; ++i) {
- stream.AddDexRegisterEntry(i, Kind::kConstant, 0); // Short location.
+ stream.AddDexRegisterEntry(Kind::kConstant, 0); // Short location.
}
stream.EndStackMapEntry();
@@ -420,18 +425,18 @@
uint32_t number_of_dex_registers = 2;
// First stack map.
stream.BeginStackMapEntry(0, 64, 0x3, &sp_mask, number_of_dex_registers, 0);
- stream.AddDexRegisterEntry(0, Kind::kInRegister, 0); // Short location.
- stream.AddDexRegisterEntry(1, Kind::kConstant, -2); // Large location.
+ stream.AddDexRegisterEntry(Kind::kInRegister, 0); // Short location.
+ stream.AddDexRegisterEntry(Kind::kConstant, -2); // Large location.
stream.EndStackMapEntry();
// Second stack map, which should share the same dex register map.
stream.BeginStackMapEntry(0, 64, 0x3, &sp_mask, number_of_dex_registers, 0);
- stream.AddDexRegisterEntry(0, Kind::kInRegister, 0); // Short location.
- stream.AddDexRegisterEntry(1, Kind::kConstant, -2); // Large location.
+ stream.AddDexRegisterEntry(Kind::kInRegister, 0); // Short location.
+ stream.AddDexRegisterEntry(Kind::kConstant, -2); // Large location.
stream.EndStackMapEntry();
// Third stack map (doesn't share the dex register map).
stream.BeginStackMapEntry(0, 64, 0x3, &sp_mask, number_of_dex_registers, 0);
- stream.AddDexRegisterEntry(0, Kind::kInRegister, 2); // Short location.
- stream.AddDexRegisterEntry(1, Kind::kConstant, -2); // Large location.
+ stream.AddDexRegisterEntry(Kind::kInRegister, 2); // Short location.
+ stream.AddDexRegisterEntry(Kind::kConstant, -2); // Large location.
stream.EndStackMapEntry();
size_t size = stream.PrepareForFillIn();
@@ -500,4 +505,167 @@
ASSERT_FALSE(stack_map.HasInlineInfo(code_info));
}
+TEST(StackMapTest, InlineTest) {
+ ArenaPool pool;
+ ArenaAllocator arena(&pool);
+ StackMapStream stream(&arena);
+
+ ArenaBitVector sp_mask1(&arena, 0, true);
+ sp_mask1.SetBit(2);
+ sp_mask1.SetBit(4);
+
+ // First stack map.
+ stream.BeginStackMapEntry(0, 64, 0x3, &sp_mask1, 2, 2);
+ stream.AddDexRegisterEntry(Kind::kInStack, 0);
+ stream.AddDexRegisterEntry(Kind::kConstant, 4);
+
+ stream.BeginInlineInfoEntry(42, 2, 1);
+ stream.AddDexRegisterEntry(Kind::kInStack, 8);
+ stream.EndInlineInfoEntry();
+ stream.BeginInlineInfoEntry(82, 3, 3);
+ stream.AddDexRegisterEntry(Kind::kInStack, 16);
+ stream.AddDexRegisterEntry(Kind::kConstant, 20);
+ stream.AddDexRegisterEntry(Kind::kInRegister, 15);
+ stream.EndInlineInfoEntry();
+
+ stream.EndStackMapEntry();
+
+ // Second stack map.
+ stream.BeginStackMapEntry(2, 22, 0x3, &sp_mask1, 2, 3);
+ stream.AddDexRegisterEntry(Kind::kInStack, 56);
+ stream.AddDexRegisterEntry(Kind::kConstant, 0);
+
+ stream.BeginInlineInfoEntry(42, 2, 1);
+ stream.AddDexRegisterEntry(Kind::kInStack, 12);
+ stream.EndInlineInfoEntry();
+ stream.BeginInlineInfoEntry(82, 3, 3);
+ stream.AddDexRegisterEntry(Kind::kInStack, 80);
+ stream.AddDexRegisterEntry(Kind::kConstant, 10);
+ stream.AddDexRegisterEntry(Kind::kInRegister, 5);
+ stream.EndInlineInfoEntry();
+ stream.BeginInlineInfoEntry(52, 5, 0);
+ stream.EndInlineInfoEntry();
+
+ stream.EndStackMapEntry();
+
+ // Third stack map.
+ stream.BeginStackMapEntry(4, 56, 0x3, &sp_mask1, 2, 0);
+ stream.AddDexRegisterEntry(Kind::kNone, 0);
+ stream.AddDexRegisterEntry(Kind::kConstant, 4);
+ stream.EndStackMapEntry();
+
+ // Fourth stack map.
+ stream.BeginStackMapEntry(6, 78, 0x3, &sp_mask1, 2, 3);
+ stream.AddDexRegisterEntry(Kind::kInStack, 56);
+ stream.AddDexRegisterEntry(Kind::kConstant, 0);
+
+ stream.BeginInlineInfoEntry(42, 2, 0);
+ stream.EndInlineInfoEntry();
+ stream.BeginInlineInfoEntry(52, 5, 1);
+ stream.AddDexRegisterEntry(Kind::kInRegister, 2);
+ stream.EndInlineInfoEntry();
+ stream.BeginInlineInfoEntry(52, 10, 2);
+ stream.AddDexRegisterEntry(Kind::kNone, 0);
+ stream.AddDexRegisterEntry(Kind::kInRegister, 3);
+ stream.EndInlineInfoEntry();
+
+ stream.EndStackMapEntry();
+
+ size_t size = stream.PrepareForFillIn();
+ void* memory = arena.Alloc(size, kArenaAllocMisc);
+ MemoryRegion region(memory, size);
+ stream.FillIn(region);
+
+ CodeInfo ci(region);
+
+ {
+ // Verify first stack map.
+ StackMap sm0 = ci.GetStackMapAt(0);
+
+ DexRegisterMap dex_registers0 = ci.GetDexRegisterMapOf(sm0, 2);
+ ASSERT_EQ(0, dex_registers0.GetStackOffsetInBytes(0, 2, ci));
+ ASSERT_EQ(4, dex_registers0.GetConstant(1, 2, ci));
+
+ InlineInfo if0 = ci.GetInlineInfoOf(sm0);
+ ASSERT_EQ(2u, if0.GetDepth());
+ ASSERT_EQ(2u, if0.GetDexPcAtDepth(0));
+ ASSERT_EQ(42u, if0.GetMethodIndexAtDepth(0));
+ ASSERT_EQ(3u, if0.GetDexPcAtDepth(1));
+ ASSERT_EQ(82u, if0.GetMethodIndexAtDepth(1));
+
+ DexRegisterMap dex_registers1 = ci.GetDexRegisterMapAtDepth(0, if0, 1);
+ ASSERT_EQ(8, dex_registers1.GetStackOffsetInBytes(0, 1, ci));
+
+ DexRegisterMap dex_registers2 = ci.GetDexRegisterMapAtDepth(1, if0, 3);
+ ASSERT_EQ(16, dex_registers2.GetStackOffsetInBytes(0, 3, ci));
+ ASSERT_EQ(20, dex_registers2.GetConstant(1, 3, ci));
+ ASSERT_EQ(15, dex_registers2.GetMachineRegister(2, 3, ci));
+ }
+
+ {
+ // Verify second stack map.
+ StackMap sm1 = ci.GetStackMapAt(1);
+
+ DexRegisterMap dex_registers0 = ci.GetDexRegisterMapOf(sm1, 2);
+ ASSERT_EQ(56, dex_registers0.GetStackOffsetInBytes(0, 2, ci));
+ ASSERT_EQ(0, dex_registers0.GetConstant(1, 2, ci));
+
+ InlineInfo if1 = ci.GetInlineInfoOf(sm1);
+ ASSERT_EQ(3u, if1.GetDepth());
+ ASSERT_EQ(2u, if1.GetDexPcAtDepth(0));
+ ASSERT_EQ(42u, if1.GetMethodIndexAtDepth(0));
+ ASSERT_EQ(3u, if1.GetDexPcAtDepth(1));
+ ASSERT_EQ(82u, if1.GetMethodIndexAtDepth(1));
+ ASSERT_EQ(5u, if1.GetDexPcAtDepth(2));
+ ASSERT_EQ(52u, if1.GetMethodIndexAtDepth(2));
+
+ DexRegisterMap dex_registers1 = ci.GetDexRegisterMapAtDepth(0, if1, 1);
+ ASSERT_EQ(12, dex_registers1.GetStackOffsetInBytes(0, 1, ci));
+
+ DexRegisterMap dex_registers2 = ci.GetDexRegisterMapAtDepth(1, if1, 3);
+ ASSERT_EQ(80, dex_registers2.GetStackOffsetInBytes(0, 3, ci));
+ ASSERT_EQ(10, dex_registers2.GetConstant(1, 3, ci));
+ ASSERT_EQ(5, dex_registers2.GetMachineRegister(2, 3, ci));
+
+ ASSERT_FALSE(if1.HasDexRegisterMapAtDepth(2));
+ }
+
+ {
+ // Verify third stack map.
+ StackMap sm2 = ci.GetStackMapAt(2);
+
+ DexRegisterMap dex_registers0 = ci.GetDexRegisterMapOf(sm2, 2);
+ ASSERT_FALSE(dex_registers0.IsDexRegisterLive(0));
+ ASSERT_EQ(4, dex_registers0.GetConstant(1, 2, ci));
+ ASSERT_FALSE(sm2.HasInlineInfo(ci));
+ }
+
+ {
+ // Verify fourth stack map.
+ StackMap sm3 = ci.GetStackMapAt(3);
+
+ DexRegisterMap dex_registers0 = ci.GetDexRegisterMapOf(sm3, 2);
+ ASSERT_EQ(56, dex_registers0.GetStackOffsetInBytes(0, 2, ci));
+ ASSERT_EQ(0, dex_registers0.GetConstant(1, 2, ci));
+
+ InlineInfo if2 = ci.GetInlineInfoOf(sm3);
+ ASSERT_EQ(3u, if2.GetDepth());
+ ASSERT_EQ(2u, if2.GetDexPcAtDepth(0));
+ ASSERT_EQ(42u, if2.GetMethodIndexAtDepth(0));
+ ASSERT_EQ(5u, if2.GetDexPcAtDepth(1));
+ ASSERT_EQ(52u, if2.GetMethodIndexAtDepth(1));
+ ASSERT_EQ(10u, if2.GetDexPcAtDepth(2));
+ ASSERT_EQ(52u, if2.GetMethodIndexAtDepth(2));
+
+ ASSERT_FALSE(if2.HasDexRegisterMapAtDepth(0));
+
+ DexRegisterMap dex_registers1 = ci.GetDexRegisterMapAtDepth(1, if2, 1);
+ ASSERT_EQ(2, dex_registers1.GetMachineRegister(0, 1, ci));
+
+ DexRegisterMap dex_registers2 = ci.GetDexRegisterMapAtDepth(2, if2, 2);
+ ASSERT_FALSE(dex_registers2.IsDexRegisterLive(0));
+ ASSERT_EQ(3, dex_registers2.GetMachineRegister(1, 2, ci));
+ }
+}
+
} // namespace art
diff --git a/runtime/stack_map.cc b/runtime/stack_map.cc
index 11e7e44..6a0c07d 100644
--- a/runtime/stack_map.cc
+++ b/runtime/stack_map.cc
@@ -257,21 +257,48 @@
DumpStackMapHeader(os, i);
if (stack_map.HasDexRegisterMap(*this)) {
DexRegisterMap dex_register_map = GetDexRegisterMapOf(stack_map, number_of_dex_registers);
- // TODO: Display the bit mask of live Dex registers.
- for (size_t j = 0; j < number_of_dex_registers; ++j) {
- if (dex_register_map.IsDexRegisterLive(j)) {
- size_t location_catalog_entry_index = dex_register_map.GetLocationCatalogEntryIndex(
- j, number_of_dex_registers, number_of_location_catalog_entries);
- DexRegisterLocation location =
- dex_register_map.GetDexRegisterLocation(j, number_of_dex_registers, *this);
- DumpRegisterMapping(
- os, j, location, "v",
- "\t[entry " + std::to_string(static_cast<int>(location_catalog_entry_index)) + "]");
- }
- }
+ dex_register_map.Dump(os, *this, number_of_dex_registers);
}
}
- // TODO: Dump the stack map's inline information.
+ // TODO: Dump the stack map's inline information? We need to know more from the caller:
+ // we need to know the number of dex registers for each inlined method.
+}
+
+void DexRegisterMap::Dump(std::ostream& os,
+ const CodeInfo& code_info,
+ uint16_t number_of_dex_registers) const {
+ size_t number_of_location_catalog_entries =
+ code_info.GetNumberOfDexRegisterLocationCatalogEntries();
+ // TODO: Display the bit mask of live Dex registers.
+ for (size_t j = 0; j < number_of_dex_registers; ++j) {
+ if (IsDexRegisterLive(j)) {
+ size_t location_catalog_entry_index = GetLocationCatalogEntryIndex(
+ j, number_of_dex_registers, number_of_location_catalog_entries);
+ DexRegisterLocation location = GetDexRegisterLocation(j, number_of_dex_registers, code_info);
+ DumpRegisterMapping(
+ os, j, location, "v",
+ "\t[entry " + std::to_string(static_cast<int>(location_catalog_entry_index)) + "]");
+ }
+ }
+}
+
+void InlineInfo::Dump(std::ostream& os,
+ const CodeInfo& code_info,
+ uint16_t number_of_dex_registers[]) const {
+ os << "InlineInfo with depth " << static_cast<uint32_t>(GetDepth()) << "\n";
+
+ for (size_t i = 0; i < GetDepth(); ++i) {
+ os << " At depth " << i
+ << std::hex
+ << " (dex_pc=0x" << GetDexPcAtDepth(i)
+ << ", method_index=0x" << GetMethodIndexAtDepth(i)
+ << ")\n";
+ if (HasDexRegisterMapAtDepth(i)) {
+ DexRegisterMap dex_register_map =
+ code_info.GetDexRegisterMapAtDepth(i, *this, number_of_dex_registers[i]);
+ dex_register_map.Dump(os, code_info, number_of_dex_registers[i]);
+ }
+ }
}
} // namespace art
diff --git a/runtime/stack_map.h b/runtime/stack_map.h
index f68cafe..16ae772 100644
--- a/runtime/stack_map.h
+++ b/runtime/stack_map.h
@@ -39,47 +39,6 @@
* their own fields.
*/
-/**
- * Inline information for a specific PC. The information is of the form:
- * [inlining_depth, [method_dex reference]+]
- */
-class InlineInfo {
- public:
- explicit InlineInfo(MemoryRegion region) : region_(region) {}
-
- uint8_t GetDepth() const {
- return region_.LoadUnaligned<uint8_t>(kDepthOffset);
- }
-
- void SetDepth(uint8_t depth) {
- region_.StoreUnaligned<uint8_t>(kDepthOffset, depth);
- }
-
- uint32_t GetMethodReferenceIndexAtDepth(uint8_t depth) const {
- return region_.LoadUnaligned<uint32_t>(kFixedSize + depth * SingleEntrySize());
- }
-
- void SetMethodReferenceIndexAtDepth(uint8_t depth, uint32_t index) {
- region_.StoreUnaligned<uint32_t>(kFixedSize + depth * SingleEntrySize(), index);
- }
-
- static size_t SingleEntrySize() {
- return sizeof(uint32_t);
- }
-
- private:
- // TODO: Instead of plain types such as "uint8_t", introduce
- // typedefs (and document the memory layout of InlineInfo).
- static constexpr int kDepthOffset = 0;
- static constexpr int kFixedSize = kDepthOffset + sizeof(uint8_t);
-
- MemoryRegion region_;
-
- friend class CodeInfo;
- friend class StackMap;
- friend class StackMapStream;
-};
-
// Dex register location container used by DexRegisterMap and StackMapStream.
class DexRegisterLocation {
public:
@@ -506,7 +465,8 @@
const CodeInfo& code_info) const {
DexRegisterLocation location =
GetDexRegisterLocation(dex_register_number, number_of_dex_registers, code_info);
- DCHECK(location.GetKind() == DexRegisterLocation::Kind::kConstant);
+ DCHECK(location.GetKind() == DexRegisterLocation::Kind::kConstant)
+ << DexRegisterLocation::PrettyDescriptor(location.GetKind());
return location.GetValue();
}
@@ -641,6 +601,8 @@
return region_.size();
}
+ void Dump(std::ostream& os, const CodeInfo& code_info, uint16_t number_of_dex_registers) const;
+
private:
// Return the index in the Dex register map corresponding to the Dex
// register number `dex_register_number`.
@@ -675,9 +637,6 @@
* The information is of the form:
* [dex_pc, native_pc_offset, dex_register_map_offset, inlining_info_offset, register_mask,
* stack_mask].
- *
- * Note that register_mask is fixed size, but stack_mask is variable size, depending on the
- * stack size of a method.
*/
class StackMap {
public:
@@ -759,6 +718,72 @@
friend class StackMapStream;
};
+/**
+ * Inline information for a specific PC. The information is of the form:
+ * [inlining_depth, [method_index, dex_pc, dex_register_map_offset]+]
+ */
+class InlineInfo {
+ public:
+ explicit InlineInfo(MemoryRegion region) : region_(region) {}
+
+ uint8_t GetDepth() const {
+ return region_.LoadUnaligned<uint8_t>(kDepthOffset);
+ }
+
+ void SetDepth(uint8_t depth) {
+ region_.StoreUnaligned<uint8_t>(kDepthOffset, depth);
+ }
+
+ uint32_t GetMethodIndexAtDepth(uint8_t depth) const {
+ return region_.LoadUnaligned<uint32_t>(kFixedSize + depth * SingleEntrySize());
+ }
+
+ void SetMethodIndexAtDepth(uint8_t depth, uint32_t index) {
+ region_.StoreUnaligned<uint32_t>(kFixedSize + depth * SingleEntrySize(), index);
+ }
+
+ uint32_t GetDexPcAtDepth(uint8_t depth) const {
+ return region_.LoadUnaligned<uint32_t>(
+ kFixedSize + depth * SingleEntrySize() + sizeof(uint32_t));
+ }
+
+ void SetDexPcAtDepth(uint8_t depth, uint32_t dex_pc) {
+ region_.StoreUnaligned<uint32_t>(
+ kFixedSize + depth * SingleEntrySize() + sizeof(uint32_t), dex_pc);
+ }
+
+ uint32_t GetDexRegisterMapOffsetAtDepth(uint8_t depth) const {
+ return region_.LoadUnaligned<uint32_t>(
+ kFixedSize + depth * SingleEntrySize() + sizeof(uint32_t) + sizeof(uint32_t));
+ }
+
+ void SetDexRegisterMapOffsetAtDepth(uint8_t depth, uint32_t offset) {
+ region_.StoreUnaligned<uint32_t>(
+ kFixedSize + depth * SingleEntrySize() + sizeof(uint32_t) + sizeof(uint32_t), offset);
+ }
+
+ bool HasDexRegisterMapAtDepth(uint8_t depth) const {
+ return GetDexRegisterMapOffsetAtDepth(depth) != StackMap::kNoDexRegisterMap;
+ }
+
+ static size_t SingleEntrySize() {
+ return sizeof(uint32_t) + sizeof(uint32_t) + sizeof(uint32_t);
+ }
+
+ void Dump(std::ostream& os, const CodeInfo& code_info, uint16_t* number_of_dex_registers) const;
+
+ private:
+ // TODO: Instead of plain types such as "uint8_t", introduce
+ // typedefs (and document the memory layout of InlineInfo).
+ static constexpr int kDepthOffset = 0;
+ static constexpr int kFixedSize = kDepthOffset + sizeof(uint8_t);
+
+ MemoryRegion region_;
+
+ friend class CodeInfo;
+ friend class StackMap;
+ friend class StackMapStream;
+};
/**
* Wrapper around all compiler information collected for a method.
@@ -960,6 +985,17 @@
return DexRegisterMap(region_.Subregion(offset, size));
}
+ // Return the `DexRegisterMap` pointed by `inline_info` at depth `depth`.
+ DexRegisterMap GetDexRegisterMapAtDepth(uint8_t depth,
+ InlineInfo inline_info,
+ uint32_t number_of_dex_registers) const {
+ DCHECK(inline_info.HasDexRegisterMapAtDepth(depth));
+ uint32_t offset =
+ GetDexRegisterMapsOffset() + inline_info.GetDexRegisterMapOffsetAtDepth(depth);
+ size_t size = ComputeDexRegisterMapSizeOf(offset, number_of_dex_registers);
+ return DexRegisterMap(region_.Subregion(offset, size));
+ }
+
InlineInfo GetInlineInfoOf(StackMap stack_map) const {
DCHECK(stack_map.HasInlineInfo(*this));
uint32_t offset = stack_map.GetInlineDescriptorOffset(*this) + GetDexRegisterMapsOffset();