am 11881488: am ba16b079: More native bridge tests

* commit '1188148803f707c930532a43b3f4bf65bcdfa7aa':
  More native bridge tests
diff --git a/runtime/indirect_reference_table-inl.h b/runtime/indirect_reference_table-inl.h
index c826716..2bf6ab9 100644
--- a/runtime/indirect_reference_table-inl.h
+++ b/runtime/indirect_reference_table-inl.h
@@ -46,7 +46,7 @@
     AbortIfNoCheckJNI();
     return false;
   }
-  if (UNLIKELY(table_[idx].IsNull())) {
+  if (UNLIKELY(table_[idx].GetReference()->IsNull())) {
     LOG(ERROR) << "JNI ERROR (app bug): accessed deleted " << kind_ << " " << iref;
     AbortIfNoCheckJNI();
     return false;
@@ -76,10 +76,10 @@
     return kInvalidIndirectRefObject;
   }
   uint32_t idx = ExtractIndex(iref);
-  mirror::Object* obj = table_[idx].Read<kWithoutReadBarrier>();
+  mirror::Object* obj = table_[idx].GetReference()->Read<kWithoutReadBarrier>();
   if (LIKELY(obj != kClearedJniWeakGlobal)) {
     // The read barrier or VerifyObject won't handle kClearedJniWeakGlobal.
-    obj = table_[idx].Read();
+    obj = table_[idx].GetReference()->Read();
     VerifyObject(obj);
   }
   return obj;
diff --git a/runtime/indirect_reference_table.cc b/runtime/indirect_reference_table.cc
index 9b2b82e..2b1a257 100644
--- a/runtime/indirect_reference_table.cc
+++ b/runtime/indirect_reference_table.cc
@@ -63,34 +63,22 @@
 }
 
 IndirectReferenceTable::IndirectReferenceTable(size_t initialCount,
-                                               size_t maxCount, IndirectRefKind desiredKind) {
+                                               size_t maxCount, IndirectRefKind desiredKind)
+    : kind_(desiredKind),
+      max_entries_(maxCount) {
   CHECK_GT(initialCount, 0U);
   CHECK_LE(initialCount, maxCount);
   CHECK_NE(desiredKind, kHandleScopeOrInvalid);
 
   std::string error_str;
-  const size_t initial_bytes = initialCount * sizeof(const mirror::Object*);
-  const size_t table_bytes = maxCount * sizeof(const mirror::Object*);
+  const size_t table_bytes = maxCount * sizeof(IrtEntry);
   table_mem_map_.reset(MemMap::MapAnonymous("indirect ref table", nullptr, table_bytes,
                                             PROT_READ | PROT_WRITE, false, &error_str));
   CHECK(table_mem_map_.get() != nullptr) << error_str;
   CHECK_EQ(table_mem_map_->Size(), table_bytes);
-
-  table_ = reinterpret_cast<GcRoot<mirror::Object>*>(table_mem_map_->Begin());
+  table_ = reinterpret_cast<IrtEntry*>(table_mem_map_->Begin());
   CHECK(table_ != nullptr);
-  memset(table_, 0xd1, initial_bytes);
-
-  const size_t slot_bytes = maxCount * sizeof(IndirectRefSlot);
-  slot_mem_map_.reset(MemMap::MapAnonymous("indirect ref table slots", nullptr, slot_bytes,
-                                           PROT_READ | PROT_WRITE, false, &error_str));
-  CHECK(slot_mem_map_.get() != nullptr) << error_str;
-  slot_data_ = reinterpret_cast<IndirectRefSlot*>(slot_mem_map_->Begin());
-  CHECK(slot_data_ != nullptr);
-
   segment_state_.all = IRT_FIRST_SEGMENT;
-  alloc_entries_ = initialCount;
-  max_entries_ = maxCount;
-  kind_ = desiredKind;
 }
 
 IndirectReferenceTable::~IndirectReferenceTable() {
@@ -104,24 +92,12 @@
   CHECK(obj != NULL);
   VerifyObject(obj);
   DCHECK(table_ != NULL);
-  DCHECK_LE(alloc_entries_, max_entries_);
   DCHECK_GE(segment_state_.parts.numHoles, prevState.parts.numHoles);
 
-  if (topIndex == alloc_entries_) {
-    // reached end of allocated space; did we hit buffer max?
-    if (topIndex == max_entries_) {
-      LOG(FATAL) << "JNI ERROR (app bug): " << kind_ << " table overflow "
-                 << "(max=" << max_entries_ << ")\n"
-                 << MutatorLockedDumpable<IndirectReferenceTable>(*this);
-    }
-
-    size_t newSize = alloc_entries_ * 2;
-    if (newSize > max_entries_) {
-      newSize = max_entries_;
-    }
-    DCHECK_GT(newSize, alloc_entries_);
-
-    alloc_entries_ = newSize;
+  if (topIndex == max_entries_) {
+    LOG(FATAL) << "JNI ERROR (app bug): " << kind_ << " table overflow "
+               << "(max=" << max_entries_ << ")\n"
+               << MutatorLockedDumpable<IndirectReferenceTable>(*this);
   }
 
   // We know there's enough room in the table.  Now we just need to find
@@ -129,27 +105,26 @@
   // add to the end of the list.
   IndirectRef result;
   int numHoles = segment_state_.parts.numHoles - prevState.parts.numHoles;
+  size_t index;
   if (numHoles > 0) {
     DCHECK_GT(topIndex, 1U);
     // Find the first hole; likely to be near the end of the list.
-    GcRoot<mirror::Object>* pScan = &table_[topIndex - 1];
-    DCHECK(!pScan->IsNull());
+    IrtEntry* pScan = &table_[topIndex - 1];
+    DCHECK(!pScan->GetReference()->IsNull());
     --pScan;
-    while (!pScan->IsNull()) {
+    while (!pScan->GetReference()->IsNull()) {
       DCHECK_GE(pScan, table_ + prevState.parts.topIndex);
       --pScan;
     }
-    UpdateSlotAdd(obj, pScan - table_);
-    result = ToIndirectRef(pScan - table_);
-    *pScan = GcRoot<mirror::Object>(obj);
+    index = pScan - table_;
     segment_state_.parts.numHoles--;
   } else {
     // Add to the end.
-    UpdateSlotAdd(obj, topIndex);
-    result = ToIndirectRef(topIndex);
-    table_[topIndex++] = GcRoot<mirror::Object>(obj);
+    index = topIndex++;
     segment_state_.parts.topIndex = topIndex;
   }
+  table_[index].Add(obj);
+  result = ToIndirectRef(index);
   if (false) {
     LOG(INFO) << "+++ added at " << ExtractIndex(result) << " top=" << segment_state_.parts.topIndex
               << " holes=" << segment_state_.parts.numHoles;
@@ -182,7 +157,6 @@
   int bottomIndex = prevState.parts.topIndex;
 
   DCHECK(table_ != NULL);
-  DCHECK_LE(alloc_entries_, max_entries_);
   DCHECK_GE(segment_state_.parts.numHoles, prevState.parts.numHoles);
 
   int idx = ExtractIndex(iref);
@@ -192,7 +166,6 @@
     LOG(WARNING) << "Attempt to remove local handle scope entry from IRT, ignoring";
     return true;
   }
-
   if (idx < bottomIndex) {
     // Wrong segment.
     LOG(WARNING) << "Attempt to remove index outside index area (" << idx
@@ -206,23 +179,23 @@
     return false;
   }
 
-  if (idx == topIndex-1) {
+  if (idx == topIndex - 1) {
     // Top-most entry.  Scan up and consume holes.
 
     if (!CheckEntry("remove", iref, idx)) {
       return false;
     }
 
-    table_[idx] = GcRoot<mirror::Object>(nullptr);
+    *table_[idx].GetReference() = GcRoot<mirror::Object>(nullptr);
     int numHoles = segment_state_.parts.numHoles - prevState.parts.numHoles;
     if (numHoles != 0) {
       while (--topIndex > bottomIndex && numHoles != 0) {
         if (false) {
-          LOG(INFO) << "+++ checking for hole at " << topIndex-1
+          LOG(INFO) << "+++ checking for hole at " << topIndex - 1
                     << " (cookie=" << cookie << ") val="
-                    << table_[topIndex - 1].Read<kWithoutReadBarrier>();
+                    << table_[topIndex - 1].GetReference()->Read<kWithoutReadBarrier>();
         }
-        if (!table_[topIndex-1].IsNull()) {
+        if (!table_[topIndex - 1].GetReference()->IsNull()) {
           break;
         }
         if (false) {
@@ -242,7 +215,7 @@
     // Not the top-most entry.  This creates a hole.  We NULL out the
     // entry to prevent somebody from deleting it twice and screwing up
     // the hole count.
-    if (table_[idx].IsNull()) {
+    if (table_[idx].GetReference()->IsNull()) {
       LOG(INFO) << "--- WEIRD: removing null entry " << idx;
       return false;
     }
@@ -250,7 +223,7 @@
       return false;
     }
 
-    table_[idx] = GcRoot<mirror::Object>(nullptr);
+    *table_[idx].GetReference() = GcRoot<mirror::Object>(nullptr);
     segment_state_.parts.numHoles++;
     if (false) {
       LOG(INFO) << "+++ left hole at " << idx << ", holes=" << segment_state_.parts.numHoles;
@@ -272,7 +245,7 @@
   os << kind_ << " table dump:\n";
   ReferenceTable::Table entries;
   for (size_t i = 0; i < Capacity(); ++i) {
-    mirror::Object* obj = table_[i].Read<kWithoutReadBarrier>();
+    mirror::Object* obj = table_[i].GetReference()->Read<kWithoutReadBarrier>();
     if (UNLIKELY(obj == nullptr)) {
       // Remove NULLs.
     } else if (UNLIKELY(obj == kClearedJniWeakGlobal)) {
@@ -280,7 +253,7 @@
       // while the read barrier won't.
       entries.push_back(GcRoot<mirror::Object>(obj));
     } else {
-      obj = table_[i].Read();
+      obj = table_[i].GetReference()->Read();
       entries.push_back(GcRoot<mirror::Object>(obj));
     }
   }
diff --git a/runtime/indirect_reference_table.h b/runtime/indirect_reference_table.h
index fb910e2..5a178ea 100644
--- a/runtime/indirect_reference_table.h
+++ b/runtime/indirect_reference_table.h
@@ -127,16 +127,6 @@
   return static_cast<IndirectRefKind>(reinterpret_cast<uintptr_t>(iref) & 0x03);
 }
 
-/*
- * Extended debugging structure.  We keep a parallel array of these, one
- * per slot in the table.
- */
-static const size_t kIRTPrevCount = 4;
-struct IndirectRefSlot {
-  uint32_t serial;
-  const mirror::Object* previous[kIRTPrevCount];
-};
-
 /* use as initial value for "cookie", and when table has only one segment */
 static const uint32_t IRT_FIRST_SEGMENT = 0;
 
@@ -203,9 +193,35 @@
   } parts;
 };
 
+// Try to choose kIRTPrevCount so that sizeof(IrtEntry) is a power of 2.
+// Contains multiple entries but only one active one; this helps us detect use-after-free errors
+// since the serial stored in the indirect ref won't match.
+static const size_t kIRTPrevCount = kIsDebugBuild ? 7 : 3;
+class PACKED(4) IrtEntry {
+ public:
+  void Add(mirror::Object* obj) SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
+    ++serial_;
+    if (serial_ == kIRTPrevCount) {
+      serial_ = 0;
+    }
+    references_[serial_] = GcRoot<mirror::Object>(obj);
+  }
+  GcRoot<mirror::Object>* GetReference() {
+    DCHECK_LT(serial_, kIRTPrevCount);
+    return &references_[serial_];
+  }
+  uint32_t GetSerial() const {
+    return serial_;
+  }
+
+ private:
+  uint32_t serial_;
+  GcRoot<mirror::Object> references_[kIRTPrevCount];
+};
+
 class IrtIterator {
  public:
-  explicit IrtIterator(GcRoot<mirror::Object>* table, size_t i, size_t capacity)
+  explicit IrtIterator(IrtEntry* table, size_t i, size_t capacity)
       SHARED_LOCKS_REQUIRED(Locks::mutator_lock_)
       : table_(table), i_(i), capacity_(capacity) {
     SkipNullsAndTombstones();
@@ -219,7 +235,7 @@
 
   mirror::Object** operator*() {
     // This does not have a read barrier as this is used to visit roots.
-    return table_[i_].AddressWithoutBarrier();
+    return table_[i_].GetReference()->AddressWithoutBarrier();
   }
 
   bool equals(const IrtIterator& rhs) const {
@@ -230,13 +246,13 @@
   void SkipNullsAndTombstones() SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
     // We skip NULLs and tombstones. Clients don't want to see implementation details.
     while (i_ < capacity_ &&
-           (table_[i_].IsNull() ||
-            table_[i_].Read<kWithoutReadBarrier>() == kClearedJniWeakGlobal)) {
+           (table_[i_].GetReference()->IsNull() ||
+            table_[i_].GetReference()->Read<kWithoutReadBarrier>() == kClearedJniWeakGlobal)) {
       ++i_;
     }
   }
 
-  GcRoot<mirror::Object>* const table_;
+  IrtEntry* const table_;
   size_t i_;
   size_t capacity_;
 };
@@ -329,9 +345,7 @@
   }
 
  private:
-  /*
-   * Extract the table index from an indirect reference.
-   */
+  // Extract the table index from an indirect reference.
   static uint32_t ExtractIndex(IndirectRef iref) {
     uintptr_t uref = reinterpret_cast<uintptr_t>(iref);
     return (uref >> 2) & 0xffff;
@@ -343,25 +357,11 @@
    */
   IndirectRef ToIndirectRef(uint32_t tableIndex) const {
     DCHECK_LT(tableIndex, 65536U);
-    uint32_t serialChunk = slot_data_[tableIndex].serial;
-    uintptr_t uref = serialChunk << 20 | (tableIndex << 2) | kind_;
+    uint32_t serialChunk = table_[tableIndex].GetSerial();
+    uintptr_t uref = (serialChunk << 20) | (tableIndex << 2) | kind_;
     return reinterpret_cast<IndirectRef>(uref);
   }
 
-  /*
-   * Update extended debug info when an entry is added.
-   *
-   * We advance the serial number, invalidating any outstanding references to
-   * this slot.
-   */
-  void UpdateSlotAdd(const mirror::Object* obj, int slot) {
-    if (slot_data_ != NULL) {
-      IndirectRefSlot* pSlot = &slot_data_[slot];
-      pSlot->serial++;
-      pSlot->previous[pSlot->serial % kIRTPrevCount] = obj;
-    }
-  }
-
   // Abort if check_jni is not enabled.
   static void AbortIfNoCheckJNI();
 
@@ -374,19 +374,13 @@
 
   // Mem map where we store the indirect refs.
   std::unique_ptr<MemMap> table_mem_map_;
-  // Mem map where we store the extended debugging info.
-  std::unique_ptr<MemMap> slot_mem_map_;
   // bottom of the stack. Do not directly access the object references
   // in this as they are roots. Use Get() that has a read barrier.
-  GcRoot<mirror::Object>* table_;
+  IrtEntry* table_;
   /* bit mask, ORed into all irefs */
-  IndirectRefKind kind_;
-  /* extended debugging info */
-  IndirectRefSlot* slot_data_;
-  /* #of entries we have space for */
-  size_t alloc_entries_;
+  const IndirectRefKind kind_;
   /* max #of entries allowed */
-  size_t max_entries_;
+  const size_t max_entries_;
 };
 
 }  // namespace art