changeset 2501:2d4b2b833d29

7033141: assert(has_cp_cache(i)) failed: oob Summary: Unrewrite bytecodes for OOM error allocating the constant pool cache. Reviewed-by: dcubed, acorn, never
author coleenp
date Fri, 27 May 2011 15:04:48 -0700
parents 7db2b9499c36
children 8cbcd406c42e
files src/share/vm/interpreter/rewriter.cpp src/share/vm/interpreter/rewriter.hpp src/share/vm/oops/instanceKlass.cpp src/share/vm/oops/instanceKlass.hpp src/share/vm/oops/methodOop.cpp src/share/vm/prims/jvmtiRedefineClasses.cpp src/share/vm/prims/methodHandleWalk.cpp
diffstat 7 files changed, 164 insertions(+), 66 deletions(-) [+]
line wrap: on
line diff
--- a/src/share/vm/interpreter/rewriter.cpp	Wed May 25 16:04:09 2011 -0700
+++ b/src/share/vm/interpreter/rewriter.cpp	Fri May 27 15:04:48 2011 -0700
@@ -63,6 +63,15 @@
   _have_invoke_dynamic = ((tag_mask & (1 << JVM_CONSTANT_InvokeDynamic)) != 0);
 }
 
+// Unrewrite the bytecodes if an error occurs.
+void Rewriter::restore_bytecodes() {
+  int len = _methods->length();
+
+  for (int i = len-1; i >= 0; i--) {
+    methodOop method = (methodOop)_methods->obj_at(i);
+    scan_method(method, true);
+  }
+}
 
 // Creates a constant pool cache given a CPC map
 void Rewriter::make_constant_pool_cache(TRAPS) {
@@ -133,57 +142,94 @@
 
 
 // Rewrite a classfile-order CP index into a native-order CPC index.
-void Rewriter::rewrite_member_reference(address bcp, int offset) {
+void Rewriter::rewrite_member_reference(address bcp, int offset, bool reverse) {
   address p = bcp + offset;
-  int  cp_index    = Bytes::get_Java_u2(p);
-  int  cache_index = cp_entry_to_cp_cache(cp_index);
-  Bytes::put_native_u2(p, cache_index);
+  if (!reverse) {
+    int  cp_index    = Bytes::get_Java_u2(p);
+    int  cache_index = cp_entry_to_cp_cache(cp_index);
+    Bytes::put_native_u2(p, cache_index);
+  } else {
+    int cache_index = Bytes::get_native_u2(p);
+    int pool_index = cp_cache_entry_pool_index(cache_index);
+    Bytes::put_Java_u2(p, pool_index);
+  }
 }
 
 
-void Rewriter::rewrite_invokedynamic(address bcp, int offset) {
+void Rewriter::rewrite_invokedynamic(address bcp, int offset, bool reverse) {
   address p = bcp + offset;
-  assert(p[-1] == Bytecodes::_invokedynamic, "");
-  int cp_index = Bytes::get_Java_u2(p);
-  int cpc  = maybe_add_cp_cache_entry(cp_index);  // add lazily
-  int cpc2 = add_secondary_cp_cache_entry(cpc);
+  assert(p[-1] == Bytecodes::_invokedynamic, "not invokedynamic bytecode");
+  if (!reverse) {
+    int cp_index = Bytes::get_Java_u2(p);
+    int cpc  = maybe_add_cp_cache_entry(cp_index);  // add lazily
+    int cpc2 = add_secondary_cp_cache_entry(cpc);
 
-  // Replace the trailing four bytes with a CPC index for the dynamic
-  // call site.  Unlike other CPC entries, there is one per bytecode,
-  // not just one per distinct CP entry.  In other words, the
-  // CPC-to-CP relation is many-to-one for invokedynamic entries.
-  // This means we must use a larger index size than u2 to address
-  // all these entries.  That is the main reason invokedynamic
-  // must have a five-byte instruction format.  (Of course, other JVM
-  // implementations can use the bytes for other purposes.)
-  Bytes::put_native_u4(p, constantPoolCacheOopDesc::encode_secondary_index(cpc2));
-  // Note: We use native_u4 format exclusively for 4-byte indexes.
+    // Replace the trailing four bytes with a CPC index for the dynamic
+    // call site.  Unlike other CPC entries, there is one per bytecode,
+    // not just one per distinct CP entry.  In other words, the
+    // CPC-to-CP relation is many-to-one for invokedynamic entries.
+    // This means we must use a larger index size than u2 to address
+    // all these entries.  That is the main reason invokedynamic
+    // must have a five-byte instruction format.  (Of course, other JVM
+    // implementations can use the bytes for other purposes.)
+    Bytes::put_native_u4(p, constantPoolCacheOopDesc::encode_secondary_index(cpc2));
+    // Note: We use native_u4 format exclusively for 4-byte indexes.
+  } else {
+    int cache_index = constantPoolCacheOopDesc::decode_secondary_index(
+                        Bytes::get_native_u4(p));
+    int secondary_index = cp_cache_secondary_entry_main_index(cache_index);
+    int pool_index = cp_cache_entry_pool_index(secondary_index);
+    assert(_pool->tag_at(pool_index).is_invoke_dynamic(), "wrong index");
+    // zero out 4 bytes
+    Bytes::put_Java_u4(p, 0);
+    Bytes::put_Java_u2(p, pool_index);
+  }
 }
 
 
 // Rewrite some ldc bytecodes to _fast_aldc
-void Rewriter::maybe_rewrite_ldc(address bcp, int offset, bool is_wide) {
-  assert((*bcp) == (is_wide ? Bytecodes::_ldc_w : Bytecodes::_ldc), "");
-  address p = bcp + offset;
-  int cp_index = is_wide ? Bytes::get_Java_u2(p) : (u1)(*p);
-  constantTag tag = _pool->tag_at(cp_index).value();
-  if (tag.is_method_handle() || tag.is_method_type()) {
-    int cache_index = cp_entry_to_cp_cache(cp_index);
-    if (is_wide) {
-      (*bcp) = Bytecodes::_fast_aldc_w;
-      assert(cache_index == (u2)cache_index, "");
-      Bytes::put_native_u2(p, cache_index);
-    } else {
-      (*bcp) = Bytecodes::_fast_aldc;
-      assert(cache_index == (u1)cache_index, "");
-      (*p) = (u1)cache_index;
+void Rewriter::maybe_rewrite_ldc(address bcp, int offset, bool is_wide,
+                                 bool reverse) {
+  if (!reverse) {
+    assert((*bcp) == (is_wide ? Bytecodes::_ldc_w : Bytecodes::_ldc), "not ldc bytecode");
+    address p = bcp + offset;
+    int cp_index = is_wide ? Bytes::get_Java_u2(p) : (u1)(*p);
+    constantTag tag = _pool->tag_at(cp_index).value();
+    if (tag.is_method_handle() || tag.is_method_type()) {
+      int cache_index = cp_entry_to_cp_cache(cp_index);
+      if (is_wide) {
+        (*bcp) = Bytecodes::_fast_aldc_w;
+        assert(cache_index == (u2)cache_index, "index overflow");
+        Bytes::put_native_u2(p, cache_index);
+      } else {
+        (*bcp) = Bytecodes::_fast_aldc;
+        assert(cache_index == (u1)cache_index, "index overflow");
+        (*p) = (u1)cache_index;
+      }
+    }
+  } else {
+    Bytecodes::Code rewritten_bc =
+              (is_wide ? Bytecodes::_fast_aldc_w : Bytecodes::_fast_aldc);
+    if ((*bcp) == rewritten_bc) {
+      address p = bcp + offset;
+      int cache_index = is_wide ? Bytes::get_native_u2(p) : (u1)(*p);
+      int pool_index = cp_cache_entry_pool_index(cache_index);
+      if (is_wide) {
+        (*bcp) = Bytecodes::_ldc_w;
+        assert(pool_index == (u2)pool_index, "index overflow");
+        Bytes::put_Java_u2(p, pool_index);
+      } else {
+        (*bcp) = Bytecodes::_ldc;
+        assert(pool_index == (u1)pool_index, "index overflow");
+        (*p) = (u1)pool_index;
+      }
     }
   }
 }
 
 
 // Rewrites a method given the index_map information
-void Rewriter::scan_method(methodOop method) {
+void Rewriter::scan_method(methodOop method, bool reverse) {
 
   int nof_jsrs = 0;
   bool has_monitor_bytecodes = false;
@@ -236,6 +282,13 @@
 #endif
           break;
         }
+        case Bytecodes::_fast_linearswitch:
+        case Bytecodes::_fast_binaryswitch: {
+#ifndef CC_INTERP
+          (*bcp) = Bytecodes::_lookupswitch;
+#endif
+          break;
+        }
         case Bytecodes::_getstatic      : // fall through
         case Bytecodes::_putstatic      : // fall through
         case Bytecodes::_getfield       : // fall through
@@ -244,16 +297,18 @@
         case Bytecodes::_invokespecial  : // fall through
         case Bytecodes::_invokestatic   :
         case Bytecodes::_invokeinterface:
-          rewrite_member_reference(bcp, prefix_length+1);
+          rewrite_member_reference(bcp, prefix_length+1, reverse);
           break;
         case Bytecodes::_invokedynamic:
-          rewrite_invokedynamic(bcp, prefix_length+1);
+          rewrite_invokedynamic(bcp, prefix_length+1, reverse);
           break;
         case Bytecodes::_ldc:
-          maybe_rewrite_ldc(bcp, prefix_length+1, false);
+        case Bytecodes::_fast_aldc:
+          maybe_rewrite_ldc(bcp, prefix_length+1, false, reverse);
           break;
         case Bytecodes::_ldc_w:
-          maybe_rewrite_ldc(bcp, prefix_length+1, true);
+        case Bytecodes::_fast_aldc_w:
+          maybe_rewrite_ldc(bcp, prefix_length+1, true, reverse);
           break;
         case Bytecodes::_jsr            : // fall through
         case Bytecodes::_jsr_w          : nof_jsrs++;                   break;
@@ -273,12 +328,13 @@
   if (nof_jsrs > 0) {
     method->set_has_jsrs();
     // Second pass will revisit this method.
-    assert(method->has_jsrs(), "");
+    assert(method->has_jsrs(), "didn't we just set this?");
   }
 }
 
 // After constant pool is created, revisit methods containing jsrs.
 methodHandle Rewriter::rewrite_jsrs(methodHandle method, TRAPS) {
+  ResourceMark rm(THREAD);
   ResolveOopMapConflicts romc(method);
   methodHandle original_method = method;
   method = romc.do_potential_rewrite(CHECK_(methodHandle()));
@@ -300,7 +356,6 @@
   return method;
 }
 
-
 void Rewriter::rewrite(instanceKlassHandle klass, TRAPS) {
   ResourceMark rm(THREAD);
   Rewriter     rw(klass, klass->constants(), klass->methods(), CHECK);
@@ -343,34 +398,57 @@
   }
 
   // rewrite methods, in two passes
-  int i, len = _methods->length();
+  int len = _methods->length();
 
-  for (i = len; --i >= 0; ) {
+  for (int i = len-1; i >= 0; i--) {
     methodOop method = (methodOop)_methods->obj_at(i);
     scan_method(method);
   }
 
   // allocate constant pool cache, now that we've seen all the bytecodes
-  make_constant_pool_cache(CHECK);
+  make_constant_pool_cache(THREAD);
+
+  // Restore bytecodes to their unrewritten state if there are exceptions
+  // rewriting bytecodes or allocating the cpCache
+  if (HAS_PENDING_EXCEPTION) {
+    restore_bytecodes();
+    return;
+  }
+}
 
-  for (i = len; --i >= 0; ) {
-    methodHandle m(THREAD, (methodOop)_methods->obj_at(i));
+// Relocate jsr/rets in a method.  This can't be done with the rewriter
+// stage because it can throw other exceptions, leaving the bytecodes
+// pointing at constant pool cache entries.
+// Link and check jvmti dependencies while we're iterating over the methods.
+// JSR292 code calls with a different set of methods, so two entry points.
+void Rewriter::relocate_and_link(instanceKlassHandle this_oop, TRAPS) {
+  objArrayHandle methods(THREAD, this_oop->methods());
+  relocate_and_link(this_oop, methods, THREAD);
+}
+
+void Rewriter::relocate_and_link(instanceKlassHandle this_oop,
+                                 objArrayHandle methods, TRAPS) {
+  int len = methods->length();
+  for (int i = len-1; i >= 0; i--) {
+    methodHandle m(THREAD, (methodOop)methods->obj_at(i));
 
     if (m->has_jsrs()) {
       m = rewrite_jsrs(m, CHECK);
       // Method might have gotten rewritten.
-      _methods->obj_at_put(i, m());
+      methods->obj_at_put(i, m());
     }
 
-    // Set up method entry points for compiler and interpreter.
+    // Set up method entry points for compiler and interpreter.
     m->link_method(m, CHECK);
 
+    // This is for JVMTI and unrelated to relocator but the last thing we do
 #ifdef ASSERT
     if (StressMethodComparator) {
       static int nmc = 0;
       for (int j = i; j >= 0 && j >= i-4; j--) {
         if ((++nmc % 1000) == 0)  tty->print_cr("Have run MethodComparator %d times...", nmc);
-        bool z = MethodComparator::methods_EMCP(m(), (methodOop)_methods->obj_at(j));
+        bool z = MethodComparator::methods_EMCP(m(),
+                   (methodOop)methods->obj_at(j));
         if (j == i && !z) {
           tty->print("MethodComparator FAIL: "); m->print(); m->print_codes();
           assert(z, "method must compare equal to itself");
--- a/src/share/vm/interpreter/rewriter.hpp	Wed May 25 16:04:09 2011 -0700
+++ b/src/share/vm/interpreter/rewriter.hpp	Fri May 27 15:04:48 2011 -0700
@@ -85,13 +85,15 @@
 
   void compute_index_maps();
   void make_constant_pool_cache(TRAPS);
-  void scan_method(methodOop m);
-  methodHandle rewrite_jsrs(methodHandle m, TRAPS);
+  void scan_method(methodOop m, bool reverse = false);
   void rewrite_Object_init(methodHandle m, TRAPS);
-  void rewrite_member_reference(address bcp, int offset);
-  void rewrite_invokedynamic(address bcp, int offset);
-  void maybe_rewrite_ldc(address bcp, int offset, bool is_wide);
+  void rewrite_member_reference(address bcp, int offset, bool reverse = false);
+  void rewrite_invokedynamic(address bcp, int offset, bool reverse = false);
+  void maybe_rewrite_ldc(address bcp, int offset, bool is_wide, bool reverse = false);
+  // Revert bytecodes in case of an exception.
+  void restore_bytecodes();
 
+  static methodHandle rewrite_jsrs(methodHandle m, TRAPS);
  public:
   // Driver routine:
   static void rewrite(instanceKlassHandle klass, TRAPS);
@@ -100,6 +102,13 @@
   enum {
     _secondary_entry_tag = nth_bit(30)
   };
+
+  // Second pass, not gated by is_rewritten flag
+  static void relocate_and_link(instanceKlassHandle klass, TRAPS);
+  // JSR292 version to call with its own methods.
+  static void relocate_and_link(instanceKlassHandle klass,
+                                objArrayHandle methods, TRAPS);
+
 };
 
 #endif // SHARE_VM_INTERPRETER_REWRITER_HPP
--- a/src/share/vm/oops/instanceKlass.cpp	Wed May 25 16:04:09 2011 -0700
+++ b/src/share/vm/oops/instanceKlass.cpp	Fri May 27 15:04:48 2011 -0700
@@ -335,6 +335,9 @@
         this_oop->rewrite_class(CHECK_false);
       }
 
+      // relocate jsrs and link methods after they are all rewritten
+      this_oop->relocate_and_link_methods(CHECK_false);
+
       // Initialize the vtable and interface table after
       // methods have been rewritten since rewrite may
       // fabricate new methodOops.
@@ -365,17 +368,8 @@
 
 
 // Rewrite the byte codes of all of the methods of a class.
-// Three cases:
-//    During the link of a newly loaded class.
-//    During the preloading of classes to be written to the shared spaces.
-//      - Rewrite the methods and update the method entry points.
-//
-//    During the link of a class in the shared spaces.
-//      - The methods were already rewritten, update the metho entry points.
-//
 // The rewriter must be called exactly once. Rewriting must happen after
 // verification but before the first method of the class is executed.
-
 void instanceKlass::rewrite_class(TRAPS) {
   assert(is_loaded(), "must be loaded");
   instanceKlassHandle this_oop(THREAD, this->as_klassOop());
@@ -383,10 +377,19 @@
     assert(this_oop()->is_shared(), "rewriting an unshared class?");
     return;
   }
-  Rewriter::rewrite(this_oop, CHECK); // No exception can happen here
+  Rewriter::rewrite(this_oop, CHECK);
   this_oop->set_rewritten();
 }
 
+// Now relocate and link method entry points after class is rewritten.
+// This is outside is_rewritten flag. In case of an exception, it can be
+// executed more than once.
+void instanceKlass::relocate_and_link_methods(TRAPS) {
+  assert(is_loaded(), "must be loaded");
+  instanceKlassHandle this_oop(THREAD, this->as_klassOop());
+  Rewriter::relocate_and_link(this_oop, CHECK);
+}
+
 
 void instanceKlass::initialize_impl(instanceKlassHandle this_oop, TRAPS) {
   // Make sure klass is linked (verified) before initialization
--- a/src/share/vm/oops/instanceKlass.hpp	Wed May 25 16:04:09 2011 -0700
+++ b/src/share/vm/oops/instanceKlass.hpp	Fri May 27 15:04:48 2011 -0700
@@ -392,6 +392,7 @@
   bool link_class_or_fail(TRAPS); // returns false on failure
   void unlink_class();
   void rewrite_class(TRAPS);
+  void relocate_and_link_methods(TRAPS);
   methodOop class_initializer();
 
   // set the class to initialized if no static initializer is present
--- a/src/share/vm/oops/methodOop.cpp	Wed May 25 16:04:09 2011 -0700
+++ b/src/share/vm/oops/methodOop.cpp	Fri May 27 15:04:48 2011 -0700
@@ -693,7 +693,10 @@
 // Called when the method_holder is getting linked. Setup entrypoints so the method
 // is ready to be called from interpreter, compiler, and vtables.
 void methodOopDesc::link_method(methodHandle h_method, TRAPS) {
-  assert(_i2i_entry == NULL, "should only be called once");
+  // If the code cache is full, we may reenter this function for the
+  // leftover methods that weren't linked.
+  if (_i2i_entry != NULL) return;
+
   assert(_adapter == NULL, "init'd to NULL" );
   assert( _code == NULL, "nothing compiled yet" );
 
@@ -717,7 +720,7 @@
   // called from the vtable.  We need adapters on such methods that get loaded
   // later.  Ditto for mega-morphic itable calls.  If this proves to be a
   // problem we'll make these lazily later.
-  (void) make_adapters(h_method, CHECK);
+  if (UseCompiler) (void) make_adapters(h_method, CHECK);
 
   // ONLY USE the h_method now as make_adapter may have blocked
 
--- a/src/share/vm/prims/jvmtiRedefineClasses.cpp	Wed May 25 16:04:09 2011 -0700
+++ b/src/share/vm/prims/jvmtiRedefineClasses.cpp	Fri May 27 15:04:48 2011 -0700
@@ -992,6 +992,9 @@
     }
 
     Rewriter::rewrite(scratch_class, THREAD);
+    if (!HAS_PENDING_EXCEPTION) {
+      Rewriter::relocate_and_link(scratch_class, THREAD);
+    }
     if (HAS_PENDING_EXCEPTION) {
       Symbol* ex_name = PENDING_EXCEPTION->klass()->klass_part()->name();
       CLEAR_PENDING_EXCEPTION;
--- a/src/share/vm/prims/methodHandleWalk.cpp	Wed May 25 16:04:09 2011 -0700
+++ b/src/share/vm/prims/methodHandleWalk.cpp	Fri May 27 15:04:48 2011 -0700
@@ -1399,6 +1399,7 @@
   objArrayHandle methods(THREAD, m_array);
   methods->obj_at_put(0, m());
   Rewriter::rewrite(_target_klass(), cpool, methods, CHECK_(empty));  // Use fake class.
+  Rewriter::relocate_and_link(_target_klass(), methods, CHECK_(empty));  // Use fake class.
 
   // Set the invocation counter's count to the invoke count of the
   // original call site.