changeset 6447:bb1818a74170

8150045, PR3100: arraycopy causes segfaults in SATB during garbage collection
Reviewed-by: roland
author aph
date Mon, 23 May 2016 15:39:13 +0000
parents b96466d40a92
children 30fa347a47e6
files src/cpu/aarch64/vm/stubGenerator_aarch64.cpp
diffstat 1 files changed, 19 insertions(+), 13 deletions(-) [+]
line wrap: on
line diff
--- a/src/cpu/aarch64/vm/stubGenerator_aarch64.cpp	Wed Apr 20 11:14:10 2016 +0000
+++ b/src/cpu/aarch64/vm/stubGenerator_aarch64.cpp	Mon May 23 15:39:13 2016 +0000
@@ -687,7 +687,7 @@
   //     count   -  element count
   //     tmp     - scratch register
   //
-  //     Destroy no registers!
+  //     Destroy no registers except rscratch1 and rscratch2
   //
   void  gen_write_ref_array_pre_barrier(Register addr, Register count, bool dest_uninitialized) {
     BarrierSet* bs = Universe::heap()->barrier_set();
@@ -696,12 +696,13 @@
     case BarrierSet::G1SATBCTLogging:
       // With G1, don't generate the call if we statically know that the target in uninitialized
       if (!dest_uninitialized) {
-	__ push(RegSet::range(r0, r29), sp);         // integer registers except lr & sp
+	__ push_call_clobbered_registers();
 	if (count == c_rarg0) {
 	  if (addr == c_rarg1) {
 	    // exactly backwards!!
-	    __ stp(c_rarg0, c_rarg1, __ pre(sp, -2 * wordSize));
-	    __ ldp(c_rarg1, c_rarg0, __ post(sp, -2 * wordSize));
+            __ mov(rscratch1, c_rarg0);
+            __ mov(c_rarg0, c_rarg1);
+            __ mov(c_rarg1, rscratch1);
 	  } else {
 	    __ mov(c_rarg1, count);
 	    __ mov(c_rarg0, addr);
@@ -711,7 +712,7 @@
 	  __ mov(c_rarg1, count);
 	}
 	__ call_VM_leaf(CAST_FROM_FN_PTR(address, BarrierSet::static_write_ref_array_pre), 2);
-	__ pop(RegSet::range(r0, r29), sp);         // integer registers except lr & sp        }
+	__ pop_call_clobbered_registers();
 	break;
       case BarrierSet::CardTableModRef:
       case BarrierSet::CardTableExtension:
@@ -742,7 +743,7 @@
       case BarrierSet::G1SATBCTLogging:
 
         {
-	  __ push(RegSet::range(r0, r29), sp);         // integer registers except lr & sp
+	  __ push_call_clobbered_registers();
           // must compute element count unless barrier set interface is changed (other platforms supply count)
           assert_different_registers(start, end, scratch);
           __ lea(scratch, Address(end, BytesPerHeapOop));
@@ -751,7 +752,7 @@
           __ mov(c_rarg0, start);
           __ mov(c_rarg1, scratch);
           __ call_VM_leaf(CAST_FROM_FN_PTR(address, BarrierSet::static_write_ref_array_post), 2);
-	  __ pop(RegSet::range(r0, r29), sp);         // integer registers except lr & sp        }
+	  __ pop_call_clobbered_registers();
         }
         break;
       case BarrierSet::CardTableModRef:
@@ -1386,10 +1387,10 @@
   //   no-overlap entry point used by generate_conjoint_long_oop_copy().
   //
   address generate_disjoint_oop_copy(bool aligned, address *entry,
-				     const char *name, bool dest_uninitialized = false) {
+				     const char *name, bool dest_uninitialized) {
     const bool is_oop = true;
     const size_t size = UseCompressedOops ? sizeof (jint) : sizeof (jlong);
-    return generate_disjoint_copy(size, aligned, is_oop, entry, name);
+    return generate_disjoint_copy(size, aligned, is_oop, entry, name, dest_uninitialized);
   }
 
   // Arguments:
@@ -1404,10 +1405,11 @@
   //
   address generate_conjoint_oop_copy(bool aligned,
 				     address nooverlap_target, address *entry,
-				     const char *name, bool dest_uninitialized = false) {
+				     const char *name, bool dest_uninitialized) {
     const bool is_oop = true;
     const size_t size = UseCompressedOops ? sizeof (jint) : sizeof (jlong);
-    return generate_conjoint_copy(size, aligned, is_oop, nooverlap_target, entry, name);
+    return generate_conjoint_copy(size, aligned, is_oop, nooverlap_target, entry,
+                                  name, dest_uninitialized);
   }
 
 
@@ -1514,6 +1516,8 @@
     }
 #endif //ASSERT
 
+    gen_write_ref_array_pre_barrier(to, count, dest_uninitialized);
+
     // save the original count
     __ mov(count_save, count);
 
@@ -1655,9 +1659,11 @@
       bool aligned = !UseCompressedOops;
 
       StubRoutines::_arrayof_oop_disjoint_arraycopy
-	= generate_disjoint_oop_copy(aligned, &entry, "arrayof_oop_disjoint_arraycopy");
+	= generate_disjoint_oop_copy(aligned, &entry, "arrayof_oop_disjoint_arraycopy",
+                                     /*dest_uninitialized*/false);
       StubRoutines::_arrayof_oop_arraycopy
-	= generate_conjoint_oop_copy(aligned, entry, &entry_oop_arraycopy, "arrayof_oop_arraycopy");
+	= generate_conjoint_oop_copy(aligned, entry, &entry_oop_arraycopy, "arrayof_oop_arraycopy",
+                                     /*dest_uninitialized*/false);
       // Aligned versions without pre-barriers
       StubRoutines::_arrayof_oop_disjoint_arraycopy_uninit
 	= generate_disjoint_oop_copy(aligned, &entry, "arrayof_oop_disjoint_arraycopy_uninit",