src/share/vm/interpreter/bytecode.hpp @ 10905:f57189b7648d

/*
 * Copyright (c) 1997, 2012, Oracle and/or its affiliates. All rights reserved.
 * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
 *
 * This code is free software; you can redistribute it and/or modify it
 * under the terms of the GNU General Public License version 2 only, as
 * published by the Free Software Foundation.
 *
 * This code is distributed in the hope that it will be useful, but WITHOUT
 * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
 * FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
 * version 2 for more details (a copy is included in the LICENSE file that
 * accompanied this code).
 *
 * You should have received a copy of the GNU General Public License version
 * 2 along with this work; if not, write to the Free Software Foundation,
 * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
 *
 * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
 * or visit www.oracle.com if you need additional information or have any
 * questions.
 *
 */

#ifndef SHARE_VM_INTERPRETER_BYTECODE_HPP
#define SHARE_VM_INTERPRETER_BYTECODE_HPP

#include "interpreter/bytecodes.hpp"
#include "memory/allocation.hpp"
#include "oops/method.hpp"
#ifdef TARGET_ARCH_x86
# include "bytes_x86.hpp"
#endif
#ifdef TARGET_ARCH_aarch64
# include "bytes_aarch64.hpp"
#endif
#ifdef TARGET_ARCH_sparc
# include "bytes_sparc.hpp"
#endif
#ifdef TARGET_ARCH_zero
# include "bytes_zero.hpp"
#endif
#ifdef TARGET_ARCH_arm
# include "bytes_arm.hpp"
#endif
#ifdef TARGET_ARCH_ppc
# include "bytes_ppc.hpp"
#endif

class ciBytecodeStream;

// The base class for different kinds of bytecode abstractions.
// Provides the primitive operations to manipulate code relative
// to the bcp.

class Bytecode: public StackObj {
 protected:
  const address   _bcp;
  const Bytecodes::Code _code;

  // Address computation
  address addr_at            (int offset)        const     { return (address)_bcp + offset; }
  u_char  byte_at            (int offset)        const     { return *addr_at(offset); }
  address aligned_addr_at    (int offset)        const     { return (address)round_to((intptr_t)addr_at(offset), jintSize); }
  int     aligned_offset     (int offset)        const     { return aligned_addr_at(offset) - addr_at(0); }

  // Word access:
  int     get_Java_u2_at     (int offset)        const     { return Bytes::get_Java_u2(addr_at(offset)); }
  int     get_Java_u4_at     (int offset)        const     { return Bytes::get_Java_u4(addr_at(offset)); }
  int     get_native_u2_at   (int offset)        const     { return Bytes::get_native_u2(addr_at(offset)); }
  int     get_native_u4_at   (int offset)        const     { return Bytes::get_native_u4(addr_at(offset)); }

 public:
  Bytecode(Method* method, address bcp): _bcp(bcp), _code(Bytecodes::code_at(method, addr_at(0))) {
    assert(method != NULL, "this form requires a valid Method*");
  }
  // Defined in ciStreams.hpp
  inline Bytecode(const ciBytecodeStream* stream, address bcp = NULL);

  // Attributes
  address bcp() const                            { return _bcp; }
  int instruction_size() const                   { return Bytecodes::length_for_code_at(_code, bcp()); }

  Bytecodes::Code code() const                   { return _code; }
  Bytecodes::Code java_code() const              { return Bytecodes::java_code(code()); }
  Bytecodes::Code invoke_code() const            { return (code() == Bytecodes::_invokehandle) ? code() : java_code(); }

  // Accessors for parsing bytecode operands in place.
  int get_index_u1(Bytecodes::Code bc) const {
    assert_same_format_as(bc); assert_index_size(1, bc);
    return *(jubyte*)addr_at(1);
  }
  int get_index_u2(Bytecodes::Code bc, bool is_wide = false) const {
    assert_same_format_as(bc, is_wide); assert_index_size(2, bc, is_wide);
    address p = addr_at(is_wide ? 2 : 1);
    if (can_use_native_byte_order(bc, is_wide)) {
      return Bytes::get_native_u2(p);
    } else {
      return Bytes::get_Java_u2(p);
    }
  }
  int get_index_u1_cpcache(Bytecodes::Code bc) const {
    assert_same_format_as(bc); assert_index_size(1, bc);
    return *(jubyte*)addr_at(1) + ConstantPool::CPCACHE_INDEX_TAG;
  }
  int get_index_u2_cpcache(Bytecodes::Code bc) const {
    assert_same_format_as(bc); assert_index_size(2, bc); assert_native_index(bc);
    return Bytes::get_native_u2(addr_at(1)) + ConstantPool::CPCACHE_INDEX_TAG;
  }
  int get_index_u4(Bytecodes::Code bc) const {
    assert_same_format_as(bc); assert_index_size(4, bc);
    assert(can_use_native_byte_order(bc), "");
    return Bytes::get_native_u4(addr_at(1));
  }
  bool has_index_u4(Bytecodes::Code bc) const {
    return bc == Bytecodes::_invokedynamic;
  }

  int get_offset_s2(Bytecodes::Code bc) const {
    assert_same_format_as(bc); assert_offset_size(2, bc);
    return (jshort) Bytes::get_Java_u2(addr_at(1));
  }
  int get_offset_s4(Bytecodes::Code bc) const {
    assert_same_format_as(bc); assert_offset_size(4, bc);
    return (jint) Bytes::get_Java_u4(addr_at(1));
  }

  int get_constant_u1(int offset, Bytecodes::Code bc) const {
    assert_same_format_as(bc); assert_constant_size(1, offset, bc);
    return *(jbyte*)addr_at(offset);
  }
  int get_constant_u2(int offset, Bytecodes::Code bc, bool is_wide = false) const {
    assert_same_format_as(bc, is_wide); assert_constant_size(2, offset, bc, is_wide);
    return (jshort) Bytes::get_Java_u2(addr_at(offset));
  }

  // These are used locally and also from bytecode streams.
  void assert_same_format_as(Bytecodes::Code testbc, bool is_wide = false) const NOT_DEBUG_RETURN;
  static void assert_index_size(int required_size, Bytecodes::Code bc, bool is_wide = false) NOT_DEBUG_RETURN;
  static void assert_offset_size(int required_size, Bytecodes::Code bc, bool is_wide = false) NOT_DEBUG_RETURN;
  static void assert_constant_size(int required_size, int where, Bytecodes::Code bc, bool is_wide = false) NOT_DEBUG_RETURN;
  static void assert_native_index(Bytecodes::Code bc, bool is_wide = false) NOT_DEBUG_RETURN;
  static bool can_use_native_byte_order(Bytecodes::Code bc, bool is_wide = false) {
    return (!Bytes::is_Java_byte_ordering_different() || Bytecodes::native_byte_order(bc /*, is_wide*/));
  }
};
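
// Usage sketch: decoding an operand of the instruction at a known bcp.
// 'm' and 'bcp' are placeholder names for a valid Method* and bytecode
// pointer; which accessor applies depends on the wrapped opcode.
//
//   Bytecode bc(m, bcp);
//   switch (bc.code()) {
//   case Bytecodes::_invokedynamic:
//     // invokedynamic carries a native-order u4 index (cf. has_index_u4)
//     return bc.get_index_u4(Bytecodes::_invokedynamic);
//   case Bytecodes::_goto:
//     // goto carries a signed 16-bit Java-order branch offset
//     return bc.get_offset_s2(Bytecodes::_goto);
//   default:
//     return 0;
//   }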


// Abstractions for lookupswitch bytecode
class LookupswitchPair VALUE_OBJ_CLASS_SPEC {
 private:
  const address _bcp;

  address addr_at            (int offset)        const     { return _bcp + offset; }
  int     get_Java_u4_at     (int offset)        const     { return Bytes::get_Java_u4(addr_at(offset)); }

 public:
  LookupswitchPair(address bcp): _bcp(bcp) {}
  int  match() const                             { return get_Java_u4_at(0 * jintSize); }
  int  offset() const                            { return get_Java_u4_at(1 * jintSize); }
};


class Bytecode_lookupswitch: public Bytecode {
 public:
  Bytecode_lookupswitch(Method* method, address bcp): Bytecode(method, bcp) { verify(); }
  // Defined in ciStreams.hpp
  inline Bytecode_lookupswitch(const ciBytecodeStream* stream);
  void verify() const PRODUCT_RETURN;

  // Attributes
  int  default_offset() const                    { return get_Java_u4_at(aligned_offset(1 + 0*jintSize)); }
  int  number_of_pairs() const                   { return get_Java_u4_at(aligned_offset(1 + 1*jintSize)); }
  LookupswitchPair pair_at(int i) const          {
    assert(0 <= i && i < number_of_pairs(), "pair index out of bounds");
    return LookupswitchPair(aligned_addr_at(1 + (1 + i)*2*jintSize));
  }
};
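
// Usage sketch: resolving a lookupswitch target for a candidate 'key'
// ('m', 'bcp', 'bci' and 'key' are placeholder names).
//
//   Bytecode_lookupswitch ls(m, bcp);
//   for (int i = 0; i < ls.number_of_pairs(); i++) {
//     LookupswitchPair pair = ls.pair_at(i);
//     if (pair.match() == key) return bci + pair.offset();
//   }
//   return bci + ls.default_offset();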

class Bytecode_tableswitch: public Bytecode {
 public:
  Bytecode_tableswitch(Method* method, address bcp): Bytecode(method, bcp) { verify(); }
  // Defined in ciStreams.hpp
  inline Bytecode_tableswitch(const ciBytecodeStream* stream);
  void verify() const PRODUCT_RETURN;

  // Attributes
  int  default_offset() const                    { return get_Java_u4_at(aligned_offset(1 + 0*jintSize)); }
  int  low_key() const                           { return get_Java_u4_at(aligned_offset(1 + 1*jintSize)); }
  int  high_key() const                          { return get_Java_u4_at(aligned_offset(1 + 2*jintSize)); }
  int  dest_offset_at(int i) const;
  int  length() const                            { return high_key() - low_key() + 1; }
};
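
// Usage sketch: resolving a tableswitch target; keys outside
// [low_key(), high_key()] fall through to the default offset
// ('m', 'bcp', 'bci' and 'key' are placeholder names).
//
//   Bytecode_tableswitch ts(m, bcp);
//   if (ts.low_key() <= key && key <= ts.high_key()) {
//     return bci + ts.dest_offset_at(key - ts.low_key());
//   }
//   return bci + ts.default_offset();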

// Common code for decoding invokes and field references.

class Bytecode_member_ref: public Bytecode {
 protected:
  const methodHandle _method;                          // method containing the bytecode

  Bytecode_member_ref(methodHandle method, int bci)  : Bytecode(method(), method()->bcp_from(bci)), _method(method) {}

  methodHandle method() const                    { return _method; }
  ConstantPool* constants() const                { return _method->constants(); }
  ConstantPoolCache* cpcache() const             { return _method->constants()->cache(); }
  ConstantPoolCacheEntry* cpcache_entry() const;

 public:
  int          index() const;                    // cache index (loaded from instruction)
  int          pool_index() const;               // constant pool index
  Symbol*      klass() const;                    // returns the klass of the method or field
  Symbol*      name() const;                     // returns the name of the method or field
  Symbol*      signature() const;                // returns the signature of the method or field

  BasicType    result_type() const;              // returns the result type of the getfield or invoke
};
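
// Usage sketch: Bytecode_member_ref is not instantiated directly (its
// constructor is protected); the accessors above are reached through a
// subclass such as Bytecode_field ('mh' and 'bci' are placeholder names).
//
//   Bytecode_field field(mh, bci);
//   Symbol*   holder = field.klass();       // class declaring the field
//   Symbol*   fname  = field.name();
//   Symbol*   sig    = field.signature();
//   BasicType bt     = field.result_type(); // e.g. T_INT for an int field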

// Abstraction for invoke_{virtual, static, interface, special}

class Bytecode_invoke: public Bytecode_member_ref {
 protected:
  // Constructor that skips verification
  Bytecode_invoke(methodHandle method, int bci, bool unused)  : Bytecode_member_ref(method, bci) {}

 public:
  Bytecode_invoke(methodHandle method, int bci)  : Bytecode_member_ref(method, bci) { verify(); }
  void verify() const;

  // Attributes
  methodHandle static_target(TRAPS);             // "specified" method   (from constant pool)
  Handle       appendix(TRAPS);                  // if CPCE::has_appendix (from constant pool)

  // Testers
  bool is_invokeinterface() const                { return invoke_code() == Bytecodes::_invokeinterface; }
  bool is_invokevirtual() const                  { return invoke_code() == Bytecodes::_invokevirtual; }
  bool is_invokestatic() const                   { return invoke_code() == Bytecodes::_invokestatic; }
  bool is_invokespecial() const                  { return invoke_code() == Bytecodes::_invokespecial; }
  bool is_invokedynamic() const                  { return invoke_code() == Bytecodes::_invokedynamic; }
  bool is_invokehandle() const                   { return invoke_code() == Bytecodes::_invokehandle; }

  bool has_receiver() const                      { return !is_invokestatic() && !is_invokedynamic(); }

  bool is_valid() const                          { return is_invokeinterface() ||
                                                          is_invokevirtual()   ||
                                                          is_invokestatic()    ||
                                                          is_invokespecial()   ||
                                                          is_invokedynamic()   ||
                                                          is_invokehandle(); }

  bool has_appendix()                            { return cpcache_entry()->has_appendix(); }

 private:
  // Helper to skip verification.  Use is_valid() to check whether the result is really an invoke
  inline friend Bytecode_invoke Bytecode_invoke_check(methodHandle method, int bci);
};

inline Bytecode_invoke Bytecode_invoke_check(methodHandle method, int bci) {
  return Bytecode_invoke(method, bci, false);
}
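
// Usage sketch: probing a bci that may or may not hold an invoke. Because
// Bytecode_invoke_check skips verification, is_valid() must be consulted
// before trusting the result ('mh' and 'bci' are placeholder names, and a
// TRAPS context is assumed for the CHECK macro).
//
//   Bytecode_invoke call = Bytecode_invoke_check(mh, bci);
//   if (call.is_valid() && !call.is_invokedynamic()) {
//     methodHandle callee = call.static_target(CHECK);
//   }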


// Abstraction for all field accesses (put/get field/static)
class Bytecode_field: public Bytecode_member_ref {
 public:
  Bytecode_field(methodHandle method, int bci)  : Bytecode_member_ref(method, bci) { verify(); }

  // Testers
  bool is_getfield() const                       { return java_code() == Bytecodes::_getfield; }
  bool is_putfield() const                       { return java_code() == Bytecodes::_putfield; }
  bool is_getstatic() const                      { return java_code() == Bytecodes::_getstatic; }
  bool is_putstatic() const                      { return java_code() == Bytecodes::_putstatic; }

  bool is_getter() const                         { return is_getfield()  || is_getstatic(); }
  bool is_static() const                         { return is_getstatic() || is_putstatic(); }

  bool is_valid() const                          { return is_getfield()   ||
                                                          is_putfield()   ||
                                                          is_getstatic()  ||
                                                          is_putstatic(); }
  void verify() const;
};
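
// Usage sketch: classifying a field access; the constructor's verify()
// asserts that the bci really holds a field bytecode ('mh' and 'bci' are
// placeholder names).
//
//   Bytecode_field field(mh, bci);
//   bool needs_receiver = !field.is_static();  // only get/putfield do
//   bool writes         = !field.is_getter();  // put{field,static}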

// Abstraction for checkcast
class Bytecode_checkcast: public Bytecode {
 public:
  Bytecode_checkcast(Method* method, address bcp): Bytecode(method, bcp) { verify(); }
  void verify() const { assert(Bytecodes::java_code(code()) == Bytecodes::_checkcast, "check checkcast"); }

  // Returns index
  long index() const   { return get_index_u2(Bytecodes::_checkcast); }
};

// Abstraction for instanceof
class Bytecode_instanceof: public Bytecode {
 public:
  Bytecode_instanceof(Method* method, address bcp): Bytecode(method, bcp) { verify(); }
  void verify() const { assert(code() == Bytecodes::_instanceof, "check instanceof"); }

  // Returns index
  long index() const   { return get_index_u2(Bytecodes::_instanceof); }
};

class Bytecode_new: public Bytecode {
 public:
  Bytecode_new(Method* method, address bcp): Bytecode(method, bcp) { verify(); }
  void verify() const { assert(java_code() == Bytecodes::_new, "check new"); }

  // Returns index
  long index() const   { return get_index_u2(Bytecodes::_new); }
};

class Bytecode_multianewarray: public Bytecode {
 public:
  Bytecode_multianewarray(Method* method, address bcp): Bytecode(method, bcp) { verify(); }
  void verify() const { assert(java_code() == Bytecodes::_multianewarray, "check multianewarray"); }

  // Returns index
  long index() const   { return get_index_u2(Bytecodes::_multianewarray); }
};

class Bytecode_anewarray: public Bytecode {
 public:
  Bytecode_anewarray(Method* method, address bcp): Bytecode(method, bcp) { verify(); }
  void verify() const { assert(java_code() == Bytecodes::_anewarray, "check anewarray"); }

  // Returns index
  long index() const   { return get_index_u2(Bytecodes::_anewarray); }
};
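
// Usage sketch: the allocation bytecodes above all expose the same u2
// constant pool index ('m' and 'bcp' are placeholder names).
//
//   Bytecode_anewarray an(m, bcp);
//   int cp_index = (int) an.index();  // CP index of the element class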

// Abstraction for ldc, ldc_w and ldc2_w
class Bytecode_loadconstant: public Bytecode {
 private:
  const methodHandle _method;

  int raw_index() const;

 public:
  Bytecode_loadconstant(methodHandle method, int bci): Bytecode(method(), method->bcp_from(bci)), _method(method) { verify(); }

  void verify() const {
    assert(_method.not_null(), "must supply method");
    Bytecodes::Code stdc = Bytecodes::java_code(code());
    assert(stdc == Bytecodes::_ldc ||
           stdc == Bytecodes::_ldc_w ||
           stdc == Bytecodes::_ldc2_w, "load constant");
  }

  // Only non-standard bytecodes (fast_aldc) have reference cache indexes.
  bool has_cache_index() const { return code() >= Bytecodes::number_of_java_codes; }

  int pool_index() const;               // index into constant pool
  int cache_index() const {             // index into reference cache (or -1 if none)
    return has_cache_index() ? raw_index() : -1;
  }

  BasicType result_type() const;        // returns the result type of the ldc

  oop resolve_constant(TRAPS) const;
};
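
// Usage sketch: resolving the constant pushed by an ldc at 'bci'
// ('mh' and 'bci' are placeholder names; a TRAPS context is assumed
// for the CHECK macro).
//
//   Bytecode_loadconstant ldc(mh, bci);
//   if (ldc.result_type() == T_OBJECT) {
//     oop value = ldc.resolve_constant(CHECK);
//   }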

#endif // SHARE_VM_INTERPRETER_BYTECODE_HPP