view src/share/vm/interpreter/bytecodes.hpp @ 10905:f57189b7648d


/*
 * Copyright (c) 1997, 2016, Oracle and/or its affiliates. All rights reserved.
 * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
 *
 * This code is free software; you can redistribute it and/or modify it
 * under the terms of the GNU General Public License version 2 only, as
 * published by the Free Software Foundation.
 *
 * This code is distributed in the hope that it will be useful, but WITHOUT
 * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
 * FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
 * version 2 for more details (a copy is included in the LICENSE file that
 * accompanied this code).
 *
 * You should have received a copy of the GNU General Public License version
 * 2 along with this work; if not, write to the Free Software Foundation,
 * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
 *
 * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
 * or visit www.oracle.com if you need additional information or have any
 * questions.
 *
 */

#ifndef SHARE_VM_INTERPRETER_BYTECODES_HPP
#define SHARE_VM_INTERPRETER_BYTECODES_HPP

#include "memory/allocation.hpp"
#include "utilities/top.hpp"

// Bytecodes specifies all bytecodes used in the VM and
// provides utility functions to get bytecode attributes.
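//
// For example (with the values registered by the def() calls in
// bytecodes.cpp):
//
//   Bytecodes::Code bc = Bytecodes::code_at(method, bcp);  // e.g. Bytecodes::_iload
//   Bytecodes::name(bc);         // "iload"
//   Bytecodes::length_for(bc);   // 2 (opcode + one-byte local index)
//   Bytecodes::result_type(bc);  // T_INT
//   Bytecodes::depth(bc);        // +1 (pushes one int onto the stack)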

// NOTE: replicated in SA in vm/agent/sun/jvm/hotspot/interpreter/Bytecodes.java
class Bytecodes: AllStatic {
 public:
  enum Code {
    _illegal              =  -1,

    // Java bytecodes
    _nop                  =   0, // 0x00
    _aconst_null          =   1, // 0x01
    _iconst_m1            =   2, // 0x02
    _iconst_0             =   3, // 0x03
    _iconst_1             =   4, // 0x04
    _iconst_2             =   5, // 0x05
    _iconst_3             =   6, // 0x06
    _iconst_4             =   7, // 0x07
    _iconst_5             =   8, // 0x08
    _lconst_0             =   9, // 0x09
    _lconst_1             =  10, // 0x0a
    _fconst_0             =  11, // 0x0b
    _fconst_1             =  12, // 0x0c
    _fconst_2             =  13, // 0x0d
    _dconst_0             =  14, // 0x0e
    _dconst_1             =  15, // 0x0f
    _bipush               =  16, // 0x10
    _sipush               =  17, // 0x11
    _ldc                  =  18, // 0x12
    _ldc_w                =  19, // 0x13
    _ldc2_w               =  20, // 0x14
    _iload                =  21, // 0x15
    _lload                =  22, // 0x16
    _fload                =  23, // 0x17
    _dload                =  24, // 0x18
    _aload                =  25, // 0x19
    _iload_0              =  26, // 0x1a
    _iload_1              =  27, // 0x1b
    _iload_2              =  28, // 0x1c
    _iload_3              =  29, // 0x1d
    _lload_0              =  30, // 0x1e
    _lload_1              =  31, // 0x1f
    _lload_2              =  32, // 0x20
    _lload_3              =  33, // 0x21
    _fload_0              =  34, // 0x22
    _fload_1              =  35, // 0x23
    _fload_2              =  36, // 0x24
    _fload_3              =  37, // 0x25
    _dload_0              =  38, // 0x26
    _dload_1              =  39, // 0x27
    _dload_2              =  40, // 0x28
    _dload_3              =  41, // 0x29
    _aload_0              =  42, // 0x2a
    _aload_1              =  43, // 0x2b
    _aload_2              =  44, // 0x2c
    _aload_3              =  45, // 0x2d
    _iaload               =  46, // 0x2e
    _laload               =  47, // 0x2f
    _faload               =  48, // 0x30
    _daload               =  49, // 0x31
    _aaload               =  50, // 0x32
    _baload               =  51, // 0x33
    _caload               =  52, // 0x34
    _saload               =  53, // 0x35
    _istore               =  54, // 0x36
    _lstore               =  55, // 0x37
    _fstore               =  56, // 0x38
    _dstore               =  57, // 0x39
    _astore               =  58, // 0x3a
    _istore_0             =  59, // 0x3b
    _istore_1             =  60, // 0x3c
    _istore_2             =  61, // 0x3d
    _istore_3             =  62, // 0x3e
    _lstore_0             =  63, // 0x3f
    _lstore_1             =  64, // 0x40
    _lstore_2             =  65, // 0x41
    _lstore_3             =  66, // 0x42
    _fstore_0             =  67, // 0x43
    _fstore_1             =  68, // 0x44
    _fstore_2             =  69, // 0x45
    _fstore_3             =  70, // 0x46
    _dstore_0             =  71, // 0x47
    _dstore_1             =  72, // 0x48
    _dstore_2             =  73, // 0x49
    _dstore_3             =  74, // 0x4a
    _astore_0             =  75, // 0x4b
    _astore_1             =  76, // 0x4c
    _astore_2             =  77, // 0x4d
    _astore_3             =  78, // 0x4e
    _iastore              =  79, // 0x4f
    _lastore              =  80, // 0x50
    _fastore              =  81, // 0x51
    _dastore              =  82, // 0x52
    _aastore              =  83, // 0x53
    _bastore              =  84, // 0x54
    _castore              =  85, // 0x55
    _sastore              =  86, // 0x56
    _pop                  =  87, // 0x57
    _pop2                 =  88, // 0x58
    _dup                  =  89, // 0x59
    _dup_x1               =  90, // 0x5a
    _dup_x2               =  91, // 0x5b
    _dup2                 =  92, // 0x5c
    _dup2_x1              =  93, // 0x5d
    _dup2_x2              =  94, // 0x5e
    _swap                 =  95, // 0x5f
    _iadd                 =  96, // 0x60
    _ladd                 =  97, // 0x61
    _fadd                 =  98, // 0x62
    _dadd                 =  99, // 0x63
    _isub                 = 100, // 0x64
    _lsub                 = 101, // 0x65
    _fsub                 = 102, // 0x66
    _dsub                 = 103, // 0x67
    _imul                 = 104, // 0x68
    _lmul                 = 105, // 0x69
    _fmul                 = 106, // 0x6a
    _dmul                 = 107, // 0x6b
    _idiv                 = 108, // 0x6c
    _ldiv                 = 109, // 0x6d
    _fdiv                 = 110, // 0x6e
    _ddiv                 = 111, // 0x6f
    _irem                 = 112, // 0x70
    _lrem                 = 113, // 0x71
    _frem                 = 114, // 0x72
    _drem                 = 115, // 0x73
    _ineg                 = 116, // 0x74
    _lneg                 = 117, // 0x75
    _fneg                 = 118, // 0x76
    _dneg                 = 119, // 0x77
    _ishl                 = 120, // 0x78
    _lshl                 = 121, // 0x79
    _ishr                 = 122, // 0x7a
    _lshr                 = 123, // 0x7b
    _iushr                = 124, // 0x7c
    _lushr                = 125, // 0x7d
    _iand                 = 126, // 0x7e
    _land                 = 127, // 0x7f
    _ior                  = 128, // 0x80
    _lor                  = 129, // 0x81
    _ixor                 = 130, // 0x82
    _lxor                 = 131, // 0x83
    _iinc                 = 132, // 0x84
    _i2l                  = 133, // 0x85
    _i2f                  = 134, // 0x86
    _i2d                  = 135, // 0x87
    _l2i                  = 136, // 0x88
    _l2f                  = 137, // 0x89
    _l2d                  = 138, // 0x8a
    _f2i                  = 139, // 0x8b
    _f2l                  = 140, // 0x8c
    _f2d                  = 141, // 0x8d
    _d2i                  = 142, // 0x8e
    _d2l                  = 143, // 0x8f
    _d2f                  = 144, // 0x90
    _i2b                  = 145, // 0x91
    _i2c                  = 146, // 0x92
    _i2s                  = 147, // 0x93
    _lcmp                 = 148, // 0x94
    _fcmpl                = 149, // 0x95
    _fcmpg                = 150, // 0x96
    _dcmpl                = 151, // 0x97
    _dcmpg                = 152, // 0x98
    _ifeq                 = 153, // 0x99
    _ifne                 = 154, // 0x9a
    _iflt                 = 155, // 0x9b
    _ifge                 = 156, // 0x9c
    _ifgt                 = 157, // 0x9d
    _ifle                 = 158, // 0x9e
    _if_icmpeq            = 159, // 0x9f
    _if_icmpne            = 160, // 0xa0
    _if_icmplt            = 161, // 0xa1
    _if_icmpge            = 162, // 0xa2
    _if_icmpgt            = 163, // 0xa3
    _if_icmple            = 164, // 0xa4
    _if_acmpeq            = 165, // 0xa5
    _if_acmpne            = 166, // 0xa6
    _goto                 = 167, // 0xa7
    _jsr                  = 168, // 0xa8
    _ret                  = 169, // 0xa9
    _tableswitch          = 170, // 0xaa
    _lookupswitch         = 171, // 0xab
    _ireturn              = 172, // 0xac
    _lreturn              = 173, // 0xad
    _freturn              = 174, // 0xae
    _dreturn              = 175, // 0xaf
    _areturn              = 176, // 0xb0
    _return               = 177, // 0xb1
    _getstatic            = 178, // 0xb2
    _putstatic            = 179, // 0xb3
    _getfield             = 180, // 0xb4
    _putfield             = 181, // 0xb5
    _invokevirtual        = 182, // 0xb6
    _invokespecial        = 183, // 0xb7
    _invokestatic         = 184, // 0xb8
    _invokeinterface      = 185, // 0xb9
    _invokedynamic        = 186, // 0xba     // if EnableInvokeDynamic
    _new                  = 187, // 0xbb
    _newarray             = 188, // 0xbc
    _anewarray            = 189, // 0xbd
    _arraylength          = 190, // 0xbe
    _athrow               = 191, // 0xbf
    _checkcast            = 192, // 0xc0
    _instanceof           = 193, // 0xc1
    _monitorenter         = 194, // 0xc2
    _monitorexit          = 195, // 0xc3
    _wide                 = 196, // 0xc4
    _multianewarray       = 197, // 0xc5
    _ifnull               = 198, // 0xc6
    _ifnonnull            = 199, // 0xc7
    _goto_w               = 200, // 0xc8
    _jsr_w                = 201, // 0xc9
    _breakpoint           = 202, // 0xca

    number_of_java_codes,

    // JVM bytecodes
    _fast_agetfield       = number_of_java_codes,
    _fast_bgetfield       ,
    _fast_cgetfield       ,
    _fast_dgetfield       ,
    _fast_fgetfield       ,
    _fast_igetfield       ,
    _fast_lgetfield       ,
    _fast_sgetfield       ,

    _fast_aputfield       ,
    _fast_bputfield       ,
    _fast_zputfield       ,
    _fast_cputfield       ,
    _fast_dputfield       ,
    _fast_fputfield       ,
    _fast_iputfield       ,
    _fast_lputfield       ,
    _fast_sputfield       ,

    _fast_aload_0         ,
    _fast_iaccess_0       ,
    _fast_aaccess_0       ,
    _fast_faccess_0       ,

    _fast_iload           ,
    _fast_iload2          ,
    _fast_icaload         ,

    _fast_invokevfinal    ,
    _fast_linearswitch    ,
    _fast_binaryswitch    ,

    // special handling of oop constants:
    _fast_aldc            ,
    _fast_aldc_w          ,

    _return_register_finalizer    ,

    // special handling of signature-polymorphic methods:
    _invokehandle         ,

    _shouldnotreachhere,      // For debugging

    // Platform specific JVM bytecodes
#ifdef TARGET_ARCH_x86
# include "bytecodes_x86.hpp"
#endif
#ifdef TARGET_ARCH_aarch64
# include "bytecodes_aarch64.hpp"
#endif
#ifdef TARGET_ARCH_sparc
# include "bytecodes_sparc.hpp"
#endif
#ifdef TARGET_ARCH_zero
# include "bytecodes_zero.hpp"
#endif
#ifdef TARGET_ARCH_arm
# include "bytecodes_arm.hpp"
#endif
#ifdef TARGET_ARCH_ppc
# include "bytecodes_ppc.hpp"
#endif


    number_of_codes
  };

  // Flag bits derived from format strings, can_trap, can_rewrite, etc.:
  enum Flags {
    // semantic flags:
    _bc_can_trap      = 1<<0,     // bytecode execution can trap or block
    _bc_can_rewrite   = 1<<1,     // bytecode execution has an alternate form

    // format bits (determined only by the format string):
    _fmt_has_c        = 1<<2,     // constant, such as sipush "bcc"
    _fmt_has_j        = 1<<3,     // constant pool cache index, such as getfield "bjj"
    _fmt_has_k        = 1<<4,     // constant pool index, such as ldc "bk"
    _fmt_has_i        = 1<<5,     // local index, such as iload
    _fmt_has_o        = 1<<6,     // offset, such as ifeq
    _fmt_has_nbo      = 1<<7,     // contains native-order field(s)
    _fmt_has_u2       = 1<<8,     // contains double-byte field(s)
    _fmt_has_u4       = 1<<9,     // contains quad-byte field
    _fmt_not_variable = 1<<10,    // not of variable length (simple or wide)
    _fmt_not_simple   = 1<<11,    // either wide or variable length
    _all_fmt_bits     = (_fmt_not_simple*2 - _fmt_has_c),

    // Example derived format syndromes:
    _fmt_b      = _fmt_not_variable,
    _fmt_bc     = _fmt_b | _fmt_has_c,
    _fmt_bi     = _fmt_b | _fmt_has_i,
    _fmt_bkk    = _fmt_b | _fmt_has_k | _fmt_has_u2,
    _fmt_bJJ    = _fmt_b | _fmt_has_j | _fmt_has_u2 | _fmt_has_nbo,
    _fmt_bo2    = _fmt_b | _fmt_has_o | _fmt_has_u2,
    _fmt_bo4    = _fmt_b | _fmt_has_o | _fmt_has_u4
  };
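
  // For example, iload (format "bi": opcode plus a one-byte local index)
  // carries the _fmt_bi syndrome, i.e. _fmt_not_variable | _fmt_has_i, while
  // the rewritten field/invoke bytecodes carry _fmt_bJJ: a two-byte constant
  // pool cache index (_fmt_has_j | _fmt_has_u2) kept in native byte order
  // (_fmt_has_nbo).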

 private:
  static bool        _is_initialized;
  static const char* _name          [number_of_codes];
  static BasicType   _result_type   [number_of_codes];
  static s_char      _depth         [number_of_codes];
  static u_char      _lengths       [number_of_codes];
  static Code        _java_code     [number_of_codes];
  static jchar       _flags         [(1<<BitsPerByte)*2]; // all second page for wide formats

  static void        def(Code code, const char* name, const char* format, const char* wide_format, BasicType result_type, int depth, bool can_trap);
  static void        def(Code code, const char* name, const char* format, const char* wide_format, BasicType result_type, int depth, bool can_trap, Code java_code);
  static void        pd_initialize();              // platform specific initialization
  static Code        pd_base_code_for(Code code);  // platform specific base_code_for implementation

  // Verify that bcp points into method
#ifdef ASSERT
  static bool        check_method(const Method* method, address bcp);
#endif
  static bool check_must_rewrite(Bytecodes::Code bc);

 public:
  // Conversion
  static void        check          (Code code)    { assert(is_defined(code),      err_msg("illegal code: %d", (int)code)); }
  static void        wide_check     (Code code)    { assert(wide_is_defined(code), err_msg("illegal code: %d", (int)code)); }
  static Code        cast           (int  code)    { return (Code)code; }


  // Fetch a bytecode, hiding breakpoints as necessary.  The method
  // argument is used to convert breakpoints back into the original
  // bytecode.  The CI uses these methods but guarantees that
  // breakpoints are already hidden, so the method argument should be
  // passed as NULL; in that case the bcp and the Method* are unrelated
  // memory.
  static Code       code_at(const Method* method, address bcp) {
    assert(method == NULL || check_method(method, bcp), "bcp must point into method");
    Code code = cast(*bcp);
    assert(code != _breakpoint || method != NULL, "need Method* to decode breakpoint");
    return (code != _breakpoint) ? code : non_breakpoint_code_at(method, bcp);
  }
  static Code       java_code_at(const Method* method, address bcp) {
    return java_code(code_at(method, bcp));
  }
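
  // For example, code_at(method, bcp) applied to a bcp whose byte is 0xca
  // (_breakpoint) returns the original bytecode recorded for that location,
  // whereas code_or_bp_at(bcp) below returns _breakpoint itself.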

  // Fetch a bytecode or a breakpoint:
  static Code       code_or_bp_at(address bcp)    { return (Code)cast(*bcp); }

  static Code       code_at(Method* method, int bci);
  static bool       is_active_breakpoint_at(address bcp) { return (Code)*bcp == _breakpoint; }

  // Find a bytecode, behind a breakpoint if necessary:
  static Code       non_breakpoint_code_at(const Method* method, address bcp);

  // Bytecode attributes
  static bool        is_defined     (int  code)    { return 0 <= code && code < number_of_codes && flags(code, false) != 0; }
  static bool        wide_is_defined(int  code)    { return is_defined(code) && flags(code, true) != 0; }
  static const char* name           (Code code)    { check(code);      return _name          [code]; }
  static BasicType   result_type    (Code code)    { check(code);      return _result_type   [code]; }
  static int         depth          (Code code)    { check(code);      return _depth         [code]; }
  // Note: Length functions must return <=0 for invalid bytecodes.
  // Calling check(code) in length functions would throw an unwanted assert.
  static int         length_for     (Code code)    { /*no check*/      return _lengths       [code] & 0xF; }
  static int         wide_length_for(Code code)    { /*no check*/      return _lengths       [code] >> 4; }
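  // (Each _lengths entry packs both values into one byte: the fixed length in
  //  the low nibble and the wide length in the high nibble; iload, with
  //  formats "bi"/"wbii", would therefore be stored as 0x42.)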
  static bool        can_trap       (Code code)    { check(code);      return has_all_flags(code, _bc_can_trap, false); }
  static Code        java_code      (Code code)    { check(code);      return _java_code     [code]; }
  static bool        can_rewrite    (Code code)    { check(code);      return has_all_flags(code, _bc_can_rewrite, false); }
  static bool        must_rewrite(Bytecodes::Code code) { return can_rewrite(code) && check_must_rewrite(code); }
  static bool        native_byte_order(Code code)  { check(code);      return has_all_flags(code, _fmt_has_nbo, false); }
  static bool        uses_cp_cache  (Code code)    { check(code);      return has_all_flags(code, _fmt_has_j, false); }
  // if 'end' is provided, it indicates the end of the code buffer which
  // should not be read past when parsing.
  static int         special_length_at(Bytecodes::Code code, address bcp, address end = NULL);
  static int         special_length_at(Method* method, address bcp, address end = NULL) { return special_length_at(code_at(method, bcp), bcp, end); }
  static int         raw_special_length_at(address bcp, address end = NULL);
  static int         length_for_code_at(Bytecodes::Code code, address bcp)  { int l = length_for(code); return l > 0 ? l : special_length_at(code, bcp); }
  static int         length_at      (Method* method, address bcp)  { return length_for_code_at(code_at(method, bcp), bcp); }
  static int         java_length_at (Method* method, address bcp)  { return length_for_code_at(java_code_at(method, bcp), bcp); }
  static bool        is_java_code   (Code code)    { return 0 <= code && code < number_of_java_codes; }

  static bool        is_aload       (Code code)    { return (code == _aload  || code == _aload_0  || code == _aload_1
                                                                             || code == _aload_2  || code == _aload_3); }
  static bool        is_astore      (Code code)    { return (code == _astore || code == _astore_0 || code == _astore_1
                                                                             || code == _astore_2 || code == _astore_3); }

  static bool        is_store_into_local(Code code){ return (_istore <= code && code <= _astore_3); }
  static bool        is_const       (Code code)    { return (_aconst_null <= code && code <= _ldc2_w); }
  static bool        is_zero_const  (Code code)    { return (code == _aconst_null || code == _iconst_0
                                                           || code == _fconst_0 || code == _dconst_0); }
  static bool        is_return      (Code code)    { return (_ireturn <= code && code <= _return); }
  static bool        is_invoke      (Code code)    { return (_invokevirtual <= code && code <= _invokedynamic); }
  static bool        has_receiver   (Code code)    { assert(is_invoke(code), "");  return code == _invokevirtual ||
                                                                                          code == _invokespecial ||
                                                                                          code == _invokeinterface; }
  static bool        has_optional_appendix(Code code) { return code == _invokedynamic || code == _invokehandle; }

  static int         compute_flags  (const char* format, int more_flags = 0);  // compute the flags
  static int         flags          (int code, bool is_wide) {
    assert(code == (u_char)code, "must be a byte");
    return _flags[code + (is_wide ? (1<<BitsPerByte) : 0)];
  }
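  // The _flags table holds two 256-entry pages: entry 'code' describes the
  // normal form and entry 'code + (1<<BitsPerByte)' describes the wide form,
  // hence the page offset added above when is_wide is true.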
  static int         format_bits    (Code code, bool is_wide) { return flags(code, is_wide) & _all_fmt_bits; }
  static bool        has_all_flags  (Code code, int test_flags, bool is_wide) {
    return (flags(code, is_wide) & test_flags) == test_flags;
  }

  // Initialization
  static void        initialize     ();
};

#endif // SHARE_VM_INTERPRETER_BYTECODES_HPP