/*
* DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
*
* under the terms of the GNU General Public License version 2 only, as
* published by the Free Software Foundation.
*
* This code is distributed in the hope that it will be useful, but WITHOUT
* ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
* FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
* version 2 for more details (a copy is included in the LICENSE file that
* accompanied this code).
*
* You should have received a copy of the GNU General Public License version
* 2 along with this work; if not, write to the Free Software Foundation,
* Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
*
* Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
* or visit www.oracle.com if you need additional information or have any
* questions.
*
*/
#include "precompiled.hpp"
#include "classfile/systemDictionary.hpp"
#include "code/debugInfoRec.hpp"
#include "gc_interface/collectedHeap.inline.hpp"
#include "interpreter/bytecodeStream.hpp"
#include "interpreter/bytecodeTracer.hpp"
#include "interpreter/bytecodes.hpp"
#include "interpreter/interpreter.hpp"
#include "interpreter/oopMapCache.hpp"
#include "memory/gcLocker.hpp"
#include "memory/generation.hpp"
#include "memory/oopFactory.hpp"
#include "oops/klassOop.hpp"
#include "oops/methodDataOop.hpp"
#include "oops/methodOop.hpp"
#include "oops/oop.inline.hpp"
#include "oops/symbol.hpp"
#include "prims/jvmtiExport.hpp"
#include "prims/methodHandles.hpp"
#include "prims/nativeLookup.hpp"
#include "runtime/arguments.hpp"
#include "runtime/compilationPolicy.hpp"
#include "runtime/frame.inline.hpp"
#include "runtime/handles.inline.hpp"
#include "runtime/relocator.hpp"
#include "runtime/sharedRuntime.hpp"
#include "runtime/signature.hpp"
#include "utilities/quickSort.hpp"
#include "utilities/xmlstream.hpp"
// Implementation of methodOopDesc
return _adapter->get_i2c_entry();
}
return _adapter->get_c2i_entry();
}
return _adapter->get_c2i_unverified_entry();
}
}
return name_and_sig_as_C_string(Klass::cast(constants()->pool_holder()), name(), signature(), buf, size);
}
char* methodOopDesc::name_and_sig_as_C_string(Klass* klass, Symbol* method_name, Symbol* signature) {
return dest;
}
char* methodOopDesc::name_and_sig_as_C_string(Klass* klass, Symbol* method_name, Symbol* signature, char* buf, int size) {
}
return buf;
}
int methodOopDesc::fast_exception_handler_bci_for(methodHandle mh, KlassHandle ex_klass, int throw_bci, TRAPS) {
// exception table holds quadruple entries of the form (beg_bci, end_bci, handler_bci, klass_index)
// access exception table
// iterate through all entries sequentially
for (int i = 0; i < length; i ++) {
//reacquire the table in case a GC happened
// exception handler bci range covers throw_bci => investigate further
if (klass_index == 0) {
return handler_bci;
return handler_bci;
} else {
// we know the exception class => get the constraint class
// this may require loading of the constraint class; if verification
// fails or some other exception occurs, return handler_bci
return handler_bci;
}
}
}
}
return -1;
}
#ifdef ASSERT
if (!has_capability) {
if (!VerifyStack && !VerifyLastFrame) {
// verify stack calls this outside VM thread
warning("oopmap should only be accessed by the "
"VM, GC task or CMS threads (or during debugging)");
local_mask.print();
}
}
#endif
return;
}
#ifdef ASSERT
err_msg("bcp doesn't belong to this method: bcp: " INTPTR_FORMAT ", method: %s", bcp, name_and_sig_as_C_string()));
}
#endif
}
// Return (int)bcx if it appears to be a valid BCI.
// Return bci_from((address)bcx) if it appears to be a valid BCP.
// Return -1 otherwise.
// Used by profiling code, when invalid data is a possibility.
// The caller is responsible for validating the methodOop itself.
// keep bci as -1 if not a valid bci
// code_size() may return 0 and we allow 0 here
// the method may be native
bci = 0;
}
}
// Assert that if we have dodged any asserts, bci is negative.
return bci;
}
assert((is_native() && bci == 0) || (!is_native() && 0 <= bci && bci < code_size()), "illegal bci");
return bcp;
}
// If native, then include pointers for native_function and signature_handler
}
klassOop k = method_holder();
}
"interpreter entry must be valid");
}
// Attempt to return method oop to original state. Clear any pointers
// (to objects outside the shared spaces). We won't be able to predict
// where they should point in a new JVM. Further initialize some
// entries now in order to allow them to be write-protected later.
}
// Invocation counter is reset when the methodOop is compiled.
// If the method has compiled code we therefore assume it has
// been executed more than n times.
// interpreter doesn't bump invocation counter of trivial methods
// compiler does not bump invocation counter of compiled methods
return true;
}
else if (_invocation_counter.carry() || (method_data() != NULL && method_data()->invocation_counter()->carry())) {
// The carry bit is set when the counter overflows and causes
// a compilation to occur. We don't know how many times
// the counter has been reset, so we simply assume it has
// been executed more than n times.
return true;
} else {
return invocation_count() > n;
}
}
#ifndef PRODUCT
if (WizardMode) {
// dump the size of the byte codes
}
if (CountCompiledCalls) {
}
}
#endif
// Build a methodDataOop object to hold information about this method
// collected in the interpreter.
// Do not profile method if current thread holds the pending list lock,
// which avoids deadlock for acquiring the MethodData_lock.
return;
}
// Grab a lock here to prevent multiple
// methodDataOops from being created.
// At the end of the run, the MDO, full of data, will be dumped.
}
}
}
// The current system doesn't use inline caches in the interpreter
// => nothing to do (keep this method around for future use)
}
// not an inline function, to avoid a header dependency on Interpreter
}
}
#ifdef CC_INTERP
}
#endif
}
return code_size() == 1
}
// Returns true if this method is a vanilla constructor, i.e. an "<init>" "()V" method
// which only calls the superclass vanilla constructor and possibly does stores of
// zero constants to local fields:
//
// aload_0
// invokespecial
// indexbyte1
// indexbyte2
//
// followed by an (optional) sequence of:
//
// aload_0
// aconst_null / iconst_0 / fconst_0 / dconst_0
// putfield
// indexbyte1
// indexbyte2
//
// followed by:
//
// return
assert(name() == vmSymbols::object_initializer_name(), "Should only be called for default constructors");
assert(signature() == vmSymbols::void_method_signature(), "Should only be called for default constructors");
// Check if size match
if (cb[0] != Bytecodes::_aload_0 || cb[1] != Bytecodes::_invokespecial || cb[last] != Bytecodes::_return) {
// Does not call superclass default constructor
return false;
}
// Check optional sequence
}
return true;
}
switch( bc ) {
case Bytecodes::_ifnonnull:
case Bytecodes::_if_icmpeq:
case Bytecodes::_if_icmpne:
case Bytecodes::_if_icmplt:
case Bytecodes::_if_icmpgt:
case Bytecodes::_if_icmple:
case Bytecodes::_if_icmpge:
case Bytecodes::_if_acmpeq:
case Bytecodes::_if_acmpne:
break;
break;
}
}
return _access_flags.has_loops();
}
// %%% Should return true for private methods also,
// since there is no way to override them.
}
return is_strict();
}
if (is_final_method()) return true;
return vtable_index() == nonvirtual_vtable_index;
}
if (code_size() != 5) return false;
if (size_of_parameters() != 1) return false;
return true;
}
}
return (is_static() ||
}
// For classfiles version 51 or greater, ensure that the clinit method is
// static. Non-static methods with the name "<clinit>" are not static
// initializers. (older classfiles exempted for backward compatibility)
}
if (length == 0) { // common case
} else {
objArrayOop m_oop = oopFactory::new_objArray(SystemDictionary::Class_klass(), length, CHECK_(objArrayHandle()));
for (int i = 0; i < length; i++) {
CheckedExceptionElement* table = h_this->checked_exceptions_start(); // recompute on each iteration, not gc safe
assert(Klass::cast(k)->is_subclass_of(SystemDictionary::Throwable_klass()), "invalid exception class");
}
return mirrors;
}
};
int best_bci = 0;
if (has_linenumber_table()) {
// The line numbers are a short array of 2-tuples [start_pc, line_number].
// Not necessarily sorted and not necessarily one-to-one.
// perfect match
} else {
}
}
}
}
return best_line;
}
} else {
return true;
}
}
if (must_be_resolved) {
// Make sure klass is resolved in constantpool.
}
}
assert(!is_method_handle_intrinsic() || function == SharedRuntime::native_method_throw_unsatisfied_link_error_entry(), "");
// We can see racers trying to place the same native function into place. Once
// is plenty.
// native_method_throw_unsatisfied_link_error_entry() should only
// be passed when post_event_flag is false.
"post_event_flag mis-match");
// post the bind event, and possible change the bind function
}
// This function can be called more than once. We must make sure that we always
// use the latest registered method -> check if a stub already has been generated.
// If so, we have to make it not_entrant.
nm->make_not_entrant();
}
}
if (is_method_handle_intrinsic())
return false; // special-cased in SharedRuntime::generate_native_wrapper
}
// Note: is_method_handle_intrinsic() is allowed here.
clear_code();
}
methodHandle mh(this);
}
}
void methodOopDesc::print_made_not_compilable(int comp_level, bool is_osr, bool report, const char* reason) {
if (PrintCompilation && report) {
if (comp_level == CompLevel_all) {
} else {
for (int i = (int)CompLevel_none; i <= comp_level; i++) {
}
}
this->print_short_name(tty);
if (size > 0) {
}
}
}
}
}
}
if (number_of_breakpoints() > 0)
return true;
if (is_method_handle_intrinsic())
return !is_synthetic(); // the generated adapters must be compiled
if (comp_level == CompLevel_any)
return is_not_c1_compilable() || is_not_c2_compilable();
if (is_c1_compile(comp_level))
return is_not_c1_compilable();
if (is_c2_compile(comp_level))
return is_not_c2_compilable();
return false;
}
// call this when compiler finds that this method is not compilable
if (comp_level == CompLevel_all) {
} else {
if (is_c1_compile(comp_level))
if (is_c2_compile(comp_level))
}
}
if (is_not_compilable(comp_level))
return true;
if (comp_level == CompLevel_any)
return is_not_c1_osr_compilable() || is_not_c2_osr_compilable();
if (is_c1_compile(comp_level))
return is_not_c1_osr_compilable();
if (is_c2_compile(comp_level))
return is_not_c2_osr_compilable();
return false;
}
if (comp_level == CompLevel_all) {
} else {
if (is_c1_compile(comp_level))
if (is_c2_compile(comp_level))
}
}
// Revert to using the interpreter and clear out the nmethod
// this may be NULL if c2i adapters have not been made yet
// Only should happen at allocate time.
} else {
}
}
// Called by class data sharing to remove any entry points (which are not shared)
_i2i_entry = NULL;
if (is_native()) {
*native_function_addr() = NULL;
}
invocation_counter()->reset();
backedge_counter()->reset();
}
// Called when the method_holder is getting linked. Setup entrypoints so the method
// is ready to be called from interpreter, compiler, and vtables.
// If the code cache is full, we may reenter this function for the
// leftover methods that weren't linked.
if (_i2i_entry != NULL) return;
// Setup interpreter entrypoint
// Sets both _i2i_entry and _from_interpreted_entry
// Don't overwrite already registered native entries.
if (is_native() && !has_native_function()) {
}
// Setup compiler entrypoint. This is made eagerly, so we do not need
// special handling of vtables. An alternative is to make adapters more
// lazily by calling make_adapter() from from_compiled_entry() for the
// normal calls. For vtable calls life gets more complicated. When a
// call-site goes mega-morphic we need adapters in all methods which can be
// called from the vtable. We need adapters on such methods that get loaded
// later. Ditto for mega-morphic itable calls. If this proves to be a
// problem we'll make these lazily later.
// ONLY USE the h_method now as make_adapter may have blocked
}
// Adapters for compiled code are made eagerly here. They are fairly
// small (generally < 100 bytes) and quick to make (and cached and shared)
// so making them eagerly shouldn't be too expensive.
THROW_MSG_NULL(vmSymbols::java_lang_VirtualMachineError(), "out of space in CodeCache for adapters");
}
return adapter->get_c2i_entry();
}
// The verified_code_entry() must be called when a invoke is resolved
// on this method.
// It returns the compiled code entry point, after asserting not null.
// This function is called after potential safepoints so that nmethod
// or adapter that it points to is still live and valid.
// This function must not hit a safepoint!
if (saved_code != NULL) {
methodHandle method(this);
}
}
return _from_compiled_entry;
}
// Check that if an nmethod ref exists, it has a backlink to this or no backlink at all
// (could be racing a deopt).
// Not inline to avoid circular ref.
// cached in a register or local. There's a race on the value of the field.
return code == NULL || (code->method() == NULL) || (code->method() == (methodOop)this && !code->is_osr_method());
}
// Install compiled code. Instantly it can execute.
// These writes must happen in this order, because the interpreter will
// directly jump to from_interpreted_entry which jumps to an i2c adapter
// which jumps to _from_compiled_entry.
// In theory there could be a race here. In practice it is unlikely
// and not worth worrying about.
}
#ifdef SHARK
#else //!SHARK
// Instantly compiled code can execute.
if (!mh->is_method_handle_intrinsic())
#endif //!SHARK
}
if (ik->is_interface()) return false;
// If method is an interface, we skip it - except if it
// is a miranda method
// Check that method is not a miranda method
// No implementation exist - so miranda method
return false;
}
return true;
}
if (vtable_index() == nonvirtual_vtable_index) {
return false;
} else {
}
}
// give advice about whether this methodOop should be cached or not
if (is_old()) {
// This method has been redefined. It is either EMCP or obsolete
// and we don't want to cache it because that would pin the method
// down and prevent it from being collectible if and when it
// finishes executing.
return true;
}
if (mark()->should_not_be_cached()) {
// It is either not safe or not a good idea to cache this
// method at this time because of the state of the embedded
// markOop. See markOop.cpp for the gory details.
return true;
}
// caching this method should be just fine
return false;
}
// Constant pool structure for invoke methods:
enum {
};
// Test if this method is an MH adapter frame generated by Java code.
// Cf. java/lang/invoke/InvokerBytecodeGenerator
}
// Test if this method is an internal MH primitive method.
}
}
// Make an instance of a signature-polymorphic internal MH primitive.
TRAPS) {
if (TraceMethodHandles) {
tty->print_cr("make_method_handle_intrinsic MH.%s%s", name->as_C_string(), signature->as_C_string());
}
// invariant: cp->symbol_at_put is preceded by a refcount increment (more usually a lookup)
{
}
cp->set_preresolution();
// decide on access bits: public or not?
methodHandle m;
{
}
m->set_constants(cp());
#ifdef CC_INTERP
#endif
m->init_intrinsic_id();
#ifdef ASSERT
#endif //ASSERT
// Finally, set up its entry points.
return m;
}
return klass;
}
return NULL;
}
methodHandle methodOopDesc::clone_with_new_data(methodHandle m, u_char* new_code, int new_code_length,
// Code below does not work for native methods - they should never get rewritten anyway
// Allocate new methodOop
// Allocate newm_oop with the is_conc_safe parameter set
// to IsUnsafeConc to indicate that newm_oop is not yet
// safe for concurrent processing by a GC.
CHECK_(methodHandle()));
// Create a shallow copy of methodOopDesc part, but be careful to preserve the new constMethodOop
// Create shallow copy of constMethodOopDesc, but be careful to preserve the methodOop
// is_conc_safe is set to false because that is the value of
// is_conc_safe initialized into newcm and the copy should
// not overwrite that value. During the window during which it is
// tagged as unsafe, some extra work could be needed during precleaning
// or concurrent marking but those phases will be correct. Setting and
// resetting is done in preference to a careful copying into newcm to
// avoid having to know the precise layout of a constMethodOop.
// NOTE: this is a reachable object that transiently signals "conc_unsafe"
// However, no allocations are done during this window
// during which it is tagged conc_unsafe, so we are assured that any concurrent
// thread will not wait forever for the object to revert to "conc_safe".
// Further, any such conc_unsafe object will indicate a stable size
// through the transition.
// Copy new byte codes
// Copy line number table
if (new_compressed_linenumber_size > 0) {
}
// Copy checked_exceptions
if (checked_exceptions_len > 0) {
m->checked_exceptions_start(),
checked_exceptions_len * sizeof(CheckedExceptionElement));
}
// Copy exception table
if (exception_table_len > 0) {
m->exception_table_start(),
exception_table_len * sizeof(ExceptionTableElement));
}
// Copy local variable number table
if (localvariable_len > 0) {
localvariable_len * sizeof(LocalVariableTableElement));
}
// Only set is_conc_safe to true when changes to newcm are
// complete.
assert(!newcm->is_parsable() || ncmsz < 0 || newcm->size() == ncmsz, "newcm->size() inconsistency");
newcm->set_is_conc_safe(true);
return newm;
}
// if loader is not the default loader (i.e., != NULL), we can't know the intrinsics
// because we are not loading from core libraries
// exception: the AES intrinsics come from lib/ext/sunjce_provider.jar
// which does not use the class default class loader so we check for its loader here
instanceKlass::cast(holder)->class_loader()->klass()->klass_part()->name() != vmSymbols::sun_misc_Launcher_ExtClassLoader()) {
}
// see if the klass name is well-known:
}
// the klass name is well-known:
// ditto for method and signature:
return;
return;
}
// A few slightly irregular cases:
switch (klass_id) {
// Second chance: check in regular Math.
switch (name_id) {
// pretend it is the corresponding method in the non-strict class:
break;
}
break;
// Signature-polymorphic methods: MethodHandle.invoke*, InvokeDynamic.*.
if (!is_native()) break;
break;
}
// Set up its iid. It is an alias method.
return;
}
}
// These two methods are static since a GC may move the methodOopDesc
if (THREAD->is_Compiler_thread()) {
// There is nothing useful this routine can do from within the Compile thread.
// Hopefully, the signature contains only well-known classes.
return false;
}
bool sig_is_loaded = true;
// We are loading classes eagerly. If a ClassNotFoundException or
// a LinkageError was generated, be sure to ignore it.
if (HAS_PENDING_EXCEPTION) {
} else {
return false;
}
}
}
}
return sig_is_loaded;
}
}
}
return false;
}
// Exposed so field engineers can debug VM
#ifdef PRODUCT
#else
#endif
}
// This is only done during class loading, so it is OK to assume method_idnum matches the methods() array
if (annotations == NULL) {
return;
}
int i;
// Copy to temp array
temp_array->clear();
for (i = 0; i < length; i++) {
}
// Copy back using old method indices
for (i = 0; i < length; i++) {
}
}
// Comparer for sorting an object array containing
// methodOops.
// Used non-template method_comparator methods since
// Visual Studio 2003 compiler generates incorrect
// optimized code for it.
}
}
// This is only done during class loading, so it is OK to assume method_idnum matches the methods() array
bool idempotent) {
if (length > 1) {
bool do_annotations = false;
if (methods_annotations != NULL ||
do_annotations = true;
}
if (do_annotations) {
// Remember current method ordering so we can reorder annotations
for (int i = 0; i < length; i++) {
m->set_method_idnum(i);
}
}
{
if (UseCompressedOops) {
QuickSort::sort<narrowOop>((narrowOop*)(methods->base()), length, method_comparator_narrowOop, idempotent);
} else {
}
if (UseConcMarkSweepGC) {
// For CMS we need to dirty the cards for the array
}
}
// Sort annotations if necessary
assert(methods_parameter_annotations == NULL || methods_parameter_annotations->length() == methods->length(), "");
assert(methods_default_annotations == NULL || methods_default_annotations->length() == methods->length(), "");
if (do_annotations) {
// Allocate temporary storage
}
// Reset method ordering
for (int i = 0; i < length; i++) {
m->set_method_idnum(i);
}
}
}
private:
bool _use_separator;
_use_separator = true;
}
public:
_use_separator = false;
}
};
}
//-----------------------------------------------------------------------------------
// Non-product code
#ifndef PRODUCT
}
BytecodeStream s(mh);
}
#endif // not PRODUCT
// Simple compression of line number tables. We use a regular compressed stream, except that we compress deltas
// between (bci,line) pairs since they are smaller. If (bci delta, line delta) fits in (5-bit unsigned, 3-bit unsigned)
// we save it as one byte, otherwise we write a 0xFF escape character and use regular compression. 0x0 is used
// as end-of-stream terminator.
// bci and line number do not compress into a single byte.
// Write out escape character and use regular compression for bci and line number.
}
// See comment in methodOop.hpp which explains why this exists.
}
#endif
// Construct a read stream over a compressed line number table.
// The table stores (bci, line) pairs as deltas (see the compression
// comment above), so decoding starts from the origin (0, 0);
// read_pair() accumulates the deltas into _bci and _line as it advances.
CompressedLineNumberReadStream::CompressedLineNumberReadStream(u_char* buffer) : CompressedReadStream(buffer) {
  _bci = 0;
  _line = 0;
}
// Check for terminator
if (next == 0) return false;
if (next == 0xFF) {
// Escape character, regular compression used
_bci += read_signed_int();
_line += read_signed_int();
} else {
// Single byte compression used
}
return true;
}
return bp->orig_bytecode();
}
}
return Bytecodes::_shouldnotreachhere;
}
// and continue, in case there is more than one
}
}
}
// do this last:
}
// bci value of -1 is used to delete all breakpoints in method m (ex: clear_all_breakpoint).
// do this first:
// unhook it
else
delete bp;
// When class is redefined JVMTI sets breakpoint in all versions of EMCP methods
// at same location. So we have multiple matching (method_index and bci)
// BreakpointInfo nodes in BreakpointInfo list. We should just delete one
// breakpoint for clear_breakpoint request and keep all other method versions
// BreakpointInfo for future clear_breakpoint request.
// bci value of -1 is used to clear all breakpoints (see clear_all_breakpoints)
// which is being called when class is unloaded. We delete all the Breakpoint
// information for all versions of method. We may not correctly restore the original
// bytecode in all method versions, but that is ok. Because the class is being unloaded
// so these methods won't be used anymore.
if (bci >= 0) {
break;
}
} else {
// This one is a keeper.
}
}
}
clear_matches(this, bci);
}
clear_matches(this, -1);
}
if (TieredCompilation) {
if (invocation_counter()->carry() || ((mdo != NULL) ? mdo->invocation_counter()->carry() : false)) {
return InvocationCounter::count_limit;
} else {
}
} else {
return invocation_counter()->count();
}
}
if (TieredCompilation) {
return InvocationCounter::count_limit;
} else {
}
} else {
return backedge_counter()->count();
}
}
return mdo->highest_comp_level();
} else {
return CompLevel_none;
}
}
return mdo->highest_osr_comp_level();
} else {
return CompLevel_none;
}
}
}
}
}
}
_name_index = m->name_index();
_signature_index = m->signature_index();
}
#ifdef ASSERT
{
}
#endif
{
// Deoptimize all dependents on this method
}
}
}