/*
* DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
*
 * This code is free software; you can redistribute it and/or modify it
 * under the terms of the GNU General Public License version 2 only, as
* published by the Free Software Foundation.
*
* This code is distributed in the hope that it will be useful, but WITHOUT
* ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
* FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
* version 2 for more details (a copy is included in the LICENSE file that
* accompanied this code).
*
* You should have received a copy of the GNU General Public License version
* 2 along with this work; if not, write to the Free Software Foundation,
* Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
*
* Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
* or visit www.oracle.com if you need additional information or have any
* questions.
*
*/
#include "precompiled.hpp"
#include "classfile/systemDictionary.hpp"
#include "compiler/compileLog.hpp"
#include "oops/objArrayKlass.hpp"
#include "opto/addnode.hpp"
#include "opto/memnode.hpp"
#include "opto/mulnode.hpp"
#include "opto/rootnode.hpp"
#include "opto/runtime.hpp"
#include "runtime/sharedRuntime.hpp"
//------------------------------make_dtrace_method_entry_exit ----------------
// Dtrace -- record entry or exit of a method if compiled with dtrace support
void GraphKit::make_dtrace_method_entry_exit(ciMethod* method, bool is_entry) {
// Get base of thread-local storage area
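  // Hedged sketch of the elided step: materialize the current thread with a
  // ThreadLocalNode (the node-allocation arity follows the pre-JDK9 style and
  // is an assumption).
  Node* thread = _gvn.transform( new (C, 1) ThreadLocalNode() );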
  // Get method
  const TypeInstPtr* method_type = TypeInstPtr::make(TypePtr::Constant, method->klass(), true, method, 0);
  Node* method_node = _gvn.transform( ConNode::make(C, method_type) );
// For some reason, this call reads only raw memory.
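  // Hedged sketch of the elided call: a leaf runtime call into SharedRuntime
  // with only raw memory attached (RC_NARROW_MEM), passing thread and method.
  kill_dead_locals();
  const TypePtr* raw_adr_type = TypeRawPtr::BOTTOM;
  make_runtime_call(RC_LEAF | RC_NARROW_MEM,
                    OptoRuntime::dtrace_method_entry_exit_Type(),
                    is_entry ? CAST_FROM_FN_PTR(address, SharedRuntime::dtrace_method_entry)
                             : CAST_FROM_FN_PTR(address, SharedRuntime::dtrace_method_exit),
                    is_entry ? "dtrace_method_entry" : "dtrace_method_exit",
                    raw_adr_type,
                    thread, method_node);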
}
//=============================================================================
//------------------------------do_checkcast-----------------------------------
void Parse::do_checkcast() {
  bool will_link;
  ciKlass* klass = iter().get_klass(will_link);

  Node* obj = peek();

  // Throw uncommon trap if class is not loaded or the value we are casting
  // _from_ is not loaded, and value is not null. If the value _is_ NULL,
  // then the checkcast does nothing.
  const TypeOopPtr* tp = _gvn.type(obj)->isa_oopptr();

  if (!will_link || (tp && tp->klass() && !tp->klass()->is_loaded())) {
    if (C->log() != NULL) {
      if (!will_link) {
        C->log()->elem("assert_null reason='checkcast' klass='%d'",
                       C->log()->identify(klass));
      }
      if (tp && tp->klass() && !tp->klass()->is_loaded()) {
        // %%% Cannot happen?
        C->log()->elem("assert_null reason='checkcast source' klass='%d'",
                       C->log()->identify(tp->klass()));
      }
    }
    null_assert(obj);
    assert( stopped() || _gvn.type(peek())->higher_equal(TypePtr::NULL_PTR), "what's left behind is null" );
    if (!stopped()) {
      profile_null_checkcast();
    }
    return;
  }
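  // Hedged sketch of the elided main path: both klasses are loaded, so emit
  // the real subtype check against the constant klass. gen_checkcast can
  // itself uncommon-trap, which is why the operand is popped only afterwards.
  Node* res = gen_checkcast(obj, makecon(TypeKlassPtr::make(klass)));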
  // Pop from stack AFTER gen_checkcast because it can uncommon trap and
  // the debug info has to be correct.
  pop();
  push(res);
}
//------------------------------do_instanceof----------------------------------
void Parse::do_instanceof() {
  if (stopped())  return;
  // We would like to return false if class is not loaded, emitting a
  // dependency, but Java requires instanceof to load its operand.

  // Throw uncommon trap if class is not loaded
  bool will_link;
  ciKlass* klass = iter().get_klass(will_link);

  if (!will_link) {
    if (C->log() != NULL) {
      C->log()->elem("assert_null reason='instanceof' klass='%d'",
                     C->log()->identify(klass));
    }
    null_assert(peek());
    assert( stopped() || _gvn.type(peek())->higher_equal(TypePtr::NULL_PTR), "what's left behind is null" );
    if (!stopped()) {
      // The object is now known to be null.
      // Shortcut the effect of gen_instanceof and return "false" directly.
      pop();                   // pop the null
      push(_gvn.intcon(0));    // push false answer
    }
    return;
  }
// Push the bool result back on stack
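  // Hedged sketch of the elided step: gen_instanceof produces the 0/1 answer
  // against the constant klass; it can uncommon-trap, so pop only afterwards.
  Node* res = gen_instanceof(peek(), makecon(TypeKlassPtr::make(klass)));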
  // Pop from stack AFTER gen_instanceof because it can uncommon trap.
  pop();
  push(res);
}
//------------------------------array_store_check------------------------------
// pull array from stack and check that the store is valid
void Parse::array_store_check() {

  // Shorthand access to array store elements without popping them.
  Node* obj = peek(0);
  Node* idx = peek(1);
  Node* ary = peek(2);

  if (_gvn.type(obj) == TypePtr::NULL_PTR) {
    // There's never a type check on null values.
    // This cutout lets us avoid the uncommon_trap(Reason_array_check)
    // below, which turns into a performance liability if the
    // gen_checkcast folds up completely.
    return;
  }

  // Extract the array klass type
  int klass_offset = oopDesc::klass_offset_in_bytes();
  Node* p = basic_plus_adr( ary, ary, klass_offset );
  // p's type is array-of-OOPS plus klass_offset
  Node* array_klass = _gvn.transform( LoadKlassNode::make(_gvn, immutable_memory(), p, TypeInstPtr::KLASS) );
  // Get the array klass
  const TypeKlassPtr* tak = _gvn.type(array_klass)->is_klassptr();
// array_klass's type is generally INexact array-of-oop. Heroically
// cast the array klass to EXACT array and uncommon-trap if the cast
// fails.
  bool always_see_exact_class = false;
  if (MonomorphicArrayCheck
      && !too_many_traps(Deoptimization::Reason_array_check)) {
    always_see_exact_class = true;
    // (If no MDO at all, hope for the best, until a trap actually occurs.)
  }

  // Is the array klass exactly its defined type?
  if (always_see_exact_class && !tak->klass_is_exact()) {
    // Make a constant out of the inexact array klass
    const TypeKlassPtr* extak = tak->cast_to_exactness(true)->is_klassptr();
    Node* con  = makecon(extak);
    Node* cmp  = _gvn.transform(new (C, 3) CmpPNode( array_klass, con ));
    Node* bol  = _gvn.transform(new (C, 2) BoolNode( cmp, BoolTest::eq ));
    Node* ctrl = control();
    { BuildCutout unless(this, bol, PROB_MAX);
      uncommon_trap(Deoptimization::Reason_array_check,
                    Deoptimization::Action_maybe_recompile,
                    tak->klass());
    }
    if (stopped()) {          // MUST uncommon-trap?
      set_control(ctrl);      // Then Don't Do It, just fall into the normal checking
    } else {                  // Cast array klass to exactness:
      // Use the exact constant value we know it is.
      replace_in_map(array_klass, con);
      array_klass = con;      // Use cast value moving forward
    }
  }
// Come here for polymorphic array klasses
// Extract the array element class
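  // Hedged sketch of the elided load: the offset accessor below follows the
  // pre-permgen-removal layout (objArrayKlass embedded in a klassOop) and is
  // an assumption; the intent is simply "load the array's element klass".
  int element_klass_offset = objArrayKlass::element_klass_offset_in_bytes() + sizeof(oopDesc);
  Node* p2 = basic_plus_adr(array_klass, array_klass, element_klass_offset);
  Node* a_e_klass = _gvn.transform( LoadKlassNode::make(_gvn, immutable_memory(), p2, tak) );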
  // Check (the hard way) and throw if not a subklass.
  // Result is ignored, we just need the CFG effects.
  gen_checkcast( obj, a_e_klass );
}
void Parse::emit_guard_for_new(ciInstanceKlass* klass) {
  // Emit guarded new
  //   if (klass->_init_thread != current_thread ||
  //       klass->_init_state != being_initialized)
  //      uncommon_trap
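  // Hedged sketch of the elided guard setup: compare the klass's _init_thread
  // with the current thread and route a mismatch to a merge region that traps.
  // The instanceKlass offset accessors are assumptions (their names changed
  // across HotSpot versions); node-allocation arities follow pre-JDK9 style.
  Node* cur_thread = _gvn.transform( new (C, 1) ThreadLocalNode() );
  Node* merge = new (C, 3) RegionNode(3);
  _gvn.set_type(merge, Type::CONTROL);
  Node* kls = makecon(TypeKlassPtr::make(klass));

  Node* init_thread_offset = _gvn.MakeConX(in_bytes(instanceKlass::init_thread_offset()));
  Node* adr_node    = basic_plus_adr(kls, kls, init_thread_offset);
  Node* init_thread = make_load(NULL, adr_node, TypeRawPtr::BOTTOM, T_ADDRESS);
  Node* cmp_thread  = _gvn.transform(new (C, 3) CmpPNode(init_thread, cur_thread));
  Node* tst         = _gvn.transform(new (C, 2) BoolNode(cmp_thread, BoolTest::eq));
  IfNode* iff       = create_and_map_if(control(), tst, PROB_ALWAYS, COUNT_UNKNOWN);
  set_control(_gvn.transform(new (C, 1) IfTrueNode(iff)));
  merge->set_req(1, _gvn.transform(new (C, 1) IfFalseNode(iff)));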
// Use T_BOOLEAN for instanceKlass::_init_state so the compiler
// can generate code to load it as unsigned byte.
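  // Hedged sketch: load _init_state as an unsigned byte, compare it with
  // being_initialized, and route a mismatch to the same merge region.
  Node* init_state_offset = _gvn.MakeConX(in_bytes(instanceKlass::init_state_offset()));
  adr_node = basic_plus_adr(kls, kls, init_state_offset);
  Node* init_state = make_load(NULL, adr_node, TypeInt::BYTE, T_BOOLEAN);
  Node* being_init = _gvn.intcon(instanceKlass::being_initialized);
  Node* cmp_state  = _gvn.transform(new (C, 3) CmpINode(init_state, being_init));
  tst = _gvn.transform(new (C, 2) BoolNode(cmp_state, BoolTest::eq));
  iff = create_and_map_if(control(), tst, PROB_ALWAYS, COUNT_UNKNOWN);
  set_control(_gvn.transform(new (C, 1) IfTrueNode(iff)));
  merge->set_req(2, _gvn.transform(new (C, 1) IfFalseNode(iff)));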
  PreserveJVMState pjvms(this);
  record_for_igvn(merge);
  set_control(merge);

  uncommon_trap(Deoptimization::Reason_uninitialized,
                Deoptimization::Action_reinterpret,
                klass);
}
//------------------------------do_new-----------------------------------------
void Parse::do_new() {
  kill_dead_locals();

  bool will_link;
  ciInstanceKlass* klass = iter().get_klass(will_link)->as_instance_klass();
  assert(will_link, "_new: typeflow responsibility");

  // Should initialize, or throw an InstantiationError?
  if (!klass->is_initialized() && !klass->is_being_initialized() ||
      klass->is_abstract() || klass->is_interface() ||
      klass->name() == ciSymbol::java_lang_Class() ||
      iter().is_unresolved_klass()) {
    uncommon_trap(Deoptimization::Reason_uninitialized,
                  Deoptimization::Action_reinterpret,
                  klass);
    return;
  }
  if (klass->is_being_initialized()) {
    emit_guard_for_new(klass);
  }
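  // Hedged sketch of the elided allocation: materialize the klass constant
  // and allocate via GraphKit::new_instance.
  Node* kls = makecon(TypeKlassPtr::make(klass));
  Node* obj = new_instance(kls);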
  // Push resultant oop onto stack
  push(obj);

  // Keep track of whether opportunities exist for StringBuilder
  // optimizations.
  if (OptimizeStringConcat &&
      (klass == C->env()->StringBuilder_klass() ||
       klass == C->env()->StringBuffer_klass())) {
    C->set_has_stringbuilder(true);
  }
}
#ifndef PRODUCT
//------------------------------dump_map_adr_mem-------------------------------
// Debug dump of the mapping from address types to MergeMemNode indices.
void Parse::dump_map_adr_mem() const {
  tty->print_cr("--- Mapping from address types to memory Nodes ---");
  MergeMemNode* mem = map() == NULL ? NULL :
                      (map()->memory()->is_MergeMem() ? map()->memory()->as_MergeMem() : NULL);
  for (uint i = 0; i < (uint)C->num_alias_types(); i++) {
    C->alias_type(i)->print_on(tty);
    tty->print("\t");
    // Node mapping, if any
    if (mem != NULL && i < mem->req() && mem->in(i) != NULL && mem->in(i) != mem->empty_memory()) {
      mem->in(i)->dump();
    } else {
      tty->cr();
    }
  }
}
#endif
//=============================================================================
//
// parser methods for profiling
//----------------------test_counter_against_threshold ------------------------
void Parse::test_counter_against_threshold(Node* cnt, int limit) {
  // Test the counter against the limit and uncommon trap if greater.

  // This code is largely copied from the range check code in
  // array_addressing()
// Test invocation count vs threshold
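  // Hedged sketch of the elided compare: unsigned compare of the counter
  // against the limit constant (node-allocation arities are pre-JDK9 style).
  Node* threshold = makecon(TypeInt::make(limit));
  Node* chk = _gvn.transform( new (C, 3) CmpUNode( cnt, threshold) );
  Node* tst = _gvn.transform( new (C, 2) BoolNode( chk, BoolTest::lt) );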
  // Branch to failure if threshold exceeded
  { BuildCutout unless(this, tst, PROB_ALWAYS);
    uncommon_trap(Deoptimization::Reason_age,
                  Deoptimization::Action_maybe_recompile);
  }
}
//----------------------increment_and_test_invocation_counter-------------------
void Parse::increment_and_test_invocation_counter(int limit) {
  if (!count_invocations()) return;
// Get the methodOop node.
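  // Hedged sketch: in this (methodOop-era) code the method is a compile-time
  // constant oop, so address it as a constant node.
  const TypePtr* adr_type = TypeOopPtr::make_from_constant(method());
  Node* method_node = makecon(adr_type);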
// Load the interpreter_invocation_counter from the methodOop.
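  // Hedged sketch: load the counter field (the offset accessor name is an
  // assumption), then test it against the limit.
  int offset = methodOopDesc::interpreter_invocation_counter_offset_in_bytes();
  Node* adr_node = basic_plus_adr(method_node, method_node, offset);
  Node* cnt = make_load(NULL, adr_node, TypeInt::INT, T_INT, adr_type);
  test_counter_against_threshold(cnt, limit);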
// Add one to the counter and store
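  // Hedged sketch of the elided increment-and-store.
  Node* incr = _gvn.transform(new (C, 3) AddINode(cnt, _gvn.intcon(1)));
  store_to_memory(NULL, adr_node, incr, T_INT, adr_type);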
}
//----------------------------method_data_addressing---------------------------
Node* Parse::method_data_addressing(ciMethodData* md, ciProfileData* data, ByteSize counter_offset, Node* idx, uint stride) {
// Get offset within methodDataOop of the data array
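  // Hedged sketch (methodDataOop-era accessor assumed):
  ByteSize data_offset = methodDataOopDesc::data_offset();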
// Get cell offset of the ProfileData within data array
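  // Hedged sketch: dp_to_di converts the ProfileData's data pointer into a
  // byte index within the data array.
  int cell_offset = md->dp_to_di(data->dp());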
// Add in counter_offset, the # of bytes into the ProfileData of counter or flag
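  // Hedged sketch: total byte offset from the start of the method data, then
  // form the address off the constant mdo node.
  int offset = in_bytes(data_offset) + cell_offset + in_bytes(counter_offset);
  const TypePtr* adr_type = TypeOopPtr::make_from_constant(md);
  Node* mdo = makecon(adr_type);
  Node* ptr = basic_plus_adr(mdo, mdo, offset);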
  if (stride != 0) {
    Node* str = _gvn.MakeConX(stride);
    Node* scale = _gvn.transform( new (C, 3) MulXNode( idx, str ) );
    ptr = _gvn.transform( new (C, 4) AddPNode( mdo, ptr, scale ) );
  }

  return ptr;
}
//--------------------------increment_md_counter_at----------------------------
void Parse::increment_md_counter_at(ciMethodData* md, ciProfileData* data, ByteSize counter_offset, Node* idx, uint stride) {
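  // Hedged sketch: load the profile cell, add the standard profile increment,
  // and store it back.
  Node* adr_node = method_data_addressing(md, data, counter_offset, idx, stride);

  const TypePtr* adr_type = _gvn.type(adr_node)->is_ptr();
  Node* cnt  = make_load(NULL, adr_node, TypeInt::INT, T_INT, adr_type);
  Node* incr = _gvn.transform(new (C, 3) AddINode(cnt, _gvn.intcon(DataLayout::counter_increment)));
  store_to_memory(NULL, adr_node, incr, T_INT, adr_type);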
}
//--------------------------test_for_osr_md_counter_at-------------------------
void Parse::test_for_osr_md_counter_at(ciMethodData* md, ciProfileData* data, ByteSize counter_offset, int limit) {
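  // Hedged sketch: load the profile cell and test it against the OSR limit.
  Node* adr_node = method_data_addressing(md, data, counter_offset, NULL, 0);

  const TypePtr* adr_type = _gvn.type(adr_node)->is_ptr();
  Node* cnt = make_load(NULL, adr_node, TypeInt::INT, T_INT, adr_type);

  test_counter_against_threshold(cnt, limit);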
}
//-------------------------------set_md_flag_at--------------------------------
void Parse::set_md_flag_at(ciMethodData* md, ciProfileData* data, int flag_constant) {
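  // Hedged sketch: OR the flag bits into the ProfileData header's flag byte
  // (DataLayout::flags_offset() is assumed to address that byte).
  Node* adr_node = method_data_addressing(md, data, DataLayout::flags_offset(), NULL, 0);

  const TypePtr* adr_type = _gvn.type(adr_node)->is_ptr();
  Node* flags = make_load(NULL, adr_node, TypeInt::BYTE, T_BYTE, adr_type);
  Node* incr  = _gvn.transform(new (C, 3) OrINode(flags, _gvn.intcon(flag_constant)));
  store_to_memory(NULL, adr_node, incr, T_BYTE, adr_type);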
}
//----------------------------profile_taken_branch-----------------------------
void Parse::profile_taken_branch(int target_bci, bool force_update) {
  // This is a potential osr_site if we have a backedge.
  int cur_bci = bci();
  bool osr_site =
    (target_bci <= cur_bci) && count_invocations() && UseOnStackReplacement;

  // If we are going to OSR, restart at the target bytecode.
  set_bci(target_bci);

  // To do: factor out the limit calculations below. These duplicate
  // the similar limit calculations in the interpreter.

  if (method_data_update() || force_update) {
    ciMethodData* md = method()->method_data();
    assert(md != NULL, "expected valid ciMethodData");
    ciProfileData* data = md->bci_to_data(cur_bci);
    assert(data->is_JumpData(), "need JumpData for taken branch");
    increment_md_counter_at(md, data, JumpData::taken_offset());
  }

  // In the new tiered system this is all we need to do. In the old
  // (c2 based) tiered system we must do the code below.
#ifndef TIERED
  if (method_data_update()) {
    if (osr_site) {
      ciMethodData* md = method()->method_data();
      ciProfileData* data = md->bci_to_data(cur_bci);
      int limit = (CompileThreshold
                   * (OnStackReplacePercentage - InterpreterProfilePercentage)) / 100;
      test_for_osr_md_counter_at(md, data, JumpData::taken_offset(), limit);
    }
  } else {
    // With method data update off, use the invocation counter to trigger an
    // OSR compilation, as done in the interpreter.
    if (osr_site) {
      int limit = (CompileThreshold * OnStackReplacePercentage) / 100;
      increment_and_test_invocation_counter(limit);
    }
  }
#endif // TIERED
  // Restore the original bytecode.
  set_bci(cur_bci);
}
//--------------------------profile_not_taken_branch---------------------------
void Parse::profile_not_taken_branch(bool force_update) {

  if (method_data_update() || force_update) {
    ciMethodData* md = method()->method_data();
    assert(md != NULL, "expected valid ciMethodData");
    ciProfileData* data = md->bci_to_data(bci());
    assert(data->is_BranchData(), "need BranchData for not taken branch");
    increment_md_counter_at(md, data, BranchData::not_taken_offset());
  }
}
//---------------------------------profile_call--------------------------------
void Parse::profile_call(Node* receiver) {
  if (!method_data_update()) return;

  switch (bc()) {
  case Bytecodes::_invokevirtual:
  case Bytecodes::_invokeinterface:
    profile_receiver_type(receiver);
    break;
  case Bytecodes::_invokestatic:
  case Bytecodes::_invokedynamic:
  case Bytecodes::_invokespecial:
    profile_generic_call();
    break;
  default: fatal("unexpected call bytecode");
  }
}
//------------------------------profile_generic_call---------------------------
void Parse::profile_generic_call() {
  assert(method_data_update(), "must be generating profile code");
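  // Hedged sketch: bump the plain CounterData cell for this call site.
  ciMethodData* md = method()->method_data();
  assert(md != NULL, "expected valid ciMethodData");
  ciProfileData* data = md->bci_to_data(bci());
  assert(data->is_CounterData(), "need CounterData here");
  increment_md_counter_at(md, data, CounterData::count_offset());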
}
//-----------------------------profile_receiver_type---------------------------
void Parse::profile_receiver_type(Node* receiver) {
  assert(method_data_update(), "must be generating profile code");

  // Skip if we aren't tracking receivers
  if (TypeProfileWidth < 1) {
    return;
  }
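  // Hedged sketch: address the ReceiverTypeData for this bci so the leaf
  // runtime helper can update the receiver rows (in_ByteSize(0) addresses the
  // start of the ProfileData).
  ciMethodData* md = method()->method_data();
  assert(md != NULL, "expected valid ciMethodData");
  ciProfileData* data = md->bci_to_data(bci());
  assert(data->is_ReceiverTypeData(), "need ReceiverTypeData here");
  ciReceiverTypeData* rdata = (ciReceiverTypeData*)data->as_ReceiverTypeData();

  Node* method_data = method_data_addressing(md, rdata, in_ByteSize(0), NULL, 0);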
  // Using an adr_type of TypePtr::BOTTOM to work around anti-dep problems.
  // A better solution might be to use TypeRawPtr::BOTTOM with RC_NARROW_MEM.
  make_runtime_call(RC_LEAF, OptoRuntime::profile_receiver_type_Type(),
                    CAST_FROM_FN_PTR(address,
                                     OptoRuntime::profile_receiver_type_C),
                    "profile_receiver_type_C",
                    TypePtr::BOTTOM,
                    method_data, receiver);
}
//---------------------------------profile_ret---------------------------------
void Parse::profile_ret(int target_bci) {
  if (!method_data_update()) return;
// Skip if we aren't tracking ret targets
if (TypeProfileWidth < 1) return;
  ciMethodData* md = method()->method_data();
  assert(md != NULL, "expected valid ciMethodData");
  ciProfileData* data = md->bci_to_data(bci());
  assert(data->is_RetData(), "need RetData for ret");
  ciRetData* ret_data = (ciRetData*)data->as_RetData();

  // Look for the target_bci in the table of RetData rows.
  uint row;
  bool table_full = true;
  for (row = 0; row < ret_data->row_limit(); row++) {
    int key = ret_data->bci(row);
    table_full = (key != RetData::no_bci);
    if (key == target_bci) break;
  }

  if (row >= ret_data->row_limit()) {
// The target_bci was not found in the table.
if (!table_full) {
// XXX: Make slow call to update RetData
}
return;
}
  // the target_bci is already in the table
  assert(row < ret_data->row_limit(), "sanity");
  increment_md_counter_at(md, data, RetData::bci_count_offset(row));
}
//--------------------------profile_null_checkcast----------------------------
void Parse::profile_null_checkcast() {
  // Set the null-seen flag, done in conjunction with the usual null check. We
  // never unset the flag, so this is a one-way switch.
  if (!method_data_update()) return;
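  // Hedged sketch: mark the BitData for this checkcast as having seen a null.
  ciMethodData* md = method()->method_data();
  assert(md != NULL, "expected valid ciMethodData");
  ciProfileData* data = md->bci_to_data(bci());
  assert(data->is_BitData(), "need BitData for checkcast");
  set_md_flag_at(md, data, BitData::null_seen_byte_constant());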
}
//-----------------------------profile_switch_case-----------------------------
void Parse::profile_switch_case(int table_index) {
  if (!method_data_update()) return;

  ciMethodData* md = method()->method_data();
  assert(md != NULL, "expected valid ciMethodData");

  ciProfileData* data = md->bci_to_data(bci());
  assert(data->is_MultiBranchData(), "need MultiBranchData for switch case");
  if (table_index >= 0) {
    increment_md_counter_at(md, data, MultiBranchData::case_count_offset(table_index));
  } else {
    increment_md_counter_at(md, data, MultiBranchData::default_count_offset());
  }
}