/*
* DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
*
 * This code is free software; you can redistribute it and/or modify it
 * under the terms of the GNU General Public License version 2 only, as
* published by the Free Software Foundation.
*
* This code is distributed in the hope that it will be useful, but WITHOUT
* ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
* FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
* version 2 for more details (a copy is included in the LICENSE file that
* accompanied this code).
*
* You should have received a copy of the GNU General Public License version
* 2 along with this work; if not, write to the Free Software Foundation,
* Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
*
* Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
* or visit www.oracle.com if you need additional information or have any
* questions.
*
*/
#ifndef SHARE_VM_OPTO_GRAPHKIT_HPP
#define SHARE_VM_OPTO_GRAPHKIT_HPP
#include "ci/ciMethodData.hpp"
#include "opto/addnode.hpp"
#include "opto/callnode.hpp"
#include "opto/cfgnode.hpp"
#include "opto/compile.hpp"
#include "opto/divnode.hpp"
#include "opto/mulnode.hpp"
#include "opto/phaseX.hpp"
#include "opto/subnode.hpp"
#include "runtime/deoptimization.hpp"
class FastLockNode;
class FastUnlockNode;
class IdealKit;
class LibraryCallKit;
class Parse;
class RootNode;
//-----------------------------------------------------------------------------
//----------------------------GraphKit-----------------------------------------
// Toolkit for building the common sorts of subgraphs.
// Does not know about bytecode parsing or type-flow results.
// It is able to create graphs implementing the semantics of most
// or all bytecodes, so that it can expand intrinsics and calls.
// It may depend on JVMState structure, but it must not depend
// on specific bytecode streams.
friend class PreserveJVMState;
protected:
private:
private:
return _map;
}
public:
GraphKit(); // empty constructor
#ifdef ASSERT
// Debug-only destructor (compiled under ASSERT). The body is empty in
// this view of the file.
// NOTE(review): upstream builds assert here that no pending exception
// states remain (i.e. transfer_exceptions_into_jvms was called) — the
// check appears to have been stripped from this copy; confirm against
// the original source before relying on this destructor.
~GraphKit() {
}
#endif
// Handy well-known nodes:
// Create or find a constant node
// (See also macro MakeConX in type.hpp, which uses intcon or longcon.)
// Helper for byte_map_base
// Get base of card map
} else {
return null();
}
}
}
}
// (See also macro find_intptr_t_con in type.hpp, which uses one of these.)
// JVM State accessors:
// Parser mapping from JVM indices into Nodes.
// Low slots are accessed by the StartNode::enum.
// Then come the locals at StartNode::Parms to StartNode::Parms+max_locals();
// Then come JVM stack slots.
// Finally come the monitors, if any.
// See layout accessors in class JVMState.
// Make sure jvms has current bci & sp.
#ifdef ASSERT
// Make sure JVMS has an updated copy of bci and sp.
// Also sanity-check method, depth, and monitor depth.
bool jvms_in_sync() const;
// Make sure the map looks OK.
void verify_map() const;
// Make sure a proposed exception state looks OK.
#endif
// Clone the existing map state. (Implements PreserveJVMState.)
// Set the map to a clone of the given one.
void set_map_clone(SafePointNode* m);
// Tell if the compilation is failing.
// Set _map to NULL, signalling a stop to further bytecode execution.
// Preserve the map intact for future use, and return it back to the caller.
// Stop, but first smash the map's inputs to NULL, to mark it dead.
void stop_and_kill_map();
// Tell if _map is NULL, or control is top.
bool stopped();
// Tell if this method or any caller method has exception handlers.
bool has_ex_handler();
// Save an exception without blowing stack contents or other JVM state.
// (The extra pointer is stuck with add_req on the map, beyond the JVMS.)
// Recover a saved exception from its map.
// Recover a saved exception from its map, and remove it from the map.
#ifdef ASSERT
// Recover a saved exception from its map, and remove it from the map.
#endif
// Push an exception in the canonical position for handlers (stack(0)).
set_sp(1);
clean_stack(1);
}
// Detach and return an exception state.
}
return ex_map;
}
// Add an exception, using the given JVM state, without commoning.
}
// Turn the current JVM state into an exception state, appending the ex_oop.
// Add an exception, using the given JVM state.
// Combine all exceptions with a common exception type into a single state.
// (This is done via combine_exception_states.)
// Combine all exceptions of any sort whatever into a single master state.
}
return phi_map;
}
// Combine the two exception states, building phis as necessary.
// The second argument is updated to include contributions from the first.
// Reset the map to the given state. If there are any half-finished phis
// in it (created by combine_exception_states), transform them now.
// Returns the exception oop. (Caller must call push_ex_oop if required.)
// Collect exceptions from a given JVM state into my exception list.
// Collect all raised exceptions into the current JVM state.
// Clear the current exception list and map, returns the combined states.
// Helper to throw a built-in exception.
// Range checks take the offending index.
// Cast and array store checks take the offending class.
// Others do not take the optional argument.
// The JVMS must allow the bytecode to be re-executed
// via an uncommon trap.
// Helper to check the JavaThread::_should_post_on_exceptions flag
// and branch to an uncommon_trap if it is true (with the specified reason and must_throw)
bool must_throw) ;
// Helper Functions for adding debug information
void kill_dead_locals();
#ifdef ASSERT
bool dead_locals_are_killed();
#endif
// The call may deoptimize. Supply required JVM state as debug info.
// If must_throw is true, the call is guaranteed not to return normally.
bool must_throw = false);
// How many stack inputs does the current BC consume?
// And, how does the stack change after the bytecode?
// Returns false if unknown.
// Add a fixed offset to a pointer
}
}
// Add a variable offset to a pointer
}
// Some convenient shortcuts for common nodes
// Build a BoolNode that tests the result of comparison `cmp` with the
// condition `relop` (eq, ne, lt, ...), and run it through GVN so an
// existing identical node is reused when possible.
Node* Bool(Node* cmp, BoolTest::mask relop) { return _gvn.transform(new (C) BoolNode(cmp, relop)); }
// Convert between int and long, and size_t.
// (See macros ConvI2X, etc., in type.hpp for ConvI2X, etc.)
// Find out the klass of an object.
// Find out the length of an array.
// Helper function to do a NULL pointer check or ZERO check based on type.
// Throw an exception if a given value is null.
// Return the value cast to not-null.
// Be clever about equivalent dominating null checks.
}
return null_check(argument(0));
}
}
}
// Throw an uncommon trap if a given value is __not__ null.
// Return the value cast to null, and be clever about dominating checks.
}
// Null check oop. Return null-path control into (*null_control).
// Return a cast-not-null node which depends on the not-null control.
// If never_see_null, use an uncommon trap (*null_control sees a top).
// The cast is not valid along the null path; keep a copy of the original.
bool never_see_null = false);
// Check the null_seen bit.
// Use the type profile to narrow an object type.
// Cast obj to not-null on this path
// Replace all occurrences of one node by another.
}
// Push the two-word (long/double) value stored in local slot `i` onto
// the expression stack.
void push_pair_local(int i) {
// longs are stored in locals in "push" order
// NOTE(review): body appears truncated in this copy — upstream pushes
// both halves of the pair (value word then dummy half); verify against
// the original source before use.
}
// the second half is pushed last & popped first; it contains exactly nothing
// the long bits are pushed first & popped last:
return pop();
}
}
// Push the node, which may be zero, one, or two words.
}
else return NULL;
}
// Access unaliased memory
// Access immutable memory
// Set unaliased memory
// Get the entire memory state (probably a MergeMemNode), and reset it
// (The resetting prevents somebody from using the dangling Node pointer.)
Node* reset_memory();
// Get the entire memory state, asserted to be a MergeMemNode.
return mem->as_MergeMem();
}
// Set the entire memory state; produce a new MergeMemNode.
// Create a memory projection from the call, then set_all_memory.
// Create a LoadNode, reading from the parser's memory state.
// (Note: require_atomic_access is useful only with T_LONG.)
bool require_atomic_access = false) {
// This version computes alias_index from bottom_type
}
Node* make_load(Node* ctl, Node* adr, const Type* t, BasicType bt, const TypePtr* adr_type, bool require_atomic_access = false) {
// This version computes alias_index from an address type
}
// This is the base version which is given an alias index.
Node* make_load(Node* ctl, Node* adr, const Type* t, BasicType bt, int adr_idx, bool require_atomic_access = false);
// Create & transform a StoreNode and store the effect into the
// parser's memory state.
bool require_atomic_access = false) {
// This version computes alias_index from an address type
C->get_alias_index(adr_type),
}
// This is the base version which is given alias index
// Return the new StoreXNode
int adr_idx,
bool require_atomic_access = false);
// All in one pre-barrier, store, post_barrier
// Insert a write-barrier'd store. This is to let generational GC
// work; we have to flag all oop-stores before the next GC point.
//
// It comes in 3 flavors of store to an object, array, or unknown.
// We use precise card marks for arrays to avoid scanning the entire
// array. We use imprecise for object. We use precise for unknown
// since we don't know if we have an array or an object or even
// where the object starts.
//
// If val==NULL, it is taken to be a completely unknown value. QQQ
const TypeOopPtr* val_type,
bool use_precise);
const TypeOopPtr* val_type,
}
const TypeOopPtr* val_type,
}
// Could be an array or object we don't know at compile time (unsafe ref.)
// For the few case where the barriers need special help
// Return addressing for an array element.
// Optional constraint on the array size:
// Return a load of array element at idx.
//---------------- Dtrace support --------------------
make_dtrace_method_entry_exit(method, true);
}
make_dtrace_method_entry_exit(method, false);
}
//--------------- stub generation -------------------
public:
const char *name,
int is_fancy_jump,
bool pass_tls,
bool return_pc);
//---------- help for generating calls --------------
// Do a null check on the receiver as it would happen before the call to
// callee (with all arguments still on the stack).
Node* n = null_check_receiver();
return n;
}
// Fill in argument edges for the call from argument(0), argument(1), ...
// (The next step is to call set_edges_for_java_call.)
// Fill in non-argument edges for the call.
// Transform the call, and update the basics: control, i_o, memory.
// (The next step is usually to call set_results_for_java_call.)
bool must_throw = false, bool separate_io_proj = false);
// Finish up a java call that was started by set_edges_for_java_call.
// Call add_exception on any throw arising from the call.
// Return the call result (transformed).
// Similar to set_edges_for_java_call, but simplified for runtime calls.
}
// Replace the call with the current state of the kit. Requires
// that the call was generated with separate io_projs so that
// exceptional control flow can be handled properly.
// helper functions for statistics
// Bail out to the interpreter right now
// The optional klass is the one causing the trap.
// The optional reason is debug information written to the compile log.
// Optional must_throw is the same as with add_safepoint_edges.
void uncommon_trap(int trap_request,
bool must_throw = false, bool keep_exact_action = false);
// Shorthand, to avoid saying "Deoptimization::" so many times.
bool must_throw = false, bool keep_exact_action = false) {
}
// SP when bytecode needs to be reexecuted.
// Report if there were too many traps at the current method and bci.
// If there is no MDO at all, report no trap unless told to assume it.
}
// Report if there were too many recompiles at the current method and bci.
}
// Returns the object (if any) which was created the moment before.
static bool use_ReduceInitialCardMarks() {
return (ReduceInitialCardMarks
}
// Sync Ideal and Graph kits.
void g1_write_barrier_pre(bool do_load,
const TypeOopPtr* val_type,
bool use_precise);
// Helper function for g1
private:
public:
// Helper function to round double arguments before a call
// rounding for strict float precision conformance
// rounding for strict double precision conformance
// rounding for non-strict double stores
const char* call_name,
enum { // flag values for make_runtime_call
};
// merge in all memory slices from new_mem, along the given path
// Helper functions to build synchronizations
int next_monitor();
// Optional 'precedent' is appended as an extra edge, to force ordering.
Node* fast_and_slow(Node* in, const Type *result_type, Node* null_result, IfNode* fast_test, Node* fast_result, address slow_call, const TypeFunc *slow_call_type, Node* slow_arg, klassOop ex_klass, Node* slow_result);
// Generate an instance-of idiom. Used by both the instance-of bytecode
// and the reflective instance-of call.
// Generate a check-cast idiom. Used by both the check-cast bytecode
// and the array-store bytecode
// Generate a subtyping check. Takes as input the subtype and supertype.
// Returns 2 values: sets the default control() to the true path and
// returns the false path. Only reads from constant memory taken from the
// default memory; does not write anything. It also doesn't take in an
// Object; if you wish to check an Object you need to load the Object's
// class prior to coming here.
// Static parse-time type checking logic for gen_subtype_check:
// Exact type check used for predicted calls and casts.
// Rewrites (*casted_receiver) to be casted to the stronger type.
// (Caller is responsible for doing replace_in_map.)
Node* *casted_receiver);
// implementation of object creation
const TypeOopPtr* oop_type);
// java.lang.String helpers
// Handy for making control flow
// Place 'if' on worklist if it will be in graph
return iff;
}
// Place 'if' on worklist if it will be in graph
return iff;
}
// Insert a loop predicate into the graph
void add_predicate(int nargs = 0);
};
// Helper class to support building of control flow branches. Upon
// creation the map and sp at bci are cloned and restored upon de-
// struction. Typical use:
//
// { PreserveJVMState pjvms(this);
// // code of new branch
// }
// // here the JVM state at bci is established
protected:
#ifdef ASSERT
int _bci;
#endif
public:
~PreserveJVMState();
};
// Helper class to build cutouts of the form if (p) ; else {x...}.
// The code {x...} must not fall through.
// The kit's main flow of control is set to the "then" continuation of if(p).
public:
~BuildCutout();
};
// Helper class to preserve the original _reexecute bit and _sp and restore
// them back
protected:
public:
};
#endif // SHARE_VM_OPTO_GRAPHKIT_HPP