// callnode.hpp revision 844
/*
* Copyright 1997-2009 Sun Microsystems, Inc. All Rights Reserved.
* DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
*
* under the terms of the GNU General Public License version 2 only, as
* published by the Free Software Foundation.
*
* This code is distributed in the hope that it will be useful, but WITHOUT
* ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
* FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
* version 2 for more details (a copy is included in the LICENSE file that
* accompanied this code).
*
* You should have received a copy of the GNU General Public License version
* 2 along with this work; if not, write to the Free Software Foundation,
* Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
*
* Please contact Sun Microsystems, Inc., 4150 Network Circle, Santa Clara,
* CA 95054 USA or visit www.sun.com if you need additional information or
* have any questions.
*
*/
// Portions of code courtesy of Clifford Click
// Optimization - Graph Style
class Chaitin;
class NamedCounter;
class MultiNode;
class SafePointNode;
class CallNode;
class CallJavaNode;
class CallStaticJavaNode;
class CallDynamicJavaNode;
class CallRuntimeNode;
class CallLeafNode;
class CallLeafNoFPNode;
class AllocateNode;
class AllocateArrayNode;
class LockNode;
class UnlockNode;
class JVMState;
class OopMap;
class State;
class StartNode;
class MachCallNode;
class FastLockNode;
//------------------------------StartNode--------------------------------------
// The method start node
// NOTE(review): the `class StartNode ...` declaration line and the
// constructor header are missing from this fragment — the dangling
// `public:` and `init_req(0,this);` below are the tail of the stripped
// constructor; verify against the upstream callnode.hpp.
public:
// Self-loop on input 0 — the conventional marker for the graph's root node.
init_req(0,this);
}
virtual int Opcode() const;
// Start must stay put at the head of the method; it may never float.
virtual bool pinned() const { return true; };
virtual const Type *bottom_type() const;
#ifndef PRODUCT
// NOTE(review): debug-only members appear to have been stripped here.
#endif
};
//------------------------------StartOSRNode-----------------------------------
// The method start node for on stack replacement code
class StartOSRNode : public StartNode {
public:
virtual int Opcode() const;
// Tuple type describing the incoming OSR state (definition not in this file).
static const TypeTuple *osr_domain();
};
//------------------------------ParmNode---------------------------------------
// Incoming parameters
// NOTE(review): the `class ParmNode ...` declaration line, fields, and the
// constructor are missing from this fragment (the bare `}` below is the
// constructor's residue); verify against the upstream callnode.hpp.
public:
}
virtual int Opcode() const;
#ifndef PRODUCT
// NOTE(review): debug-only members appear to have been stripped here.
#endif
};
//------------------------------ReturnNode-------------------------------------
// Return from subroutine node
// NOTE(review): the constructor and other members present upstream are not
// visible in this fragment.
class ReturnNode : public Node {
public:
virtual int Opcode() const;
// Participates in the control-flow graph.
virtual bool is_CFG() const { return true; }
// Returning false keeps this node from floating above its inputs.
virtual bool depends_only_on_test() const { return false; }
#ifndef PRODUCT
virtual void dump_req() const;
#endif
};
//------------------------------RethrowNode------------------------------------
// Rethrow of exception at call site. Ends a procedure before rethrowing;
// ends the current basic block like a ReturnNode. Restores registers and
// unwinds stack. Rethrow happens in the caller's method.
class RethrowNode : public Node {
public:
// Takes the full machine state (control, i/o, memory, frame pointer,
// return address) plus the exception oop to be rethrown.
RethrowNode( Node *cntrl, Node *i_o, Node *memory, Node *frameptr, Node *ret_adr, Node *exception );
virtual int Opcode() const;
// Participates in the control-flow graph.
virtual bool is_CFG() const { return true; }
// Returning false keeps this node from floating above its inputs.
virtual bool depends_only_on_test() const { return false; }
#ifndef PRODUCT
virtual void dump_req() const;
#endif
};
//------------------------------TailCallNode-----------------------------------
// Pop stack frame and jump indirect
// NOTE(review): the constructor's initializer list and body are missing
// from this fragment (only its header line survives); verify against the
// upstream callnode.hpp.
class TailCallNode : public ReturnNode {
public:
TailCallNode( Node *cntrl, Node *i_o, Node *memory, Node *frameptr, Node *retadr, Node *target, Node *moop )
}
virtual int Opcode() const;
};
//------------------------------TailJumpNode-----------------------------------
// Pop stack frame and jump indirect
// NOTE(review): the constructor is missing entirely from this fragment (the
// bare `}` below is its residue); verify against the upstream callnode.hpp.
class TailJumpNode : public ReturnNode {
public:
}
virtual int Opcode() const;
};
//-------------------------------JVMState-------------------------------------
// A linked list of JVMState nodes captures the whole interpreter state,
// plus GC roots, for all active calls at some call site in this compilation
// unit. (If there is no inlining, then the list has exactly one link.)
// This provides a way to map the optimized program back into the interpreter,
// or to let the GC mark the stack.
// NOTE(review): most fields and member-function bodies of this class have
// been stripped from this fragment — the dangling `}` lines and the orphaned
// `&& is_monitor_box(off))` expression below are their residue; verify
// against the upstream callnode.hpp before relying on this text.
class JVMState : public ResourceObj {
private:
int _bci; // Byte Code Index of this JVM point
public:
friend class Compile;
// Because JVMState objects live over the entire lifetime of the
// Compile object, they are allocated into the comp_arena, which
// does not get resource marked or reset during the compile process
void operator delete( void * ) { } // fast deallocation
// Create a new JVMState, ready for abstract interpretation.
// Access functions for the JVM
uint debug_size() const {
}
// Returns the JVM state at the desired depth (1 == root).
// Tells if two JVM states have the same call chain (depth, methods, & bcis).
// Monitors (monitors are stored as (boxNode, objNode) pairs
// Each monitor occupies 2^logMonitorEdges = 2 edges (the box/obj pair).
enum { logMonitorEdges = 1 };
}
&& is_monitor_box(off))
// Initialization functions for the JVM
}
// Miscellaneous utility functions
#ifndef PRODUCT
void dump() const {
}
#endif
};
//------------------------------SafePointNode----------------------------------
// A SafePointNode is a subclass of a MultiNode for convenience (and
// potential code sharing) only - conceptually it is independent of
// the Node semantics.
// NOTE(review): the constructor, data members, and many member-function
// bodies are missing from this fragment — the long run of bare `}` lines
// below is their residue; verify against the upstream callnode.hpp.
class SafePointNode : public MultiNode {
public:
// A plain safepoint advertises no memory effects (NULL):
{
}
// Many calls take *all* of memory as input,
// but some produce a limited subset of that memory as output.
// The adr_type reports the call's behavior as a store, not a load.
}
// Functionality from old debug nodes which has changed
}
}
}
}
}
}
}
}
// Handle monitor stack
void pop_monitor ();
Node *peek_monitor_box() const;
Node *peek_monitor_obj() const;
// Access functions for the JVM
MergeMemNode* merged_memory() const {
}
// The parser marks useless maps as dead when it's done with them:
// Exception states bubbling out of subgraphs such as inlined calls
// are recorded here. (There might be more than one, hence the "next".)
// This feature is used only for safepoints which serve as "maps"
// for JVM states during parsing, intrinsic expansion, etc.
SafePointNode* next_exception() const;
void set_next_exception(SafePointNode* n);
// Standard Node stuff
virtual int Opcode() const;
// Safepoints are control-pinned; they may never float.
virtual bool pinned() const { return true; }
virtual const RegMask &out_RegMask() const;
static bool needs_polling_address_input();
#ifndef PRODUCT
// NOTE(review): debug-only members appear to have been stripped here.
#endif
};
//------------------------------SafePointScalarObjectNode----------------------
// A SafePointScalarObjectNode represents the state of a scalarized object
// at a safepoint.
// NOTE(review): the constructor and data fields are missing from this
// fragment; verify against the upstream callnode.hpp.
class SafePointScalarObjectNode: public TypeNode {
// states of the scalarized object fields are collected.
public:
#ifdef ASSERT
// NOTE(review): assert-only members appear to have been stripped here.
#endif
virtual int Opcode() const;
virtual const RegMask &out_RegMask() const;
// SafePointScalarObject should be always pinned to the control edge
// of the SafePoint node for which it was generated.
virtual bool pinned() const; // { return true; }
// SafePointScalarObject depends on the SafePoint node
// for which it was generated.
virtual bool depends_only_on_test() const; // { return false; }
// Assumes that "this" is an argument to a safepoint node "s", and that
// "new_call" is being created to correspond to "s". But the difference
// between the start index of the jvmstates of "new_call" and "s" is
// "jvms_adj". Produce and return a SafePointScalarObjectNode that
// corresponds appropriately to "this" in "new_call". Assumes that
// "sosn_map" is a map, specific to the translation of "s" to "new_call",
// mapping old SafePointScalarObjectNodes to new, to avoid multiple copies.
// NOTE(review): the clone method the comment above describes is missing
// from this fragment.
#ifndef PRODUCT
#endif
};
//------------------------------CallNode---------------------------------------
// Call nodes now subsume the function of debug nodes at callsites, so they
// contain the functionality of a full scope chain of debug nodes.
// NOTE(review): the constructor and several fields/member bodies are
// missing from this fragment (the stray `{ }` below is constructor
// residue); verify against the upstream callnode.hpp.
class CallNode : public SafePointNode {
public:
float _cnt; // Estimate of number of times called
{
}
virtual const Type *bottom_type() const;
// Are we guaranteed that this node is a safepoint? Not true for leaf calls and
// for some macro nodes whose expansion does not have a safepoint on the fast path.
virtual bool guaranteed_safepoint() { return true; }
// For macro nodes, the JVMState gets modified during expansion, so when cloning
// the node the JVMState must be cloned.
virtual void clone_jvms() { } // default is not to clone
// Returns true if the call may modify n
// Does this node have a use of n other than in debug information?
bool has_non_debug_use(Node *n);
// Returns the unique CheckCastPP of a call
// or result projection is there are several CheckCastPP
// or returns NULL if there is no one.
Node *result_cast();
#ifndef PRODUCT
virtual void dump_req() const;
#endif
};
//------------------------------CallJavaNode-----------------------------------
// Make a static or dynamic subroutine call node using Java calling
// convention. (The "Java" calling convention is the compiler's calling
// convention, as opposed to the interpreter's or that of native C.)
// NOTE(review): the constructor and several members are missing from this
// fragment (the stray `{ }` below is constructor residue); verify against
// the upstream callnode.hpp.
class CallJavaNode : public CallNode {
protected:
// True when an optimized virtual call was devirtualized to this node
// (per the accessor names below — confirm against upstream usage).
bool _optimized_virtual;
public:
const int _bci; // Byte Code Index of call byte code
{
}
virtual int Opcode() const;
void set_optimized_virtual(bool f) { _optimized_virtual = f; }
bool is_optimized_virtual() const { return _optimized_virtual; }
#ifndef PRODUCT
#endif
};
//------------------------------CallStaticJavaNode-----------------------------
// Make a direct subroutine call using Java calling convention (for static
// calls and optimized virtual calls, plus calls to wrappers for run-time
// routines); generates static stub.
// NOTE(review): both constructors are missing from this fragment (the bare
// `}` lines below are their residue); verify against upstream callnode.hpp.
class CallStaticJavaNode : public CallJavaNode {
public:
}
// This node calls a runtime stub, which often has narrow memory effects.
}
const char *_name; // Runtime wrapper name
// If this is an uncommon trap, return the request code, else zero.
int uncommon_trap_request() const;
virtual int Opcode() const;
#ifndef PRODUCT
#endif
};
//------------------------------CallDynamicJavaNode----------------------------
// Make a dispatched call using Java calling convention.
class CallDynamicJavaNode : public CallJavaNode {
public:
// vtable_index is recorded for virtual dispatch; the remaining arguments
// are forwarded to CallJavaNode.
// NOTE(review): the constructor body appears stripped in this fragment —
// verify against the upstream callnode.hpp.
CallDynamicJavaNode( const TypeFunc *tf , address addr, ciMethod* method, int vtable_index, int bci ) : CallJavaNode(tf,addr,method,bci), _vtable_index(vtable_index) {
}
int _vtable_index;
virtual int Opcode() const;
#ifndef PRODUCT
#endif
};
//------------------------------CallRuntimeNode--------------------------------
// Make a direct subroutine call node into compiled C++ code.
// NOTE(review): the constructor header is missing from this fragment (the
// stray `{ }` below is its residue); verify against upstream callnode.hpp.
class CallRuntimeNode : public CallNode {
public:
{
}
const char *_name; // Printable name, if _method is NULL
virtual int Opcode() const;
#ifndef PRODUCT
#endif
};
//------------------------------CallLeafNode-----------------------------------
// Make a direct subroutine call node into compiled C++ code, without
// safepoints
// NOTE(review): the constructor header is missing from this fragment (the
// stray `{ }` below is its residue); verify against upstream callnode.hpp.
class CallLeafNode : public CallRuntimeNode {
public:
{
}
virtual int Opcode() const;
// Leaf calls carry no safepoint (consistent with the class comment above).
virtual bool guaranteed_safepoint() { return false; }
#ifndef PRODUCT
#endif
};
//------------------------------CallLeafNoFPNode-------------------------------
// CallLeafNode, not using floating point or using it in the same manner as
// the generated code
// NOTE(review): the constructor header is missing from this fragment (the
// stray `{ }` below is its residue); verify against upstream callnode.hpp.
class CallLeafNoFPNode : public CallLeafNode {
public:
{
}
virtual int Opcode() const;
};
//------------------------------Allocate---------------------------------------
// High-level memory allocation
//
// AllocateNode and AllocateArrayNode are subclasses of CallNode because they will
// get expanded into a code sequence containing a call. Unlike other CallNodes,
// they have 2 memory projections and 2 i_o projections (which are distinguished by
// the _is_io_use flag in the projection.) This is needed when expanding the node in
// order to differentiate the uses of the projection on the normal control path from
// those on the exception return path.
//
// NOTE(review): the constructor, several enum entries, and most member
// bodies are missing from this fragment; verify against upstream callnode.hpp.
class AllocateNode : public CallNode {
public:
// Extra edge indices for allocation nodes (list is truncated here).
enum {
// Output:
// Inputs:
KlassNode, // type (maybe dynamic) of the obj.
InitialTest, // slow-path test (may be constant)
ALength, // array length (or TOP if none)
};
static const TypeFunc* alloc_type() {
// create result type (range)
}
bool _is_scalar_replaceable; // Result of Escape Analysis
// Expansion modifies the JVMState, so we need to clone it
virtual void clone_jvms() {
}
virtual int Opcode() const;
// Allocation's fast path carries no safepoint (see CallNode comment).
virtual bool guaranteed_safepoint() { return false; }
// allocations do not modify their arguments
// Pattern-match a possible usage of AllocateNode.
// Return null if no allocation is recognized.
// The operand is the pointer produced by the (possible) allocation.
// It must be a projection of the Allocate or its subsequent CastPP.
// (Note: This function is defined in file graphKit.cpp, near
// GraphKit::new_instance/new_array, whose output it recognizes.)
// The 'ptr' may not have an offset unless the 'offset' argument is given.
// Fancy version which uses AddPNode::Ideal_base_and_offset to strip
// an offset, which is reported back to the caller.
// (Note: AllocateNode::Ideal_allocation is defined in graphKit.cpp.)
// Dig the klass operand out of a (possible) allocation site.
}
// Conservatively small estimate of offset of first non-header byte.
int minimum_header_size() {
}
// Return the corresponding initialization barrier (or null if none).
// Walks out edges to find it...
// (Note: Both InitializeNode::allocation and AllocateNode::initialization
// are defined in graphKit.cpp, which sets up the bidirectional relation.)
// Convenience for initialization->maybe_set_complete(phase)
};
//------------------------------AllocateArray---------------------------------
//
// High-level array allocation
//
// NOTE(review): the constructor's parameter list is truncated (the stray
// `)` below is its tail) and several member bodies are empty in this
// fragment; verify against the upstream callnode.hpp.
class AllocateArrayNode : public AllocateNode {
public:
)
{
}
virtual int Opcode() const;
// Dig the length operand out of a array allocation site.
Node* Ideal_length() {
}
// Dig the length operand out of a array allocation site and narrow the
// type with a CastII, if necessary
// Pattern-match a possible usage of AllocateArrayNode.
// Return null if no allocation is recognized.
}
};
//------------------------------AbstractLockNode-----------------------------------
// Common base of LockNode and UnlockNode; carries the lock-elimination and
// lock-coarsening flags shared by both.
// NOTE(review): the constructor header and several members are missing from
// this fragment (the dangling initializer-list lines below are constructor
// residue); verify against the upstream callnode.hpp.
class AbstractLockNode: public CallNode {
private:
bool _eliminate; // indicates this lock can be safely eliminated
bool _coarsened; // indicates this lock was coarsened
#ifndef PRODUCT
#endif
protected:
// helper functions for lock elimination
//
public:
_coarsened(false),
_eliminate(false)
{
#ifndef PRODUCT
#endif
}
virtual int Opcode() const = 0;
bool is_eliminated() {return _eliminate; }
// mark node as eliminated and update the counter if there is one
void set_eliminated();
bool is_coarsened() { return _coarsened; }
void set_coarsened() { _coarsened = true; }
// locking does not modify its arguments
#ifndef PRODUCT
// Debug-only: attach a counter for this lock (presumably a NamedCounter,
// per the forward declaration at the top of the file — confirm upstream).
void create_lock_counter(JVMState* s);
#endif
};
//------------------------------Lock---------------------------------------
// High-level lock operation
//
// This is a subclass of CallNode because it is a macro node which gets expanded
// into a code sequence containing a call. This node takes 3 "parameters":
// 0 - object to lock
// 1 - a BoxLockNode
// 2 - a FastLockNode
//
// NOTE(review): the lock_type() function header and the constructor header
// are missing from this fragment; the `C->add_macro_node(this);` line below
// is the tail of the stripped constructor (presumably `C` is the current
// Compile) — verify against the upstream callnode.hpp.
class LockNode : public AbstractLockNode {
public:
// create input type (domain)
// create result type (range)
}
virtual int Opcode() const;
C->add_macro_node(this);
}
virtual bool guaranteed_safepoint() { return false; }
// Expansion modifies the JVMState, so we need to clone it
virtual void clone_jvms() {
}
};
//------------------------------Unlock---------------------------------------
// High-level unlock operation
// NOTE(review): the constructor header is missing from this fragment — the
// `C->add_macro_node(this);` line below is the tail of its stripped body —
// and the class is not closed within the visible text (the definition may
// continue past this chunk); verify against the upstream callnode.hpp.
class UnlockNode : public AbstractLockNode {
public:
virtual int Opcode() const;
C->add_macro_node(this);
}
// unlock is never a safepoint
virtual bool guaranteed_safepoint() { return false; }