Mirror of https://github.com/openjdk/jdk.git
7121140: Allocation paths require explicit memory synchronization operations for RMO systems

Adds a store-store barrier after initialization of the header and body of objects.

Reviewed-by: never, kvn

commit 97439fb4ff (parent e057d60ca1)
13 changed files with 196 additions and 7 deletions
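For context: on a processor with a relaxed memory order (RMO), plain stores may become visible to other CPUs out of program order, so the stores that initialize a new object's header and fields can be overtaken by the store that publishes the object's reference. The sketch below is an illustration only, not code from this patch; the release fence it uses is strictly stronger than the StoreStore-only ordering the compiler emits via MemBarStoreStore.

    // Illustration only: the publication race that a StoreStore barrier
    // after object initialization prevents on RMO hardware.
    #include <atomic>

    struct Point { int x; int y; };

    std::atomic<Point*> g_shared{nullptr};

    void publisher() {
      Point* p = new Point;  // allocation: header and body get initialized
      p->x = 1;              // initializing stores
      p->y = 2;
      // Without ordering here, the publishing store below may become visible
      // before the initializing stores above. A release fence orders prior
      // stores before later stores (StoreStore, plus LoadStore).
      std::atomic_thread_fence(std::memory_order_release);
      g_shared.store(p, std::memory_order_relaxed);  // publishing store
    }

    void consumer() {
      Point* p = g_shared.load(std::memory_order_acquire);
      if (p != nullptr) {
        // Without the publisher's fence, these reads could observe a
        // partially initialized object on weakly ordered hardware.
        int sum = p->x + p->y;
        (void)sum;
      }
    }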
@@ -6773,6 +6773,16 @@ instruct unnecessary_membar_volatile() %{
   ins_pipe(empty);
 %}
 
+instruct membar_storestore() %{
+  match(MemBarStoreStore);
+  ins_cost(0);
+
+  size(0);
+  format %{ "!MEMBAR-storestore (empty encoding)" %}
+  ins_encode( );
+  ins_pipe(empty);
+%}
+
 //----------Register Move Instructions-----------------------------------------
 instruct roundDouble_nop(regD dst) %{
   match(Set dst (RoundDouble dst));
@@ -7368,6 +7368,16 @@ instruct unnecessary_membar_volatile() %{
   ins_pipe(empty);
 %}
 
+instruct membar_storestore() %{
+  match(MemBarStoreStore);
+  ins_cost(0);
+
+  size(0);
+  format %{ "MEMBAR-storestore (empty encoding)" %}
+  ins_encode( );
+  ins_pipe(empty);
+%}
+
 //----------Move Instructions--------------------------------------------------
 instruct castX2P(eAXRegP dst, eAXRegI src) %{
   match(Set dst (CastX2P src));
@@ -6810,6 +6810,16 @@ instruct unnecessary_membar_volatile()
   ins_pipe(empty);
 %}
 
+instruct membar_storestore() %{
+  match(MemBarStoreStore);
+  ins_cost(0);
+
+  size(0);
+  format %{ "MEMBAR-storestore (empty encoding)" %}
+  ins_encode( );
+  ins_pipe(empty);
+%}
+
 //----------Move Instructions--------------------------------------------------
 
 instruct castX2P(rRegP dst, rRegL src)
@@ -627,6 +627,7 @@ bool InstructForm::is_wide_memory_kill(FormDict &globals) const {
   if( strcmp(_matrule->_opType,"MemBarAcquire") == 0 ) return true;
   if( strcmp(_matrule->_opType,"MemBarReleaseLock") == 0 ) return true;
   if( strcmp(_matrule->_opType,"MemBarAcquireLock") == 0 ) return true;
+  if( strcmp(_matrule->_opType,"MemBarStoreStore") == 0 ) return true;
 
   return false;
 }
@@ -3978,7 +3979,8 @@ bool MatchRule::is_ideal_membar() const {
     !strcmp(_opType,"MemBarAcquireLock") ||
     !strcmp(_opType,"MemBarReleaseLock") ||
     !strcmp(_opType,"MemBarVolatile" ) ||
-    !strcmp(_opType,"MemBarCPUOrder" ) ;
+    !strcmp(_opType,"MemBarCPUOrder" ) ||
+    !strcmp(_opType,"MemBarStoreStore" );
 }
 
 bool MatchRule::is_ideal_loadPC() const {
@@ -791,6 +791,10 @@ public:
   // are defined in graphKit.cpp, which sets up the bidirectional relation.)
   InitializeNode* initialization();
 
+  // Return the corresponding storestore barrier (or null if none).
+  // Walks out edges to find it...
+  MemBarStoreStoreNode* storestore();
+
   // Convenience for initialization->maybe_set_complete(phase)
   bool maybe_set_complete(PhaseGVN* phase);
 };
@@ -166,6 +166,7 @@ macro(MemBarCPUOrder)
 macro(MemBarRelease)
 macro(MemBarReleaseLock)
 macro(MemBarVolatile)
+macro(MemBarStoreStore)
 macro(MergeMem)
 macro(MinI)
 macro(ModD)
@@ -1595,6 +1595,7 @@ bool ConnectionGraph::compute_escape() {
   GrowableArray<Node*> alloc_worklist;
   GrowableArray<Node*> addp_worklist;
   GrowableArray<Node*> ptr_cmp_worklist;
+  GrowableArray<Node*> storestore_worklist;
   PhaseGVN* igvn = _igvn;
 
   // Push all useful nodes onto CG list and set their type.
@@ -1618,6 +1619,11 @@ bool ConnectionGraph::compute_escape() {
                (n->Opcode() == Op_CmpP || n->Opcode() == Op_CmpN)) {
       // Compare pointers nodes
       ptr_cmp_worklist.append(n);
+    } else if (n->is_MemBarStoreStore()) {
+      // Collect all MemBarStoreStore nodes so that depending on the
+      // escape status of the associated Allocate node some of them
+      // may be eliminated.
+      storestore_worklist.append(n);
     }
     for (DUIterator_Fast imax, i = n->fast_outs(imax); i < imax; i++) {
       Node* m = n->fast_out(i);   // Get user
@@ -1724,11 +1730,20 @@ bool ConnectionGraph::compute_escape() {
   uint alloc_length = alloc_worklist.length();
   for (uint next = 0; next < alloc_length; ++next) {
     Node* n = alloc_worklist.at(next);
-    if (ptnode_adr(n->_idx)->escape_state() == PointsToNode::NoEscape) {
+    PointsToNode::EscapeState es = ptnode_adr(n->_idx)->escape_state();
+    if (es == PointsToNode::NoEscape) {
       has_non_escaping_obj = true;
       if (n->is_Allocate()) {
         find_init_values(n, &visited, igvn);
+        // The object allocated by this Allocate node will never be
+        // seen by another thread. Mark it so that when it is
+        // expanded no MemBarStoreStore is added.
+        n->as_Allocate()->initialization()->set_does_not_escape();
       }
+    } else if ((es == PointsToNode::ArgEscape) && n->is_Allocate()) {
+      // Same as above. Mark this Allocate node so that when it is
+      // expanded no MemBarStoreStore is added.
+      n->as_Allocate()->initialization()->set_does_not_escape();
     }
   }
 
@@ -1874,6 +1889,25 @@ bool ConnectionGraph::compute_escape() {
     igvn->hash_delete(_pcmp_eq);
   }
 
+  // For MemBarStoreStore nodes added in library_call.cpp, check
+  // escape status of associated AllocateNode and optimize out
+  // MemBarStoreStore node if the allocated object never escapes.
+  while (storestore_worklist.length() != 0) {
+    Node* n = storestore_worklist.pop();
+    MemBarStoreStoreNode* storestore = n->as_MemBarStoreStore();
+    Node* alloc = storestore->in(MemBarNode::Precedent)->in(0);
+    assert(alloc->is_Allocate(), "storestore should point to AllocateNode");
+    PointsToNode::EscapeState es = ptnode_adr(alloc->_idx)->escape_state();
+    if (es == PointsToNode::NoEscape || es == PointsToNode::ArgEscape) {
+      MemBarNode* mb = MemBarNode::make(C, Op_MemBarCPUOrder, Compile::AliasIdxBot);
+      mb->init_req(TypeFunc::Memory, storestore->in(TypeFunc::Memory));
+      mb->init_req(TypeFunc::Control, storestore->in(TypeFunc::Control));
+
+      _igvn->register_new_node_with_optimizer(mb);
+      _igvn->replace_node(storestore, mb);
+    }
+  }
+
 #ifndef PRODUCT
   if (PrintEscapeAnalysis) {
     dump(); // Dump ConnectionGraph
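For intuition about the escape states tested above: only an object that may become reachable from another thread needs the publication barrier; for NoEscape and ArgEscape allocations the MemBarStoreStore is downgraded to a compiler-only MemBarCPUOrder. A hypothetical illustration of the two shapes (not from the patch):

    // Illustration only: allocation shapes and their escape status.
    #include <atomic>

    struct Point { int x; int y; };

    int no_escape() {
      // NoEscape: the object is reachable only from this frame, so no other
      // thread can ever observe it and the publication barrier is unnecessary.
      Point* p = new Point{1, 2};
      int r = p->x + p->y;
      delete p;
      return r;
    }

    void global_escape(std::atomic<Point*>& shared) {
      // GlobalEscape: the object is published to other threads, so the
      // barrier between initialization and publication must remain.
      Point* p = new Point{3, 4};
      shared.store(p);
    }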
@@ -3337,6 +3337,19 @@ InitializeNode* AllocateNode::initialization() {
   return NULL;
 }
 
+// Trace Allocate -> Proj[Parm] -> MemBarStoreStore
+MemBarStoreStoreNode* AllocateNode::storestore() {
+  ProjNode* rawoop = proj_out(AllocateNode::RawAddress);
+  if (rawoop == NULL) return NULL;
+  for (DUIterator_Fast imax, i = rawoop->fast_outs(imax); i < imax; i++) {
+    Node* storestore = rawoop->fast_out(i);
+    if (storestore->is_MemBarStoreStore()) {
+      return storestore->as_MemBarStoreStore();
+    }
+  }
+  return NULL;
+}
+
 //----------------------------- loop predicates ---------------------------
 
 //------------------------------add_predicate_impl----------------------------
@@ -4193,12 +4193,17 @@ void LibraryCallKit::copy_to_clone(Node* obj, Node* alloc_obj, Node* obj_size, b
   Node* raw_obj = alloc_obj->in(1);
   assert(alloc_obj->is_CheckCastPP() && raw_obj->is_Proj() && raw_obj->in(0)->is_Allocate(), "");
 
+  AllocateNode* alloc = NULL;
   if (ReduceBulkZeroing) {
     // We will be completely responsible for initializing this object -
     // mark Initialize node as complete.
-    AllocateNode* alloc = AllocateNode::Ideal_allocation(alloc_obj, &_gvn);
+    alloc = AllocateNode::Ideal_allocation(alloc_obj, &_gvn);
     // The object was just allocated - there should be no stores yet!
     guarantee(alloc != NULL && alloc->maybe_set_complete(&_gvn), "");
+    // Mark as complete_with_arraycopy so that on AllocateNode
+    // expansion, we know this AllocateNode is initialized by an array
+    // copy and a StoreStore barrier exists after the array copy.
+    alloc->initialization()->set_complete_with_arraycopy();
   }
 
   // Copy the fastest available way.
@@ -4260,7 +4265,18 @@ void LibraryCallKit::copy_to_clone(Node* obj, Node* alloc_obj, Node* obj_size, b
   }
 
   // Do not let reads from the cloned object float above the arraycopy.
+  if (alloc != NULL) {
+    // Do not let stores that initialize this object be reordered with
+    // a subsequent store that would make this object accessible by
+    // other threads.
+    // Record what AllocateNode this StoreStore protects so that
+    // escape analysis can go from the MemBarStoreStoreNode to the
+    // AllocateNode and eliminate the MemBarStoreStoreNode if possible
+    // based on the escape status of the AllocateNode.
+    insert_mem_bar(Op_MemBarStoreStore, alloc->proj_out(AllocateNode::RawAddress));
+  } else {
     insert_mem_bar(Op_MemBarCPUOrder);
+  }
 }
 
 //------------------------inline_native_clone----------------------------
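The same hazard exists when the new object is filled in by a bulk copy rather than by individual field stores, which is why the clone path above records which AllocateNode the MemBarStoreStore protects. A hedged, illustration-only analogue in portable C++ (not code from this patch):

    // Illustration only: a clone is initialized by a block copy, but the
    // copy's stores still need ordering before the publishing store.
    #include <atomic>
    #include <cstring>

    struct Blob { char data[64]; };

    std::atomic<Blob*> g_published{nullptr};

    void clone_and_publish(const Blob* src) {
      Blob* copy = new Blob;
      std::memcpy(copy, src, sizeof(Blob));  // bulk initialization ("arraycopy")
      // Order the copy's stores before the publishing store; this is the
      // role of Op_MemBarStoreStore after the copy in the compiler's IR.
      std::atomic_thread_fence(std::memory_order_release);
      g_published.store(copy, std::memory_order_relaxed);
    }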
@@ -5003,7 +5019,16 @@ LibraryCallKit::generate_arraycopy(const TypePtr* adr_type,
   // the membar also.
   //
   // Do not let reads from the cloned object float above the arraycopy.
-  if (InsertMemBarAfterArraycopy || alloc != NULL)
+  if (alloc != NULL) {
+    // Do not let stores that initialize this object be reordered with
+    // a subsequent store that would make this object accessible by
+    // other threads.
+    // Record what AllocateNode this StoreStore protects so that
+    // escape analysis can go from the MemBarStoreStoreNode to the
+    // AllocateNode and eliminate the MemBarStoreStoreNode if possible
+    // based on the escape status of the AllocateNode.
+    insert_mem_bar(Op_MemBarStoreStore, alloc->proj_out(AllocateNode::RawAddress));
+  } else if (InsertMemBarAfterArraycopy)
     insert_mem_bar(Op_MemBarCPUOrder);
 }
 
@@ -1088,6 +1088,12 @@ void PhaseMacroExpand::expand_allocate_common(
   Node* klass_node        = alloc->in(AllocateNode::KlassNode);
   Node* initial_slow_test = alloc->in(AllocateNode::InitialTest);
 
+  Node* storestore = alloc->storestore();
+  if (storestore != NULL) {
+    // Break this link that is no longer useful and confuses register allocation
+    storestore->set_req(MemBarNode::Precedent, top());
+  }
+
   assert(ctrl != NULL, "must have control");
   // We need a Region and corresponding Phi's to merge the slow-path and fast-path results.
   // they will not be used if "always_slow" is set
@@ -1289,10 +1295,66 @@ void PhaseMacroExpand::expand_allocate_common(
                                         0, new_alloc_bytes, T_LONG);
         }
 
+        InitializeNode* init = alloc->initialization();
         fast_oop_rawmem = initialize_object(alloc,
                                             fast_oop_ctrl, fast_oop_rawmem, fast_oop,
                                             klass_node, length, size_in_bytes);
 
+        // If initialization is performed by an array copy, any required
+        // MemBarStoreStore was already added. If the object does not
+        // escape no need for a MemBarStoreStore. Otherwise we need a
+        // MemBarStoreStore so that stores that initialize this object
+        // can't be reordered with a subsequent store that makes this
+        // object accessible by other threads.
+        if (init == NULL || (!init->is_complete_with_arraycopy() && !init->does_not_escape())) {
+          if (init == NULL || init->req() < InitializeNode::RawStores) {
+            // No InitializeNode or no stores captured by zeroing
+            // elimination. Simply add the MemBarStoreStore after object
+            // initialization.
+            MemBarNode* mb = MemBarNode::make(C, Op_MemBarStoreStore, Compile::AliasIdxBot, fast_oop_rawmem);
+            transform_later(mb);
+
+            mb->init_req(TypeFunc::Memory, fast_oop_rawmem);
+            mb->init_req(TypeFunc::Control, fast_oop_ctrl);
+            fast_oop_ctrl = new (C, 1) ProjNode(mb,TypeFunc::Control);
+            transform_later(fast_oop_ctrl);
+            fast_oop_rawmem = new (C, 1) ProjNode(mb,TypeFunc::Memory);
+            transform_later(fast_oop_rawmem);
+          } else {
+            // Add the MemBarStoreStore after the InitializeNode so that
+            // all stores performing the initialization that were moved
+            // before the InitializeNode happen before the storestore
+            // barrier.
+
+            Node* init_ctrl = init->proj_out(TypeFunc::Control);
+            Node* init_mem = init->proj_out(TypeFunc::Memory);
+
+            MemBarNode* mb = MemBarNode::make(C, Op_MemBarStoreStore, Compile::AliasIdxBot);
+            transform_later(mb);
+
+            Node* ctrl = new (C, 1) ProjNode(init,TypeFunc::Control);
+            transform_later(ctrl);
+            Node* mem = new (C, 1) ProjNode(init,TypeFunc::Memory);
+            transform_later(mem);
+
+            // The MemBarStoreStore depends on control and memory coming
+            // from the InitializeNode
+            mb->init_req(TypeFunc::Memory, mem);
+            mb->init_req(TypeFunc::Control, ctrl);
+
+            ctrl = new (C, 1) ProjNode(mb,TypeFunc::Control);
+            transform_later(ctrl);
+            mem = new (C, 1) ProjNode(mb,TypeFunc::Memory);
+            transform_later(mem);
+
+            // All nodes that depended on the InitializeNode for control
+            // and memory must now depend on the MemBarNode that itself
+            // depends on the InitializeNode
+            _igvn.replace_node(init_ctrl, ctrl);
+            _igvn.replace_node(init_mem, mem);
+          }
+        }
+
         if (C->env()->dtrace_extended_probes()) {
           // Slow-path call
           int size = TypeFunc::Parms + 2;
@@ -2721,6 +2721,7 @@ MemBarNode* MemBarNode::make(Compile* C, int opcode, int atp, Node* pn) {
   case Op_MemBarVolatile:   return new(C, len) MemBarVolatileNode(C, atp, pn);
   case Op_MemBarCPUOrder:   return new(C, len) MemBarCPUOrderNode(C, atp, pn);
   case Op_Initialize:       return new(C, len) InitializeNode(C, atp, pn);
+  case Op_MemBarStoreStore: return new(C, len) MemBarStoreStoreNode(C, atp, pn);
   default:                  ShouldNotReachHere(); return NULL;
   }
 }
@@ -2870,7 +2871,7 @@ Node *MemBarNode::match( const ProjNode *proj, const Matcher *m ) {
 
 //---------------------------InitializeNode------------------------------------
 InitializeNode::InitializeNode(Compile* C, int adr_type, Node* rawoop)
-  : _is_complete(Incomplete),
+  : _is_complete(Incomplete), _does_not_escape(false),
     MemBarNode(C, adr_type, rawoop)
 {
   init_class_id(Class_Initialize);
@@ -918,6 +918,15 @@ public:
   virtual int Opcode() const;
 };
 
+class MemBarStoreStoreNode: public MemBarNode {
+public:
+  MemBarStoreStoreNode(Compile* C, int alias_idx, Node* precedent)
+    : MemBarNode(C, alias_idx, precedent) {
+    init_class_id(Class_MemBarStoreStore);
+  }
+  virtual int Opcode() const;
+};
+
 // Ordering between a volatile store and a following volatile load.
 // Requires multi-CPU visibility?
 class MemBarVolatileNode: public MemBarNode {
@@ -950,6 +959,8 @@ class InitializeNode: public MemBarNode {
   };
   int _is_complete;
 
+  bool _does_not_escape;
+
 public:
   enum {
     Control    = TypeFunc::Control,
@@ -989,6 +1000,9 @@ public:
   void set_complete(PhaseGVN* phase);
   void set_complete_with_arraycopy() { _is_complete = Complete | WithArraycopy; }
 
+  bool does_not_escape() { return _does_not_escape; }
+  void set_does_not_escape() { _does_not_escape = true; }
+
 #ifdef ASSERT
   // ensure all non-degenerate stores are ordered and non-overlapping
   bool stores_are_sane(PhaseTransform* phase);
@@ -97,6 +97,7 @@ class MachSpillCopyNode;
 class MachTempNode;
 class Matcher;
 class MemBarNode;
+class MemBarStoreStoreNode;
 class MemNode;
 class MergeMemNode;
 class MultiNode;
@@ -565,6 +566,7 @@ public:
       DEFINE_CLASS_ID(Start,       Multi, 2)
       DEFINE_CLASS_ID(MemBar,      Multi, 3)
         DEFINE_CLASS_ID(Initialize,       MemBar, 0)
+        DEFINE_CLASS_ID(MemBarStoreStore, MemBar, 1)
 
     DEFINE_CLASS_ID(Mach,  Node, 1)
       DEFINE_CLASS_ID(MachReturn, Mach, 0)
|
||||||
DEFINE_CLASS_QUERY(MachTemp)
|
DEFINE_CLASS_QUERY(MachTemp)
|
||||||
DEFINE_CLASS_QUERY(Mem)
|
DEFINE_CLASS_QUERY(Mem)
|
||||||
DEFINE_CLASS_QUERY(MemBar)
|
DEFINE_CLASS_QUERY(MemBar)
|
||||||
|
DEFINE_CLASS_QUERY(MemBarStoreStore)
|
||||||
DEFINE_CLASS_QUERY(MergeMem)
|
DEFINE_CLASS_QUERY(MergeMem)
|
||||||
DEFINE_CLASS_QUERY(Multi)
|
DEFINE_CLASS_QUERY(Multi)
|
||||||
DEFINE_CLASS_QUERY(MultiBranch)
|
DEFINE_CLASS_QUERY(MultiBranch)
|
||||||
|
|