8238691: C2: turn subtype check into macro node

Reviewed-by: vlivanov, thartmann
This commit is contained in:
Roland Westrelin 2020-02-14 10:31:34 +01:00
parent e4b27a48a0
commit 52d46c314b
18 changed files with 443 additions and 71 deletions

View file

@ -746,6 +746,9 @@
range(0, max_juint) \ range(0, max_juint) \
\ \
product(bool, UseProfiledLoopPredicate, true, \ product(bool, UseProfiledLoopPredicate, true, \
"move predicates out of loops based on profiling data") \ "Move predicates out of loops based on profiling data") \
\
diagnostic(bool, ExpandSubTypeCheckAtParseTime, false, \
"Do not use subtype check macro node") \
#endif // SHARE_OPTO_C2_GLOBALS_HPP #endif // SHARE_OPTO_C2_GLOBALS_HPP

View file

@ -46,6 +46,7 @@
#include "opto/opaquenode.hpp" #include "opto/opaquenode.hpp"
#include "opto/rootnode.hpp" #include "opto/rootnode.hpp"
#include "opto/subnode.hpp" #include "opto/subnode.hpp"
#include "opto/subtypenode.hpp"
#include "opto/vectornode.hpp" #include "opto/vectornode.hpp"
#include "utilities/macros.hpp" #include "utilities/macros.hpp"
#if INCLUDE_SHENANDOAHGC #if INCLUDE_SHENANDOAHGC

View file

@ -250,6 +250,7 @@ macro(OverflowMulL)
macro(PCTable) macro(PCTable)
macro(Parm) macro(Parm)
macro(PartialSubtypeCheck) macro(PartialSubtypeCheck)
macro(SubTypeCheck)
macro(Phi) macro(Phi)
macro(PopCountI) macro(PopCountI)
macro(PopCountL) macro(PopCountL)

View file

@ -4239,6 +4239,9 @@ int Compile::static_subtype_check(ciKlass* superk, ciKlass* subk) {
// Add a dependency if there is a chance of a later subclass. // Add a dependency if there is a chance of a later subclass.
dependencies()->assert_leaf_type(ik); dependencies()->assert_leaf_type(ik);
} }
if (ik->is_abstract()) {
return SSC_always_false;
}
return SSC_easy_test; // (3) caller can do a simple ptr comparison return SSC_easy_test; // (3) caller can do a simple ptr comparison
} }
} else { } else {

View file

@ -3199,6 +3199,7 @@ void ConnectionGraph::split_unique_types(GrowableArray<Node *> &alloc_worklist,
op == Op_FastLock || op == Op_AryEq || op == Op_StrComp || op == Op_HasNegatives || op == Op_FastLock || op == Op_AryEq || op == Op_StrComp || op == Op_HasNegatives ||
op == Op_StrCompressedCopy || op == Op_StrInflatedCopy || op == Op_StrCompressedCopy || op == Op_StrInflatedCopy ||
op == Op_StrEquals || op == Op_StrIndexOf || op == Op_StrIndexOfChar || op == Op_StrEquals || op == Op_StrIndexOf || op == Op_StrIndexOfChar ||
op == Op_SubTypeCheck ||
BarrierSet::barrier_set()->barrier_set_c2()->is_gc_barrier_node(use))) { BarrierSet::barrier_set()->barrier_set_c2()->is_gc_barrier_node(use))) {
n->dump(); n->dump();
use->dump(); use->dump();

View file

@ -41,6 +41,7 @@
#include "opto/parse.hpp" #include "opto/parse.hpp"
#include "opto/rootnode.hpp" #include "opto/rootnode.hpp"
#include "opto/runtime.hpp" #include "opto/runtime.hpp"
#include "opto/subtypenode.hpp"
#include "runtime/deoptimization.hpp" #include "runtime/deoptimization.hpp"
#include "runtime/sharedRuntime.hpp" #include "runtime/sharedRuntime.hpp"
#include "utilities/bitMap.inline.hpp" #include "utilities/bitMap.inline.hpp"
@ -2623,21 +2624,94 @@ void GraphKit::make_slow_call_ex(Node* call, ciInstanceKlass* ex_klass, bool sep
set_control(norm); set_control(norm);
} }
static IfNode* gen_subtype_check_compare(Node* ctrl, Node* in1, Node* in2, BoolTest::mask test, float p, PhaseGVN* gvn, BasicType bt) { static IfNode* gen_subtype_check_compare(Node* ctrl, Node* in1, Node* in2, BoolTest::mask test, float p, PhaseGVN& gvn, BasicType bt) {
Node* cmp = NULL; Node* cmp = NULL;
switch(bt) { switch(bt) {
case T_INT: cmp = new CmpINode(in1, in2); break; case T_INT: cmp = new CmpINode(in1, in2); break;
case T_ADDRESS: cmp = new CmpPNode(in1, in2); break; case T_ADDRESS: cmp = new CmpPNode(in1, in2); break;
default: fatal("unexpected comparison type %s", type2name(bt)); default: fatal("unexpected comparison type %s", type2name(bt));
} }
gvn->transform(cmp); gvn.transform(cmp);
Node* bol = gvn->transform(new BoolNode(cmp, test)); Node* bol = gvn.transform(new BoolNode(cmp, test));
IfNode* iff = new IfNode(ctrl, bol, p, COUNT_UNKNOWN); IfNode* iff = new IfNode(ctrl, bol, p, COUNT_UNKNOWN);
gvn->transform(iff); gvn.transform(iff);
if (!bol->is_Con()) gvn->record_for_igvn(iff); if (!bol->is_Con()) gvn.record_for_igvn(iff);
return iff; return iff;
} }
// Find the memory state for the secondary super type cache load when
// a subtype check is expanded at macro expansion time. That field is
// mutable so should not use immutable memory but
// PartialSubtypeCheckNode that might modify it doesn't produce a new
// memory state so bottom memory is the most accurate memory state to
// hook the load with. This follows the implementation used when the
// subtype check is expanded at parse time.
static Node* find_bottom_mem(Node* ctrl, Compile* C) {
// Alias type of the secondary super cache field (a mutable slice of Klass).
const TypePtr* adr_type = TypeKlassPtr::make(TypePtr::NotNull, C->env()->Object_klass(), Type::OffsetBot);
// Stack of (Region, input index) pairs used to backtrack through the
// inputs of regions already visited.
Node_Stack stack(0);
VectorSet seen(Thread::current()->resource_area());
Node* c = ctrl;
Node* mem = NULL;
uint iter = 0;
// Walk control inputs backward from ctrl until a usable memory state is found.
do {
iter++;
// Sanity bound on the walk: we should never take more steps than there
// are live nodes, otherwise we are looping.
assert(iter < C->live_nodes(), "infinite loop");
if (c->is_Region()) {
// At a merge point: look for a memory Phi on this region carrying either
// bottom memory or the secondary-super-cache slice.
for (DUIterator_Fast imax, i = c->fast_outs(imax); i < imax && mem == NULL; i++) {
Node* u = c->fast_out(i);
if (u->is_Phi() && u->bottom_type() == Type::MEMORY &&
(u->adr_type() == TypePtr::BOTTOM || u->adr_type() == adr_type)) {
mem = u;
}
}
if (mem == NULL) {
if (!seen.test_set(c->_idx)) {
// First visit of this region: push it so the remaining inputs can be
// explored later, then follow its first control input.
stack.push(c, 2);
c = c->in(1);
} else {
// Region already visited: backtrack to the most recent region on the
// stack that still has unexplored inputs and take the next one.
Node* phi = NULL;
uint idx = 0;
for (;;) {
phi = stack.node();
idx = stack.index();
if (idx < phi->req()) {
break;
}
stack.pop();
}
c = phi->in(idx);
stack.set_index(idx+1);
}
}
} else if (c->is_Proj() && c->in(0)->adr_type() == TypePtr::BOTTOM) {
// Control projection of a node producing bottom memory: pick the sibling
// memory projection of the same flavor (_is_io_use must match).
for (DUIterator_Fast imax, i = c->in(0)->fast_outs(imax); i < imax; i++) {
Node* u = c->in(0)->fast_out(i);
if (u->bottom_type() == Type::MEMORY && u->as_Proj()->_is_io_use == c->as_Proj()->_is_io_use) {
// There should be at most one matching memory projection.
assert(mem == NULL, "");
mem = u;
}
}
} else if (c->is_CatchProj() && c->in(0)->in(0)->in(0)->adr_type() == TypePtr::BOTTOM) {
// Below a call (CatchProj -> Catch -> Proj -> Call): use the memory
// projection that corresponds to the path taken out of the call.
Node* call = c->in(0)->in(0)->in(0);
assert(call->is_Call(), "CatchProj with no call?");
CallProjections projs;
call->as_Call()->extract_projections(&projs, false, false);
if (projs.catchall_memproj == NULL) {
// No exceptional memory state: the fall-through one is the only choice.
mem = projs.fallthrough_memproj;
} else if (c == projs.fallthrough_catchproj) {
mem = projs.fallthrough_memproj;
} else {
assert(c == projs.catchall_catchproj, "strange control");
mem = projs.catchall_memproj;
}
} else {
// Plain control node: keep walking up its control input.
assert(!c->is_Start(), "should stop before start");
c = c->in(0);
}
} while (mem == NULL);
return mem;
}
//-------------------------------gen_subtype_check----------------------------- //-------------------------------gen_subtype_check-----------------------------
// Generate a subtyping check. Takes as input the subtype and supertype. // Generate a subtyping check. Takes as input the subtype and supertype.
@ -2647,9 +2721,8 @@ static IfNode* gen_subtype_check_compare(Node* ctrl, Node* in1, Node* in2, BoolT
// but that's not exposed to the optimizer. This call also doesn't take in an // but that's not exposed to the optimizer. This call also doesn't take in an
// Object; if you wish to check an Object you need to load the Object's class // Object; if you wish to check an Object you need to load the Object's class
// prior to coming here. // prior to coming here.
Node* Phase::gen_subtype_check(Node* subklass, Node* superklass, Node** ctrl, MergeMemNode* mem, PhaseGVN* gvn) { Node* Phase::gen_subtype_check(Node* subklass, Node* superklass, Node** ctrl, Node* mem, PhaseGVN& gvn) {
Compile* C = gvn->C; Compile* C = gvn.C;
if ((*ctrl)->is_top()) { if ((*ctrl)->is_top()) {
return C->top(); return C->top();
} }
@ -2660,9 +2733,9 @@ Node* Phase::gen_subtype_check(Node* subklass, Node* superklass, Node** ctrl, Me
if (subklass == superklass) if (subklass == superklass)
return C->top(); // false path is dead; no test needed. return C->top(); // false path is dead; no test needed.
if (gvn->type(superklass)->singleton()) { if (gvn.type(superklass)->singleton()) {
ciKlass* superk = gvn->type(superklass)->is_klassptr()->klass(); ciKlass* superk = gvn.type(superklass)->is_klassptr()->klass();
ciKlass* subk = gvn->type(subklass)->is_klassptr()->klass(); ciKlass* subk = gvn.type(subklass)->is_klassptr()->klass();
// In the common case of an exact superklass, try to fold up the // In the common case of an exact superklass, try to fold up the
// test before generating code. You may ask, why not just generate // test before generating code. You may ask, why not just generate
@ -2677,7 +2750,7 @@ Node* Phase::gen_subtype_check(Node* subklass, Node* superklass, Node** ctrl, Me
case Compile::SSC_always_false: case Compile::SSC_always_false:
{ {
Node* always_fail = *ctrl; Node* always_fail = *ctrl;
*ctrl = gvn->C->top(); *ctrl = gvn.C->top();
return always_fail; return always_fail;
} }
case Compile::SSC_always_true: case Compile::SSC_always_true:
@ -2686,8 +2759,8 @@ Node* Phase::gen_subtype_check(Node* subklass, Node* superklass, Node** ctrl, Me
{ {
// Just do a direct pointer compare and be done. // Just do a direct pointer compare and be done.
IfNode* iff = gen_subtype_check_compare(*ctrl, subklass, superklass, BoolTest::eq, PROB_STATIC_FREQUENT, gvn, T_ADDRESS); IfNode* iff = gen_subtype_check_compare(*ctrl, subklass, superklass, BoolTest::eq, PROB_STATIC_FREQUENT, gvn, T_ADDRESS);
*ctrl = gvn->transform(new IfTrueNode(iff)); *ctrl = gvn.transform(new IfTrueNode(iff));
return gvn->transform(new IfFalseNode(iff)); return gvn.transform(new IfFalseNode(iff));
} }
case Compile::SSC_full_test: case Compile::SSC_full_test:
break; break;
@ -2701,11 +2774,11 @@ Node* Phase::gen_subtype_check(Node* subklass, Node* superklass, Node** ctrl, Me
// will always succeed. We could leave a dependency behind to ensure this. // will always succeed. We could leave a dependency behind to ensure this.
// First load the super-klass's check-offset // First load the super-klass's check-offset
Node *p1 = gvn->transform(new AddPNode(superklass, superklass, gvn->MakeConX(in_bytes(Klass::super_check_offset_offset())))); Node *p1 = gvn.transform(new AddPNode(superklass, superklass, gvn.MakeConX(in_bytes(Klass::super_check_offset_offset()))));
Node* m = mem->memory_at(C->get_alias_index(gvn->type(p1)->is_ptr())); Node* m = C->immutable_memory();
Node *chk_off = gvn->transform(new LoadINode(NULL, m, p1, gvn->type(p1)->is_ptr(), TypeInt::INT, MemNode::unordered)); Node *chk_off = gvn.transform(new LoadINode(NULL, m, p1, gvn.type(p1)->is_ptr(), TypeInt::INT, MemNode::unordered));
int cacheoff_con = in_bytes(Klass::secondary_super_cache_offset()); int cacheoff_con = in_bytes(Klass::secondary_super_cache_offset());
bool might_be_cache = (gvn->find_int_con(chk_off, cacheoff_con) == cacheoff_con); bool might_be_cache = (gvn.find_int_con(chk_off, cacheoff_con) == cacheoff_con);
// Load from the sub-klass's super-class display list, or a 1-word cache of // Load from the sub-klass's super-class display list, or a 1-word cache of
// the secondary superclass list, or a failing value with a sentinel offset // the secondary superclass list, or a failing value with a sentinel offset
@ -2715,15 +2788,22 @@ Node* Phase::gen_subtype_check(Node* subklass, Node* superklass, Node** ctrl, Me
// klass loads can never produce a NULL). // klass loads can never produce a NULL).
Node *chk_off_X = chk_off; Node *chk_off_X = chk_off;
#ifdef _LP64 #ifdef _LP64
chk_off_X = gvn->transform(new ConvI2LNode(chk_off_X)); chk_off_X = gvn.transform(new ConvI2LNode(chk_off_X));
#endif #endif
Node *p2 = gvn->transform(new AddPNode(subklass,subklass,chk_off_X)); Node *p2 = gvn.transform(new AddPNode(subklass,subklass,chk_off_X));
// For some types like interfaces the following loadKlass is from a 1-word // For some types like interfaces the following loadKlass is from a 1-word
// cache which is mutable so can't use immutable memory. Other // cache which is mutable so can't use immutable memory. Other
// types load from the super-class display table which is immutable. // types load from the super-class display table which is immutable.
m = mem->memory_at(C->get_alias_index(gvn->type(p2)->is_ptr())); Node *kmem = C->immutable_memory();
Node *kmem = might_be_cache ? m : C->immutable_memory(); if (might_be_cache) {
Node *nkls = gvn->transform(LoadKlassNode::make(*gvn, NULL, kmem, p2, gvn->type(p2)->is_ptr(), TypeKlassPtr::OBJECT_OR_NULL)); assert((C->get_alias_index(TypeKlassPtr::make(TypePtr::NotNull, C->env()->Object_klass(), Type::OffsetBot)) ==
C->get_alias_index(gvn.type(p2)->is_ptr())), "");
if (mem == NULL) {
mem = find_bottom_mem(*ctrl, C);
}
kmem = mem->is_MergeMem() ? mem->as_MergeMem()->memory_at(C->get_alias_index(gvn.type(p2)->is_ptr())) : mem;
}
Node *nkls = gvn.transform(LoadKlassNode::make(gvn, NULL, kmem, p2, gvn.type(p2)->is_ptr(), TypeKlassPtr::OBJECT_OR_NULL));
// Compile speed common case: ARE a subtype and we canNOT fail // Compile speed common case: ARE a subtype and we canNOT fail
if( superklass == nkls ) if( superklass == nkls )
@ -2733,8 +2813,8 @@ Node* Phase::gen_subtype_check(Node* subklass, Node* superklass, Node** ctrl, Me
// time. Test to see if the value loaded just previously from the subklass // time. Test to see if the value loaded just previously from the subklass
// is exactly the superklass. // is exactly the superklass.
IfNode *iff1 = gen_subtype_check_compare(*ctrl, superklass, nkls, BoolTest::eq, PROB_LIKELY(0.83f), gvn, T_ADDRESS); IfNode *iff1 = gen_subtype_check_compare(*ctrl, superklass, nkls, BoolTest::eq, PROB_LIKELY(0.83f), gvn, T_ADDRESS);
Node *iftrue1 = gvn->transform( new IfTrueNode (iff1)); Node *iftrue1 = gvn.transform( new IfTrueNode (iff1));
*ctrl = gvn->transform(new IfFalseNode(iff1)); *ctrl = gvn.transform(new IfFalseNode(iff1));
// Compile speed common case: Check for being deterministic right now. If // Compile speed common case: Check for being deterministic right now. If
// chk_off is a constant and not equal to cacheoff then we are NOT a // chk_off is a constant and not equal to cacheoff then we are NOT a
@ -2748,9 +2828,9 @@ Node* Phase::gen_subtype_check(Node* subklass, Node* superklass, Node** ctrl, Me
// Gather the various success & failures here // Gather the various success & failures here
RegionNode *r_ok_subtype = new RegionNode(4); RegionNode *r_ok_subtype = new RegionNode(4);
gvn->record_for_igvn(r_ok_subtype); gvn.record_for_igvn(r_ok_subtype);
RegionNode *r_not_subtype = new RegionNode(3); RegionNode *r_not_subtype = new RegionNode(3);
gvn->record_for_igvn(r_not_subtype); gvn.record_for_igvn(r_not_subtype);
r_ok_subtype->init_req(1, iftrue1); r_ok_subtype->init_req(1, iftrue1);
@ -2759,17 +2839,17 @@ Node* Phase::gen_subtype_check(Node* subklass, Node* superklass, Node** ctrl, Me
// check-offset points into the subklass display list or the 1-element // check-offset points into the subklass display list or the 1-element
// cache. If it points to the display (and NOT the cache) and the display // cache. If it points to the display (and NOT the cache) and the display
// missed then it's not a subtype. // missed then it's not a subtype.
Node *cacheoff = gvn->intcon(cacheoff_con); Node *cacheoff = gvn.intcon(cacheoff_con);
IfNode *iff2 = gen_subtype_check_compare(*ctrl, chk_off, cacheoff, BoolTest::ne, PROB_LIKELY(0.63f), gvn, T_INT); IfNode *iff2 = gen_subtype_check_compare(*ctrl, chk_off, cacheoff, BoolTest::ne, PROB_LIKELY(0.63f), gvn, T_INT);
r_not_subtype->init_req(1, gvn->transform(new IfTrueNode (iff2))); r_not_subtype->init_req(1, gvn.transform(new IfTrueNode (iff2)));
*ctrl = gvn->transform(new IfFalseNode(iff2)); *ctrl = gvn.transform(new IfFalseNode(iff2));
// Check for self. Very rare to get here, but it is taken 1/3 the time. // Check for self. Very rare to get here, but it is taken 1/3 the time.
// No performance impact (too rare) but allows sharing of secondary arrays // No performance impact (too rare) but allows sharing of secondary arrays
// which has some footprint reduction. // which has some footprint reduction.
IfNode *iff3 = gen_subtype_check_compare(*ctrl, subklass, superklass, BoolTest::eq, PROB_LIKELY(0.36f), gvn, T_ADDRESS); IfNode *iff3 = gen_subtype_check_compare(*ctrl, subklass, superklass, BoolTest::eq, PROB_LIKELY(0.36f), gvn, T_ADDRESS);
r_ok_subtype->init_req(2, gvn->transform(new IfTrueNode(iff3))); r_ok_subtype->init_req(2, gvn.transform(new IfTrueNode(iff3)));
*ctrl = gvn->transform(new IfFalseNode(iff3)); *ctrl = gvn.transform(new IfFalseNode(iff3));
// -- Roads not taken here: -- // -- Roads not taken here: --
// We could also have chosen to perform the self-check at the beginning // We could also have chosen to perform the self-check at the beginning
@ -2792,16 +2872,38 @@ Node* Phase::gen_subtype_check(Node* subklass, Node* superklass, Node** ctrl, Me
// out of line, and it can only improve I-cache density. // out of line, and it can only improve I-cache density.
// The decision to inline or out-of-line this final check is platform // The decision to inline or out-of-line this final check is platform
// dependent, and is found in the AD file definition of PartialSubtypeCheck. // dependent, and is found in the AD file definition of PartialSubtypeCheck.
Node* psc = gvn->transform( Node* psc = gvn.transform(
new PartialSubtypeCheckNode(*ctrl, subklass, superklass)); new PartialSubtypeCheckNode(*ctrl, subklass, superklass));
IfNode *iff4 = gen_subtype_check_compare(*ctrl, psc, gvn->zerocon(T_OBJECT), BoolTest::ne, PROB_FAIR, gvn, T_ADDRESS); IfNode *iff4 = gen_subtype_check_compare(*ctrl, psc, gvn.zerocon(T_OBJECT), BoolTest::ne, PROB_FAIR, gvn, T_ADDRESS);
r_not_subtype->init_req(2, gvn->transform(new IfTrueNode (iff4))); r_not_subtype->init_req(2, gvn.transform(new IfTrueNode (iff4)));
r_ok_subtype ->init_req(3, gvn->transform(new IfFalseNode(iff4))); r_ok_subtype ->init_req(3, gvn.transform(new IfFalseNode(iff4)));
// Return false path; set default control to true path. // Return false path; set default control to true path.
*ctrl = gvn->transform(r_ok_subtype); *ctrl = gvn.transform(r_ok_subtype);
return gvn->transform(r_not_subtype); return gvn.transform(r_not_subtype);
}
Node* GraphKit::gen_subtype_check(Node* obj_or_subklass, Node* superklass) {
// Legacy mode: expand the subtype check into its full control flow right at
// parse time instead of emitting a SubTypeCheck macro node.
if (ExpandSubTypeCheckAtParseTime) {
MergeMemNode* mem = merged_memory();
Node* ctrl = control();
Node* subklass = obj_or_subklass;
// When handed an oop rather than a klass, load the object's klass first.
if (!_gvn.type(obj_or_subklass)->isa_klassptr()) {
subklass = load_object_klass(obj_or_subklass);
}
Node* n = Phase::gen_subtype_check(subklass, superklass, &ctrl, mem, _gvn);
set_control(ctrl);
return n;
}
// NOTE(review): adr_type appears unused below — confirm it can be removed.
const TypePtr* adr_type = TypeKlassPtr::make(TypePtr::NotNull, C->env()->Object_klass(), Type::OffsetBot);
// Default mode: emit a SubTypeCheck macro node (expanded later during macro
// expansion) guarded by a single If.
Node* check = _gvn.transform(new SubTypeCheckNode(C, obj_or_subklass, superklass));
Node* bol = _gvn.transform(new BoolNode(check, BoolTest::eq));
IfNode* iff = create_and_xform_if(control(), bol, PROB_STATIC_FREQUENT, COUNT_UNKNOWN);
// Success (is a subtype) continues on the true projection; the false
// projection (not a subtype) is returned to the caller.
set_control(_gvn.transform(new IfTrueNode(iff)));
return _gvn.transform(new IfFalseNode(iff));
} }
// Profile-driven exact type check: // Profile-driven exact type check:
@ -2833,10 +2935,9 @@ Node* GraphKit::type_check_receiver(Node* receiver, ciKlass* klass,
Node* GraphKit::subtype_check_receiver(Node* receiver, ciKlass* klass, Node* GraphKit::subtype_check_receiver(Node* receiver, ciKlass* klass,
Node** casted_receiver) { Node** casted_receiver) {
const TypeKlassPtr* tklass = TypeKlassPtr::make(klass); const TypeKlassPtr* tklass = TypeKlassPtr::make(klass);
Node* recv_klass = load_object_klass(receiver);
Node* want_klass = makecon(tklass); Node* want_klass = makecon(tklass);
Node* slow_ctl = gen_subtype_check(recv_klass, want_klass); Node* slow_ctl = gen_subtype_check(receiver, want_klass);
// Cast receiver after successful check // Cast receiver after successful check
const TypeOopPtr* recv_type = tklass->cast_to_exactness(false)->is_klassptr()->as_instance_type(); const TypeOopPtr* recv_type = tklass->cast_to_exactness(false)->is_klassptr()->as_instance_type();
@ -3101,11 +3202,8 @@ Node* GraphKit::gen_instanceof(Node* obj, Node* superklass, bool safe_for_replac
} }
} }
// Load the object's klass
Node* obj_klass = load_object_klass(not_null_obj);
// Generate the subtype check // Generate the subtype check
Node* not_subtype_ctrl = gen_subtype_check(obj_klass, superklass); Node* not_subtype_ctrl = gen_subtype_check(not_null_obj, superklass);
// Plug in the success path to the general merge in slot 1. // Plug in the success path to the general merge in slot 1.
region->init_req(_obj_path, control()); region->init_req(_obj_path, control());
@ -3228,11 +3326,8 @@ Node* GraphKit::gen_checkcast(Node *obj, Node* superklass,
} }
if (cast_obj == NULL) { if (cast_obj == NULL) {
// Load the object's klass
Node* obj_klass = load_object_klass(not_null_obj);
// Generate the subtype check // Generate the subtype check
Node* not_subtype_ctrl = gen_subtype_check( obj_klass, superklass ); Node* not_subtype_ctrl = gen_subtype_check(not_null_obj, superklass );
// Plug in success path into the merge // Plug in success path into the merge
cast_obj = _gvn.transform(new CheckCastPPNode(control(), not_null_obj, toop)); cast_obj = _gvn.transform(new CheckCastPPNode(control(), not_null_obj, toop));
@ -3241,7 +3336,7 @@ Node* GraphKit::gen_checkcast(Node *obj, Node* superklass,
if (not_subtype_ctrl != top()) { // If failure is possible if (not_subtype_ctrl != top()) { // If failure is possible
PreserveJVMState pjvms(this); PreserveJVMState pjvms(this);
set_control(not_subtype_ctrl); set_control(not_subtype_ctrl);
builtin_throw(Deoptimization::Reason_class_check, obj_klass); builtin_throw(Deoptimization::Reason_class_check, load_object_klass(not_null_obj));
} }
} else { } else {
(*failure_control) = not_subtype_ctrl; (*failure_control) = not_subtype_ctrl;

View file

@ -825,13 +825,7 @@ class GraphKit : public Phase {
Node* gen_checkcast( Node *subobj, Node* superkls, Node* gen_checkcast( Node *subobj, Node* superkls,
Node* *failure_control = NULL ); Node* *failure_control = NULL );
Node* gen_subtype_check(Node* subklass, Node* superklass) { Node* gen_subtype_check(Node* obj, Node* superklass);
MergeMemNode* mem = merged_memory();
Node* ctrl = control();
Node* n = Phase::gen_subtype_check(subklass, superklass, &ctrl, mem, &_gvn);
set_control(ctrl);
return n;
}
// Exact type check used for predicted calls and casts. // Exact type check used for predicted calls and casts.
// Rewrites (*casted_receiver) to be casted to the stronger type. // Rewrites (*casted_receiver) to be casted to the stronger type.

View file

@ -3699,8 +3699,7 @@ bool LibraryCallKit::inline_array_copyOf(bool is_copyOfRange) {
// Reason_class_check rather than Reason_intrinsic because we // Reason_class_check rather than Reason_intrinsic because we
// want to intrinsify even if this traps. // want to intrinsify even if this traps.
if (!too_many_traps(Deoptimization::Reason_class_check)) { if (!too_many_traps(Deoptimization::Reason_class_check)) {
Node* not_subtype_ctrl = gen_subtype_check(load_object_klass(original), Node* not_subtype_ctrl = gen_subtype_check(original, klass_node);
klass_node);
if (not_subtype_ctrl != top()) { if (not_subtype_ctrl != top()) {
PreserveJVMState pjvms(this); PreserveJVMState pjvms(this);
@ -4766,16 +4765,17 @@ bool LibraryCallKit::inline_arraycopy() {
} }
// (9) each element of an oop array must be assignable // (9) each element of an oop array must be assignable
Node* src_klass = load_object_klass(src);
Node* dest_klass = load_object_klass(dest); Node* dest_klass = load_object_klass(dest);
Node* not_subtype_ctrl = gen_subtype_check(src_klass, dest_klass); if (src != dest) {
Node* not_subtype_ctrl = gen_subtype_check(src, dest_klass);
if (not_subtype_ctrl != top()) { if (not_subtype_ctrl != top()) {
PreserveJVMState pjvms(this); PreserveJVMState pjvms(this);
set_control(not_subtype_ctrl); set_control(not_subtype_ctrl);
uncommon_trap(Deoptimization::Reason_intrinsic, uncommon_trap(Deoptimization::Reason_intrinsic,
Deoptimization::Action_make_not_entrant); Deoptimization::Action_make_not_entrant);
assert(stopped(), "Should be stopped"); assert(stopped(), "Should be stopped");
}
} }
{ {
PreserveJVMState pjvms(this); PreserveJVMState pjvms(this);

View file

@ -4089,7 +4089,7 @@ Node *PhaseIdealLoop::get_late_ctrl( Node *n, Node *early ) {
} }
} else { } else {
Node *sctrl = has_ctrl(s) ? get_ctrl(s) : s->in(0); Node *sctrl = has_ctrl(s) ? get_ctrl(s) : s->in(0);
assert(sctrl != NULL || s->outcnt() == 0, "must have control"); assert(sctrl != NULL || !s->is_reachable_from_root(), "must have control");
if (sctrl != NULL && !sctrl->is_top() && C->can_alias(s->adr_type(), load_alias_idx) && is_dominator(early, sctrl)) { if (sctrl != NULL && !sctrl->is_top() && C->can_alias(s->adr_type(), load_alias_idx) && is_dominator(early, sctrl)) {
LCA = dom_lca_for_get_late_ctrl(LCA, sctrl, n); LCA = dom_lca_for_get_late_ctrl(LCA, sctrl, n);
} }

View file

@ -40,6 +40,7 @@
#include "opto/opaquenode.hpp" #include "opto/opaquenode.hpp"
#include "opto/rootnode.hpp" #include "opto/rootnode.hpp"
#include "opto/subnode.hpp" #include "opto/subnode.hpp"
#include "opto/subtypenode.hpp"
#include "utilities/macros.hpp" #include "utilities/macros.hpp"
//============================================================================= //=============================================================================
@ -656,6 +657,9 @@ Node *PhaseIdealLoop::conditional_move( Node *region ) {
} }
assert(bol->Opcode() == Op_Bool, "Unexpected node"); assert(bol->Opcode() == Op_Bool, "Unexpected node");
int cmp_op = bol->in(1)->Opcode(); int cmp_op = bol->in(1)->Opcode();
if (cmp_op == Op_SubTypeCheck) { // SubTypeCheck expansion expects an IfNode
return NULL;
}
// It is expensive to generate flags from a float compare. // It is expensive to generate flags from a float compare.
// Avoid duplicated float compare. // Avoid duplicated float compare.
if (phis > 1 && (cmp_op == Op_CmpF || cmp_op == Op_CmpD)) return NULL; if (phis > 1 && (cmp_op == Op_CmpF || cmp_op == Op_CmpD)) return NULL;

View file

@ -35,6 +35,7 @@
#include "opto/compile.hpp" #include "opto/compile.hpp"
#include "opto/convertnode.hpp" #include "opto/convertnode.hpp"
#include "opto/graphKit.hpp" #include "opto/graphKit.hpp"
#include "opto/intrinsicnode.hpp"
#include "opto/locknode.hpp" #include "opto/locknode.hpp"
#include "opto/loopnode.hpp" #include "opto/loopnode.hpp"
#include "opto/macro.hpp" #include "opto/macro.hpp"
@ -46,6 +47,7 @@
#include "opto/rootnode.hpp" #include "opto/rootnode.hpp"
#include "opto/runtime.hpp" #include "opto/runtime.hpp"
#include "opto/subnode.hpp" #include "opto/subnode.hpp"
#include "opto/subtypenode.hpp"
#include "opto/type.hpp" #include "opto/type.hpp"
#include "runtime/sharedRuntime.hpp" #include "runtime/sharedRuntime.hpp"
#include "utilities/macros.hpp" #include "utilities/macros.hpp"
@ -2533,6 +2535,43 @@ void PhaseMacroExpand::expand_unlock_node(UnlockNode *unlock) {
_igvn.replace_node(_memproj_fallthrough, mem_phi); _igvn.replace_node(_memproj_fallthrough, mem_phi);
} }
// Expand a SubTypeCheck macro node into the full subtype check control flow,
// rewiring every If that tests the node's Bool.
void PhaseMacroExpand::expand_subtypecheck_node(SubTypeCheckNode *check) {
assert(check->in(SubTypeCheckNode::Control) == NULL, "should be pinned");
Node* bol = check->unique_out();
Node* obj_or_subklass = check->in(SubTypeCheckNode::ObjOrSubKlass);
Node* superklass = check->in(SubTypeCheckNode::SuperKlass);
// By expansion time the Bool is expected to carry a "ne" test (the true
// projection is the not-a-subtype path, see rewiring below).
assert(bol->is_Bool() && bol->as_Bool()->_test._test == BoolTest::ne, "unexpected bool node");
for (DUIterator_Last imin, i = bol->last_outs(imin); i >= imin; --i) {
Node* iff = bol->last_out(i);
assert(iff->is_If(), "where's the if?");
if (iff->in(0)->is_top()) {
// Dead If: just disconnect its condition and move on.
_igvn.replace_input_of(iff, 1, C->top());
continue;
}
Node* iftrue = iff->as_If()->proj_out(1);
Node* iffalse = iff->as_If()->proj_out(0);
Node* ctrl = iff->in(0);
// When handed an oop rather than a klass, load the object's klass from
// immutable memory.
Node* subklass = NULL;
if (_igvn.type(obj_or_subklass)->isa_klassptr()) {
subklass = obj_or_subklass;
} else {
Node* k_adr = basic_plus_adr(obj_or_subklass, oopDesc::klass_offset_in_bytes());
subklass = _igvn.transform(LoadKlassNode::make(_igvn, NULL, C->immutable_memory(), k_adr, TypeInstPtr::KLASS));
}
// Pass NULL memory: gen_subtype_check locates the right memory state for
// the secondary-super-cache load itself when needed.
Node* not_subtype_ctrl = Phase::gen_subtype_check(subklass, superklass, &ctrl, NULL, _igvn);
// Rewire: the true projection ("ne", i.e. not a subtype) takes the failure
// control, the false projection takes the success control.
_igvn.replace_input_of(iff, 0, C->top());
_igvn.replace_node(iftrue, not_subtype_ctrl);
_igvn.replace_node(iffalse, ctrl);
}
// The macro node itself is now fully expanded; remove it.
_igvn.replace_node(check, C->top());
}
//---------------------------eliminate_macro_nodes---------------------- //---------------------------eliminate_macro_nodes----------------------
// Eliminate scalar replaced allocations and associated locks. // Eliminate scalar replaced allocations and associated locks.
void PhaseMacroExpand::eliminate_macro_nodes() { void PhaseMacroExpand::eliminate_macro_nodes() {
@ -2589,6 +2628,8 @@ void PhaseMacroExpand::eliminate_macro_nodes() {
break; break;
case Node::Class_OuterStripMinedLoop: case Node::Class_OuterStripMinedLoop:
break; break;
case Node::Class_SubTypeCheck:
break;
default: default:
assert(n->Opcode() == Op_LoopLimit || assert(n->Opcode() == Op_LoopLimit ||
n->Opcode() == Op_Opaque1 || n->Opcode() == Op_Opaque1 ||
@ -2695,6 +2736,10 @@ bool PhaseMacroExpand::expand_macro_nodes() {
expand_arraycopy_node(n->as_ArrayCopy()); expand_arraycopy_node(n->as_ArrayCopy());
assert(C->macro_count() == (old_macro_count - 1), "expansion must have deleted one node from macro list"); assert(C->macro_count() == (old_macro_count - 1), "expansion must have deleted one node from macro list");
break; break;
case Node::Class_SubTypeCheck:
expand_subtypecheck_node(n->as_SubTypeCheck());
assert(C->macro_count() == (old_macro_count - 1), "expansion must have deleted one node from macro list");
break;
} }
if (C->failing()) return true; if (C->failing()) return true;
} }

View file

@ -30,6 +30,7 @@
class AllocateNode; class AllocateNode;
class AllocateArrayNode; class AllocateArrayNode;
class CallNode; class CallNode;
class SubTypeCheckNode;
class Node; class Node;
class PhaseIterGVN; class PhaseIterGVN;
@ -183,6 +184,8 @@ private:
void expand_arraycopy_node(ArrayCopyNode *ac); void expand_arraycopy_node(ArrayCopyNode *ac);
void expand_subtypecheck_node(SubTypeCheckNode *check);
int replace_input(Node *use, Node *oldref, Node *newref); int replace_input(Node *use, Node *oldref, Node *newref);
void migrate_outs(Node *old, Node *target); void migrate_outs(Node *old, Node *target);
void copy_call_debug_info(CallNode *oldcall, CallNode * newcall); void copy_call_debug_info(CallNode *oldcall, CallNode * newcall);

View file

@ -524,7 +524,7 @@ Node* PhaseMacroExpand::generate_arraycopy(ArrayCopyNode *ac, AllocateArrayNode*
// Test S[] against D[], not S against D, because (probably) // Test S[] against D[], not S against D, because (probably)
// the secondary supertype cache is less busy for S[] than S. // the secondary supertype cache is less busy for S[] than S.
// This usually only matters when D is an interface. // This usually only matters when D is an interface.
Node* not_subtype_ctrl = Phase::gen_subtype_check(src_klass, dest_klass, ctrl, mem, &_igvn); Node* not_subtype_ctrl = Phase::gen_subtype_check(src_klass, dest_klass, ctrl, mem, _igvn);
// Plug failing path into checked_oop_disjoint_arraycopy // Plug failing path into checked_oop_disjoint_arraycopy
if (not_subtype_ctrl != top()) { if (not_subtype_ctrl != top()) {
Node* local_ctrl = not_subtype_ctrl; Node* local_ctrl = not_subtype_ctrl;

View file

@ -146,6 +146,7 @@ class StartNode;
class State; class State;
class StoreNode; class StoreNode;
class SubNode; class SubNode;
class SubTypeCheckNode;
class Type; class Type;
class TypeNode; class TypeNode;
class UnlockNode; class UnlockNode;
@ -706,6 +707,7 @@ public:
DEFINE_CLASS_ID(Cmp, Sub, 0) DEFINE_CLASS_ID(Cmp, Sub, 0)
DEFINE_CLASS_ID(FastLock, Cmp, 0) DEFINE_CLASS_ID(FastLock, Cmp, 0)
DEFINE_CLASS_ID(FastUnlock, Cmp, 1) DEFINE_CLASS_ID(FastUnlock, Cmp, 1)
DEFINE_CLASS_ID(SubTypeCheck,Cmp, 2)
DEFINE_CLASS_ID(MergeMem, Node, 7) DEFINE_CLASS_ID(MergeMem, Node, 7)
DEFINE_CLASS_ID(Bool, Node, 8) DEFINE_CLASS_ID(Bool, Node, 8)
@ -875,6 +877,7 @@ public:
DEFINE_CLASS_QUERY(Start) DEFINE_CLASS_QUERY(Start)
DEFINE_CLASS_QUERY(Store) DEFINE_CLASS_QUERY(Store)
DEFINE_CLASS_QUERY(Sub) DEFINE_CLASS_QUERY(Sub)
DEFINE_CLASS_QUERY(SubTypeCheck)
DEFINE_CLASS_QUERY(Type) DEFINE_CLASS_QUERY(Type)
DEFINE_CLASS_QUERY(Vector) DEFINE_CLASS_QUERY(Vector)
DEFINE_CLASS_QUERY(LoadVector) DEFINE_CLASS_QUERY(LoadVector)

View file

@ -132,7 +132,7 @@ protected:
// Object; if you wish to check an Object you need to load the Object's // Object; if you wish to check an Object you need to load the Object's
// class prior to coming here. // class prior to coming here.
// Used in GraphKit and PhaseMacroExpand // Used in GraphKit and PhaseMacroExpand
static Node* gen_subtype_check(Node* subklass, Node* superklass, Node** ctrl, MergeMemNode* mem, PhaseGVN* gvn); static Node* gen_subtype_check(Node* subklass, Node* superklass, Node** ctrl, Node* mem, PhaseGVN& gvn);
public: public:
Compile * C; Compile * C;

View file

@ -1352,7 +1352,7 @@ Node *BoolNode::Ideal(PhaseGVN *phase, bool can_reshape) {
Node *cmp = in(1); Node *cmp = in(1);
if( !cmp->is_Sub() ) return NULL; if( !cmp->is_Sub() ) return NULL;
int cop = cmp->Opcode(); int cop = cmp->Opcode();
if( cop == Op_FastLock || cop == Op_FastUnlock) return NULL; if( cop == Op_FastLock || cop == Op_FastUnlock || cmp->is_SubTypeCheck()) return NULL;
Node *cmp1 = cmp->in(1); Node *cmp1 = cmp->in(1);
Node *cmp2 = cmp->in(2); Node *cmp2 = cmp->in(2);
if( !cmp1 ) return NULL; if( !cmp1 ) return NULL;

View file

@ -0,0 +1,165 @@
/*
* Copyright (c) 2020, Oracle and/or its affiliates. All rights reserved.
* DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
*
* This code is free software; you can redistribute it and/or modify it
* under the terms of the GNU General Public License version 2 only, as
* published by the Free Software Foundation.
*
* This code is distributed in the hope that it will be useful, but WITHOUT
* ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
* FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
* version 2 for more details (a copy is included in the LICENSE file that
* accompanied this code).
*
* You should have received a copy of the GNU General Public License version
* 2 along with this work; if not, write to the Free Software Foundation,
* Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
*
* Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
* or visit www.oracle.com if you need additional information or have any
* questions.
*
*/
#include "precompiled.hpp"
#include "opto/addnode.hpp"
#include "opto/connode.hpp"
#include "opto/convertnode.hpp"
#include "opto/phaseX.hpp"
#include "opto/subnode.hpp"
#include "opto/subtypenode.hpp"
// Compute the constant-folding result of this subtype check:
// CC_GT when the sub input can be proven NOT to be a subtype of the
// super input, CC_EQ when it provably is, and bottom_type() (CC)
// when the outcome is not statically known.
const Type* SubTypeCheckNode::sub(const Type* sub_t, const Type* super_t) const {
  ciKlass* superk = super_t->is_klassptr()->klass();
  // The sub input is either an object (oop) whose klass is tested, or a klass.
  ciKlass* subk = sub_t->isa_klassptr() ? sub_t->is_klassptr()->klass() : sub_t->is_oopptr()->klass();

  bool xsubk = sub_t->isa_klassptr() ? sub_t->is_klassptr()->klass_is_exact() : sub_t->is_oopptr()->klass_is_exact();

  // Similar to logic in CmpPNode::sub(): try to prove the two classes are
  // statically unrelated. Interfaces (and object arrays whose base element
  // is an interface) are excluded because the compiler's type system does
  // not track interface hierarchies precisely.
  if (superk && subk &&
      superk->is_loaded() && !superk->is_interface() &&
      subk->is_loaded() && !subk->is_interface() &&
      (!superk->is_obj_array_klass() ||
       !superk->as_obj_array_klass()->base_element_klass()->is_interface()) &&
      (!subk->is_obj_array_klass() ||
       !subk->as_obj_array_klass()->base_element_klass()->is_interface() ||
       xsubk)) {
    bool unrelated_classes = false;
    if (superk->equals(subk)) {
      // skip
    } else if (superk->is_subtype_of(subk)) {
      // super is a proper subtype of sub: the check can only succeed for a
      // subclass of sub, so it fails whenever sub's type is exact.
      unrelated_classes = xsubk;
    } else if (subk->is_subtype_of(superk)) {
      // skip
    } else {
      unrelated_classes = true;
    }
    if (unrelated_classes) {
      // Unrelated classes: the check fails unless the sub value may still
      // be NULL or of unknown pointer kind.
      TypePtr::PTR jp = sub_t->is_ptr()->join_ptr(super_t->is_ptr()->_ptr);
      if (jp != TypePtr::Null && jp != TypePtr::BotPTR) {
        return TypeInt::CC_GT;
      }
    }
  }

  // With a constant superclass, the static subtype check may fold the
  // result entirely; the easy/full test cases stay undecided here.
  if (super_t->singleton()) {
    if (subk != NULL) {
      switch (Compile::current()->static_subtype_check(superk, subk)) {
      case Compile::SSC_always_false:
        return TypeInt::CC_GT;
      case Compile::SSC_always_true:
        return TypeInt::CC_EQ;
      case Compile::SSC_easy_test:
      case Compile::SSC_full_test:
        break;
      default:
        ShouldNotReachHere();
      }
    }
  }

  return bottom_type();
}
#ifdef ASSERT
// Load the klass of obj_or_subklass when it is an object (oop); a klass
// input is passed through unchanged.
static Node* load_subklass(Node* obj_or_subklass, const Type* sub_t, PhaseGVN* phase) {
  if (sub_t->isa_oopptr()) {
    Node* adr = phase->transform(new AddPNode(obj_or_subklass, obj_or_subklass, phase->MakeConX(oopDesc::klass_offset_in_bytes())));
    return phase->transform(LoadKlassNode::make(*phase, NULL, phase->C->immutable_memory(), adr, TypeInstPtr::KLASS));
  }
  return obj_or_subklass;
}
#endif

// The subtype check is a macro node: it is kept opaque until macro
// expansion, so there is no graph reshaping to do here. In debug builds,
// verify that the simple code patterns the check would be expanded to
// (SSC_easy_test / SSC_full_test) would not constant fold better than
// Value() does on the macro node itself.
Node *SubTypeCheckNode::Ideal(PhaseGVN *phase, bool can_reshape) {
#ifdef ASSERT
  Node* obj_or_subklass = in(ObjOrSubKlass);
  Node* superklass = in(SuperKlass);

  if (obj_or_subklass == NULL ||
      superklass == NULL) {
    return NULL;
  }

  const Type* sub_t = phase->type(obj_or_subklass);
  const Type* super_t = phase->type(superklass);

  if (!super_t->isa_klassptr() ||
      (!sub_t->isa_klassptr() && !sub_t->isa_oopptr())) {
    return NULL;
  }

  ciKlass* superk = super_t->is_klassptr()->klass();
  ciKlass* subk = sub_t->isa_klassptr() ? sub_t->is_klassptr()->klass() : sub_t->is_oopptr()->klass();

  if (super_t->singleton() && subk != NULL) {
    Node* res = NULL;
    switch (phase->C->static_subtype_check(superk, subk)) {
    case Compile::SSC_easy_test: {
      // The expanded check would be a simple pointer compare of the klasses.
      Node* subklass = load_subklass(obj_or_subklass, sub_t, phase);
      res = new CmpPNode(subklass, superklass);
      break;
    }
    case Compile::SSC_full_test: {
      Node* subklass = load_subklass(obj_or_subklass, sub_t, phase);

      // The expanded check would first load the super check offset from the
      // superklass and compare the klass found at that offset in subklass.
      Node *p1 = phase->transform(new AddPNode(superklass, superklass, phase->MakeConX(in_bytes(Klass::super_check_offset_offset()))));
      Node* m = phase->C->immutable_memory();
      Node *chk_off = phase->transform(new LoadINode(NULL, m, p1, phase->type(p1)->is_ptr(), TypeInt::INT, MemNode::unordered));
      int cacheoff_con = in_bytes(Klass::secondary_super_cache_offset());
      bool might_be_cache = (phase->find_int_con(chk_off, cacheoff_con) == cacheoff_con);
      if (might_be_cache) {
        // A check through the secondary super cache never constant folds:
        // nothing to verify.
        return NULL;
      }

      Node *chk_off_X = chk_off;
#ifdef _LP64
      chk_off_X = phase->transform(new ConvI2LNode(chk_off_X));
#endif
      Node *p2 = phase->transform(new AddPNode(subklass, subklass, chk_off_X));
      Node *kmem = phase->C->immutable_memory();
      Node *nkls = phase->transform(LoadKlassNode::make(*phase, NULL, kmem, p2, phase->type(p2)->is_ptr(), TypeKlassPtr::OBJECT_OR_NULL));
      res = new CmpPNode(superklass, nkls);
      break;
    }
    default:
      // SSC_always_true/SSC_always_false are folded by Value(); nothing to
      // cross-check here.
      break;
    }

    if (res != NULL) {
      const Type* t = phase->type(phase->transform(res));
      assert((Value(phase) == t) || (t != TypeInt::CC_GT && t != TypeInt::CC_EQ), "missing Value() optimization");
      if (phase->is_IterGVN()) {
        // res was only built for verification: let IGVN reclaim it as dead.
        phase->is_IterGVN()->_worklist.push(res);
      }
    }
  }
#endif
  return NULL;
}

View file

@ -0,0 +1,54 @@
/*
* Copyright (c) 2020, Oracle and/or its affiliates. All rights reserved.
* DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
*
* This code is free software; you can redistribute it and/or modify it
* under the terms of the GNU General Public License version 2 only, as
* published by the Free Software Foundation.
*
* This code is distributed in the hope that it will be useful, but WITHOUT
* ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
* FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
* version 2 for more details (a copy is included in the LICENSE file that
* accompanied this code).
*
* You should have received a copy of the GNU General Public License version
* 2 along with this work; if not, write to the Free Software Foundation,
* Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
*
* Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
* or visit www.oracle.com if you need additional information or have any
* questions.
*
*/
#ifndef SHARE_OPTO_SUBTYPENODE_HPP
#define SHARE_OPTO_SUBTYPENODE_HPP
#include "opto/node.hpp"
// Macro node for a subtype check: kept opaque through loop optimizations
// and only expanded to the actual checking code by PhaseMacroExpand.
// Compares an object (or a subklass) against a superklass and produces a
// condition code: CC_EQ when the check is known to succeed, CC_GT when it
// is known to fail, CC otherwise.
class SubTypeCheckNode : public CmpNode {
public:
  enum {
    Control,       // in(0): optional control input
    ObjOrSubKlass, // in(1): object whose klass is tested, or the subklass itself
    SuperKlass     // in(2): klass to check against
  };

  SubTypeCheckNode(Compile* C, Node* obj_or_subklass, Node* superklass)
    : CmpNode(obj_or_subklass, superklass) {
    init_class_id(Class_SubTypeCheck);
    // Macro node: registered so PhaseMacroExpand expands it later.
    init_flags(Flag_is_macro);
    C->add_macro_node(this);
  }

  virtual Node* Ideal(PhaseGVN* phase, bool can_reshape);
  virtual const Type* sub(const Type*, const Type*) const;
  virtual Node* Identity(PhaseGVN* phase) { return this; }

  virtual int Opcode() const;
  virtual const Type* bottom_type() const { return TypeInt::CC; }
  // Must not float above the checks it guards once expanded.
  virtual bool depends_only_on_test() const { return false; }
};
#endif // SHARE_OPTO_SUBTYPENODE_HPP