Mirror of https://github.com/openjdk/jdk.git (synced 2025-09-17 17:44:40 +02:00)

8238691: C2: turn subtype check into macro node
Reviewed-by: vlivanov, thartmann

commit 52d46c314b (parent e4b27a48a0)
18 changed files with 443 additions and 71 deletions
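
In short: instead of expanding the whole subtype check into control flow while parsing, GraphKit now emits a single SubTypeCheck macro node that PhaseMacroExpand rewrites into the real check late in the optimization pipeline. A rough sketch of the before/after shape, based on the graphKit.hpp and graphKit.cpp hunks below (illustration only, not part of the patch):

    // Before: the check was fully expanded at parse time:
    //   Node* not_subtype_ctrl = Phase::gen_subtype_check(subklass, superklass,
    //                                                     &ctrl, merged_memory(), &_gvn);
    // After: parsing emits one macro node; PhaseMacroExpand::expand_subtypecheck_node()
    // generates the real check later:
    //   Node* check = _gvn.transform(new SubTypeCheckNode(C, obj_or_subklass, superklass));
    //   Node* bol   = _gvn.transform(new BoolNode(check, BoolTest::eq));
    //   IfNode* iff = create_and_xform_if(control(), bol, PROB_STATIC_FREQUENT, COUNT_UNKNOWN);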

@@ -746,6 +746,9 @@
           range(0, max_juint)                                              \
                                                                            \
   product(bool, UseProfiledLoopPredicate, true,                            \
           "Move predicates out of loops based on profiling data")          \
                                                                            \
+  diagnostic(bool, ExpandSubTypeCheckAtParseTime, false,                   \
+          "Do not use subtype check macro node")                           \
+                                                                           \
 #endif // SHARE_OPTO_C2_GLOBALS_HPP

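Note that ExpandSubTypeCheckAtParseTime is a diagnostic flag, so in product builds it presumably has to be unlocked with -XX:+UnlockDiagnosticVMOptions before it can be turned on; its default of false keeps the new macro-node path.
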
@@ -46,6 +46,7 @@
 #include "opto/opaquenode.hpp"
 #include "opto/rootnode.hpp"
 #include "opto/subnode.hpp"
+#include "opto/subtypenode.hpp"
 #include "opto/vectornode.hpp"
 #include "utilities/macros.hpp"
 #if INCLUDE_SHENANDOAHGC

@@ -250,6 +250,7 @@ macro(OverflowMulL)
 macro(PCTable)
 macro(Parm)
 macro(PartialSubtypeCheck)
+macro(SubTypeCheck)
 macro(Phi)
 macro(PopCountI)
 macro(PopCountL)

@@ -4239,6 +4239,9 @@ int Compile::static_subtype_check(ciKlass* superk, ciKlass* subk) {
         // Add a dependency if there is a chance of a later subclass.
         dependencies()->assert_leaf_type(ik);
       }
+      if (ik->is_abstract()) {
+        return SSC_always_false;
+      }
       return SSC_easy_test;     // (3) caller can do a simple ptr comparison
     }
   } else {

@@ -3199,6 +3199,7 @@ void ConnectionGraph::split_unique_types(GrowableArray<Node *> &alloc_worklist,
                  op == Op_FastLock || op == Op_AryEq || op == Op_StrComp || op == Op_HasNegatives ||
                  op == Op_StrCompressedCopy || op == Op_StrInflatedCopy ||
                  op == Op_StrEquals || op == Op_StrIndexOf || op == Op_StrIndexOfChar ||
+                 op == Op_SubTypeCheck ||
                  BarrierSet::barrier_set()->barrier_set_c2()->is_gc_barrier_node(use))) {
         n->dump();
         use->dump();

@@ -41,6 +41,7 @@
 #include "opto/parse.hpp"
 #include "opto/rootnode.hpp"
 #include "opto/runtime.hpp"
+#include "opto/subtypenode.hpp"
 #include "runtime/deoptimization.hpp"
 #include "runtime/sharedRuntime.hpp"
 #include "utilities/bitMap.inline.hpp"

@@ -2623,21 +2624,94 @@ void GraphKit::make_slow_call_ex(Node* call, ciInstanceKlass* ex_klass, bool sep
   set_control(norm);
 }

-static IfNode* gen_subtype_check_compare(Node* ctrl, Node* in1, Node* in2, BoolTest::mask test, float p, PhaseGVN* gvn, BasicType bt) {
+static IfNode* gen_subtype_check_compare(Node* ctrl, Node* in1, Node* in2, BoolTest::mask test, float p, PhaseGVN& gvn, BasicType bt) {
   Node* cmp = NULL;
   switch(bt) {
     case T_INT: cmp = new CmpINode(in1, in2); break;
     case T_ADDRESS: cmp = new CmpPNode(in1, in2); break;
     default: fatal("unexpected comparison type %s", type2name(bt));
   }
-  gvn->transform(cmp);
-  Node* bol = gvn->transform(new BoolNode(cmp, test));
+  gvn.transform(cmp);
+  Node* bol = gvn.transform(new BoolNode(cmp, test));
   IfNode* iff = new IfNode(ctrl, bol, p, COUNT_UNKNOWN);
-  gvn->transform(iff);
-  if (!bol->is_Con()) gvn->record_for_igvn(iff);
+  gvn.transform(iff);
+  if (!bol->is_Con()) gvn.record_for_igvn(iff);
   return iff;
 }

+// Find the memory state for the secondary super type cache load when
+// a subtype check is expanded at macro expansion time. That field is
+// mutable so should not use immutable memory but
+// PartialSubtypeCheckNode that might modify it doesn't produce a new
+// memory state so bottom memory is the most accurate memory state to
+// hook the load with. This follows the implementation used when the
+// subtype check is expanded at parse time.
+static Node* find_bottom_mem(Node* ctrl, Compile* C) {
+  const TypePtr* adr_type = TypeKlassPtr::make(TypePtr::NotNull, C->env()->Object_klass(), Type::OffsetBot);
+  Node_Stack stack(0);
+  VectorSet seen(Thread::current()->resource_area());
+
+  Node* c = ctrl;
+  Node* mem = NULL;
+  uint iter = 0;
+  do {
+    iter++;
+    assert(iter < C->live_nodes(), "infinite loop");
+    if (c->is_Region()) {
+      for (DUIterator_Fast imax, i = c->fast_outs(imax); i < imax && mem == NULL; i++) {
+        Node* u = c->fast_out(i);
+        if (u->is_Phi() && u->bottom_type() == Type::MEMORY &&
+            (u->adr_type() == TypePtr::BOTTOM || u->adr_type() == adr_type)) {
+          mem = u;
+        }
+      }
+      if (mem == NULL) {
+        if (!seen.test_set(c->_idx)) {
+          stack.push(c, 2);
+          c = c->in(1);
+        } else {
+          Node* phi = NULL;
+          uint idx = 0;
+          for (;;) {
+            phi = stack.node();
+            idx = stack.index();
+            if (idx < phi->req()) {
+              break;
+            }
+            stack.pop();
+          }
+          c = phi->in(idx);
+          stack.set_index(idx+1);
+        }
+      }
+    } else if (c->is_Proj() && c->in(0)->adr_type() == TypePtr::BOTTOM) {
+      for (DUIterator_Fast imax, i = c->in(0)->fast_outs(imax); i < imax; i++) {
+        Node* u = c->in(0)->fast_out(i);
+        if (u->bottom_type() == Type::MEMORY && u->as_Proj()->_is_io_use == c->as_Proj()->_is_io_use) {
+          assert(mem == NULL, "");
+          mem = u;
+        }
+      }
+    } else if (c->is_CatchProj() && c->in(0)->in(0)->in(0)->adr_type() == TypePtr::BOTTOM) {
+      Node* call = c->in(0)->in(0)->in(0);
+      assert(call->is_Call(), "CatchProj with no call?");
+      CallProjections projs;
+      call->as_Call()->extract_projections(&projs, false, false);
+      if (projs.catchall_memproj == NULL) {
+        mem = projs.fallthrough_memproj;
+      } else if (c == projs.fallthrough_catchproj) {
+        mem = projs.fallthrough_memproj;
+      } else {
+        assert(c == projs.catchall_catchproj, "strange control");
+        mem = projs.catchall_memproj;
+      }
+    } else {
+      assert(!c->is_Start(), "should stop before start");
+      c = c->in(0);
+    }
+  } while (mem == NULL);
+  return mem;
+}
+
 //-------------------------------gen_subtype_check-----------------------------
 // Generate a subtyping check. Takes as input the subtype and supertype.

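Note: find_bottom_mem() only matters once the check can be expanded after parsing. Phase::gen_subtype_check() below now takes the GVN by reference and a plain Node* memory input that may be NULL; when it is (the macro expansion case, see macro.cpp further down, which passes NULL), the memory state for the secondary super cache load is recovered from the control flow through this helper.
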
@@ -2647,9 +2721,8 @@ static IfNode* gen_subtype_check_compare(Node* ctrl, Node* in1, Node* in2, BoolT
 // but that's not exposed to the optimizer. This call also doesn't take in an
 // Object; if you wish to check an Object you need to load the Object's class
 // prior to coming here.
-Node* Phase::gen_subtype_check(Node* subklass, Node* superklass, Node** ctrl, MergeMemNode* mem, PhaseGVN* gvn) {
-  Compile* C = gvn->C;
-
+Node* Phase::gen_subtype_check(Node* subklass, Node* superklass, Node** ctrl, Node* mem, PhaseGVN& gvn) {
+  Compile* C = gvn.C;
   if ((*ctrl)->is_top()) {
     return C->top();
   }

@@ -2660,9 +2733,9 @@ Node* Phase::gen_subtype_check(Node* subklass, Node* superklass, Node** ctrl, Me
   if (subklass == superklass)
     return C->top();             // false path is dead; no test needed.

-  if (gvn->type(superklass)->singleton()) {
-    ciKlass* superk = gvn->type(superklass)->is_klassptr()->klass();
-    ciKlass* subk   = gvn->type(subklass)->is_klassptr()->klass();
+  if (gvn.type(superklass)->singleton()) {
+    ciKlass* superk = gvn.type(superklass)->is_klassptr()->klass();
+    ciKlass* subk   = gvn.type(subklass)->is_klassptr()->klass();

     // In the common case of an exact superklass, try to fold up the
     // test before generating code. You may ask, why not just generate

@@ -2677,7 +2750,7 @@ Node* Phase::gen_subtype_check(Node* subklass, Node* superklass, Node** ctrl, Me
     case Compile::SSC_always_false:
       {
         Node* always_fail = *ctrl;
-        *ctrl = gvn->C->top();
+        *ctrl = gvn.C->top();
         return always_fail;
       }
     case Compile::SSC_always_true:

@@ -2686,8 +2759,8 @@ Node* Phase::gen_subtype_check(Node* subklass, Node* superklass, Node** ctrl, Me
       {
         // Just do a direct pointer compare and be done.
        IfNode* iff = gen_subtype_check_compare(*ctrl, subklass, superklass, BoolTest::eq, PROB_STATIC_FREQUENT, gvn, T_ADDRESS);
-        *ctrl = gvn->transform(new IfTrueNode(iff));
-        return gvn->transform(new IfFalseNode(iff));
+        *ctrl = gvn.transform(new IfTrueNode(iff));
+        return gvn.transform(new IfFalseNode(iff));
       }
     case Compile::SSC_full_test:
       break;

@@ -2701,11 +2774,11 @@ Node* Phase::gen_subtype_check(Node* subklass, Node* superklass, Node** ctrl, Me
   // will always succeed. We could leave a dependency behind to ensure this.

   // First load the super-klass's check-offset
-  Node *p1 = gvn->transform(new AddPNode(superklass, superklass, gvn->MakeConX(in_bytes(Klass::super_check_offset_offset()))));
-  Node* m = mem->memory_at(C->get_alias_index(gvn->type(p1)->is_ptr()));
-  Node *chk_off = gvn->transform(new LoadINode(NULL, m, p1, gvn->type(p1)->is_ptr(), TypeInt::INT, MemNode::unordered));
+  Node *p1 = gvn.transform(new AddPNode(superklass, superklass, gvn.MakeConX(in_bytes(Klass::super_check_offset_offset()))));
+  Node* m = C->immutable_memory();
+  Node *chk_off = gvn.transform(new LoadINode(NULL, m, p1, gvn.type(p1)->is_ptr(), TypeInt::INT, MemNode::unordered));
   int cacheoff_con = in_bytes(Klass::secondary_super_cache_offset());
-  bool might_be_cache = (gvn->find_int_con(chk_off, cacheoff_con) == cacheoff_con);
+  bool might_be_cache = (gvn.find_int_con(chk_off, cacheoff_con) == cacheoff_con);

   // Load from the sub-klass's super-class display list, or a 1-word cache of
   // the secondary superclass list, or a failing value with a sentinel offset

@@ -2715,15 +2788,22 @@ Node* Phase::gen_subtype_check(Node* subklass, Node* superklass, Node** ctrl, Me
   // klass loads can never produce a NULL).
   Node *chk_off_X = chk_off;
 #ifdef _LP64
-  chk_off_X = gvn->transform(new ConvI2LNode(chk_off_X));
+  chk_off_X = gvn.transform(new ConvI2LNode(chk_off_X));
 #endif
-  Node *p2 = gvn->transform(new AddPNode(subklass,subklass,chk_off_X));
+  Node *p2 = gvn.transform(new AddPNode(subklass,subklass,chk_off_X));
   // For some types like interfaces the following loadKlass is from a 1-word
   // cache which is mutable so can't use immutable memory. Other
   // types load from the super-class display table which is immutable.
-  m = mem->memory_at(C->get_alias_index(gvn->type(p2)->is_ptr()));
-  Node *kmem = might_be_cache ? m : C->immutable_memory();
-  Node *nkls = gvn->transform(LoadKlassNode::make(*gvn, NULL, kmem, p2, gvn->type(p2)->is_ptr(), TypeKlassPtr::OBJECT_OR_NULL));
+  Node *kmem = C->immutable_memory();
+  if (might_be_cache) {
+    assert((C->get_alias_index(TypeKlassPtr::make(TypePtr::NotNull, C->env()->Object_klass(), Type::OffsetBot)) ==
+            C->get_alias_index(gvn.type(p2)->is_ptr())), "");
+    if (mem == NULL) {
+      mem = find_bottom_mem(*ctrl, C);
+    }
+    kmem = mem->is_MergeMem() ? mem->as_MergeMem()->memory_at(C->get_alias_index(gvn.type(p2)->is_ptr())) : mem;
+  }
+  Node *nkls = gvn.transform(LoadKlassNode::make(gvn, NULL, kmem, p2, gvn.type(p2)->is_ptr(), TypeKlassPtr::OBJECT_OR_NULL));

   // Compile speed common case: ARE a subtype and we canNOT fail
   if( superklass == nkls )

@@ -2733,8 +2813,8 @@ Node* Phase::gen_subtype_check(Node* subklass, Node* superklass, Node** ctrl, Me
   // time. Test to see if the value loaded just previously from the subklass
   // is exactly the superklass.
   IfNode *iff1 = gen_subtype_check_compare(*ctrl, superklass, nkls, BoolTest::eq, PROB_LIKELY(0.83f), gvn, T_ADDRESS);
-  Node *iftrue1 = gvn->transform( new IfTrueNode (iff1));
-  *ctrl = gvn->transform(new IfFalseNode(iff1));
+  Node *iftrue1 = gvn.transform( new IfTrueNode (iff1));
+  *ctrl = gvn.transform(new IfFalseNode(iff1));

   // Compile speed common case: Check for being deterministic right now. If
   // chk_off is a constant and not equal to cacheoff then we are NOT a

@@ -2748,9 +2828,9 @@ Node* Phase::gen_subtype_check(Node* subklass, Node* superklass, Node** ctrl, Me

   // Gather the various success & failures here
   RegionNode *r_ok_subtype = new RegionNode(4);
-  gvn->record_for_igvn(r_ok_subtype);
+  gvn.record_for_igvn(r_ok_subtype);
   RegionNode *r_not_subtype = new RegionNode(3);
-  gvn->record_for_igvn(r_not_subtype);
+  gvn.record_for_igvn(r_not_subtype);

   r_ok_subtype->init_req(1, iftrue1);

@@ -2759,17 +2839,17 @@ Node* Phase::gen_subtype_check(Node* subklass, Node* superklass, Node** ctrl, Me
   // check-offset points into the subklass display list or the 1-element
   // cache. If it points to the display (and NOT the cache) and the display
   // missed then it's not a subtype.
-  Node *cacheoff = gvn->intcon(cacheoff_con);
+  Node *cacheoff = gvn.intcon(cacheoff_con);
   IfNode *iff2 = gen_subtype_check_compare(*ctrl, chk_off, cacheoff, BoolTest::ne, PROB_LIKELY(0.63f), gvn, T_INT);
-  r_not_subtype->init_req(1, gvn->transform(new IfTrueNode (iff2)));
-  *ctrl = gvn->transform(new IfFalseNode(iff2));
+  r_not_subtype->init_req(1, gvn.transform(new IfTrueNode (iff2)));
+  *ctrl = gvn.transform(new IfFalseNode(iff2));

   // Check for self. Very rare to get here, but it is taken 1/3 the time.
   // No performance impact (too rare) but allows sharing of secondary arrays
   // which has some footprint reduction.
   IfNode *iff3 = gen_subtype_check_compare(*ctrl, subklass, superklass, BoolTest::eq, PROB_LIKELY(0.36f), gvn, T_ADDRESS);
-  r_ok_subtype->init_req(2, gvn->transform(new IfTrueNode(iff3)));
-  *ctrl = gvn->transform(new IfFalseNode(iff3));
+  r_ok_subtype->init_req(2, gvn.transform(new IfTrueNode(iff3)));
+  *ctrl = gvn.transform(new IfFalseNode(iff3));

   // -- Roads not taken here: --
   // We could also have chosen to perform the self-check at the beginning

@@ -2792,16 +2872,38 @@ Node* Phase::gen_subtype_check(Node* subklass, Node* superklass, Node** ctrl, Me
   // out of line, and it can only improve I-cache density.
   // The decision to inline or out-of-line this final check is platform
   // dependent, and is found in the AD file definition of PartialSubtypeCheck.
-  Node* psc = gvn->transform(
+  Node* psc = gvn.transform(
     new PartialSubtypeCheckNode(*ctrl, subklass, superklass));

-  IfNode *iff4 = gen_subtype_check_compare(*ctrl, psc, gvn->zerocon(T_OBJECT), BoolTest::ne, PROB_FAIR, gvn, T_ADDRESS);
-  r_not_subtype->init_req(2, gvn->transform(new IfTrueNode (iff4)));
-  r_ok_subtype ->init_req(3, gvn->transform(new IfFalseNode(iff4)));
+  IfNode *iff4 = gen_subtype_check_compare(*ctrl, psc, gvn.zerocon(T_OBJECT), BoolTest::ne, PROB_FAIR, gvn, T_ADDRESS);
+  r_not_subtype->init_req(2, gvn.transform(new IfTrueNode (iff4)));
+  r_ok_subtype ->init_req(3, gvn.transform(new IfFalseNode(iff4)));

   // Return false path; set default control to true path.
-  *ctrl = gvn->transform(r_ok_subtype);
-  return gvn->transform(r_not_subtype);
+  *ctrl = gvn.transform(r_ok_subtype);
+  return gvn.transform(r_not_subtype);
 }

+Node* GraphKit::gen_subtype_check(Node* obj_or_subklass, Node* superklass) {
+  if (ExpandSubTypeCheckAtParseTime) {
+    MergeMemNode* mem = merged_memory();
+    Node* ctrl = control();
+    Node* subklass = obj_or_subklass;
+    if (!_gvn.type(obj_or_subklass)->isa_klassptr()) {
+      subklass = load_object_klass(obj_or_subklass);
+    }
+
+    Node* n = Phase::gen_subtype_check(subklass, superklass, &ctrl, mem, _gvn);
+    set_control(ctrl);
+    return n;
+  }
+
+  const TypePtr* adr_type = TypeKlassPtr::make(TypePtr::NotNull, C->env()->Object_klass(), Type::OffsetBot);
+  Node* check = _gvn.transform(new SubTypeCheckNode(C, obj_or_subklass, superklass));
+  Node* bol = _gvn.transform(new BoolNode(check, BoolTest::eq));
+  IfNode* iff = create_and_xform_if(control(), bol, PROB_STATIC_FREQUENT, COUNT_UNKNOWN);
+  set_control(_gvn.transform(new IfTrueNode(iff)));
+  return _gvn.transform(new IfFalseNode(iff));
+}
+
 // Profile-driven exact type check:

@@ -2833,10 +2935,9 @@ Node* GraphKit::type_check_receiver(Node* receiver, ciKlass* klass,
 Node* GraphKit::subtype_check_receiver(Node* receiver, ciKlass* klass,
                                        Node** casted_receiver) {
   const TypeKlassPtr* tklass = TypeKlassPtr::make(klass);
-  Node* recv_klass = load_object_klass(receiver);
   Node* want_klass = makecon(tklass);

-  Node* slow_ctl = gen_subtype_check(recv_klass, want_klass);
+  Node* slow_ctl = gen_subtype_check(receiver, want_klass);

   // Cast receiver after successful check
   const TypeOopPtr* recv_type = tklass->cast_to_exactness(false)->is_klassptr()->as_instance_type();

@@ -3101,11 +3202,8 @@ Node* GraphKit::gen_instanceof(Node* obj, Node* superklass, bool safe_for_replac
     }
   }

-  // Load the object's klass
-  Node* obj_klass = load_object_klass(not_null_obj);
-
   // Generate the subtype check
-  Node* not_subtype_ctrl = gen_subtype_check(obj_klass, superklass);
+  Node* not_subtype_ctrl = gen_subtype_check(not_null_obj, superklass);

   // Plug in the success path to the general merge in slot 1.
   region->init_req(_obj_path, control());

@@ -3228,11 +3326,8 @@ Node* GraphKit::gen_checkcast(Node *obj, Node* superklass,
   }

   if (cast_obj == NULL) {
-    // Load the object's klass
-    Node* obj_klass = load_object_klass(not_null_obj);
-
     // Generate the subtype check
-    Node* not_subtype_ctrl = gen_subtype_check( obj_klass, superklass );
+    Node* not_subtype_ctrl = gen_subtype_check(not_null_obj, superklass );

     // Plug in success path into the merge
     cast_obj = _gvn.transform(new CheckCastPPNode(control(), not_null_obj, toop));

@@ -3241,7 +3336,7 @@ Node* GraphKit::gen_checkcast(Node *obj, Node* superklass,
       if (not_subtype_ctrl != top()) { // If failure is possible
         PreserveJVMState pjvms(this);
         set_control(not_subtype_ctrl);
-        builtin_throw(Deoptimization::Reason_class_check, obj_klass);
+        builtin_throw(Deoptimization::Reason_class_check, load_object_klass(not_null_obj));
       }
     } else {
       (*failure_control) = not_subtype_ctrl;

@@ -825,13 +825,7 @@ class GraphKit : public Phase {
   Node* gen_checkcast( Node *subobj, Node* superkls,
                        Node* *failure_control = NULL );

-  Node* gen_subtype_check(Node* subklass, Node* superklass) {
-    MergeMemNode* mem = merged_memory();
-    Node* ctrl = control();
-    Node* n = Phase::gen_subtype_check(subklass, superklass, &ctrl, mem, &_gvn);
-    set_control(ctrl);
-    return n;
-  }
+  Node* gen_subtype_check(Node* obj, Node* superklass);

   // Exact type check used for predicted calls and casts.
   // Rewrites (*casted_receiver) to be casted to the stronger type.

@@ -3699,8 +3699,7 @@ bool LibraryCallKit::inline_array_copyOf(bool is_copyOfRange) {
     // Reason_class_check rather than Reason_intrinsic because we
     // want to intrinsify even if this traps.
     if (!too_many_traps(Deoptimization::Reason_class_check)) {
-      Node* not_subtype_ctrl = gen_subtype_check(load_object_klass(original),
-                                                 klass_node);
+      Node* not_subtype_ctrl = gen_subtype_check(original, klass_node);

       if (not_subtype_ctrl != top()) {
         PreserveJVMState pjvms(this);

@@ -4766,9 +4765,9 @@ bool LibraryCallKit::inline_arraycopy() {
   }

   // (9) each element of an oop array must be assignable
-  Node* src_klass  = load_object_klass(src);
   Node* dest_klass = load_object_klass(dest);
-  Node* not_subtype_ctrl = gen_subtype_check(src_klass, dest_klass);
+  if (src != dest) {
+    Node* not_subtype_ctrl = gen_subtype_check(src, dest_klass);

   if (not_subtype_ctrl != top()) {
     PreserveJVMState pjvms(this);

@@ -4777,6 +4776,7 @@ bool LibraryCallKit::inline_arraycopy() {
                     Deoptimization::Action_make_not_entrant);
       assert(stopped(), "Should be stopped");
     }
+  }
   {
     PreserveJVMState pjvms(this);
     set_control(_gvn.transform(slow_region));

@@ -4089,7 +4089,7 @@ Node *PhaseIdealLoop::get_late_ctrl( Node *n, Node *early ) {
       }
     } else {
       Node *sctrl = has_ctrl(s) ? get_ctrl(s) : s->in(0);
-      assert(sctrl != NULL || s->outcnt() == 0, "must have control");
+      assert(sctrl != NULL || !s->is_reachable_from_root(), "must have control");
       if (sctrl != NULL && !sctrl->is_top() && C->can_alias(s->adr_type(), load_alias_idx) && is_dominator(early, sctrl)) {
         LCA = dom_lca_for_get_late_ctrl(LCA, sctrl, n);
       }

@@ -40,6 +40,7 @@
 #include "opto/opaquenode.hpp"
 #include "opto/rootnode.hpp"
 #include "opto/subnode.hpp"
+#include "opto/subtypenode.hpp"
 #include "utilities/macros.hpp"

 //=============================================================================

@@ -656,6 +657,9 @@ Node *PhaseIdealLoop::conditional_move( Node *region ) {
   }
   assert(bol->Opcode() == Op_Bool, "Unexpected node");
   int cmp_op = bol->in(1)->Opcode();
+  if (cmp_op == Op_SubTypeCheck) { // SubTypeCheck expansion expects an IfNode
+    return NULL;
+  }
   // It is expensive to generate flags from a float compare.
   // Avoid duplicated float compare.
   if (phis > 1 && (cmp_op == Op_CmpF || cmp_op == Op_CmpD)) return NULL;

@@ -35,6 +35,7 @@
 #include "opto/compile.hpp"
 #include "opto/convertnode.hpp"
+#include "opto/graphKit.hpp"
 #include "opto/intrinsicnode.hpp"
 #include "opto/locknode.hpp"
 #include "opto/loopnode.hpp"
 #include "opto/macro.hpp"

@@ -46,6 +47,7 @@
 #include "opto/rootnode.hpp"
 #include "opto/runtime.hpp"
 #include "opto/subnode.hpp"
+#include "opto/subtypenode.hpp"
 #include "opto/type.hpp"
 #include "runtime/sharedRuntime.hpp"
 #include "utilities/macros.hpp"

@@ -2533,6 +2535,43 @@ void PhaseMacroExpand::expand_unlock_node(UnlockNode *unlock) {
   _igvn.replace_node(_memproj_fallthrough, mem_phi);
 }

+void PhaseMacroExpand::expand_subtypecheck_node(SubTypeCheckNode *check) {
+  assert(check->in(SubTypeCheckNode::Control) == NULL, "should be pinned");
+  Node* bol = check->unique_out();
+  Node* obj_or_subklass = check->in(SubTypeCheckNode::ObjOrSubKlass);
+  Node* superklass = check->in(SubTypeCheckNode::SuperKlass);
+  assert(bol->is_Bool() && bol->as_Bool()->_test._test == BoolTest::ne, "unexpected bool node");
+
+  for (DUIterator_Last imin, i = bol->last_outs(imin); i >= imin; --i) {
+    Node* iff = bol->last_out(i);
+    assert(iff->is_If(), "where's the if?");
+
+    if (iff->in(0)->is_top()) {
+      _igvn.replace_input_of(iff, 1, C->top());
+      continue;
+    }
+
+    Node* iftrue = iff->as_If()->proj_out(1);
+    Node* iffalse = iff->as_If()->proj_out(0);
+    Node* ctrl = iff->in(0);
+
+    Node* subklass = NULL;
+    if (_igvn.type(obj_or_subklass)->isa_klassptr()) {
+      subklass = obj_or_subklass;
+    } else {
+      Node* k_adr = basic_plus_adr(obj_or_subklass, oopDesc::klass_offset_in_bytes());
+      subklass = _igvn.transform(LoadKlassNode::make(_igvn, NULL, C->immutable_memory(), k_adr, TypeInstPtr::KLASS));
+    }
+
+    Node* not_subtype_ctrl = Phase::gen_subtype_check(subklass, superklass, &ctrl, NULL, _igvn);
+
+    _igvn.replace_input_of(iff, 0, C->top());
+    _igvn.replace_node(iftrue, not_subtype_ctrl);
+    _igvn.replace_node(iffalse, ctrl);
+  }
+  _igvn.replace_node(check, C->top());
+}
+
 //---------------------------eliminate_macro_nodes----------------------
 // Eliminate scalar replaced allocations and associated locks.
 void PhaseMacroExpand::eliminate_macro_nodes() {

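In other words: expansion walks every If hanging off the SubTypeCheck's Bool (a BoolTest::ne test, i.e. "not a subtype"), generates the full check at that If's control point via Phase::gen_subtype_check(), rewires the true projection to the returned not-a-subtype control and the false projection to the success control, and finally replaces the macro node itself with top.
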
@@ -2589,6 +2628,8 @@ void PhaseMacroExpand::eliminate_macro_nodes() {
       break;
     case Node::Class_OuterStripMinedLoop:
       break;
+    case Node::Class_SubTypeCheck:
+      break;
     default:
       assert(n->Opcode() == Op_LoopLimit ||
              n->Opcode() == Op_Opaque1 ||

@@ -2695,6 +2736,10 @@ bool PhaseMacroExpand::expand_macro_nodes() {
       expand_arraycopy_node(n->as_ArrayCopy());
       assert(C->macro_count() == (old_macro_count - 1), "expansion must have deleted one node from macro list");
       break;
+    case Node::Class_SubTypeCheck:
+      expand_subtypecheck_node(n->as_SubTypeCheck());
+      assert(C->macro_count() == (old_macro_count - 1), "expansion must have deleted one node from macro list");
+      break;
     }
     if (C->failing())  return true;
   }

@@ -30,6 +30,7 @@
 class AllocateNode;
 class AllocateArrayNode;
 class CallNode;
+class SubTypeCheckNode;
 class Node;
 class PhaseIterGVN;

@@ -183,6 +184,8 @@ private:

   void expand_arraycopy_node(ArrayCopyNode *ac);

+  void expand_subtypecheck_node(SubTypeCheckNode *check);
+
   int replace_input(Node *use, Node *oldref, Node *newref);
   void migrate_outs(Node *old, Node *target);
   void copy_call_debug_info(CallNode *oldcall, CallNode * newcall);

@@ -524,7 +524,7 @@ Node* PhaseMacroExpand::generate_arraycopy(ArrayCopyNode *ac, AllocateArrayNode*
     // Test S[] against D[], not S against D, because (probably)
     // the secondary supertype cache is less busy for S[] than S.
     // This usually only matters when D is an interface.
-    Node* not_subtype_ctrl = Phase::gen_subtype_check(src_klass, dest_klass, ctrl, mem, &_igvn);
+    Node* not_subtype_ctrl = Phase::gen_subtype_check(src_klass, dest_klass, ctrl, mem, _igvn);
     // Plug failing path into checked_oop_disjoint_arraycopy
     if (not_subtype_ctrl != top()) {
       Node* local_ctrl = not_subtype_ctrl;

@@ -146,6 +146,7 @@ class StartNode;
 class State;
 class StoreNode;
 class SubNode;
+class SubTypeCheckNode;
 class Type;
 class TypeNode;
 class UnlockNode;

@@ -706,6 +707,7 @@ public:
     DEFINE_CLASS_ID(Cmp, Sub, 0)
       DEFINE_CLASS_ID(FastLock, Cmp, 0)
       DEFINE_CLASS_ID(FastUnlock, Cmp, 1)
+      DEFINE_CLASS_ID(SubTypeCheck,Cmp, 2)

   DEFINE_CLASS_ID(MergeMem, Node, 7)
   DEFINE_CLASS_ID(Bool, Node, 8)

@@ -875,6 +877,7 @@ public:
   DEFINE_CLASS_QUERY(Start)
   DEFINE_CLASS_QUERY(Store)
   DEFINE_CLASS_QUERY(Sub)
+  DEFINE_CLASS_QUERY(SubTypeCheck)
   DEFINE_CLASS_QUERY(Type)
   DEFINE_CLASS_QUERY(Vector)
   DEFINE_CLASS_QUERY(LoadVector)

@@ -132,7 +132,7 @@ protected:
   // Object; if you wish to check an Object you need to load the Object's
   // class prior to coming here.
   // Used in GraphKit and PhaseMacroExpand
-  static Node* gen_subtype_check(Node* subklass, Node* superklass, Node** ctrl, MergeMemNode* mem, PhaseGVN* gvn);
+  static Node* gen_subtype_check(Node* subklass, Node* superklass, Node** ctrl, Node* mem, PhaseGVN& gvn);

 public:
   Compile * C;

@@ -1352,7 +1352,7 @@ Node *BoolNode::Ideal(PhaseGVN *phase, bool can_reshape) {
   Node *cmp = in(1);
   if( !cmp->is_Sub() ) return NULL;
   int cop = cmp->Opcode();
-  if( cop == Op_FastLock || cop == Op_FastUnlock) return NULL;
+  if( cop == Op_FastLock || cop == Op_FastUnlock || cmp->is_SubTypeCheck()) return NULL;
   Node *cmp1 = cmp->in(1);
   Node *cmp2 = cmp->in(2);
   if( !cmp1 ) return NULL;

src/hotspot/share/opto/subtypenode.cpp (new file, 165 lines)
@@ -0,0 +1,165 @@
+/*
+ * Copyright (c) 2020, Oracle and/or its affiliates. All rights reserved.
+ * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
+ *
+ * This code is free software; you can redistribute it and/or modify it
+ * under the terms of the GNU General Public License version 2 only, as
+ * published by the Free Software Foundation.
+ *
+ * This code is distributed in the hope that it will be useful, but WITHOUT
+ * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
+ * FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
+ * version 2 for more details (a copy is included in the LICENSE file that
+ * accompanied this code).
+ *
+ * You should have received a copy of the GNU General Public License version
+ * 2 along with this work; if not, write to the Free Software Foundation,
+ * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
+ *
+ * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
+ * or visit www.oracle.com if you need additional information or have any
+ * questions.
+ *
+ */
+
+#include "precompiled.hpp"
+#include "opto/addnode.hpp"
+#include "opto/connode.hpp"
+#include "opto/convertnode.hpp"
+#include "opto/phaseX.hpp"
+#include "opto/subnode.hpp"
+#include "opto/subtypenode.hpp"
+
+const Type* SubTypeCheckNode::sub(const Type* sub_t, const Type* super_t) const {
+  ciKlass* superk = super_t->is_klassptr()->klass();
+  ciKlass* subk = sub_t->isa_klassptr() ? sub_t->is_klassptr()->klass() : sub_t->is_oopptr()->klass();
+
+  bool xsuperk = super_t->is_klassptr()->klass_is_exact();
+  bool xsubk = sub_t->isa_klassptr() ? sub_t->is_klassptr()->klass_is_exact() : sub_t->is_oopptr()->klass_is_exact();
+
+  // Similar to logic in CmpPNode::sub()
+  if (superk && subk &&
+      superk->is_loaded() && !superk->is_interface() &&
+      subk->is_loaded() && !subk->is_interface() &&
+      (!superk->is_obj_array_klass() ||
+       !superk->as_obj_array_klass()->base_element_klass()->is_interface()) &&
+      (!subk->is_obj_array_klass() ||
+       !subk->as_obj_array_klass()->base_element_klass()->is_interface() ||
+       xsubk)) {
+    bool unrelated_classes = false;
+    if (superk->equals(subk)) {
+      // skip
+    } else if (superk->is_subtype_of(subk)) {
+      unrelated_classes = xsubk;
+    } else if (subk->is_subtype_of(superk)) {
+      // skip
+    } else {
+      unrelated_classes = true;
+    }
+    if (unrelated_classes) {
+      TypePtr::PTR jp = sub_t->is_ptr()->join_ptr(super_t->is_ptr()->_ptr);
+      if (jp != TypePtr::Null && jp != TypePtr::BotPTR) {
+        return TypeInt::CC_GT;
+      }
+    }
+  }
+
+  if (super_t->singleton()) {
+    if (subk != NULL) {
+      switch (Compile::current()->static_subtype_check(superk, subk)) {
+      case Compile::SSC_always_false:
+        return TypeInt::CC_GT;
+      case Compile::SSC_always_true:
+        return TypeInt::CC_EQ;
+      case Compile::SSC_easy_test:
+      case Compile::SSC_full_test:
+        break;
+      default:
+        ShouldNotReachHere();
+      }
+    }
+  }
+
+  return bottom_type();
+}
+
+Node *SubTypeCheckNode::Ideal(PhaseGVN *phase, bool can_reshape) {
+  // Verify that optimizing the subtype check to a simple code pattern
+  // when possible would not constant fold better
+#ifdef ASSERT
+  Node* obj_or_subklass = in(ObjOrSubKlass);
+  Node* superklass = in(SuperKlass);
+
+  if (obj_or_subklass == NULL ||
+      superklass == NULL) {
+    return NULL;
+  }
+
+  const Type* sub_t = phase->type(obj_or_subklass);
+  const Type* super_t = phase->type(superklass);
+
+  if (!super_t->isa_klassptr() ||
+      (!sub_t->isa_klassptr() && !sub_t->isa_oopptr())) {
+    return NULL;
+  }
+
+  ciKlass* superk = super_t->is_klassptr()->klass();
+  ciKlass* subk = sub_t->isa_klassptr() ? sub_t->is_klassptr()->klass() : sub_t->is_oopptr()->klass();
+
+  if (super_t->singleton() && subk != NULL && phase->C->static_subtype_check(superk, subk) == Compile::SSC_easy_test) {
+    Node* subklass = NULL;
+    if (sub_t->isa_oopptr()) {
+      Node* adr = phase->transform(new AddPNode(obj_or_subklass, obj_or_subklass, phase->MakeConX(oopDesc::klass_offset_in_bytes())));
+      subklass = phase->transform(LoadKlassNode::make(*phase, NULL, phase->C->immutable_memory(), adr, TypeInstPtr::KLASS));
+    } else {
+      subklass = obj_or_subklass;
+    }
+    Node* res = new CmpPNode(subklass, superklass);
+    const Type* t = phase->type(phase->transform(res));
+    assert((Value(phase) == t) || (t != TypeInt::CC_GT && t != TypeInt::CC_EQ), "missing Value() optimization");
+    if (phase->is_IterGVN()) {
+      phase->is_IterGVN()->_worklist.push(res);
+    }
+    return NULL;
+  }
+
+  if (super_t->singleton() && subk != NULL && phase->C->static_subtype_check(superk, subk) == Compile::SSC_full_test) {
+    Node* subklass = NULL;
+    if (sub_t->isa_oopptr()) {
+      Node* adr = phase->transform(new AddPNode(obj_or_subklass, obj_or_subklass, phase->MakeConX(oopDesc::klass_offset_in_bytes())));
+      subklass = phase->transform(LoadKlassNode::make(*phase, NULL, phase->C->immutable_memory(), adr, TypeInstPtr::KLASS));
+    } else {
+      subklass = obj_or_subklass;
+    }
+
+    Node *p1 = phase->transform(new AddPNode(superklass, superklass, phase->MakeConX(in_bytes(Klass::super_check_offset_offset()))));
+    Node* m = phase->C->immutable_memory();
+    Node *chk_off = phase->transform(new LoadINode(NULL, m, p1, phase->type(p1)->is_ptr(), TypeInt::INT, MemNode::unordered));
+    int cacheoff_con = in_bytes(Klass::secondary_super_cache_offset());
+    bool might_be_cache = (phase->find_int_con(chk_off, cacheoff_con) == cacheoff_con);
+
+    if (might_be_cache) {
+      return NULL;
+    }
+
+    Node *chk_off_X = chk_off;
+#ifdef _LP64
+    chk_off_X = phase->transform(new ConvI2LNode(chk_off_X));
+#endif
+    Node *p2 = phase->transform(new AddPNode(subklass,subklass,chk_off_X));
+    Node *kmem = phase->C->immutable_memory();
+    Node *nkls = phase->transform(LoadKlassNode::make(*phase, NULL, kmem, p2, phase->type(p2)->is_ptr(), TypeKlassPtr::OBJECT_OR_NULL));
+
+    Node* res = new CmpPNode(superklass, nkls);
+    const Type* t = phase->type(phase->transform(res));
+    assert((Value(phase) == t) || (t != TypeInt::CC_GT && t != TypeInt::CC_EQ), "missing Value() optimization");
+    if (phase->is_IterGVN()) {
+      phase->is_IterGVN()->_worklist.push(res);
+    }
+    return NULL;
+  }
+#endif
+
+  return NULL;
+}

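Note that the whole body of SubTypeCheckNode::Ideal() above sits under #ifdef ASSERT and always returns NULL: it performs no transformation of its own and only checks, in debug builds, that sub() (the Value() logic) folds the check at least as well as the simple CmpP patterns that a parse-time expansion would have produced.
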
src/hotspot/share/opto/subtypenode.hpp (new file, 54 lines)
@@ -0,0 +1,54 @@
+/*
+ * Copyright (c) 2020, Oracle and/or its affiliates. All rights reserved.
+ * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
+ *
+ * This code is free software; you can redistribute it and/or modify it
+ * under the terms of the GNU General Public License version 2 only, as
+ * published by the Free Software Foundation.
+ *
+ * This code is distributed in the hope that it will be useful, but WITHOUT
+ * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
+ * FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
+ * version 2 for more details (a copy is included in the LICENSE file that
+ * accompanied this code).
+ *
+ * You should have received a copy of the GNU General Public License version
+ * 2 along with this work; if not, write to the Free Software Foundation,
+ * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
+ *
+ * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
+ * or visit www.oracle.com if you need additional information or have any
+ * questions.
+ *
+ */
+
+#ifndef SHARE_OPTO_SUBTYPENODE_HPP
+#define SHARE_OPTO_SUBTYPENODE_HPP
+
+#include "opto/node.hpp"
+
+class SubTypeCheckNode : public CmpNode {
+public:
+  enum {
+    Control,
+    ObjOrSubKlass,
+    SuperKlass
+  };
+
+  SubTypeCheckNode(Compile* C, Node* obj_or_subklass, Node* superklass)
+    : CmpNode(obj_or_subklass, superklass) {
+    init_class_id(Class_SubTypeCheck);
+    init_flags(Flag_is_macro);
+    C->add_macro_node(this);
+  }
+
+  Node* Ideal(PhaseGVN *phase, bool can_reshape);
+  virtual const Type* sub(const Type*, const Type*) const;
+  Node* Identity(PhaseGVN* phase) { return this; }
+
+  virtual int Opcode() const;
+  const Type* bottom_type() const { return TypeInt::CC; }
+  bool depends_only_on_test() const { return false; };
+};
+
+#endif // SHARE_OPTO_SUBTYPENODE_HPP

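For orientation, a minimal usage sketch of the new node (illustration only, not part of the patch), mirroring the GraphKit::gen_subtype_check() hunk above; it assumes the usual GraphKit context where C, _gvn, control() and create_and_xform_if() are available:

    // Emit the check as a single macro node; its constructor registers it with the compile.
    Node* check = _gvn.transform(new SubTypeCheckNode(C, obj_or_subklass, superklass));
    // Branch on it like on any CmpNode result: eq means "obj_or_subklass is a subtype".
    Node* bol   = _gvn.transform(new BoolNode(check, BoolTest::eq));
    IfNode* iff = create_and_xform_if(control(), bol, PROB_STATIC_FREQUENT, COUNT_UNKNOWN);
    set_control(_gvn.transform(new IfTrueNode(iff)));               // success path
    Node* not_subtype_ctrl = _gvn.transform(new IfFalseNode(iff));  // failure path
    // PhaseMacroExpand::expand_subtypecheck_node() later replaces this shape with the
    // real display / secondary-super-cache test sequence.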