8026796: Make replace_in_map() on parent maps generic

Propagate node replacements along control flow edges to callers
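
To make the intent easier to follow, here is a minimal, self-contained C++ sketch of the bookkeeping idea: a map carries a list of (initial, improved) node pairs that can be replayed in a caller's map and snapshotted/restored around a branch. The names and types below (Node, ReplacedNodesSketch, record, apply) are invented for illustration only; they are not the HotSpot ReplacedNodes API that this change introduces.

    // Illustrative sketch only: a simplified stand-in for the bookkeeping this
    // change adds. The real Node, replace_in_map() and ReplacedNodes in HotSpot
    // are much richer; the names below are invented for illustration.
    #include <cstdio>
    #include <utility>
    #include <vector>

    struct Node { int id; };   // stand-in for an ideal-graph node

    // Records (initial -> improved) node pairs attached to a map, so a caller
    // can replay replacements made while parsing an inlined callee instead of
    // losing that information at the call boundary.
    class ReplacedNodesSketch {
      std::vector<std::pair<Node*, Node*> > _entries;
    public:
      void record(Node* initial, Node* improved) {
        _entries.push_back(std::make_pair(initial, improved));
      }
      // Replay every recorded replacement in a caller's map (here just a vector).
      void apply(std::vector<Node*>& map) const {
        for (size_t i = 0; i < _entries.size(); i++) {
          for (size_t j = 0; j < map.size(); j++) {
            if (map[j] == _entries[i].first) map[j] = _entries[i].second;
          }
        }
      }
      size_t length() const { return _entries.size(); }
    };

    int main() {
      Node receiver   = { 1 };
      Node exact_recv = { 2 };   // e.g. the result of a successful type check

      std::vector<Node*> caller_map;
      caller_map.push_back(&receiver);

      // A type check (or inlined callee) improved 'receiver'; record it.
      ReplacedNodesSketch replaced;
      replaced.record(&receiver, &exact_recv);

      // Snapshot before a branch so the state can be restored on the other
      // path, mirroring the clone()/set_replaced_nodes() calls in the diff.
      ReplacedNodesSketch snapshot = replaced;

      replaced.apply(caller_map);   // propagate the replacement to the caller
      std::printf("caller now sees node %d\n", caller_map[0]->id);   // prints 2

      replaced = snapshot;          // restore before taking the other branch
      std::printf("snapshot has %zu entry\n", replaced.length());
      return 0;
    }

This mirrors what the PredictedCallGenerator hunk below does with kit.map()->replaced_nodes(), clone() and set_replaced_nodes(): the replacements recorded on one branch of the diamond are discarded before the other branch is emitted.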

Reviewed-by: kvn, vlivanov
Roland Westrelin 2014-06-10 13:37:16 +02:00
parent ea0470002e
commit 2838bcc3bf
18 changed files with 463 additions and 134 deletions


@@ -63,12 +63,12 @@ public:
}
virtual bool is_parse() const { return true; }
virtual JVMState* generate(JVMState* jvms, Parse* parent_parser);
virtual JVMState* generate(JVMState* jvms);
int is_osr() { return _is_osr; }
};
JVMState* ParseGenerator::generate(JVMState* jvms, Parse* parent_parser) {
JVMState* ParseGenerator::generate(JVMState* jvms) {
Compile* C = Compile::current();
C->print_inlining_update(this);
@@ -81,7 +81,7 @@ JVMState* ParseGenerator::generate(JVMState* jvms, Parse* parent_parser) {
return NULL; // bailing out of the compile; do not try to parse
}
Parse parser(jvms, method(), _expected_uses, parent_parser);
Parse parser(jvms, method(), _expected_uses);
// Grab signature for matching/allocation
#ifdef ASSERT
if (parser.tf() != (parser.depth() == 1 ? C->tf() : tf())) {
@@ -120,12 +120,12 @@ class DirectCallGenerator : public CallGenerator {
_separate_io_proj(separate_io_proj)
{
}
virtual JVMState* generate(JVMState* jvms, Parse* parent_parser);
virtual JVMState* generate(JVMState* jvms);
CallStaticJavaNode* call_node() const { return _call_node; }
};
JVMState* DirectCallGenerator::generate(JVMState* jvms, Parse* parent_parser) {
JVMState* DirectCallGenerator::generate(JVMState* jvms) {
GraphKit kit(jvms);
kit.C->print_inlining_update(this);
bool is_static = method()->is_static();
@@ -173,10 +173,10 @@ public:
vtable_index >= 0, "either invalid or usable");
}
virtual bool is_virtual() const { return true; }
virtual JVMState* generate(JVMState* jvms, Parse* parent_parser);
virtual JVMState* generate(JVMState* jvms);
};
JVMState* VirtualCallGenerator::generate(JVMState* jvms, Parse* parent_parser) {
JVMState* VirtualCallGenerator::generate(JVMState* jvms) {
GraphKit kit(jvms);
Node* receiver = kit.argument(0);
@@ -283,7 +283,7 @@ class LateInlineCallGenerator : public DirectCallGenerator {
// Convert the CallStaticJava into an inline
virtual void do_late_inline();
virtual JVMState* generate(JVMState* jvms, Parse* parent_parser) {
virtual JVMState* generate(JVMState* jvms) {
Compile *C = Compile::current();
C->log_inline_id(this);
@@ -298,7 +298,7 @@ class LateInlineCallGenerator : public DirectCallGenerator {
// that the late inlining logic can distinguish between fall
// through and exceptional uses of the memory and io projections
// as is done for allocations and macro expansion.
return DirectCallGenerator::generate(jvms, parent_parser);
return DirectCallGenerator::generate(jvms);
}
virtual void print_inlining_late(const char* msg) {
@@ -399,7 +399,7 @@ void LateInlineCallGenerator::do_late_inline() {
}
// Now perform the inlining using the synthesized JVMState
JVMState* new_jvms = _inline_cg->generate(jvms, NULL);
JVMState* new_jvms = _inline_cg->generate(jvms);
if (new_jvms == NULL) return; // no change
if (C->failing()) return;
@@ -417,7 +417,7 @@ void LateInlineCallGenerator::do_late_inline() {
C->env()->notice_inlined_method(_inline_cg->method());
C->set_inlining_progress(true);
kit.replace_call(call, result);
kit.replace_call(call, result, true);
}
@@ -439,8 +439,8 @@ class LateInlineMHCallGenerator : public LateInlineCallGenerator {
virtual bool is_mh_late_inline() const { return true; }
virtual JVMState* generate(JVMState* jvms, Parse* parent_parser) {
JVMState* new_jvms = LateInlineCallGenerator::generate(jvms, parent_parser);
virtual JVMState* generate(JVMState* jvms) {
JVMState* new_jvms = LateInlineCallGenerator::generate(jvms);
Compile* C = Compile::current();
if (_input_not_const) {
@@ -486,14 +486,14 @@ class LateInlineStringCallGenerator : public LateInlineCallGenerator {
LateInlineStringCallGenerator(ciMethod* method, CallGenerator* inline_cg) :
LateInlineCallGenerator(method, inline_cg) {}
virtual JVMState* generate(JVMState* jvms, Parse* parent_parser) {
virtual JVMState* generate(JVMState* jvms) {
Compile *C = Compile::current();
C->log_inline_id(this);
C->add_string_late_inline(this);
JVMState* new_jvms = DirectCallGenerator::generate(jvms, parent_parser);
JVMState* new_jvms = DirectCallGenerator::generate(jvms);
return new_jvms;
}
@@ -510,14 +510,14 @@ class LateInlineBoxingCallGenerator : public LateInlineCallGenerator {
LateInlineBoxingCallGenerator(ciMethod* method, CallGenerator* inline_cg) :
LateInlineCallGenerator(method, inline_cg) {}
virtual JVMState* generate(JVMState* jvms, Parse* parent_parser) {
virtual JVMState* generate(JVMState* jvms) {
Compile *C = Compile::current();
C->log_inline_id(this);
C->add_boxing_late_inline(this);
JVMState* new_jvms = DirectCallGenerator::generate(jvms, parent_parser);
JVMState* new_jvms = DirectCallGenerator::generate(jvms);
return new_jvms;
}
};
@@ -553,7 +553,7 @@ public:
virtual bool is_virtual() const { return _is_virtual; }
virtual bool is_deferred() const { return true; }
virtual JVMState* generate(JVMState* jvms, Parse* parent_parser);
virtual JVMState* generate(JVMState* jvms);
};
@@ -563,14 +563,14 @@ CallGenerator* CallGenerator::for_warm_call(WarmCallInfo* ci,
return new WarmCallGenerator(ci, if_cold, if_hot);
}
JVMState* WarmCallGenerator::generate(JVMState* jvms, Parse* parent_parser) {
JVMState* WarmCallGenerator::generate(JVMState* jvms) {
Compile* C = Compile::current();
C->print_inlining_update(this);
if (C->log() != NULL) {
C->log()->elem("warm_call bci='%d'", jvms->bci());
}
jvms = _if_cold->generate(jvms, parent_parser);
jvms = _if_cold->generate(jvms);
if (jvms != NULL) {
Node* m = jvms->map()->control();
if (m->is_CatchProj()) m = m->in(0); else m = C->top();
@@ -631,7 +631,7 @@ public:
virtual bool is_inline() const { return _if_hit->is_inline(); }
virtual bool is_deferred() const { return _if_hit->is_deferred(); }
virtual JVMState* generate(JVMState* jvms, Parse* parent_parser);
virtual JVMState* generate(JVMState* jvms);
};
@@ -643,14 +643,13 @@ CallGenerator* CallGenerator::for_predicted_call(ciKlass* predicted_receiver,
}
JVMState* PredictedCallGenerator::generate(JVMState* jvms, Parse* parent_parser) {
JVMState* PredictedCallGenerator::generate(JVMState* jvms) {
GraphKit kit(jvms);
kit.C->print_inlining_update(this);
PhaseGVN& gvn = kit.gvn();
// We need an explicit receiver null_check before checking its type.
// We share a map with the caller, so his JVMS gets adjusted.
Node* receiver = kit.argument(0);
CompileLog* log = kit.C->log();
if (log != NULL) {
log->elem("predicted_call bci='%d' klass='%d'",
@@ -662,6 +661,10 @@ JVMState* PredictedCallGenerator::generate(JVMState* jvms, Parse* parent_parser)
return kit.transfer_exceptions_into_jvms();
}
// Make a copy of the replaced nodes in case we need to restore them
ReplacedNodes replaced_nodes = kit.map()->replaced_nodes();
replaced_nodes.clone();
Node* exact_receiver = receiver; // will get updated in place...
Node* slow_ctl = kit.type_check_receiver(receiver,
_predicted_receiver, _hit_prob,
@@ -672,7 +675,7 @@ JVMState* PredictedCallGenerator::generate(JVMState* jvms, Parse* parent_parser)
{ PreserveJVMState pjvms(&kit);
kit.set_control(slow_ctl);
if (!kit.stopped()) {
slow_jvms = _if_missed->generate(kit.sync_jvms(), parent_parser);
slow_jvms = _if_missed->generate(kit.sync_jvms());
if (kit.failing())
return NULL; // might happen because of NodeCountInliningCutoff
assert(slow_jvms != NULL, "must be");
@@ -693,12 +696,12 @@ JVMState* PredictedCallGenerator::generate(JVMState* jvms, Parse* parent_parser)
kit.replace_in_map(receiver, exact_receiver);
// Make the hot call:
JVMState* new_jvms = _if_hit->generate(kit.sync_jvms(), parent_parser);
JVMState* new_jvms = _if_hit->generate(kit.sync_jvms());
if (new_jvms == NULL) {
// Inline failed, so make a direct call.
assert(_if_hit->is_inline(), "must have been a failed inline");
CallGenerator* cg = CallGenerator::for_direct_call(_if_hit->method());
new_jvms = cg->generate(kit.sync_jvms(), parent_parser);
new_jvms = cg->generate(kit.sync_jvms());
}
kit.add_exception_states_from(new_jvms);
kit.set_jvms(new_jvms);
@@ -715,6 +718,11 @@ JVMState* PredictedCallGenerator::generate(JVMState* jvms, Parse* parent_parser)
return kit.transfer_exceptions_into_jvms();
}
// There are 2 branches and the replaced nodes are only valid on
// one: restore the replaced nodes to what they were before the
// branch.
kit.map()->set_replaced_nodes(replaced_nodes);
// Finish the diamond.
kit.C->set_has_split_ifs(true); // Has chance for split-if optimization
RegionNode* region = new RegionNode(3);
@@ -901,7 +909,7 @@ public:
virtual bool is_inlined() const { return true; }
virtual bool is_intrinsic() const { return true; }
virtual JVMState* generate(JVMState* jvms, Parse* parent_parser);
virtual JVMState* generate(JVMState* jvms);
};
@@ -911,7 +919,7 @@ CallGenerator* CallGenerator::for_predicted_intrinsic(CallGenerator* intrinsic,
}
JVMState* PredictedIntrinsicGenerator::generate(JVMState* jvms, Parse* parent_parser) {
JVMState* PredictedIntrinsicGenerator::generate(JVMState* jvms) {
GraphKit kit(jvms);
PhaseGVN& gvn = kit.gvn();
@@ -932,7 +940,7 @@ JVMState* PredictedIntrinsicGenerator::generate(JVMState* jvms, Parse* parent_pa
PreserveJVMState pjvms(&kit);
kit.set_control(slow_ctl);
if (!kit.stopped()) {
slow_jvms = _cg->generate(kit.sync_jvms(), parent_parser);
slow_jvms = _cg->generate(kit.sync_jvms());
if (kit.failing())
return NULL; // might happen because of NodeCountInliningCutoff
assert(slow_jvms != NULL, "must be");
@@ -950,12 +958,12 @@ JVMState* PredictedIntrinsicGenerator::generate(JVMState* jvms, Parse* parent_pa
}
// Generate intrinsic code:
JVMState* new_jvms = _intrinsic->generate(kit.sync_jvms(), parent_parser);
JVMState* new_jvms = _intrinsic->generate(kit.sync_jvms());
if (new_jvms == NULL) {
// Intrinsic failed, so use slow code or make a direct call.
if (slow_map == NULL) {
CallGenerator* cg = CallGenerator::for_direct_call(method());
new_jvms = cg->generate(kit.sync_jvms(), parent_parser);
new_jvms = cg->generate(kit.sync_jvms());
} else {
kit.set_jvms(slow_jvms);
return kit.transfer_exceptions_into_jvms();
@@ -1025,7 +1033,7 @@ public:
virtual bool is_virtual() const { ShouldNotReachHere(); return false; }
virtual bool is_trap() const { return true; }
virtual JVMState* generate(JVMState* jvms, Parse* parent_parser);
virtual JVMState* generate(JVMState* jvms);
};
@@ -1037,7 +1045,7 @@ CallGenerator::for_uncommon_trap(ciMethod* m,
}
JVMState* UncommonTrapCallGenerator::generate(JVMState* jvms, Parse* parent_parser) {
JVMState* UncommonTrapCallGenerator::generate(JVMState* jvms) {
GraphKit kit(jvms);
kit.C->print_inlining_update(this);
// Take the trap with arguments pushed on the stack. (Cf. null_check_receiver).