8202164: Remove some unneeded BoolObjectClosure* is_alive parameters

Reviewed-by: kbarrett, stefank
This commit is contained in:
Coleen Phillimore 2018-04-25 17:50:32 -04:00
parent 465eea6d80
commit 8a427a07d8
6 changed files with 38 additions and 40 deletions

View file

@@ -285,7 +285,7 @@ private:
protected: protected:
virtual bool do_unloading_oops(address low_boundary, BoolObjectClosure* is_alive, bool unloading_occurred); virtual bool do_unloading_oops(address low_boundary, BoolObjectClosure* is_alive, bool unloading_occurred);
virtual bool do_unloading_jvmci(BoolObjectClosure* is_alive, bool unloading_occurred) { return false; } virtual bool do_unloading_jvmci(bool unloading_occurred) { return false; }
}; };

View file

@@ -525,7 +525,7 @@ void CompiledMethod::do_unloading(BoolObjectClosure* is_alive, bool unloading_oc
} }
#if INCLUDE_JVMCI #if INCLUDE_JVMCI
if (do_unloading_jvmci(is_alive, unloading_occurred)) { if (do_unloading_jvmci(unloading_occurred)) {
return; return;
} }
#endif #endif
@@ -535,7 +535,7 @@ void CompiledMethod::do_unloading(BoolObjectClosure* is_alive, bool unloading_oc
} }
template <class CompiledICorStaticCall> template <class CompiledICorStaticCall>
static bool clean_if_nmethod_is_unloaded(CompiledICorStaticCall *ic, address addr, BoolObjectClosure *is_alive, CompiledMethod* from) { static bool clean_if_nmethod_is_unloaded(CompiledICorStaticCall *ic, address addr, CompiledMethod* from) {
// Ok, to lookup references to zombies here // Ok, to lookup references to zombies here
CodeBlob *cb = CodeCache::find_blob_unsafe(addr); CodeBlob *cb = CodeCache::find_blob_unsafe(addr);
CompiledMethod* nm = (cb != NULL) ? cb->as_compiled_method_or_null() : NULL; CompiledMethod* nm = (cb != NULL) ? cb->as_compiled_method_or_null() : NULL;
@@ -555,12 +555,12 @@ static bool clean_if_nmethod_is_unloaded(CompiledICorStaticCall *ic, address add
return false; return false;
} }
static bool clean_if_nmethod_is_unloaded(CompiledIC *ic, BoolObjectClosure *is_alive, CompiledMethod* from) { static bool clean_if_nmethod_is_unloaded(CompiledIC *ic, CompiledMethod* from) {
return clean_if_nmethod_is_unloaded(ic, ic->ic_destination(), is_alive, from); return clean_if_nmethod_is_unloaded(ic, ic->ic_destination(), from);
} }
static bool clean_if_nmethod_is_unloaded(CompiledStaticCall *csc, BoolObjectClosure *is_alive, CompiledMethod* from) { static bool clean_if_nmethod_is_unloaded(CompiledStaticCall *csc, CompiledMethod* from) {
return clean_if_nmethod_is_unloaded(csc, csc->destination(), is_alive, from); return clean_if_nmethod_is_unloaded(csc, csc->destination(), from);
} }
bool CompiledMethod::do_unloading_parallel(BoolObjectClosure* is_alive, bool unloading_occurred) { bool CompiledMethod::do_unloading_parallel(BoolObjectClosure* is_alive, bool unloading_occurred) {
@@ -608,15 +608,15 @@ bool CompiledMethod::do_unloading_parallel(BoolObjectClosure* is_alive, bool unl
clean_ic_if_metadata_is_dead(CompiledIC_at(&iter)); clean_ic_if_metadata_is_dead(CompiledIC_at(&iter));
} }
postponed |= clean_if_nmethod_is_unloaded(CompiledIC_at(&iter), is_alive, this); postponed |= clean_if_nmethod_is_unloaded(CompiledIC_at(&iter), this);
break; break;
case relocInfo::opt_virtual_call_type: case relocInfo::opt_virtual_call_type:
postponed |= clean_if_nmethod_is_unloaded(CompiledIC_at(&iter), is_alive, this); postponed |= clean_if_nmethod_is_unloaded(CompiledIC_at(&iter), this);
break; break;
case relocInfo::static_call_type: case relocInfo::static_call_type:
postponed |= clean_if_nmethod_is_unloaded(compiledStaticCall_at(iter.reloc()), is_alive, this); postponed |= clean_if_nmethod_is_unloaded(compiledStaticCall_at(iter.reloc()), this);
break; break;
case relocInfo::oop_type: case relocInfo::oop_type:
@@ -636,7 +636,7 @@ bool CompiledMethod::do_unloading_parallel(BoolObjectClosure* is_alive, bool unl
} }
#if INCLUDE_JVMCI #if INCLUDE_JVMCI
if (do_unloading_jvmci(is_alive, unloading_occurred)) { if (do_unloading_jvmci(unloading_occurred)) {
return postponed; return postponed;
} }
#endif #endif
@@ -647,7 +647,7 @@ bool CompiledMethod::do_unloading_parallel(BoolObjectClosure* is_alive, bool unl
return postponed; return postponed;
} }
void CompiledMethod::do_unloading_parallel_postponed(BoolObjectClosure* is_alive, bool unloading_occurred) { void CompiledMethod::do_unloading_parallel_postponed() {
ResourceMark rm; ResourceMark rm;
// Make sure the oop's ready to receive visitors // Make sure the oop's ready to receive visitors
@@ -671,15 +671,15 @@ void CompiledMethod::do_unloading_parallel_postponed(BoolObjectClosure* is_alive
switch (iter.type()) { switch (iter.type()) {
case relocInfo::virtual_call_type: case relocInfo::virtual_call_type:
clean_if_nmethod_is_unloaded(CompiledIC_at(&iter), is_alive, this); clean_if_nmethod_is_unloaded(CompiledIC_at(&iter), this);
break; break;
case relocInfo::opt_virtual_call_type: case relocInfo::opt_virtual_call_type:
clean_if_nmethod_is_unloaded(CompiledIC_at(&iter), is_alive, this); clean_if_nmethod_is_unloaded(CompiledIC_at(&iter), this);
break; break;
case relocInfo::static_call_type: case relocInfo::static_call_type:
clean_if_nmethod_is_unloaded(compiledStaticCall_at(iter.reloc()), is_alive, this); clean_if_nmethod_is_unloaded(compiledStaticCall_at(iter.reloc()), this);
break; break;
default: default:

View file

@@ -372,7 +372,7 @@ public:
virtual void do_unloading(BoolObjectClosure* is_alive, bool unloading_occurred); virtual void do_unloading(BoolObjectClosure* is_alive, bool unloading_occurred);
// The parallel versions are used by G1. // The parallel versions are used by G1.
virtual bool do_unloading_parallel(BoolObjectClosure* is_alive, bool unloading_occurred); virtual bool do_unloading_parallel(BoolObjectClosure* is_alive, bool unloading_occurred);
virtual void do_unloading_parallel_postponed(BoolObjectClosure* is_alive, bool unloading_occurred); virtual void do_unloading_parallel_postponed();
static unsigned char global_unloading_clock() { return _global_unloading_clock; } static unsigned char global_unloading_clock() { return _global_unloading_clock; }
static void increase_unloading_clock(); static void increase_unloading_clock();
@@ -383,7 +383,7 @@ public:
protected: protected:
virtual bool do_unloading_oops(address low_boundary, BoolObjectClosure* is_alive, bool unloading_occurred) = 0; virtual bool do_unloading_oops(address low_boundary, BoolObjectClosure* is_alive, bool unloading_occurred) = 0;
#if INCLUDE_JVMCI #if INCLUDE_JVMCI
virtual bool do_unloading_jvmci(BoolObjectClosure* is_alive, bool unloading_occurred) = 0; virtual bool do_unloading_jvmci(bool unloading_occurred) = 0;
#endif #endif
private: private:

View file

@@ -1028,17 +1028,16 @@ void nmethod::inc_decompile_count() {
mdo->inc_decompile_count(); mdo->inc_decompile_count();
} }
void nmethod::make_unloaded(BoolObjectClosure* is_alive, oop cause) { void nmethod::make_unloaded(oop cause) {
post_compiled_method_unload(); post_compiled_method_unload();
// Since this nmethod is being unloaded, make sure that dependencies // This nmethod is being unloaded, make sure that dependencies
// recorded in instanceKlasses get flushed and pass non-NULL closure to // recorded in instanceKlasses get flushed.
// indicate that this work is being done during a GC. // Since this work is being done during a GC, defer deleting dependencies from the
// InstanceKlass.
assert(Universe::heap()->is_gc_active(), "should only be called during gc"); assert(Universe::heap()->is_gc_active(), "should only be called during gc");
assert(is_alive != NULL, "Should be non-NULL"); flush_dependencies(/*delete_immediately*/false);
// A non-NULL is_alive closure indicates that this is being called during GC.
flush_dependencies(is_alive);
// Break cycle between nmethod & method // Break cycle between nmethod & method
LogTarget(Trace, class, unload) lt; LogTarget(Trace, class, unload) lt;
@@ -1261,7 +1260,7 @@ bool nmethod::make_not_entrant_or_zombie(int state) {
if (nmethod_needs_unregister) { if (nmethod_needs_unregister) {
Universe::heap()->unregister_nmethod(this); Universe::heap()->unregister_nmethod(this);
} }
flush_dependencies(NULL); flush_dependencies(/*delete_immediately*/true);
} }
// zombie only - if a JVMTI agent has enabled the CompiledMethodUnload // zombie only - if a JVMTI agent has enabled the CompiledMethodUnload
@@ -1344,13 +1343,13 @@ void nmethod::flush() {
// of dependencies must happen during phase 1 since after GC any // of dependencies must happen during phase 1 since after GC any
// dependencies in the unloaded nmethod won't be updated, so // dependencies in the unloaded nmethod won't be updated, so
// traversing the dependency information in unsafe. In that case this // traversing the dependency information in unsafe. In that case this
// function is called with a non-NULL argument and this function only // function is called with a boolean argument and this function only
// notifies instanceKlasses that are reachable // notifies instanceKlasses that are reachable
void nmethod::flush_dependencies(BoolObjectClosure* is_alive) { void nmethod::flush_dependencies(bool delete_immediately) {
assert_locked_or_safepoint(CodeCache_lock); assert_locked_or_safepoint(CodeCache_lock);
assert(Universe::heap()->is_gc_active() == (is_alive != NULL), assert(Universe::heap()->is_gc_active() != delete_immediately,
"is_alive is non-NULL if and only if we are called during GC"); "delete_immediately is false if and only if we are called during GC");
if (!has_flushed_dependencies()) { if (!has_flushed_dependencies()) {
set_has_flushed_dependencies(); set_has_flushed_dependencies();
for (Dependencies::DepStream deps(this); deps.next(); ) { for (Dependencies::DepStream deps(this); deps.next(); ) {
@@ -1363,13 +1362,12 @@ void nmethod::flush_dependencies(BoolObjectClosure* is_alive) {
if (klass == NULL) { if (klass == NULL) {
continue; // ignore things like evol_method continue; // ignore things like evol_method
} }
// During GC the is_alive closure is non-NULL, and is used to // During GC delete_immediately is false, and liveness
// determine liveness of dependees that need to be updated. // of dependee determines class that needs to be updated.
if (is_alive == NULL || klass->is_loader_alive()) { if (delete_immediately || klass->is_loader_alive()) {
// The GC defers deletion of this entry, since there might be multiple threads // The GC defers deletion of this entry, since there might be multiple threads
// iterating over the _dependencies graph. Other call paths are single-threaded // iterating over the _dependencies graph. Other call paths are single-threaded
// and may delete it immediately. // and may delete it immediately.
bool delete_immediately = is_alive == NULL;
InstanceKlass::cast(klass)->remove_dependent_nmethod(this, delete_immediately); InstanceKlass::cast(klass)->remove_dependent_nmethod(this, delete_immediately);
} }
} }
@@ -1390,7 +1388,7 @@ bool nmethod::can_unload(BoolObjectClosure* is_alive, oop* root, bool unloading_
// simply because one of its constant oops has gone dead. // simply because one of its constant oops has gone dead.
// No actual classes need to be unloaded in order for this to occur. // No actual classes need to be unloaded in order for this to occur.
assert(unloading_occurred || ScavengeRootsInCode, "Inconsistency in unloading"); assert(unloading_occurred || ScavengeRootsInCode, "Inconsistency in unloading");
make_unloaded(is_alive, obj); make_unloaded(obj);
return true; return true;
} }
@@ -1516,12 +1514,12 @@ bool nmethod::do_unloading_oops(address low_boundary, BoolObjectClosure* is_aliv
} }
#if INCLUDE_JVMCI #if INCLUDE_JVMCI
bool nmethod::do_unloading_jvmci(BoolObjectClosure* is_alive, bool unloading_occurred) { bool nmethod::do_unloading_jvmci(bool unloading_occurred) {
if (_jvmci_installed_code != NULL) { if (_jvmci_installed_code != NULL) {
if (JNIHandles::is_global_weak_cleared(_jvmci_installed_code)) { if (JNIHandles::is_global_weak_cleared(_jvmci_installed_code)) {
if (_jvmci_installed_code_triggers_unloading) { if (_jvmci_installed_code_triggers_unloading) {
// jweak reference processing has already cleared the referent // jweak reference processing has already cleared the referent
make_unloaded(is_alive, NULL); make_unloaded(NULL);
return true; return true;
} else { } else {
clear_jvmci_installed_code(); clear_jvmci_installed_code();

View file

@@ -349,10 +349,10 @@ class nmethod : public CompiledMethod {
return _state; return _state;
} }
void make_unloaded(BoolObjectClosure* is_alive, oop cause); void make_unloaded(oop cause);
bool has_dependencies() { return dependencies_size() != 0; } bool has_dependencies() { return dependencies_size() != 0; }
void flush_dependencies(BoolObjectClosure* is_alive); void flush_dependencies(bool delete_immediately);
bool has_flushed_dependencies() { return _has_flushed_dependencies; } bool has_flushed_dependencies() { return _has_flushed_dependencies; }
void set_has_flushed_dependencies() { void set_has_flushed_dependencies() {
assert(!has_flushed_dependencies(), "should only happen once"); assert(!has_flushed_dependencies(), "should only happen once");
@@ -488,7 +488,7 @@ public:
#if INCLUDE_JVMCI #if INCLUDE_JVMCI
// See comment for _jvmci_installed_code_triggers_unloading field. // See comment for _jvmci_installed_code_triggers_unloading field.
// Returns whether this nmethod was unloaded. // Returns whether this nmethod was unloaded.
virtual bool do_unloading_jvmci(BoolObjectClosure* is_alive, bool unloading_occurred); virtual bool do_unloading_jvmci(bool unloading_occurred);
#endif #endif
private: private:

View file

@@ -3353,7 +3353,7 @@ private:
} }
void clean_nmethod_postponed(CompiledMethod* nm) { void clean_nmethod_postponed(CompiledMethod* nm) {
nm->do_unloading_parallel_postponed(_is_alive, _unloading_occurred); nm->do_unloading_parallel_postponed();
} }
static const int MaxClaimNmethods = 16; static const int MaxClaimNmethods = 16;