Mirror of https://github.com/openjdk/jdk.git (synced 2025-08-28 15:24:43 +02:00)
8217921: Runtime dead code removal
Reviewed-by: coleenp, sgehwolf, dholmes
parent 6a9f775041
commit d589fa875d
32 changed files with 9 additions and 255 deletions
@@ -137,11 +137,6 @@ inline bool frame::equal(frame other) const {
 // frame.
 inline intptr_t* frame::id(void) const { return unextended_sp(); }
 
-// Relationals on frames based
-// Return true if the frame is younger (more recent activation) than the frame represented by id
-inline bool frame::is_younger(intptr_t* id) const { assert(this->id() != NULL && id != NULL, "NULL frame id");
-                                                    return this->id() < id ; }
-
 // Return true if the frame is older (less recent activation) than the frame represented by id
 inline bool frame::is_older(intptr_t* id) const { assert(this->id() != NULL && id != NULL, "NULL frame id");
                                                   return this->id() > id ; }
@@ -120,17 +120,11 @@ inline bool frame::equal(frame other) const {
 // frame.
 inline intptr_t* frame::id(void) const { return unextended_sp(); }
 
-// Relationals on frames based
-// Return true if the frame is younger (more recent activation) than the frame represented by id
-inline bool frame::is_younger(intptr_t* id) const { assert(this->id() != NULL && id != NULL, "NULL frame id");
-                                                    return this->id() < id ; }
-
 // Return true if the frame is older (less recent activation) than the frame represented by id
 inline bool frame::is_older(intptr_t* id) const { assert(this->id() != NULL && id != NULL, "NULL frame id");
                                                   return this->id() > id ; }
-
 
 
 inline intptr_t* frame::link() const { return (intptr_t*) *(intptr_t **)addr_at(link_offset); }
 
 inline intptr_t* frame::unextended_sp() const { return _unextended_sp; }
@@ -111,14 +111,6 @@ inline intptr_t* frame::id(void) const {
   return _fp;
 }
 
-// Return true if this frame is younger (more recent activation) than
-// the frame represented by id.
-inline bool frame::is_younger(intptr_t* id) const {
-  assert(this->id() != NULL && id != NULL, "NULL frame id");
-  // Stack grows towards smaller addresses on z/Architecture.
-  return this->id() < id;
-}
-
 // Return true if this frame is older (less recent activation) than
 // the frame represented by id.
 inline bool frame::is_older(intptr_t* id) const {
@@ -57,11 +57,6 @@ inline bool frame::equal(frame other) const {
 // frame.
 inline intptr_t* frame::id(void) const { return unextended_sp(); }
 
-// Relationals on frames based
-// Return true if the frame is younger (more recent activation) than the frame represented by id
-inline bool frame::is_younger(intptr_t* id) const { assert(this->id() != NULL && id != NULL, "NULL frame id");
-                                                    return this->id() < id ; }
-
 // Return true if the frame is older (less recent activation) than the frame represented by id
 inline bool frame::is_older(intptr_t* id) const { assert(this->id() != NULL && id != NULL, "NULL frame id");
                                                   return this->id() > id ; }
@@ -133,11 +133,6 @@ inline bool frame::equal(frame other) const {
 // frame.
 inline intptr_t* frame::id(void) const { return unextended_sp(); }
 
-// Relationals on frames based
-// Return true if the frame is younger (more recent activation) than the frame represented by id
-inline bool frame::is_younger(intptr_t* id) const { assert(this->id() != NULL && id != NULL, "NULL frame id");
-                                                    return this->id() < id ; }
-
 // Return true if the frame is older (less recent activation) than the frame represented by id
 inline bool frame::is_older(intptr_t* id) const { assert(this->id() != NULL && id != NULL, "NULL frame id");
                                                   return this->id() > id ; }
@@ -211,7 +211,6 @@ class BytecodeStream: public BaseBytecodeStream {
     return _code;
   }
 
-  bool is_active_breakpoint() const { return Bytecodes::is_active_breakpoint_at(bcp()); }
   Bytecodes::Code code() const { return _code; }
 
   // Unsigned indices, widening
@@ -1,5 +1,5 @@
 /*
- * Copyright (c) 1997, 2016, Oracle and/or its affiliates. All rights reserved.
+ * Copyright (c) 1997, 2019, Oracle and/or its affiliates. All rights reserved.
  * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
  *
  * This code is free software; you can redistribute it and/or modify it
@@ -225,7 +225,6 @@ bool BytecodePrinter::check_index(int i, int& cp_index, outputStream* st) {
   int ilimit = constants->length();
   Bytecodes::Code code = raw_code();
 
-  ConstantPoolCache* cache = NULL;
   if (Bytecodes::uses_cp_cache(code)) {
     bool okay = true;
     switch (code) {
@@ -256,8 +255,7 @@ bool BytecodePrinter::check_index(int i, int& cp_index, outputStream* st) {
 
 bool BytecodePrinter::check_cp_cache_index(int i, int& cp_index, outputStream* st) {
   ConstantPool* constants = method()->constants();
-  int ilimit = constants->length(), climit = 0;
-  Bytecodes::Code code = raw_code();
+  int climit = 0;
 
   ConstantPoolCache* cache = constants->cache();
   // If rewriter hasn't run, the index is the cp_index
@@ -307,7 +305,6 @@ bool BytecodePrinter::check_obj_index(int i, int& cp_index, outputStream* st) {
 
 
 bool BytecodePrinter::check_invokedynamic_index(int i, int& cp_index, outputStream* st) {
-  ConstantPool* constants = method()->constants();
   assert(ConstantPool::is_invokedynamic_index(i), "not secondary index?");
   i = ConstantPool::decode_invokedynamic_index(i) + ConstantPool::CPCACHE_INDEX_TAG;
 
@@ -380,7 +380,6 @@ class Bytecodes: AllStatic {
   static Code code_or_bp_at(address bcp) { return (Code)cast(*bcp); }
 
   static Code code_at(Method* method, int bci);
-  static bool is_active_breakpoint_at(address bcp) { return (Code)*bcp == _breakpoint; }
 
   // find a bytecode, behind a breakpoint if necessary:
   static Code non_breakpoint_code_at(const Method* method, address bcp);
@@ -405,18 +404,12 @@ class Bytecodes: AllStatic {
   // if 'end' is provided, it indicates the end of the code buffer which
   // should not be read past when parsing.
   static int special_length_at(Bytecodes::Code code, address bcp, address end = NULL);
-  static int special_length_at(Method* method, address bcp, address end = NULL) { return special_length_at(code_at(method, bcp), bcp, end); }
   static int raw_special_length_at(address bcp, address end = NULL);
   static int length_for_code_at(Bytecodes::Code code, address bcp) { int l = length_for(code); return l > 0 ? l : special_length_at(code, bcp); }
   static int length_at (Method* method, address bcp) { return length_for_code_at(code_at(method, bcp), bcp); }
   static int java_length_at (Method* method, address bcp) { return length_for_code_at(java_code_at(method, bcp), bcp); }
   static bool is_java_code (Code code) { return 0 <= code && code < number_of_java_codes; }
 
-  static bool is_aload (Code code) { return (code == _aload || code == _aload_0 || code == _aload_1
-                                             || code == _aload_2 || code == _aload_3); }
-  static bool is_astore (Code code) { return (code == _astore || code == _astore_0 || code == _astore_1
-                                              || code == _astore_2 || code == _astore_3); }
-
   static bool is_store_into_local(Code code){ return (_istore <= code && code <= _astore_3); }
   static bool is_const (Code code) { return (_aconst_null <= code && code <= _ldc2_w); }
   static bool is_zero_const (Code code) { return (code == _aconst_null || code == _iconst_0
@@ -433,7 +426,6 @@ class Bytecodes: AllStatic {
     assert(code == (u_char)code, "must be a byte");
     return _flags[code + (is_wide ? (1<<BitsPerByte) : 0)];
   }
-  static int format_bits (Code code, bool is_wide) { return flags(code, is_wide) & _all_fmt_bits; }
   static bool has_all_flags (Code code, int test_flags, bool is_wide) {
     return (flags(code, is_wide) & test_flags) == test_flags;
   }
@@ -1,5 +1,5 @@
 /*
- * Copyright (c) 1997, 2018, Oracle and/or its affiliates. All rights reserved.
+ * Copyright (c) 1997, 2019, Oracle and/or its affiliates. All rights reserved.
  * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
  *
  * This code is free software; you can redistribute it and/or modify it
@@ -135,11 +135,6 @@ public:
   frame& get_frame() { return _last_frame; }
 };
 
-
-bool InterpreterRuntime::is_breakpoint(JavaThread *thread) {
-  return Bytecodes::code_or_bp_at(LastFrameAccessor(thread).bcp()) == Bytecodes::_breakpoint;
-}
-
 //------------------------------------------------------------------------------------------------------------------------
 // State accessors
 
@@ -126,7 +126,6 @@ class InterpreterRuntime: AllStatic {
   static void _breakpoint(JavaThread* thread, Method* method, address bcp);
   static Bytecodes::Code get_original_bytecode_at(JavaThread* thread, Method* method, address bcp);
   static void set_original_bytecode_at(JavaThread* thread, Method* method, address bcp, Bytecodes::Code new_code);
-  static bool is_breakpoint(JavaThread *thread);
 
   // Safepoints
   static void at_safepoint(JavaThread* thread);
@@ -120,7 +120,6 @@ class TreeList : public FreeList_t {
   // node to point to the new node.
   TreeList<Chunk_t, FreeList_t>* remove_chunk_replace_if_needed(TreeChunk<Chunk_t, FreeList_t>* tc);
   // See FreeList.
-  void return_chunk_at_head(TreeChunk<Chunk_t, FreeList_t>* tc);
   void return_chunk_at_tail(TreeChunk<Chunk_t, FreeList_t>* tc);
 };
 
@@ -236,7 +235,6 @@ class BinaryTreeDictionary: public CHeapObj<mtGC> {
   size_t num_free_blocks() const;
   size_t tree_height() const;
   size_t tree_height_helper(TreeList<Chunk_t, FreeList_t>* tl) const;
-  size_t total_nodes_in_tree(TreeList<Chunk_t, FreeList_t>* tl) const;
   size_t total_nodes_helper(TreeList<Chunk_t, FreeList_t>* tl) const;
 
 public:
@@ -245,35 +245,6 @@ void TreeList<Chunk_t, FreeList_t>::return_chunk_at_tail(TreeChunk<Chunk_t, Free
   assert(tail() == NULL || tail()->next() == NULL, "list invariant");
 }
 
-// Add this chunk at the head of the list. "At the head of the list"
-// is defined to be after the chunk pointer to by head(). This is
-// because the TreeList<Chunk_t, FreeList_t> is embedded in the first TreeChunk<Chunk_t, FreeList_t> in the
-// list. See the definition of TreeChunk<Chunk_t, FreeList_t>.
-template <class Chunk_t, class FreeList_t>
-void TreeList<Chunk_t, FreeList_t>::return_chunk_at_head(TreeChunk<Chunk_t, FreeList_t>* chunk) {
-  assert(chunk->list() == this, "list should be set for chunk");
-  assert(head() != NULL, "The tree list is embedded in the first chunk");
-  assert(chunk != NULL, "returning NULL chunk");
-  // This is expensive for metaspace
-  assert(!FLSVerifyDictionary || !this->verify_chunk_in_free_list(chunk), "Double entry");
-  assert(head() == NULL || head()->prev() == NULL, "list invariant");
-  assert(tail() == NULL || tail()->next() == NULL, "list invariant");
-
-  Chunk_t* fc = head()->next();
-  if (fc != NULL) {
-    chunk->link_after(fc);
-  } else {
-    assert(tail() == NULL, "List is inconsistent");
-    this->link_tail(chunk);
-  }
-  head()->link_after(chunk);
-  assert(!head() || size() == head()->size(), "Wrong sized chunk in list");
-  FreeList_t::increment_count();
-  debug_only(this->increment_returned_bytes_by(chunk->size()*sizeof(HeapWord));)
-  assert(head() == NULL || head()->prev() == NULL, "list invariant");
-  assert(tail() == NULL || tail()->next() == NULL, "list invariant");
-}
-
 template <class Chunk_t, class FreeList_t>
 void TreeChunk<Chunk_t, FreeList_t>::assert_is_mangled() const {
   assert((ZapUnusedHeapArea &&
@@ -799,11 +770,6 @@ size_t BinaryTreeDictionary<Chunk_t, FreeList_t>::total_nodes_helper(TreeList<Ch
     total_nodes_helper(tl->right());
 }
 
-template <class Chunk_t, class FreeList_t>
-size_t BinaryTreeDictionary<Chunk_t, FreeList_t>::total_nodes_in_tree(TreeList<Chunk_t, FreeList_t>* tl) const {
-  return total_nodes_helper(root());
-}
-
 // Searches the tree for a chunk that ends at the
 // specified address.
 template <class Chunk_t, class FreeList_t>
@@ -191,9 +191,6 @@ private:
   static void verify_subgraph_from(oop orig_obj) PRODUCT_RETURN;
 
   static KlassSubGraphInfo* get_subgraph_info(Klass *k);
-  static int num_of_subgraph_infos();
-
-  static void build_archived_subgraph_info_records(int num_records);
 
   static void init_subgraph_entry_fields(ArchivableStaticFieldInfo fields[],
                                          int num, Thread* THREAD);
@@ -321,10 +318,6 @@ private:
 
   inline static bool is_archived_object(oop p) NOT_CDS_JAVA_HEAP_RETURN_(false);
 
-  static void archive_java_heap_objects() NOT_CDS_JAVA_HEAP_RETURN;
-
-  static char* read_archived_subgraph_infos(char* buffer) NOT_CDS_JAVA_HEAP_RETURN_(buffer);
-  static void write_archived_subgraph_infos() NOT_CDS_JAVA_HEAP_RETURN;
   static void initialize_from_archived_subgraph(Klass* k) NOT_CDS_JAVA_HEAP_RETURN;
 
   // NarrowOops stored in the CDS archive may use a different encoding scheme
@@ -1,5 +1,5 @@
 /*
- * Copyright (c) 2011, 2018, Oracle and/or its affiliates. All rights reserved.
+ * Copyright (c) 2011, 2019, Oracle and/or its affiliates. All rights reserved.
  * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
  *
  * This code is free software; you can redistribute it and/or modify it
@@ -873,19 +873,6 @@ metaspace::VirtualSpaceNode* MetaspaceUtils::find_enclosing_virtual_space(const
   return vsn;
 }
 
-bool MetaspaceUtils::is_in_committed(const void* p) {
-#if INCLUDE_CDS
-  if (UseSharedSpaces) {
-    for (int idx = MetaspaceShared::ro; idx <= MetaspaceShared::mc; idx++) {
-      if (FileMapInfo::current_info()->is_in_shared_region(p, idx)) {
-        return true;
-      }
-    }
-  }
-#endif
-  return find_enclosing_virtual_space(p) != NULL;
-}
-
 bool MetaspaceUtils::is_range_in_committed(const void* from, const void* to) {
 #if INCLUDE_CDS
   if (UseSharedSpaces) {
@@ -333,7 +333,6 @@ class MetaspaceUtils : AllStatic {
   // Utils to check if a pointer or range is part of a committed metaspace region
   // without acquiring any locks.
   static metaspace::VirtualSpaceNode* find_enclosing_virtual_space(const void* p);
-  static bool is_in_committed(const void* p);
   static bool is_range_in_committed(const void* from, const void* to);
 
 public:
@@ -387,9 +386,6 @@ public:
   }
 
   static size_t min_chunk_size_words();
-  static size_t min_chunk_size_bytes() {
-    return min_chunk_size_words() * BytesPerWord;
-  }
 
   // Flags for print_report().
   enum ReportFlag {
@@ -218,10 +218,6 @@ char* MetaspaceShared::read_only_space_alloc(size_t num_bytes) {
   return _ro_region.allocate(num_bytes);
 }
 
-char* MetaspaceShared::read_only_space_top() {
-  return _ro_region.top();
-}
-
 void MetaspaceShared::initialize_runtime_shared_and_meta_spaces() {
   assert(UseSharedSpaces, "Must be called when UseSharedSpaces is enabled");
 
@@ -168,14 +168,11 @@ class MetaspaceShared : AllStatic {
 
   static bool try_link_class(InstanceKlass* ik, TRAPS);
   static void link_and_cleanup_shared_classes(TRAPS);
-  static void check_shared_class_loader_type(InstanceKlass* ik);
 
   // Allocate a block of memory from the "mc", "ro", or "rw" regions.
   static char* misc_code_space_alloc(size_t num_bytes);
   static char* read_only_space_alloc(size_t num_bytes);
 
-  static char* read_only_space_top();
-
   template <typename T>
   static Array<T>* new_ro_array(int length) {
 #if INCLUDE_CDS
@@ -1,5 +1,5 @@
 /*
- * Copyright (c) 1997, 2018, Oracle and/or its affiliates. All rights reserved.
+ * Copyright (c) 1997, 2019, Oracle and/or its affiliates. All rights reserved.
  * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
  *
  * This code is free software; you can redistribute it and/or modify it
@@ -1286,14 +1286,6 @@ uintptr_t Universe::verify_mark_bits() {
 #endif // PRODUCT
 
 
-void Universe::compute_verify_oop_data() {
-  verify_oop_mask();
-  verify_oop_bits();
-  verify_mark_mask();
-  verify_mark_bits();
-}
-
-
 void LatestMethodCache::init(Klass* k, Method* m) {
   if (!UseSharedSpaces) {
     _klass = k;
@@ -258,7 +258,6 @@ class Universe: AllStatic {
   static uintptr_t _verify_oop_bits;
 
   static void calculate_verify_data(HeapWord* low_boundary, HeapWord* high_boundary) PRODUCT_RETURN;
-  static void compute_verify_oop_data();
 
 public:
   // Known classes in the VM
@@ -392,8 +391,6 @@ class Universe: AllStatic {
   };
   static NARROW_OOP_MODE narrow_oop_mode();
   static const char* narrow_oop_mode_to_string(NARROW_OOP_MODE mode);
-  static char* preferred_heap_base(size_t heap_size, size_t alignment, NARROW_OOP_MODE mode);
-  static char* preferred_metaspace_base(size_t heap_size, NARROW_OOP_MODE mode);
   static address narrow_oop_base() { return _narrow_oop._base; }
   // Test whether bits of addr and possible offsets into the heap overlap.
   static bool is_disjoint_heap_base_address(address addr) {
@@ -416,10 +413,8 @@ class Universe: AllStatic {
 
   // For UseCompressedClassPointers
   static address narrow_klass_base() { return _narrow_klass._base; }
-  static bool is_narrow_klass_base(void* addr) { return (narrow_klass_base() == (address)addr); }
   static uint64_t narrow_klass_range() { return _narrow_klass_range; }
   static int narrow_klass_shift() { return _narrow_klass._shift; }
-  static bool narrow_klass_use_implicit_null_checks() { return _narrow_klass._use_implicit_null_checks; }
 
   static address* narrow_ptrs_base_addr() { return &_narrow_ptrs_base; }
   static void set_narrow_ptrs_base(address a) { _narrow_ptrs_base = a; }
@@ -441,7 +436,6 @@ class Universe: AllStatic {
   static ReservedSpace reserve_heap(size_t heap_size, size_t alignment);
 
   // Historic gc information
-  static size_t get_heap_capacity_at_last_gc() { return _heap_capacity_at_last_gc; }
   static size_t get_heap_free_at_last_gc() { return _heap_capacity_at_last_gc - _heap_used_at_last_gc; }
   static size_t get_heap_used_at_last_gc() { return _heap_used_at_last_gc; }
   static void update_heap_info_at_gc();
@@ -517,25 +511,4 @@ class Universe: AllStatic {
   static int base_vtable_size() { return _base_vtable_size; }
 };
 
-class DeferredObjAllocEvent : public CHeapObj<mtInternal> {
-  private:
-    oop _oop;
-    size_t _bytesize;
-    jint _arena_id;
-
-  public:
-    DeferredObjAllocEvent(const oop o, const size_t s, const jint id) {
-      _oop = o;
-      _bytesize = s;
-      _arena_id = id;
-    }
-
-    ~DeferredObjAllocEvent() {
-    }
-
-    jint arena_id() { return _arena_id; }
-    size_t bytesize() { return _bytesize; }
-    oop get_oop() { return _oop; }
-};
-
 #endif // SHARE_MEMORY_UNIVERSE_HPP
@@ -1,5 +1,5 @@
 /*
- * Copyright (c) 1997, 2018, Oracle and/or its affiliates. All rights reserved.
+ * Copyright (c) 1997, 2019, Oracle and/or its affiliates. All rights reserved.
  * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
  *
  * This code is free software; you can redistribute it and/or modify it
@@ -266,11 +266,6 @@ size_t ReservedSpace::allocation_align_size_up(size_t size) {
 }
 
 
-size_t ReservedSpace::allocation_align_size_down(size_t size) {
-  return align_down(size, os::vm_allocation_granularity());
-}
-
-
 void ReservedSpace::release() {
   if (is_reserved()) {
     char *real_base = _base - _noaccess_prefix;
@@ -88,7 +88,6 @@ class ReservedSpace {
   static size_t page_align_size_up(size_t size);
   static size_t page_align_size_down(size_t size);
   static size_t allocation_align_size_up(size_t size);
-  static size_t allocation_align_size_down(size_t size);
   bool contains(const void* p) const {
     return (base() <= ((char*)p)) && (((char*)p) < (base() + size()));
   }
@@ -297,25 +297,11 @@ public:
   // lazily create _jvmti_breakpoints and _breakpoint_list
   static JvmtiBreakpoints& get_jvmti_breakpoints();
 
-  // quickly test whether the bcp matches a cached breakpoint in the list
-  static inline bool is_breakpoint(address bcp);
-
   static void oops_do(OopClosure* f);
   static void metadata_do(void f(Metadata*)) NOT_JVMTI_RETURN;
   static void gc_epilogue();
 };
 
-// quickly test whether the bcp matches a cached breakpoint in the list
-bool JvmtiCurrentBreakpoints::is_breakpoint(address bcp) {
-  address *bps = get_breakpoint_list();
-  if (bps == NULL) return false;
-  for ( ; (*bps) != NULL; bps++) {
-    if ((*bps) == bcp) return true;
-  }
-  return false;
-}
-
-
 ///////////////////////////////////////////////////////////////
 //
 // class VM_ChangeBreakpoints
@@ -1,5 +1,5 @@
 /*
- * Copyright (c) 2008, 2018, Oracle and/or its affiliates. All rights reserved.
+ * Copyright (c) 2008, 2019, Oracle and/or its affiliates. All rights reserved.
  * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
  *
  * This code is free software; you can redistribute it and/or modify it
@@ -968,7 +968,6 @@ int MethodHandles::find_MemberNames(Klass* k,
   bool search_superc = ((match_flags & SEARCH_SUPERCLASSES) != 0);
   bool search_intfc = ((match_flags & SEARCH_INTERFACES) != 0);
   bool local_only = !(search_superc | search_intfc);
-  bool classes_only = false;
 
   if (name != NULL) {
     if (name->utf8_length() == 0) return 0; // a match is not possible
@@ -67,7 +67,6 @@ class MethodHandles: AllStatic {
   static oop init_MemberName(Handle mname_h, Handle target_h, TRAPS); // compute vmtarget/vmindex from target
   static oop init_field_MemberName(Handle mname_h, fieldDescriptor& fd, bool is_setter = false);
   static oop init_method_MemberName(Handle mname_h, CallInfo& info);
-  static int method_ref_kind(Method* m, bool do_dispatch_if_possible = true);
   static int find_MemberNames(Klass* k, Symbol* name, Symbol* sig,
                               int mflags, Klass* caller,
                               int skip, objArrayHandle results, TRAPS);
@@ -148,8 +147,6 @@ class MethodHandles: AllStatic {
 
   static Bytecodes::Code signature_polymorphic_intrinsic_bytecode(vmIntrinsics::ID id);
 
-  static int get_named_constant(int which, Handle name_box, TRAPS);
-
 public:
   static Symbol* lookup_signature(oop type_str, bool polymorphic, TRAPS); // use TempNewSymbol
   static Symbol* lookup_basic_type_signature(Symbol* sig, bool keep_last_arg, TRAPS); // use TempNewSymbol
@@ -158,11 +155,6 @@ public:
   }
   static bool is_basic_type_signature(Symbol* sig);
 
-  static Symbol* lookup_method_type(Symbol* msig, Handle mtype, TRAPS);
-
-  static void print_as_method_type_on(outputStream* st, Symbol* sig) {
-    print_as_basic_type_signature_on(st, sig, true, true);
-  }
   static void print_as_basic_type_signature_on(outputStream* st, Symbol* sig, bool keep_arrays = false, bool keep_basic_names = false);
 
   // decoding CONSTANT_MethodHandle constants
@@ -188,13 +180,6 @@ public:
     assert(ref_kind_is_valid(ref_kind), "");
     return (ref_kind & 1) != 0;
   }
-  static bool ref_kind_is_static(int ref_kind) {
-    return !ref_kind_has_receiver(ref_kind) && (ref_kind != JVM_REF_newInvokeSpecial);
-  }
-  static bool ref_kind_does_dispatch(int ref_kind) {
-    return (ref_kind == JVM_REF_invokeVirtual ||
-            ref_kind == JVM_REF_invokeInterface);
-  }
 
   static int ref_kind_to_flags(int ref_kind);
 
@@ -328,10 +328,6 @@ void Arguments::add_loaded_agent(AgentLibrary *agentLib) {
   _agentList.add(agentLib);
 }
 
-void Arguments::add_loaded_agent(const char* name, char* options, bool absolute_path, void* os_lib) {
-  _agentList.add(new AgentLibrary(name, options, absolute_path, os_lib));
-}
-
 // Return TRUE if option matches 'property', or 'property=', or 'property.'.
 static bool matches_property_suffix(const char* option, const char* property, size_t len) {
   return ((strncmp(option, property, len) == 0) &&
@@ -346,7 +346,6 @@ class Arguments : AllStatic {
 
   // Late-binding agents not started via arguments
   static void add_loaded_agent(AgentLibrary *agentLib);
-  static void add_loaded_agent(const char* name, char* options, bool absolute_path, void* os_lib);
 
   // Operation modi
   static Mode _mode;
@@ -368,7 +367,6 @@ class Arguments : AllStatic {
   static bool _UseOnStackReplacement;
   static bool _BackgroundCompilation;
   static bool _ClipInlining;
-  static bool _CIDynamicCompilePriority;
   static intx _Tier3InvokeNotifyFreqLog;
   static intx _Tier4InvocationThreshold;
 
@@ -1,5 +1,5 @@
 /*
- * Copyright (c) 1997, 2018, Oracle and/or its affiliates. All rights reserved.
+ * Copyright (c) 1997, 2019, Oracle and/or its affiliates. All rights reserved.
  * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
  *
  * This code is free software; you can redistribute it and/or modify it
@@ -368,29 +368,6 @@ frame frame::real_sender(RegisterMap* map) const {
   return result;
 }
 
-// Note: called by profiler - NOT for current thread
-frame frame::profile_find_Java_sender_frame(JavaThread *thread) {
-// If we don't recognize this frame, walk back up the stack until we do
-  RegisterMap map(thread, false);
-  frame first_java_frame = frame();
-
-  // Find the first Java frame on the stack starting with input frame
-  if (is_java_frame()) {
-    // top frame is compiled frame or deoptimized frame
-    first_java_frame = *this;
-  } else if (safe_for_sender(thread)) {
-    for (frame sender_frame = sender(&map);
-      sender_frame.safe_for_sender(thread) && !sender_frame.is_first_frame();
-      sender_frame = sender_frame.sender(&map)) {
-      if (sender_frame.is_java_frame()) {
-        first_java_frame = sender_frame;
-        break;
-      }
-    }
-  }
-  return first_java_frame;
-}
-
 // Interpreter frames
 
 
@@ -148,9 +148,6 @@ class frame {
   // returns the sending frame
   frame sender(RegisterMap* map) const;
 
-  // for Profiling - acting on another frame. walks sender frames
-  // if valid.
-  frame profile_find_Java_sender_frame(JavaThread *thread);
   bool safe_for_sender(JavaThread *thread);
 
   // returns the sender, but skips conversion frames
@@ -1,5 +1,5 @@
 /*
- * Copyright (c) 2001, 2017, Oracle and/or its affiliates. All rights reserved.
+ * Copyright (c) 2001, 2019, Oracle and/or its affiliates. All rights reserved.
  * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
  *
  * This code is free software; you can redistribute it and/or modify it
@@ -356,14 +356,3 @@ void StatSampler::create_sampled_perfdata() {
   PerfDataManager::create_counter(SUN_OS, "hrt.ticks",
                                   PerfData::U_Ticks, psh, CHECK);
 }
-
-/*
- * the statSampler_exit() function is called from os_init.cpp on
- * exit of the vm.
- */
-void statSampler_exit() {
-
-  if (!UsePerfData) return;
-
-  StatSampler::destroy();
-}
@@ -65,6 +65,4 @@ class StatSampler : AllStatic {
   static void destroy();
 };
 
-void statSampler_exit();
-
 #endif // SHARE_RUNTIME_STATSAMPLER_HPP
@@ -121,7 +121,6 @@
   template(PrintCompileQueue)                     \
   template(PrintClassHierarchy)                   \
   template(ThreadSuspend)                         \
-  template(CTWThreshold)                          \
   template(ThreadsSuspendJVMTI)                   \
   template(ICBufferFull)                          \
   template(ScavengeMonitors)                      \
@@ -272,12 +271,6 @@ class VM_ThreadSuspend: public VM_ForceSafepoint {
   VMOp_Type type() const { return VMOp_ThreadSuspend; }
 };
 
-// empty vm op, when forcing a safepoint due to ctw threshold is reached for the sweeper
-class VM_CTWThreshold: public VM_ForceSafepoint {
- public:
-  VMOp_Type type() const { return VMOp_CTWThreshold; }
-};
-
 // empty vm op, when forcing a safepoint to suspend threads from jvmti
 class VM_ThreadsSuspendJVMTI: public VM_ForceSafepoint {
  public:
@@ -27,11 +27,6 @@
 
 #include "memory/allocation.hpp"
 
-template<typename K> struct ResourceHashtableFns {
-  typedef unsigned (*hash_fn)(K const&);
-  typedef bool (*equals_fn)(K const&, K const&);
-};
-
 template<
     typename K, typename V,
     // xlC does not compile this:
@@ -160,10 +155,6 @@ class ResourceHashtable : public ResourceObj {
       ++bucket;
     }
   }
-
-  static size_t node_size() {
-    return sizeof(Node);
-  }
 };
 
 