6995781: Native Memory Tracking (Phase 1)
7151532: DCmd for hotspot native memory tracking
Implementation of native memory tracking phase 1, which tracks VM native memory usage, and related DCmd
Reviewed-by: acorn, coleenp, fparain
parent 8e42425c92
commit a39b17624a
315 changed files with 7245 additions and 1477 deletions
@@ -1,5 +1,5 @@
/*
 * Copyright (c) 1997, 2011, Oracle and/or its affiliates. All rights reserved.
 * Copyright (c) 1997, 2012, Oracle and/or its affiliates. All rights reserved.
 * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
 *
 * This code is free software; you can redistribute it and/or modify it
@@ -40,6 +40,18 @@
#define ARENA_ALIGN_MASK (~((size_t)ARENA_ALIGN_M1))
#define ARENA_ALIGN(x) ((((size_t)(x)) + ARENA_ALIGN_M1) & ARENA_ALIGN_MASK)

// noinline attribute
#ifdef _WINDOWS
  #define _NOINLINE_ __declspec(noinline)
#else
  #if __GNUC__ < 3    // gcc 2.x does not support noinline attribute
    #define _NOINLINE_
  #else
    #define _NOINLINE_ __attribute__ ((noinline))
  #endif
#endif

// All classes in the virtual machine must be subclassed
// by one of the following allocation classes:
//
@@ -98,12 +110,72 @@ class AllocatedObj {
};
#endif

class CHeapObj ALLOCATION_SUPER_CLASS_SPEC {

/*
 * MemoryType bitmap layout:
 * | 16 15 14 13 12 11 10 09 | 08 07 06 05 | 04 03 02 01 |
 * |      memory type        |   object    |  reserved   |
 * |                         |     type    |             |
 */
enum MemoryType {
  // Memory type by sub systems. It occupies lower byte.
  mtNone              = 0x0000,  // undefined
  mtClass             = 0x0100,  // memory class for Java classes
  mtThread            = 0x0200,  // memory for thread objects
  mtThreadStack       = 0x0300,
  mtCode              = 0x0400,  // memory for generated code
  mtGC                = 0x0500,  // memory for GC
  mtCompiler          = 0x0600,  // memory for compiler
  mtInternal          = 0x0700,  // memory used by VM, but does not belong to
                                 // any of above categories, and not used for
                                 // native memory tracking
  mtOther             = 0x0800,  // memory not used by VM
  mtSymbol            = 0x0900,  // symbol
  mtNMT               = 0x0A00,  // memory used by native memory tracking
  mtChunk             = 0x0B00,  // chunk that holds content of arenas
  mtJavaHeap          = 0x0C00,  // Java heap
  mtDontTrack         = 0x0D00,  // memory we donot or cannot track
  mt_number_of_types  = 0x000C,  // number of memory types
  mt_masks            = 0x7F00,

  // object type mask
  otArena             = 0x0010,  // an arena object
  otNMTRecorder       = 0x0020,  // memory recorder object
  ot_masks            = 0x00F0
};

#define IS_MEMORY_TYPE(flags, type) ((flags & mt_masks) == type)
#define HAS_VALID_MEMORY_TYPE(flags)((flags & mt_masks) != mtNone)
#define FLAGS_TO_MEMORY_TYPE(flags) (flags & mt_masks)

#define IS_ARENA_OBJ(flags)         ((flags & ot_masks) == otArena)
#define IS_NMT_RECORDER(flags)      ((flags & ot_masks) == otNMTRecorder)
#define NMT_CAN_TRACK(flags)        (!IS_NMT_RECORDER(flags) && !(IS_MEMORY_TYPE(flags, mtDontTrack)))

typedef unsigned short MEMFLAGS;

extern bool NMT_track_callsite;

// debug build does not inline
#if defined(_DEBUG_)
  #define CURRENT_PC       (NMT_track_callsite ? os::get_caller_pc(1) : 0)
  #define CALLER_PC        (NMT_track_callsite ? os::get_caller_pc(2) : 0)
  #define CALLER_CALLER_PC (NMT_track_callsite ? os::get_caller_pc(3) : 0)
#else
  #define CURRENT_PC       (NMT_track_callsite? os::get_caller_pc(0) : 0)
  #define CALLER_PC        (NMT_track_callsite ? os::get_caller_pc(1) : 0)
  #define CALLER_CALLER_PC (NMT_track_callsite ? os::get_caller_pc(2) : 0)
#endif


template <MEMFLAGS F> class CHeapObj ALLOCATION_SUPER_CLASS_SPEC {
 public:
  void* operator new(size_t size);
  void* operator new (size_t size, const std::nothrow_t& nothrow_constant);
  _NOINLINE_ void* operator new(size_t size, address caller_pc = 0);
  _NOINLINE_ void* operator new (size_t size, const std::nothrow_t& nothrow_constant,
                                 address caller_pc = 0);

  void  operator delete(void* p);
  void* new_array(size_t size);
};

// Base class for objects allocated on the stack only.
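The sketch below is not part of the diff; it is a minimal standalone C++ illustration of the MEMFLAGS bit layout introduced above, using only a subset of the enum values copied from this hunk. Everything else (the file, main, the printf) is invented for the example and stands in for what NMT actually records; the point is just the masking arithmetic.

// --- standalone sketch, not part of allocation.hpp ---
#include <cassert>
#include <cstdio>

typedef unsigned short MEMFLAGS;

// A subset of the MemoryType values from the diff above.
enum MemoryType {
  mtNone   = 0x0000,   // memory type lives in the upper byte
  mtClass  = 0x0100,
  mtChunk  = 0x0B00,
  mt_masks = 0x7F00,

  otArena       = 0x0010,   // object type lives in bits 5..8
  otNMTRecorder = 0x0020,
  ot_masks      = 0x00F0
};

#define FLAGS_TO_MEMORY_TYPE(flags) (flags & mt_masks)
#define IS_ARENA_OBJ(flags)         ((flags & ot_masks) == otArena)

int main() {
  // How Arena tags itself in this change: no memory type yet,
  // but the arena object-type bit set.
  MEMFLAGS f = mtNone | otArena;
  assert(FLAGS_TO_MEMORY_TYPE(f) == mtNone);
  assert(IS_ARENA_OBJ(f));
  printf("flags=0x%04x memory-type=0x%04x is-arena=%d\n",
         (unsigned)f, (unsigned)FLAGS_TO_MEMORY_TYPE(f), (int)IS_ARENA_OBJ(f));
  return 0;
}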
@@ -150,7 +222,7 @@ class AllStatic {

//------------------------------Chunk------------------------------------------
// Linked list of raw memory chunks
class Chunk: public CHeapObj {
class Chunk: CHeapObj<mtChunk> {
  friend class VMStructs;

 protected:
@@ -197,7 +269,7 @@ class Chunk: public CHeapObj {

//------------------------------Arena------------------------------------------
// Fast allocation of memory
class Arena: public CHeapObj {
class Arena : public CHeapObj<mtNone|otArena> {
 protected:
  friend class ResourceMark;
  friend class HandleMark;
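As an aside rather than part of the diff: the template parameter on CHeapObj<F> is what lets a class such as Chunk or Arena pick its memory type once, in its base-class list. Below is a minimal sketch of that pattern under invented names (TaggedHeapObj, MyThreadData), with a printf in place of the real NMT bookkeeping and error handling omitted.

// --- standalone sketch, not part of allocation.hpp ---
#include <cstddef>
#include <cstdio>
#include <cstdlib>

typedef unsigned short MEMFLAGS;
const MEMFLAGS mtThread = 0x0200;   // value taken from the enum above

// Simplified stand-in for CHeapObj<F>: the tag is a compile-time template
// parameter, so every allocation of a subclass carries it automatically.
template <MEMFLAGS F> class TaggedHeapObj {
 public:
  void* operator new(std::size_t size) {
    void* p = std::malloc(size);
    printf("allocated %zu bytes tagged 0x%04x\n", size, (unsigned)F);
    return p;
  }
  void operator delete(void* p) { std::free(p); }
};

// A subclass picks its memory type once in the base-class list,
// the same way Chunk derives from CHeapObj<mtChunk> in this diff.
class MyThreadData : public TaggedHeapObj<mtThread> {
  int _id;
 public:
  MyThreadData() : _id(0) {}
};

int main() {
  MyThreadData* t = new MyThreadData();  // tagged mtThread at compile time
  delete t;
  return 0;
}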
@@ -208,7 +280,8 @@ protected:
  Chunk *_chunk;                 // current chunk
  char *_hwm, *_max;             // High water mark and max in current chunk
  void* grow(size_t x);          // Get a new Chunk of at least size x
  NOT_PRODUCT(size_t _size_in_bytes;) // Size of arena (used for memory usage tracing)
  size_t _size_in_bytes;         // Size of arena (used for native memory tracking)

  NOT_PRODUCT(static julong _bytes_allocated;) // total #bytes allocated since start
  friend class AllocStats;
  debug_only(void* malloc(size_t size);)
@@ -231,6 +304,15 @@ protected:
  void destruct_contents();
  char* hwm() const { return _hwm; }

  // new operators
  void* operator new (size_t size);
  void* operator new (size_t size, const std::nothrow_t& nothrow_constant);

  // dynamic memory type tagging
  void* operator new(size_t size, MEMFLAGS flags);
  void* operator new(size_t size, const std::nothrow_t& nothrow_constant, MEMFLAGS flags);
  void  operator delete(void* p);

  // Fast allocate in the arena. Common case is: pointer test + increment.
  void* Amalloc(size_t x) {
    assert(is_power_of_2(ARENA_AMALLOC_ALIGNMENT) , "should be a power of 2");
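The "dynamic memory type tagging" operators above let a call site pass a MEMFLAGS value to a placement-style new, overriding the default tag the class would otherwise carry. The following is a hedged, standalone sketch of that calling convention only; MiniArena is invented for the example, and hotspot's real Arena forwards the flag to NMT instead of printing it.

// --- standalone sketch, not part of allocation.hpp ---
#include <cstddef>
#include <cstdio>
#include <cstdlib>

typedef unsigned short MEMFLAGS;
const MEMFLAGS mtNone = 0x0000;   // values taken from the enum above
const MEMFLAGS mtCode = 0x0400;

class MiniArena {
 public:
  // default-tagged allocation
  void* operator new(std::size_t size) { return alloc(size, mtNone); }
  // caller-supplied tag, mirroring Arena's operator new(size_t, MEMFLAGS)
  void* operator new(std::size_t size, MEMFLAGS flags) { return alloc(size, flags); }
  void operator delete(void* p) { std::free(p); }
  // matching placement delete, used only if a constructor throws
  void operator delete(void* p, MEMFLAGS) { std::free(p); }

 private:
  static void* alloc(std::size_t size, MEMFLAGS flags) {
    printf("arena of %zu bytes tagged 0x%04x\n", size, (unsigned)flags);
    return std::malloc(size);   // real code would also report this to NMT
  }
};

int main() {
  MiniArena* a = new MiniArena();           // keeps the default tag
  MiniArena* b = new (mtCode) MiniArena();  // tag chosen at the call site
  delete a;
  delete b;
  return 0;
}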
@@ -306,16 +388,20 @@ protected:
  size_t used() const;

  // Total # of bytes used
  size_t size_in_bytes() const NOT_PRODUCT({ return _size_in_bytes; }) PRODUCT_RETURN0;
  void set_size_in_bytes(size_t size) NOT_PRODUCT({ _size_in_bytes = size; }) PRODUCT_RETURN;
  size_t size_in_bytes() const { return _size_in_bytes; };
  void set_size_in_bytes(size_t size);

  static void free_malloced_objects(Chunk* chunk, char* hwm, char* max, char* hwm2) PRODUCT_RETURN;
  static void free_all(char** start, char** end) PRODUCT_RETURN;

  // how many arena instances
  NOT_PRODUCT(static volatile jint _instance_count;)
 private:
  // Reset this Arena to empty, access will trigger grow if necessary
  void reset(void) {
    _first = _chunk = NULL;
    _hwm = _max = NULL;
    set_size_in_bytes(0);
  }
};
@@ -373,7 +459,7 @@ class ResourceObj ALLOCATION_SUPER_CLASS_SPEC {
#endif // ASSERT

 public:
  void* operator new(size_t size, allocation_type type);
  void* operator new(size_t size, allocation_type type, MEMFLAGS flags);
  void* operator new(size_t size, Arena *arena) {
      address res = (address)arena->Amalloc(size);
      DEBUG_ONLY(set_allocation_type(res, ARENA);)
@@ -409,17 +495,28 @@ class ResourceObj ALLOCATION_SUPER_CLASS_SPEC {
#define NEW_RESOURCE_OBJ(type)\
  NEW_RESOURCE_ARRAY(type, 1)

#define NEW_C_HEAP_ARRAY(type, size)\
  (type*) (AllocateHeap((size) * sizeof(type), XSTR(type) " in " __FILE__))
#define NEW_C_HEAP_ARRAY(type, size, memflags)\
  (type*) (AllocateHeap((size) * sizeof(type), memflags))

#define REALLOC_C_HEAP_ARRAY(type, old, size)\
  (type*) (ReallocateHeap((char*)old, (size) * sizeof(type), XSTR(type) " in " __FILE__))
#define REALLOC_C_HEAP_ARRAY(type, old, size, memflags)\
  (type*) (ReallocateHeap((char*)old, (size) * sizeof(type), memflags))

#define FREE_C_HEAP_ARRAY(type,old) \
  FreeHeap((char*)(old))
#define FREE_C_HEAP_ARRAY(type,old,memflags) \
  FreeHeap((char*)(old), memflags)

#define NEW_C_HEAP_OBJ(type, memflags)\
  NEW_C_HEAP_ARRAY(type, 1, memflags)


#define NEW_C_HEAP_ARRAY2(type, size, memflags, pc)\
  (type*) (AllocateHeap((size) * sizeof(type), memflags, pc))

#define REALLOC_C_HEAP_ARRAY2(type, old, size, memflags, pc)\
  (type*) (ReallocateHeap((char*)old, (size) * sizeof(type), memflags, pc))

#define NEW_C_HEAP_OBJ2(type, memflags, pc)\
  NEW_C_HEAP_ARRAY2(type, 1, memflags, pc)

#define NEW_C_HEAP_OBJ(type)\
  NEW_C_HEAP_ARRAY(type, 1)

extern bool warn_new_operator;
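For context on how call sites change: the C-heap macros now require an explicit memory type. The sketch below is not hotspot code; the macro bodies are reduced to plain malloc/free stand-ins, whereas the real AllocateHeap/FreeHeap also record the allocation under the given type.

// --- standalone sketch, not part of allocation.hpp ---
#include <cstdlib>

typedef unsigned short MEMFLAGS;
const MEMFLAGS mtInternal = 0x0700;   // value taken from the enum above

// Old call sites looked like NEW_C_HEAP_ARRAY(char, len); new ones must also
// name a memory type. These simplified bodies drop the tag on the floor.
#define NEW_C_HEAP_ARRAY(type, size, memflags) \
  (type*) std::malloc((size) * sizeof(type))   /* memflags would be passed to NMT */
#define FREE_C_HEAP_ARRAY(type, old, memflags) \
  std::free((char*)(old))

int main() {
  char* buf = NEW_C_HEAP_ARRAY(char, 256, mtInternal);
  FREE_C_HEAP_ARRAY(char, buf, mtInternal);
  return 0;
}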