Mirror of https://github.com/openjdk/jdk.git (synced 2025-08-28 15:24:43 +02:00)

8258838: Remove JVM option UseStackBanging

Reviewed-by: dholmes, coleenp, kvn

commit cf3e4bfdb5 (parent 33fbc10cb8)

18 changed files with 120 additions and 170 deletions
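Every hunk below removes the same develop-time guard around HotSpot's stack bang. For context, "banging" means storing to pages below the stack pointer so that an overflow hits the guard zone early, before a new frame is actually used. The following is a self-contained, hedged sketch of that mechanism against a simulated stack; the sizes are assumptions, and the real code stores relative to the live SP (for example via __ bang_stack_with_offset()) rather than into a buffer.

// Conceptual sketch of stack banging: touch one byte per page below the
// "stack pointer" so that the first touch of a protected guard page faults
// before the frame is used. Page size and shadow depth are assumed values.
#include <cstdio>

int main() {
  const int page_size = 4096;          // stand-in for os::vm_page_size()
  const int shadow_pages = 8;          // stand-in for StackShadowPages
  static char stack[64 * 1024];        // simulated stack; real code uses SP
  char* sp = stack + sizeof(stack);    // stacks grow down, so SP starts at the top

  for (int page = 1; page <= shadow_pages; page++) {
    char* probe = sp - page * page_size;
    *probe = 0;                        // the "bang": on a real guard page this faults
    printf("banged SP - %d\n", page * page_size);
  }
  return 0;
}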
@@ -1,5 +1,5 @@
 //
-// Copyright (c) 2003, 2020, Oracle and/or its affiliates. All rights reserved.
+// Copyright (c) 2003, 2021, Oracle and/or its affiliates. All rights reserved.
 // Copyright (c) 2014, 2020, Red Hat, Inc. All rights reserved.
 // DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
 //

@@ -1921,7 +1921,7 @@ void MachPrologNode::emit(CodeBuffer &cbuf, PhaseRegAlloc *ra_) const {
   }

   int bangsize = C->output()->bang_size_in_bytes();
-  if (C->output()->need_stack_bang(bangsize) && UseStackBanging)
+  if (C->output()->need_stack_bang(bangsize))
     __ generate_stack_overflow_check(bangsize);

   __ build_frame(framesize);
@@ -1,5 +1,5 @@
 /*
- * Copyright (c) 2003, 2020, Oracle and/or its affiliates. All rights reserved.
+ * Copyright (c) 2003, 2021, Oracle and/or its affiliates. All rights reserved.
  * Copyright (c) 2014, 2020, Red Hat Inc. All rights reserved.
  * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
  *

@@ -1499,11 +1499,7 @@ nmethod* SharedRuntime::generate_native_wrapper(MacroAssembler* masm,
   }

   // Generate stack overflow check
-  if (UseStackBanging) {
-    __ bang_stack_with_offset(checked_cast<int>(StackOverflow::stack_shadow_zone_size()));
-  } else {
-    Unimplemented();
-  }
+  __ bang_stack_with_offset(checked_cast<int>(StackOverflow::stack_shadow_zone_size()));

   // Generate a new frame for the wrapper.
   __ enter();

@@ -2413,10 +2409,8 @@ void SharedRuntime::generate_deopt_blob() {
   // Compilers generate code that bang the stack by as much as the
   // interpreter would need. So this stack banging should never
   // trigger a fault. Verify that it does not on non product builds.
-  if (UseStackBanging) {
-    __ ldrw(r19, Address(r5, Deoptimization::UnrollBlock::total_frame_sizes_offset_in_bytes()));
-    __ bang_stack_size(r19, r2);
-  }
+  __ ldrw(r19, Address(r5, Deoptimization::UnrollBlock::total_frame_sizes_offset_in_bytes()));
+  __ bang_stack_size(r19, r2);
 #endif
   // Load address of array of frame pcs into r2
   __ ldr(r2, Address(r5, Deoptimization::UnrollBlock::frame_pcs_offset_in_bytes()));

@@ -2630,12 +2624,10 @@ void SharedRuntime::generate_uncommon_trap_blob() {
   // Compilers generate code that bang the stack by as much as the
   // interpreter would need. So this stack banging should never
   // trigger a fault. Verify that it does not on non product builds.
-  if (UseStackBanging) {
-    __ ldrw(r1, Address(r4,
-                        Deoptimization::UnrollBlock::
-                        total_frame_sizes_offset_in_bytes()));
-    __ bang_stack_size(r1, r2);
-  }
+  __ ldrw(r1, Address(r4,
+                      Deoptimization::UnrollBlock::
+                      total_frame_sizes_offset_in_bytes()));
+  __ bang_stack_size(r1, r2);
 #endif

   // Load address of array of frame pcs into r2 (address*)
@@ -1,5 +1,5 @@
 /*
- * Copyright (c) 2003, 2020, Oracle and/or its affiliates. All rights reserved.
+ * Copyright (c) 2003, 2021, Oracle and/or its affiliates. All rights reserved.
  * Copyright (c) 2014, 2020, Red Hat Inc. All rights reserved.
  * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
  *

@@ -1121,14 +1121,12 @@ void TemplateInterpreterGenerator::bang_stack_shadow_pages(bool native_call) {
   // Bang each page in the shadow zone. We can't assume it's been done for
   // an interpreter frame with greater than a page of locals, so each page
   // needs to be checked. Only true for non-native.
-  if (UseStackBanging) {
-    const int n_shadow_pages = (int)(StackOverflow::stack_shadow_zone_size() / os::vm_page_size());
-    const int start_page = native_call ? n_shadow_pages : 1;
-    const int page_size = os::vm_page_size();
-    for (int pages = start_page; pages <= n_shadow_pages ; pages++) {
-      __ sub(rscratch2, sp, pages*page_size);
-      __ str(zr, Address(rscratch2));
-    }
-  }
+  const int n_shadow_pages = (int)(StackOverflow::stack_shadow_zone_size() / os::vm_page_size());
+  const int start_page = native_call ? n_shadow_pages : 1;
+  const int page_size = os::vm_page_size();
+  for (int pages = start_page; pages <= n_shadow_pages ; pages++) {
+    __ sub(rscratch2, sp, pages*page_size);
+    __ str(zr, Address(rscratch2));
+  }
 }
@@ -1,5 +1,5 @@
 /*
- * Copyright (c) 2008, 2020, Oracle and/or its affiliates. All rights reserved.
+ * Copyright (c) 2008, 2021, Oracle and/or its affiliates. All rights reserved.
  * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
  *
  * This code is free software; you can redistribute it and/or modify it

@@ -977,28 +977,24 @@ void MacroAssembler::zero_memory(Register start, Register end, Register tmp) {

 void MacroAssembler::arm_stack_overflow_check(int frame_size_in_bytes, Register tmp) {
   // Version of AbstractAssembler::generate_stack_overflow_check optimized for ARM
-  if (UseStackBanging) {
-    const int page_size = os::vm_page_size();
+  const int page_size = os::vm_page_size();

-    sub_slow(tmp, SP, StackOverflow::stack_shadow_zone_size());
-    strb(R0, Address(tmp));
-    for (; frame_size_in_bytes >= page_size; frame_size_in_bytes -= 0xff0) {
-      strb(R0, Address(tmp, -0xff0, pre_indexed));
-    }
-  }
+  sub_slow(tmp, SP, StackOverflow::stack_shadow_zone_size());
+  strb(R0, Address(tmp));
+  for (; frame_size_in_bytes >= page_size; frame_size_in_bytes -= 0xff0) {
+    strb(R0, Address(tmp, -0xff0, pre_indexed));
+  }
 }

 void MacroAssembler::arm_stack_overflow_check(Register Rsize, Register tmp) {
-  if (UseStackBanging) {
-    Label loop;
+  Label loop;

-    mov(tmp, SP);
-    add_slow(Rsize, Rsize, StackOverflow::stack_shadow_zone_size() - os::vm_page_size());
-    bind(loop);
-    subs(Rsize, Rsize, 0xff0);
-    strb(R0, Address(tmp, -0xff0, pre_indexed));
-    b(loop, hi);
-  }
+  mov(tmp, SP);
+  add_slow(Rsize, Rsize, StackOverflow::stack_shadow_zone_size() - os::vm_page_size());
+  bind(loop);
+  subs(Rsize, Rsize, 0xff0);
+  strb(R0, Address(tmp, -0xff0, pre_indexed));
+  b(loop, hi);
 }

 void MacroAssembler::stop(const char* msg) {
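The two arm_stack_overflow_check() helpers above step by 0xff0 bytes rather than by a full page, apparently so that each pre-indexed strb offset stays within the 12-bit immediate field. The following standalone sketch, with assumed page and shadow-zone sizes, just enumerates the offsets the fixed-size variant would touch; it is illustrative only and does not emit any instructions.

// Sketch (assumed sizes) of the probe sequence for a fixed-size frame:
// one touch at the end of the shadow zone, then one touch every 0xff0 bytes
// while more than a page of frame remains.
#include <cstdio>

int main() {
  const int page_size = 4096;                  // stand-in for os::vm_page_size()
  const int shadow_zone_size = 8 * page_size;  // stand-in for StackOverflow::stack_shadow_zone_size()
  int frame_size_in_bytes = 3 * page_size;     // example frame spanning several pages

  int touch = shadow_zone_size;                // tmp = SP - shadow_zone_size; strb at [tmp]
  printf("touch SP - 0x%x\n", touch);
  for (; frame_size_in_bytes >= page_size; frame_size_in_bytes -= 0xff0) {
    touch += 0xff0;                            // strb at [tmp, #-0xff0]! moves the probe further down
    printf("touch SP - 0x%x\n", touch);
  }
  return 0;
}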
@@ -1,5 +1,5 @@
 /*
- * Copyright (c) 2008, 2020, Oracle and/or its affiliates. All rights reserved.
+ * Copyright (c) 2008, 2021, Oracle and/or its affiliates. All rights reserved.
  * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
  *
  * This code is free software; you can redistribute it and/or modify it

@@ -1504,18 +1504,17 @@ void SharedRuntime::generate_deopt_blob() {
   // Compilers generate code that bang the stack by as much as the
   // interpreter would need. So this stack banging should never
   // trigger a fault. Verify that it does not on non product builds.
-  // See if it is enough stack to push deoptimized frames
-  if (UseStackBanging) {
-    // The compiled method that we are deoptimizing was popped from the stack.
-    // If the stack bang results in a stack overflow, we don't return to the
-    // method that is being deoptimized. The stack overflow exception is
-    // propagated to the caller of the deoptimized method. Need to get the pc
-    // from the caller in LR and restore FP.
-    __ ldr(LR, Address(R2, 0));
-    __ ldr(FP, Address(Rublock, Deoptimization::UnrollBlock::initial_info_offset_in_bytes()));
-    __ ldr_s32(R8, Address(Rublock, Deoptimization::UnrollBlock::total_frame_sizes_offset_in_bytes()));
-    __ arm_stack_overflow_check(R8, Rtemp);
-  }
+  // See if it is enough stack to push deoptimized frames.
+  //
+  // The compiled method that we are deoptimizing was popped from the stack.
+  // If the stack bang results in a stack overflow, we don't return to the
+  // method that is being deoptimized. The stack overflow exception is
+  // propagated to the caller of the deoptimized method. Need to get the pc
+  // from the caller in LR and restore FP.
+  __ ldr(LR, Address(R2, 0));
+  __ ldr(FP, Address(Rublock, Deoptimization::UnrollBlock::initial_info_offset_in_bytes()));
+  __ ldr_s32(R8, Address(Rublock, Deoptimization::UnrollBlock::total_frame_sizes_offset_in_bytes()));
+  __ arm_stack_overflow_check(R8, Rtemp);
 #endif
   __ ldr_s32(R8, Address(Rublock, Deoptimization::UnrollBlock::number_of_frames_offset_in_bytes()));

@@ -1700,22 +1699,21 @@ void SharedRuntime::generate_uncommon_trap_blob() {

   __ add(SP, SP, Rtemp);

-  // See if it is enough stack to push deoptimized frames
+  // See if it is enough stack to push deoptimized frames.
 #ifdef ASSERT
   // Compilers generate code that bang the stack by as much as the
   // interpreter would need. So this stack banging should never
   // trigger a fault. Verify that it does not on non product builds.
-  if (UseStackBanging) {
-    // The compiled method that we are deoptimizing was popped from the stack.
-    // If the stack bang results in a stack overflow, we don't return to the
-    // method that is being deoptimized. The stack overflow exception is
-    // propagated to the caller of the deoptimized method. Need to get the pc
-    // from the caller in LR and restore FP.
-    __ ldr(LR, Address(R2, 0));
-    __ ldr(FP, Address(Rublock, Deoptimization::UnrollBlock::initial_info_offset_in_bytes()));
-    __ ldr_s32(R8, Address(Rublock, Deoptimization::UnrollBlock::total_frame_sizes_offset_in_bytes()));
-    __ arm_stack_overflow_check(R8, Rtemp);
-  }
+  //
+  // The compiled method that we are deoptimizing was popped from the stack.
+  // If the stack bang results in a stack overflow, we don't return to the
+  // method that is being deoptimized. The stack overflow exception is
+  // propagated to the caller of the deoptimized method. Need to get the pc
+  // from the caller in LR and restore FP.
+  __ ldr(LR, Address(R2, 0));
+  __ ldr(FP, Address(Rublock, Deoptimization::UnrollBlock::initial_info_offset_in_bytes()));
+  __ ldr_s32(R8, Address(Rublock, Deoptimization::UnrollBlock::total_frame_sizes_offset_in_bytes()));
+  __ arm_stack_overflow_check(R8, Rtemp);
 #endif
   __ ldr_s32(R8, Address(Rublock, Deoptimization::UnrollBlock::number_of_frames_offset_in_bytes()));
   __ ldr_s32(Rtemp, Address(Rublock, Deoptimization::UnrollBlock::caller_adjustment_offset_in_bytes()));
@@ -1,5 +1,5 @@
 //
-// Copyright (c) 2011, 2020, Oracle and/or its affiliates. All rights reserved.
+// Copyright (c) 2011, 2021, Oracle and/or its affiliates. All rights reserved.
 // Copyright (c) 2012, 2020 SAP SE. All rights reserved.
 // DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
 //

@@ -1430,7 +1430,7 @@ void MachPrologNode::emit(CodeBuffer &cbuf, PhaseRegAlloc *ra_) const {

   int bangsize = C->output()->bang_size_in_bytes();
   assert(bangsize >= framesize || bangsize <= 0, "stack bang size incorrect");
-  if (C->output()->need_stack_bang(bangsize) && UseStackBanging) {
+  if (C->output()->need_stack_bang(bangsize)) {
     // Unfortunately we cannot use the function provided in
     // assembler.cpp as we have to emulate the pipes. So I had to
     // insert the code of generate_stack_overflow_check(), see

@@ -1484,7 +1484,7 @@ void MachPrologNode::emit(CodeBuffer &cbuf, PhaseRegAlloc *ra_) const {
       bang_offset += page_size;
     }
     // R11 trashed
-  } // C->output()->need_stack_bang(framesize) && UseStackBanging
+  } // C->output()->need_stack_bang(framesize)

   unsigned int bytes = (unsigned int)framesize;
   long offset = Assembler::align_addr(bytes, frame::alignment_in_bytes);
@@ -1,5 +1,5 @@
 /*
- * Copyright (c) 2014, 2020, Oracle and/or its affiliates. All rights reserved.
+ * Copyright (c) 2014, 2021, Oracle and/or its affiliates. All rights reserved.
  * Copyright (c) 2015, 2019 SAP SE. All rights reserved.
  * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
  *

@@ -1185,14 +1185,12 @@ void TemplateInterpreterGenerator::bang_stack_shadow_pages(bool native_call) {
   // Bang each page in the shadow zone. We can't assume it's been done for
   // an interpreter frame with greater than a page of locals, so each page
   // needs to be checked. Only true for non-native.
-  if (UseStackBanging) {
-    const int page_size = os::vm_page_size();
-    const int n_shadow_pages = ((int)StackOverflow::stack_shadow_zone_size()) / page_size;
-    const int start_page = native_call ? n_shadow_pages : 1;
-    BLOCK_COMMENT("bang_stack_shadow_pages:");
-    for (int pages = start_page; pages <= n_shadow_pages; pages++) {
-      __ bang_stack_with_offset(pages*page_size);
-    }
-  }
+  const int page_size = os::vm_page_size();
+  const int n_shadow_pages = ((int)StackOverflow::stack_shadow_zone_size()) / page_size;
+  const int start_page = native_call ? n_shadow_pages : 1;
+  BLOCK_COMMENT("bang_stack_shadow_pages:");
+  for (int pages = start_page; pages <= n_shadow_pages; pages++) {
+    __ bang_stack_with_offset(pages*page_size);
+  }
 }
@@ -1,5 +1,5 @@
 //
-// Copyright (c) 2017, 2020, Oracle and/or its affiliates. All rights reserved.
+// Copyright (c) 2017, 2021, Oracle and/or its affiliates. All rights reserved.
 // Copyright (c) 2017, 2020 SAP SE. All rights reserved.
 // DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
 //

@@ -862,7 +862,7 @@ void MachPrologNode::format(PhaseRegAlloc *ra_, outputStream *st) const {
   // careful, because some VM calls (such as call site linkage) can
   // use several kilobytes of stack. But the stack safety zone should
   // account for that. See bugs 4446381, 4468289, 4497237.
-  if (C->output()->need_stack_bang(bangsize) && UseStackBanging) {
+  if (C->output()->need_stack_bang(bangsize)) {
     st->print_cr("# stack bang"); st->print("\t");
   }
   st->print_cr("push_frame %d", (int)-framesize);

@@ -903,7 +903,7 @@ void MachPrologNode::emit(CodeBuffer &cbuf, PhaseRegAlloc *ra_) const {
   // careful, because some VM calls (such as call site linkage) can
   // use several kilobytes of stack. But the stack safety zone should
   // account for that. See bugs 4446381, 4468289, 4497237.
-  if (C->output()->need_stack_bang(bangsize) && UseStackBanging) {
+  if (C->output()->need_stack_bang(bangsize)) {
     __ generate_stack_overflow_check(bangsize);
   }
@@ -1,5 +1,5 @@
 /*
- * Copyright (c) 2016, 2020, Oracle and/or its affiliates. All rights reserved.
+ * Copyright (c) 2016, 2021, Oracle and/or its affiliates. All rights reserved.
  * Copyright (c) 2016, 2020 SAP SE. All rights reserved.
  * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
  *

@@ -2066,13 +2066,11 @@ void TemplateInterpreterGenerator::bang_stack_shadow_pages(bool native_call) {
   // Bang each page in the shadow zone. We can't assume it's been done for
   // an interpreter frame with greater than a page of locals, so each page
   // needs to be checked. Only true for non-native. For native, we only bang the last page.
-  if (UseStackBanging) {
-    const int page_size = os::vm_page_size();
-    const int n_shadow_pages = (int)(StackOverflow::stack_shadow_zone_size()/page_size);
-    const int start_page_num = native_call ? n_shadow_pages : 1;
-    for (int pages = start_page_num; pages <= n_shadow_pages; pages++) {
-      __ bang_stack_with_offset(pages*page_size);
-    }
-  }
+  const int page_size = os::vm_page_size();
+  const int n_shadow_pages = (int)(StackOverflow::stack_shadow_zone_size()/page_size);
+  const int start_page_num = native_call ? n_shadow_pages : 1;
+  for (int pages = start_page_num; pages <= n_shadow_pages; pages++) {
+    __ bang_stack_with_offset(pages*page_size);
+  }
 }
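The interpreter's bang_stack_shadow_pages(), which has the same shape on aarch64, ppc, and s390 above and on x86 further below, touches every shadow page for Java entries but only the last page for native calls. A standalone sketch of the offsets it would bang, with assumed values for the page size and shadow-zone depth (it only prints; the real code emits stores):

// Which shadow-zone pages get banged, depending on native_call.
#include <cstdio>

static void list_banged_pages(bool native_call) {
  const int page_size = 4096;                        // stand-in for os::vm_page_size()
  const int n_shadow_pages = 8;                      // stand-in for shadow zone size / page size
  const int start_page = native_call ? n_shadow_pages : 1;
  for (int pages = start_page; pages <= n_shadow_pages; pages++) {
    printf("%s: touch SP - %d\n", native_call ? "native" : "java", pages * page_size);
  }
}

int main() {
  list_banged_pages(false);  // interpreted entry: pages 1..8
  list_banged_pages(true);   // native wrapper: page 8 only
  return 0;
}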
@@ -356,7 +356,7 @@ void C1_MacroAssembler::remove_frame(int frame_size_in_bytes) {


 void C1_MacroAssembler::verified_entry() {
-  if (C1Breakpoint || VerifyFPU || !UseStackBanging) {
+  if (C1Breakpoint || VerifyFPU) {
     // Verified Entry first instruction should be 5 bytes long for correct
     // patching by patch_verified_entry().
     //
@@ -1,5 +1,5 @@
 /*
- * Copyright (c) 2003, 2020, Oracle and/or its affiliates. All rights reserved.
+ * Copyright (c) 2003, 2021, Oracle and/or its affiliates. All rights reserved.
  * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
  *
  * This code is free software; you can redistribute it and/or modify it

@@ -1597,13 +1597,7 @@ nmethod* SharedRuntime::generate_native_wrapper(MacroAssembler* masm,
   // instruction fits that requirement.

   // Generate stack overflow check
-
-  if (UseStackBanging) {
-    __ bang_stack_with_offset((int)StackOverflow::stack_shadow_zone_size());
-  } else {
-    // need a 5 byte instruction to allow MT safe patching to non-entrant
-    __ fat_nop();
-  }
+  __ bang_stack_with_offset((int)StackOverflow::stack_shadow_zone_size());

   // Generate a new frame for the wrapper.
   __ enter();

@@ -2430,10 +2424,8 @@ void SharedRuntime::generate_deopt_blob() {
   // Compilers generate code that bang the stack by as much as the
   // interpreter would need. So this stack banging should never
   // trigger a fault. Verify that it does not on non product builds.
-  if (UseStackBanging) {
-    __ movl(rbx, Address(rdi ,Deoptimization::UnrollBlock::total_frame_sizes_offset_in_bytes()));
-    __ bang_stack_size(rbx, rcx);
-  }
+  __ movl(rbx, Address(rdi ,Deoptimization::UnrollBlock::total_frame_sizes_offset_in_bytes()));
+  __ bang_stack_size(rbx, rcx);
 #endif

   // Load array of frame pcs into ECX

@@ -2656,10 +2648,8 @@ void SharedRuntime::generate_uncommon_trap_blob() {
   // Compilers generate code that bang the stack by as much as the
   // interpreter would need. So this stack banging should never
   // trigger a fault. Verify that it does not on non product builds.
-  if (UseStackBanging) {
-    __ movl(rbx, Address(rdi ,Deoptimization::UnrollBlock::total_frame_sizes_offset_in_bytes()));
-    __ bang_stack_size(rbx, rcx);
-  }
+  __ movl(rbx, Address(rdi ,Deoptimization::UnrollBlock::total_frame_sizes_offset_in_bytes()));
+  __ bang_stack_size(rbx, rcx);
 #endif

   // Load array of frame pcs into ECX
@@ -1,5 +1,5 @@
 /*
- * Copyright (c) 2003, 2020, Oracle and/or its affiliates. All rights reserved.
+ * Copyright (c) 2003, 2021, Oracle and/or its affiliates. All rights reserved.
  * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
  *
  * This code is free software; you can redistribute it and/or modify it

@@ -1958,13 +1958,7 @@ nmethod* SharedRuntime::generate_native_wrapper(MacroAssembler* masm,
   // instruction fits that requirement.

   // Generate stack overflow check
-
-  if (UseStackBanging) {
-    __ bang_stack_with_offset((int)StackOverflow::stack_shadow_zone_size());
-  } else {
-    // need a 5 byte instruction to allow MT safe patching to non-entrant
-    __ fat_nop();
-  }
+  __ bang_stack_with_offset((int)StackOverflow::stack_shadow_zone_size());

   // Generate a new frame for the wrapper.
   __ enter();

@@ -2874,10 +2868,8 @@ void SharedRuntime::generate_deopt_blob() {
   // Compilers generate code that bang the stack by as much as the
   // interpreter would need. So this stack banging should never
   // trigger a fault. Verify that it does not on non product builds.
-  if (UseStackBanging) {
-    __ movl(rbx, Address(rdi, Deoptimization::UnrollBlock::total_frame_sizes_offset_in_bytes()));
-    __ bang_stack_size(rbx, rcx);
-  }
+  __ movl(rbx, Address(rdi, Deoptimization::UnrollBlock::total_frame_sizes_offset_in_bytes()));
+  __ bang_stack_size(rbx, rcx);
 #endif

   // Load address of array of frame pcs into rcx

@@ -3077,10 +3069,8 @@ void SharedRuntime::generate_uncommon_trap_blob() {
   // Compilers generate code that bang the stack by as much as the
   // interpreter would need. So this stack banging should never
   // trigger a fault. Verify that it does not on non product builds.
-  if (UseStackBanging) {
-    __ movl(rbx, Address(rdi ,Deoptimization::UnrollBlock::total_frame_sizes_offset_in_bytes()));
-    __ bang_stack_size(rbx, rcx);
-  }
+  __ movl(rbx, Address(rdi ,Deoptimization::UnrollBlock::total_frame_sizes_offset_in_bytes()));
+  __ bang_stack_size(rbx, rcx);
 #endif

   // Load address of array of frame pcs into rcx (address*)
@@ -1,5 +1,5 @@
 /*
- * Copyright (c) 2003, 2020, Oracle and/or its affiliates. All rights reserved.
+ * Copyright (c) 2003, 2021, Oracle and/or its affiliates. All rights reserved.
  * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
  *
  * This code is free software; you can redistribute it and/or modify it

@@ -771,13 +771,11 @@ void TemplateInterpreterGenerator::bang_stack_shadow_pages(bool native_call) {
   // Bang each page in the shadow zone. We can't assume it's been done for
   // an interpreter frame with greater than a page of locals, so each page
   // needs to be checked. Only true for non-native.
-  if (UseStackBanging) {
-    const int page_size = os::vm_page_size();
-    const int n_shadow_pages = ((int)StackOverflow::stack_shadow_zone_size()) / page_size;
-    const int start_page = native_call ? n_shadow_pages : 1;
-    for (int pages = start_page; pages <= n_shadow_pages; pages++) {
-      __ bang_stack_with_offset(pages*page_size);
-    }
-  }
+  const int page_size = os::vm_page_size();
+  const int n_shadow_pages = ((int)StackOverflow::stack_shadow_zone_size()) / page_size;
+  const int start_page = native_call ? n_shadow_pages : 1;
+  for (int pages = start_page; pages <= n_shadow_pages; pages++) {
+    __ bang_stack_with_offset(pages*page_size);
+  }
 }
@@ -1,5 +1,5 @@
 /*
- * Copyright (c) 1997, 2020, Oracle and/or its affiliates. All rights reserved.
+ * Copyright (c) 1997, 2021, Oracle and/or its affiliates. All rights reserved.
  * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
  *
  * This code is free software; you can redistribute it and/or modify it

@@ -121,36 +121,34 @@ void AbstractAssembler::bind(Label& L) {
 }

 void AbstractAssembler::generate_stack_overflow_check(int frame_size_in_bytes) {
-  if (UseStackBanging) {
-    // Each code entry causes one stack bang n pages down the stack where n
-    // is configurable by StackShadowPages. The setting depends on the maximum
-    // depth of VM call stack or native before going back into java code,
-    // since only java code can raise a stack overflow exception using the
-    // stack banging mechanism. The VM and native code does not detect stack
-    // overflow.
-    // The code in JavaCalls::call() checks that there is at least n pages
-    // available, so all entry code needs to do is bang once for the end of
-    // this shadow zone.
-    // The entry code may need to bang additional pages if the framesize
-    // is greater than a page.
+  // Each code entry causes one stack bang n pages down the stack where n
+  // is configurable by StackShadowPages. The setting depends on the maximum
+  // depth of VM call stack or native before going back into java code,
+  // since only java code can raise a stack overflow exception using the
+  // stack banging mechanism. The VM and native code does not detect stack
+  // overflow.
+  // The code in JavaCalls::call() checks that there is at least n pages
+  // available, so all entry code needs to do is bang once for the end of
+  // this shadow zone.
+  // The entry code may need to bang additional pages if the framesize
+  // is greater than a page.

-    const int page_size = os::vm_page_size();
-    int bang_end = (int)StackOverflow::stack_shadow_zone_size();
+  const int page_size = os::vm_page_size();
+  int bang_end = (int)StackOverflow::stack_shadow_zone_size();

-    // This is how far the previous frame's stack banging extended.
-    const int bang_end_safe = bang_end;
+  // This is how far the previous frame's stack banging extended.
+  const int bang_end_safe = bang_end;

-    if (frame_size_in_bytes > page_size) {
-      bang_end += frame_size_in_bytes;
-    }
+  if (frame_size_in_bytes > page_size) {
+    bang_end += frame_size_in_bytes;
+  }

-    int bang_offset = bang_end_safe;
-    while (bang_offset <= bang_end) {
-      // Need at least one stack bang at end of shadow zone.
-      bang_stack_with_offset(bang_offset);
-      bang_offset += page_size;
-    }
-  } // end (UseStackBanging)
+  int bang_offset = bang_end_safe;
+  while (bang_offset <= bang_end) {
+    // Need at least one stack bang at end of shadow zone.
+    bang_stack_with_offset(bang_offset);
+    bang_offset += page_size;
+  }
 }

 void Label::add_patch_at(CodeBuffer* cb, int branch_loc, const char* file, int line) {
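The comment block kept above states the shared policy: bang once at the end of the shadow zone, and keep banging one page at a time past the new frame when the frame itself is larger than a page. A standalone sketch of the resulting offset sequence, under assumed values (4 KiB pages, an 8-page shadow zone, a 10000-byte frame); it mirrors the loop in generate_stack_overflow_check() but only prints the offsets:

// Offsets that the shared generate_stack_overflow_check() loop walks through.
#include <cstdio>

int main() {
  const int page_size = 4096;                  // stand-in for os::vm_page_size()
  const int shadow_zone_size = 8 * page_size;  // stand-in for StackOverflow::stack_shadow_zone_size()
  const int frame_size_in_bytes = 10000;       // example frame larger than one page

  int bang_end = shadow_zone_size;
  const int bang_end_safe = bang_end;          // the previous frame already banged this far
  if (frame_size_in_bytes > page_size) {
    bang_end += frame_size_in_bytes;           // large frame: bang past the new frame too
  }

  // One touch per page from the end of the shadow zone out to bang_end.
  for (int bang_offset = bang_end_safe; bang_offset <= bang_end; bang_offset += page_size) {
    printf("bang at SP - %d\n", bang_offset);  // where bang_stack_with_offset() would store
  }
  return 0;
}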
@@ -249,7 +249,6 @@ JVMCIObjectArray CompilerToVM::initialize_intrinsics(JVMCI_TRAPS) {
   do_bool_flag(UseSHA512Intrinsics) \
   X86_ONLY(do_intx_flag(UseSSE)) \
   COMPILER2_PRESENT(do_bool_flag(UseSquareToLenIntrinsic)) \
-  do_bool_flag(UseStackBanging) \
   do_bool_flag(UseTLAB) \
   do_bool_flag(VerifyOops) \
@@ -1,5 +1,5 @@
 /*
- * Copyright (c) 1998, 2020, Oracle and/or its affiliates. All rights reserved.
+ * Copyright (c) 1998, 2021, Oracle and/or its affiliates. All rights reserved.
  * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
  *
  * This code is free software; you can redistribute it and/or modify it

@@ -440,7 +440,7 @@ bool PhaseOutput::need_stack_bang(int frame_size_in_bytes) const {
   // unexpected stack overflow (compiled method stack banging should
   // guarantee it doesn't happen) so we always need the stack bang in
   // a debug VM.
-  return (UseStackBanging && C->stub_function() == NULL &&
+  return (C->stub_function() == NULL &&
           (C->has_java_calls() || frame_size_in_bytes > os::vm_page_size()>>3
            DEBUG_ONLY(|| true)));
 }
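For reference, the decision that remains in need_stack_bang() after the flag is gone can be read as the following standalone predicate. This is a hedged sketch with stand-in parameters, not the actual PhaseOutput code, which reads this state from the Compile object; debug_build models the DEBUG_ONLY(|| true) term.

// Sketch of the surviving need_stack_bang() logic with stand-in inputs.
#include <cstdio>

static bool need_stack_bang_sketch(int frame_size_in_bytes, int vm_page_size,
                                   bool is_stub, bool has_java_calls,
                                   bool debug_build) {
  // Stubs never bang; other code bangs if it calls Java, uses more than
  // 1/8 of a page of frame, or runs in a debug build (which always bangs).
  return !is_stub &&
         (has_java_calls || frame_size_in_bytes > (vm_page_size >> 3) || debug_build);
}

int main() {
  printf("%d\n", need_stack_bang_sketch(256, 4096, false, false, false));   // 0: tiny leaf frame
  printf("%d\n", need_stack_bang_sketch(1024, 4096, false, false, false));  // 1: frame > page/8
  printf("%d\n", need_stack_bang_sketch(256, 4096, false, true, false));    // 1: makes Java calls
  return 0;
}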
@@ -973,11 +973,6 @@ const intx ObjectAlignmentInBytes = 8;
           "Inject thread creation failures for " \
           "UseDynamicNumberOfCompilerThreads") \
   \
-  develop(bool, UseStackBanging, true, \
-          "use stack banging for stack overflow checks (required for " \
-          "proper StackOverflow handling; disable only to measure cost " \
-          "of stackbanging)") \
-  \
   develop(bool, GenerateSynchronizationCode, true, \
           "generate locking/unlocking code for synchronized methods and " \
           "monitors") \
@@ -1,5 +1,5 @@
 /*
- * Copyright (c) 2011, 2020, Oracle and/or its affiliates. All rights reserved.
+ * Copyright (c) 2011, 2021, Oracle and/or its affiliates. All rights reserved.
  * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
  *
  * This code is free software; you can redistribute it and/or modify it

@@ -253,7 +253,7 @@ public class GraalHotSpotVMConfig extends GraalHotSpotVMConfigAccess {

     public final int stackShadowPages = getFlag("StackShadowPages", Integer.class);
     public final int stackReservedPages = getFlag("StackReservedPages", Integer.class, 0, JDK >= 9);
-    public final boolean useStackBanging = getFlag("UseStackBanging", Boolean.class);
+    public final boolean useStackBanging = getFlag("UseStackBanging", Boolean.class, true, JDK < 17);
     public final int stackBias = getConstant("STACK_BIAS", Integer.class, 0, JDK < 15);
     public final int vmPageSize = getFieldValue("CompilerToVM::Data::vm_page_size", Integer.class, "int");