Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

8343789: Move mutable nmethod data out of CodeCache #21276

Draft
wants to merge 5 commits into
base: master
Choose a base branch
from
Draft
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
3 changes: 1 addition & 2 deletions src/hotspot/cpu/aarch64/macroAssembler_aarch64.cpp
Original file line number Diff line number Diff line change
Expand Up @@ -5270,9 +5270,8 @@ void MacroAssembler::movoop(Register dst, jobject obj) {
mov(dst, Address((address)obj, rspec));
} else {
address dummy = address(uintptr_t(pc()) & -wordSize); // A nearby aligned address
ldr_constant(dst, Address(dummy, rspec));
mov(dst, Address(dummy, rspec));
Copy link
Contributor

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

Why is this needed?

Copy link
Contributor Author

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

  • it is not a load from a Constant Pool, so calling ldr_constant here seems incorrect
  • the ldr_constant function utilizes either ldr (with a range limit of ±1MB) or, when -XX:-NearCpool is enabled, adrp (range limit of ±2GB) followed by ldr — both of which may fall short when mutable data is allocated on the C heap.

}

}

// Move a metadata address into a register.
Expand Down
30 changes: 19 additions & 11 deletions src/hotspot/share/code/codeBlob.cpp
Original file line number Diff line number Diff line change
Expand Up @@ -63,42 +63,51 @@ unsigned int CodeBlob::align_code_offset(int offset) {

// This must be consistent with the CodeBlob constructor's layout actions.
unsigned int CodeBlob::allocation_size(CodeBuffer* cb, int header_size) {
unsigned int size = header_size;
size += align_up(cb->total_relocation_size(), oopSize);
// align the size to CodeEntryAlignment
size = align_code_offset(size);
unsigned int size = align_code_offset(header_size);
size += align_up(cb->total_content_size(), oopSize);
size += align_up(cb->total_oop_size(), oopSize);
size += align_up(cb->total_metadata_size(), oopSize);
return size;
}

CodeBlob::CodeBlob(const char* name, CodeBlobKind kind, CodeBuffer* cb, int size, uint16_t header_size,
int16_t frame_complete_offset, int frame_size, OopMapSet* oop_maps, bool caller_must_gc_arguments) :
int16_t frame_complete_offset, int frame_size, OopMapSet* oop_maps, bool caller_must_gc_arguments,
int mutable_data_size) :
_oop_maps(nullptr), // will be set by set_oop_maps() call
_name(name),
_size(size),
_relocation_size(align_up(cb->total_relocation_size(), oopSize)),
_content_offset(CodeBlob::align_code_offset(header_size + _relocation_size)),
_content_offset(CodeBlob::align_code_offset(header_size)),
_code_offset(_content_offset + cb->total_offset_of(cb->insts())),
_data_offset(_content_offset + align_up(cb->total_content_size(), oopSize)),
_frame_size(frame_size),
S390_ONLY(_ctable_offset(0) COMMA)
_header_size(header_size),
_frame_complete_offset(frame_complete_offset),
_kind(kind),
_caller_must_gc_arguments(caller_must_gc_arguments)
_caller_must_gc_arguments(caller_must_gc_arguments),
_mutable_data(nullptr),
_mutable_data_size(0)
{
assert(is_aligned(_size, oopSize), "unaligned size");
assert(is_aligned(header_size, oopSize), "unaligned size");
assert(is_aligned(_relocation_size, oopSize), "unaligned size");
assert(_data_offset <= _size, "codeBlob is too small: %d > %d", _data_offset, _size);
int code_end_offset = _content_offset + align_up(cb->total_content_size(), oopSize);
assert(code_end_offset == _size, "wrong codeBlob size: %d != %d", _size, code_end_offset);
assert(code_end() == content_end(), "must be the same - see code_end()");
#ifdef COMPILER1
// probably wrong for tiered
assert(_frame_size >= -1, "must use frame size or -1 for runtime stubs");
#endif // COMPILER1

// The mutable_data_size is either calculated by the nmethod constructor to account
// for reloc_info and additional data, or it is set here to accommodate only the relocation data.
_mutable_data_size = (mutable_data_size == 0) ? cb->total_relocation_size() : mutable_data_size;
if (_mutable_data_size > 0) {
_mutable_data = (address)os::malloc(_mutable_data_size, mtCode);
if (_mutable_data == nullptr) {
vm_exit_out_of_memory(_mutable_data_size, OOM_MALLOC_ERROR, "codebuffer: no space for mutable data");
}
}

set_oop_maps(oop_maps);
}

Expand All @@ -110,7 +119,6 @@ CodeBlob::CodeBlob(const char* name, CodeBlobKind kind, int size, uint16_t heade
_relocation_size(0),
_content_offset(CodeBlob::align_code_offset(header_size)),
_code_offset(_content_offset),
_data_offset(size),
_frame_size(0),
S390_ONLY(_ctable_offset(0) COMMA)
_header_size(header_size),
Expand Down
31 changes: 18 additions & 13 deletions src/hotspot/share/code/codeBlob.hpp
Original file line number Diff line number Diff line change
Expand Up @@ -108,8 +108,6 @@ class CodeBlob {
int _relocation_size; // size of relocation (could be bigger than 64Kb)
int _content_offset; // offset to where content region begins (this includes consts, insts, stubs)
int _code_offset; // offset to where instructions region begins (this includes insts, stubs)

int _data_offset; // offset to where data region begins
int _frame_size; // size of stack frame in words (NOT slots. On x64 these are 64bit words)

S390_ONLY(int _ctable_offset;)
Expand All @@ -124,13 +122,17 @@ class CodeBlob {

bool _caller_must_gc_arguments;

address _mutable_data;
int _mutable_data_size;

Comment on lines +125 to +127
Copy link
Contributor

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

Should we add special CodeBlob subclass for nmethod to avoid increase of size for all blobs and stubs?

#ifndef PRODUCT
AsmRemarks _asm_remarks;
DbgStrings _dbg_strings;
#endif

CodeBlob(const char* name, CodeBlobKind kind, CodeBuffer* cb, int size, uint16_t header_size,
int16_t frame_complete_offset, int frame_size, OopMapSet* oop_maps, bool caller_must_gc_arguments);
int16_t frame_complete_offset, int frame_size, OopMapSet* oop_maps, bool caller_must_gc_arguments,
int mutable_data_size = 0);

// Simple CodeBlob used for simple BufferBlob.
CodeBlob(const char* name, CodeBlobKind kind, int size, uint16_t header_size);
Expand Down Expand Up @@ -170,23 +172,27 @@ class CodeBlob {
UpcallStub* as_upcall_stub() const { assert(is_upcall_stub(), "must be upcall stub"); return (UpcallStub*) this; }
RuntimeStub* as_runtime_stub() const { assert(is_runtime_stub(), "must be runtime blob"); return (RuntimeStub*) this; }

address mutable_data_begin() const { return _mutable_data; }
address mutable_data_end() const { return _mutable_data + _mutable_data_size; }
int mutable_data_size() const { return _mutable_data_size; }

// Boundaries
address header_begin() const { return (address) this; }
address header_end() const { return ((address) this) + _header_size; }
relocInfo* relocation_begin() const { return (relocInfo*) header_end(); }
relocInfo* relocation_end() const { return (relocInfo*)(header_end() + _relocation_size); }
relocInfo* relocation_begin() const { return (relocInfo*)_mutable_data; }
relocInfo* relocation_end() const { return (relocInfo*)((address)relocation_begin() + _relocation_size); }
address content_begin() const { return (address) header_begin() + _content_offset; }
address content_end() const { return (address) header_begin() + _data_offset; }
address content_end() const { return (address) header_begin() + _size; }
address code_begin() const { return (address) header_begin() + _code_offset; }
// code_end == content_end is true for all types of blobs for now, it is also checked in the constructor
address code_end() const { return (address) header_begin() + _data_offset; }
address data_begin() const { return (address) header_begin() + _data_offset; }
address data_end() const { return (address) header_begin() + _size; }
address code_end() const { return (address) header_begin() + _size; }
address blob_end() const { return (address) header_begin() + _size; }

// [relocations, oops, metadata, jvmci_data] stays in _mutable_data
address mdata_begin() const { return mutable_data_begin(); }
address mdata_end() const { return mutable_data_end(); }
// Offsets
int content_offset() const { return _content_offset; }
int code_offset() const { return _code_offset; }
int data_offset() const { return _data_offset; }

// This field holds the beginning of the const section in the old code buffer.
// It is needed to fix relocations of pc-relative loads when resizing the
Expand All @@ -204,11 +210,10 @@ class CodeBlob {
// Only used from CodeCache::free_unused_tail() after the Interpreter blob was trimmed
void adjust_size(size_t used) {
_size = (int)used;
_data_offset = (int)used;
}

// Containment
bool blob_contains(address addr) const { return header_begin() <= addr && addr < data_end(); }
bool blob_contains(address addr) const { return header_begin() <= addr && addr < blob_end(); }
bool code_contains(address addr) const { return code_begin() <= addr && addr < code_end(); }
bool contains(address addr) const { return content_begin() <= addr && addr < content_end(); }
bool is_frame_complete_at(address addr) const { return _frame_complete_offset != CodeOffsets::frame_never_safe &&
Expand Down
Loading