#include "runtime/registerMap.hpp"
#include "utilities/align.hpp"
#include "utilities/debug.hpp"
+ #if INCLUDE_JVMCI
+ #include "jvmci/jvmciRuntime.hpp"
+ #endif

static int slow_path_size(nmethod* nm) {
  // The slow path code is out of line with C2.
@@ -57,40 +60,67 @@ static int entry_barrier_offset(nmethod* nm) {
  return 0;
}

- class NativeNMethodBarrier : public NativeInstruction {
-   address instruction_address() const { return addr_at(0); }
+ class NativeNMethodBarrier {
+   address _instruction_address;
+   int* _guard_addr;
+   nmethod* _nm;
+
+   address instruction_address() const { return _instruction_address; }
+
+   int* guard_addr() {
+     return _guard_addr;
+   }

  int local_guard_offset(nmethod* nm) {
    // It's the last instruction
    return (-entry_barrier_offset(nm)) - 4;
  }

-   int* guard_addr(nmethod* nm) {
-     if (nm->is_compiled_by_c2()) {
-       // With c2 compiled code, the guard is out-of-line in a stub
-       // We find it using the RelocIterator.
-       RelocIterator iter(nm);
-       while (iter.next()) {
-         if (iter.type() == relocInfo::entry_guard_type) {
-           entry_guard_Relocation* const reloc = iter.entry_guard_reloc();
-           return reinterpret_cast<int*>(reloc->addr());
+ public:
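+   // The constructor locates both the barrier instructions and the guard
+   // word for this nmethod; where each lives depends on the compiler.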
+   NativeNMethodBarrier(nmethod* nm): _nm(nm) {
+ #if INCLUDE_JVMCI
+     if (nm->is_compiled_by_jvmci()) {
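+       // For JVMCI compiled code the barrier sits at the entry patch offset
+       // recorded in the JVMCI nmethod data; the section_word relocation
+       // found there yields the address of the guard word.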
+       address pc = nm->code_begin() + nm->jvmci_nmethod_data()->nmethod_entry_patch_offset();
+       RelocIterator iter(nm, pc, pc + 4);
+       guarantee(iter.next(), "missing relocs");
+       guarantee(iter.type() == relocInfo::section_word_type, "unexpected reloc");
+
+       _guard_addr = (int*) iter.section_word_reloc()->target();
+       _instruction_address = pc;
+     } else
+ #endif
+       {
+         _instruction_address = nm->code_begin() + nm->frame_complete_offset() + entry_barrier_offset(nm);
+         if (nm->is_compiled_by_c2()) {
+           // With c2 compiled code, the guard is out-of-line in a stub
+           // We find it using the RelocIterator.
+           RelocIterator iter(nm);
+           while (iter.next()) {
+             if (iter.type() == relocInfo::entry_guard_type) {
+               entry_guard_Relocation* const reloc = iter.entry_guard_reloc();
+               _guard_addr = reinterpret_cast<int*>(reloc->addr());
+               return;
+             }
+           }
+           ShouldNotReachHere();
        }
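+         // Otherwise (not C2, not JVMCI) the guard is embedded in the
+         // barrier code itself, as its last 32-bit word.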
+         _guard_addr = reinterpret_cast<int*>(instruction_address() + local_guard_offset(nm));
      }
-       ShouldNotReachHere();
-     }
-     return reinterpret_cast<int*>(instruction_address() + local_guard_offset(nm));
  }

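+   // The guard word is read with acquire and written with release semantics,
+   // so a guard update is safely published to threads entering the nmethod.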
- public:
-   int get_value(nmethod* nm) {
-     return Atomic::load_acquire(guard_addr(nm));
+   int get_value() {
+     return Atomic::load_acquire(guard_addr());
  }

-   void set_value(nmethod* nm, int value) {
-     Atomic::release_store(guard_addr(nm), value);
+   void set_value(int value) {
+     Atomic::release_store(guard_addr(), value);
  }

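+   // check_barrier() reports an unexpected instruction encoding through msg;
+   // verify() wraps it in a debug-only assert.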
-   void verify() const;
+   bool check_barrier(err_msg& msg) const;
+   void verify() const {
+     err_msg msg("%s", "");
+     assert(check_barrier(msg), "%s", msg.buffer());
+   }
};

// Store the instruction bitmask, bits and name for checking the barrier.
@@ -108,16 +138,17 @@ static const struct CheckInsn barrierInsn[] = {
// The encodings must match the instructions emitted by
// BarrierSetAssembler::nmethod_entry_barrier. The matching ignores the specific
// register numbers and immediate values in the encoding.
- void NativeNMethodBarrier::verify() const {
+ bool NativeNMethodBarrier::check_barrier(err_msg& msg) const {
  intptr_t addr = (intptr_t) instruction_address();
  for (unsigned int i = 0; i < sizeof(barrierInsn)/sizeof(struct CheckInsn); i++) {
    uint32_t inst = *((uint32_t*) addr);
    if ((inst & barrierInsn[i].mask) != barrierInsn[i].bits) {
-       tty->print_cr("Addr: " INTPTR_FORMAT " Code: 0x%x", addr, inst);
-       fatal("not an %s instruction.", barrierInsn[i].name);
+       msg.print("Addr: " INTPTR_FORMAT " Code: 0x%x not an %s instruction", addr, inst, barrierInsn[i].name);
+       return false;
    }
    addr += 4;
  }
+   return true;
}

void BarrierSetNMethod::deoptimize(nmethod* nm, address* return_address_ptr) {
@@ -154,13 +185,6 @@ void BarrierSetNMethod::deoptimize(nmethod* nm, address* return_address_ptr) {
  new_frame->pc = SharedRuntime::get_handle_wrong_method_stub();
}

- static NativeNMethodBarrier* native_nmethod_barrier(nmethod* nm) {
-   address barrier_address = nm->code_begin() + nm->frame_complete_offset() + entry_barrier_offset(nm);
-   NativeNMethodBarrier* barrier = reinterpret_cast<NativeNMethodBarrier*>(barrier_address);
-   debug_only(barrier->verify());
-   return barrier;
- }
-
void BarrierSetNMethod::set_guard_value(nmethod* nm, int value) {
  if (!supports_entry_barrier(nm)) {
    return;
@@ -177,15 +201,22 @@ void BarrierSetNMethod::set_guard_value(nmethod* nm, int value) {
    bs_asm->increment_patching_epoch();
  }

-   NativeNMethodBarrier* barrier = native_nmethod_barrier(nm);
-   barrier->set_value(nm, value);
+   NativeNMethodBarrier barrier(nm);
+   barrier.set_value(value);
}

int BarrierSetNMethod::guard_value(nmethod* nm) {
  if (!supports_entry_barrier(nm)) {
    return disarmed_guard_value();
  }

-   NativeNMethodBarrier* barrier = native_nmethod_barrier(nm);
-   return barrier->get_value(nm);
+   NativeNMethodBarrier barrier(nm);
+   return barrier.get_value();
+ }
+
+ #if INCLUDE_JVMCI
+ bool BarrierSetNMethod::verify_barrier(nmethod* nm, err_msg& msg) {
+   NativeNMethodBarrier barrier(nm);
+   return barrier.check_barrier(msg);
}
+ #endif