/*
 * Copyright (c) 1997, 2018, Oracle and/or its affiliates. All rights reserved.
 * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
 *
 * This code is free software; you can redistribute it and/or modify it
 * under the terms of the GNU General Public License version 2 only, as
 * published by the Free Software Foundation.
 *
 * This code is distributed in the hope that it will be useful, but WITHOUT
 * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
 * FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
 * version 2 for more details (a copy is included in the LICENSE file that
 * accompanied this code).
 *
 * You should have received a copy of the GNU General Public License version
 * 2 along with this work; if not, write to the Free Software Foundation,
 * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
 *
 * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
 * or visit www.oracle.com if you need additional information or have any
 * questions.
 *
 */

#include "precompiled.hpp"
#include "classfile/javaClasses.inline.hpp"
#include "code/codeCache.hpp"
#include "code/debugInfoRec.hpp"
#include "code/nmethod.hpp"
#include "code/pcDesc.hpp"
#include "code/scopeDesc.hpp"
#include "interpreter/interpreter.hpp"
#include "interpreter/oopMapCache.hpp"
#include "oops/instanceKlass.hpp"
#include "oops/oop.inline.hpp"
#include "runtime/basicLock.hpp"
#include "runtime/frame.inline.hpp"
#include "runtime/handles.inline.hpp"
#include "runtime/monitorChunk.hpp"
#include "runtime/signature.hpp"
#include "runtime/stubRoutines.hpp"
#include "runtime/vframeArray.hpp"
#include "runtime/vframe_hp.hpp"
#ifdef COMPILER2
#include "opto/matcher.hpp"
#endif


// ------------- compiledVFrame --------------

StackValueCollection* compiledVFrame::locals() const {
  // Natives have no scope
  if (scope() == NULL) return new StackValueCollection(0);
  GrowableArray<ScopeValue*>* scv_list = scope()->locals();
  if (scv_list == NULL) return new StackValueCollection(0);

  // scv_list is the list of ScopeValues describing the JVM local variable state.
  // There is one scv_list entry for every local slot in use.
  int length = scv_list->length();
  StackValueCollection* result = new StackValueCollection(length);
  GrowableArray<ScopeValue*>* objects = scope()->objects();
  for (int i = 0; i < length; i++) {
    result->add(create_stack_value(get_scope_value(scv_list, i, objects)));
  }

  // Replace the original values with any stores that have been
  // performed through compiledVFrame::update_locals.
  GrowableArray<jvmtiDeferredLocalVariableSet*>* list = thread()->deferred_locals();
  if (list != NULL) {
    // The list is rarely present, and when it is it typically holds a single element.
    for (int i = 0; i < list->length(); i++) {
      if (list->at(i)->matches(this)) {
        list->at(i)->update_locals(result);
        break;
      }
    }
  }

  return result;
}


void compiledVFrame::set_locals(StackValueCollection* values) const {

  fatal("Should use update_local for each local update");
}

void compiledVFrame::update_local(BasicType type, int index, jvalue value) {
  assert(index >= 0 && index < method()->max_locals(), "out of bounds");
  update_deferred_value(type, index, value);
}

void compiledVFrame::update_stack(BasicType type, int index, jvalue value) {
  assert(index >= 0 && index < method()->max_stack(), "out of bounds");
  update_deferred_value(type, index + method()->max_locals(), value);
}

void compiledVFrame::update_monitor(int index, MonitorInfo* val) {
  assert(index >= 0, "out of bounds");
  jvalue value;
  value.l = cast_from_oop<jobject>(val->owner());
  update_deferred_value(T_OBJECT, index + method()->max_locals() + method()->max_stack(), value);
}

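// Record a deferred write to a local, an expression stack slot, or a monitor
// owner (the three index ranges are distinguished by offsets of max_locals()
// and max_stack()). The write is stored in a per-thread list of
// jvmtiDeferredLocalVariableSet entries and re-applied whenever locals(),
// expressions() or monitors() reconstruct this frame's state.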
void compiledVFrame::update_deferred_value(BasicType type, int index, jvalue value) {
  assert(fr().is_deoptimized_frame(), "frame must be scheduled for deoptimization");
  GrowableArray<jvmtiDeferredLocalVariableSet*>* deferred = thread()->deferred_locals();
  jvmtiDeferredLocalVariableSet* locals = NULL;
  if (deferred != NULL) {
    // See if this vframe already has locals with deferred writes
    for (int f = 0; f < deferred->length(); f++) {
      if (deferred->at(f)->matches(this)) {
        locals = deferred->at(f);
        break;
      }
    }
    // If no matching set was found we push a new one below.
  } else {
    // No deferred updates pending for this thread.
    // Allocate in C heap; the list is attached to the thread and must outlive
    // the current resource scope.
    deferred = new(ResourceObj::C_HEAP, mtCompiler) GrowableArray<jvmtiDeferredLocalVariableSet*> (1, mtCompiler);
    thread()->set_deferred_locals(deferred);
  }
  if (locals == NULL) {
    locals = new jvmtiDeferredLocalVariableSet(method(), bci(), fr().id(), vframe_id());
    deferred->push(locals);
    assert(locals->id() == fr().id(), "Huh? Must match");
  }
  locals->set_value_at(index, type, value);
}

StackValueCollection* compiledVFrame::expressions() const {
  // Natives have no scope
  if (scope() == NULL) return new StackValueCollection(0);
  GrowableArray<ScopeValue*>* scv_list = scope()->expressions();
  if (scv_list == NULL) return new StackValueCollection(0);

  // scv_list is the list of ScopeValues describing the JVM expression stack state.
  // There is one scv_list entry for every expression stack slot in use.
  int length = scv_list->length();
  StackValueCollection* result = new StackValueCollection(length);
  GrowableArray<ScopeValue*>* objects = scope()->objects();
  for (int i = 0; i < length; i++) {
    result->add(create_stack_value(get_scope_value(scv_list, i, objects)));
  }

  // Replace the original values with any stores that have been
  // performed through compiledVFrame::update_stack.
  GrowableArray<jvmtiDeferredLocalVariableSet*>* list = thread()->deferred_locals();
  if (list != NULL) {
    // The list is rarely present, and when it is it typically holds a single element.
    for (int i = 0; i < list->length(); i++) {
      if (list->at(i)->matches(this)) {
        list->at(i)->update_stack(result);
        break;
      }
    }
  }

  return result;
}


// The implementation of the following two methods was factored out into the
// class StackValue because it is also used from within deoptimization.cpp for
// rematerialization and relocking of non-escaping objects.

StackValue *compiledVFrame::create_stack_value(ScopeValue *sv) const {
  return StackValue::create_stack_value(&_fr, register_map(), sv);
}

BasicLock* compiledVFrame::resolve_monitor_lock(Location location) const {
  return StackValue::resolve_monitor_lock(&_fr, location);
}

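// Search the scope's objects list for a stack-allocated ObjectValue whose
// recorded stack location corresponds to oop_ptr (an address within this
// frame, taken relative to sp_base). Returns NULL when there is no match.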
ScopeValue *compiledVFrame::match_object_to_stack_oop(intptr_t *oop_ptr, intptr_t *sp_base, GrowableArray<ScopeValue*>* objects) const {
  if (objects == NULL) {
    return NULL;
  }
  for (int j = 0; j < objects->length(); j++) {
    ScopeValue* o_sv = objects->at(j);
    if (o_sv->is_object()) {
      if (o_sv->as_ObjectValue()->is_stack_object()) {
        StackObjectValue *sov = (StackObjectValue *)o_sv;
        Location o_loc = sov->get_stack_location();
        int o_offset = o_loc.stack_offset();
        int l_offset = (address)oop_ptr - (address)sp_base;
        if (o_offset == l_offset) {
          return o_sv;
        }
      }
    }
  }

  return NULL;
}

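// Fetch the ScopeValue for the given slot. If the slot holds an oop that
// points back into this frame (a stack-allocated object), the LocationValue
// is replaced in scv_list by the matching ObjectValue so callers see the
// object description rather than a raw stack address.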
ScopeValue *compiledVFrame::get_scope_value(GrowableArray<ScopeValue*>* scv_list, int index, GrowableArray<ScopeValue*>* objects) const {
  ScopeValue* sv = scv_list->at(index);
  if (sv->is_location()) {
    if ((objects != NULL) && (objects->length() > 0)) {
      LocationValue* lv = (LocationValue *)sv;
      Location loc = lv->location();
      intptr_t *oop_ptr;
      intptr_t *sp_base = _fr.unextended_sp();
      intptr_t *sp_top = sp_base + _fr.cb()->frame_size();
      if (loc.is_stack() && (loc.type() == Location::oop)) {
        address value_addr = ((address)sp_base) + loc.stack_offset();
        oop val = *(oop *)value_addr;
        oop_ptr = cast_from_oop<intptr_t *>(val);
      } else if (loc.is_register() && (loc.type() == Location::oop)) {
        address value_addr = register_map()->location(VMRegImpl::as_VMReg(loc.register_number()));
        oop val = *(oop *)value_addr;
        oop_ptr = cast_from_oop<intptr_t *>(val);
      } else {
        assert(loc.type() != Location::oop, "Cannot be an oop");
        return sv;
      }
      if (sp_base <= oop_ptr && oop_ptr < sp_top) {
        ScopeValue* o_sv = match_object_to_stack_oop(oop_ptr, sp_base, objects);
        if (o_sv != NULL) {
          scv_list->at_put(index, o_sv);
          sv = o_sv;
        } else {
          assert(false, "did not find stack oop for object on stack");
        }
      }
    }
  } else if (sv->is_object()) {
    oop o = sv->as_ObjectValue()->value()();
    intptr_t *sp_base = _fr.unextended_sp();
    intptr_t *sp_top = sp_base + _fr.cb()->frame_size();
    intptr_t *oop_ptr = cast_from_oop<intptr_t *>(o);
    if (sp_base <= oop_ptr && oop_ptr < sp_top) {
      ScopeValue* o_sv = match_object_to_stack_oop(oop_ptr, sp_base, objects);
      if (o_sv != NULL) {
        assert(sv == o_sv, "Objects need to match");
        sv = o_sv;
      } else {
        assert(false, "did not find stack oop for object on stack");
      }
    }
    assert(oopDesc::is_oop_or_null(sv->as_ObjectValue()->value()()), "needs to be an oop");
  }
  return sv;
}


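// Build the list of MonitorInfo entries for this scope. For native methods
// (which have no scope) the receiver/monitor is read directly from the frame;
// otherwise owners and BasicLocks are resolved from the monitor debug info,
// with scalar-replaced owners represented by their klass mirror.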
GrowableArray<MonitorInfo*>* compiledVFrame::monitors() const {
  // Natives have no scope
  if (scope() == NULL) {
    CompiledMethod* nm = code();
    Method* method = nm->method();
    assert(method->is_native() || nm->is_aot(), "Expect a native method or precompiled method");
    if (!method->is_synchronized()) {
      return new GrowableArray<MonitorInfo*>(0);
    }
    // This monitor is really only needed for UseBiasedLocking, but
    // return it in all cases for now as it might be useful for stack
    // traces and tools as well
    GrowableArray<MonitorInfo*> *monitors = new GrowableArray<MonitorInfo*>(1);
    // Casting away const
    frame& fr = (frame&) _fr;
    MonitorInfo* info = new MonitorInfo(
        fr.get_native_receiver(), fr.get_native_monitor(), false, false);
    monitors->push(info);
    return monitors;
  }
  GrowableArray<MonitorValue*>* monitors = scope()->monitors();
  if (monitors == NULL) {
    return new GrowableArray<MonitorInfo*>(0);
  }
  GrowableArray<MonitorInfo*>* result = new GrowableArray<MonitorInfo*>(monitors->length());
  for (int index = 0; index < monitors->length(); index++) {
    MonitorValue* mv = monitors->at(index);
    ScopeValue* ov = mv->owner();
    StackValue *owner_sv = create_stack_value(ov); // it is an oop
    if (ov->is_object() && owner_sv->obj_is_scalar_replaced()) { // The owner object was scalar replaced
      assert(mv->eliminated(), "monitor should be eliminated for scalar replaced object");
      // Put klass for scalar replaced object.
      ScopeValue* kv = ((ObjectValue *)ov)->klass();
      assert(kv->is_constant_oop(), "klass should be oop constant for scalar replaced object");
      Handle k(Thread::current(), ((ConstantOopReadValue*)kv)->value()());
      assert(java_lang_Class::is_instance(k()), "must be");
      result->push(new MonitorInfo(k(), resolve_monitor_lock(mv->basic_lock()),
                                   mv->eliminated(), true));
    } else {
      result->push(new MonitorInfo(owner_sv->get_obj()(), resolve_monitor_lock(mv->basic_lock()),
                                   mv->eliminated(), false));
    }
  }

  // Replace the original values with any stores that have been
  // performed through compiledVFrame::update_monitors.
  GrowableArray<jvmtiDeferredLocalVariableSet*>* list = thread()->deferred_locals();
  if (list != NULL) {
    // The list is rarely present, and when it is it typically holds a single element.
    for (int i = 0; i < list->length(); i++) {
      if (list->at(i)->matches(this)) {
        list->at(i)->update_monitors(result);
        break;
      }
    }
  }

  return result;
}


compiledVFrame::compiledVFrame(const frame* fr, const RegisterMap* reg_map, JavaThread* thread, CompiledMethod* nm)
: javaVFrame(fr, reg_map, thread) {
  _scope = NULL;
  _vframe_id = 0;
  // Compiled method (native stub or Java code)
  // native wrappers have no scope data; it is implied
  if (!nm->is_compiled() || !nm->as_compiled_method()->is_native_method()) {
    _scope = nm->scope_desc_at(_fr.pc());
  }
}

compiledVFrame::compiledVFrame(const frame* fr, const RegisterMap* reg_map, JavaThread* thread, ScopeDesc* scope, int vframe_id)
: javaVFrame(fr, reg_map, thread) {
  _scope = scope;
  _vframe_id = vframe_id;
  guarantee(_scope != NULL, "scope must be present");
}

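// Return the compiledVFrame describing the scope at decode_offset within the
// same physical frame, reusing this vframe when it already matches.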
compiledVFrame* compiledVFrame::at_scope(int decode_offset, int vframe_id) {
  if (scope()->decode_offset() != decode_offset) {
    ScopeDesc* scope = this->scope()->at_offset(decode_offset);
    return new compiledVFrame(frame_pointer(), register_map(), thread(), scope, vframe_id);
  }
  assert(_vframe_id == vframe_id, "wrong frame id");
  return this;
}

bool compiledVFrame::is_top() const {
  // FIX IT: Remove this when new native stubs are in place
  if (scope() == NULL) return true;
  return scope()->is_top();
}


CompiledMethod* compiledVFrame::code() const {
  return CodeCache::find_compiled(_fr.pc());
}


Method* compiledVFrame::method() const {
  if (scope() == NULL) {
    // native nmethods have no scope; the method is implied
    nmethod* nm = code()->as_nmethod();
    assert(nm->is_native_method(), "must be native");
    return nm->method();
  }
  return scope()->method();
}


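// SynchronizationEntryBCI marks the synthetic state at a synchronized
// method's entry; report it as bci 0, all other bcis are returned unchanged.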
int compiledVFrame::bci() const {
  int raw = raw_bci();
  return raw == SynchronizationEntryBCI ? 0 : raw;
}


int compiledVFrame::raw_bci() const {
  if (scope() == NULL) {
    // native nmethods have no scope; the method/bci is implied
    nmethod* nm = code()->as_nmethod();
    assert(nm->is_native_method(), "must be native");
    return 0;
  }
  return scope()->bci();
}

bool compiledVFrame::should_reexecute() const {
  if (scope() == NULL) {
    // native nmethods have no scope; the method/bci is implied
    nmethod* nm = code()->as_nmethod();
    assert(nm->is_native_method(), "must be native");
    return false;
  }
  return scope()->should_reexecute();
}

vframe* compiledVFrame::sender() const {
  const frame f = fr();
  if (scope() == NULL) {
    // native nmethods have no scope; the method/bci is implied
    nmethod* nm = code()->as_nmethod();
    assert(nm->is_native_method(), "must be native");
    return vframe::sender();
  } else {
    return scope()->is_top()
      ? vframe::sender()
      : new compiledVFrame(&f, register_map(), thread(), scope()->sender(), vframe_id() + 1);
  }
}

jvmtiDeferredLocalVariableSet::jvmtiDeferredLocalVariableSet(Method* method, int bci, intptr_t* id, int vframe_id) {
  _method = method;
  _bci = bci;
  _id = id;
  _vframe_id = vframe_id;
  // We always need at least one entry; must be on the C heap
  _locals = new(ResourceObj::C_HEAP, mtCompiler) GrowableArray<jvmtiDeferredLocalVariable*> (1, mtCompiler);
}

jvmtiDeferredLocalVariableSet::~jvmtiDeferredLocalVariableSet() {
  for (int i = 0; i < _locals->length(); i++) {
    delete _locals->at(i);
  }
  // Free the GrowableArray and its C-heap elements
  delete _locals;
}

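// A deferred set applies to a compiledVFrame when the physical frame id and
// the vframe id within that frame both agree.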
bool jvmtiDeferredLocalVariableSet::matches(const vframe* vf) {
  if (!vf->is_compiled_frame()) return false;
  compiledVFrame* cvf = (compiledVFrame*)vf;
  if (cvf->fr().id() == id() && cvf->vframe_id() == vframe_id()) {
    assert(cvf->method() == method() && cvf->bci() == bci(), "must agree");
    return true;
  }
  return false;
}

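// Overwrite the deferred entry for this index if one exists, otherwise record
// a new jvmtiDeferredLocalVariable.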
void jvmtiDeferredLocalVariableSet::set_value_at(int idx, BasicType type, jvalue val) {
  for (int i = 0; i < _locals->length(); i++) {
    if (_locals->at(i)->index() == idx) {
      assert(_locals->at(i)->type() == type, "Wrong type");
      _locals->at(i)->set_value(val);
      return;
    }
  }
  _locals->push(new jvmtiDeferredLocalVariable(idx, type, val));
}

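// Copy one deferred jvalue into the given StackValueCollection slot, widening
// sub-int types to int and wrapping object values in a Handle.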
void jvmtiDeferredLocalVariableSet::update_value(StackValueCollection* locals, BasicType type, int index, jvalue value) {
  switch (type) {
    case T_BOOLEAN:
      locals->set_int_at(index, value.z);
      break;
    case T_CHAR:
      locals->set_int_at(index, value.c);
      break;
    case T_FLOAT:
      locals->set_float_at(index, value.f);
      break;
    case T_DOUBLE:
      locals->set_double_at(index, value.d);
      break;
    case T_BYTE:
      locals->set_int_at(index, value.b);
      break;
    case T_SHORT:
      locals->set_int_at(index, value.s);
      break;
    case T_INT:
      locals->set_int_at(index, value.i);
      break;
    case T_LONG:
      locals->set_long_at(index, value.j);
      break;
    case T_OBJECT:
      {
        Handle obj(Thread::current(), (oop)value.l);
        locals->set_obj_at(index, obj);
      }
      break;
    default:
      ShouldNotReachHere();
  }
}

void jvmtiDeferredLocalVariableSet::update_locals(StackValueCollection* locals) {
  for (int l = 0; l < _locals->length(); l++) {
    jvmtiDeferredLocalVariable* val = _locals->at(l);
    if (val->index() >= 0 && val->index() < method()->max_locals()) {
      update_value(locals, val->type(), val->index(), val->value());
    }
  }
}


void jvmtiDeferredLocalVariableSet::update_stack(StackValueCollection* expressions) {
  for (int l = 0; l < _locals->length(); l++) {
    jvmtiDeferredLocalVariable* val = _locals->at(l);
    if (val->index() >= method()->max_locals() && val->index() < method()->max_locals() + method()->max_stack()) {
      update_value(expressions, val->type(), val->index() - method()->max_locals(), val->value());
    }
  }
}

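// Indices at or beyond max_locals() + max_stack() denote monitor slots: swap
// in the deferred owner oop while keeping the original BasicLock and flags.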
void jvmtiDeferredLocalVariableSet::update_monitors(GrowableArray<MonitorInfo*>* monitors) {
  for (int l = 0; l < _locals->length(); l++) {
    jvmtiDeferredLocalVariable* val = _locals->at(l);
    if (val->index() >= method()->max_locals() + method()->max_stack()) {
      int lock_index = val->index() - (method()->max_locals() + method()->max_stack());
      MonitorInfo* info = monitors->at(lock_index);
      MonitorInfo* new_info = new MonitorInfo((oopDesc*)val->value().l, info->lock(), info->eliminated(), info->owner_is_scalar_replaced());
      monitors->at_put(lock_index, new_info);
    }
  }
}


void jvmtiDeferredLocalVariableSet::oops_do(OopClosure* f) {
  // The Method* is on the stack so a live activation keeps it alive
  // either by mirror in interpreter or code in compiled code.
  for (int i = 0; i < _locals->length(); i++) {
    if (_locals->at(i)->type() == T_OBJECT) {
      f->do_oop(_locals->at(i)->oop_addr());
    }
  }
}

jvmtiDeferredLocalVariable::jvmtiDeferredLocalVariable(int index, BasicType type, jvalue value) {
  _index = index;
  _type = type;
  _value = value;
}


#ifndef PRODUCT
void compiledVFrame::verify() const {
  Unimplemented();
}
#endif // PRODUCT