
old/src/hotspot/share/runtime/deoptimization.cpp

 286   vframe* vf = vframe::new_vframe(&deoptee, &map, thread);
 287   while (!vf->is_top()) {
 288     assert(vf->is_compiled_frame(), "Wrong frame type");
 289     chunk->push(compiledVFrame::cast(vf));
 290     vf = vf->sender();
 291   }
 292   assert(vf->is_compiled_frame(), "Wrong frame type");
 293   chunk->push(compiledVFrame::cast(vf));
 294 
 295   bool realloc_failures = false;
 296 
 297 #if COMPILER2_OR_JVMCI
 298 #if INCLUDE_JVMCI
 299   bool jvmci_enabled = true;
 300 #else
 301   bool jvmci_enabled = false;
 302 #endif
 303 
 304   // Reallocate the non-escaping objects and restore their fields. Then
 305   // relock objects if synchronization on them was eliminated.
 306   if (jvmci_enabled COMPILER2_PRESENT( || (DoEscapeAnalysis && EliminateAllocations) )) {
 307     realloc_failures = eliminate_allocations(thread, exec_mode, cm, deoptee, map, chunk);
 308   }
 309 #endif // COMPILER2_OR_JVMCI
 310 
 311   // Revoke biases, done while still in Java state.
 312   // No safepoints allowed after this
 313   revoke_from_deopt_handler(thread, deoptee, &map);
 314 
 315   // Ensure that no safepoint is taken after pointers have been stored
 316   // in fields of rematerialized objects.  If a safepoint occurs from here on
 317   // out the java state residing in the vframeArray will be missed.
 318   // Locks may be rebiased in a safepoint.
 319   NoSafepointVerifier no_safepoint;
 320 
 321 #if COMPILER2_OR_JVMCI
 322   if (jvmci_enabled COMPILER2_PRESENT( || ((DoEscapeAnalysis || EliminateNestedLocks) && EliminateLocks) )) {
 323     eliminate_locks(thread, chunk, realloc_failures);
 324   }
 325 #endif // COMPILER2_OR_JVMCI
 326 
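A note on the guard above: COMPILER2_PRESENT comes from utilities/macros.hpp and expands to its argument only when the C2 compiler is built in, so the reallocation guard degenerates gracefully on JVMCI-only builds. A minimal sketch of the expansion and its effect:

    #ifdef COMPILER2
    #define COMPILER2_PRESENT(code) code
    #else
    #define COMPILER2_PRESENT(code)
    #endif

    // C2 build:          if (jvmci_enabled || (DoEscapeAnalysis && EliminateAllocations)) ...
    // JVMCI-only build:  if (jvmci_enabled) ...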

 989     oop obj = NULL;
 990 
 991     if (k->is_instance_klass()) {
 992 #if INCLUDE_JVMCI || INCLUDE_AOT
 993       CompiledMethod* cm = fr->cb()->as_compiled_method_or_null();
 994       if (cm->is_compiled_by_jvmci() && sv->is_auto_box()) {
 995         AutoBoxObjectValue* abv = (AutoBoxObjectValue*) sv;
 996         obj = get_cached_box(abv, fr, reg_map, THREAD);
 997         if (obj != NULL) {
 998           // Set the flag to indicate the box came from a cache, so that we can skip the field reassignment for it.
 999           abv->set_cached(true);
1000         }
1001       }
1002 #endif // INCLUDE_JVMCI || INCLUDE_AOT
1003       InstanceKlass* ik = InstanceKlass::cast(k);
1004       if (obj == NULL) {
1005         obj = ik->allocate_instance(THREAD);
1006       }
1007     } else if (k->is_typeArray_klass()) {
1008       TypeArrayKlass* ak = TypeArrayKlass::cast(k);
1009       assert(sv->field_size() % type2size[ak->element_type()] == 0, "non-integral array length");
1010       int len = sv->field_size() / type2size[ak->element_type()];
1011       obj = ak->allocate(len, THREAD);
1012     } else if (k->is_objArray_klass()) {
1013       ObjArrayKlass* ak = ObjArrayKlass::cast(k);
1014       obj = ak->allocate(sv->field_size(), THREAD);
1015     }
1016 
1017     if (obj == NULL) {
1018       failures = true;
1019     }
1020 
1021     assert(sv->value().is_null(), "redundant reallocation");
1022     assert(obj != NULL || HAS_PENDING_EXCEPTION, "allocation should succeed or we should get an exception");
1023     CLEAR_PENDING_EXCEPTION;
1024     sv->set_value(obj);
1025   }
1026 
1027   if (failures) {
1028     THROW_OOP_(Universe::out_of_memory_error_realloc_objects(), failures);
1029   } else if (pending_exception.not_null()) {
1030     thread->set_pending_exception(pending_exception(), exception_file, exception_line);
1031   }
1032 
1033   return failures;
1034 }
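A note on the failure protocol above, paraphrasing the macro shape in utilities/exceptions.hpp: THROW_OOP_(e, result) installs e as the pending exception on the current thread and then executes "return result;". Each per-object allocation exception is cleared inside the loop (CLEAR_PENDING_EXCEPTION), so on failure the caller observes the shared, pre-allocated OutOfMemoryError rather than the last individual exception, and the previously captured pending_exception is re-installed only when every reallocation succeeded.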
1035 
1036 #if INCLUDE_JVMCI
1037 /**
1038  * For primitive types whose kind gets "erased" at runtime (shorts become stack ints),
1039  * we need to somehow be able to recover the actual kind to be able to write the correct
1040  * amount of bytes.
1041  * For that purpose, this method assumes that, for an entry spanning n bytes at index i,
 1042  * the entries at indices i + 1 through i + n - 1 are 'markers'.
1043  * For example, if we were writing a short at index 4 of a byte array of size 8, the
1044  * expected form of the array would be:
1045  *
1046  * {b0, b1, b2, b3, INT, marker, b6, b7}
1047  *
 1048  * Thus, in order to get back the size of the entry, we simply need to count the number
 1049  * of marker entries following it; the span is that count plus one.
1050  *
1051  * @param virtualArray the virtualized byte array
1052  * @param i index of the virtual entry we are recovering
1053  * @return The number of bytes the entry spans
1054  */
1055 static int count_number_of_bytes_for_entry(ObjectValue *virtualArray, int i) {

1077     case 1:
1078       obj->byte_at_put(index, (jbyte) *((jint *) &val));
1079       break;
1080     case 2:
1081       *((jshort *) check_alignment_get_addr(obj, index, 2)) = (jshort) *((jint *) &val);
1082       break;
1083     case 4:
1084       *((jint *) check_alignment_get_addr(obj, index, 4)) = (jint) *((jint *) &val);
1085       break;
1086     case 8:
1087       *((jlong *) check_alignment_get_addr(obj, index, 8)) = (jlong) *((jlong *) &val);
1088       break;
1089     default:
1090       ShouldNotReachHere();
1091   }
1092 }
1093 #endif // INCLUDE_JVMCI
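The marker-counting scheme described in the comment above is easy to model in isolation. In this toy sketch a sentinel integer stands in for the 'marker' entries; the real helper inspects the ScopeValue entries of the virtualized array instead:

    #include <cassert>

    static const int MARKER = -1;  // hypothetical sentinel, illustration only

    // Count how many bytes the entry at index i spans: the entry itself
    // plus one byte per trailing marker.
    static int count_entry_bytes(const int* entries, int length, int i) {
      int n = 1;
      while (i + n < length && entries[i + n] == MARKER) {
        n++;
      }
      return n;
    }

    int main() {
      // {b0, b1, b2, b3, INT, marker, b6, b7}: a short written at index 4
      int entries[8] = {0, 1, 2, 3, 42, MARKER, 6, 7};
      assert(count_entry_bytes(entries, 8, 4) == 2);  // spans two bytes
      assert(count_entry_bytes(entries, 8, 0) == 1);  // plain byte entry
      return 0;
    }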
1094 
1095 
1096 // restore elements of an eliminated type array
1097 void Deoptimization::reassign_type_array_elements(frame* fr, RegisterMap* reg_map, ObjectValue* sv, typeArrayOop obj, BasicType type) {
1098   int index = 0;
1099   intptr_t val;
1100 
1101   for (int i = 0; i < sv->field_size(); i++) {
1102     StackValue* value = StackValue::create_stack_value(fr, reg_map, sv->field_at(i));
1103     switch(type) {
1104     case T_LONG: case T_DOUBLE: {
1105       assert(value->type() == T_INT, "Agreement.");
1106       StackValue* low =
1107         StackValue::create_stack_value(fr, reg_map, sv->field_at(++i));
1108 #ifdef _LP64
1109       jlong res = (jlong)low->get_int();
1110 #else
1111       jlong res = jlong_from((jint)value->get_int(), (jint)low->get_int());
1112 #endif
1113       obj->long_at_put(index, res);
1114       break;
1115     }
1116 
1117     // Have to cast to INT (32 bits) pointer to avoid little/big-endian problem.

1176       obj->byte_at_put(index, (jbyte)*((jint*)&val));
1177       break;
1178 #endif // INCLUDE_JVMCI
1179     }
1180 
1181     case T_BOOLEAN: {
1182       assert(value->type() == T_INT, "Agreement.");
1183       val = value->get_int();
1184       obj->bool_at_put(index, (jboolean)*((jint*)&val));
1185       break;
1186     }
1187 
1188       default:
1189         ShouldNotReachHere();
1190     }
1191     index++;
1192   }
1193 }
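The T_LONG/T_DOUBLE case above deserves a worked example. On 32-bit targets the two halves of the value arrive in separate stack slots and jlong_from() (globalDefinitions.hpp) splices them back together; on _LP64 both halves already sit in the single 'low' slot, so the plain cast suffices. A standalone model of the splice:

    #include <cassert>
    #include <cstdint>

    // Model of jlong_from(h, l): high half in bits 63..32, low half below.
    static int64_t jlong_from_model(int32_t h, int32_t l) {
      return (int64_t)(((uint64_t)(uint32_t)h << 32) | (uint32_t)l);
    }

    int main() {
      assert(jlong_from_model(0x1, 0x2) == 0x100000002LL);
      assert(jlong_from_model(-1, 0) == (int64_t)0xffffffff00000000ULL);
      return 0;
    }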
1194 
1195 // restore fields of an eliminated object array
1196 void Deoptimization::reassign_object_array_elements(frame* fr, RegisterMap* reg_map, ObjectValue* sv, objArrayOop obj) {
1197   for (int i = 0; i < sv->field_size(); i++) {
1198     StackValue* value = StackValue::create_stack_value(fr, reg_map, sv->field_at(i));
1199     assert(value->type() == T_OBJECT, "object element expected");
1200     obj->obj_at_put(i, value->get_obj()());
1201   }
1202 }
1203 
1204 class ReassignedField {
1205 public:
1206   int _offset;
1207   BasicType _type;
1208 public:
1209   ReassignedField() {
1210     _offset = 0;
1211     _type = T_ILLEGAL;
1212   }
1213 };
1214 
1215 int compare(ReassignedField* left, ReassignedField* right) {
1216   return left->_offset - right->_offset;
1217 }
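Why sort by offset? GrowableArray::sort takes exactly this C-style negative/zero/positive comparator, and ordering the collected fields by offset makes the two 32-bit halves of a scalar-replaced long or double occupy adjacent positions. The T_INT/T_FLOAT case in the function below relies on that adjacency when it probes fields->at(i+1) to decide whether it is looking at the first half of a 'big_value'.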
1218 
1219 // Restore fields of an eliminated instance object using the same field order
1220 // returned by HotSpotResolvedObjectTypeImpl.getInstanceFields(true)
1221 static int reassign_fields_by_klass(InstanceKlass* klass, frame* fr, RegisterMap* reg_map, ObjectValue* sv, int svIndex, oop obj, bool skip_internal) {
1222   GrowableArray<ReassignedField>* fields = new GrowableArray<ReassignedField>();
1223   InstanceKlass* ik = klass;
1224   while (ik != NULL) {
1225     for (AllFieldStream fs(ik); !fs.done(); fs.next()) {
1226       if (!fs.access_flags().is_static() && (!skip_internal || !fs.access_flags().is_internal())) {
1227         ReassignedField field;
1228         field._offset = fs.offset();
1229         field._type = Signature::basic_type(fs.signature());
1230         fields->append(field);
1231       }
1232     }
1233     ik = ik->superklass();
1234   }
1235   fields->sort(compare);
1236   for (int i = 0; i < fields->length(); i++) {
1237     intptr_t val;
1238     ScopeValue* scope_field = sv->field_at(svIndex);
1239     StackValue* value = StackValue::create_stack_value(fr, reg_map, scope_field);
1240     int offset = fields->at(i)._offset;
1241     BasicType type = fields->at(i)._type;
1242     switch (type) {
1243       case T_OBJECT: case T_ARRAY:
1244         assert(value->type() == T_OBJECT, "Agreement.");
1245         obj->obj_field_put(offset, value->get_obj()());
1246         break;
1247 
1248       // Have to cast to INT (32 bits) pointer to avoid little/big-endian problem.
1249       case T_INT: case T_FLOAT: { // 4 bytes.
1250         assert(value->type() == T_INT, "Agreement.");
1251         bool big_value = false;
1252         if (i+1 < fields->length() && fields->at(i+1)._type == T_INT) {
1253           if (scope_field->is_location()) {
1254             Location::Type type = ((LocationValue*) scope_field)->location().type();
1255             if (type == Location::dbl || type == Location::lng) {
1256               big_value = true;
1257             }
1258           }
1259           if (scope_field->is_constant_int()) {
1260             ScopeValue* next_scope_field = sv->field_at(svIndex + 1);
1261             if (next_scope_field->is_constant_long() || next_scope_field->is_constant_double()) {
1262               big_value = true;
1263             }
1264           }

1300         obj->char_field_put(offset, (jchar)*((jint*)&val));
1301         break;
1302 
1303       case T_BYTE:
1304         assert(value->type() == T_INT, "Agreement.");
1305         val = value->get_int();
1306         obj->byte_field_put(offset, (jbyte)*((jint*)&val));
1307         break;
1308 
1309       case T_BOOLEAN:
1310         assert(value->type() == T_INT, "Agreement.");
1311         val = value->get_int();
1312         obj->bool_field_put(offset, (jboolean)*((jint*)&val));
1313         break;
1314 
1315       default:
1316         ShouldNotReachHere();
1317     }
1318     svIndex++;
1319   }
1320   return svIndex;
1321 }
1322 
1323 // restore fields of all eliminated objects and arrays
1324 void Deoptimization::reassign_fields(frame* fr, RegisterMap* reg_map, GrowableArray<ScopeValue*>* objects, bool realloc_failures, bool skip_internal) {
1325   for (int i = 0; i < objects->length(); i++) {
1326     ObjectValue* sv = (ObjectValue*) objects->at(i);
1327     Klass* k = java_lang_Class::as_Klass(sv->klass()->as_ConstantOopReadValue()->value()());
1328     Handle obj = sv->value();
1329     assert(obj.not_null() || realloc_failures, "reallocation was missed");
1330     if (PrintDeoptimizationDetails) {
1331       tty->print_cr("reassign fields for object of type %s!", k->name()->as_C_string());
1332     }
1333     if (obj.is_null()) {
1334       continue;
1335     }
1336 #if INCLUDE_JVMCI || INCLUDE_AOT
1337     // Don't reassign fields of boxes that came from a cache. Caches may be in CDS.
1338     if (sv->is_auto_box() && ((AutoBoxObjectValue*) sv)->is_cached()) {
1339       continue;
1340     }
1341 #endif // INCLUDE_JVMCI || INCLUDE_AOT
1342     if (k->is_instance_klass()) {
1343       InstanceKlass* ik = InstanceKlass::cast(k);
1344       reassign_fields_by_klass(ik, fr, reg_map, sv, 0, obj(), skip_internal);
1345     } else if (k->is_typeArray_klass()) {
1346       TypeArrayKlass* ak = TypeArrayKlass::cast(k);
1347       reassign_type_array_elements(fr, reg_map, sv, (typeArrayOop) obj(), ak->element_type());
1348     } else if (k->is_objArray_klass()) {
1349       reassign_object_array_elements(fr, reg_map, sv, (objArrayOop) obj());
1350     }
1351   }
1352 }
1353 
1354 
1355 // relock objects for which synchronization was eliminated
1356 void Deoptimization::relock_objects(GrowableArray<MonitorInfo*>* monitors, JavaThread* thread, bool realloc_failures) {
1357   for (int i = 0; i < monitors->length(); i++) {
1358     MonitorInfo* mon_info = monitors->at(i);
1359     if (mon_info->eliminated()) {
1360       assert(!mon_info->owner_is_scalar_replaced() || realloc_failures, "reallocation was missed");
1361       if (!mon_info->owner_is_scalar_replaced()) {
1362         Handle obj(thread, mon_info->owner());
1363         markWord mark = obj->mark();
1364         if (UseBiasedLocking && mark.has_bias_pattern()) {
 1365           // Newly allocated objects may have the mark set to anonymously biased.
 1366           // Also, the deoptimized method may have called methods with synchronization
1367           // where the thread-local object is bias locked to the current thread.
1368           assert(mark.is_biased_anonymously() ||
1369                  mark.biased_locker() == thread, "should be locked to current thread");

new/src/hotspot/share/runtime/deoptimization.cpp

 286   vframe* vf = vframe::new_vframe(&deoptee, &map, thread);
 287   while (!vf->is_top()) {
 288     assert(vf->is_compiled_frame(), "Wrong frame type");
 289     chunk->push(compiledVFrame::cast(vf));
 290     vf = vf->sender();
 291   }
 292   assert(vf->is_compiled_frame(), "Wrong frame type");
 293   chunk->push(compiledVFrame::cast(vf));
 294 
 295   bool realloc_failures = false;
 296 
 297 #if COMPILER2_OR_JVMCI
 298 #if INCLUDE_JVMCI
 299   bool jvmci_enabled = true;
 300 #else
 301   bool jvmci_enabled = false;
 302 #endif
 303 
 304   // Reallocate the non-escaping objects and restore their fields. Then
 305   // relock objects if synchronization on them was eliminated.
 306   if (jvmci_enabled COMPILER2_PRESENT( || ((DoEscapeAnalysis && EliminateAllocations) || (DoEscapeAnalysis && UseStackAllocationRuntime)) )) {
 307     realloc_failures = eliminate_allocations(thread, exec_mode, cm, deoptee, map, chunk);
 308   }
 309 #endif // COMPILER2_OR_JVMCI
 310 
 311   // Revoke biases, done while still in Java state.
 312   // No safepoints allowed after this
 313   revoke_from_deopt_handler(thread, deoptee, &map);
 314 
 315   // Ensure that no safepoint is taken after pointers have been stored
 316   // in fields of rematerialized objects.  If a safepoint occurs from here on
 317   // out the java state residing in the vframeArray will be missed.
 318   // Locks may be rebiased in a safepoint.
 319   NoSafepointVerifier no_safepoint;
 320 
 321 #if COMPILER2_OR_JVMCI
 322   if (jvmci_enabled COMPILER2_PRESENT( || ((DoEscapeAnalysis || EliminateNestedLocks) && EliminateLocks) )) {
 323     eliminate_locks(thread, chunk, realloc_failures);
 324   }
 325 #endif // COMPILER2_OR_JVMCI
 326 

 989     oop obj = NULL;
 990 
 991     if (k->is_instance_klass()) {
 992 #if INCLUDE_JVMCI || INCLUDE_AOT
 993       CompiledMethod* cm = fr->cb()->as_compiled_method_or_null();
 994       if (cm->is_compiled_by_jvmci() && sv->is_auto_box()) {
 995         AutoBoxObjectValue* abv = (AutoBoxObjectValue*) sv;
 996         obj = get_cached_box(abv, fr, reg_map, THREAD);
 997         if (obj != NULL) {
 998           // Set the flag to indicate the box came from a cache, so that we can skip the field reassignment for it.
 999           abv->set_cached(true);
1000         }
1001       }
1002 #endif // INCLUDE_JVMCI || INCLUDE_AOT
1003       InstanceKlass* ik = InstanceKlass::cast(k);
1004       if (obj == NULL) {
1005         obj = ik->allocate_instance(THREAD);
1006       }
1007     } else if (k->is_typeArray_klass()) {
1008       TypeArrayKlass* ak = TypeArrayKlass::cast(k);
1009       int len;
1010       if (sv->is_stack_object()) {
1011         len = ((StackObjectValue *)sv)->get_field_length()->value();
1012       } else {
1013         assert(sv->field_size() % type2size[ak->element_type()] == 0, "non-integral array length");
1014         len = sv->field_size() / type2size[ak->element_type()];
1015       }
1016       obj = ak->allocate(len, THREAD);
1017     } else if (k->is_objArray_klass()) {
1018       ObjArrayKlass* ak = ObjArrayKlass::cast(k);
1019       int len;
1020       if (sv->is_stack_object()) {
1021         len = ((StackObjectValue *)sv)->get_field_length()->value();
1022       } else {
1023         len = sv->field_size();
1024       }
1025       obj = ak->allocate(len, THREAD);
1026     }
1027 
1028     if (obj == NULL) {
1029       failures = true;
1030     }
1031 
1032     assert(sv->value().is_null(), "redundant reallocation");
1033     assert(obj != NULL || HAS_PENDING_EXCEPTION, "allocation should succeed or we should get an exception");
1034     CLEAR_PENDING_EXCEPTION;
1035     sv->set_value(obj);
1036   }
1037 
1038   if (failures) {
1039     THROW_OOP_(Universe::out_of_memory_error_realloc_objects(), failures);
1040   } else if (pending_exception.not_null()) {
1041     thread->set_pending_exception(pending_exception(), exception_file, exception_line);
1042   }
1043 
1044   return failures;
1045 }
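In the reallocation loop above the array length now comes from two places: a stack-allocated object carries an explicit field length, while a scalar-replaced array still derives its length from the number of scope-value slots, where type2size maps T_LONG/T_DOUBLE to two 32-bit slots and the other primitive element types to one. A standalone model of the scalar-replaced branch:

    #include <cassert>

    // Toy stand-in for type2size[]: wide primitives occupy two slots.
    static int slots_per_element(bool is_wide) { return is_wide ? 2 : 1; }

    static int scalar_replaced_length(int field_size, bool is_wide) {
      // mirrors the "non-integral array length" assert above
      assert(field_size % slots_per_element(is_wide) == 0);
      return field_size / slots_per_element(is_wide);
    }

    int main() {
      assert(scalar_replaced_length(8, true)  == 4);  // e.g. a long[4]
      assert(scalar_replaced_length(8, false) == 8);  // e.g. a byte[8]
      return 0;
    }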
1046 
1047 void Deoptimization::reassign_scalar_replaced_fields(frame *fr, RegisterMap *reg_map, GrowableArray<ScopeValue*>* objects, ObjectValue *sv, Handle obj, Klass* k, bool skip_internal) {
1048   if (k->is_instance_klass()) {
1049       InstanceKlass* ik = InstanceKlass::cast(k);
1050       reassign_scalar_replaced_fields_by_klass(ik, fr, reg_map, objects, sv, 0, obj(), skip_internal);
1051     } else if (k->is_typeArray_klass()) {
1052       TypeArrayKlass* ak = TypeArrayKlass::cast(k);
1053       reassign_scalar_replaced_type_array_elements(fr, reg_map, sv, (typeArrayOop) obj(), ak->element_type());
1054     } else if (k->is_objArray_klass()) {
1055       reassign_scalar_replaced_object_array_elements(fr, reg_map, objects, sv, (objArrayOop) obj());
1056     }
1057 }
1058 
1059 #if INCLUDE_JVMCI
1060 /**
1061  * For primitive types whose kind gets "erased" at runtime (shorts become stack ints),
1062  * we need to somehow be able to recover the actual kind to be able to write the correct
1063  * amount of bytes.
1064  * For that purpose, this method assumes that, for an entry spanning n bytes at index i,
 1065  * the entries at indices i + 1 through i + n - 1 are 'markers'.
1066  * For example, if we were writing a short at index 4 of a byte array of size 8, the
1067  * expected form of the array would be:
1068  *
1069  * {b0, b1, b2, b3, INT, marker, b6, b7}
1070  *
 1071  * Thus, in order to get back the size of the entry, we simply need to count the number
 1072  * of marker entries following it; the span is that count plus one.
1073  *
1074  * @param virtualArray the virtualized byte array
1075  * @param i index of the virtual entry we are recovering
1076  * @return The number of bytes the entry spans
1077  */
1078 static int count_number_of_bytes_for_entry(ObjectValue *virtualArray, int i) {

1100     case 1:
1101       obj->byte_at_put(index, (jbyte) *((jint *) &val));
1102       break;
1103     case 2:
1104       *((jshort *) check_alignment_get_addr(obj, index, 2)) = (jshort) *((jint *) &val);
1105       break;
1106     case 4:
1107       *((jint *) check_alignment_get_addr(obj, index, 4)) = (jint) *((jint *) &val);
1108       break;
1109     case 8:
1110       *((jlong *) check_alignment_get_addr(obj, index, 8)) = (jlong) *((jlong *) &val);
1111       break;
1112     default:
1113       ShouldNotReachHere();
1114   }
1115 }
1116 #endif // INCLUDE_JVMCI
1117 
1118 
1119 // restore elements of an eliminated type array
1120 void Deoptimization::reassign_scalar_replaced_type_array_elements(frame* fr, RegisterMap* reg_map, ObjectValue* sv, typeArrayOop obj, BasicType type) {
1121   int index = 0;
1122   intptr_t val;
1123 
1124   for (int i = 0; i < sv->field_size(); i++) {
1125     StackValue* value = StackValue::create_stack_value(fr, reg_map, sv->field_at(i));
1126     switch(type) {
1127     case T_LONG: case T_DOUBLE: {
1128       assert(value->type() == T_INT, "Agreement.");
1129       StackValue* low =
1130         StackValue::create_stack_value(fr, reg_map, sv->field_at(++i));
1131 #ifdef _LP64
1132       jlong res = (jlong)low->get_int();
1133 #else
1134       jlong res = jlong_from((jint)value->get_int(), (jint)low->get_int());
1135 #endif
1136       obj->long_at_put(index, res);
1137       break;
1138     }
1139 
1140     // Have to cast to INT (32 bits) pointer to avoid little/big-endian problem.

1199       obj->byte_at_put(index, (jbyte)*((jint*)&val));
1200       break;
1201 #endif // INCLUDE_JVMCI
1202     }
1203 
1204     case T_BOOLEAN: {
1205       assert(value->type() == T_INT, "Agreement.");
1206       val = value->get_int();
1207       obj->bool_at_put(index, (jboolean)*((jint*)&val));
1208       break;
1209     }
1210 
1211       default:
1212         ShouldNotReachHere();
1213     }
1214     index++;
1215   }
1216 }
1217 
1218 // restore fields of an eliminated object array
1219 void Deoptimization::reassign_scalar_replaced_object_array_elements(frame* fr, RegisterMap* reg_map, GrowableArray<ScopeValue*>* objects, ObjectValue* sv, objArrayOop obj) {
1220   for (int i = 0; i < sv->field_size(); i++) {
1221     StackValue* value = StackValue::create_stack_value(fr, reg_map, get_scope_value(fr, reg_map, sv->field_at(i), objects));
1222     assert(value->type() == T_OBJECT, "object element expected");
1223     assert(oopDesc::is_oop_or_null(value->get_obj()()), "must be oop");
1224     obj->obj_at_put(i, value->get_obj()());
1225   }
1226 }
1227 
1228 class ReassignedField {
1229 public:
1230   int _offset;
1231   BasicType _type;
1232 public:
1233   ReassignedField() {
1234     _offset = 0;
1235     _type = T_ILLEGAL;
1236   }
1237 };
1238 
1239 int compare(ReassignedField* left, ReassignedField* right) {
1240   return left->_offset - right->_offset;
1241 }
1242 
1243 ScopeValue *Deoptimization::match_object_to_stack_oop(intptr_t *oop_ptr, intptr_t *sp_base, GrowableArray<ScopeValue*>* objects) {
1244   for (int j = 0; j < objects->length(); j++) {
1245     ScopeValue* o_sv = objects->at(j);
1246     if (o_sv->is_object()) {
1247       if (o_sv->as_ObjectValue()->is_stack_object()) {
1248         StackObjectValue *sov = (StackObjectValue *)o_sv;
1249         Location o_loc = sov->get_stack_location();
1250         int o_offset = o_loc.stack_offset();
1251         int l_offset = (address)oop_ptr - (address)sp_base;
1252         if (o_offset == l_offset) {
1253           return o_sv;
1254         }
1255       }
1256     }
1257   }
1258   return NULL;
1259 }
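A standalone model of the matching above: an oop that points into the current frame, i.e. into [sp_base, sp_top), is identified purely by its byte offset from sp_base, compared against each candidate's recorded stack offset. The names below are illustrative stand-ins for StackObjectValue and Location::stack_offset():

    #include <cstddef>
    #include <cstdint>

    struct StackCandidate { int stack_offset; };  // stand-in for StackObjectValue

    static const StackCandidate* match(intptr_t* oop_ptr, intptr_t* sp_base,
                                       const StackCandidate* cands, size_t n) {
      int l_offset = (int)((char*)oop_ptr - (char*)sp_base);
      for (size_t j = 0; j < n; j++) {
        if (cands[j].stack_offset == l_offset) return &cands[j];
      }
      return nullptr;  // caller asserts: stack pointer must match an object
    }

    int main() {
      intptr_t frame[8] = {0};
      StackCandidate cands[] = { { (int)(2 * sizeof(intptr_t)) } };
      return match(&frame[2], frame, cands, 1) != nullptr ? 0 : 1;
    }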
1260 
1261 ScopeValue *Deoptimization::get_scope_value(frame* fr, RegisterMap* reg_map, ScopeValue* sv, GrowableArray<ScopeValue*>* objects) {
1262   if (sv->is_location()) {
1263     if ((objects != NULL) && (objects->length() > 0)) {
1264       LocationValue* lv = (LocationValue *)sv;
1265       Location loc = lv->location();
1266       intptr_t *oop_ptr;
1267       intptr_t *sp_base = fr->unextended_sp();
1268       intptr_t *sp_top = sp_base + fr->cb()->frame_size();
1269       if (loc.is_stack() && (loc.type() == Location::oop)) {
1270         address value_addr = ((address)sp_base) + loc.stack_offset();
1271         oop val = *(oop *)value_addr;
1272         oop_ptr = cast_from_oop<intptr_t *>(val);
1273       } else if (loc.is_register() && (loc.type() == Location::oop)) {
1274         address value_addr = reg_map->location(VMRegImpl::as_VMReg(loc.register_number()));
1275         oop val = *(oop *)value_addr;
1276         oop_ptr = cast_from_oop<intptr_t *>(val);
1277       } else {
1278         assert(loc.type() != Location::oop, "Can not be an oop");
1279         return sv;
1280       }
1281       if (sp_base <= oop_ptr && oop_ptr < sp_top) {
1282         ScopeValue* o_sv = Deoptimization::match_object_to_stack_oop(oop_ptr, sp_base, objects);
1283         if (o_sv != NULL) {
1284           sv = o_sv;
1285         } else {
1286           assert(false, "pointer to stack but did not find object to replace");
1287         }
1288       }
1289     }
1290   } else if (sv->is_object()) {
1291     oop o = sv->as_ObjectValue()->value()();
1292     intptr_t *sp_base = fr->unextended_sp();
1293     intptr_t *sp_top = sp_base + fr->cb()->frame_size();
1294     intptr_t *oop_ptr = cast_from_oop<intptr_t *>(o);
1295     if (sp_base <= oop_ptr && oop_ptr < sp_top) {
1296       ScopeValue* o_sv = Deoptimization::match_object_to_stack_oop(oop_ptr, sp_base, objects);
1297       if (o_sv != NULL) {
1298         sv = o_sv;
 1299         assert(sv == o_sv, "objects have to match");
1300       } else {
1301         assert(false, "pointer to stack but did not find object to replace");
1302       }
1303     }
1304   }
1305   return sv;
1306 }
1307 
1308 // Restore fields of an eliminated instance object using the same field order
1309 // returned by HotSpotResolvedObjectTypeImpl.getInstanceFields(true)
1310 void Deoptimization::reassign_scalar_replaced_fields_by_klass(InstanceKlass* klass, frame* fr, RegisterMap* reg_map, GrowableArray<ScopeValue*>* objects, ObjectValue* sv, int svIndex, oop obj, bool skip_internal) {
1311   GrowableArray<ReassignedField>* fields = new GrowableArray<ReassignedField>();
1312   InstanceKlass* ik = klass;
1313   while (ik != NULL) {
1314     for (AllFieldStream fs(ik); !fs.done(); fs.next()) {
1315       if (!fs.access_flags().is_static() && (!skip_internal || !fs.access_flags().is_internal())) {
1316         ReassignedField field;
1317         field._offset = fs.offset();
1318         field._type = Signature::basic_type(fs.signature());
1319         fields->append(field);
1320       }
1321     }
1322     ik = ik->superklass();
1323   }
1324   fields->sort(compare);
1325   for (int i = 0; i < fields->length(); i++) {
1326     intptr_t val;
1327     ScopeValue* scope_field = get_scope_value(fr, reg_map, sv->field_at(svIndex), objects);
1328     StackValue* value = StackValue::create_stack_value(fr, reg_map, scope_field);
1329     int offset = fields->at(i)._offset;
1330     BasicType type = fields->at(i)._type;
1331     switch (type) {
1332       case T_OBJECT: case T_ARRAY:
1333         assert(value->type() == T_OBJECT, "Agreement.");
1334         assert(oopDesc::is_oop_or_null(value->get_obj()()), "must be oop");
1335         obj->obj_field_put(offset, value->get_obj()());
1336         break;
1337 
1338       // Have to cast to INT (32 bits) pointer to avoid little/big-endian problem.
1339       case T_INT: case T_FLOAT: { // 4 bytes.
1340         assert(value->type() == T_INT, "Agreement.");
1341         bool big_value = false;
1342         if (i+1 < fields->length() && fields->at(i+1)._type == T_INT) {
1343           if (scope_field->is_location()) {
1344             Location::Type type = ((LocationValue*) scope_field)->location().type();
1345             if (type == Location::dbl || type == Location::lng) {
1346               big_value = true;
1347             }
1348           }
1349           if (scope_field->is_constant_int()) {
1350             ScopeValue* next_scope_field = sv->field_at(svIndex + 1);
1351             if (next_scope_field->is_constant_long() || next_scope_field->is_constant_double()) {
1352               big_value = true;
1353             }
1354           }

1390         obj->char_field_put(offset, (jchar)*((jint*)&val));
1391         break;
1392 
1393       case T_BYTE:
1394         assert(value->type() == T_INT, "Agreement.");
1395         val = value->get_int();
1396         obj->byte_field_put(offset, (jbyte)*((jint*)&val));
1397         break;
1398 
1399       case T_BOOLEAN:
1400         assert(value->type() == T_INT, "Agreement.");
1401         val = value->get_int();
1402         obj->bool_field_put(offset, (jboolean)*((jint*)&val));
1403         break;
1404 
1405       default:
1406         ShouldNotReachHere();
1407     }
1408     svIndex++;
1409   }
1410 }
1411 
1412 void Deoptimization::reassign_stack_allocated_type_array_elements(oop orig, oop newly_allocated, Klass *k) {
1413   typeArrayOop orig_obj = (typeArrayOop) orig;
1414   typeArrayOop new_obj = (typeArrayOop) newly_allocated;
1415   assert(orig_obj->length() == new_obj->length(), "lengths have to be the same");
1416   TypeArrayKlass* ak = TypeArrayKlass::cast(k);
1417   BasicType type = ak->element_type();
1418   for (int i = 0; i < orig_obj->length(); i++) {
1419     switch (type) {
1420       case T_BOOLEAN:
1421         new_obj->bool_at_put(i, orig_obj->bool_at(i));
1422         break;
1423       case T_CHAR:
1424         new_obj->char_at_put(i, orig_obj->char_at(i));
1425         break;
1426       case T_FLOAT:
1427         new_obj->float_at_put(i, orig_obj->float_at(i));
1428         break;
1429       case T_DOUBLE:
1430         new_obj->double_at_put(i, orig_obj->double_at(i));
1431         break;
1432       case T_BYTE:
1433         new_obj->byte_at_put(i, orig_obj->byte_at(i));
1434         break;
1435       case T_SHORT:
1436         new_obj->short_at_put(i, orig_obj->short_at(i));
1437         break;
1438       case T_INT:
1439         new_obj->int_at_put(i, orig_obj->int_at(i));
1440         break;
1441       case T_LONG:
1442         new_obj->long_at_put(i, orig_obj->long_at(i));
1443         break;
1444       default:
1445         assert(false, "unreachable");
1446     }
1447   }
1448 }
1449 
1450 void Deoptimization::reassign_stack_allocated_object_array_elements(oop orig, oop newly_allocated, intptr_t *sp_base, intptr_t *sp_top, GrowableArray<ScopeValue*>* objects) {
1451   objArrayOop orig_obj = (objArrayOop) orig;
1452   objArrayOop new_obj = (objArrayOop) newly_allocated;
1453   assert(orig_obj->length() == new_obj->length(), "lengths have to be the same");
1454   for (int i = 0; i < orig_obj->length(); i++) {
1455     oop o = orig_obj->obj_at(i);
1456     intptr_t *oop_ptr = cast_from_oop<intptr_t *>(o);
1457     if (sp_base <= oop_ptr && oop_ptr < sp_top) {
1458       int field_offset = (address)oop_ptr - (address)sp_base;
1459       bool found = false;
1460       for (int j = 0; j < objects->length(); j++) {
1461         ScopeValue* o_sv = objects->at(j);
1462         if (o_sv->is_object() && o_sv->as_ObjectValue()->is_stack_object()) {
1463           StackObjectValue *sov = (StackObjectValue *)o_sv;
1464           Location o_loc = sov->get_stack_location();
1465           int o_offset = o_loc.stack_offset();
1466           if (o_offset == field_offset) {
1467             o = sov->value()();
1468             found = true;
1469             break;
1470           }
1471         }
1472       }
1473       assert(found, "pointer to stack but did not find object to replace");
1474     }
1475     assert(oopDesc::is_oop_or_null(o), "must be oop");
1476     new_obj->obj_at_put(i, o);
1477   }
1478 }
1479 
1480 class ReassignStackObjectFields: public FieldClosure {
1481  private:
1482   oop _orig;
1483   oop _new;
1484   intptr_t *_sp_base;
1485   intptr_t *_sp_top;
1486   GrowableArray<ScopeValue*>* _objects;
1487 
1488  public:
1489   ReassignStackObjectFields(oop orig, oop n, intptr_t *sp_base, intptr_t *sp_top, GrowableArray<ScopeValue*>* objects) :
1490     _orig(orig), _new(n), _sp_base(sp_base), _sp_top(sp_top), _objects(objects) {}
1491 
1492   void do_field(fieldDescriptor* fd) {
1493     BasicType ft = fd->field_type();
1494     switch (ft) {
1495       case T_BYTE:
1496         _new->byte_field_put(fd->offset(), _orig->byte_field(fd->offset()));
1497         break;
1498       case T_CHAR:
1499         _new->char_field_put(fd->offset(), _orig->char_field(fd->offset()));
1500         break;
1501       case T_DOUBLE:
1502         _new->double_field_put(fd->offset(), _orig->double_field(fd->offset()));
1503         break;
1504       case T_FLOAT:
1505         _new->float_field_put(fd->offset(), _orig->float_field(fd->offset()));
1506         break;
1507       case T_INT:
1508         _new->int_field_put(fd->offset(), _orig->int_field(fd->offset()));
1509         break;
1510       case T_LONG:
1511         _new->long_field_put(fd->offset(), _orig->long_field(fd->offset()));
1512         break;
1513       case T_SHORT:
1514         _new->short_field_put(fd->offset(), _orig->short_field(fd->offset()));
1515         break;
1516       case T_BOOLEAN:
1517         _new->bool_field_put(fd->offset(), _orig->bool_field(fd->offset()));
1518         break;
1519       case T_ARRAY:
1520       case T_OBJECT: {
1521         oop o = _orig->obj_field(fd->offset());
1522         intptr_t *oop_ptr = cast_from_oop<intptr_t *>(o);
1523         if (_sp_base <= oop_ptr && oop_ptr < _sp_top) {
1524           int field_offset = (address)oop_ptr - (address)_sp_base;
1525           bool found = false;
1526           for (int j = 0; j < _objects->length(); j++) {
1527             ScopeValue* o_sv = _objects->at(j);
1528             if (o_sv->is_object() && o_sv->as_ObjectValue()->is_stack_object()) {
1529               StackObjectValue *sov = (StackObjectValue *)o_sv;
1530               Location o_loc = sov->get_stack_location();
1531               int o_offset = o_loc.stack_offset();
1532               if (o_offset == field_offset) {
1533                 o = sov->value()();
1534                 found = true;
1535                 break;
1536               }
1537             }
1538           }
1539           assert(found, "Pointer to stack but did not find object to replace");
1540         }
1541         assert(oopDesc::is_oop_or_null(o), "must be oop");
1542         _new->obj_field_put(fd->offset(), o);
1543         break;
1544       }
1545       default:
1546         ShouldNotReachHere();
1547         break;
1548      }
1549   }
1550 };
1551 
1552 void Deoptimization::reassign_stack_allocated_fields(frame *fr, GrowableArray<ScopeValue*>* objects, ObjectValue *sv, Handle obj, Klass* k) {
1553   StackObjectValue *sov = (StackObjectValue *)sv;
1554   Location loc = sov->get_stack_location();
1555   address value_addr = ((address)fr->unextended_sp()) + loc.stack_offset();
1556   oop orig = cast_to_oop<address>(value_addr);
1557   oop newly_allocated = obj();
1558   intptr_t *sp_base = fr->unextended_sp();
1559   intptr_t *sp_top = sp_base + fr->cb()->frame_size();
1560 
1561   if (k->is_instance_klass()) {
1562     InstanceKlass* ik = InstanceKlass::cast(k);
1563     ReassignStackObjectFields reassign(orig, newly_allocated, sp_base, sp_top, objects);
1564     ik->do_nonstatic_fields(&reassign);
1565   } else if (k->is_typeArray_klass()) {
1566     reassign_stack_allocated_type_array_elements(orig, newly_allocated, k);
1567   } else if (k->is_objArray_klass()) {
1568     reassign_stack_allocated_object_array_elements(orig, newly_allocated, sp_base, sp_top, objects);
1569   }
1570 }
1571 
1572 // restore fields of all eliminated objects and arrays
1573 void Deoptimization::reassign_fields(frame* fr, RegisterMap* reg_map, GrowableArray<ScopeValue*>* objects, bool realloc_failures, bool skip_internal) {
1574   for (int i = 0; i < objects->length(); i++) {
1575     ObjectValue* sv = (ObjectValue*) objects->at(i);
1576     Klass* k = java_lang_Class::as_Klass(sv->klass()->as_ConstantOopReadValue()->value()());
1577     Handle obj = sv->value();
1578     assert(obj.not_null() || realloc_failures, "reallocation was missed");
1579     if (PrintDeoptimizationDetails) {
1580       tty->print_cr("reassign fields for object of type %s!", k->name()->as_C_string());
1581     }
1582     if (obj.is_null()) {
1583       continue;
1584     }
1585 #if INCLUDE_JVMCI || INCLUDE_AOT
1586     // Don't reassign fields of boxes that came from a cache. Caches may be in CDS.
1587     if (sv->is_auto_box() && ((AutoBoxObjectValue*) sv)->is_cached()) {
1588       continue;
1589     }
1590 #endif // INCLUDE_JVMCI || INCLUDE_AOT
1591 
1592     if (sv->is_stack_object()) {
1593       reassign_stack_allocated_fields(fr, objects, sv, obj, k);
1594     } else {
1595       reassign_scalar_replaced_fields(fr, reg_map, objects, sv, obj, k, skip_internal);
1596     }
1597   }
1598 }
1599 
1600 
1601 // relock objects for which synchronization was eliminated
1602 void Deoptimization::relock_objects(GrowableArray<MonitorInfo*>* monitors, JavaThread* thread, bool realloc_failures) {
1603   for (int i = 0; i < monitors->length(); i++) {
1604     MonitorInfo* mon_info = monitors->at(i);
1605     if (mon_info->eliminated()) {
1606       assert(!mon_info->owner_is_scalar_replaced() || realloc_failures, "reallocation was missed");
1607       if (!mon_info->owner_is_scalar_replaced()) {
1608         Handle obj(thread, mon_info->owner());
1609         markWord mark = obj->mark();
1610         if (UseBiasedLocking && mark.has_bias_pattern()) {
 1611           // Newly allocated objects may have the mark set to anonymously biased.
 1612           // Also, the deoptimized method may have called methods with synchronization
1613           // where the thread-local object is bias locked to the current thread.
1614           assert(mark.is_biased_anonymously() ||
1615                  mark.biased_locker() == thread, "should be locked to current thread");