src/share/vm/opto/library_call.cpp (old)

 257   bool inline_array_copyOf(bool is_copyOfRange);
 258   bool inline_array_equals(StrIntrinsicNode::ArgEnc ae);
 259   void copy_to_clone(Node* obj, Node* alloc_obj, Node* obj_size, bool is_array, bool card_mark);
 260   bool inline_native_clone(bool is_virtual);
 261   bool inline_native_Reflection_getCallerClass();
 262   // Helper function for inlining native object hash method
 263   bool inline_native_hashcode(bool is_virtual, bool is_static);
 264   bool inline_native_getClass();
 265 
 266   // Helper functions for inlining arraycopy
 267   bool inline_arraycopy();
 268   AllocateArrayNode* tightly_coupled_allocation(Node* ptr,
 269                                                 RegionNode* slow_region);
 270   JVMState* arraycopy_restore_alloc_state(AllocateArrayNode* alloc, int& saved_reexecute_sp);
 271   void arraycopy_move_allocation_here(AllocateArrayNode* alloc, Node* dest, JVMState* saved_jvms, int saved_reexecute_sp);
 272 
 273   typedef enum { LS_xadd, LS_xchg, LS_cmpxchg } LoadStoreKind;
 274   bool inline_unsafe_load_store(BasicType type,  LoadStoreKind kind);
 275   bool inline_unsafe_ordered_store(BasicType type);
 276   bool inline_unsafe_fence(vmIntrinsics::ID id);

 277   bool inline_fp_conversions(vmIntrinsics::ID id);
 278   bool inline_number_methods(vmIntrinsics::ID id);
 279   bool inline_reference_get();
 280   bool inline_Class_cast();
 281   bool inline_aescrypt_Block(vmIntrinsics::ID id);
 282   bool inline_cipherBlockChaining_AESCrypt(vmIntrinsics::ID id);
 283   Node* inline_cipherBlockChaining_AESCrypt_predicate(bool decrypting);
 284   Node* get_key_start_from_aescrypt_object(Node* aescrypt_object);
 285   Node* get_original_key_start_from_aescrypt_object(Node* aescrypt_object);
 286   bool inline_ghash_processBlocks();
 287   bool inline_sha_implCompress(vmIntrinsics::ID id);
 288   bool inline_digestBase_implCompressMB(int predicate);
 289   bool inline_sha_implCompressMB(Node* digestBaseObj, ciInstanceKlass* instklass_SHA,
 290                                  bool long_state, address stubAddr, const char *stubName,
 291                                  Node* src_start, Node* ofs, Node* limit);
 292   Node* get_state_from_sha_object(Node *sha_object);
 293   Node* get_state_from_sha5_object(Node *sha_object);
 294   Node* inline_digestBase_implCompressMB_predicate(int predicate);
 295   bool inline_encodeISOArray();
 296   bool inline_updateCRC32();
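
The LoadStoreKind enum above names the three flavors of atomic read-modify-write that inline_unsafe_load_store generates. As a rough semantic guide only (a minimal C++11 sketch, not HotSpot code), LS_xadd, LS_xchg and LS_cmpxchg correspond to std::atomic's fetch_add, exchange and compare_exchange_strong:

// Sketch (not HotSpot code): C++11 analogues of the three
// LoadStoreKind flavors handled by inline_unsafe_load_store.
#include <atomic>
#include <cassert>

int main() {
  std::atomic<int> v(40);

  // LS_xadd: getAndAddInt/Long -- add a delta, return the old value.
  int old1 = v.fetch_add(2);
  assert(old1 == 40 && v.load() == 42);

  // LS_xchg: getAndSetInt/Long/Object -- store a new value, return the old.
  int old2 = v.exchange(7);
  assert(old2 == 42 && v.load() == 7);

  // LS_cmpxchg: compareAndSwapInt/Long/Object -- store only if the
  // current value matches the expected one.
  int expected = 7;
  bool ok = v.compare_exchange_strong(expected, 9);
  assert(ok && v.load() == 9);
  return 0;
}

All three report the prior value in some form, which is why the Java-level getAndAdd/getAndSet/compareAndSwap methods can be matched to a single locked instruction on common platforms such as x86.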


 612   case vmIntrinsics::_putLongUnaligned:         return inline_unsafe_access(!is_native_ptr,  is_store, T_LONG,    !is_volatile);
 613 
 614   case vmIntrinsics::_compareAndSwapObject:     return inline_unsafe_load_store(T_OBJECT, LS_cmpxchg);
 615   case vmIntrinsics::_compareAndSwapInt:        return inline_unsafe_load_store(T_INT,    LS_cmpxchg);
 616   case vmIntrinsics::_compareAndSwapLong:       return inline_unsafe_load_store(T_LONG,   LS_cmpxchg);
 617 
 618   case vmIntrinsics::_putOrderedObject:         return inline_unsafe_ordered_store(T_OBJECT);
 619   case vmIntrinsics::_putOrderedInt:            return inline_unsafe_ordered_store(T_INT);
 620   case vmIntrinsics::_putOrderedLong:           return inline_unsafe_ordered_store(T_LONG);
 621 
 622   case vmIntrinsics::_getAndAddInt:             return inline_unsafe_load_store(T_INT,    LS_xadd);
 623   case vmIntrinsics::_getAndAddLong:            return inline_unsafe_load_store(T_LONG,   LS_xadd);
 624   case vmIntrinsics::_getAndSetInt:             return inline_unsafe_load_store(T_INT,    LS_xchg);
 625   case vmIntrinsics::_getAndSetLong:            return inline_unsafe_load_store(T_LONG,   LS_xchg);
 626   case vmIntrinsics::_getAndSetObject:          return inline_unsafe_load_store(T_OBJECT, LS_xchg);
 627 
 628   case vmIntrinsics::_loadFence:
 629   case vmIntrinsics::_storeFence:
 630   case vmIntrinsics::_fullFence:                return inline_unsafe_fence(intrinsic_id());
 631 


 632   case vmIntrinsics::_currentThread:            return inline_native_currentThread();
 633   case vmIntrinsics::_isInterrupted:            return inline_native_isInterrupted();
 634 
 635 #ifdef TRACE_HAVE_INTRINSICS
 636   case vmIntrinsics::_classID:                  return inline_native_classID();
 637   case vmIntrinsics::_threadID:                 return inline_native_threadID();
 638   case vmIntrinsics::_counterTime:              return inline_native_time_funcs(CAST_FROM_FN_PTR(address, TRACE_TIME_METHOD), "counterTime");
 639 #endif
 640   case vmIntrinsics::_currentTimeMillis:        return inline_native_time_funcs(CAST_FROM_FN_PTR(address, os::javaTimeMillis), "currentTimeMillis");
 641   case vmIntrinsics::_nanoTime:                 return inline_native_time_funcs(CAST_FROM_FN_PTR(address, os::javaTimeNanos), "nanoTime");
 642   case vmIntrinsics::_allocateInstance:         return inline_unsafe_allocate();
 643   case vmIntrinsics::_copyMemory:               return inline_unsafe_copyMemory();
 644   case vmIntrinsics::_newArray:                 return inline_native_newArray();
 645   case vmIntrinsics::_getLength:                return inline_native_getLength();
 646   case vmIntrinsics::_copyOf:                   return inline_array_copyOf(false);
 647   case vmIntrinsics::_copyOfRange:              return inline_array_copyOf(true);
 648   case vmIntrinsics::_equalsB:                  return inline_array_equals(StrIntrinsicNode::LL);
 649   case vmIntrinsics::_equalsC:                  return inline_array_equals(StrIntrinsicNode::UU);
 650   case vmIntrinsics::_clone:                    return inline_native_clone(intrinsic()->is_virtual());
 651 


2957 bool LibraryCallKit::inline_unsafe_fence(vmIntrinsics::ID id) {
2958   // Regardless of form, don't allow previous ld/st to move down,
2959   // then issue acquire, release, or volatile mem_bar.
2960   insert_mem_bar(Op_MemBarCPUOrder);
2961   switch(id) {
2962     case vmIntrinsics::_loadFence:
2963       insert_mem_bar(Op_LoadFence);
2964       return true;
2965     case vmIntrinsics::_storeFence:
2966       insert_mem_bar(Op_StoreFence);
2967       return true;
2968     case vmIntrinsics::_fullFence:
2969       insert_mem_bar(Op_MemBarVolatile);
2970       return true;
2971     default:
2972       fatal_unexpected_iid(id);
2973       return false;
2974   }
2975 }
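
The three fences above line up with the standard memory-order notions: loadFence keeps loads before the fence from reordering with loads or stores after it (an acquire fence), storeFence keeps loads and stores before the fence from reordering with stores after it (a release fence), and fullFence forbids reordering in both directions. A minimal C++11 analogy (an approximation of the semantics, not the HotSpot implementation, which emits the MemBar nodes shown above):

// Sketch (not HotSpot code): C++11 analogues of the fence intrinsics.
#include <atomic>

// Unsafe.loadFence analogue: loads before the fence are not
// reordered with loads or stores after it.
void load_fence()  { std::atomic_thread_fence(std::memory_order_acquire); }

// Unsafe.storeFence analogue: loads and stores before the fence
// are not reordered with stores after it.
void store_fence() { std::atomic_thread_fence(std::memory_order_release); }

// Unsafe.fullFence analogue: no reordering across the fence in
// either direction.
void full_fence()  { std::atomic_thread_fence(std::memory_order_seq_cst); }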
2976 





2977 bool LibraryCallKit::klass_needs_init_guard(Node* kls) {
2978   if (!kls->is_Con()) {
2979     return true;
2980   }
2981   const TypeKlassPtr* klsptr = kls->bottom_type()->isa_klassptr();
2982   if (klsptr == NULL) {
2983     return true;
2984   }
2985   ciInstanceKlass* ik = klsptr->klass()->as_instance_klass();
2986   // don't need a guard for a klass that is already initialized
2987   return !ik->is_initialized();
2988 }
2989 
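klass_needs_init_guard above implements a small strength reduction: if the klass operand is a compile-time constant and that class is already initialized, no runtime initialization guard has to be emitted. A standalone C++ sketch of the same idea (the names ensure_initialized and read are hypothetical, not HotSpot code):

// Sketch (not HotSpot code): skip the runtime guard when the answer
// is already known at compile time, as klass_needs_init_guard does.
#include <cstdio>

void ensure_initialized(int* slot) {   // hypothetical runtime guard
  if (*slot == 0) *slot = 1;           // stand-in for class initialization
}

template <bool kKnownInitialized>
int read(int* slot) {
  if constexpr (!kKnownInitialized) {
    ensure_initialized(slot);          // guard emitted only when needed
  }
  return *slot;                        // constant-folded path has no guard
}

int main() {
  int slot = 0;
  std::printf("%d\n", read<false>(&slot));  // guarded path: initializes first
  std::printf("%d\n", read<true>(&slot));   // unguarded fast path
  return 0;
}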
2990 //----------------------------inline_unsafe_allocate---------------------------
2991 // public native Object Unsafe.allocateInstance(Class<?> cls);
2992 bool LibraryCallKit::inline_unsafe_allocate() {
2993   if (callee()->is_static())  return false;  // caller must have the capability!
2994 
2995   null_check_receiver();  // null-check, then ignore
2996   Node* cls = null_check(argument(1));


3338   if (expect_prim)  never_see_null = false;  // expect nulls (meaning prims)
3339 
3340   // Now load the mirror's klass metaobject, and null-check it.
3341   // Side-effects region with the control path if the klass is null.
3342   Node* kls = load_klass_from_mirror(mirror, never_see_null, region, _prim_path);
3343   // If kls is null, we have a primitive mirror.
3344   phi->init_req(_prim_path, prim_return_value);
3345   if (stopped()) { set_result(region, phi); return true; }
3346   bool safe_for_replace = (region->in(_prim_path) == top());
3347 
3348   Node* p;  // handy temp
3349   Node* null_ctl;
3350 
3351   // Now that we have the non-null klass, we can perform the real query.
3352   // For constant classes, the query will constant-fold in LoadNode::Value.
3353   Node* query_value = top();
3354   switch (id) {
3355   case vmIntrinsics::_isInstance:
3356     // nothing is an instance of a primitive type
3357     query_value = gen_instanceof(obj, kls, safe_for_replace);



3358     break;
3359 
3360   case vmIntrinsics::_getModifiers:
3361     p = basic_plus_adr(kls, in_bytes(Klass::modifier_flags_offset()));
3362     query_value = make_load(NULL, p, TypeInt::INT, T_INT, MemNode::unordered);
3363     break;
3364 
3365   case vmIntrinsics::_isInterface:
3366     // (To verify this code sequence, check the asserts in JVM_IsInterface.)
3367     if (generate_interface_guard(kls, region) != NULL)
3368       // A guard was added.  If the guard is taken, it was an interface.
3369       phi->add_req(intcon(1));
3370     // If we fall through, it's a plain class.
3371     query_value = intcon(0);
3372     break;
3373 
3374   case vmIntrinsics::_isArray:
3375     // (To verify this code sequence, check the asserts in JVM_IsArrayClass.)
3376     if (generate_array_guard(kls, region) != NULL)
3377       // A guard was added.  If the guard is taken, it was an array.




src/share/vm/opto/library_call.cpp (new)

 257   bool inline_array_copyOf(bool is_copyOfRange);
 258   bool inline_array_equals(StrIntrinsicNode::ArgEnc ae);
 259   void copy_to_clone(Node* obj, Node* alloc_obj, Node* obj_size, bool is_array, bool card_mark);
 260   bool inline_native_clone(bool is_virtual);
 261   bool inline_native_Reflection_getCallerClass();
 262   // Helper function for inlining native object hash method
 263   bool inline_native_hashcode(bool is_virtual, bool is_static);
 264   bool inline_native_getClass();
 265 
 266   // Helper functions for inlining arraycopy
 267   bool inline_arraycopy();
 268   AllocateArrayNode* tightly_coupled_allocation(Node* ptr,
 269                                                 RegionNode* slow_region);
 270   JVMState* arraycopy_restore_alloc_state(AllocateArrayNode* alloc, int& saved_reexecute_sp);
 271   void arraycopy_move_allocation_here(AllocateArrayNode* alloc, Node* dest, JVMState* saved_jvms, int saved_reexecute_sp);
 272 
 273   typedef enum { LS_xadd, LS_xchg, LS_cmpxchg } LoadStoreKind;
 274   bool inline_unsafe_load_store(BasicType type,  LoadStoreKind kind);
 275   bool inline_unsafe_ordered_store(BasicType type);
 276   bool inline_unsafe_fence(vmIntrinsics::ID id);
 277   bool inline_onspinwait();
 278   bool inline_fp_conversions(vmIntrinsics::ID id);
 279   bool inline_number_methods(vmIntrinsics::ID id);
 280   bool inline_reference_get();
 281   bool inline_Class_cast();
 282   bool inline_aescrypt_Block(vmIntrinsics::ID id);
 283   bool inline_cipherBlockChaining_AESCrypt(vmIntrinsics::ID id);
 284   Node* inline_cipherBlockChaining_AESCrypt_predicate(bool decrypting);
 285   Node* get_key_start_from_aescrypt_object(Node* aescrypt_object);
 286   Node* get_original_key_start_from_aescrypt_object(Node* aescrypt_object);
 287   bool inline_ghash_processBlocks();
 288   bool inline_sha_implCompress(vmIntrinsics::ID id);
 289   bool inline_digestBase_implCompressMB(int predicate);
 290   bool inline_sha_implCompressMB(Node* digestBaseObj, ciInstanceKlass* instklass_SHA,
 291                                  bool long_state, address stubAddr, const char *stubName,
 292                                  Node* src_start, Node* ofs, Node* limit);
 293   Node* get_state_from_sha_object(Node *sha_object);
 294   Node* get_state_from_sha5_object(Node *sha_object);
 295   Node* inline_digestBase_implCompressMB_predicate(int predicate);
 296   bool inline_encodeISOArray();
 297   bool inline_updateCRC32();


 613   case vmIntrinsics::_putLongUnaligned:         return inline_unsafe_access(!is_native_ptr,  is_store, T_LONG,    !is_volatile);
 614 
 615   case vmIntrinsics::_compareAndSwapObject:     return inline_unsafe_load_store(T_OBJECT, LS_cmpxchg);
 616   case vmIntrinsics::_compareAndSwapInt:        return inline_unsafe_load_store(T_INT,    LS_cmpxchg);
 617   case vmIntrinsics::_compareAndSwapLong:       return inline_unsafe_load_store(T_LONG,   LS_cmpxchg);
 618 
 619   case vmIntrinsics::_putOrderedObject:         return inline_unsafe_ordered_store(T_OBJECT);
 620   case vmIntrinsics::_putOrderedInt:            return inline_unsafe_ordered_store(T_INT);
 621   case vmIntrinsics::_putOrderedLong:           return inline_unsafe_ordered_store(T_LONG);
 622 
 623   case vmIntrinsics::_getAndAddInt:             return inline_unsafe_load_store(T_INT,    LS_xadd);
 624   case vmIntrinsics::_getAndAddLong:            return inline_unsafe_load_store(T_LONG,   LS_xadd);
 625   case vmIntrinsics::_getAndSetInt:             return inline_unsafe_load_store(T_INT,    LS_xchg);
 626   case vmIntrinsics::_getAndSetLong:            return inline_unsafe_load_store(T_LONG,   LS_xchg);
 627   case vmIntrinsics::_getAndSetObject:          return inline_unsafe_load_store(T_OBJECT, LS_xchg);
 628 
 629   case vmIntrinsics::_loadFence:
 630   case vmIntrinsics::_storeFence:
 631   case vmIntrinsics::_fullFence:                return inline_unsafe_fence(intrinsic_id());
 632 
 633   case vmIntrinsics::_onSpinWait:               return inline_onspinwait();
 634 
 635   case vmIntrinsics::_currentThread:            return inline_native_currentThread();
 636   case vmIntrinsics::_isInterrupted:            return inline_native_isInterrupted();
 637 
 638 #ifdef TRACE_HAVE_INTRINSICS
 639   case vmIntrinsics::_classID:                  return inline_native_classID();
 640   case vmIntrinsics::_threadID:                 return inline_native_threadID();
 641   case vmIntrinsics::_counterTime:              return inline_native_time_funcs(CAST_FROM_FN_PTR(address, TRACE_TIME_METHOD), "counterTime");
 642 #endif
 643   case vmIntrinsics::_currentTimeMillis:        return inline_native_time_funcs(CAST_FROM_FN_PTR(address, os::javaTimeMillis), "currentTimeMillis");
 644   case vmIntrinsics::_nanoTime:                 return inline_native_time_funcs(CAST_FROM_FN_PTR(address, os::javaTimeNanos), "nanoTime");
 645   case vmIntrinsics::_allocateInstance:         return inline_unsafe_allocate();
 646   case vmIntrinsics::_copyMemory:               return inline_unsafe_copyMemory();
 647   case vmIntrinsics::_newArray:                 return inline_native_newArray();
 648   case vmIntrinsics::_getLength:                return inline_native_getLength();
 649   case vmIntrinsics::_copyOf:                   return inline_array_copyOf(false);
 650   case vmIntrinsics::_copyOfRange:              return inline_array_copyOf(true);
 651   case vmIntrinsics::_equalsB:                  return inline_array_equals(StrIntrinsicNode::LL);
 652   case vmIntrinsics::_equalsC:                  return inline_array_equals(StrIntrinsicNode::UU);
 653   case vmIntrinsics::_clone:                    return inline_native_clone(intrinsic()->is_virtual());
 654 


2960 bool LibraryCallKit::inline_unsafe_fence(vmIntrinsics::ID id) {
2961   // Regardless of form, don't allow previous ld/st to move down,
2962   // then issue acquire, release, or volatile mem_bar.
2963   insert_mem_bar(Op_MemBarCPUOrder);
2964   switch(id) {
2965     case vmIntrinsics::_loadFence:
2966       insert_mem_bar(Op_LoadFence);
2967       return true;
2968     case vmIntrinsics::_storeFence:
2969       insert_mem_bar(Op_StoreFence);
2970       return true;
2971     case vmIntrinsics::_fullFence:
2972       insert_mem_bar(Op_MemBarVolatile);
2973       return true;
2974     default:
2975       fatal_unexpected_iid(id);
2976       return false;
2977   }
2978 }
2979 
2980 bool LibraryCallKit::inline_onspinwait() {
2981   insert_mem_bar(Op_OnSpinWait);
2982   return true;
2983 }
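
inline_onspinwait simply materializes an Op_OnSpinWait node; the backend is then free to match it to a CPU-specific spin-wait hint (on x86 this is the pause instruction). For intuition, a hand-rolled C++ spin loop achieves the same effect with the _mm_pause() intrinsic (a sketch assuming an x86 target, not HotSpot code):

// Sketch (not HotSpot code): busy-wait until `flag` becomes true,
// hinting the CPU that we are spinning -- the effect
// Thread.onSpinWait() is meant to have.
#include <atomic>
#include <immintrin.h>   // _mm_pause (x86)

void spin_until(const std::atomic<bool>& flag) {
  while (!flag.load(std::memory_order_acquire)) {
    _mm_pause();  // analogous to what Op_OnSpinWait lowers to on x86
  }
}

Functionally the hint is a no-op: it only reduces power draw and pipeline contention while the thread spins, which is why the node carries no result.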
2984 
2985 bool LibraryCallKit::klass_needs_init_guard(Node* kls) {
2986   if (!kls->is_Con()) {
2987     return true;
2988   }
2989   const TypeKlassPtr* klsptr = kls->bottom_type()->isa_klassptr();
2990   if (klsptr == NULL) {
2991     return true;
2992   }
2993   ciInstanceKlass* ik = klsptr->klass()->as_instance_klass();
2994   // don't need a guard for a klass that is already initialized
2995   return !ik->is_initialized();
2996 }
2997 
2998 //----------------------------inline_unsafe_allocate---------------------------
2999 // public native Object Unsafe.allocateInstance(Class<?> cls);
3000 bool LibraryCallKit::inline_unsafe_allocate() {
3001   if (callee()->is_static())  return false;  // caller must have the capability!
3002 
3003   null_check_receiver();  // null-check, then ignore
3004   Node* cls = null_check(argument(1));


3346   if (expect_prim)  never_see_null = false;  // expect nulls (meaning prims)
3347 
3348   // Now load the mirror's klass metaobject, and null-check it.
3349   // Side-effects region with the control path if the klass is null.
3350   Node* kls = load_klass_from_mirror(mirror, never_see_null, region, _prim_path);
3351   // If kls is null, we have a primitive mirror.
3352   phi->init_req(_prim_path, prim_return_value);
3353   if (stopped()) { set_result(region, phi); return true; }
3354   bool safe_for_replace = (region->in(_prim_path) == top());
3355 
3356   Node* p;  // handy temp
3357   Node* null_ctl;
3358 
3359   // Now that we have the non-null klass, we can perform the real query.
3360   // For constant classes, the query will constant-fold in LoadNode::Value.
3361   Node* query_value = top();
3362   switch (id) {
3363   case vmIntrinsics::_isInstance:
3364     // nothing is an instance of a primitive type
3365     query_value = gen_instanceof(obj, kls, safe_for_replace);
3366     break;
3367
3368   case vmIntrinsics::_onSpinWait:
3369     break;
3370 
3371   case vmIntrinsics::_getModifiers:
3372     p = basic_plus_adr(kls, in_bytes(Klass::modifier_flags_offset()));
3373     query_value = make_load(NULL, p, TypeInt::INT, T_INT, MemNode::unordered);
3374     break;
3375 
3376   case vmIntrinsics::_isInterface:
3377     // (To verify this code sequence, check the asserts in JVM_IsInterface.)
3378     if (generate_interface_guard(kls, region) != NULL)
3379       // A guard was added.  If the guard is taken, it was an interface.
3380       phi->add_req(intcon(1));
3381     // If we fall through, it's a plain class.
3382     query_value = intcon(0);
3383     break;
3384 
3385   case vmIntrinsics::_isArray:
3386     // (To verify this code sequence, check the asserts in JVM_IsArrayClass.)
3387     if (generate_array_guard(kls, region) != NULL)
3388       // A guard was added.  If the guard is taken, it was an array.