Lines Matching defs:__

141 #define __ sasm->
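
Everything below goes through this macro: `__ foo(...)` expands to `sasm->foo(...)`, so each listed line emits one instruction (or pseudo-instruction) on the current StubAssembler. A minimal, self-contained sketch of the convention, using a hypothetical MockAssembler in place of the real StubAssembler:

    #include <cstdio>

    // Hypothetical stand-in for StubAssembler: each method "emits" one instruction.
    class MockAssembler {
     public:
      void save_frame(int extra_words) { std::printf("save_frame %d\n", extra_words); }
      void ret()                       { std::printf("ret\n"); }
      void restore()                   { std::printf("restore\n"); }
    };

    #define __ sasm->   // same trick as the #define above

    static void generate_stub(MockAssembler* sasm) {
      __ save_frame(0);   // expands to sasm->save_frame(0)
      __ ret();
      __ restore();       // on SPARC this would sit in ret()'s delay slot
    }

    #undef __

    int main() { MockAssembler a; generate_stub(&a); return 0; }
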
149 assert(frame_size_in_bytes == __ total_frame_size_in_bytes(reg_save_size_in_words),
177 assert(frame_size_in_bytes == __ total_frame_size_in_bytes(reg_save_size_in_words),
179 __ save_frame_c1(frame_size_in_bytes);
193 __ st_ptr(r, SP, (sp_offset * BytesPerWord) + STACK_BIAS);
201 __ stf(FloatRegisterImpl::S, r, SP, (sp_offset * BytesPerWord) + STACK_BIAS);
212 __ ld_ptr(SP, (cpu_reg_save_offsets[i] * BytesPerWord) + STACK_BIAS, r);
219 __ ldf(FloatRegisterImpl::S, SP, (fpu_reg_save_offsets[i] * BytesPerWord) + STACK_BIAS, r);
280 call_offset = __ call_RT(noreg, noreg, target);
282 call_offset = __ call_RT(noreg, noreg, target, G4);
287 __ should_not_reach_here();
299 call_offset = __ call_RT(result, noreg, target);
301 call_offset = __ call_RT(result, noreg, target, arg1);
303 call_offset = __ call_RT(result, noreg, target, arg1, arg2);
305 call_offset = __ call_RT(result, noreg, target, arg1, arg2, arg3);
313 __ ret();
314 __ delayed()->restore();
325 int call_offset = __ call_RT(noreg, noreg, target);
335 __ br_null_short(O0, Assembler::pt, no_deopt);
344 __ jump_to(dest, O0);
345 __ delayed()->restore();
347 __ bind(no_deopt);
349 __ ret();
350 __ delayed()->restore();
378 __ set_info("new_instance", dont_gc_arguments);
380 __ set_info("fast new_instance", dont_gc_arguments);
383 __ set_info("fast new_instance init check", dont_gc_arguments);
397 __ save_frame(0);
401 __ ldub(G5_klass, in_bytes(instanceKlass::init_state_offset()), G3_t1);
402 __ cmp_and_br_short(G3_t1, instanceKlass::fully_initialized, Assembler::notEqual, Assembler::pn, slow_path);
408 __ ld(G5_klass, in_bytes(Klass::layout_helper_offset()), G1_obj_size);
410 __ cmp_and_br_short(G1_obj_size, 0, Assembler::lessEqual, Assembler::pn, not_ok);
411 __ btst(Klass::_lh_instance_slow_path_bit, G1_obj_size);
412 __ br(Assembler::zero, false, Assembler::pn, ok);
413 __ delayed()->nop();
414 __ bind(not_ok);
415 __ stop("assert(can be fast path allocated)");
416 __ should_not_reach_here();
417 __ bind(ok);
423 __ tlab_refill(retry_tlab, try_eden, slow_path); // preserves G5_klass
425 __ bind(retry_tlab);
428 __ ld(G5_klass, in_bytes(Klass::layout_helper_offset()), G1_obj_size);
430 __ tlab_allocate(O0_obj, G1_obj_size, 0, G3_t1, slow_path);
432 __ initialize_object(O0_obj, G5_klass, G1_obj_size, 0, G3_t1, G4_t2);
433 __ verify_oop(O0_obj);
434 __ mov(O0, I0);
435 __ ret();
436 __ delayed()->restore();
438 __ bind(try_eden);
440 __ ld(G5_klass, in_bytes(Klass::layout_helper_offset()), G1_obj_size);
441 __ eden_allocate(O0_obj, G1_obj_size, 0, G3_t1, G4_t2, slow_path);
442 __ incr_allocated_bytes(G1_obj_size, G3_t1, G4_t2);
444 __ initialize_object(O0_obj, G5_klass, G1_obj_size, 0, G3_t1, G4_t2);
445 __ verify_oop(O0_obj);
446 __ mov(O0, I0);
447 __ ret();
448 __ delayed()->restore();
450 __ bind(slow_path);
453 __ restore();
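
The new_instance fast path above reads the instance size from the klass layout_helper, bump-allocates it out of the current TLAB (or out of eden on the try_eden path, which also calls incr_allocated_bytes), initializes the object inline, and only falls through to the slow path when allocation fails. A rough C++ sketch of the TLAB step under assumed types (Tlab and tlab_allocate here are illustrative, not HotSpot API):

    #include <cstddef>
    #include <cstring>

    // Hypothetical thread-local allocation buffer: [top, end) is the free space.
    struct Tlab {
      char* top;
      char* end;
    };

    // Bump-pointer allocation, mirroring tlab_allocate: nullptr means "take the slow path".
    static void* tlab_allocate(Tlab& tlab, size_t obj_size_in_bytes) {
      if (tlab.end - tlab.top < (ptrdiff_t)obj_size_in_bytes)
        return nullptr;                        // the stub branches to slow_path here
      void* obj = tlab.top;
      tlab.top += obj_size_in_bytes;
      std::memset(obj, 0, obj_size_in_bytes);  // initialize_object: header + zeroed body
      return obj;
    }

    int main() {
      char heap[1024];
      Tlab tlab = { heap, heap + sizeof(heap) };
      void* obj = tlab_allocate(tlab, 64);     // 64 would come from the klass layout_helper
      return obj != nullptr ? 0 : 1;
    }
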
482 __ set_info("new_type_array", dont_gc_arguments);
484 __ set_info("new_object_array", dont_gc_arguments);
492 __ ld(klass_lh, G3_t1);
493 __ sra(G3_t1, Klass::_lh_array_tag_shift, G3_t1);
497 __ cmp_and_brx_short(G3_t1, tag, Assembler::equal, Assembler::pt, ok);
498 __ stop("assert(is an array klass)");
499 __ should_not_reach_here();
500 __ bind(ok);
512 __ set(C1_MacroAssembler::max_array_allocation_length, G3_t1);
513 __ cmp_and_br_short(G4_length, G3_t1, Assembler::greaterUnsigned, Assembler::pn, slow_path);
518 __ tlab_refill(retry_tlab, try_eden, slow_path); // preserves G4_length and G5_klass
520 __ bind(retry_tlab);
523 __ ld(klass_lh, G3_t1);
524 __ sll(G4_length, G3_t1, G1_arr_size);
525 __ srl(G3_t1, Klass::_lh_header_size_shift, G3_t1);
526 __ and3(G3_t1, Klass::_lh_header_size_mask, G3_t1);
527 __ add(G1_arr_size, G3_t1, G1_arr_size);
528 __ add(G1_arr_size, MinObjAlignmentInBytesMask, G1_arr_size); // align up
529 __ and3(G1_arr_size, ~MinObjAlignmentInBytesMask, G1_arr_size);
531 __ tlab_allocate(O0_obj, G1_arr_size, 0, G3_t1, slow_path); // preserves G1_arr_size
533 __ initialize_header(O0_obj, G5_klass, G4_length, G3_t1, O1_t2);
534 __ ldub(klass_lh, G3_t1, klass_lh_header_size_offset);
535 __ sub(G1_arr_size, G3_t1, O1_t2); // body length
536 __ add(O0_obj, G3_t1, G3_t1); // body start
537 __ initialize_body(G3_t1, O1_t2);
538 __ verify_oop(O0_obj);
539 __ retl();
540 __ delayed()->nop();
542 __ bind(try_eden);
544 __ ld(klass_lh, G3_t1);
545 __ sll(G4_length, G3_t1, G1_arr_size);
546 __ srl(G3_t1, Klass::_lh_header_size_shift, G3_t1);
547 __ and3(G3_t1, Klass::_lh_header_size_mask, G3_t1);
548 __ add(G1_arr_size, G3_t1, G1_arr_size);
549 __ add(G1_arr_size, MinObjAlignmentInBytesMask, G1_arr_size);
550 __ and3(G1_arr_size, ~MinObjAlignmentInBytesMask, G1_arr_size);
552 __ eden_allocate(O0_obj, G1_arr_size, 0, G3_t1, O1_t2, slow_path); // preserves G1_arr_size
553 __ incr_allocated_bytes(G1_arr_size, G3_t1, O1_t2);
555 __ initialize_header(O0_obj, G5_klass, G4_length, G3_t1, O1_t2);
556 __ ldub(klass_lh, G3_t1, klass_lh_header_size_offset);
557 __ sub(G1_arr_size, G3_t1, O1_t2); // body length
558 __ add(O0_obj, G3_t1, G3_t1); // body start
559 __ initialize_body(G3_t1, O1_t2);
560 __ verify_oop(O0_obj);
561 __ retl();
562 __ delayed()->nop();
564 __ bind(slow_path);
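
The repeated sll/srl/and3/add sequence above decodes the array klass layout_helper: the low bits give log2(element size), the header-size field gives the header length in bytes, and the sum is rounded up to the object alignment. A worked C++ sketch of that arithmetic; the shift, mask, and alignment constants are assumptions chosen for the example:

    #include <cstdint>
    #include <cstdio>

    // Assumed field positions, matching the Klass::_lh_* usage in the listing.
    static const uint32_t kHeaderSizeShift = 8;      // Klass::_lh_header_size_shift (assumed)
    static const uint32_t kHeaderSizeMask  = 0xFF;   // Klass::_lh_header_size_mask  (assumed)
    static const uint32_t kMinObjAlignment = 8;      // MinObjAlignmentInBytes       (assumed)
    static const uint32_t kAlignmentMask   = kMinObjAlignment - 1;

    static uint32_t array_size_in_bytes(uint32_t layout_helper, uint32_t length) {
      uint32_t size   = length << (layout_helper & 0x1F);   // sll: length << log2(element size)
      uint32_t header = (layout_helper >> kHeaderSizeShift) & kHeaderSizeMask;  // srl + and3
      size += header;                                       // add header size
      return (size + kAlignmentMask) & ~kAlignmentMask;     // align up (add + and3)
    }

    int main() {
      // Example: element shift 2 (4-byte elements), 16-byte header, length 10.
      uint32_t lh = (16u << kHeaderSizeShift) | 2u;
      std::printf("%u\n", array_size_in_bytes(lh, 10));     // prints 56
      return 0;
    }
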
580 __ set_info("new_multi_array", dont_gc_arguments);
588 __ set_info("register_finalizer", dont_gc_arguments);
593 __ load_klass(O0, t);
594 __ ld(t, in_bytes(Klass::access_flags_offset()), t);
595 __ set(JVM_ACC_HAS_FINALIZER, G3);
596 __ andcc(G3, t, G0);
597 __ br(Assembler::notZero, false, Assembler::pt, register_finalizer);
598 __ delayed()->nop();
601 __ retl();
602 __ delayed()->nop();
604 __ bind(register_finalizer);
606 int call_offset = __ call_RT(noreg, noreg,
614 __ ret();
615 __ delayed()->restore();
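
register_finalizer above takes the runtime path only when the receiver's klass actually has a finalizer: it loads the klass access flags, tests JVM_ACC_HAS_FINALIZER with andcc, and returns immediately if the bit is clear. A tiny sketch of that test; the flag value and the Klass layout here are assumptions for illustration:

    #include <cstdint>

    static const uint32_t JVM_ACC_HAS_FINALIZER = 0x40000000u;  // assumed value, for illustration

    struct KlassStub { uint32_t access_flags; };  // stand-in for Klass::access_flags_offset()

    // Mirrors the set/andcc/br test: only fall into the runtime call when the bit is set.
    static bool needs_finalizer_registration(const KlassStub* k) {
      return (k->access_flags & JVM_ACC_HAS_FINALIZER) != 0;    // andcc(G3, t, G0)
    }

    int main() {
      KlassStub k = { JVM_ACC_HAS_FINALIZER };
      return needs_finalizer_registration(&k) ? 0 : 1;
    }
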
620 { __ set_info("range_check_failed", dont_gc_arguments); // arguments will be discarded
627 { __ set_info("index_range_check_failed", dont_gc_arguments); // arguments will be discarded
634 { __ set_info("throw_div0_exception", dont_gc_arguments);
640 { __ set_info("throw_null_pointer_exception", dont_gc_arguments);
646 { __ set_info("handle_exception", dont_gc_arguments);
652 { __ set_info("handle_exception_from_callee", dont_gc_arguments);
662 __ set_info("unwind_exception", dont_gc_arguments);
663 __ mov(Oexception, Oexception->after_save());
664 __ add(I7, frame::pc_return_offset, Oissuing_pc->after_save());
666 __ call_VM_leaf(L7_thread_cache, CAST_FROM_FN_PTR(address, SharedRuntime::exception_handler_for_return_address),
668 __ verify_not_null_oop(Oexception->after_save());
671 __ mov(O0, G5); // Save the target address.
672 __ lduw(Address(G2_thread, JavaThread::is_method_handle_return_offset()), L0);
673 __ tst(L0); // Condition codes are preserved over the restore.
674 __ restore();
676 __ jmp(G5, 0);
677 __ delayed()->movcc(Assembler::notZero, false, Assembler::icc, L7_mh_SP_save, SP); // Restore SP if required.
683 __ set_info("throw_array_store_exception", dont_gc_arguments);
691 __ set_info("throw_class_cast_exception", dont_gc_arguments);
698 __ set_info("throw_incompatible_class_cast_exception", dont_gc_arguments);
713 __ save_frame(0); // Blow no registers!
715 __ check_klass_subtype_slow_path(G3, G1, L0, L1, L2, L4, NULL, &miss);
717 __ mov(1, G3);
718 __ ret(); // Result in G3 is 'true'
719 __ delayed()->restore(); // free copy or add can go here
721 __ bind(miss);
722 __ mov(0, G3);
723 __ ret(); // Result in G3 is 'false'
724 __ delayed()->restore(); // free copy or add can go here
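
slow_subtype_check_id wraps check_klass_subtype_slow_path and leaves 1 or 0 in G3. Conceptually the slow path is a linear scan of the sub-klass's secondary supers; a hedged sketch under assumed data structures (SecondarySupers is illustrative, not the HotSpot type):

    // Hypothetical stand-in for a klass's secondary-supers array.
    struct Klass;
    struct SecondarySupers {
      const Klass** elems;
      int           length;
    };

    // Effectively what check_klass_subtype_slow_path does: a linear scan.
    // Returns 1 ("true", left in G3 by the stub) on a hit, 0 on a miss.
    static int slow_subtype_check(const SecondarySupers& supers, const Klass* super) {
      for (int i = 0; i < supers.length; i++) {
        if (supers.elems[i] == super) return 1;   // hit:  mov(1, G3); ret
      }
      return 0;                                   // miss: mov(0, G3); ret
    }

    int main() {
      SecondarySupers none = { nullptr, 0 };
      return slow_subtype_check(none, nullptr);   // 0: not a subtype
    }
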
731 __ set_info("monitorenter", dont_gc_arguments);
737 int call_offset = __ call_RT(noreg, noreg, CAST_FROM_FN_PTR(address, monitorenter), G4, G5);
743 __ ret();
744 __ delayed()->restore();
754 __ set_info("monitorexit", dont_gc_arguments);
760 int call_offset = __ call_RT(noreg, noreg, CAST_FROM_FN_PTR(address, monitorexit), G4);
766 __ ret();
767 __ delayed()->restore();
773 __ set_info("deoptimize", dont_gc_arguments);
775 int call_offset = __ call_RT(noreg, noreg, CAST_FROM_FN_PTR(address, deoptimize));
782 __ jump_to(dest, O0);
783 __ delayed()->restore();
788 { __ set_info("access_field_patching", dont_gc_arguments);
794 { __ set_info("load_klass_patching", dont_gc_arguments);
801 __ set_info("dtrace_object_alloc", dont_gc_arguments);
806 __ save_thread(L7_thread_cache);
807 __ call(CAST_FROM_FN_PTR(address, SharedRuntime::dtrace_object_alloc),
809 __ delayed()->mov(I0, O0);
810 __ restore_thread(L7_thread_cache);
813 __ ret();
814 __ delayed()->restore();
823 __ save_frame(0);
824 __ set((int)id, O1);
825 __ call_RT(noreg, noreg, CAST_FROM_FN_PTR(address, unimplemented_entry), I0);
826 __ should_not_reach_here();
830 __ set_info("g1_pre_barrier_slow_id", dont_gc_arguments);
845 __ bind(restart);
848 __ ld_ptr(G2_thread, satb_q_index_byte_offset, tmp);
851 __ cmp_and_brx_short(tmp, G0, Assembler::equal, Assembler::pn, refill);
853 __ ld_ptr(G2_thread, satb_q_buf_byte_offset, tmp2);
854 __ sub(tmp, oopSize, tmp);
856 __ st_ptr(pre_val, tmp2, tmp); // [_buf + index] := pre_val
858 __ retl();
859 __ delayed()->st_ptr(tmp, G2_thread, satb_q_index_byte_offset);
861 __ bind(refill);
862 __ save_frame(0);
864 __ mov(pre_val, L0);
865 __ mov(tmp, L1);
866 __ mov(tmp2, L2);
868 __ call_VM_leaf(L7_thread_cache,
873 __ mov(L0, pre_val);
874 __ mov(L1, tmp);
875 __ mov(L2, tmp2);
877 __ br(Assembler::always, /*annul*/false, Assembler::pt, restart);
878 __ delayed()->restore();
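
The pre-barrier slow path above is a SATB enqueue: the per-thread index is decremented by one pointer slot and the field's previous value is stored into the thread's SATB buffer; an exhausted buffer (index == 0) takes the refill path, which calls into the runtime and then branches back to restart. A minimal C++ sketch of that logic (SatbQueue and runtime_refill are illustrative names, not HotSpot API):

    #include <cstddef>

    // Illustrative per-thread SATB queue: _index counts down in bytes, 0 means "buffer full".
    struct SatbQueue {
      void** _buf;      // satb_q_buf
      size_t _index;    // satb_q_index (a byte offset, like PtrQueue::_index)
    };

    // Stand-in for the call_VM_leaf refill: flush the full buffer to the collector
    // (omitted here) and reset the index so the fast path can enqueue again.
    static void runtime_refill(SatbQueue& q, size_t buf_size_in_bytes) {
      q._index = buf_size_in_bytes;
    }

    static void satb_enqueue(SatbQueue& q, void* pre_val, size_t buf_size_in_bytes) {
      if (q._index == 0)                           // cmp_and_brx_short(tmp, G0, equal, ..., refill)
        runtime_refill(q, buf_size_in_bytes);      // refill, then retry (the br back to restart)
      q._index -= sizeof(void*);                   // sub(tmp, oopSize, tmp)
      q._buf[q._index / sizeof(void*)] = pre_val;  // st_ptr(pre_val, tmp2, tmp)
    }

    int main() {
      void* storage[4];
      SatbQueue q = { storage, sizeof(storage) };
      int field_old_value;
      satb_enqueue(q, &field_old_value, sizeof(storage));
      return q._index == sizeof(storage) - sizeof(void*) ? 0 : 1;
    }
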
886 __ save_frame(0);
887 __ set((int)id, O1);
888 __ call_RT(noreg, noreg, CAST_FROM_FN_PTR(address, unimplemented_entry), I0);
889 __ should_not_reach_here();
893 __ set_info("g1_post_barrier_slow_id", dont_gc_arguments);
904 __ srlx(addr, CardTableModRefBS::card_shift, addr);
906 __ srl(addr, CardTableModRefBS::card_shift, addr);
910 __ set(rs, cardtable); // cardtable := <card table base>
911 __ ldub(addr, cardtable, tmp); // tmp := [addr + cardtable]
914 __ cmp_and_br_short(tmp, G0, Assembler::notEqual, Assembler::pt, not_already_dirty);
918 __ retl();
919 __ delayed()->nop();
922 __ bind(not_already_dirty);
925 __ add(addr, cardtable, tmp2);
928 __ stb(G0, tmp2, 0); // [cardPtr] := 0 (i.e., dirty).
943 __ bind(restart);
947 __ ld_ptr(G2_thread, dirty_card_q_index_byte_offset, tmp3);
950 __ cmp_and_brx_short(tmp3, G0, Assembler::equal, Assembler::pn, refill);
952 __ ld_ptr(G2_thread, dirty_card_q_buf_byte_offset, tmp4);
953 __ sub(tmp3, oopSize, tmp3);
955 __ st_ptr(tmp2, tmp4, tmp3); // [_buf + index] := <address_of_card>
957 __ retl();
958 __ delayed()->st_ptr(tmp3, G2_thread, dirty_card_q_index_byte_offset);
960 __ bind(refill);
961 __ save_frame(0);
963 __ mov(tmp2, L0);
964 __ mov(tmp3, L1);
965 __ mov(tmp4, L2);
967 __ call_VM_leaf(L7_thread_cache,
972 __ mov(L0, tmp2);
973 __ mov(L1, tmp3);
974 __ mov(L2, tmp4);
976 __ br(Assembler::always, /*annul*/false, Assembler::pt, restart);
977 __ delayed()->restore();
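
The post-barrier above first filters on the card table: the store address is shifted right by card_shift to index the table, and only a card that is not already dirty is marked and pushed on the dirty-card queue (the same index/buffer/refill pattern as the SATB queue above). A sketch of the filtering step; the card shift and card values are assumptions for the example:

    #include <cstdint>

    static const int     kCardShift = 9;   // CardTableModRefBS::card_shift (512-byte cards, assumed)
    static const uint8_t kDirty     = 0;   // dirty_card_val(): the stub stores G0 (zero)

    // The filtering step of the post-barrier: returns the card entry to enqueue on the
    // dirty-card queue, or nullptr when the card is already dirty (the early retl/nop exit).
    static uint8_t* post_barrier_filter(uint8_t* card_table, uintptr_t store_addr) {
      uint8_t* card = card_table + (store_addr >> kCardShift);  // srlx/srl + add(addr, cardtable, ...)
      if (*card == kDirty) return nullptr;                      // already dirty: nothing to do
      *card = kDirty;                                           // stb(G0, card, 0)
      return card;                                              // enqueue, like the SATB queue above
    }

    int main() {
      uint8_t cards[128];
      for (int i = 0; i < 128; i++) cards[i] = 0xFF;            // "clean" card value (assumed)
      uintptr_t addr = 3u << kCardShift;                        // store address, treated as a heap offset
      return post_barrier_filter(cards, addr) != nullptr ? 0 : 1;
    }
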
983 { __ set_info("unimplemented entry", dont_gc_arguments);
984 __ save_frame(0);
985 __ set((int)id, O1);
986 __ call_RT(noreg, noreg, CAST_FROM_FN_PTR(address, unimplemented_entry), O1);
987 __ should_not_reach_here();
996 __ block_comment("generate_handle_exception");
1011 __ ld_ptr(G2_thread, in_bytes(JavaThread::pending_exception_offset()), Oexception);
1012 __ ld_ptr(Oexception, 0, G0);
1013 __ st_ptr(G0, G2_thread, in_bytes(JavaThread::pending_exception_offset()));
1014 __ add(I7, frame::pc_return_offset, Oissuing_pc);
1019 __ mov(Oexception->after_save(), Oexception);
1020 __ mov(Oissuing_pc->after_save(), Oissuing_pc);
1027 __ save_frame_c1(frame_size_in_bytes);
1028 __ mov(Oexception->after_save(), Oexception);
1029 __ mov(Oissuing_pc->after_save(), Oissuing_pc);
1034 __ verify_not_null_oop(Oexception);
1037 __ st_ptr(Oexception, G2_thread, in_bytes(JavaThread::exception_oop_offset()));
1038 __ st_ptr(Oissuing_pc, G2_thread, in_bytes(JavaThread::exception_pc_offset()));
1041 __ mov(Oissuing_pc, I7);
1042 __ sub(I7, frame::pc_return_offset, I7);
1043 int call_offset = __ call_RT(noreg, noreg, CAST_FROM_FN_PTR(address, exception_handler_for_pc));
1056 __ jmp(O0, 0);
1057 __ delayed()->restore();
1061 __ mov(O0, G5); // Save the target address.
1062 __ lduw(Address(G2_thread, JavaThread::is_method_handle_return_offset()), L0);
1063 __ tst(L0); // Condition codes are preserved over the restore.
1064 __ restore();
1066 __ jmp(G5, 0); // jump to the exception handler
1067 __ delayed()->movcc(Assembler::notZero, false, Assembler::icc, L7_mh_SP_save, SP); // Restore SP if required.
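
generate_handle_exception moves the pending exception into the thread's exception_oop/exception_pc fields, asks exception_handler_for_pc for the handler that covers the issuing pc, and jumps there (restoring the saved SP for method handle call sites via the movcc above). A minimal sketch of that hand-off; the Thread struct and the lookup callback are illustrative stand-ins:

    #include <cstdint>

    // Illustrative stand-ins for the thread-local exception state touched by the stub.
    struct Thread {
      void*     pending_exception;  // JavaThread::pending_exception_offset()
      void*     exception_oop;      // JavaThread::exception_oop_offset()
      uintptr_t exception_pc;       // JavaThread::exception_pc_offset()
    };

    typedef uintptr_t (*HandlerLookup)(Thread*, uintptr_t);

    // Mirrors the stub's hand-off: move pending_exception into exception_oop, clear it,
    // record the issuing pc, then return the handler address the stub will jump to.
    static uintptr_t handle_exception(Thread* t, uintptr_t issuing_pc,
                                      HandlerLookup exception_handler_for_pc) {
      t->exception_oop     = t->pending_exception;  // ld_ptr(pending_exception) -> Oexception
      t->pending_exception = nullptr;               // st_ptr(G0, ..., pending_exception_offset)
      t->exception_pc      = issuing_pc;            // st_ptr(Oissuing_pc, ..., exception_pc_offset)
      return exception_handler_for_pc(t, issuing_pc);  // call_RT(..., exception_handler_for_pc)
    }

    static uintptr_t dummy_lookup(Thread*, uintptr_t) { return 0x1000; }

    int main() {
      int dummy_oop;
      Thread t = { &dummy_oop, nullptr, 0 };
      return handle_exception(&t, 0x2000, dummy_lookup) == 0x1000 ? 0 : 1;
    }
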
1076 #undef __