Lines matching refs:AArch64 in AArch64ISelLowering.cpp

1 //===-- AArch64ISelLowering.cpp - AArch64 DAG Lowering Implementation -----===//
10 // This file defines the interfaces that AArch64 uses to lower LLVM code into a
16 #include "AArch64.h"
51 addRegisterClass(MVT::i32, &AArch64::GPR32RegClass);
52 addRegisterClass(MVT::i64, &AArch64::GPR64RegClass);
53 addRegisterClass(MVT::f16, &AArch64::FPR16RegClass);
54 addRegisterClass(MVT::f32, &AArch64::FPR32RegClass);
55 addRegisterClass(MVT::f64, &AArch64::FPR64RegClass);
56 addRegisterClass(MVT::f128, &AArch64::FPR128RegClass);
60 addRegisterClass(MVT::v8i8, &AArch64::VPR64RegClass);
61 addRegisterClass(MVT::v4i16, &AArch64::VPR64RegClass);
62 addRegisterClass(MVT::v2i32, &AArch64::VPR64RegClass);
63 addRegisterClass(MVT::v1i64, &AArch64::VPR64RegClass);
64 addRegisterClass(MVT::v2f32, &AArch64::VPR64RegClass);
65 addRegisterClass(MVT::v16i8, &AArch64::VPR128RegClass);
66 addRegisterClass(MVT::v8i16, &AArch64::VPR128RegClass);
67 addRegisterClass(MVT::v4i32, &AArch64::VPR128RegClass);
68 addRegisterClass(MVT::v2i64, &AArch64::VPR128RegClass);
69 addRegisterClass(MVT::v4f32, &AArch64::VPR128RegClass);
70 addRegisterClass(MVT::v2f64, &AArch64::VPR128RegClass);
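
The block above (lines 51-70) pins down which register class each legal value type lives in: scalar integers in GPR32/GPR64, scalar FP in FPR16-FPR128, 64-bit vectors in VPR64 and 128-bit vectors in VPR128. A minimal sketch of what this enables downstream, assuming a TargetLowering reference TLI and a MachineRegisterInfo MRI (both names illustrative, not from the file):

    // After addRegisterClass() (and the usual computeRegisterProperties()
    // call later in the constructor), the rest of the backend can map a
    // legal type to its class and allocate a matching virtual register.
    const TargetRegisterClass *RC = TLI.getRegClassFor(MVT::v4f32);
    // RC == &AArch64::VPR128RegClass, per line 69 above.
    unsigned VReg = MRI.createVirtualRegister(RC);
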
81 // AArch64 does not have i1 loads, or much of anything for i1 really.
86 setStackPointerRegisterToSaveRestore(AArch64::XSP);
269 setExceptionPointerRegister(AArch64::X0);
270 setExceptionSelectorRegister(AArch64::X1);
309 static const unsigned LoadBares[] = {AArch64::LDXR_byte, AArch64::LDXR_hword,
310 AArch64::LDXR_word, AArch64::LDXR_dword};
311 static const unsigned LoadAcqs[] = {AArch64::LDAXR_byte, AArch64::LDAXR_hword,
312 AArch64::LDAXR_word, AArch64::LDAXR_dword};
313 static const unsigned StoreBares[] = {AArch64::STXR_byte, AArch64::STXR_hword,
314 AArch64::STXR_word, AArch64::STXR_dword};
315 static const unsigned StoreRels[] = {AArch64::STLXR_byte, AArch64::STLXR_hword,
316 AArch64::STLXR_word, AArch64::STLXR_dword};
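
These four tables (lines 309-316) are indexed by log2 of the access size, letting the atomic expansions below pick the 1-, 2-, 4- or 8-byte exclusive load or store, with or without acquire/release semantics, in a single lookup. A hedged sketch of that selection (the helper name and Acquire flag are illustrative, not from the file):

    // Hypothetical helper: map (size, ordering) to an exclusive-load opcode.
    static unsigned getExclusiveLoadOpcode(unsigned Size, bool Acquire) {
      assert(isPowerOf2_32(Size) && Size <= 8 && "unsupported atomic size");
      return Acquire ? LoadAcqs[Log2_32(Size)] : LoadBares[Log2_32(Size)];
    }
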
371 = Size == 8 ? &AArch64::GPR64RegClass : &AArch64::GPR32RegClass;
392 if (BinOpcode == AArch64::BICwww_lsl || BinOpcode == AArch64::BICxxx_lsl)
401 unsigned stxr_status = MRI.createVirtualRegister(&AArch64::GPR32RegClass);
402 MRI.constrainRegClass(stxr_status, &AArch64::GPR32wspRegClass);
405 BuildMI(BB, dl, TII->get(AArch64::CBNZw))
444 TRC = &AArch64::GPR64RegClass;
445 TRCsp = &AArch64::GPR64xspRegClass;
447 TRC = &AArch64::GPR32RegClass;
448 TRCsp = &AArch64::GPR32wspRegClass;
488 BuildMI(BB, dl, TII->get(Size == 8 ? AArch64::CSELxxxc : AArch64::CSELwwwc),
492 unsigned stxr_status = MRI.createVirtualRegister(&AArch64::GPR32RegClass);
493 MRI.constrainRegClass(stxr_status, &AArch64::GPR32wspRegClass);
497 BuildMI(BB, dl, TII->get(AArch64::CBNZw))
526 TRCsp = Size == 8 ? &AArch64::GPR64xspRegClass : &AArch64::GPR32wspRegClass;
561 unsigned CmpOp = Size == 8 ? AArch64::CMPxx_lsl : AArch64::CMPww_lsl;
565 BuildMI(BB, dl, TII->get(AArch64::Bcc))
574 unsigned stxr_status = MRI.createVirtualRegister(&AArch64::GPR32RegClass);
575 MRI.constrainRegClass(stxr_status, &AArch64::GPR32wspRegClass);
578 BuildMI(BB, dl, TII->get(AArch64::CBNZw))
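
Lines 371-578 all build variants of the same load-linked/store-conditional retry loop: load-exclusive the old value, compute the update, store-exclusive the result, and loop on CBNZ until the store-exclusive succeeds. A condensed sketch of the shared skeleton (block and register names such as LoopBB, Dest and StxrStatus are illustrative):

    // loop:
    //   ldxr  dest, [addr]            ; load-exclusive the old value
    //   <op>  scratch, dest, incr     ; e.g. ADDwww_lsl, BICwww_lsl, ...
    //   stxr  status, scratch, [addr] ; status == 0 iff the store won
    //   cbnz  status, loop            ; otherwise another agent intervened
    BuildMI(LoopBB, dl, TII->get(LdrOpc), Dest).addReg(Addr);
    BuildMI(LoopBB, dl, TII->get(BinOpcode), Scratch).addReg(Dest).addReg(Incr);
    BuildMI(LoopBB, dl, TII->get(StrOpc), StxrStatus).addReg(Scratch).addReg(Addr);
    BuildMI(LoopBB, dl, TII->get(AArch64::CBNZw)).addReg(StxrStatus).addMBB(LoopBB);
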
649 BuildMI(MBB, DL, TII->get(AArch64::LSFP128_STR))
653 BuildMI(MBB, DL, TII->get(AArch64::Bcc))
656 BuildMI(MBB, DL, TII->get(AArch64::Bimm))
663 BuildMI(TrueBB, DL, TII->get(AArch64::LSFP128_STR))
676 EndBB->addLiveIn(AArch64::NZCV);
678 BuildMI(*EndBB, StartOfEnd, DL, TII->get(AArch64::LSFP128_LDR), DestReg)
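
F128CSEL (lines 649-678) is expanded through a stack slot because AArch64 has no f128 register-to-register conditional select. A sketch of what the expansion computes (which input is stored first follows the stores at 649 and 663; simplified here):

    // Morally equivalent source-level logic:
    long double f128csel(bool cond, long double iftrue, long double iffalse) {
      long double slot = iffalse;  // LSFP128_STR of one input (line 649)
      if (cond)                    // Bcc (653) / Bimm (656) pick the block
        slot = iftrue;             // LSFP128_STR in TrueBB (line 663)
      return slot;                 // LSFP128_LDR into DestReg (line 678)
    }
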
691 case AArch64::F128CSEL:
693 case AArch64::ATOMIC_LOAD_ADD_I8:
694 return emitAtomicBinary(MI, MBB, 1, AArch64::ADDwww_lsl);
695 case AArch64::ATOMIC_LOAD_ADD_I16:
696 return emitAtomicBinary(MI, MBB, 2, AArch64::ADDwww_lsl);
697 case AArch64::ATOMIC_LOAD_ADD_I32:
698 return emitAtomicBinary(MI, MBB, 4, AArch64::ADDwww_lsl);
699 case AArch64::ATOMIC_LOAD_ADD_I64:
700 return emitAtomicBinary(MI, MBB, 8, AArch64::ADDxxx_lsl);
702 case AArch64::ATOMIC_LOAD_SUB_I8:
703 return emitAtomicBinary(MI, MBB, 1, AArch64::SUBwww_lsl);
704 case AArch64::ATOMIC_LOAD_SUB_I16:
705 return emitAtomicBinary(MI, MBB, 2, AArch64::SUBwww_lsl);
706 case AArch64::ATOMIC_LOAD_SUB_I32:
707 return emitAtomicBinary(MI, MBB, 4, AArch64::SUBwww_lsl);
708 case AArch64::ATOMIC_LOAD_SUB_I64:
709 return emitAtomicBinary(MI, MBB, 8, AArch64::SUBxxx_lsl);
711 case AArch64::ATOMIC_LOAD_AND_I8:
712 return emitAtomicBinary(MI, MBB, 1, AArch64::ANDwww_lsl);
713 case AArch64::ATOMIC_LOAD_AND_I16:
714 return emitAtomicBinary(MI, MBB, 2, AArch64::ANDwww_lsl);
715 case AArch64::ATOMIC_LOAD_AND_I32:
716 return emitAtomicBinary(MI, MBB, 4, AArch64::ANDwww_lsl);
717 case AArch64::ATOMIC_LOAD_AND_I64:
718 return emitAtomicBinary(MI, MBB, 8, AArch64::ANDxxx_lsl);
720 case AArch64::ATOMIC_LOAD_OR_I8:
721 return emitAtomicBinary(MI, MBB, 1, AArch64::ORRwww_lsl);
722 case AArch64::ATOMIC_LOAD_OR_I16:
723 return emitAtomicBinary(MI, MBB, 2, AArch64::ORRwww_lsl);
724 case AArch64::ATOMIC_LOAD_OR_I32:
725 return emitAtomicBinary(MI, MBB, 4, AArch64::ORRwww_lsl);
726 case AArch64::ATOMIC_LOAD_OR_I64:
727 return emitAtomicBinary(MI, MBB, 8, AArch64::ORRxxx_lsl);
729 case AArch64::ATOMIC_LOAD_XOR_I8:
730 return emitAtomicBinary(MI, MBB, 1, AArch64::EORwww_lsl);
731 case AArch64::ATOMIC_LOAD_XOR_I16:
732 return emitAtomicBinary(MI, MBB, 2, AArch64::EORwww_lsl);
733 case AArch64::ATOMIC_LOAD_XOR_I32:
734 return emitAtomicBinary(MI, MBB, 4, AArch64::EORwww_lsl);
735 case AArch64::ATOMIC_LOAD_XOR_I64:
736 return emitAtomicBinary(MI, MBB, 8, AArch64::EORxxx_lsl);
738 case AArch64::ATOMIC_LOAD_NAND_I8:
739 return emitAtomicBinary(MI, MBB, 1, AArch64::BICwww_lsl);
740 case AArch64::ATOMIC_LOAD_NAND_I16:
741 return emitAtomicBinary(MI, MBB, 2, AArch64::BICwww_lsl);
742 case AArch64::ATOMIC_LOAD_NAND_I32:
743 return emitAtomicBinary(MI, MBB, 4, AArch64::BICwww_lsl);
744 case AArch64::ATOMIC_LOAD_NAND_I64:
745 return emitAtomicBinary(MI, MBB, 8, AArch64::BICxxx_lsl);
747 case AArch64::ATOMIC_LOAD_MIN_I8:
748 return emitAtomicBinaryMinMax(MI, MBB, 1, AArch64::CMPww_sxtb, A64CC::GT);
749 case AArch64::ATOMIC_LOAD_MIN_I16:
750 return emitAtomicBinaryMinMax(MI, MBB, 2, AArch64::CMPww_sxth, A64CC::GT);
751 case AArch64::ATOMIC_LOAD_MIN_I32:
752 return emitAtomicBinaryMinMax(MI, MBB, 4, AArch64::CMPww_lsl, A64CC::GT);
753 case AArch64::ATOMIC_LOAD_MIN_I64:
754 return emitAtomicBinaryMinMax(MI, MBB, 8, AArch64::CMPxx_lsl, A64CC::GT);
756 case AArch64::ATOMIC_LOAD_MAX_I8:
757 return emitAtomicBinaryMinMax(MI, MBB, 1, AArch64::CMPww_sxtb, A64CC::LT);
758 case AArch64::ATOMIC_LOAD_MAX_I16:
759 return emitAtomicBinaryMinMax(MI, MBB, 2, AArch64::CMPww_sxth, A64CC::LT);
760 case AArch64::ATOMIC_LOAD_MAX_I32:
761 return emitAtomicBinaryMinMax(MI, MBB, 4, AArch64::CMPww_lsl, A64CC::LT);
762 case AArch64::ATOMIC_LOAD_MAX_I64:
763 return emitAtomicBinaryMinMax(MI, MBB, 8, AArch64::CMPxx_lsl, A64CC::LT);
765 case AArch64::ATOMIC_LOAD_UMIN_I8:
766 return emitAtomicBinaryMinMax(MI, MBB, 1, AArch64::CMPww_uxtb, A64CC::HI);
767 case AArch64::ATOMIC_LOAD_UMIN_I16:
768 return emitAtomicBinaryMinMax(MI, MBB, 2, AArch64::CMPww_uxth, A64CC::HI);
769 case AArch64::ATOMIC_LOAD_UMIN_I32:
770 return emitAtomicBinaryMinMax(MI, MBB, 4, AArch64::CMPww_lsl, A64CC::HI);
771 case AArch64::ATOMIC_LOAD_UMIN_I64:
772 return emitAtomicBinaryMinMax(MI, MBB, 8, AArch64::CMPxx_lsl, A64CC::HI);
774 case AArch64::ATOMIC_LOAD_UMAX_I8:
775 return emitAtomicBinaryMinMax(MI, MBB, 1, AArch64::CMPww_uxtb, A64CC::LO);
776 case AArch64::ATOMIC_LOAD_UMAX_I16:
777 return emitAtomicBinaryMinMax(MI, MBB, 2, AArch64::CMPww_uxth, A64CC::LO);
778 case AArch64::ATOMIC_LOAD_UMAX_I32:
779 return emitAtomicBinaryMinMax(MI, MBB, 4, AArch64::CMPww_lsl, A64CC::LO);
780 case AArch64::ATOMIC_LOAD_UMAX_I64:
781 return emitAtomicBinaryMinMax(MI, MBB, 8, AArch64::CMPxx_lsl, A64CC::LO);
783 case AArch64::ATOMIC_SWAP_I8:
785 case AArch64::ATOMIC_SWAP_I16:
787 case AArch64::ATOMIC_SWAP_I32:
789 case AArch64::ATOMIC_SWAP_I64:
792 case AArch64::ATOMIC_CMP_SWAP_I8:
794 case AArch64::ATOMIC_CMP_SWAP_I16:
796 case AArch64::ATOMIC_CMP_SWAP_I32:
798 case AArch64::ATOMIC_CMP_SWAP_I64:
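
Two details of this dispatch table are worth calling out. First, the NAND cases (738-745) reuse the BIC (and-not) opcodes, which is why emitAtomicBinary singles out BICwww_lsl/BICxxx_lsl at line 392 above. Second, each min/max case pairs a width-appropriate compare (sign-extending sxtb/sxth for narrow signed ops, zero-extending uxtb/uxth for unsigned) with a condition code chosen so that the update inside the retry loop reduces to a conditional select. My reading of the CMP (561) and CSEL (488) pair, as self-contained C++:

    #include <cstdint>

    // ATOMIC_LOAD_MIN_I32 (CMPww_lsl + A64CC::GT): keep the smaller value.
    int32_t min_step(int32_t Old, int32_t Incr) {
      return (Old > Incr) ? Incr : Old;   // CSEL picks Incr when GT holds
    }

    // ATOMIC_LOAD_UMAX_I32 (CMPww_lsl + A64CC::LO): keep the larger value.
    uint32_t umax_step(uint32_t Old, uint32_t Incr) {
      return (Old < Incr) ? Incr : Old;   // CSEL picks Incr when LO holds
    }
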
842 AArch64::Q0, AArch64::Q1, AArch64::Q2, AArch64::Q3,
843 AArch64::Q4, AArch64::Q5, AArch64::Q6, AArch64::Q7
848 AArch64::X0, AArch64::X1, AArch64::X2, AArch64::X3,
849 AArch64::X4, AArch64::X5, AArch64::X6, AArch64::X7
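
These two arrays (842-849) encode the AAPCS64 argument registers: the first eight integer or pointer arguments travel in X0-X7 and the first eight floating-point/SIMD arguments in Q0-Q7; anything beyond that goes to the stack, which is what the register save-area code below (line 900 onward) has to handle for varargs. For example:

    // a0..a7 arrive in X0..X7; a8 no longer fits and is passed at [sp].
    long nine_args(long a0, long a1, long a2, long a3,
                   long a4, long a5, long a6, long a7, long a8);
    // Mixed signatures draw from both arrays independently:
    double mixed(long a /* X0 */, double d /* D0, the low half of Q0 */);
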
900 unsigned VReg = MF.addLiveIn(AArch64ArgRegs[i], &AArch64::GPR64RegClass);
920 &AArch64::FPR128RegClass);
1017 case 8: DestSubReg = AArch64::sub_8; break;
1018 case 16: DestSubReg = AArch64::sub_16; break;
1019 case 32: DestSubReg = AArch64::sub_32; break;
1020 case 64: DestSubReg = AArch64::sub_64; break;
1171 // On AArch64 (and all other architectures I'm aware of) the most this has to
1207 SDValue StackPtr = DAG.getCopyFromReg(Chain, dl, AArch64::XSP,
1232 case 8: SrcSubReg = AArch64::sub_8; break;
1233 case 16: SrcSubReg = AArch64::sub_16; break;
1234 case 32: SrcSubReg = AArch64::sub_32; break;
1235 case 64: SrcSubReg = AArch64::sub_64; break;
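
Both switches (1017-1020 and 1232-1235) translate a bit width into a sub-register index so narrow argument and return values can be moved as sub-registers of a wider register. A sketch of the typical use, with Val64 a stand-in for some 64-bit SDValue (hypothetical; the era-appropriate two-argument getTargetConstant is assumed):

    // Narrow a 64-bit value to its low 32 bits via EXTRACT_SUBREG.
    SDValue Idx = DAG.getTargetConstant(AArch64::sub_32, MVT::i32);
    SDValue Lo  = SDValue(DAG.getMachineNode(TargetOpcode::EXTRACT_SUBREG,
                                             dl, MVT::i32, Val64, Idx), 0);
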
1752 // AArch64 BooleanContents is the default UndefinedBooleanContent, which means
1992 // AArch64's small model demands the following sequence:
2045 Chain = DAG.getCopyToReg(DAG.getEntryNode(), DL, AArch64::X0, DescAddr, Glue);
2060 Ops.push_back(DAG.getRegister(AArch64::X0, PtrVT));
2071 return DAG.getCopyFromReg(Chain, DL, AArch64::X0, PtrVT, Glue);
2107 TPOff = SDValue(DAG.getMachineNode(AArch64::MOVZxii, DL, PtrVT, HiVar,
2109 TPOff = SDValue(DAG.getMachineNode(AArch64::MOVKxii, DL, PtrVT,
2155 TPOff = SDValue(DAG.getMachineNode(AArch64::MOVZxii, DL, PtrVT, HiVar,
2157 TPOff = SDValue(DAG.getMachineNode(AArch64::MOVKxii, DL, PtrVT,
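
The MOVZxii/MOVKxii pairs (2107-2157) materialize a thread-pointer-relative offset sixteen bits at a time: MOVZ sets bits [31:16] and zeroes the rest, then MOVK inserts bits [15:0] without disturbing what is already there. As plain arithmetic, with hi16/lo16 standing in for the two relocation halves (illustrative names):

    #include <cstdint>

    // What the MOVZ/MOVK pair computes, as a sketch:
    uint64_t tp_offset(uint16_t hi16, uint16_t lo16) {
      uint64_t Off = uint64_t(hi16) << 16;   // MOVZxii, halfword slot 1
      Off = (Off & ~0xFFFFull) | lo16;       // MOVKxii, halfword slot 0
      return Off;
    }
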
2276 // AArch64 BooleanContents is the default UndefinedBooleanContent, which means
2570 // The layout of the va_list struct is specified in the AArch64 Procedure Call
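
Line 2570 refers to the va_list layout mandated by the AArch64 Procedure Call Standard (AAPCS64, section B.3), which the varargs lowering here has to reproduce field by field. For reference, that structure is:

    // AAPCS64 va_list: three area pointers plus two (negative) offsets.
    struct aapcs64_va_list {
      void *__stack;   // next stacked argument
      void *__gr_top;  // one past the end of the GP register save area
      void *__vr_top;  // one past the end of the FP/SIMD register save area
      int   __gr_offs; // offset from __gr_top to the next GP register arg
      int   __vr_offs; // offset from __vr_top to the next FP/SIMD register arg
    };
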
3478 return std::make_pair(0U, &AArch64::GPR32RegClass);
3480 return std::make_pair(0U, &AArch64::GPR64RegClass);
3484 return std::make_pair(0U, &AArch64::FPR16RegClass);
3486 return std::make_pair(0U, &AArch64::FPR32RegClass);
3488 return std::make_pair(0U, &AArch64::FPR64RegClass);
3490 return std::make_pair(0U, &AArch64::VPR64RegClass);
3492 return std::make_pair(0U, &AArch64::FPR128RegClass);
3494 return std::make_pair(0U, &AArch64::VPR128RegClass);
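
These pairs (3478-3494) implement the inline-assembly constraint mapping: 'r' resolves to GPR32 or GPR64 by operand width, and the FP/SIMD constraint (on AArch64, 'w') to the FPR/VPR classes. A small usage example:

    // 'r' picks GPR32 for a 32-bit operand (line 3478), so %w0/%w1 print
    // the 32-bit views of whichever registers the allocator chose.
    int add_one(int x) {
      int r;
      __asm__("add %w0, %w1, #1" : "=r"(r) : "r"(x));
      return r;
    }
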