Lines Matching defs:vixl
(Hits for "vixl" in ART's arm64 code-generation helpers, compiler/optimizing/common_arm64.h; each hit is prefixed with its line number in that file.)

48 return vixl::aarch64::kSPRegInternalCode;
51 return vixl::aarch64::kZeroRegCode;
57 if (code == vixl::aarch64::kSPRegInternalCode) {
60 if (code == vixl::aarch64::kZeroRegCode) {
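The first hits are ART's two register-code translation helpers (lines 48-60). ART and VIXL agree on the codes of x0-x30 but diverge for the stack pointer and the zero register: VIXL gives xzr/wzr the code 31 (kZeroRegCode) and SP a separate internal code (kSPRegInternalCode), while ART numbers SP and XZR after x30. A minimal standalone sketch of the round trip, with the constants mirrored as plain ints (values assumed from the ART and VIXL headers):

    #include <cassert>

    // Assumed values, mirroring art/runtime/arch/arm64/registers_arm64.h
    // and VIXL's constants-aarch64.h.
    constexpr int kArtSP = 31;                  // ART: SP follows X30.
    constexpr int kArtXZR = 32;                 // ART: XZR follows SP.
    constexpr int kVixlZeroRegCode = 31;        // VIXL: kZeroRegCode.
    constexpr int kVixlSPRegInternalCode = 63;  // VIXL: kSPRegInternalCode.

    int VIXLRegCodeFromART(int code) {
      if (code == kArtSP) return kVixlSPRegInternalCode;
      if (code == kArtXZR) return kVixlZeroRegCode;
      return code;  // x0-x30 share the same codes.
    }

    int ARTRegCodeFromVIXL(int code) {
      if (code == kVixlSPRegInternalCode) return kArtSP;
      if (code == kVixlZeroRegCode) return kArtXZR;
      return code;
    }

    int main() {
      for (int art_code = 0; art_code <= kArtXZR; ++art_code) {
        assert(ARTRegCodeFromVIXL(VIXLRegCodeFromART(art_code)) == art_code);
      }
      return 0;
    }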
66 inline vixl::aarch64::Register XRegisterFrom(Location location) {
68 return vixl::aarch64::Register::GetXRegFromCode(VIXLRegCodeFromART(location.reg()));
71 inline vixl::aarch64::Register WRegisterFrom(Location location) {
73 return vixl::aarch64::Register::GetWRegFromCode(VIXLRegCodeFromART(location.reg()));
76 inline vixl::aarch64::Register RegisterFrom(Location location, DataType::Type type) {
81 inline vixl::aarch64::Register OutputRegister(HInstruction* instr) {
85 inline vixl::aarch64::Register InputRegisterAt(HInstruction* instr, int input_index) {
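Lines 66-85 convert an ART Location (a register-allocator slot) into a VIXL core register. Since x3 and w3 are two views of the same register code, the helpers only decide the width: XRegisterFrom returns the 64-bit view, WRegisterFrom the 32-bit one, and RegisterFrom dispatches on the DataType. A simplified stand-in sketch (Location and DataType reduced to plain values):

    #include <cassert>

    // Stand-ins for Location / DataType::Type / the VIXL Register; only the
    // width-selection logic is modeled.
    enum class Type { kInt32, kInt64, kReference };
    struct Register { int code; int size_in_bits; };

    Register XRegisterFrom(int code) { return {code, 64}; }
    Register WRegisterFrom(int code) { return {code, 32}; }

    Register RegisterFrom(int code, Type type) {
      return type == Type::kInt64 ? XRegisterFrom(code) : WRegisterFrom(code);
    }

    int main() {
      assert(RegisterFrom(3, Type::kInt64).size_in_bits == 64);      // x3
      assert(RegisterFrom(3, Type::kReference).size_in_bits == 32);  // w3: heap references are 32-bit
      return 0;
    }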
90 inline vixl::aarch64::FPRegister DRegisterFrom(Location location) {
92 return vixl::aarch64::FPRegister::GetDRegFromCode(location.reg());
95 inline vixl::aarch64::FPRegister QRegisterFrom(Location location) {
97 return vixl::aarch64::FPRegister::GetQRegFromCode(location.reg());
100 inline vixl::aarch64::FPRegister VRegisterFrom(Location location) {
102 return vixl::aarch64::FPRegister::GetVRegFromCode(location.reg());
105 inline vixl::aarch64::FPRegister SRegisterFrom(Location location) {
107 return vixl::aarch64::FPRegister::GetSRegFromCode(location.reg());
110 inline vixl::aarch64::FPRegister FPRegisterFrom(Location location, DataType::Type type) {
115 inline vixl::aarch64::FPRegister OutputFPRegister(HInstruction* instr) {
119 inline vixl::aarch64::FPRegister InputFPRegisterAt(HInstruction* instr, int input_index) {
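Lines 90-119 are the floating-point counterparts: S, D and Q are 32-, 64- and 128-bit views of the same SIMD&FP register, and FPRegisterFrom picks the view from the type (kFloat64 takes the D view, otherwise the S view). The same stand-in style:

    #include <cassert>

    // Stand-in sketch: one FP register code, three views of different widths.
    enum class FpType { kFloat32, kFloat64 };
    struct FPRegister { int code; int size_in_bits; };

    FPRegister SRegisterFrom(int code) { return {code, 32}; }
    FPRegister DRegisterFrom(int code) { return {code, 64}; }
    FPRegister QRegisterFrom(int code) { return {code, 128}; }  // SIMD view

    FPRegister FPRegisterFrom(int code, FpType type) {
      return type == FpType::kFloat64 ? DRegisterFrom(code) : SRegisterFrom(code);
    }

    int main() {
      assert(FPRegisterFrom(0, FpType::kFloat64).size_in_bits == 64);  // d0
      assert(FPRegisterFrom(0, FpType::kFloat32).size_in_bits == 32);  // s0
      return 0;
    }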
124 inline vixl::aarch64::CPURegister CPURegisterFrom(Location location, DataType::Type type) {
126 ? vixl::aarch64::CPURegister(FPRegisterFrom(location, type))
127 : vixl::aarch64::CPURegister(RegisterFrom(location, type));
130 inline vixl::aarch64::CPURegister OutputCPURegister(HInstruction* instr) {
132 ? static_cast<vixl::aarch64::CPURegister>(OutputFPRegister(instr))
133 : static_cast<vixl::aarch64::CPURegister>(OutputRegister(instr));
136 inline vixl::aarch64::CPURegister InputCPURegisterAt(HInstruction* instr, int index) {
138 ? static_cast<vixl::aarch64::CPURegister>(InputFPRegisterAt(instr, index))
139 : static_cast<vixl::aarch64::CPURegister>(InputRegisterAt(instr, index));
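Lines 124-139 unify the two banks behind vixl::aarch64::CPURegister, VIXL's common type for core and FP registers; the only decision is which bank to pull from, keyed on whether the DataType is floating point. A sketch of that dispatch with a small tagged struct in place of CPURegister:

    #include <cassert>

    // Tagged stand-in for vixl::aarch64::CPURegister; only the core-vs-FP
    // dispatch is modeled.
    enum class Type { kInt32, kInt64, kFloat32, kFloat64 };
    enum class Bank { kCore, kFP };
    struct CPURegister { Bank bank; int code; };

    bool IsFloatingPointType(Type t) {
      return t == Type::kFloat32 || t == Type::kFloat64;
    }

    CPURegister CPURegisterFrom(int code, Type type) {
      return {IsFloatingPointType(type) ? Bank::kFP : Bank::kCore, code};
    }

    int main() {
      assert(CPURegisterFrom(5, Type::kFloat64).bank == Bank::kFP);  // d5
      assert(CPURegisterFrom(5, Type::kInt64).bank == Bank::kCore);  // x5
      return 0;
    }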
142 inline vixl::aarch64::CPURegister InputCPURegisterOrZeroRegAt(HInstruction* instr,
147 return (DataType::Size(input_type) >= vixl::aarch64::kXRegSizeInBytes)
148 ? vixl::aarch64::Register(vixl::aarch64::xzr)
149 : vixl::aarch64::Register(vixl::aarch64::wzr);
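InputCPURegisterOrZeroRegAt (lines 142-149) adds one optimization: if the input is the constant zero, no register is consumed at all and the hardware zero register is substituted instead, xzr when the operand is X-register sized (DataType::Size >= kXRegSizeInBytes, i.e. 8 bytes) and wzr otherwise. The width rule in isolation:

    #include <cassert>
    #include <cstddef>

    // kXRegSizeInBytes mirrors vixl::aarch64::kXRegSizeInBytes.
    constexpr size_t kXRegSizeInBytes = 8;

    enum class ZeroReg { wzr, xzr };

    // Pick the zero register matching the operand width (the rule on
    // lines 147-149).
    ZeroReg ZeroRegForSize(size_t size_in_bytes) {
      return size_in_bytes >= kXRegSizeInBytes ? ZeroReg::xzr : ZeroReg::wzr;
    }

    int main() {
      assert(ZeroRegForSize(4) == ZeroReg::wzr);  // e.g. storing an Int32 zero
      assert(ZeroRegForSize(8) == ZeroReg::xzr);  // e.g. storing an Int64 zero
      return 0;
    }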
166 inline vixl::aarch64::Operand OperandFrom(Location location, DataType::Type type) {
168 return vixl::aarch64::Operand(RegisterFrom(location, type));
170 return vixl::aarch64::Operand(Int64ConstantFrom(location));
174 inline vixl::aarch64::Operand InputOperandAt(HInstruction* instr, int input_index) {
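Lines 166-174 build vixl::aarch64::Operand values, the right-hand side of data-processing instructions: a register when the Location holds one, otherwise the constant's value as an immediate. Sketched with a std::variant standing in for Operand:

    #include <cassert>
    #include <cstdint>
    #include <variant>

    // Stand-ins: a Location is either a register or a constant; an Operand
    // is either a register or an immediate.
    struct Reg { int code; };
    using Operand = std::variant<Reg, int64_t>;

    struct Location {
      bool is_register;
      int reg_code;      // valid when is_register
      int64_t constant;  // valid otherwise
    };

    Operand OperandFrom(const Location& loc) {
      if (loc.is_register) return Reg{loc.reg_code};
      return loc.constant;  // constant locations become immediates
    }

    int main() {
      assert(std::holds_alternative<Reg>(OperandFrom({true, 7, 0})));
      assert(std::get<int64_t>(OperandFrom({false, 0, 42})) == 42);
      return 0;
    }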
179 inline vixl::aarch64::MemOperand StackOperandFrom(Location location) {
180 return vixl::aarch64::MemOperand(vixl::aarch64::sp, location.GetStackIndex());
183 inline vixl::aarch64::MemOperand HeapOperand(const vixl::aarch64::Register& base,
187 return vixl::aarch64::MemOperand(base.X(), offset);
190 inline vixl::aarch64::MemOperand HeapOperand(const vixl::aarch64::Register& base,
191 const vixl::aarch64::Register& regoffset,
192 vixl::aarch64::Shift shift = vixl::aarch64::LSL,
196 return vixl::aarch64::MemOperand(base.X(), regoffset, shift, shift_amount);
199 inline vixl::aarch64::MemOperand HeapOperand(const vixl::aarch64::Register& base,
204 inline vixl::aarch64::MemOperand HeapOperandFrom(Location location, Offset offset) {
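Lines 179-204 wrap VIXL's MemOperand. StackOperandFrom addresses relative to sp, while the HeapOperand overloads address relative to an object pointer, either as [base, #offset] or as [base, regoffset, LSL #shift]. Note base.X() on lines 187 and 196: heap addresses are 64-bit, so the base is always used through its X view no matter how it was passed in. What the two forms compute, as plain arithmetic:

    #include <cassert>
    #include <cstdint>

    // Sketch of the two addressing forms HeapOperand builds.
    uint64_t AddressImm(uint64_t base, int64_t offset) {
      return base + offset;                // MemOperand(base.X(), offset)
    }

    uint64_t AddressRegShift(uint64_t base, uint64_t regoffset, unsigned shift) {
      return base + (regoffset << shift);  // MemOperand(base.X(), regoffset, LSL, shift)
    }

    int main() {
      // Illustrative values only: a field at offset 16, then element 3 of an
      // array with 8-byte elements and data starting at the base address.
      assert(AddressImm(0x1000, 16) == 0x1010);
      assert(AddressRegShift(0x1000, 3, 3) == 0x1018);
      return 0;
    }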
208 inline Location LocationFrom(const vixl::aarch64::Register& reg) {
212 inline Location LocationFrom(const vixl::aarch64::FPRegister& fpreg) {
216 inline vixl::aarch64::Operand OperandFromMemOperand(
217 const vixl::aarch64::MemOperand& mem_op) {
219 return vixl::aarch64::Operand(mem_op.GetOffset());
222 if (mem_op.GetExtend() != vixl::aarch64::NO_EXTEND) {
223 return vixl::aarch64::Operand(mem_op.GetRegisterOffset(),
226 } else if (mem_op.GetShift() != vixl::aarch64::NO_SHIFT) {
227 return vixl::aarch64::Operand(mem_op.GetRegisterOffset(),
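OperandFromMemOperand (lines 216-227) goes the other way: it extracts the offset part of a MemOperand as an Operand. This appears to be for cases where an offset does not fit the load/store encoding, so the address is materialized with an explicit ADD of that Operand before the access. The three branches mirror MemOperand's forms: a plain immediate (line 219), a register offset with an extend (lines 222-223), and a register offset with a shift (lines 226-227). The immediate case in stand-in form:

    #include <cassert>
    #include <cstdint>

    // Stand-ins; the real helper also returns register-offset Operands that
    // carry the original extend or shift.
    struct MemOperand { int base_code; int64_t offset; };
    struct Operand { int64_t immediate; };

    Operand OperandFromMemOperand(const MemOperand& mem_op) {
      return Operand{mem_op.offset};  // immediate-offset case
    }

    int main() {
      // Oversized offset: compute temp = base + 0x12345, then access [temp].
      MemOperand mem{1, 0x12345};
      assert(OperandFromMemOperand(mem).immediate == 0x12345);
      return 0;
    }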
245 return vixl::aarch64::Assembler::IsImmFP32(constant->AsFloatConstant()->GetValue());
247 return vixl::aarch64::Assembler::IsImmFP64(constant->AsDoubleConstant()->GetValue());
265 return vixl::aarch64::Assembler::IsImmLogical(value, vixl::aarch64::kXRegSize);
268 return vixl::aarch64::Assembler::IsImmMovn(value, vixl::aarch64::kXRegSize);
280 return vixl::aarch64::Assembler::IsImmAddSub(value)
281 || vixl::aarch64::Assembler::IsImmAddSub(-value);
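Lines 245-281 gate constant operands on what the ISA can actually encode: FMOV float immediates (IsImmFP32/IsImmFP64), logical bitmask immediates for AND/ORR/EOR (IsImmLogical), MOVN immediates (IsImmMovn), and ADD/SUB immediates. The ADD/SUB form is a 12-bit unsigned value, optionally shifted left by 12; lines 280-281 test both value and -value because an unencodable ADD #c can instead be emitted as SUB #-c. A standalone sketch of that rule (the ISA encoding fact, not VIXL's implementation):

    #include <cassert>
    #include <cstdint>

    // AArch64 ADD/SUB immediate: a 12-bit unsigned value, optionally
    // shifted left by 12.
    bool IsAddSubImmediate(int64_t v) {
      return (v >= 0) && ((v < (1 << 12)) ||                       // imm12
                          ((v & 0xfff) == 0 && v < (1LL << 24)));  // imm12 << 12
    }

    // Accept value or -value: ADD with #-c can be emitted as SUB with #c.
    bool CanEncodeAddSub(int64_t value) {
      return IsAddSubImmediate(value) || IsAddSubImmediate(-value);
    }

    int main() {
      assert(CanEncodeAddSub(4095));     // imm12
      assert(CanEncodeAddSub(0x1000));   // via the shifted form
      assert(CanEncodeAddSub(-4095));    // negated: encodable as SUB
      assert(!CanEncodeAddSub(0x1001));  // needs both halves: not encodable
      return 0;
    }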
295 // Check whether the registers in an ART register set have the same register codes in VIXL. If the
296 // codes are the same, we can initialize a VIXL register list directly from the register masks.
297 // Currently, only the SP/WSP and XZR/WZR codes differ between ART and VIXL.
317 inline vixl::aarch64::Shift ShiftFromOpKind(HDataProcWithShifterOp::OpKind op_kind) {
319 case HDataProcWithShifterOp::kASR: return vixl::aarch64::ASR;
320 case HDataProcWithShifterOp::kLSL: return vixl::aarch64::LSL;
321 case HDataProcWithShifterOp::kLSR: return vixl::aarch64::LSR;
325 return vixl::aarch64::NO_SHIFT;
329 inline vixl::aarch64::Extend ExtendFromOpKind(HDataProcWithShifterOp::OpKind op_kind) {
331 case HDataProcWithShifterOp::kUXTB: return vixl::aarch64::UXTB;
332 case HDataProcWithShifterOp::kUXTH: return vixl::aarch64::UXTH;
333 case HDataProcWithShifterOp::kUXTW: return vixl::aarch64::UXTW;
334 case HDataProcWithShifterOp::kSXTB: return vixl::aarch64::SXTB;
335 case HDataProcWithShifterOp::kSXTH: return vixl::aarch64::SXTH;
336 case HDataProcWithShifterOp::kSXTW: return vixl::aarch64::SXTW;
340 return vixl::aarch64::NO_EXTEND;
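The last two helpers translate HDataProcWithShifterOp operation kinds into VIXL's Shift and Extend enums. They exist because the arm64 instruction simplifier folds a shift or a small-integer type conversion into the arithmetic instruction that consumes it, so a + (b << 3) or a + (int64_t)(int8_t)b becomes a single ADD with a shifted or extended register operand. What those merged operands compute (semantics sketch, not VIXL API):

    #include <cassert>
    #include <cstdint>

    // ADD xd, xa, xb, LSL #shift: shift the second operand, then add.
    int64_t AddLsl(int64_t a, int64_t b, unsigned shift) {
      return a + (b << shift);
    }

    // ADD xd, xa, wb, SXTB: sign-extend the low byte of the second operand,
    // then add.
    int64_t AddSxtb(int64_t a, int32_t b) {
      return a + static_cast<int8_t>(b);
    }

    int main() {
      assert(AddLsl(1, 2, 3) == 17);    // 1 + (2 << 3)
      assert(AddSxtb(10, 0x1FF) == 9);  // low byte 0xFF sign-extends to -1
      return 0;
    }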