CGBuiltin.cpp revision e327aa81e38d9d17702f95ad4df56f8e3e9b7e91
1//===---- CGBuiltin.cpp - Emit LLVM Code for builtins ---------------------===//
2//
3//                     The LLVM Compiler Infrastructure
4//
5// This file is distributed under the University of Illinois Open Source
6// License. See LICENSE.TXT for details.
7//
8//===----------------------------------------------------------------------===//
9//
10// This contains code to emit Builtin calls as LLVM code.
11//
12//===----------------------------------------------------------------------===//
13
14#include "TargetInfo.h"
15#include "CodeGenFunction.h"
16#include "CodeGenModule.h"
17#include "CGObjCRuntime.h"
18#include "clang/Basic/TargetInfo.h"
19#include "clang/AST/ASTContext.h"
20#include "clang/AST/Decl.h"
21#include "clang/Basic/TargetBuiltins.h"
22#include "llvm/Intrinsics.h"
23#include "llvm/Target/TargetData.h"
24
25using namespace clang;
26using namespace CodeGen;
27using namespace llvm;
28
29/// getBuiltinLibFunction - Given a builtin id for a function like
30/// "__builtin_fabsf", return a Function* for "fabsf".
31llvm::Value *CodeGenModule::getBuiltinLibFunction(const FunctionDecl *FD,
32                                                  unsigned BuiltinID) {
33  assert(Context.BuiltinInfo.isLibFunction(BuiltinID));
34
35  // Get the name, skip over the __builtin_ prefix (if necessary).
36  StringRef Name;
37  GlobalDecl D(FD);
38
39  // If the builtin has been declared explicitly with an assembler label,
40  // use the mangled name. This differs from the plain label on platforms
41  // that prefix labels.
42  if (FD->hasAttr<AsmLabelAttr>())
43    Name = getMangledName(D);
44  else
45    Name = Context.BuiltinInfo.GetName(BuiltinID) + 10;
46
47  llvm::FunctionType *Ty =
48    cast<llvm::FunctionType>(getTypes().ConvertType(FD->getType()));
49
50  return GetOrCreateLLVMFunction(Name, Ty, D, /*ForVTable=*/false);
51}
52
53/// Emit the conversions required to turn the given value into an
54/// integer of the given size.
55static Value *EmitToInt(CodeGenFunction &CGF, llvm::Value *V,
56                        QualType T, llvm::IntegerType *IntType) {
57  V = CGF.EmitToMemory(V, T);
58
59  if (V->getType()->isPointerTy())
60    return CGF.Builder.CreatePtrToInt(V, IntType);
61
62  assert(V->getType() == IntType);
63  return V;
64}
65
66static Value *EmitFromInt(CodeGenFunction &CGF, llvm::Value *V,
67                          QualType T, llvm::Type *ResultType) {
68  V = CGF.EmitFromMemory(V, T);
69
70  if (ResultType->isPointerTy())
71    return CGF.Builder.CreateIntToPtr(V, ResultType);
72
73  assert(V->getType() == ResultType);
74  return V;
75}
76
/// Utility to insert an atomic instruction based on Intrinsic::ID
/// and the expression node.
///
/// Emits a sequentially consistent atomicrmw of the given kind and returns
/// the value that was stored at the destination before the operation (the
/// atomicrmw result), converted back to the expression's type.
static RValue EmitBinaryAtomic(CodeGenFunction &CGF,
                               llvm::AtomicRMWInst::BinOp Kind,
                               const CallExpr *E) {
  QualType T = E->getType();
  // The builtin's first argument is a pointer whose pointee matches both the
  // result type and the second (value) argument, up to qualifiers.
  assert(E->getArg(0)->getType()->isPointerType());
  assert(CGF.getContext().hasSameUnqualifiedType(T,
                                  E->getArg(0)->getType()->getPointeeType()));
  assert(CGF.getContext().hasSameUnqualifiedType(T, E->getArg(1)->getType()));

  llvm::Value *DestPtr = CGF.EmitScalarExpr(E->getArg(0));
  unsigned AddrSpace =
    cast<llvm::PointerType>(DestPtr->getType())->getAddressSpace();

  // atomicrmw operates on integers, so form an integer type of the same
  // width as T; pointer operands are routed through EmitToInt/EmitFromInt.
  llvm::IntegerType *IntType =
    llvm::IntegerType::get(CGF.getLLVMContext(),
                           CGF.getContext().getTypeSize(T));
  llvm::Type *IntPtrType = IntType->getPointerTo(AddrSpace);

  llvm::Value *Args[2];
  Args[0] = CGF.Builder.CreateBitCast(DestPtr, IntPtrType);
  Args[1] = CGF.EmitScalarExpr(E->getArg(1));
  // Remember the operand's original LLVM type so the result can be
  // converted back after the integer atomicrmw.
  llvm::Type *ValueType = Args[1]->getType();
  Args[1] = EmitToInt(CGF, Args[1], T, IntType);

  llvm::Value *Result =
      CGF.Builder.CreateAtomicRMW(Kind, Args[0], Args[1],
                                  llvm::SequentiallyConsistent);
  Result = EmitFromInt(CGF, Result, T, ValueType);
  return RValue::get(Result);
}
109
/// Utility to insert an atomic instruction based on Intrinsic::ID and
/// the expression node, where the return value is the result of the
/// operation.
///
/// Like EmitBinaryAtomic, but after the sequentially consistent atomicrmw
/// it re-applies \p Op to the old value and the operand so the returned
/// value is the *new* contents of the destination (e.g. the
/// __sync_op_and_fetch builtins).
static RValue EmitBinaryAtomicPost(CodeGenFunction &CGF,
                                   llvm::AtomicRMWInst::BinOp Kind,
                                   const CallExpr *E,
                                   Instruction::BinaryOps Op) {
  QualType T = E->getType();
  // Same invariants as EmitBinaryAtomic: pointer first argument whose
  // pointee matches the result and value-argument types.
  assert(E->getArg(0)->getType()->isPointerType());
  assert(CGF.getContext().hasSameUnqualifiedType(T,
                                  E->getArg(0)->getType()->getPointeeType()));
  assert(CGF.getContext().hasSameUnqualifiedType(T, E->getArg(1)->getType()));

  llvm::Value *DestPtr = CGF.EmitScalarExpr(E->getArg(0));
  unsigned AddrSpace =
    cast<llvm::PointerType>(DestPtr->getType())->getAddressSpace();

  // atomicrmw works on integers of the value's width; pointers round-trip
  // through EmitToInt/EmitFromInt.
  llvm::IntegerType *IntType =
    llvm::IntegerType::get(CGF.getLLVMContext(),
                           CGF.getContext().getTypeSize(T));
  llvm::Type *IntPtrType = IntType->getPointerTo(AddrSpace);

  llvm::Value *Args[2];
  Args[1] = CGF.EmitScalarExpr(E->getArg(1));
  // Remember the operand's original LLVM type for the final conversion.
  llvm::Type *ValueType = Args[1]->getType();
  Args[1] = EmitToInt(CGF, Args[1], T, IntType);
  Args[0] = CGF.Builder.CreateBitCast(DestPtr, IntPtrType);

  llvm::Value *Result =
      CGF.Builder.CreateAtomicRMW(Kind, Args[0], Args[1],
                                  llvm::SequentiallyConsistent);
  // atomicrmw yields the old value; apply Op once more to compute the
  // post-operation value the builtin is specified to return.
  Result = CGF.Builder.CreateBinOp(Op, Result, Args[1]);
  Result = EmitFromInt(CGF, Result, T, ValueType);
  return RValue::get(Result);
}
145
146/// EmitFAbs - Emit a call to fabs/fabsf/fabsl, depending on the type of ValTy,
147/// which must be a scalar floating point type.
148static Value *EmitFAbs(CodeGenFunction &CGF, Value *V, QualType ValTy) {
149  const BuiltinType *ValTyP = ValTy->getAs<BuiltinType>();
150  assert(ValTyP && "isn't scalar fp type!");
151
152  StringRef FnName;
153  switch (ValTyP->getKind()) {
154  default: llvm_unreachable("Isn't a scalar fp type!");
155  case BuiltinType::Float:      FnName = "fabsf"; break;
156  case BuiltinType::Double:     FnName = "fabs"; break;
157  case BuiltinType::LongDouble: FnName = "fabsl"; break;
158  }
159
160  // The prototype is something that takes and returns whatever V's type is.
161  llvm::FunctionType *FT = llvm::FunctionType::get(V->getType(), V->getType(),
162                                                   false);
163  llvm::Value *Fn = CGF.CGM.CreateRuntimeFunction(FT, FnName);
164
165  return CGF.Builder.CreateCall(Fn, V, "abs");
166}
167
/// Emit a plain library call for the given call expression, forwarding its
/// arguments and the callee's type unchanged to the given callee value.
static RValue emitLibraryCall(CodeGenFunction &CGF, const FunctionDecl *Fn,
                              const CallExpr *E, llvm::Value *calleeValue) {
  return CGF.EmitCall(E->getCallee()->getType(), calleeValue,
                      ReturnValueSlot(), E->arg_begin(), E->arg_end(), Fn);
}
173
174RValue CodeGenFunction::EmitBuiltinExpr(const FunctionDecl *FD,
175                                        unsigned BuiltinID, const CallExpr *E) {
176  // See if we can constant fold this builtin.  If so, don't emit it at all.
177  Expr::EvalResult Result;
178  if (E->EvaluateAsRValue(Result, CGM.getContext()) &&
179      !Result.hasSideEffects()) {
180    if (Result.Val.isInt())
181      return RValue::get(llvm::ConstantInt::get(getLLVMContext(),
182                                                Result.Val.getInt()));
183    if (Result.Val.isFloat())
184      return RValue::get(llvm::ConstantFP::get(getLLVMContext(),
185                                               Result.Val.getFloat()));
186  }
187
188  switch (BuiltinID) {
189  default: break;  // Handle intrinsics and libm functions below.
190  case Builtin::BI__builtin___CFStringMakeConstantString:
191  case Builtin::BI__builtin___NSStringMakeConstantString:
192    return RValue::get(CGM.EmitConstantExpr(E, E->getType(), 0));
193  case Builtin::BI__builtin_stdarg_start:
194  case Builtin::BI__builtin_va_start:
195  case Builtin::BI__builtin_va_end: {
196    Value *ArgValue = EmitVAListRef(E->getArg(0));
197    llvm::Type *DestType = Int8PtrTy;
198    if (ArgValue->getType() != DestType)
199      ArgValue = Builder.CreateBitCast(ArgValue, DestType,
200                                       ArgValue->getName().data());
201
202    Intrinsic::ID inst = (BuiltinID == Builtin::BI__builtin_va_end) ?
203      Intrinsic::vaend : Intrinsic::vastart;
204    return RValue::get(Builder.CreateCall(CGM.getIntrinsic(inst), ArgValue));
205  }
206  case Builtin::BI__builtin_va_copy: {
207    Value *DstPtr = EmitVAListRef(E->getArg(0));
208    Value *SrcPtr = EmitVAListRef(E->getArg(1));
209
210    llvm::Type *Type = Int8PtrTy;
211
212    DstPtr = Builder.CreateBitCast(DstPtr, Type);
213    SrcPtr = Builder.CreateBitCast(SrcPtr, Type);
214    return RValue::get(Builder.CreateCall2(CGM.getIntrinsic(Intrinsic::vacopy),
215                                           DstPtr, SrcPtr));
216  }
217  case Builtin::BI__builtin_abs:
218  case Builtin::BI__builtin_labs:
219  case Builtin::BI__builtin_llabs: {
220    Value *ArgValue = EmitScalarExpr(E->getArg(0));
221
222    Value *NegOp = Builder.CreateNeg(ArgValue, "neg");
223    Value *CmpResult =
224    Builder.CreateICmpSGE(ArgValue,
225                          llvm::Constant::getNullValue(ArgValue->getType()),
226                                                            "abscond");
227    Value *Result =
228      Builder.CreateSelect(CmpResult, ArgValue, NegOp, "abs");
229
230    return RValue::get(Result);
231  }
232  case Builtin::BI__builtin_ctzs:
233  case Builtin::BI__builtin_ctz:
234  case Builtin::BI__builtin_ctzl:
235  case Builtin::BI__builtin_ctzll: {
236    Value *ArgValue = EmitScalarExpr(E->getArg(0));
237
238    llvm::Type *ArgType = ArgValue->getType();
239    Value *F = CGM.getIntrinsic(Intrinsic::cttz, ArgType);
240
241    llvm::Type *ResultType = ConvertType(E->getType());
242    Value *ZeroUndef = Builder.getInt1(Target.isCLZForZeroUndef());
243    Value *Result = Builder.CreateCall2(F, ArgValue, ZeroUndef);
244    if (Result->getType() != ResultType)
245      Result = Builder.CreateIntCast(Result, ResultType, /*isSigned*/true,
246                                     "cast");
247    return RValue::get(Result);
248  }
249  case Builtin::BI__builtin_clzs:
250  case Builtin::BI__builtin_clz:
251  case Builtin::BI__builtin_clzl:
252  case Builtin::BI__builtin_clzll: {
253    Value *ArgValue = EmitScalarExpr(E->getArg(0));
254
255    llvm::Type *ArgType = ArgValue->getType();
256    Value *F = CGM.getIntrinsic(Intrinsic::ctlz, ArgType);
257
258    llvm::Type *ResultType = ConvertType(E->getType());
259    Value *ZeroUndef = Builder.getInt1(Target.isCLZForZeroUndef());
260    Value *Result = Builder.CreateCall2(F, ArgValue, ZeroUndef);
261    if (Result->getType() != ResultType)
262      Result = Builder.CreateIntCast(Result, ResultType, /*isSigned*/true,
263                                     "cast");
264    return RValue::get(Result);
265  }
266  case Builtin::BI__builtin_ffs:
267  case Builtin::BI__builtin_ffsl:
268  case Builtin::BI__builtin_ffsll: {
269    // ffs(x) -> x ? cttz(x) + 1 : 0
270    Value *ArgValue = EmitScalarExpr(E->getArg(0));
271
272    llvm::Type *ArgType = ArgValue->getType();
273    Value *F = CGM.getIntrinsic(Intrinsic::cttz, ArgType);
274
275    llvm::Type *ResultType = ConvertType(E->getType());
276    Value *Tmp = Builder.CreateAdd(Builder.CreateCall2(F, ArgValue,
277                                                       Builder.getTrue()),
278                                   llvm::ConstantInt::get(ArgType, 1));
279    Value *Zero = llvm::Constant::getNullValue(ArgType);
280    Value *IsZero = Builder.CreateICmpEQ(ArgValue, Zero, "iszero");
281    Value *Result = Builder.CreateSelect(IsZero, Zero, Tmp, "ffs");
282    if (Result->getType() != ResultType)
283      Result = Builder.CreateIntCast(Result, ResultType, /*isSigned*/true,
284                                     "cast");
285    return RValue::get(Result);
286  }
287  case Builtin::BI__builtin_parity:
288  case Builtin::BI__builtin_parityl:
289  case Builtin::BI__builtin_parityll: {
290    // parity(x) -> ctpop(x) & 1
291    Value *ArgValue = EmitScalarExpr(E->getArg(0));
292
293    llvm::Type *ArgType = ArgValue->getType();
294    Value *F = CGM.getIntrinsic(Intrinsic::ctpop, ArgType);
295
296    llvm::Type *ResultType = ConvertType(E->getType());
297    Value *Tmp = Builder.CreateCall(F, ArgValue);
298    Value *Result = Builder.CreateAnd(Tmp, llvm::ConstantInt::get(ArgType, 1));
299    if (Result->getType() != ResultType)
300      Result = Builder.CreateIntCast(Result, ResultType, /*isSigned*/true,
301                                     "cast");
302    return RValue::get(Result);
303  }
304  case Builtin::BI__builtin_popcount:
305  case Builtin::BI__builtin_popcountl:
306  case Builtin::BI__builtin_popcountll: {
307    Value *ArgValue = EmitScalarExpr(E->getArg(0));
308
309    llvm::Type *ArgType = ArgValue->getType();
310    Value *F = CGM.getIntrinsic(Intrinsic::ctpop, ArgType);
311
312    llvm::Type *ResultType = ConvertType(E->getType());
313    Value *Result = Builder.CreateCall(F, ArgValue);
314    if (Result->getType() != ResultType)
315      Result = Builder.CreateIntCast(Result, ResultType, /*isSigned*/true,
316                                     "cast");
317    return RValue::get(Result);
318  }
319  case Builtin::BI__builtin_expect: {
320    Value *ArgValue = EmitScalarExpr(E->getArg(0));
321    llvm::Type *ArgType = ArgValue->getType();
322
323    Value *FnExpect = CGM.getIntrinsic(Intrinsic::expect, ArgType);
324    Value *ExpectedValue = EmitScalarExpr(E->getArg(1));
325
326    Value *Result = Builder.CreateCall2(FnExpect, ArgValue, ExpectedValue,
327                                        "expval");
328    return RValue::get(Result);
329  }
330  case Builtin::BI__builtin_bswap32:
331  case Builtin::BI__builtin_bswap64: {
332    Value *ArgValue = EmitScalarExpr(E->getArg(0));
333    llvm::Type *ArgType = ArgValue->getType();
334    Value *F = CGM.getIntrinsic(Intrinsic::bswap, ArgType);
335    return RValue::get(Builder.CreateCall(F, ArgValue));
336  }
337  case Builtin::BI__builtin_object_size: {
338    // We rely on constant folding to deal with expressions with side effects.
339    assert(!E->getArg(0)->HasSideEffects(getContext()) &&
340           "should have been constant folded");
341
342    // We pass this builtin onto the optimizer so that it can
343    // figure out the object size in more complex cases.
344    llvm::Type *ResType = ConvertType(E->getType());
345
346    // LLVM only supports 0 and 2, make sure that we pass along that
347    // as a boolean.
348    Value *Ty = EmitScalarExpr(E->getArg(1));
349    ConstantInt *CI = dyn_cast<ConstantInt>(Ty);
350    assert(CI);
351    uint64_t val = CI->getZExtValue();
352    CI = ConstantInt::get(Builder.getInt1Ty(), (val & 0x2) >> 1);
353
354    Value *F = CGM.getIntrinsic(Intrinsic::objectsize, ResType);
355    return RValue::get(Builder.CreateCall2(F, EmitScalarExpr(E->getArg(0)),CI));
356  }
357  case Builtin::BI__builtin_prefetch: {
358    Value *Locality, *RW, *Address = EmitScalarExpr(E->getArg(0));
359    // FIXME: Technically these constants should of type 'int', yes?
360    RW = (E->getNumArgs() > 1) ? EmitScalarExpr(E->getArg(1)) :
361      llvm::ConstantInt::get(Int32Ty, 0);
362    Locality = (E->getNumArgs() > 2) ? EmitScalarExpr(E->getArg(2)) :
363      llvm::ConstantInt::get(Int32Ty, 3);
364    Value *Data = llvm::ConstantInt::get(Int32Ty, 1);
365    Value *F = CGM.getIntrinsic(Intrinsic::prefetch);
366    return RValue::get(Builder.CreateCall4(F, Address, RW, Locality, Data));
367  }
368  case Builtin::BI__builtin_trap: {
369    Value *F = CGM.getIntrinsic(Intrinsic::trap);
370    return RValue::get(Builder.CreateCall(F));
371  }
372  case Builtin::BI__builtin_unreachable: {
373    if (CatchUndefined)
374      EmitBranch(getTrapBB());
375    else
376      Builder.CreateUnreachable();
377
378    // We do need to preserve an insertion point.
379    EmitBlock(createBasicBlock("unreachable.cont"));
380
381    return RValue::get(0);
382  }
383
384  case Builtin::BI__builtin_powi:
385  case Builtin::BI__builtin_powif:
386  case Builtin::BI__builtin_powil: {
387    Value *Base = EmitScalarExpr(E->getArg(0));
388    Value *Exponent = EmitScalarExpr(E->getArg(1));
389    llvm::Type *ArgType = Base->getType();
390    Value *F = CGM.getIntrinsic(Intrinsic::powi, ArgType);
391    return RValue::get(Builder.CreateCall2(F, Base, Exponent));
392  }
393
394  case Builtin::BI__builtin_isgreater:
395  case Builtin::BI__builtin_isgreaterequal:
396  case Builtin::BI__builtin_isless:
397  case Builtin::BI__builtin_islessequal:
398  case Builtin::BI__builtin_islessgreater:
399  case Builtin::BI__builtin_isunordered: {
400    // Ordered comparisons: we know the arguments to these are matching scalar
401    // floating point values.
402    Value *LHS = EmitScalarExpr(E->getArg(0));
403    Value *RHS = EmitScalarExpr(E->getArg(1));
404
405    switch (BuiltinID) {
406    default: llvm_unreachable("Unknown ordered comparison");
407    case Builtin::BI__builtin_isgreater:
408      LHS = Builder.CreateFCmpOGT(LHS, RHS, "cmp");
409      break;
410    case Builtin::BI__builtin_isgreaterequal:
411      LHS = Builder.CreateFCmpOGE(LHS, RHS, "cmp");
412      break;
413    case Builtin::BI__builtin_isless:
414      LHS = Builder.CreateFCmpOLT(LHS, RHS, "cmp");
415      break;
416    case Builtin::BI__builtin_islessequal:
417      LHS = Builder.CreateFCmpOLE(LHS, RHS, "cmp");
418      break;
419    case Builtin::BI__builtin_islessgreater:
420      LHS = Builder.CreateFCmpONE(LHS, RHS, "cmp");
421      break;
422    case Builtin::BI__builtin_isunordered:
423      LHS = Builder.CreateFCmpUNO(LHS, RHS, "cmp");
424      break;
425    }
426    // ZExt bool to int type.
427    return RValue::get(Builder.CreateZExt(LHS, ConvertType(E->getType())));
428  }
429  case Builtin::BI__builtin_isnan: {
430    Value *V = EmitScalarExpr(E->getArg(0));
431    V = Builder.CreateFCmpUNO(V, V, "cmp");
432    return RValue::get(Builder.CreateZExt(V, ConvertType(E->getType())));
433  }
434
435  case Builtin::BI__builtin_isinf: {
436    // isinf(x) --> fabs(x) == infinity
437    Value *V = EmitScalarExpr(E->getArg(0));
438    V = EmitFAbs(*this, V, E->getArg(0)->getType());
439
440    V = Builder.CreateFCmpOEQ(V, ConstantFP::getInfinity(V->getType()),"isinf");
441    return RValue::get(Builder.CreateZExt(V, ConvertType(E->getType())));
442  }
443
444  // TODO: BI__builtin_isinf_sign
445  //   isinf_sign(x) -> isinf(x) ? (signbit(x) ? -1 : 1) : 0
446
447  case Builtin::BI__builtin_isnormal: {
448    // isnormal(x) --> x == x && fabsf(x) < infinity && fabsf(x) >= float_min
449    Value *V = EmitScalarExpr(E->getArg(0));
450    Value *Eq = Builder.CreateFCmpOEQ(V, V, "iseq");
451
452    Value *Abs = EmitFAbs(*this, V, E->getArg(0)->getType());
453    Value *IsLessThanInf =
454      Builder.CreateFCmpULT(Abs, ConstantFP::getInfinity(V->getType()),"isinf");
455    APFloat Smallest = APFloat::getSmallestNormalized(
456                   getContext().getFloatTypeSemantics(E->getArg(0)->getType()));
457    Value *IsNormal =
458      Builder.CreateFCmpUGE(Abs, ConstantFP::get(V->getContext(), Smallest),
459                            "isnormal");
460    V = Builder.CreateAnd(Eq, IsLessThanInf, "and");
461    V = Builder.CreateAnd(V, IsNormal, "and");
462    return RValue::get(Builder.CreateZExt(V, ConvertType(E->getType())));
463  }
464
465  case Builtin::BI__builtin_isfinite: {
466    // isfinite(x) --> x == x && fabs(x) != infinity;
467    Value *V = EmitScalarExpr(E->getArg(0));
468    Value *Eq = Builder.CreateFCmpOEQ(V, V, "iseq");
469
470    Value *Abs = EmitFAbs(*this, V, E->getArg(0)->getType());
471    Value *IsNotInf =
472      Builder.CreateFCmpUNE(Abs, ConstantFP::getInfinity(V->getType()),"isinf");
473
474    V = Builder.CreateAnd(Eq, IsNotInf, "and");
475    return RValue::get(Builder.CreateZExt(V, ConvertType(E->getType())));
476  }
477
478  case Builtin::BI__builtin_fpclassify: {
479    Value *V = EmitScalarExpr(E->getArg(5));
480    llvm::Type *Ty = ConvertType(E->getArg(5)->getType());
481
482    // Create Result
483    BasicBlock *Begin = Builder.GetInsertBlock();
484    BasicBlock *End = createBasicBlock("fpclassify_end", this->CurFn);
485    Builder.SetInsertPoint(End);
486    PHINode *Result =
487      Builder.CreatePHI(ConvertType(E->getArg(0)->getType()), 4,
488                        "fpclassify_result");
489
490    // if (V==0) return FP_ZERO
491    Builder.SetInsertPoint(Begin);
492    Value *IsZero = Builder.CreateFCmpOEQ(V, Constant::getNullValue(Ty),
493                                          "iszero");
494    Value *ZeroLiteral = EmitScalarExpr(E->getArg(4));
495    BasicBlock *NotZero = createBasicBlock("fpclassify_not_zero", this->CurFn);
496    Builder.CreateCondBr(IsZero, End, NotZero);
497    Result->addIncoming(ZeroLiteral, Begin);
498
499    // if (V != V) return FP_NAN
500    Builder.SetInsertPoint(NotZero);
501    Value *IsNan = Builder.CreateFCmpUNO(V, V, "cmp");
502    Value *NanLiteral = EmitScalarExpr(E->getArg(0));
503    BasicBlock *NotNan = createBasicBlock("fpclassify_not_nan", this->CurFn);
504    Builder.CreateCondBr(IsNan, End, NotNan);
505    Result->addIncoming(NanLiteral, NotZero);
506
507    // if (fabs(V) == infinity) return FP_INFINITY
508    Builder.SetInsertPoint(NotNan);
509    Value *VAbs = EmitFAbs(*this, V, E->getArg(5)->getType());
510    Value *IsInf =
511      Builder.CreateFCmpOEQ(VAbs, ConstantFP::getInfinity(V->getType()),
512                            "isinf");
513    Value *InfLiteral = EmitScalarExpr(E->getArg(1));
514    BasicBlock *NotInf = createBasicBlock("fpclassify_not_inf", this->CurFn);
515    Builder.CreateCondBr(IsInf, End, NotInf);
516    Result->addIncoming(InfLiteral, NotNan);
517
518    // if (fabs(V) >= MIN_NORMAL) return FP_NORMAL else FP_SUBNORMAL
519    Builder.SetInsertPoint(NotInf);
520    APFloat Smallest = APFloat::getSmallestNormalized(
521        getContext().getFloatTypeSemantics(E->getArg(5)->getType()));
522    Value *IsNormal =
523      Builder.CreateFCmpUGE(VAbs, ConstantFP::get(V->getContext(), Smallest),
524                            "isnormal");
525    Value *NormalResult =
526      Builder.CreateSelect(IsNormal, EmitScalarExpr(E->getArg(2)),
527                           EmitScalarExpr(E->getArg(3)));
528    Builder.CreateBr(End);
529    Result->addIncoming(NormalResult, NotInf);
530
531    // return Result
532    Builder.SetInsertPoint(End);
533    return RValue::get(Result);
534  }
535
536  case Builtin::BIalloca:
537  case Builtin::BI__builtin_alloca: {
538    Value *Size = EmitScalarExpr(E->getArg(0));
539    return RValue::get(Builder.CreateAlloca(Builder.getInt8Ty(), Size));
540  }
541  case Builtin::BIbzero:
542  case Builtin::BI__builtin_bzero: {
543    Value *Address = EmitScalarExpr(E->getArg(0));
544    Value *SizeVal = EmitScalarExpr(E->getArg(1));
545    unsigned Align = GetPointeeAlignment(E->getArg(0));
546    Builder.CreateMemSet(Address, Builder.getInt8(0), SizeVal, Align, false);
547    return RValue::get(Address);
548  }
549  case Builtin::BImemcpy:
550  case Builtin::BI__builtin_memcpy: {
551    Value *Address = EmitScalarExpr(E->getArg(0));
552    Value *SrcAddr = EmitScalarExpr(E->getArg(1));
553    Value *SizeVal = EmitScalarExpr(E->getArg(2));
554    unsigned Align = std::min(GetPointeeAlignment(E->getArg(0)),
555                              GetPointeeAlignment(E->getArg(1)));
556    Builder.CreateMemCpy(Address, SrcAddr, SizeVal, Align, false);
557    return RValue::get(Address);
558  }
559
560  case Builtin::BI__builtin___memcpy_chk: {
561    // fold __builtin_memcpy_chk(x, y, cst1, cst2) to memset iff cst1<=cst2.
562    llvm::APSInt Size, DstSize;
563    if (!E->getArg(2)->EvaluateAsInt(Size, CGM.getContext()) ||
564        !E->getArg(3)->EvaluateAsInt(DstSize, CGM.getContext()))
565      break;
566    if (Size.ugt(DstSize))
567      break;
568    Value *Dest = EmitScalarExpr(E->getArg(0));
569    Value *Src = EmitScalarExpr(E->getArg(1));
570    Value *SizeVal = llvm::ConstantInt::get(Builder.getContext(), Size);
571    unsigned Align = std::min(GetPointeeAlignment(E->getArg(0)),
572                              GetPointeeAlignment(E->getArg(1)));
573    Builder.CreateMemCpy(Dest, Src, SizeVal, Align, false);
574    return RValue::get(Dest);
575  }
576
577  case Builtin::BI__builtin_objc_memmove_collectable: {
578    Value *Address = EmitScalarExpr(E->getArg(0));
579    Value *SrcAddr = EmitScalarExpr(E->getArg(1));
580    Value *SizeVal = EmitScalarExpr(E->getArg(2));
581    CGM.getObjCRuntime().EmitGCMemmoveCollectable(*this,
582                                                  Address, SrcAddr, SizeVal);
583    return RValue::get(Address);
584  }
585
586  case Builtin::BI__builtin___memmove_chk: {
587    // fold __builtin_memmove_chk(x, y, cst1, cst2) to memset iff cst1<=cst2.
588    llvm::APSInt Size, DstSize;
589    if (!E->getArg(2)->EvaluateAsInt(Size, CGM.getContext()) ||
590        !E->getArg(3)->EvaluateAsInt(DstSize, CGM.getContext()))
591      break;
592    if (Size.ugt(DstSize))
593      break;
594    Value *Dest = EmitScalarExpr(E->getArg(0));
595    Value *Src = EmitScalarExpr(E->getArg(1));
596    Value *SizeVal = llvm::ConstantInt::get(Builder.getContext(), Size);
597    unsigned Align = std::min(GetPointeeAlignment(E->getArg(0)),
598                              GetPointeeAlignment(E->getArg(1)));
599    Builder.CreateMemMove(Dest, Src, SizeVal, Align, false);
600    return RValue::get(Dest);
601  }
602
603  case Builtin::BImemmove:
604  case Builtin::BI__builtin_memmove: {
605    Value *Address = EmitScalarExpr(E->getArg(0));
606    Value *SrcAddr = EmitScalarExpr(E->getArg(1));
607    Value *SizeVal = EmitScalarExpr(E->getArg(2));
608    unsigned Align = std::min(GetPointeeAlignment(E->getArg(0)),
609                              GetPointeeAlignment(E->getArg(1)));
610    Builder.CreateMemMove(Address, SrcAddr, SizeVal, Align, false);
611    return RValue::get(Address);
612  }
613  case Builtin::BImemset:
614  case Builtin::BI__builtin_memset: {
615    Value *Address = EmitScalarExpr(E->getArg(0));
616    Value *ByteVal = Builder.CreateTrunc(EmitScalarExpr(E->getArg(1)),
617                                         Builder.getInt8Ty());
618    Value *SizeVal = EmitScalarExpr(E->getArg(2));
619    unsigned Align = GetPointeeAlignment(E->getArg(0));
620    Builder.CreateMemSet(Address, ByteVal, SizeVal, Align, false);
621    return RValue::get(Address);
622  }
623  case Builtin::BI__builtin___memset_chk: {
624    // fold __builtin_memset_chk(x, y, cst1, cst2) to memset iff cst1<=cst2.
625    llvm::APSInt Size, DstSize;
626    if (!E->getArg(2)->EvaluateAsInt(Size, CGM.getContext()) ||
627        !E->getArg(3)->EvaluateAsInt(DstSize, CGM.getContext()))
628      break;
629    if (Size.ugt(DstSize))
630      break;
631    Value *Address = EmitScalarExpr(E->getArg(0));
632    Value *ByteVal = Builder.CreateTrunc(EmitScalarExpr(E->getArg(1)),
633                                         Builder.getInt8Ty());
634    Value *SizeVal = llvm::ConstantInt::get(Builder.getContext(), Size);
635    unsigned Align = GetPointeeAlignment(E->getArg(0));
636    Builder.CreateMemSet(Address, ByteVal, SizeVal, Align, false);
637
638    return RValue::get(Address);
639  }
640  case Builtin::BI__builtin_dwarf_cfa: {
641    // The offset in bytes from the first argument to the CFA.
642    //
643    // Why on earth is this in the frontend?  Is there any reason at
644    // all that the backend can't reasonably determine this while
645    // lowering llvm.eh.dwarf.cfa()?
646    //
647    // TODO: If there's a satisfactory reason, add a target hook for
648    // this instead of hard-coding 0, which is correct for most targets.
649    int32_t Offset = 0;
650
651    Value *F = CGM.getIntrinsic(Intrinsic::eh_dwarf_cfa);
652    return RValue::get(Builder.CreateCall(F,
653                                      llvm::ConstantInt::get(Int32Ty, Offset)));
654  }
655  case Builtin::BI__builtin_return_address: {
656    Value *Depth = EmitScalarExpr(E->getArg(0));
657    Depth = Builder.CreateIntCast(Depth, Int32Ty, false);
658    Value *F = CGM.getIntrinsic(Intrinsic::returnaddress);
659    return RValue::get(Builder.CreateCall(F, Depth));
660  }
661  case Builtin::BI__builtin_frame_address: {
662    Value *Depth = EmitScalarExpr(E->getArg(0));
663    Depth = Builder.CreateIntCast(Depth, Int32Ty, false);
664    Value *F = CGM.getIntrinsic(Intrinsic::frameaddress);
665    return RValue::get(Builder.CreateCall(F, Depth));
666  }
667  case Builtin::BI__builtin_extract_return_addr: {
668    Value *Address = EmitScalarExpr(E->getArg(0));
669    Value *Result = getTargetHooks().decodeReturnAddress(*this, Address);
670    return RValue::get(Result);
671  }
672  case Builtin::BI__builtin_frob_return_addr: {
673    Value *Address = EmitScalarExpr(E->getArg(0));
674    Value *Result = getTargetHooks().encodeReturnAddress(*this, Address);
675    return RValue::get(Result);
676  }
677  case Builtin::BI__builtin_dwarf_sp_column: {
678    llvm::IntegerType *Ty
679      = cast<llvm::IntegerType>(ConvertType(E->getType()));
680    int Column = getTargetHooks().getDwarfEHStackPointer(CGM);
681    if (Column == -1) {
682      CGM.ErrorUnsupported(E, "__builtin_dwarf_sp_column");
683      return RValue::get(llvm::UndefValue::get(Ty));
684    }
685    return RValue::get(llvm::ConstantInt::get(Ty, Column, true));
686  }
687  case Builtin::BI__builtin_init_dwarf_reg_size_table: {
688    Value *Address = EmitScalarExpr(E->getArg(0));
689    if (getTargetHooks().initDwarfEHRegSizeTable(*this, Address))
690      CGM.ErrorUnsupported(E, "__builtin_init_dwarf_reg_size_table");
691    return RValue::get(llvm::UndefValue::get(ConvertType(E->getType())));
692  }
693  case Builtin::BI__builtin_eh_return: {
694    Value *Int = EmitScalarExpr(E->getArg(0));
695    Value *Ptr = EmitScalarExpr(E->getArg(1));
696
697    llvm::IntegerType *IntTy = cast<llvm::IntegerType>(Int->getType());
698    assert((IntTy->getBitWidth() == 32 || IntTy->getBitWidth() == 64) &&
699           "LLVM's __builtin_eh_return only supports 32- and 64-bit variants");
700    Value *F = CGM.getIntrinsic(IntTy->getBitWidth() == 32
701                                  ? Intrinsic::eh_return_i32
702                                  : Intrinsic::eh_return_i64);
703    Builder.CreateCall2(F, Int, Ptr);
704    Builder.CreateUnreachable();
705
706    // We do need to preserve an insertion point.
707    EmitBlock(createBasicBlock("builtin_eh_return.cont"));
708
709    return RValue::get(0);
710  }
711  case Builtin::BI__builtin_unwind_init: {
712    Value *F = CGM.getIntrinsic(Intrinsic::eh_unwind_init);
713    return RValue::get(Builder.CreateCall(F));
714  }
715  case Builtin::BI__builtin_extend_pointer: {
716    // Extends a pointer to the size of an _Unwind_Word, which is
717    // uint64_t on all platforms.  Generally this gets poked into a
718    // register and eventually used as an address, so if the
719    // addressing registers are wider than pointers and the platform
720    // doesn't implicitly ignore high-order bits when doing
721    // addressing, we need to make sure we zext / sext based on
722    // the platform's expectations.
723    //
724    // See: http://gcc.gnu.org/ml/gcc-bugs/2002-02/msg00237.html
725
726    // Cast the pointer to intptr_t.
727    Value *Ptr = EmitScalarExpr(E->getArg(0));
728    Value *Result = Builder.CreatePtrToInt(Ptr, IntPtrTy, "extend.cast");
729
730    // If that's 64 bits, we're done.
731    if (IntPtrTy->getBitWidth() == 64)
732      return RValue::get(Result);
733
734    // Otherwise, ask the codegen data what to do.
735    if (getTargetHooks().extendPointerWithSExt())
736      return RValue::get(Builder.CreateSExt(Result, Int64Ty, "extend.sext"));
737    else
738      return RValue::get(Builder.CreateZExt(Result, Int64Ty, "extend.zext"));
739  }
740  case Builtin::BI__builtin_setjmp: {
741    // Buffer is a void**.
742    Value *Buf = EmitScalarExpr(E->getArg(0));
743
744    // Store the frame pointer to the setjmp buffer.
745    Value *FrameAddr =
746      Builder.CreateCall(CGM.getIntrinsic(Intrinsic::frameaddress),
747                         ConstantInt::get(Int32Ty, 0));
748    Builder.CreateStore(FrameAddr, Buf);
749
750    // Store the stack pointer to the setjmp buffer.
751    Value *StackAddr =
752      Builder.CreateCall(CGM.getIntrinsic(Intrinsic::stacksave));
753    Value *StackSaveSlot =
754      Builder.CreateGEP(Buf, ConstantInt::get(Int32Ty, 2));
755    Builder.CreateStore(StackAddr, StackSaveSlot);
756
757    // Call LLVM's EH setjmp, which is lightweight.
758    Value *F = CGM.getIntrinsic(Intrinsic::eh_sjlj_setjmp);
759    Buf = Builder.CreateBitCast(Buf, Int8PtrTy);
760    return RValue::get(Builder.CreateCall(F, Buf));
761  }
762  case Builtin::BI__builtin_longjmp: {
763    Value *Buf = EmitScalarExpr(E->getArg(0));
764    Buf = Builder.CreateBitCast(Buf, Int8PtrTy);
765
766    // Call LLVM's EH longjmp, which is lightweight.
767    Builder.CreateCall(CGM.getIntrinsic(Intrinsic::eh_sjlj_longjmp), Buf);
768
769    // longjmp doesn't return; mark this as unreachable.
770    Builder.CreateUnreachable();
771
772    // We do need to preserve an insertion point.
773    EmitBlock(createBasicBlock("longjmp.cont"));
774
775    return RValue::get(0);
776  }
777  case Builtin::BI__sync_fetch_and_add:
778  case Builtin::BI__sync_fetch_and_sub:
779  case Builtin::BI__sync_fetch_and_or:
780  case Builtin::BI__sync_fetch_and_and:
781  case Builtin::BI__sync_fetch_and_xor:
782  case Builtin::BI__sync_add_and_fetch:
783  case Builtin::BI__sync_sub_and_fetch:
784  case Builtin::BI__sync_and_and_fetch:
785  case Builtin::BI__sync_or_and_fetch:
786  case Builtin::BI__sync_xor_and_fetch:
787  case Builtin::BI__sync_val_compare_and_swap:
788  case Builtin::BI__sync_bool_compare_and_swap:
789  case Builtin::BI__sync_lock_test_and_set:
790  case Builtin::BI__sync_lock_release:
791  case Builtin::BI__sync_swap:
792    llvm_unreachable("Shouldn't make it through sema");
793  case Builtin::BI__sync_fetch_and_add_1:
794  case Builtin::BI__sync_fetch_and_add_2:
795  case Builtin::BI__sync_fetch_and_add_4:
796  case Builtin::BI__sync_fetch_and_add_8:
797  case Builtin::BI__sync_fetch_and_add_16:
798    return EmitBinaryAtomic(*this, llvm::AtomicRMWInst::Add, E);
799  case Builtin::BI__sync_fetch_and_sub_1:
800  case Builtin::BI__sync_fetch_and_sub_2:
801  case Builtin::BI__sync_fetch_and_sub_4:
802  case Builtin::BI__sync_fetch_and_sub_8:
803  case Builtin::BI__sync_fetch_and_sub_16:
804    return EmitBinaryAtomic(*this, llvm::AtomicRMWInst::Sub, E);
805  case Builtin::BI__sync_fetch_and_or_1:
806  case Builtin::BI__sync_fetch_and_or_2:
807  case Builtin::BI__sync_fetch_and_or_4:
808  case Builtin::BI__sync_fetch_and_or_8:
809  case Builtin::BI__sync_fetch_and_or_16:
810    return EmitBinaryAtomic(*this, llvm::AtomicRMWInst::Or, E);
811  case Builtin::BI__sync_fetch_and_and_1:
812  case Builtin::BI__sync_fetch_and_and_2:
813  case Builtin::BI__sync_fetch_and_and_4:
814  case Builtin::BI__sync_fetch_and_and_8:
815  case Builtin::BI__sync_fetch_and_and_16:
816    return EmitBinaryAtomic(*this, llvm::AtomicRMWInst::And, E);
817  case Builtin::BI__sync_fetch_and_xor_1:
818  case Builtin::BI__sync_fetch_and_xor_2:
819  case Builtin::BI__sync_fetch_and_xor_4:
820  case Builtin::BI__sync_fetch_and_xor_8:
821  case Builtin::BI__sync_fetch_and_xor_16:
822    return EmitBinaryAtomic(*this, llvm::AtomicRMWInst::Xor, E);
823
824  // Clang extensions: not overloaded yet.
825  case Builtin::BI__sync_fetch_and_min:
826    return EmitBinaryAtomic(*this, llvm::AtomicRMWInst::Min, E);
827  case Builtin::BI__sync_fetch_and_max:
828    return EmitBinaryAtomic(*this, llvm::AtomicRMWInst::Max, E);
829  case Builtin::BI__sync_fetch_and_umin:
830    return EmitBinaryAtomic(*this, llvm::AtomicRMWInst::UMin, E);
831  case Builtin::BI__sync_fetch_and_umax:
832    return EmitBinaryAtomic(*this, llvm::AtomicRMWInst::UMax, E);
833
834  case Builtin::BI__sync_add_and_fetch_1:
835  case Builtin::BI__sync_add_and_fetch_2:
836  case Builtin::BI__sync_add_and_fetch_4:
837  case Builtin::BI__sync_add_and_fetch_8:
838  case Builtin::BI__sync_add_and_fetch_16:
839    return EmitBinaryAtomicPost(*this, llvm::AtomicRMWInst::Add, E,
840                                llvm::Instruction::Add);
841  case Builtin::BI__sync_sub_and_fetch_1:
842  case Builtin::BI__sync_sub_and_fetch_2:
843  case Builtin::BI__sync_sub_and_fetch_4:
844  case Builtin::BI__sync_sub_and_fetch_8:
845  case Builtin::BI__sync_sub_and_fetch_16:
846    return EmitBinaryAtomicPost(*this, llvm::AtomicRMWInst::Sub, E,
847                                llvm::Instruction::Sub);
848  case Builtin::BI__sync_and_and_fetch_1:
849  case Builtin::BI__sync_and_and_fetch_2:
850  case Builtin::BI__sync_and_and_fetch_4:
851  case Builtin::BI__sync_and_and_fetch_8:
852  case Builtin::BI__sync_and_and_fetch_16:
853    return EmitBinaryAtomicPost(*this, llvm::AtomicRMWInst::And, E,
854                                llvm::Instruction::And);
855  case Builtin::BI__sync_or_and_fetch_1:
856  case Builtin::BI__sync_or_and_fetch_2:
857  case Builtin::BI__sync_or_and_fetch_4:
858  case Builtin::BI__sync_or_and_fetch_8:
859  case Builtin::BI__sync_or_and_fetch_16:
860    return EmitBinaryAtomicPost(*this, llvm::AtomicRMWInst::Or, E,
861                                llvm::Instruction::Or);
862  case Builtin::BI__sync_xor_and_fetch_1:
863  case Builtin::BI__sync_xor_and_fetch_2:
864  case Builtin::BI__sync_xor_and_fetch_4:
865  case Builtin::BI__sync_xor_and_fetch_8:
866  case Builtin::BI__sync_xor_and_fetch_16:
867    return EmitBinaryAtomicPost(*this, llvm::AtomicRMWInst::Xor, E,
868                                llvm::Instruction::Xor);
869
870  case Builtin::BI__sync_val_compare_and_swap_1:
871  case Builtin::BI__sync_val_compare_and_swap_2:
872  case Builtin::BI__sync_val_compare_and_swap_4:
873  case Builtin::BI__sync_val_compare_and_swap_8:
874  case Builtin::BI__sync_val_compare_and_swap_16: {
875    QualType T = E->getType();
876    llvm::Value *DestPtr = EmitScalarExpr(E->getArg(0));
877    unsigned AddrSpace =
878      cast<llvm::PointerType>(DestPtr->getType())->getAddressSpace();
879
880    llvm::IntegerType *IntType =
881      llvm::IntegerType::get(getLLVMContext(),
882                             getContext().getTypeSize(T));
883    llvm::Type *IntPtrType = IntType->getPointerTo(AddrSpace);
884
885    Value *Args[3];
886    Args[0] = Builder.CreateBitCast(DestPtr, IntPtrType);
887    Args[1] = EmitScalarExpr(E->getArg(1));
888    llvm::Type *ValueType = Args[1]->getType();
889    Args[1] = EmitToInt(*this, Args[1], T, IntType);
890    Args[2] = EmitToInt(*this, EmitScalarExpr(E->getArg(2)), T, IntType);
891
892    Value *Result = Builder.CreateAtomicCmpXchg(Args[0], Args[1], Args[2],
893                                                llvm::SequentiallyConsistent);
894    Result = EmitFromInt(*this, Result, T, ValueType);
895    return RValue::get(Result);
896  }
897
898  case Builtin::BI__sync_bool_compare_and_swap_1:
899  case Builtin::BI__sync_bool_compare_and_swap_2:
900  case Builtin::BI__sync_bool_compare_and_swap_4:
901  case Builtin::BI__sync_bool_compare_and_swap_8:
902  case Builtin::BI__sync_bool_compare_and_swap_16: {
903    QualType T = E->getArg(1)->getType();
904    llvm::Value *DestPtr = EmitScalarExpr(E->getArg(0));
905    unsigned AddrSpace =
906      cast<llvm::PointerType>(DestPtr->getType())->getAddressSpace();
907
908    llvm::IntegerType *IntType =
909      llvm::IntegerType::get(getLLVMContext(),
910                             getContext().getTypeSize(T));
911    llvm::Type *IntPtrType = IntType->getPointerTo(AddrSpace);
912
913    Value *Args[3];
914    Args[0] = Builder.CreateBitCast(DestPtr, IntPtrType);
915    Args[1] = EmitToInt(*this, EmitScalarExpr(E->getArg(1)), T, IntType);
916    Args[2] = EmitToInt(*this, EmitScalarExpr(E->getArg(2)), T, IntType);
917
918    Value *OldVal = Args[1];
919    Value *PrevVal = Builder.CreateAtomicCmpXchg(Args[0], Args[1], Args[2],
920                                                 llvm::SequentiallyConsistent);
921    Value *Result = Builder.CreateICmpEQ(PrevVal, OldVal);
922    // zext bool to int.
923    Result = Builder.CreateZExt(Result, ConvertType(E->getType()));
924    return RValue::get(Result);
925  }
926
927  case Builtin::BI__sync_swap_1:
928  case Builtin::BI__sync_swap_2:
929  case Builtin::BI__sync_swap_4:
930  case Builtin::BI__sync_swap_8:
931  case Builtin::BI__sync_swap_16:
932    return EmitBinaryAtomic(*this, llvm::AtomicRMWInst::Xchg, E);
933
934  case Builtin::BI__sync_lock_test_and_set_1:
935  case Builtin::BI__sync_lock_test_and_set_2:
936  case Builtin::BI__sync_lock_test_and_set_4:
937  case Builtin::BI__sync_lock_test_and_set_8:
938  case Builtin::BI__sync_lock_test_and_set_16:
939    return EmitBinaryAtomic(*this, llvm::AtomicRMWInst::Xchg, E);
940
941  case Builtin::BI__sync_lock_release_1:
942  case Builtin::BI__sync_lock_release_2:
943  case Builtin::BI__sync_lock_release_4:
944  case Builtin::BI__sync_lock_release_8:
945  case Builtin::BI__sync_lock_release_16: {
946    Value *Ptr = EmitScalarExpr(E->getArg(0));
947    QualType ElTy = E->getArg(0)->getType()->getPointeeType();
948    CharUnits StoreSize = getContext().getTypeSizeInChars(ElTy);
949    llvm::Type *ITy = llvm::IntegerType::get(getLLVMContext(),
950                                             StoreSize.getQuantity() * 8);
951    Ptr = Builder.CreateBitCast(Ptr, ITy->getPointerTo());
952    llvm::StoreInst *Store =
953      Builder.CreateStore(llvm::Constant::getNullValue(ITy), Ptr);
954    Store->setAlignment(StoreSize.getQuantity());
955    Store->setAtomic(llvm::Release);
956    return RValue::get(0);
957  }
958
959  case Builtin::BI__sync_synchronize: {
960    // We assume this is supposed to correspond to a C++0x-style
961    // sequentially-consistent fence (i.e. this is only usable for
962    // synchonization, not device I/O or anything like that). This intrinsic
963    // is really badly designed in the sense that in theory, there isn't
964    // any way to safely use it... but in practice, it mostly works
965    // to use it with non-atomic loads and stores to get acquire/release
966    // semantics.
967    Builder.CreateFence(llvm::SequentiallyConsistent);
968    return RValue::get(0);
969  }
970
971  case Builtin::BI__c11_atomic_is_lock_free:
972  case Builtin::BI__atomic_is_lock_free: {
973    // Call "bool __atomic_is_lock_free(size_t size, void *ptr)". For the
974    // __c11 builtin, ptr is 0 (indicating a properly-aligned object), since
975    // _Atomic(T) is always properly-aligned.
976    const char *LibCallName = "__atomic_is_lock_free";
977    CallArgList Args;
978    Args.add(RValue::get(EmitScalarExpr(E->getArg(0))),
979             getContext().getSizeType());
980    if (BuiltinID == Builtin::BI__atomic_is_lock_free)
981      Args.add(RValue::get(EmitScalarExpr(E->getArg(1))),
982               getContext().VoidPtrTy);
983    else
984      Args.add(RValue::get(llvm::Constant::getNullValue(VoidPtrTy)),
985               getContext().VoidPtrTy);
986    const CGFunctionInfo &FuncInfo =
987        CGM.getTypes().arrangeFunctionCall(E->getType(), Args,
988                                           FunctionType::ExtInfo(),
989                                           RequiredArgs::All);
990    llvm::FunctionType *FTy = CGM.getTypes().GetFunctionType(FuncInfo);
991    llvm::Constant *Func = CGM.CreateRuntimeFunction(FTy, LibCallName);
992    return EmitCall(FuncInfo, Func, ReturnValueSlot(), Args);
993  }
994
995  case Builtin::BI__atomic_test_and_set: {
996    // Look at the argument type to determine whether this is a volatile
997    // operation. The parameter type is always volatile.
998    QualType PtrTy = E->getArg(0)->IgnoreImpCasts()->getType();
999    bool Volatile =
1000        PtrTy->castAs<PointerType>()->getPointeeType().isVolatileQualified();
1001
1002    Value *Ptr = EmitScalarExpr(E->getArg(0));
1003    unsigned AddrSpace =
1004        cast<llvm::PointerType>(Ptr->getType())->getAddressSpace();
1005    Ptr = Builder.CreateBitCast(Ptr, Int8Ty->getPointerTo(AddrSpace));
1006    Value *NewVal = Builder.getInt8(1);
1007    Value *Order = EmitScalarExpr(E->getArg(1));
1008    if (isa<llvm::ConstantInt>(Order)) {
1009      int ord = cast<llvm::ConstantInt>(Order)->getZExtValue();
1010      AtomicRMWInst *Result = 0;
1011      switch (ord) {
1012      case 0:  // memory_order_relaxed
1013      default: // invalid order
1014        Result = Builder.CreateAtomicRMW(llvm::AtomicRMWInst::Xchg,
1015                                         Ptr, NewVal,
1016                                         llvm::Monotonic);
1017        break;
1018      case 1:  // memory_order_consume
1019      case 2:  // memory_order_acquire
1020        Result = Builder.CreateAtomicRMW(llvm::AtomicRMWInst::Xchg,
1021                                         Ptr, NewVal,
1022                                         llvm::Acquire);
1023        break;
1024      case 3:  // memory_order_release
1025        Result = Builder.CreateAtomicRMW(llvm::AtomicRMWInst::Xchg,
1026                                         Ptr, NewVal,
1027                                         llvm::Release);
1028        break;
1029      case 4:  // memory_order_acq_rel
1030        Result = Builder.CreateAtomicRMW(llvm::AtomicRMWInst::Xchg,
1031                                         Ptr, NewVal,
1032                                         llvm::AcquireRelease);
1033        break;
1034      case 5:  // memory_order_seq_cst
1035        Result = Builder.CreateAtomicRMW(llvm::AtomicRMWInst::Xchg,
1036                                         Ptr, NewVal,
1037                                         llvm::SequentiallyConsistent);
1038        break;
1039      }
1040      Result->setVolatile(Volatile);
1041      return RValue::get(Builder.CreateIsNotNull(Result, "tobool"));
1042    }
1043
1044    llvm::BasicBlock *ContBB = createBasicBlock("atomic.continue", CurFn);
1045
1046    llvm::BasicBlock *BBs[5] = {
1047      createBasicBlock("monotonic", CurFn),
1048      createBasicBlock("acquire", CurFn),
1049      createBasicBlock("release", CurFn),
1050      createBasicBlock("acqrel", CurFn),
1051      createBasicBlock("seqcst", CurFn)
1052    };
1053    llvm::AtomicOrdering Orders[5] = {
1054      llvm::Monotonic, llvm::Acquire, llvm::Release,
1055      llvm::AcquireRelease, llvm::SequentiallyConsistent
1056    };
1057
1058    Order = Builder.CreateIntCast(Order, Builder.getInt32Ty(), false);
1059    llvm::SwitchInst *SI = Builder.CreateSwitch(Order, BBs[0]);
1060
1061    Builder.SetInsertPoint(ContBB);
1062    PHINode *Result = Builder.CreatePHI(Int8Ty, 5, "was_set");
1063
1064    for (unsigned i = 0; i < 5; ++i) {
1065      Builder.SetInsertPoint(BBs[i]);
1066      AtomicRMWInst *RMW = Builder.CreateAtomicRMW(llvm::AtomicRMWInst::Xchg,
1067                                                   Ptr, NewVal, Orders[i]);
1068      RMW->setVolatile(Volatile);
1069      Result->addIncoming(RMW, BBs[i]);
1070      Builder.CreateBr(ContBB);
1071    }
1072
1073    SI->addCase(Builder.getInt32(0), BBs[0]);
1074    SI->addCase(Builder.getInt32(1), BBs[1]);
1075    SI->addCase(Builder.getInt32(2), BBs[1]);
1076    SI->addCase(Builder.getInt32(3), BBs[2]);
1077    SI->addCase(Builder.getInt32(4), BBs[3]);
1078    SI->addCase(Builder.getInt32(5), BBs[4]);
1079
1080    Builder.SetInsertPoint(ContBB);
1081    return RValue::get(Builder.CreateIsNotNull(Result, "tobool"));
1082  }
1083
1084  case Builtin::BI__atomic_clear: {
1085    QualType PtrTy = E->getArg(0)->IgnoreImpCasts()->getType();
1086    bool Volatile =
1087        PtrTy->castAs<PointerType>()->getPointeeType().isVolatileQualified();
1088
1089    Value *Ptr = EmitScalarExpr(E->getArg(0));
1090    unsigned AddrSpace =
1091        cast<llvm::PointerType>(Ptr->getType())->getAddressSpace();
1092    Ptr = Builder.CreateBitCast(Ptr, Int8Ty->getPointerTo(AddrSpace));
1093    Value *NewVal = Builder.getInt8(0);
1094    Value *Order = EmitScalarExpr(E->getArg(1));
1095    if (isa<llvm::ConstantInt>(Order)) {
1096      int ord = cast<llvm::ConstantInt>(Order)->getZExtValue();
1097      StoreInst *Store = Builder.CreateStore(NewVal, Ptr, Volatile);
1098      Store->setAlignment(1);
1099      switch (ord) {
1100      case 0:  // memory_order_relaxed
1101      default: // invalid order
1102        Store->setOrdering(llvm::Monotonic);
1103        break;
1104      case 3:  // memory_order_release
1105        Store->setOrdering(llvm::Release);
1106        break;
1107      case 5:  // memory_order_seq_cst
1108        Store->setOrdering(llvm::SequentiallyConsistent);
1109        break;
1110      }
1111      return RValue::get(0);
1112    }
1113
1114    llvm::BasicBlock *ContBB = createBasicBlock("atomic.continue", CurFn);
1115
1116    llvm::BasicBlock *BBs[3] = {
1117      createBasicBlock("monotonic", CurFn),
1118      createBasicBlock("release", CurFn),
1119      createBasicBlock("seqcst", CurFn)
1120    };
1121    llvm::AtomicOrdering Orders[3] = {
1122      llvm::Monotonic, llvm::Release, llvm::SequentiallyConsistent
1123    };
1124
1125    Order = Builder.CreateIntCast(Order, Builder.getInt32Ty(), false);
1126    llvm::SwitchInst *SI = Builder.CreateSwitch(Order, BBs[0]);
1127
1128    for (unsigned i = 0; i < 3; ++i) {
1129      Builder.SetInsertPoint(BBs[i]);
1130      StoreInst *Store = Builder.CreateStore(NewVal, Ptr, Volatile);
1131      Store->setAlignment(1);
1132      Store->setOrdering(Orders[i]);
1133      Builder.CreateBr(ContBB);
1134    }
1135
1136    SI->addCase(Builder.getInt32(0), BBs[0]);
1137    SI->addCase(Builder.getInt32(3), BBs[1]);
1138    SI->addCase(Builder.getInt32(5), BBs[2]);
1139
1140    Builder.SetInsertPoint(ContBB);
1141    return RValue::get(0);
1142  }
1143
1144  case Builtin::BI__atomic_thread_fence:
1145  case Builtin::BI__atomic_signal_fence:
1146  case Builtin::BI__c11_atomic_thread_fence:
1147  case Builtin::BI__c11_atomic_signal_fence: {
1148    llvm::SynchronizationScope Scope;
1149    if (BuiltinID == Builtin::BI__atomic_signal_fence ||
1150        BuiltinID == Builtin::BI__c11_atomic_signal_fence)
1151      Scope = llvm::SingleThread;
1152    else
1153      Scope = llvm::CrossThread;
1154    Value *Order = EmitScalarExpr(E->getArg(0));
1155    if (isa<llvm::ConstantInt>(Order)) {
1156      int ord = cast<llvm::ConstantInt>(Order)->getZExtValue();
1157      switch (ord) {
1158      case 0:  // memory_order_relaxed
1159      default: // invalid order
1160        break;
1161      case 1:  // memory_order_consume
1162      case 2:  // memory_order_acquire
1163        Builder.CreateFence(llvm::Acquire, Scope);
1164        break;
1165      case 3:  // memory_order_release
1166        Builder.CreateFence(llvm::Release, Scope);
1167        break;
1168      case 4:  // memory_order_acq_rel
1169        Builder.CreateFence(llvm::AcquireRelease, Scope);
1170        break;
1171      case 5:  // memory_order_seq_cst
1172        Builder.CreateFence(llvm::SequentiallyConsistent, Scope);
1173        break;
1174      }
1175      return RValue::get(0);
1176    }
1177
1178    llvm::BasicBlock *AcquireBB, *ReleaseBB, *AcqRelBB, *SeqCstBB;
1179    AcquireBB = createBasicBlock("acquire", CurFn);
1180    ReleaseBB = createBasicBlock("release", CurFn);
1181    AcqRelBB = createBasicBlock("acqrel", CurFn);
1182    SeqCstBB = createBasicBlock("seqcst", CurFn);
1183    llvm::BasicBlock *ContBB = createBasicBlock("atomic.continue", CurFn);
1184
1185    Order = Builder.CreateIntCast(Order, Builder.getInt32Ty(), false);
1186    llvm::SwitchInst *SI = Builder.CreateSwitch(Order, ContBB);
1187
1188    Builder.SetInsertPoint(AcquireBB);
1189    Builder.CreateFence(llvm::Acquire, Scope);
1190    Builder.CreateBr(ContBB);
1191    SI->addCase(Builder.getInt32(1), AcquireBB);
1192    SI->addCase(Builder.getInt32(2), AcquireBB);
1193
1194    Builder.SetInsertPoint(ReleaseBB);
1195    Builder.CreateFence(llvm::Release, Scope);
1196    Builder.CreateBr(ContBB);
1197    SI->addCase(Builder.getInt32(3), ReleaseBB);
1198
1199    Builder.SetInsertPoint(AcqRelBB);
1200    Builder.CreateFence(llvm::AcquireRelease, Scope);
1201    Builder.CreateBr(ContBB);
1202    SI->addCase(Builder.getInt32(4), AcqRelBB);
1203
1204    Builder.SetInsertPoint(SeqCstBB);
1205    Builder.CreateFence(llvm::SequentiallyConsistent, Scope);
1206    Builder.CreateBr(ContBB);
1207    SI->addCase(Builder.getInt32(5), SeqCstBB);
1208
1209    Builder.SetInsertPoint(ContBB);
1210    return RValue::get(0);
1211  }
1212
1213    // Library functions with special handling.
1214  case Builtin::BIsqrt:
1215  case Builtin::BIsqrtf:
1216  case Builtin::BIsqrtl: {
1217    // TODO: there is currently no set of optimizer flags
1218    // sufficient for us to rewrite sqrt to @llvm.sqrt.
1219    // -fmath-errno=0 is not good enough; we need finiteness.
1220    // We could probably precondition the call with an ult
1221    // against 0, but is that worth the complexity?
1222    break;
1223  }
1224
1225  case Builtin::BIpow:
1226  case Builtin::BIpowf:
1227  case Builtin::BIpowl: {
1228    // Rewrite sqrt to intrinsic if allowed.
1229    if (!FD->hasAttr<ConstAttr>())
1230      break;
1231    Value *Base = EmitScalarExpr(E->getArg(0));
1232    Value *Exponent = EmitScalarExpr(E->getArg(1));
1233    llvm::Type *ArgType = Base->getType();
1234    Value *F = CGM.getIntrinsic(Intrinsic::pow, ArgType);
1235    return RValue::get(Builder.CreateCall2(F, Base, Exponent));
1236  }
1237
1238  case Builtin::BIfma:
1239  case Builtin::BIfmaf:
1240  case Builtin::BIfmal:
1241  case Builtin::BI__builtin_fma:
1242  case Builtin::BI__builtin_fmaf:
1243  case Builtin::BI__builtin_fmal: {
1244    // Rewrite fma to intrinsic.
1245    Value *FirstArg = EmitScalarExpr(E->getArg(0));
1246    llvm::Type *ArgType = FirstArg->getType();
1247    Value *F = CGM.getIntrinsic(Intrinsic::fma, ArgType);
1248    return RValue::get(Builder.CreateCall3(F, FirstArg,
1249                                              EmitScalarExpr(E->getArg(1)),
1250                                              EmitScalarExpr(E->getArg(2))));
1251  }
1252
1253  case Builtin::BI__builtin_signbit:
1254  case Builtin::BI__builtin_signbitf:
1255  case Builtin::BI__builtin_signbitl: {
1256    LLVMContext &C = CGM.getLLVMContext();
1257
1258    Value *Arg = EmitScalarExpr(E->getArg(0));
1259    llvm::Type *ArgTy = Arg->getType();
1260    if (ArgTy->isPPC_FP128Ty())
1261      break; // FIXME: I'm not sure what the right implementation is here.
1262    int ArgWidth = ArgTy->getPrimitiveSizeInBits();
1263    llvm::Type *ArgIntTy = llvm::IntegerType::get(C, ArgWidth);
1264    Value *BCArg = Builder.CreateBitCast(Arg, ArgIntTy);
1265    Value *ZeroCmp = llvm::Constant::getNullValue(ArgIntTy);
1266    Value *Result = Builder.CreateICmpSLT(BCArg, ZeroCmp);
1267    return RValue::get(Builder.CreateZExt(Result, ConvertType(E->getType())));
1268  }
1269  case Builtin::BI__builtin_annotation: {
1270    llvm::Value *AnnVal = EmitScalarExpr(E->getArg(0));
1271    llvm::Value *F = CGM.getIntrinsic(llvm::Intrinsic::annotation,
1272                                      AnnVal->getType());
1273
1274    // Get the annotation string, go through casts. Sema requires this to be a
1275    // non-wide string literal, potentially casted, so the cast<> is safe.
1276    const Expr *AnnotationStrExpr = E->getArg(1)->IgnoreParenCasts();
1277    llvm::StringRef Str = cast<StringLiteral>(AnnotationStrExpr)->getString();
1278    return RValue::get(EmitAnnotationCall(F, AnnVal, Str, E->getExprLoc()));
1279  }
1280  }
1281
1282  // If this is an alias for a lib function (e.g. __builtin_sin), emit
1283  // the call using the normal call path, but using the unmangled
1284  // version of the function name.
1285  if (getContext().BuiltinInfo.isLibFunction(BuiltinID))
1286    return emitLibraryCall(*this, FD, E,
1287                           CGM.getBuiltinLibFunction(FD, BuiltinID));
1288
1289  // If this is a predefined lib function (e.g. malloc), emit the call
1290  // using exactly the normal call path.
1291  if (getContext().BuiltinInfo.isPredefinedLibFunction(BuiltinID))
1292    return emitLibraryCall(*this, FD, E, EmitScalarExpr(E->getCallee()));
1293
1294  // See if we have a target specific intrinsic.
1295  const char *Name = getContext().BuiltinInfo.GetName(BuiltinID);
1296  Intrinsic::ID IntrinsicID = Intrinsic::not_intrinsic;
1297  if (const char *Prefix =
1298      llvm::Triple::getArchTypePrefix(Target.getTriple().getArch()))
1299    IntrinsicID = Intrinsic::getIntrinsicForGCCBuiltin(Prefix, Name);
1300
1301  if (IntrinsicID != Intrinsic::not_intrinsic) {
1302    SmallVector<Value*, 16> Args;
1303
1304    // Find out if any arguments are required to be integer constant
1305    // expressions.
1306    unsigned ICEArguments = 0;
1307    ASTContext::GetBuiltinTypeError Error;
1308    getContext().GetBuiltinType(BuiltinID, Error, &ICEArguments);
1309    assert(Error == ASTContext::GE_None && "Should not codegen an error");
1310
1311    Function *F = CGM.getIntrinsic(IntrinsicID);
1312    llvm::FunctionType *FTy = F->getFunctionType();
1313
1314    for (unsigned i = 0, e = E->getNumArgs(); i != e; ++i) {
1315      Value *ArgValue;
1316      // If this is a normal argument, just emit it as a scalar.
1317      if ((ICEArguments & (1 << i)) == 0) {
1318        ArgValue = EmitScalarExpr(E->getArg(i));
1319      } else {
1320        // If this is required to be a constant, constant fold it so that we
1321        // know that the generated intrinsic gets a ConstantInt.
1322        llvm::APSInt Result;
1323        bool IsConst = E->getArg(i)->isIntegerConstantExpr(Result,getContext());
1324        assert(IsConst && "Constant arg isn't actually constant?");
1325        (void)IsConst;
1326        ArgValue = llvm::ConstantInt::get(getLLVMContext(), Result);
1327      }
1328
1329      // If the intrinsic arg type is different from the builtin arg type
1330      // we need to do a bit cast.
1331      llvm::Type *PTy = FTy->getParamType(i);
1332      if (PTy != ArgValue->getType()) {
1333        assert(PTy->canLosslesslyBitCastTo(FTy->getParamType(i)) &&
1334               "Must be able to losslessly bit cast to param");
1335        ArgValue = Builder.CreateBitCast(ArgValue, PTy);
1336      }
1337
1338      Args.push_back(ArgValue);
1339    }
1340
1341    Value *V = Builder.CreateCall(F, Args);
1342    QualType BuiltinRetType = E->getType();
1343
1344    llvm::Type *RetTy = VoidTy;
1345    if (!BuiltinRetType->isVoidType())
1346      RetTy = ConvertType(BuiltinRetType);
1347
1348    if (RetTy != V->getType()) {
1349      assert(V->getType()->canLosslesslyBitCastTo(RetTy) &&
1350             "Must be able to losslessly bit cast result type");
1351      V = Builder.CreateBitCast(V, RetTy);
1352    }
1353
1354    return RValue::get(V);
1355  }
1356
1357  // See if we have a target specific builtin that needs to be lowered.
1358  if (Value *V = EmitTargetBuiltinExpr(BuiltinID, E))
1359    return RValue::get(V);
1360
1361  ErrorUnsupported(E, "builtin function");
1362
1363  // Unknown builtin, for now just dump it out and return undef.
1364  if (hasAggregateLLVMType(E->getType()))
1365    return RValue::getAggregate(CreateMemTemp(E->getType()));
1366  return RValue::get(llvm::UndefValue::get(ConvertType(E->getType())));
1367}
1368
1369Value *CodeGenFunction::EmitTargetBuiltinExpr(unsigned BuiltinID,
1370                                              const CallExpr *E) {
1371  switch (Target.getTriple().getArch()) {
1372  case llvm::Triple::arm:
1373  case llvm::Triple::thumb:
1374    return EmitARMBuiltinExpr(BuiltinID, E);
1375  case llvm::Triple::x86:
1376  case llvm::Triple::x86_64:
1377    return EmitX86BuiltinExpr(BuiltinID, E);
1378  case llvm::Triple::ppc:
1379  case llvm::Triple::ppc64:
1380    return EmitPPCBuiltinExpr(BuiltinID, E);
1381  case llvm::Triple::hexagon:
1382    return EmitHexagonBuiltinExpr(BuiltinID, E);
1383  default:
1384    return 0;
1385  }
1386}
1387
1388static llvm::VectorType *GetNeonType(CodeGenFunction *CGF,
1389                                     NeonTypeFlags TypeFlags) {
1390  int IsQuad = TypeFlags.isQuad();
1391  switch (TypeFlags.getEltType()) {
1392  case NeonTypeFlags::Int8:
1393  case NeonTypeFlags::Poly8:
1394    return llvm::VectorType::get(CGF->Int8Ty, 8 << IsQuad);
1395  case NeonTypeFlags::Int16:
1396  case NeonTypeFlags::Poly16:
1397  case NeonTypeFlags::Float16:
1398    return llvm::VectorType::get(CGF->Int16Ty, 4 << IsQuad);
1399  case NeonTypeFlags::Int32:
1400    return llvm::VectorType::get(CGF->Int32Ty, 2 << IsQuad);
1401  case NeonTypeFlags::Int64:
1402    return llvm::VectorType::get(CGF->Int64Ty, 1 << IsQuad);
1403  case NeonTypeFlags::Float32:
1404    return llvm::VectorType::get(CGF->FloatTy, 2 << IsQuad);
1405  }
1406  llvm_unreachable("Invalid NeonTypeFlags element type!");
1407}
1408
1409Value *CodeGenFunction::EmitNeonSplat(Value *V, Constant *C) {
1410  unsigned nElts = cast<llvm::VectorType>(V->getType())->getNumElements();
1411  Value* SV = llvm::ConstantVector::getSplat(nElts, C);
1412  return Builder.CreateShuffleVector(V, V, SV, "lane");
1413}
1414
1415Value *CodeGenFunction::EmitNeonCall(Function *F, SmallVectorImpl<Value*> &Ops,
1416                                     const char *name,
1417                                     unsigned shift, bool rightshift) {
1418  unsigned j = 0;
1419  for (Function::const_arg_iterator ai = F->arg_begin(), ae = F->arg_end();
1420       ai != ae; ++ai, ++j)
1421    if (shift > 0 && shift == j)
1422      Ops[j] = EmitNeonShiftVector(Ops[j], ai->getType(), rightshift);
1423    else
1424      Ops[j] = Builder.CreateBitCast(Ops[j], ai->getType(), name);
1425
1426  return Builder.CreateCall(F, Ops, name);
1427}
1428
1429Value *CodeGenFunction::EmitNeonShiftVector(Value *V, llvm::Type *Ty,
1430                                            bool neg) {
1431  int SV = cast<ConstantInt>(V)->getSExtValue();
1432
1433  llvm::VectorType *VTy = cast<llvm::VectorType>(Ty);
1434  llvm::Constant *C = ConstantInt::get(VTy->getElementType(), neg ? -SV : SV);
1435  return llvm::ConstantVector::getSplat(VTy->getNumElements(), C);
1436}
1437
1438/// GetPointeeAlignment - Given an expression with a pointer type, find the
1439/// alignment of the type referenced by the pointer.  Skip over implicit
1440/// casts.
1441unsigned CodeGenFunction::GetPointeeAlignment(const Expr *Addr) {
1442  unsigned Align = 1;
1443  // Check if the type is a pointer.  The implicit cast operand might not be.
1444  while (Addr->getType()->isPointerType()) {
1445    QualType PtTy = Addr->getType()->getPointeeType();
1446
1447    // Can't get alignment of incomplete types.
1448    if (!PtTy->isIncompleteType()) {
1449      unsigned NewA = getContext().getTypeAlignInChars(PtTy).getQuantity();
1450      if (NewA > Align)
1451        Align = NewA;
1452    }
1453
1454    // If the address is an implicit cast, repeat with the cast operand.
1455    if (const ImplicitCastExpr *CastAddr = dyn_cast<ImplicitCastExpr>(Addr)) {
1456      Addr = CastAddr->getSubExpr();
1457      continue;
1458    }
1459    break;
1460  }
1461  return Align;
1462}
1463
1464/// GetPointeeAlignmentValue - Given an expression with a pointer type, find
1465/// the alignment of the type referenced by the pointer.  Skip over implicit
1466/// casts.  Return the alignment as an llvm::Value.
1467Value *CodeGenFunction::GetPointeeAlignmentValue(const Expr *Addr) {
1468  return llvm::ConstantInt::get(Int32Ty, GetPointeeAlignment(Addr));
1469}
1470
1471Value *CodeGenFunction::EmitARMBuiltinExpr(unsigned BuiltinID,
1472                                           const CallExpr *E) {
1473  if (BuiltinID == ARM::BI__clear_cache) {
1474    const FunctionDecl *FD = E->getDirectCallee();
1475    // Oddly people write this call without args on occasion and gcc accepts
1476    // it - it's also marked as varargs in the description file.
1477    SmallVector<Value*, 2> Ops;
1478    for (unsigned i = 0; i < E->getNumArgs(); i++)
1479      Ops.push_back(EmitScalarExpr(E->getArg(i)));
1480    llvm::Type *Ty = CGM.getTypes().ConvertType(FD->getType());
1481    llvm::FunctionType *FTy = cast<llvm::FunctionType>(Ty);
1482    StringRef Name = FD->getName();
1483    return Builder.CreateCall(CGM.CreateRuntimeFunction(FTy, Name), Ops);
1484  }
1485
1486  if (BuiltinID == ARM::BI__builtin_arm_ldrexd) {
1487    Function *F = CGM.getIntrinsic(Intrinsic::arm_ldrexd);
1488
1489    Value *LdPtr = EmitScalarExpr(E->getArg(0));
1490    Value *Val = Builder.CreateCall(F, LdPtr, "ldrexd");
1491
1492    Value *Val0 = Builder.CreateExtractValue(Val, 1);
1493    Value *Val1 = Builder.CreateExtractValue(Val, 0);
1494    Val0 = Builder.CreateZExt(Val0, Int64Ty);
1495    Val1 = Builder.CreateZExt(Val1, Int64Ty);
1496
1497    Value *ShiftCst = llvm::ConstantInt::get(Int64Ty, 32);
1498    Val = Builder.CreateShl(Val0, ShiftCst, "shl", true /* nuw */);
1499    return Builder.CreateOr(Val, Val1);
1500  }
1501
1502  if (BuiltinID == ARM::BI__builtin_arm_strexd) {
1503    Function *F = CGM.getIntrinsic(Intrinsic::arm_strexd);
1504    llvm::Type *STy = llvm::StructType::get(Int32Ty, Int32Ty, NULL);
1505
1506    Value *One = llvm::ConstantInt::get(Int32Ty, 1);
1507    Value *Tmp = Builder.CreateAlloca(Int64Ty, One);
1508    Value *Val = EmitScalarExpr(E->getArg(0));
1509    Builder.CreateStore(Val, Tmp);
1510
1511    Value *LdPtr = Builder.CreateBitCast(Tmp,llvm::PointerType::getUnqual(STy));
1512    Val = Builder.CreateLoad(LdPtr);
1513
1514    Value *Arg0 = Builder.CreateExtractValue(Val, 0);
1515    Value *Arg1 = Builder.CreateExtractValue(Val, 1);
1516    Value *StPtr = EmitScalarExpr(E->getArg(1));
1517    return Builder.CreateCall3(F, Arg0, Arg1, StPtr, "strexd");
1518  }
1519
1520  SmallVector<Value*, 4> Ops;
1521  for (unsigned i = 0, e = E->getNumArgs() - 1; i != e; i++)
1522    Ops.push_back(EmitScalarExpr(E->getArg(i)));
1523
1524  // vget_lane and vset_lane are not overloaded and do not have an extra
1525  // argument that specifies the vector type.
1526  switch (BuiltinID) {
1527  default: break;
1528  case ARM::BI__builtin_neon_vget_lane_i8:
1529  case ARM::BI__builtin_neon_vget_lane_i16:
1530  case ARM::BI__builtin_neon_vget_lane_i32:
1531  case ARM::BI__builtin_neon_vget_lane_i64:
1532  case ARM::BI__builtin_neon_vget_lane_f32:
1533  case ARM::BI__builtin_neon_vgetq_lane_i8:
1534  case ARM::BI__builtin_neon_vgetq_lane_i16:
1535  case ARM::BI__builtin_neon_vgetq_lane_i32:
1536  case ARM::BI__builtin_neon_vgetq_lane_i64:
1537  case ARM::BI__builtin_neon_vgetq_lane_f32:
1538    return Builder.CreateExtractElement(Ops[0], EmitScalarExpr(E->getArg(1)),
1539                                        "vget_lane");
1540  case ARM::BI__builtin_neon_vset_lane_i8:
1541  case ARM::BI__builtin_neon_vset_lane_i16:
1542  case ARM::BI__builtin_neon_vset_lane_i32:
1543  case ARM::BI__builtin_neon_vset_lane_i64:
1544  case ARM::BI__builtin_neon_vset_lane_f32:
1545  case ARM::BI__builtin_neon_vsetq_lane_i8:
1546  case ARM::BI__builtin_neon_vsetq_lane_i16:
1547  case ARM::BI__builtin_neon_vsetq_lane_i32:
1548  case ARM::BI__builtin_neon_vsetq_lane_i64:
1549  case ARM::BI__builtin_neon_vsetq_lane_f32:
1550    Ops.push_back(EmitScalarExpr(E->getArg(2)));
1551    return Builder.CreateInsertElement(Ops[1], Ops[0], Ops[2], "vset_lane");
1552  }
1553
1554  // Get the last argument, which specifies the vector type.
1555  llvm::APSInt Result;
1556  const Expr *Arg = E->getArg(E->getNumArgs()-1);
1557  if (!Arg->isIntegerConstantExpr(Result, getContext()))
1558    return 0;
1559
1560  if (BuiltinID == ARM::BI__builtin_arm_vcvtr_f ||
1561      BuiltinID == ARM::BI__builtin_arm_vcvtr_d) {
1562    // Determine the overloaded type of this builtin.
1563    llvm::Type *Ty;
1564    if (BuiltinID == ARM::BI__builtin_arm_vcvtr_f)
1565      Ty = FloatTy;
1566    else
1567      Ty = DoubleTy;
1568
1569    // Determine whether this is an unsigned conversion or not.
1570    bool usgn = Result.getZExtValue() == 1;
1571    unsigned Int = usgn ? Intrinsic::arm_vcvtru : Intrinsic::arm_vcvtr;
1572
1573    // Call the appropriate intrinsic.
1574    Function *F = CGM.getIntrinsic(Int, Ty);
1575    return Builder.CreateCall(F, Ops, "vcvtr");
1576  }
1577
1578  // Determine the type of this overloaded NEON intrinsic.
1579  NeonTypeFlags Type(Result.getZExtValue());
1580  bool usgn = Type.isUnsigned();
1581  bool quad = Type.isQuad();
1582  bool rightShift = false;
1583
1584  llvm::VectorType *VTy = GetNeonType(this, Type);
1585  llvm::Type *Ty = VTy;
1586  if (!Ty)
1587    return 0;
1588
1589  unsigned Int;
1590  switch (BuiltinID) {
1591  default: return 0;
1592  case ARM::BI__builtin_neon_vabd_v:
1593  case ARM::BI__builtin_neon_vabdq_v:
1594    Int = usgn ? Intrinsic::arm_neon_vabdu : Intrinsic::arm_neon_vabds;
1595    return EmitNeonCall(CGM.getIntrinsic(Int, Ty), Ops, "vabd");
1596  case ARM::BI__builtin_neon_vabs_v:
1597  case ARM::BI__builtin_neon_vabsq_v:
1598    return EmitNeonCall(CGM.getIntrinsic(Intrinsic::arm_neon_vabs, Ty),
1599                        Ops, "vabs");
1600  case ARM::BI__builtin_neon_vaddhn_v:
1601    return EmitNeonCall(CGM.getIntrinsic(Intrinsic::arm_neon_vaddhn, Ty),
1602                        Ops, "vaddhn");
1603  case ARM::BI__builtin_neon_vcale_v:
1604    std::swap(Ops[0], Ops[1]);
1605  case ARM::BI__builtin_neon_vcage_v: {
1606    Function *F = CGM.getIntrinsic(Intrinsic::arm_neon_vacged);
1607    return EmitNeonCall(F, Ops, "vcage");
1608  }
1609  case ARM::BI__builtin_neon_vcaleq_v:
1610    std::swap(Ops[0], Ops[1]);
1611  case ARM::BI__builtin_neon_vcageq_v: {
1612    Function *F = CGM.getIntrinsic(Intrinsic::arm_neon_vacgeq);
1613    return EmitNeonCall(F, Ops, "vcage");
1614  }
1615  case ARM::BI__builtin_neon_vcalt_v:
1616    std::swap(Ops[0], Ops[1]);
1617  case ARM::BI__builtin_neon_vcagt_v: {
1618    Function *F = CGM.getIntrinsic(Intrinsic::arm_neon_vacgtd);
1619    return EmitNeonCall(F, Ops, "vcagt");
1620  }
1621  case ARM::BI__builtin_neon_vcaltq_v:
1622    std::swap(Ops[0], Ops[1]);
1623  case ARM::BI__builtin_neon_vcagtq_v: {
1624    Function *F = CGM.getIntrinsic(Intrinsic::arm_neon_vacgtq);
1625    return EmitNeonCall(F, Ops, "vcagt");
1626  }
1627  case ARM::BI__builtin_neon_vcls_v:
1628  case ARM::BI__builtin_neon_vclsq_v: {
1629    Function *F = CGM.getIntrinsic(Intrinsic::arm_neon_vcls, Ty);
1630    return EmitNeonCall(F, Ops, "vcls");
1631  }
1632  case ARM::BI__builtin_neon_vclz_v:
1633  case ARM::BI__builtin_neon_vclzq_v: {
1634    Function *F = CGM.getIntrinsic(Intrinsic::arm_neon_vclz, Ty);
1635    return EmitNeonCall(F, Ops, "vclz");
1636  }
1637  case ARM::BI__builtin_neon_vcnt_v:
1638  case ARM::BI__builtin_neon_vcntq_v: {
1639    Function *F = CGM.getIntrinsic(Intrinsic::arm_neon_vcnt, Ty);
1640    return EmitNeonCall(F, Ops, "vcnt");
1641  }
1642  case ARM::BI__builtin_neon_vcvt_f16_v: {
1643    assert(Type.getEltType() == NeonTypeFlags::Float16 && !quad &&
1644           "unexpected vcvt_f16_v builtin");
1645    Function *F = CGM.getIntrinsic(Intrinsic::arm_neon_vcvtfp2hf);
1646    return EmitNeonCall(F, Ops, "vcvt");
1647  }
1648  case ARM::BI__builtin_neon_vcvt_f32_f16: {
1649    assert(Type.getEltType() == NeonTypeFlags::Float16 && !quad &&
1650           "unexpected vcvt_f32_f16 builtin");
1651    Function *F = CGM.getIntrinsic(Intrinsic::arm_neon_vcvthf2fp);
1652    return EmitNeonCall(F, Ops, "vcvt");
1653  }
1654  case ARM::BI__builtin_neon_vcvt_f32_v:
1655  case ARM::BI__builtin_neon_vcvtq_f32_v:
1656    Ops[0] = Builder.CreateBitCast(Ops[0], Ty);
1657    Ty = GetNeonType(this, NeonTypeFlags(NeonTypeFlags::Float32, false, quad));
1658    return usgn ? Builder.CreateUIToFP(Ops[0], Ty, "vcvt")
1659                : Builder.CreateSIToFP(Ops[0], Ty, "vcvt");
1660  case ARM::BI__builtin_neon_vcvt_s32_v:
1661  case ARM::BI__builtin_neon_vcvt_u32_v:
1662  case ARM::BI__builtin_neon_vcvtq_s32_v:
1663  case ARM::BI__builtin_neon_vcvtq_u32_v: {
1664    llvm::Type *FloatTy =
1665      GetNeonType(this, NeonTypeFlags(NeonTypeFlags::Float32, false, quad));
1666    Ops[0] = Builder.CreateBitCast(Ops[0], FloatTy);
1667    return usgn ? Builder.CreateFPToUI(Ops[0], Ty, "vcvt")
1668                : Builder.CreateFPToSI(Ops[0], Ty, "vcvt");
1669  }
1670  case ARM::BI__builtin_neon_vcvt_n_f32_v:
1671  case ARM::BI__builtin_neon_vcvtq_n_f32_v: {
1672    llvm::Type *FloatTy =
1673      GetNeonType(this, NeonTypeFlags(NeonTypeFlags::Float32, false, quad));
1674    llvm::Type *Tys[2] = { FloatTy, Ty };
1675    Int = usgn ? Intrinsic::arm_neon_vcvtfxu2fp
1676               : Intrinsic::arm_neon_vcvtfxs2fp;
1677    Function *F = CGM.getIntrinsic(Int, Tys);
1678    return EmitNeonCall(F, Ops, "vcvt_n");
1679  }
1680  case ARM::BI__builtin_neon_vcvt_n_s32_v:
1681  case ARM::BI__builtin_neon_vcvt_n_u32_v:
1682  case ARM::BI__builtin_neon_vcvtq_n_s32_v:
1683  case ARM::BI__builtin_neon_vcvtq_n_u32_v: {
1684    llvm::Type *FloatTy =
1685      GetNeonType(this, NeonTypeFlags(NeonTypeFlags::Float32, false, quad));
1686    llvm::Type *Tys[2] = { Ty, FloatTy };
1687    Int = usgn ? Intrinsic::arm_neon_vcvtfp2fxu
1688               : Intrinsic::arm_neon_vcvtfp2fxs;
1689    Function *F = CGM.getIntrinsic(Int, Tys);
1690    return EmitNeonCall(F, Ops, "vcvt_n");
1691  }
1692  case ARM::BI__builtin_neon_vext_v:
1693  case ARM::BI__builtin_neon_vextq_v: {
1694    int CV = cast<ConstantInt>(Ops[2])->getSExtValue();
1695    SmallVector<Constant*, 16> Indices;
1696    for (unsigned i = 0, e = VTy->getNumElements(); i != e; ++i)
1697      Indices.push_back(ConstantInt::get(Int32Ty, i+CV));
1698
1699    Ops[0] = Builder.CreateBitCast(Ops[0], Ty);
1700    Ops[1] = Builder.CreateBitCast(Ops[1], Ty);
1701    Value *SV = llvm::ConstantVector::get(Indices);
1702    return Builder.CreateShuffleVector(Ops[0], Ops[1], SV, "vext");
1703  }
1704  case ARM::BI__builtin_neon_vhadd_v:
1705  case ARM::BI__builtin_neon_vhaddq_v:
1706    Int = usgn ? Intrinsic::arm_neon_vhaddu : Intrinsic::arm_neon_vhadds;
1707    return EmitNeonCall(CGM.getIntrinsic(Int, Ty), Ops, "vhadd");
1708  case ARM::BI__builtin_neon_vhsub_v:
1709  case ARM::BI__builtin_neon_vhsubq_v:
1710    Int = usgn ? Intrinsic::arm_neon_vhsubu : Intrinsic::arm_neon_vhsubs;
1711    return EmitNeonCall(CGM.getIntrinsic(Int, Ty), Ops, "vhsub");
1712  case ARM::BI__builtin_neon_vld1_v:
1713  case ARM::BI__builtin_neon_vld1q_v:
1714    Ops.push_back(GetPointeeAlignmentValue(E->getArg(0)));
1715    return EmitNeonCall(CGM.getIntrinsic(Intrinsic::arm_neon_vld1, Ty),
1716                        Ops, "vld1");
1717  case ARM::BI__builtin_neon_vld1_lane_v:
1718  case ARM::BI__builtin_neon_vld1q_lane_v: {
1719    Ops[1] = Builder.CreateBitCast(Ops[1], Ty);
1720    Ty = llvm::PointerType::getUnqual(VTy->getElementType());
1721    Ops[0] = Builder.CreateBitCast(Ops[0], Ty);
1722    LoadInst *Ld = Builder.CreateLoad(Ops[0]);
1723    Value *Align = GetPointeeAlignmentValue(E->getArg(0));
1724    Ld->setAlignment(cast<ConstantInt>(Align)->getZExtValue());
1725    return Builder.CreateInsertElement(Ops[1], Ld, Ops[2], "vld1_lane");
1726  }
1727  case ARM::BI__builtin_neon_vld1_dup_v:
1728  case ARM::BI__builtin_neon_vld1q_dup_v: {
1729    Value *V = UndefValue::get(Ty);
1730    Ty = llvm::PointerType::getUnqual(VTy->getElementType());
1731    Ops[0] = Builder.CreateBitCast(Ops[0], Ty);
1732    LoadInst *Ld = Builder.CreateLoad(Ops[0]);
1733    Value *Align = GetPointeeAlignmentValue(E->getArg(0));
1734    Ld->setAlignment(cast<ConstantInt>(Align)->getZExtValue());
1735    llvm::Constant *CI = ConstantInt::get(Int32Ty, 0);
1736    Ops[0] = Builder.CreateInsertElement(V, Ld, CI);
1737    return EmitNeonSplat(Ops[0], CI);
1738  }
1739  case ARM::BI__builtin_neon_vld2_v:
1740  case ARM::BI__builtin_neon_vld2q_v: {
1741    Function *F = CGM.getIntrinsic(Intrinsic::arm_neon_vld2, Ty);
1742    Value *Align = GetPointeeAlignmentValue(E->getArg(1));
1743    Ops[1] = Builder.CreateCall2(F, Ops[1], Align, "vld2");
1744    Ty = llvm::PointerType::getUnqual(Ops[1]->getType());
1745    Ops[0] = Builder.CreateBitCast(Ops[0], Ty);
1746    return Builder.CreateStore(Ops[1], Ops[0]);
1747  }
1748  case ARM::BI__builtin_neon_vld3_v:
1749  case ARM::BI__builtin_neon_vld3q_v: {
1750    Function *F = CGM.getIntrinsic(Intrinsic::arm_neon_vld3, Ty);
1751    Value *Align = GetPointeeAlignmentValue(E->getArg(1));
1752    Ops[1] = Builder.CreateCall2(F, Ops[1], Align, "vld3");
1753    Ty = llvm::PointerType::getUnqual(Ops[1]->getType());
1754    Ops[0] = Builder.CreateBitCast(Ops[0], Ty);
1755    return Builder.CreateStore(Ops[1], Ops[0]);
1756  }
1757  case ARM::BI__builtin_neon_vld4_v:
1758  case ARM::BI__builtin_neon_vld4q_v: {
1759    Function *F = CGM.getIntrinsic(Intrinsic::arm_neon_vld4, Ty);
1760    Value *Align = GetPointeeAlignmentValue(E->getArg(1));
1761    Ops[1] = Builder.CreateCall2(F, Ops[1], Align, "vld4");
1762    Ty = llvm::PointerType::getUnqual(Ops[1]->getType());
1763    Ops[0] = Builder.CreateBitCast(Ops[0], Ty);
1764    return Builder.CreateStore(Ops[1], Ops[0]);
1765  }
1766  case ARM::BI__builtin_neon_vld2_lane_v:
1767  case ARM::BI__builtin_neon_vld2q_lane_v: {
1768    Function *F = CGM.getIntrinsic(Intrinsic::arm_neon_vld2lane, Ty);
1769    Ops[2] = Builder.CreateBitCast(Ops[2], Ty);
1770    Ops[3] = Builder.CreateBitCast(Ops[3], Ty);
1771    Ops.push_back(GetPointeeAlignmentValue(E->getArg(1)));
1772    Ops[1] = Builder.CreateCall(F, makeArrayRef(Ops).slice(1), "vld2_lane");
1773    Ty = llvm::PointerType::getUnqual(Ops[1]->getType());
1774    Ops[0] = Builder.CreateBitCast(Ops[0], Ty);
1775    return Builder.CreateStore(Ops[1], Ops[0]);
1776  }
1777  case ARM::BI__builtin_neon_vld3_lane_v:
1778  case ARM::BI__builtin_neon_vld3q_lane_v: {
1779    Function *F = CGM.getIntrinsic(Intrinsic::arm_neon_vld3lane, Ty);
1780    Ops[2] = Builder.CreateBitCast(Ops[2], Ty);
1781    Ops[3] = Builder.CreateBitCast(Ops[3], Ty);
1782    Ops[4] = Builder.CreateBitCast(Ops[4], Ty);
1783    Ops.push_back(GetPointeeAlignmentValue(E->getArg(1)));
1784    Ops[1] = Builder.CreateCall(F, makeArrayRef(Ops).slice(1), "vld3_lane");
1785    Ty = llvm::PointerType::getUnqual(Ops[1]->getType());
1786    Ops[0] = Builder.CreateBitCast(Ops[0], Ty);
1787    return Builder.CreateStore(Ops[1], Ops[0]);
1788  }
1789  case ARM::BI__builtin_neon_vld4_lane_v:
1790  case ARM::BI__builtin_neon_vld4q_lane_v: {
1791    Function *F = CGM.getIntrinsic(Intrinsic::arm_neon_vld4lane, Ty);
1792    Ops[2] = Builder.CreateBitCast(Ops[2], Ty);
1793    Ops[3] = Builder.CreateBitCast(Ops[3], Ty);
1794    Ops[4] = Builder.CreateBitCast(Ops[4], Ty);
1795    Ops[5] = Builder.CreateBitCast(Ops[5], Ty);
1796    Ops.push_back(GetPointeeAlignmentValue(E->getArg(1)));
1797    Ops[1] = Builder.CreateCall(F, makeArrayRef(Ops).slice(1), "vld3_lane");
1798    Ty = llvm::PointerType::getUnqual(Ops[1]->getType());
1799    Ops[0] = Builder.CreateBitCast(Ops[0], Ty);
1800    return Builder.CreateStore(Ops[1], Ops[0]);
1801  }
1802  case ARM::BI__builtin_neon_vld2_dup_v:
1803  case ARM::BI__builtin_neon_vld3_dup_v:
1804  case ARM::BI__builtin_neon_vld4_dup_v: {
1805    // Handle 64-bit elements as a special-case.  There is no "dup" needed.
1806    if (VTy->getElementType()->getPrimitiveSizeInBits() == 64) {
1807      switch (BuiltinID) {
1808      case ARM::BI__builtin_neon_vld2_dup_v:
1809        Int = Intrinsic::arm_neon_vld2;
1810        break;
1811      case ARM::BI__builtin_neon_vld3_dup_v:
1812        Int = Intrinsic::arm_neon_vld3;
1813        break;
1814      case ARM::BI__builtin_neon_vld4_dup_v:
1815        Int = Intrinsic::arm_neon_vld4;
1816        break;
1817      default: llvm_unreachable("unknown vld_dup intrinsic?");
1818      }
1819      Function *F = CGM.getIntrinsic(Int, Ty);
1820      Value *Align = GetPointeeAlignmentValue(E->getArg(1));
1821      Ops[1] = Builder.CreateCall2(F, Ops[1], Align, "vld_dup");
1822      Ty = llvm::PointerType::getUnqual(Ops[1]->getType());
1823      Ops[0] = Builder.CreateBitCast(Ops[0], Ty);
1824      return Builder.CreateStore(Ops[1], Ops[0]);
1825    }
1826    switch (BuiltinID) {
1827    case ARM::BI__builtin_neon_vld2_dup_v:
1828      Int = Intrinsic::arm_neon_vld2lane;
1829      break;
1830    case ARM::BI__builtin_neon_vld3_dup_v:
1831      Int = Intrinsic::arm_neon_vld3lane;
1832      break;
1833    case ARM::BI__builtin_neon_vld4_dup_v:
1834      Int = Intrinsic::arm_neon_vld4lane;
1835      break;
1836    default: llvm_unreachable("unknown vld_dup intrinsic?");
1837    }
1838    Function *F = CGM.getIntrinsic(Int, Ty);
1839    llvm::StructType *STy = cast<llvm::StructType>(F->getReturnType());
1840
1841    SmallVector<Value*, 6> Args;
1842    Args.push_back(Ops[1]);
1843    Args.append(STy->getNumElements(), UndefValue::get(Ty));
1844
1845    llvm::Constant *CI = ConstantInt::get(Int32Ty, 0);
1846    Args.push_back(CI);
1847    Args.push_back(GetPointeeAlignmentValue(E->getArg(1)));
1848
1849    Ops[1] = Builder.CreateCall(F, Args, "vld_dup");
1850    // splat lane 0 to all elts in each vector of the result.
1851    for (unsigned i = 0, e = STy->getNumElements(); i != e; ++i) {
1852      Value *Val = Builder.CreateExtractValue(Ops[1], i);
1853      Value *Elt = Builder.CreateBitCast(Val, Ty);
1854      Elt = EmitNeonSplat(Elt, CI);
1855      Elt = Builder.CreateBitCast(Elt, Val->getType());
1856      Ops[1] = Builder.CreateInsertValue(Ops[1], Elt, i);
1857    }
1858    Ty = llvm::PointerType::getUnqual(Ops[1]->getType());
1859    Ops[0] = Builder.CreateBitCast(Ops[0], Ty);
1860    return Builder.CreateStore(Ops[1], Ops[0]);
1861  }
1862  case ARM::BI__builtin_neon_vmax_v:
1863  case ARM::BI__builtin_neon_vmaxq_v:
1864    Int = usgn ? Intrinsic::arm_neon_vmaxu : Intrinsic::arm_neon_vmaxs;
1865    return EmitNeonCall(CGM.getIntrinsic(Int, Ty), Ops, "vmax");
1866  case ARM::BI__builtin_neon_vmin_v:
1867  case ARM::BI__builtin_neon_vminq_v:
1868    Int = usgn ? Intrinsic::arm_neon_vminu : Intrinsic::arm_neon_vmins;
1869    return EmitNeonCall(CGM.getIntrinsic(Int, Ty), Ops, "vmin");
1870  case ARM::BI__builtin_neon_vmovl_v: {
1871    llvm::Type *DTy =llvm::VectorType::getTruncatedElementVectorType(VTy);
1872    Ops[0] = Builder.CreateBitCast(Ops[0], DTy);
1873    if (usgn)
1874      return Builder.CreateZExt(Ops[0], Ty, "vmovl");
1875    return Builder.CreateSExt(Ops[0], Ty, "vmovl");
1876  }
1877  case ARM::BI__builtin_neon_vmovn_v: {
1878    llvm::Type *QTy = llvm::VectorType::getExtendedElementVectorType(VTy);
1879    Ops[0] = Builder.CreateBitCast(Ops[0], QTy);
1880    return Builder.CreateTrunc(Ops[0], Ty, "vmovn");
1881  }
1882  case ARM::BI__builtin_neon_vmul_v:
1883  case ARM::BI__builtin_neon_vmulq_v:
1884    assert(Type.isPoly() && "vmul builtin only supported for polynomial types");
1885    return EmitNeonCall(CGM.getIntrinsic(Intrinsic::arm_neon_vmulp, Ty),
1886                        Ops, "vmul");
1887  case ARM::BI__builtin_neon_vmull_v:
1888    Int = usgn ? Intrinsic::arm_neon_vmullu : Intrinsic::arm_neon_vmulls;
1889    Int = Type.isPoly() ? (unsigned)Intrinsic::arm_neon_vmullp : Int;
1890    return EmitNeonCall(CGM.getIntrinsic(Int, Ty), Ops, "vmull");
1891  case ARM::BI__builtin_neon_vpadal_v:
1892  case ARM::BI__builtin_neon_vpadalq_v: {
1893    Int = usgn ? Intrinsic::arm_neon_vpadalu : Intrinsic::arm_neon_vpadals;
1894    // The source operand type has twice as many elements of half the size.
1895    unsigned EltBits = VTy->getElementType()->getPrimitiveSizeInBits();
1896    llvm::Type *EltTy =
1897      llvm::IntegerType::get(getLLVMContext(), EltBits / 2);
1898    llvm::Type *NarrowTy =
1899      llvm::VectorType::get(EltTy, VTy->getNumElements() * 2);
1900    llvm::Type *Tys[2] = { Ty, NarrowTy };
1901    return EmitNeonCall(CGM.getIntrinsic(Int, Tys), Ops, "vpadal");
1902  }
1903  case ARM::BI__builtin_neon_vpadd_v:
1904    return EmitNeonCall(CGM.getIntrinsic(Intrinsic::arm_neon_vpadd, Ty),
1905                        Ops, "vpadd");
1906  case ARM::BI__builtin_neon_vpaddl_v:
1907  case ARM::BI__builtin_neon_vpaddlq_v: {
1908    Int = usgn ? Intrinsic::arm_neon_vpaddlu : Intrinsic::arm_neon_vpaddls;
1909    // The source operand type has twice as many elements of half the size.
1910    unsigned EltBits = VTy->getElementType()->getPrimitiveSizeInBits();
1911    llvm::Type *EltTy = llvm::IntegerType::get(getLLVMContext(), EltBits / 2);
1912    llvm::Type *NarrowTy =
1913      llvm::VectorType::get(EltTy, VTy->getNumElements() * 2);
1914    llvm::Type *Tys[2] = { Ty, NarrowTy };
1915    return EmitNeonCall(CGM.getIntrinsic(Int, Tys), Ops, "vpaddl");
1916  }
1917  case ARM::BI__builtin_neon_vpmax_v:
1918    Int = usgn ? Intrinsic::arm_neon_vpmaxu : Intrinsic::arm_neon_vpmaxs;
1919    return EmitNeonCall(CGM.getIntrinsic(Int, Ty), Ops, "vpmax");
1920  case ARM::BI__builtin_neon_vpmin_v:
1921    Int = usgn ? Intrinsic::arm_neon_vpminu : Intrinsic::arm_neon_vpmins;
1922    return EmitNeonCall(CGM.getIntrinsic(Int, Ty), Ops, "vpmin");
1923  case ARM::BI__builtin_neon_vqabs_v:
1924  case ARM::BI__builtin_neon_vqabsq_v:
1925    return EmitNeonCall(CGM.getIntrinsic(Intrinsic::arm_neon_vqabs, Ty),
1926                        Ops, "vqabs");
1927  case ARM::BI__builtin_neon_vqadd_v:
1928  case ARM::BI__builtin_neon_vqaddq_v:
1929    Int = usgn ? Intrinsic::arm_neon_vqaddu : Intrinsic::arm_neon_vqadds;
1930    return EmitNeonCall(CGM.getIntrinsic(Int, Ty), Ops, "vqadd");
1931  case ARM::BI__builtin_neon_vqdmlal_v:
1932    return EmitNeonCall(CGM.getIntrinsic(Intrinsic::arm_neon_vqdmlal, Ty),
1933                        Ops, "vqdmlal");
1934  case ARM::BI__builtin_neon_vqdmlsl_v:
1935    return EmitNeonCall(CGM.getIntrinsic(Intrinsic::arm_neon_vqdmlsl, Ty),
1936                        Ops, "vqdmlsl");
1937  case ARM::BI__builtin_neon_vqdmulh_v:
1938  case ARM::BI__builtin_neon_vqdmulhq_v:
1939    return EmitNeonCall(CGM.getIntrinsic(Intrinsic::arm_neon_vqdmulh, Ty),
1940                        Ops, "vqdmulh");
1941  case ARM::BI__builtin_neon_vqdmull_v:
1942    return EmitNeonCall(CGM.getIntrinsic(Intrinsic::arm_neon_vqdmull, Ty),
1943                        Ops, "vqdmull");
1944  case ARM::BI__builtin_neon_vqmovn_v:
1945    Int = usgn ? Intrinsic::arm_neon_vqmovnu : Intrinsic::arm_neon_vqmovns;
1946    return EmitNeonCall(CGM.getIntrinsic(Int, Ty), Ops, "vqmovn");
1947  case ARM::BI__builtin_neon_vqmovun_v:
1948    return EmitNeonCall(CGM.getIntrinsic(Intrinsic::arm_neon_vqmovnsu, Ty),
1949                        Ops, "vqdmull");
1950  case ARM::BI__builtin_neon_vqneg_v:
1951  case ARM::BI__builtin_neon_vqnegq_v:
1952    return EmitNeonCall(CGM.getIntrinsic(Intrinsic::arm_neon_vqneg, Ty),
1953                        Ops, "vqneg");
1954  case ARM::BI__builtin_neon_vqrdmulh_v:
1955  case ARM::BI__builtin_neon_vqrdmulhq_v:
1956    return EmitNeonCall(CGM.getIntrinsic(Intrinsic::arm_neon_vqrdmulh, Ty),
1957                        Ops, "vqrdmulh");
1958  case ARM::BI__builtin_neon_vqrshl_v:
1959  case ARM::BI__builtin_neon_vqrshlq_v:
1960    Int = usgn ? Intrinsic::arm_neon_vqrshiftu : Intrinsic::arm_neon_vqrshifts;
1961    return EmitNeonCall(CGM.getIntrinsic(Int, Ty), Ops, "vqrshl");
1962  case ARM::BI__builtin_neon_vqrshrn_n_v:
1963    Int = usgn ? Intrinsic::arm_neon_vqrshiftnu : Intrinsic::arm_neon_vqrshiftns;
1964    return EmitNeonCall(CGM.getIntrinsic(Int, Ty), Ops, "vqrshrn_n",
1965                        1, true);
1966  case ARM::BI__builtin_neon_vqrshrun_n_v:
1967    return EmitNeonCall(CGM.getIntrinsic(Intrinsic::arm_neon_vqrshiftnsu, Ty),
1968                        Ops, "vqrshrun_n", 1, true);
1969  case ARM::BI__builtin_neon_vqshl_v:
1970  case ARM::BI__builtin_neon_vqshlq_v:
1971    Int = usgn ? Intrinsic::arm_neon_vqshiftu : Intrinsic::arm_neon_vqshifts;
1972    return EmitNeonCall(CGM.getIntrinsic(Int, Ty), Ops, "vqshl");
1973  case ARM::BI__builtin_neon_vqshl_n_v:
1974  case ARM::BI__builtin_neon_vqshlq_n_v:
1975    Int = usgn ? Intrinsic::arm_neon_vqshiftu : Intrinsic::arm_neon_vqshifts;
1976    return EmitNeonCall(CGM.getIntrinsic(Int, Ty), Ops, "vqshl_n",
1977                        1, false);
1978  case ARM::BI__builtin_neon_vqshlu_n_v:
1979  case ARM::BI__builtin_neon_vqshluq_n_v:
1980    return EmitNeonCall(CGM.getIntrinsic(Intrinsic::arm_neon_vqshiftsu, Ty),
1981                        Ops, "vqshlu", 1, false);
1982  case ARM::BI__builtin_neon_vqshrn_n_v:
1983    Int = usgn ? Intrinsic::arm_neon_vqshiftnu : Intrinsic::arm_neon_vqshiftns;
1984    return EmitNeonCall(CGM.getIntrinsic(Int, Ty), Ops, "vqshrn_n",
1985                        1, true);
1986  case ARM::BI__builtin_neon_vqshrun_n_v:
1987    return EmitNeonCall(CGM.getIntrinsic(Intrinsic::arm_neon_vqshiftnsu, Ty),
1988                        Ops, "vqshrun_n", 1, true);
1989  case ARM::BI__builtin_neon_vqsub_v:
1990  case ARM::BI__builtin_neon_vqsubq_v:
1991    Int = usgn ? Intrinsic::arm_neon_vqsubu : Intrinsic::arm_neon_vqsubs;
1992    return EmitNeonCall(CGM.getIntrinsic(Int, Ty), Ops, "vqsub");
1993  case ARM::BI__builtin_neon_vraddhn_v:
1994    return EmitNeonCall(CGM.getIntrinsic(Intrinsic::arm_neon_vraddhn, Ty),
1995                        Ops, "vraddhn");
1996  case ARM::BI__builtin_neon_vrecpe_v:
1997  case ARM::BI__builtin_neon_vrecpeq_v:
1998    return EmitNeonCall(CGM.getIntrinsic(Intrinsic::arm_neon_vrecpe, Ty),
1999                        Ops, "vrecpe");
2000  case ARM::BI__builtin_neon_vrecps_v:
2001  case ARM::BI__builtin_neon_vrecpsq_v:
2002    return EmitNeonCall(CGM.getIntrinsic(Intrinsic::arm_neon_vrecps, Ty),
2003                        Ops, "vrecps");
2004  case ARM::BI__builtin_neon_vrhadd_v:
2005  case ARM::BI__builtin_neon_vrhaddq_v:
2006    Int = usgn ? Intrinsic::arm_neon_vrhaddu : Intrinsic::arm_neon_vrhadds;
2007    return EmitNeonCall(CGM.getIntrinsic(Int, Ty), Ops, "vrhadd");
2008  case ARM::BI__builtin_neon_vrshl_v:
2009  case ARM::BI__builtin_neon_vrshlq_v:
2010    Int = usgn ? Intrinsic::arm_neon_vrshiftu : Intrinsic::arm_neon_vrshifts;
2011    return EmitNeonCall(CGM.getIntrinsic(Int, Ty), Ops, "vrshl");
2012  case ARM::BI__builtin_neon_vrshrn_n_v:
2013    return EmitNeonCall(CGM.getIntrinsic(Intrinsic::arm_neon_vrshiftn, Ty),
2014                        Ops, "vrshrn_n", 1, true);
2015  case ARM::BI__builtin_neon_vrshr_n_v:
2016  case ARM::BI__builtin_neon_vrshrq_n_v:
2017    Int = usgn ? Intrinsic::arm_neon_vrshiftu : Intrinsic::arm_neon_vrshifts;
2018    return EmitNeonCall(CGM.getIntrinsic(Int, Ty), Ops, "vrshr_n", 1, true);
2019  case ARM::BI__builtin_neon_vrsqrte_v:
2020  case ARM::BI__builtin_neon_vrsqrteq_v:
2021    return EmitNeonCall(CGM.getIntrinsic(Intrinsic::arm_neon_vrsqrte, Ty),
2022                        Ops, "vrsqrte");
2023  case ARM::BI__builtin_neon_vrsqrts_v:
2024  case ARM::BI__builtin_neon_vrsqrtsq_v:
2025    return EmitNeonCall(CGM.getIntrinsic(Intrinsic::arm_neon_vrsqrts, Ty),
2026                        Ops, "vrsqrts");
2027  case ARM::BI__builtin_neon_vrsra_n_v:
2028  case ARM::BI__builtin_neon_vrsraq_n_v:
2029    Ops[0] = Builder.CreateBitCast(Ops[0], Ty);
2030    Ops[1] = Builder.CreateBitCast(Ops[1], Ty);
2031    Ops[2] = EmitNeonShiftVector(Ops[2], Ty, true);
2032    Int = usgn ? Intrinsic::arm_neon_vrshiftu : Intrinsic::arm_neon_vrshifts;
2033    Ops[1] = Builder.CreateCall2(CGM.getIntrinsic(Int, Ty), Ops[1], Ops[2]);
2034    return Builder.CreateAdd(Ops[0], Ops[1], "vrsra_n");
2035  case ARM::BI__builtin_neon_vrsubhn_v:
2036    return EmitNeonCall(CGM.getIntrinsic(Intrinsic::arm_neon_vrsubhn, Ty),
2037                        Ops, "vrsubhn");
2038  case ARM::BI__builtin_neon_vshl_v:
2039  case ARM::BI__builtin_neon_vshlq_v:
2040    Int = usgn ? Intrinsic::arm_neon_vshiftu : Intrinsic::arm_neon_vshifts;
2041    return EmitNeonCall(CGM.getIntrinsic(Int, Ty), Ops, "vshl");
2042  case ARM::BI__builtin_neon_vshll_n_v:
2043    Int = usgn ? Intrinsic::arm_neon_vshiftlu : Intrinsic::arm_neon_vshiftls;
2044    return EmitNeonCall(CGM.getIntrinsic(Int, Ty), Ops, "vshll", 1);
2045  case ARM::BI__builtin_neon_vshl_n_v:
2046  case ARM::BI__builtin_neon_vshlq_n_v:
2047    Ops[1] = EmitNeonShiftVector(Ops[1], Ty, false);
2048    return Builder.CreateShl(Builder.CreateBitCast(Ops[0],Ty), Ops[1], "vshl_n");
2049  case ARM::BI__builtin_neon_vshrn_n_v:
2050    return EmitNeonCall(CGM.getIntrinsic(Intrinsic::arm_neon_vshiftn, Ty),
2051                        Ops, "vshrn_n", 1, true);
2052  case ARM::BI__builtin_neon_vshr_n_v:
2053  case ARM::BI__builtin_neon_vshrq_n_v:
2054    Ops[0] = Builder.CreateBitCast(Ops[0], Ty);
2055    Ops[1] = EmitNeonShiftVector(Ops[1], Ty, false);
2056    if (usgn)
2057      return Builder.CreateLShr(Ops[0], Ops[1], "vshr_n");
2058    else
2059      return Builder.CreateAShr(Ops[0], Ops[1], "vshr_n");
2060  case ARM::BI__builtin_neon_vsri_n_v:
2061  case ARM::BI__builtin_neon_vsriq_n_v:
2062    rightShift = true;
2063  case ARM::BI__builtin_neon_vsli_n_v:
2064  case ARM::BI__builtin_neon_vsliq_n_v:
2065    Ops[2] = EmitNeonShiftVector(Ops[2], Ty, rightShift);
2066    return EmitNeonCall(CGM.getIntrinsic(Intrinsic::arm_neon_vshiftins, Ty),
2067                        Ops, "vsli_n");
2068  case ARM::BI__builtin_neon_vsra_n_v:
2069  case ARM::BI__builtin_neon_vsraq_n_v:
2070    Ops[0] = Builder.CreateBitCast(Ops[0], Ty);
2071    Ops[1] = Builder.CreateBitCast(Ops[1], Ty);
2072    Ops[2] = EmitNeonShiftVector(Ops[2], Ty, false);
2073    if (usgn)
2074      Ops[1] = Builder.CreateLShr(Ops[1], Ops[2], "vsra_n");
2075    else
2076      Ops[1] = Builder.CreateAShr(Ops[1], Ops[2], "vsra_n");
2077    return Builder.CreateAdd(Ops[0], Ops[1]);
2078  case ARM::BI__builtin_neon_vst1_v:
2079  case ARM::BI__builtin_neon_vst1q_v:
2080    Ops.push_back(GetPointeeAlignmentValue(E->getArg(0)));
2081    return EmitNeonCall(CGM.getIntrinsic(Intrinsic::arm_neon_vst1, Ty),
2082                        Ops, "");
2083  case ARM::BI__builtin_neon_vst1_lane_v:
2084  case ARM::BI__builtin_neon_vst1q_lane_v: {
2085    Ops[1] = Builder.CreateBitCast(Ops[1], Ty);
2086    Ops[1] = Builder.CreateExtractElement(Ops[1], Ops[2]);
2087    Ty = llvm::PointerType::getUnqual(Ops[1]->getType());
2088    StoreInst *St = Builder.CreateStore(Ops[1],
2089                                        Builder.CreateBitCast(Ops[0], Ty));
2090    Value *Align = GetPointeeAlignmentValue(E->getArg(0));
2091    St->setAlignment(cast<ConstantInt>(Align)->getZExtValue());
2092    return St;
2093  }
2094  case ARM::BI__builtin_neon_vst2_v:
2095  case ARM::BI__builtin_neon_vst2q_v:
2096    Ops.push_back(GetPointeeAlignmentValue(E->getArg(0)));
2097    return EmitNeonCall(CGM.getIntrinsic(Intrinsic::arm_neon_vst2, Ty),
2098                        Ops, "");
2099  case ARM::BI__builtin_neon_vst2_lane_v:
2100  case ARM::BI__builtin_neon_vst2q_lane_v:
2101    Ops.push_back(GetPointeeAlignmentValue(E->getArg(0)));
2102    return EmitNeonCall(CGM.getIntrinsic(Intrinsic::arm_neon_vst2lane, Ty),
2103                        Ops, "");
2104  case ARM::BI__builtin_neon_vst3_v:
2105  case ARM::BI__builtin_neon_vst3q_v:
2106    Ops.push_back(GetPointeeAlignmentValue(E->getArg(0)));
2107    return EmitNeonCall(CGM.getIntrinsic(Intrinsic::arm_neon_vst3, Ty),
2108                        Ops, "");
2109  case ARM::BI__builtin_neon_vst3_lane_v:
2110  case ARM::BI__builtin_neon_vst3q_lane_v:
2111    Ops.push_back(GetPointeeAlignmentValue(E->getArg(0)));
2112    return EmitNeonCall(CGM.getIntrinsic(Intrinsic::arm_neon_vst3lane, Ty),
2113                        Ops, "");
2114  case ARM::BI__builtin_neon_vst4_v:
2115  case ARM::BI__builtin_neon_vst4q_v:
2116    Ops.push_back(GetPointeeAlignmentValue(E->getArg(0)));
2117    return EmitNeonCall(CGM.getIntrinsic(Intrinsic::arm_neon_vst4, Ty),
2118                        Ops, "");
2119  case ARM::BI__builtin_neon_vst4_lane_v:
2120  case ARM::BI__builtin_neon_vst4q_lane_v:
2121    Ops.push_back(GetPointeeAlignmentValue(E->getArg(0)));
2122    return EmitNeonCall(CGM.getIntrinsic(Intrinsic::arm_neon_vst4lane, Ty),
2123                        Ops, "");
2124  case ARM::BI__builtin_neon_vsubhn_v:
2125    return EmitNeonCall(CGM.getIntrinsic(Intrinsic::arm_neon_vsubhn, Ty),
2126                        Ops, "vsubhn");
2127  case ARM::BI__builtin_neon_vtbl1_v:
2128    return EmitNeonCall(CGM.getIntrinsic(Intrinsic::arm_neon_vtbl1),
2129                        Ops, "vtbl1");
2130  case ARM::BI__builtin_neon_vtbl2_v:
2131    return EmitNeonCall(CGM.getIntrinsic(Intrinsic::arm_neon_vtbl2),
2132                        Ops, "vtbl2");
2133  case ARM::BI__builtin_neon_vtbl3_v:
2134    return EmitNeonCall(CGM.getIntrinsic(Intrinsic::arm_neon_vtbl3),
2135                        Ops, "vtbl3");
2136  case ARM::BI__builtin_neon_vtbl4_v:
2137    return EmitNeonCall(CGM.getIntrinsic(Intrinsic::arm_neon_vtbl4),
2138                        Ops, "vtbl4");
2139  case ARM::BI__builtin_neon_vtbx1_v:
2140    return EmitNeonCall(CGM.getIntrinsic(Intrinsic::arm_neon_vtbx1),
2141                        Ops, "vtbx1");
2142  case ARM::BI__builtin_neon_vtbx2_v:
2143    return EmitNeonCall(CGM.getIntrinsic(Intrinsic::arm_neon_vtbx2),
2144                        Ops, "vtbx2");
2145  case ARM::BI__builtin_neon_vtbx3_v:
2146    return EmitNeonCall(CGM.getIntrinsic(Intrinsic::arm_neon_vtbx3),
2147                        Ops, "vtbx3");
2148  case ARM::BI__builtin_neon_vtbx4_v:
2149    return EmitNeonCall(CGM.getIntrinsic(Intrinsic::arm_neon_vtbx4),
2150                        Ops, "vtbx4");
2151  case ARM::BI__builtin_neon_vtst_v:
2152  case ARM::BI__builtin_neon_vtstq_v: {
2153    Ops[0] = Builder.CreateBitCast(Ops[0], Ty);
2154    Ops[1] = Builder.CreateBitCast(Ops[1], Ty);
2155    Ops[0] = Builder.CreateAnd(Ops[0], Ops[1]);
2156    Ops[0] = Builder.CreateICmp(ICmpInst::ICMP_NE, Ops[0],
2157                                ConstantAggregateZero::get(Ty));
2158    return Builder.CreateSExt(Ops[0], Ty, "vtst");
2159  }
2160  case ARM::BI__builtin_neon_vtrn_v:
2161  case ARM::BI__builtin_neon_vtrnq_v: {
2162    Ops[0] = Builder.CreateBitCast(Ops[0], llvm::PointerType::getUnqual(Ty));
2163    Ops[1] = Builder.CreateBitCast(Ops[1], Ty);
2164    Ops[2] = Builder.CreateBitCast(Ops[2], Ty);
2165    Value *SV = 0;
2166
2167    for (unsigned vi = 0; vi != 2; ++vi) {
2168      SmallVector<Constant*, 16> Indices;
2169      for (unsigned i = 0, e = VTy->getNumElements(); i != e; i += 2) {
2170        Indices.push_back(Builder.getInt32(i+vi));
2171        Indices.push_back(Builder.getInt32(i+e+vi));
2172      }
2173      Value *Addr = Builder.CreateConstInBoundsGEP1_32(Ops[0], vi);
2174      SV = llvm::ConstantVector::get(Indices);
2175      SV = Builder.CreateShuffleVector(Ops[1], Ops[2], SV, "vtrn");
2176      SV = Builder.CreateStore(SV, Addr);
2177    }
2178    return SV;
2179  }
2180  case ARM::BI__builtin_neon_vuzp_v:
2181  case ARM::BI__builtin_neon_vuzpq_v: {
2182    Ops[0] = Builder.CreateBitCast(Ops[0], llvm::PointerType::getUnqual(Ty));
2183    Ops[1] = Builder.CreateBitCast(Ops[1], Ty);
2184    Ops[2] = Builder.CreateBitCast(Ops[2], Ty);
2185    Value *SV = 0;
2186
2187    for (unsigned vi = 0; vi != 2; ++vi) {
2188      SmallVector<Constant*, 16> Indices;
2189      for (unsigned i = 0, e = VTy->getNumElements(); i != e; ++i)
2190        Indices.push_back(ConstantInt::get(Int32Ty, 2*i+vi));
2191
2192      Value *Addr = Builder.CreateConstInBoundsGEP1_32(Ops[0], vi);
2193      SV = llvm::ConstantVector::get(Indices);
2194      SV = Builder.CreateShuffleVector(Ops[1], Ops[2], SV, "vuzp");
2195      SV = Builder.CreateStore(SV, Addr);
2196    }
2197    return SV;
2198  }
2199  case ARM::BI__builtin_neon_vzip_v:
2200  case ARM::BI__builtin_neon_vzipq_v: {
2201    Ops[0] = Builder.CreateBitCast(Ops[0], llvm::PointerType::getUnqual(Ty));
2202    Ops[1] = Builder.CreateBitCast(Ops[1], Ty);
2203    Ops[2] = Builder.CreateBitCast(Ops[2], Ty);
2204    Value *SV = 0;
2205
2206    for (unsigned vi = 0; vi != 2; ++vi) {
2207      SmallVector<Constant*, 16> Indices;
2208      for (unsigned i = 0, e = VTy->getNumElements(); i != e; i += 2) {
2209        Indices.push_back(ConstantInt::get(Int32Ty, (i + vi*e) >> 1));
2210        Indices.push_back(ConstantInt::get(Int32Ty, ((i + vi*e) >> 1)+e));
2211      }
2212      Value *Addr = Builder.CreateConstInBoundsGEP1_32(Ops[0], vi);
2213      SV = llvm::ConstantVector::get(Indices);
2214      SV = Builder.CreateShuffleVector(Ops[1], Ops[2], SV, "vzip");
2215      SV = Builder.CreateStore(SV, Addr);
2216    }
2217    return SV;
2218  }
2219  }
2220}
2221
2222llvm::Value *CodeGenFunction::
2223BuildVector(ArrayRef<llvm::Value*> Ops) {
2224  assert((Ops.size() & (Ops.size() - 1)) == 0 &&
2225         "Not a power-of-two sized vector!");
2226  bool AllConstants = true;
2227  for (unsigned i = 0, e = Ops.size(); i != e && AllConstants; ++i)
2228    AllConstants &= isa<Constant>(Ops[i]);
2229
2230  // If this is a constant vector, create a ConstantVector.
2231  if (AllConstants) {
2232    SmallVector<llvm::Constant*, 16> CstOps;
2233    for (unsigned i = 0, e = Ops.size(); i != e; ++i)
2234      CstOps.push_back(cast<Constant>(Ops[i]));
2235    return llvm::ConstantVector::get(CstOps);
2236  }
2237
2238  // Otherwise, insertelement the values to build the vector.
2239  Value *Result =
2240    llvm::UndefValue::get(llvm::VectorType::get(Ops[0]->getType(), Ops.size()));
2241
2242  for (unsigned i = 0, e = Ops.size(); i != e; ++i)
2243    Result = Builder.CreateInsertElement(Result, Ops[i], Builder.getInt32(i));
2244
2245  return Result;
2246}
2247
2248Value *CodeGenFunction::EmitX86BuiltinExpr(unsigned BuiltinID,
2249                                           const CallExpr *E) {
2250  SmallVector<Value*, 4> Ops;
2251
2252  // Find out if any arguments are required to be integer constant expressions.
2253  unsigned ICEArguments = 0;
2254  ASTContext::GetBuiltinTypeError Error;
2255  getContext().GetBuiltinType(BuiltinID, Error, &ICEArguments);
2256  assert(Error == ASTContext::GE_None && "Should not codegen an error");
2257
2258  for (unsigned i = 0, e = E->getNumArgs(); i != e; i++) {
2259    // If this is a normal argument, just emit it as a scalar.
2260    if ((ICEArguments & (1 << i)) == 0) {
2261      Ops.push_back(EmitScalarExpr(E->getArg(i)));
2262      continue;
2263    }
2264
2265    // If this is required to be a constant, constant fold it so that we know
2266    // that the generated intrinsic gets a ConstantInt.
2267    llvm::APSInt Result;
2268    bool IsConst = E->getArg(i)->isIntegerConstantExpr(Result, getContext());
2269    assert(IsConst && "Constant arg isn't actually constant?"); (void)IsConst;
2270    Ops.push_back(llvm::ConstantInt::get(getLLVMContext(), Result));
2271  }
2272
2273  switch (BuiltinID) {
2274  default: return 0;
2275  case X86::BI__builtin_ia32_vec_init_v8qi:
2276  case X86::BI__builtin_ia32_vec_init_v4hi:
2277  case X86::BI__builtin_ia32_vec_init_v2si:
2278    return Builder.CreateBitCast(BuildVector(Ops),
2279                                 llvm::Type::getX86_MMXTy(getLLVMContext()));
2280  case X86::BI__builtin_ia32_vec_ext_v2si:
2281    return Builder.CreateExtractElement(Ops[0],
2282                                  llvm::ConstantInt::get(Ops[1]->getType(), 0));
2283  case X86::BI__builtin_ia32_ldmxcsr: {
2284    llvm::Type *PtrTy = Int8PtrTy;
2285    Value *One = llvm::ConstantInt::get(Int32Ty, 1);
2286    Value *Tmp = Builder.CreateAlloca(Int32Ty, One);
2287    Builder.CreateStore(Ops[0], Tmp);
2288    return Builder.CreateCall(CGM.getIntrinsic(Intrinsic::x86_sse_ldmxcsr),
2289                              Builder.CreateBitCast(Tmp, PtrTy));
2290  }
2291  case X86::BI__builtin_ia32_stmxcsr: {
2292    llvm::Type *PtrTy = Int8PtrTy;
2293    Value *One = llvm::ConstantInt::get(Int32Ty, 1);
2294    Value *Tmp = Builder.CreateAlloca(Int32Ty, One);
2295    Builder.CreateCall(CGM.getIntrinsic(Intrinsic::x86_sse_stmxcsr),
2296                       Builder.CreateBitCast(Tmp, PtrTy));
2297    return Builder.CreateLoad(Tmp, "stmxcsr");
2298  }
2299  case X86::BI__builtin_ia32_storehps:
2300  case X86::BI__builtin_ia32_storelps: {
2301    llvm::Type *PtrTy = llvm::PointerType::getUnqual(Int64Ty);
2302    llvm::Type *VecTy = llvm::VectorType::get(Int64Ty, 2);
2303
2304    // cast val v2i64
2305    Ops[1] = Builder.CreateBitCast(Ops[1], VecTy, "cast");
2306
2307    // extract (0, 1)
2308    unsigned Index = BuiltinID == X86::BI__builtin_ia32_storelps ? 0 : 1;
2309    llvm::Value *Idx = llvm::ConstantInt::get(Int32Ty, Index);
2310    Ops[1] = Builder.CreateExtractElement(Ops[1], Idx, "extract");
2311
2312    // cast pointer to i64 & store
2313    Ops[0] = Builder.CreateBitCast(Ops[0], PtrTy);
2314    return Builder.CreateStore(Ops[1], Ops[0]);
2315  }
2316  case X86::BI__builtin_ia32_palignr: {
2317    unsigned shiftVal = cast<llvm::ConstantInt>(Ops[2])->getZExtValue();
2318
2319    // If palignr is shifting the pair of input vectors less than 9 bytes,
2320    // emit a shuffle instruction.
2321    if (shiftVal <= 8) {
2322      SmallVector<llvm::Constant*, 8> Indices;
2323      for (unsigned i = 0; i != 8; ++i)
2324        Indices.push_back(llvm::ConstantInt::get(Int32Ty, shiftVal + i));
2325
2326      Value* SV = llvm::ConstantVector::get(Indices);
2327      return Builder.CreateShuffleVector(Ops[1], Ops[0], SV, "palignr");
2328    }
2329
2330    // If palignr is shifting the pair of input vectors more than 8 but less
2331    // than 16 bytes, emit a logical right shift of the destination.
2332    if (shiftVal < 16) {
2333      // MMX has these as 1 x i64 vectors for some odd optimization reasons.
2334      llvm::Type *VecTy = llvm::VectorType::get(Int64Ty, 1);
2335
2336      Ops[0] = Builder.CreateBitCast(Ops[0], VecTy, "cast");
2337      Ops[1] = llvm::ConstantInt::get(VecTy, (shiftVal-8) * 8);
2338
2339      // create i32 constant
2340      llvm::Function *F = CGM.getIntrinsic(Intrinsic::x86_mmx_psrl_q);
2341      return Builder.CreateCall(F, makeArrayRef(&Ops[0], 2), "palignr");
2342    }
2343
2344    // If palignr is shifting the pair of vectors more than 16 bytes, emit zero.
2345    return llvm::Constant::getNullValue(ConvertType(E->getType()));
2346  }
2347  case X86::BI__builtin_ia32_palignr128: {
2348    unsigned shiftVal = cast<llvm::ConstantInt>(Ops[2])->getZExtValue();
2349
2350    // If palignr is shifting the pair of input vectors less than 17 bytes,
2351    // emit a shuffle instruction.
2352    if (shiftVal <= 16) {
2353      SmallVector<llvm::Constant*, 16> Indices;
2354      for (unsigned i = 0; i != 16; ++i)
2355        Indices.push_back(llvm::ConstantInt::get(Int32Ty, shiftVal + i));
2356
2357      Value* SV = llvm::ConstantVector::get(Indices);
2358      return Builder.CreateShuffleVector(Ops[1], Ops[0], SV, "palignr");
2359    }
2360
2361    // If palignr is shifting the pair of input vectors more than 16 but less
2362    // than 32 bytes, emit a logical right shift of the destination.
2363    if (shiftVal < 32) {
2364      llvm::Type *VecTy = llvm::VectorType::get(Int64Ty, 2);
2365
2366      Ops[0] = Builder.CreateBitCast(Ops[0], VecTy, "cast");
2367      Ops[1] = llvm::ConstantInt::get(Int32Ty, (shiftVal-16) * 8);
2368
2369      // create i32 constant
2370      llvm::Function *F = CGM.getIntrinsic(Intrinsic::x86_sse2_psrl_dq);
2371      return Builder.CreateCall(F, makeArrayRef(&Ops[0], 2), "palignr");
2372    }
2373
2374    // If palignr is shifting the pair of vectors more than 32 bytes, emit zero.
2375    return llvm::Constant::getNullValue(ConvertType(E->getType()));
2376  }
2377  case X86::BI__builtin_ia32_palignr256: {
2378    unsigned shiftVal = cast<llvm::ConstantInt>(Ops[2])->getZExtValue();
2379
2380    // If palignr is shifting the pair of input vectors less than 17 bytes,
2381    // emit a shuffle instruction.
2382    if (shiftVal <= 16) {
2383      SmallVector<llvm::Constant*, 32> Indices;
2384      // 256-bit palignr operates on 128-bit lanes so we need to handle that
2385      for (unsigned l = 0; l != 2; ++l) {
2386        unsigned LaneStart = l * 16;
2387        unsigned LaneEnd = (l+1) * 16;
2388        for (unsigned i = 0; i != 16; ++i) {
2389          unsigned Idx = shiftVal + i + LaneStart;
2390          if (Idx >= LaneEnd) Idx += 16; // end of lane, switch operand
2391          Indices.push_back(llvm::ConstantInt::get(Int32Ty, Idx));
2392        }
2393      }
2394
2395      Value* SV = llvm::ConstantVector::get(Indices);
2396      return Builder.CreateShuffleVector(Ops[1], Ops[0], SV, "palignr");
2397    }
2398
2399    // If palignr is shifting the pair of input vectors more than 16 but less
2400    // than 32 bytes, emit a logical right shift of the destination.
2401    if (shiftVal < 32) {
2402      llvm::Type *VecTy = llvm::VectorType::get(Int64Ty, 4);
2403
2404      Ops[0] = Builder.CreateBitCast(Ops[0], VecTy, "cast");
2405      Ops[1] = llvm::ConstantInt::get(Int32Ty, (shiftVal-16) * 8);
2406
2407      // create i32 constant
2408      llvm::Function *F = CGM.getIntrinsic(Intrinsic::x86_avx2_psrl_dq);
2409      return Builder.CreateCall(F, makeArrayRef(&Ops[0], 2), "palignr");
2410    }
2411
2412    // If palignr is shifting the pair of vectors more than 32 bytes, emit zero.
2413    return llvm::Constant::getNullValue(ConvertType(E->getType()));
2414  }
2415  case X86::BI__builtin_ia32_movntps:
2416  case X86::BI__builtin_ia32_movntps256:
2417  case X86::BI__builtin_ia32_movntpd:
2418  case X86::BI__builtin_ia32_movntpd256:
2419  case X86::BI__builtin_ia32_movntdq:
2420  case X86::BI__builtin_ia32_movntdq256:
2421  case X86::BI__builtin_ia32_movnti: {
2422    llvm::MDNode *Node = llvm::MDNode::get(getLLVMContext(),
2423                                           Builder.getInt32(1));
2424
2425    // Convert the type of the pointer to a pointer to the stored type.
2426    Value *BC = Builder.CreateBitCast(Ops[0],
2427                                llvm::PointerType::getUnqual(Ops[1]->getType()),
2428                                      "cast");
2429    StoreInst *SI = Builder.CreateStore(Ops[1], BC);
2430    SI->setMetadata(CGM.getModule().getMDKindID("nontemporal"), Node);
2431    SI->setAlignment(16);
2432    return SI;
2433  }
2434  // 3DNow!
2435  case X86::BI__builtin_ia32_pswapdsf:
2436  case X86::BI__builtin_ia32_pswapdsi: {
2437    const char *name = 0;
2438    Intrinsic::ID ID = Intrinsic::not_intrinsic;
2439    switch(BuiltinID) {
2440    default: llvm_unreachable("Unsupported intrinsic!");
2441    case X86::BI__builtin_ia32_pswapdsf:
2442    case X86::BI__builtin_ia32_pswapdsi:
2443      name = "pswapd";
2444      ID = Intrinsic::x86_3dnowa_pswapd;
2445      break;
2446    }
2447    llvm::Type *MMXTy = llvm::Type::getX86_MMXTy(getLLVMContext());
2448    Ops[0] = Builder.CreateBitCast(Ops[0], MMXTy, "cast");
2449    llvm::Function *F = CGM.getIntrinsic(ID);
2450    return Builder.CreateCall(F, Ops, name);
2451  }
2452  }
2453}
2454
2455
2456Value *CodeGenFunction::EmitHexagonBuiltinExpr(unsigned BuiltinID,
2457                                             const CallExpr *E) {
2458  llvm::SmallVector<Value*, 4> Ops;
2459
2460  for (unsigned i = 0, e = E->getNumArgs(); i != e; i++)
2461    Ops.push_back(EmitScalarExpr(E->getArg(i)));
2462
2463  Intrinsic::ID ID = Intrinsic::not_intrinsic;
2464
2465  switch (BuiltinID) {
2466  default: return 0;
2467
2468// The builtins below are not autogenerated from iset.py.
2469// Make sure you do not overwrite these.
2470
2471  case Hexagon::BI__builtin_SI_to_SXTHI_asrh:
2472    ID = Intrinsic::hexagon_SI_to_SXTHI_asrh; break;
2473
2474  case Hexagon::BI__builtin_circ_ldd:
2475    ID = Intrinsic::hexagon_circ_ldd; break;
2476
2477// The builtins above are not autogenerated from iset.py.
2478// Make sure you do not overwrite these.
2479
2480  case Hexagon::BI__builtin_HEXAGON_C2_cmpeq:
2481    ID = Intrinsic::hexagon_C2_cmpeq; break;
2482
2483  case Hexagon::BI__builtin_HEXAGON_C2_cmpgt:
2484    ID = Intrinsic::hexagon_C2_cmpgt; break;
2485
2486  case Hexagon::BI__builtin_HEXAGON_C2_cmpgtu:
2487    ID = Intrinsic::hexagon_C2_cmpgtu; break;
2488
2489  case Hexagon::BI__builtin_HEXAGON_C2_cmpeqp:
2490    ID = Intrinsic::hexagon_C2_cmpeqp; break;
2491
2492  case Hexagon::BI__builtin_HEXAGON_C2_cmpgtp:
2493    ID = Intrinsic::hexagon_C2_cmpgtp; break;
2494
2495  case Hexagon::BI__builtin_HEXAGON_C2_cmpgtup:
2496    ID = Intrinsic::hexagon_C2_cmpgtup; break;
2497
2498  case Hexagon::BI__builtin_HEXAGON_A4_rcmpeqi:
2499    ID = Intrinsic::hexagon_A4_rcmpeqi; break;
2500
2501  case Hexagon::BI__builtin_HEXAGON_A4_rcmpneqi:
2502    ID = Intrinsic::hexagon_A4_rcmpneqi; break;
2503
2504  case Hexagon::BI__builtin_HEXAGON_A4_rcmpeq:
2505    ID = Intrinsic::hexagon_A4_rcmpeq; break;
2506
2507  case Hexagon::BI__builtin_HEXAGON_A4_rcmpneq:
2508    ID = Intrinsic::hexagon_A4_rcmpneq; break;
2509
2510  case Hexagon::BI__builtin_HEXAGON_C2_bitsset:
2511    ID = Intrinsic::hexagon_C2_bitsset; break;
2512
2513  case Hexagon::BI__builtin_HEXAGON_C2_bitsclr:
2514    ID = Intrinsic::hexagon_C2_bitsclr; break;
2515
2516  case Hexagon::BI__builtin_HEXAGON_C4_nbitsset:
2517    ID = Intrinsic::hexagon_C4_nbitsset; break;
2518
2519  case Hexagon::BI__builtin_HEXAGON_C4_nbitsclr:
2520    ID = Intrinsic::hexagon_C4_nbitsclr; break;
2521
2522  case Hexagon::BI__builtin_HEXAGON_C2_cmpeqi:
2523    ID = Intrinsic::hexagon_C2_cmpeqi; break;
2524
2525  case Hexagon::BI__builtin_HEXAGON_C2_cmpgti:
2526    ID = Intrinsic::hexagon_C2_cmpgti; break;
2527
2528  case Hexagon::BI__builtin_HEXAGON_C2_cmpgtui:
2529    ID = Intrinsic::hexagon_C2_cmpgtui; break;
2530
2531  case Hexagon::BI__builtin_HEXAGON_C2_cmpgei:
2532    ID = Intrinsic::hexagon_C2_cmpgei; break;
2533
2534  case Hexagon::BI__builtin_HEXAGON_C2_cmpgeui:
2535    ID = Intrinsic::hexagon_C2_cmpgeui; break;
2536
2537  case Hexagon::BI__builtin_HEXAGON_C2_cmplt:
2538    ID = Intrinsic::hexagon_C2_cmplt; break;
2539
2540  case Hexagon::BI__builtin_HEXAGON_C2_cmpltu:
2541    ID = Intrinsic::hexagon_C2_cmpltu; break;
2542
2543  case Hexagon::BI__builtin_HEXAGON_C2_bitsclri:
2544    ID = Intrinsic::hexagon_C2_bitsclri; break;
2545
2546  case Hexagon::BI__builtin_HEXAGON_C4_nbitsclri:
2547    ID = Intrinsic::hexagon_C4_nbitsclri; break;
2548
2549  case Hexagon::BI__builtin_HEXAGON_C4_cmpneqi:
2550    ID = Intrinsic::hexagon_C4_cmpneqi; break;
2551
2552  case Hexagon::BI__builtin_HEXAGON_C4_cmpltei:
2553    ID = Intrinsic::hexagon_C4_cmpltei; break;
2554
2555  case Hexagon::BI__builtin_HEXAGON_C4_cmplteui:
2556    ID = Intrinsic::hexagon_C4_cmplteui; break;
2557
2558  case Hexagon::BI__builtin_HEXAGON_C4_cmpneq:
2559    ID = Intrinsic::hexagon_C4_cmpneq; break;
2560
2561  case Hexagon::BI__builtin_HEXAGON_C4_cmplte:
2562    ID = Intrinsic::hexagon_C4_cmplte; break;
2563
2564  case Hexagon::BI__builtin_HEXAGON_C4_cmplteu:
2565    ID = Intrinsic::hexagon_C4_cmplteu; break;
2566
2567  case Hexagon::BI__builtin_HEXAGON_C2_and:
2568    ID = Intrinsic::hexagon_C2_and; break;
2569
2570  case Hexagon::BI__builtin_HEXAGON_C2_or:
2571    ID = Intrinsic::hexagon_C2_or; break;
2572
2573  case Hexagon::BI__builtin_HEXAGON_C2_xor:
2574    ID = Intrinsic::hexagon_C2_xor; break;
2575
2576  case Hexagon::BI__builtin_HEXAGON_C2_andn:
2577    ID = Intrinsic::hexagon_C2_andn; break;
2578
2579  case Hexagon::BI__builtin_HEXAGON_C2_not:
2580    ID = Intrinsic::hexagon_C2_not; break;
2581
2582  case Hexagon::BI__builtin_HEXAGON_C2_orn:
2583    ID = Intrinsic::hexagon_C2_orn; break;
2584
2585  case Hexagon::BI__builtin_HEXAGON_C4_and_and:
2586    ID = Intrinsic::hexagon_C4_and_and; break;
2587
2588  case Hexagon::BI__builtin_HEXAGON_C4_and_or:
2589    ID = Intrinsic::hexagon_C4_and_or; break;
2590
2591  case Hexagon::BI__builtin_HEXAGON_C4_or_and:
2592    ID = Intrinsic::hexagon_C4_or_and; break;
2593
2594  case Hexagon::BI__builtin_HEXAGON_C4_or_or:
2595    ID = Intrinsic::hexagon_C4_or_or; break;
2596
2597  case Hexagon::BI__builtin_HEXAGON_C4_and_andn:
2598    ID = Intrinsic::hexagon_C4_and_andn; break;
2599
2600  case Hexagon::BI__builtin_HEXAGON_C4_and_orn:
2601    ID = Intrinsic::hexagon_C4_and_orn; break;
2602
2603  case Hexagon::BI__builtin_HEXAGON_C4_or_andn:
2604    ID = Intrinsic::hexagon_C4_or_andn; break;
2605
2606  case Hexagon::BI__builtin_HEXAGON_C4_or_orn:
2607    ID = Intrinsic::hexagon_C4_or_orn; break;
2608
2609  case Hexagon::BI__builtin_HEXAGON_C2_pxfer_map:
2610    ID = Intrinsic::hexagon_C2_pxfer_map; break;
2611
2612  case Hexagon::BI__builtin_HEXAGON_C2_any8:
2613    ID = Intrinsic::hexagon_C2_any8; break;
2614
2615  case Hexagon::BI__builtin_HEXAGON_C2_all8:
2616    ID = Intrinsic::hexagon_C2_all8; break;
2617
2618  case Hexagon::BI__builtin_HEXAGON_C2_vitpack:
2619    ID = Intrinsic::hexagon_C2_vitpack; break;
2620
2621  case Hexagon::BI__builtin_HEXAGON_C2_mux:
2622    ID = Intrinsic::hexagon_C2_mux; break;
2623
2624  case Hexagon::BI__builtin_HEXAGON_C2_muxii:
2625    ID = Intrinsic::hexagon_C2_muxii; break;
2626
2627  case Hexagon::BI__builtin_HEXAGON_C2_muxir:
2628    ID = Intrinsic::hexagon_C2_muxir; break;
2629
2630  case Hexagon::BI__builtin_HEXAGON_C2_muxri:
2631    ID = Intrinsic::hexagon_C2_muxri; break;
2632
2633  case Hexagon::BI__builtin_HEXAGON_C2_vmux:
2634    ID = Intrinsic::hexagon_C2_vmux; break;
2635
2636  case Hexagon::BI__builtin_HEXAGON_C2_mask:
2637    ID = Intrinsic::hexagon_C2_mask; break;
2638
2639  case Hexagon::BI__builtin_HEXAGON_A2_vcmpbeq:
2640    ID = Intrinsic::hexagon_A2_vcmpbeq; break;
2641
2642  case Hexagon::BI__builtin_HEXAGON_A4_vcmpbeqi:
2643    ID = Intrinsic::hexagon_A4_vcmpbeqi; break;
2644
2645  case Hexagon::BI__builtin_HEXAGON_A4_vcmpbeq_any:
2646    ID = Intrinsic::hexagon_A4_vcmpbeq_any; break;
2647
2648  case Hexagon::BI__builtin_HEXAGON_A2_vcmpbgtu:
2649    ID = Intrinsic::hexagon_A2_vcmpbgtu; break;
2650
2651  case Hexagon::BI__builtin_HEXAGON_A4_vcmpbgtui:
2652    ID = Intrinsic::hexagon_A4_vcmpbgtui; break;
2653
2654  case Hexagon::BI__builtin_HEXAGON_A4_vcmpbgt:
2655    ID = Intrinsic::hexagon_A4_vcmpbgt; break;
2656
2657  case Hexagon::BI__builtin_HEXAGON_A4_vcmpbgti:
2658    ID = Intrinsic::hexagon_A4_vcmpbgti; break;
2659
2660  case Hexagon::BI__builtin_HEXAGON_A4_cmpbeq:
2661    ID = Intrinsic::hexagon_A4_cmpbeq; break;
2662
2663  case Hexagon::BI__builtin_HEXAGON_A4_cmpbeqi:
2664    ID = Intrinsic::hexagon_A4_cmpbeqi; break;
2665
2666  case Hexagon::BI__builtin_HEXAGON_A4_cmpbgtu:
2667    ID = Intrinsic::hexagon_A4_cmpbgtu; break;
2668
2669  case Hexagon::BI__builtin_HEXAGON_A4_cmpbgtui:
2670    ID = Intrinsic::hexagon_A4_cmpbgtui; break;
2671
2672  case Hexagon::BI__builtin_HEXAGON_A4_cmpbgt:
2673    ID = Intrinsic::hexagon_A4_cmpbgt; break;
2674
2675  case Hexagon::BI__builtin_HEXAGON_A4_cmpbgti:
2676    ID = Intrinsic::hexagon_A4_cmpbgti; break;
2677
2678  case Hexagon::BI__builtin_HEXAGON_A2_vcmpheq:
2679    ID = Intrinsic::hexagon_A2_vcmpheq; break;
2680
2681  case Hexagon::BI__builtin_HEXAGON_A2_vcmphgt:
2682    ID = Intrinsic::hexagon_A2_vcmphgt; break;
2683
2684  case Hexagon::BI__builtin_HEXAGON_A2_vcmphgtu:
2685    ID = Intrinsic::hexagon_A2_vcmphgtu; break;
2686
2687  case Hexagon::BI__builtin_HEXAGON_A4_vcmpheqi:
2688    ID = Intrinsic::hexagon_A4_vcmpheqi; break;
2689
2690  case Hexagon::BI__builtin_HEXAGON_A4_vcmphgti:
2691    ID = Intrinsic::hexagon_A4_vcmphgti; break;
2692
2693  case Hexagon::BI__builtin_HEXAGON_A4_vcmphgtui:
2694    ID = Intrinsic::hexagon_A4_vcmphgtui; break;
2695
2696  case Hexagon::BI__builtin_HEXAGON_A4_cmpheq:
2697    ID = Intrinsic::hexagon_A4_cmpheq; break;
2698
2699  case Hexagon::BI__builtin_HEXAGON_A4_cmphgt:
2700    ID = Intrinsic::hexagon_A4_cmphgt; break;
2701
2702  case Hexagon::BI__builtin_HEXAGON_A4_cmphgtu:
2703    ID = Intrinsic::hexagon_A4_cmphgtu; break;
2704
2705  case Hexagon::BI__builtin_HEXAGON_A4_cmpheqi:
2706    ID = Intrinsic::hexagon_A4_cmpheqi; break;
2707
2708  case Hexagon::BI__builtin_HEXAGON_A4_cmphgti:
2709    ID = Intrinsic::hexagon_A4_cmphgti; break;
2710
2711  case Hexagon::BI__builtin_HEXAGON_A4_cmphgtui:
2712    ID = Intrinsic::hexagon_A4_cmphgtui; break;
2713
2714  case Hexagon::BI__builtin_HEXAGON_A2_vcmpweq:
2715    ID = Intrinsic::hexagon_A2_vcmpweq; break;
2716
2717  case Hexagon::BI__builtin_HEXAGON_A2_vcmpwgt:
2718    ID = Intrinsic::hexagon_A2_vcmpwgt; break;
2719
2720  case Hexagon::BI__builtin_HEXAGON_A2_vcmpwgtu:
2721    ID = Intrinsic::hexagon_A2_vcmpwgtu; break;
2722
2723  case Hexagon::BI__builtin_HEXAGON_A4_vcmpweqi:
2724    ID = Intrinsic::hexagon_A4_vcmpweqi; break;
2725
2726  case Hexagon::BI__builtin_HEXAGON_A4_vcmpwgti:
2727    ID = Intrinsic::hexagon_A4_vcmpwgti; break;
2728
2729  case Hexagon::BI__builtin_HEXAGON_A4_vcmpwgtui:
2730    ID = Intrinsic::hexagon_A4_vcmpwgtui; break;
2731
2732  case Hexagon::BI__builtin_HEXAGON_A4_boundscheck:
2733    ID = Intrinsic::hexagon_A4_boundscheck; break;
2734
2735  case Hexagon::BI__builtin_HEXAGON_A4_tlbmatch:
2736    ID = Intrinsic::hexagon_A4_tlbmatch; break;
2737
2738  case Hexagon::BI__builtin_HEXAGON_C2_tfrpr:
2739    ID = Intrinsic::hexagon_C2_tfrpr; break;
2740
2741  case Hexagon::BI__builtin_HEXAGON_C2_tfrrp:
2742    ID = Intrinsic::hexagon_C2_tfrrp; break;
2743
2744  case Hexagon::BI__builtin_HEXAGON_C4_fastcorner9:
2745    ID = Intrinsic::hexagon_C4_fastcorner9; break;
2746
2747  case Hexagon::BI__builtin_HEXAGON_C4_fastcorner9_not:
2748    ID = Intrinsic::hexagon_C4_fastcorner9_not; break;
2749
2750  case Hexagon::BI__builtin_HEXAGON_M2_mpy_acc_hh_s0:
2751    ID = Intrinsic::hexagon_M2_mpy_acc_hh_s0; break;
2752
2753  case Hexagon::BI__builtin_HEXAGON_M2_mpy_acc_hh_s1:
2754    ID = Intrinsic::hexagon_M2_mpy_acc_hh_s1; break;
2755
2756  case Hexagon::BI__builtin_HEXAGON_M2_mpy_acc_hl_s0:
2757    ID = Intrinsic::hexagon_M2_mpy_acc_hl_s0; break;
2758
2759  case Hexagon::BI__builtin_HEXAGON_M2_mpy_acc_hl_s1:
2760    ID = Intrinsic::hexagon_M2_mpy_acc_hl_s1; break;
2761
2762  case Hexagon::BI__builtin_HEXAGON_M2_mpy_acc_lh_s0:
2763    ID = Intrinsic::hexagon_M2_mpy_acc_lh_s0; break;
2764
2765  case Hexagon::BI__builtin_HEXAGON_M2_mpy_acc_lh_s1:
2766    ID = Intrinsic::hexagon_M2_mpy_acc_lh_s1; break;
2767
2768  case Hexagon::BI__builtin_HEXAGON_M2_mpy_acc_ll_s0:
2769    ID = Intrinsic::hexagon_M2_mpy_acc_ll_s0; break;
2770
2771  case Hexagon::BI__builtin_HEXAGON_M2_mpy_acc_ll_s1:
2772    ID = Intrinsic::hexagon_M2_mpy_acc_ll_s1; break;
2773
2774  case Hexagon::BI__builtin_HEXAGON_M2_mpy_nac_hh_s0:
2775    ID = Intrinsic::hexagon_M2_mpy_nac_hh_s0; break;
2776
2777  case Hexagon::BI__builtin_HEXAGON_M2_mpy_nac_hh_s1:
2778    ID = Intrinsic::hexagon_M2_mpy_nac_hh_s1; break;
2779
2780  case Hexagon::BI__builtin_HEXAGON_M2_mpy_nac_hl_s0:
2781    ID = Intrinsic::hexagon_M2_mpy_nac_hl_s0; break;
2782
2783  case Hexagon::BI__builtin_HEXAGON_M2_mpy_nac_hl_s1:
2784    ID = Intrinsic::hexagon_M2_mpy_nac_hl_s1; break;
2785
2786  case Hexagon::BI__builtin_HEXAGON_M2_mpy_nac_lh_s0:
2787    ID = Intrinsic::hexagon_M2_mpy_nac_lh_s0; break;
2788
2789  case Hexagon::BI__builtin_HEXAGON_M2_mpy_nac_lh_s1:
2790    ID = Intrinsic::hexagon_M2_mpy_nac_lh_s1; break;
2791
2792  case Hexagon::BI__builtin_HEXAGON_M2_mpy_nac_ll_s0:
2793    ID = Intrinsic::hexagon_M2_mpy_nac_ll_s0; break;
2794
2795  case Hexagon::BI__builtin_HEXAGON_M2_mpy_nac_ll_s1:
2796    ID = Intrinsic::hexagon_M2_mpy_nac_ll_s1; break;
2797
2798  case Hexagon::BI__builtin_HEXAGON_M2_mpy_acc_sat_hh_s0:
2799    ID = Intrinsic::hexagon_M2_mpy_acc_sat_hh_s0; break;
2800
2801  case Hexagon::BI__builtin_HEXAGON_M2_mpy_acc_sat_hh_s1:
2802    ID = Intrinsic::hexagon_M2_mpy_acc_sat_hh_s1; break;
2803
2804  case Hexagon::BI__builtin_HEXAGON_M2_mpy_acc_sat_hl_s0:
2805    ID = Intrinsic::hexagon_M2_mpy_acc_sat_hl_s0; break;
2806
2807  case Hexagon::BI__builtin_HEXAGON_M2_mpy_acc_sat_hl_s1:
2808    ID = Intrinsic::hexagon_M2_mpy_acc_sat_hl_s1; break;
2809
2810  case Hexagon::BI__builtin_HEXAGON_M2_mpy_acc_sat_lh_s0:
2811    ID = Intrinsic::hexagon_M2_mpy_acc_sat_lh_s0; break;
2812
2813  case Hexagon::BI__builtin_HEXAGON_M2_mpy_acc_sat_lh_s1:
2814    ID = Intrinsic::hexagon_M2_mpy_acc_sat_lh_s1; break;
2815
2816  case Hexagon::BI__builtin_HEXAGON_M2_mpy_acc_sat_ll_s0:
2817    ID = Intrinsic::hexagon_M2_mpy_acc_sat_ll_s0; break;
2818
2819  case Hexagon::BI__builtin_HEXAGON_M2_mpy_acc_sat_ll_s1:
2820    ID = Intrinsic::hexagon_M2_mpy_acc_sat_ll_s1; break;
2821
2822  case Hexagon::BI__builtin_HEXAGON_M2_mpy_nac_sat_hh_s0:
2823    ID = Intrinsic::hexagon_M2_mpy_nac_sat_hh_s0; break;
2824
2825  case Hexagon::BI__builtin_HEXAGON_M2_mpy_nac_sat_hh_s1:
2826    ID = Intrinsic::hexagon_M2_mpy_nac_sat_hh_s1; break;
2827
2828  case Hexagon::BI__builtin_HEXAGON_M2_mpy_nac_sat_hl_s0:
2829    ID = Intrinsic::hexagon_M2_mpy_nac_sat_hl_s0; break;
2830
2831  case Hexagon::BI__builtin_HEXAGON_M2_mpy_nac_sat_hl_s1:
2832    ID = Intrinsic::hexagon_M2_mpy_nac_sat_hl_s1; break;
2833
2834  case Hexagon::BI__builtin_HEXAGON_M2_mpy_nac_sat_lh_s0:
2835    ID = Intrinsic::hexagon_M2_mpy_nac_sat_lh_s0; break;
2836
2837  case Hexagon::BI__builtin_HEXAGON_M2_mpy_nac_sat_lh_s1:
2838    ID = Intrinsic::hexagon_M2_mpy_nac_sat_lh_s1; break;
2839
2840  case Hexagon::BI__builtin_HEXAGON_M2_mpy_nac_sat_ll_s0:
2841    ID = Intrinsic::hexagon_M2_mpy_nac_sat_ll_s0; break;
2842
2843  case Hexagon::BI__builtin_HEXAGON_M2_mpy_nac_sat_ll_s1:
2844    ID = Intrinsic::hexagon_M2_mpy_nac_sat_ll_s1; break;
2845
2846  case Hexagon::BI__builtin_HEXAGON_M2_mpy_hh_s0:
2847    ID = Intrinsic::hexagon_M2_mpy_hh_s0; break;
2848
2849  case Hexagon::BI__builtin_HEXAGON_M2_mpy_hh_s1:
2850    ID = Intrinsic::hexagon_M2_mpy_hh_s1; break;
2851
2852  case Hexagon::BI__builtin_HEXAGON_M2_mpy_hl_s0:
2853    ID = Intrinsic::hexagon_M2_mpy_hl_s0; break;
2854
2855  case Hexagon::BI__builtin_HEXAGON_M2_mpy_hl_s1:
2856    ID = Intrinsic::hexagon_M2_mpy_hl_s1; break;
2857
2858  case Hexagon::BI__builtin_HEXAGON_M2_mpy_lh_s0:
2859    ID = Intrinsic::hexagon_M2_mpy_lh_s0; break;
2860
2861  case Hexagon::BI__builtin_HEXAGON_M2_mpy_lh_s1:
2862    ID = Intrinsic::hexagon_M2_mpy_lh_s1; break;
2863
2864  case Hexagon::BI__builtin_HEXAGON_M2_mpy_ll_s0:
2865    ID = Intrinsic::hexagon_M2_mpy_ll_s0; break;
2866
2867  case Hexagon::BI__builtin_HEXAGON_M2_mpy_ll_s1:
2868    ID = Intrinsic::hexagon_M2_mpy_ll_s1; break;
2869
2870  case Hexagon::BI__builtin_HEXAGON_M2_mpy_sat_hh_s0:
2871    ID = Intrinsic::hexagon_M2_mpy_sat_hh_s0; break;
2872
2873  case Hexagon::BI__builtin_HEXAGON_M2_mpy_sat_hh_s1:
2874    ID = Intrinsic::hexagon_M2_mpy_sat_hh_s1; break;
2875
2876  case Hexagon::BI__builtin_HEXAGON_M2_mpy_sat_hl_s0:
2877    ID = Intrinsic::hexagon_M2_mpy_sat_hl_s0; break;
2878
2879  case Hexagon::BI__builtin_HEXAGON_M2_mpy_sat_hl_s1:
2880    ID = Intrinsic::hexagon_M2_mpy_sat_hl_s1; break;
2881
2882  case Hexagon::BI__builtin_HEXAGON_M2_mpy_sat_lh_s0:
2883    ID = Intrinsic::hexagon_M2_mpy_sat_lh_s0; break;
2884
2885  case Hexagon::BI__builtin_HEXAGON_M2_mpy_sat_lh_s1:
2886    ID = Intrinsic::hexagon_M2_mpy_sat_lh_s1; break;
2887
2888  case Hexagon::BI__builtin_HEXAGON_M2_mpy_sat_ll_s0:
2889    ID = Intrinsic::hexagon_M2_mpy_sat_ll_s0; break;
2890
2891  case Hexagon::BI__builtin_HEXAGON_M2_mpy_sat_ll_s1:
2892    ID = Intrinsic::hexagon_M2_mpy_sat_ll_s1; break;
2893
2894  case Hexagon::BI__builtin_HEXAGON_M2_mpy_rnd_hh_s0:
2895    ID = Intrinsic::hexagon_M2_mpy_rnd_hh_s0; break;
2896
2897  case Hexagon::BI__builtin_HEXAGON_M2_mpy_rnd_hh_s1:
2898    ID = Intrinsic::hexagon_M2_mpy_rnd_hh_s1; break;
2899
2900  case Hexagon::BI__builtin_HEXAGON_M2_mpy_rnd_hl_s0:
2901    ID = Intrinsic::hexagon_M2_mpy_rnd_hl_s0; break;
2902
2903  case Hexagon::BI__builtin_HEXAGON_M2_mpy_rnd_hl_s1:
2904    ID = Intrinsic::hexagon_M2_mpy_rnd_hl_s1; break;
2905
2906  case Hexagon::BI__builtin_HEXAGON_M2_mpy_rnd_lh_s0:
2907    ID = Intrinsic::hexagon_M2_mpy_rnd_lh_s0; break;
2908
2909  case Hexagon::BI__builtin_HEXAGON_M2_mpy_rnd_lh_s1:
2910    ID = Intrinsic::hexagon_M2_mpy_rnd_lh_s1; break;
2911
2912  case Hexagon::BI__builtin_HEXAGON_M2_mpy_rnd_ll_s0:
2913    ID = Intrinsic::hexagon_M2_mpy_rnd_ll_s0; break;
2914
2915  case Hexagon::BI__builtin_HEXAGON_M2_mpy_rnd_ll_s1:
2916    ID = Intrinsic::hexagon_M2_mpy_rnd_ll_s1; break;
2917
2918  case Hexagon::BI__builtin_HEXAGON_M2_mpy_sat_rnd_hh_s0:
2919    ID = Intrinsic::hexagon_M2_mpy_sat_rnd_hh_s0; break;
2920
2921  case Hexagon::BI__builtin_HEXAGON_M2_mpy_sat_rnd_hh_s1:
2922    ID = Intrinsic::hexagon_M2_mpy_sat_rnd_hh_s1; break;
2923
2924  case Hexagon::BI__builtin_HEXAGON_M2_mpy_sat_rnd_hl_s0:
2925    ID = Intrinsic::hexagon_M2_mpy_sat_rnd_hl_s0; break;
2926
2927  case Hexagon::BI__builtin_HEXAGON_M2_mpy_sat_rnd_hl_s1:
2928    ID = Intrinsic::hexagon_M2_mpy_sat_rnd_hl_s1; break;
2929
2930  case Hexagon::BI__builtin_HEXAGON_M2_mpy_sat_rnd_lh_s0:
2931    ID = Intrinsic::hexagon_M2_mpy_sat_rnd_lh_s0; break;
2932
2933  case Hexagon::BI__builtin_HEXAGON_M2_mpy_sat_rnd_lh_s1:
2934    ID = Intrinsic::hexagon_M2_mpy_sat_rnd_lh_s1; break;
2935
2936  case Hexagon::BI__builtin_HEXAGON_M2_mpy_sat_rnd_ll_s0:
2937    ID = Intrinsic::hexagon_M2_mpy_sat_rnd_ll_s0; break;
2938
2939  case Hexagon::BI__builtin_HEXAGON_M2_mpy_sat_rnd_ll_s1:
2940    ID = Intrinsic::hexagon_M2_mpy_sat_rnd_ll_s1; break;
2941
2942  case Hexagon::BI__builtin_HEXAGON_M2_mpyd_acc_hh_s0:
2943    ID = Intrinsic::hexagon_M2_mpyd_acc_hh_s0; break;
2944
2945  case Hexagon::BI__builtin_HEXAGON_M2_mpyd_acc_hh_s1:
2946    ID = Intrinsic::hexagon_M2_mpyd_acc_hh_s1; break;
2947
2948  case Hexagon::BI__builtin_HEXAGON_M2_mpyd_acc_hl_s0:
2949    ID = Intrinsic::hexagon_M2_mpyd_acc_hl_s0; break;
2950
2951  case Hexagon::BI__builtin_HEXAGON_M2_mpyd_acc_hl_s1:
2952    ID = Intrinsic::hexagon_M2_mpyd_acc_hl_s1; break;
2953
2954  case Hexagon::BI__builtin_HEXAGON_M2_mpyd_acc_lh_s0:
2955    ID = Intrinsic::hexagon_M2_mpyd_acc_lh_s0; break;
2956
2957  case Hexagon::BI__builtin_HEXAGON_M2_mpyd_acc_lh_s1:
2958    ID = Intrinsic::hexagon_M2_mpyd_acc_lh_s1; break;
2959
2960  case Hexagon::BI__builtin_HEXAGON_M2_mpyd_acc_ll_s0:
2961    ID = Intrinsic::hexagon_M2_mpyd_acc_ll_s0; break;
2962
2963  case Hexagon::BI__builtin_HEXAGON_M2_mpyd_acc_ll_s1:
2964    ID = Intrinsic::hexagon_M2_mpyd_acc_ll_s1; break;
2965
2966  case Hexagon::BI__builtin_HEXAGON_M2_mpyd_nac_hh_s0:
2967    ID = Intrinsic::hexagon_M2_mpyd_nac_hh_s0; break;
2968
2969  case Hexagon::BI__builtin_HEXAGON_M2_mpyd_nac_hh_s1:
2970    ID = Intrinsic::hexagon_M2_mpyd_nac_hh_s1; break;
2971
2972  case Hexagon::BI__builtin_HEXAGON_M2_mpyd_nac_hl_s0:
2973    ID = Intrinsic::hexagon_M2_mpyd_nac_hl_s0; break;
2974
2975  case Hexagon::BI__builtin_HEXAGON_M2_mpyd_nac_hl_s1:
2976    ID = Intrinsic::hexagon_M2_mpyd_nac_hl_s1; break;
2977
2978  case Hexagon::BI__builtin_HEXAGON_M2_mpyd_nac_lh_s0:
2979    ID = Intrinsic::hexagon_M2_mpyd_nac_lh_s0; break;
2980
2981  case Hexagon::BI__builtin_HEXAGON_M2_mpyd_nac_lh_s1:
2982    ID = Intrinsic::hexagon_M2_mpyd_nac_lh_s1; break;
2983
2984  case Hexagon::BI__builtin_HEXAGON_M2_mpyd_nac_ll_s0:
2985    ID = Intrinsic::hexagon_M2_mpyd_nac_ll_s0; break;
2986
2987  case Hexagon::BI__builtin_HEXAGON_M2_mpyd_nac_ll_s1:
2988    ID = Intrinsic::hexagon_M2_mpyd_nac_ll_s1; break;
2989
2990  case Hexagon::BI__builtin_HEXAGON_M2_mpyd_hh_s0:
2991    ID = Intrinsic::hexagon_M2_mpyd_hh_s0; break;
2992
2993  case Hexagon::BI__builtin_HEXAGON_M2_mpyd_hh_s1:
2994    ID = Intrinsic::hexagon_M2_mpyd_hh_s1; break;
2995
2996  case Hexagon::BI__builtin_HEXAGON_M2_mpyd_hl_s0:
2997    ID = Intrinsic::hexagon_M2_mpyd_hl_s0; break;
2998
2999  case Hexagon::BI__builtin_HEXAGON_M2_mpyd_hl_s1:
3000    ID = Intrinsic::hexagon_M2_mpyd_hl_s1; break;
3001
3002  case Hexagon::BI__builtin_HEXAGON_M2_mpyd_lh_s0:
3003    ID = Intrinsic::hexagon_M2_mpyd_lh_s0; break;
3004
3005  case Hexagon::BI__builtin_HEXAGON_M2_mpyd_lh_s1:
3006    ID = Intrinsic::hexagon_M2_mpyd_lh_s1; break;
3007
3008  case Hexagon::BI__builtin_HEXAGON_M2_mpyd_ll_s0:
3009    ID = Intrinsic::hexagon_M2_mpyd_ll_s0; break;
3010
3011  case Hexagon::BI__builtin_HEXAGON_M2_mpyd_ll_s1:
3012    ID = Intrinsic::hexagon_M2_mpyd_ll_s1; break;
3013
3014  case Hexagon::BI__builtin_HEXAGON_M2_mpyd_rnd_hh_s0:
3015    ID = Intrinsic::hexagon_M2_mpyd_rnd_hh_s0; break;
3016
3017  case Hexagon::BI__builtin_HEXAGON_M2_mpyd_rnd_hh_s1:
3018    ID = Intrinsic::hexagon_M2_mpyd_rnd_hh_s1; break;
3019
3020  case Hexagon::BI__builtin_HEXAGON_M2_mpyd_rnd_hl_s0:
3021    ID = Intrinsic::hexagon_M2_mpyd_rnd_hl_s0; break;
3022
3023  case Hexagon::BI__builtin_HEXAGON_M2_mpyd_rnd_hl_s1:
3024    ID = Intrinsic::hexagon_M2_mpyd_rnd_hl_s1; break;
3025
3026  case Hexagon::BI__builtin_HEXAGON_M2_mpyd_rnd_lh_s0:
3027    ID = Intrinsic::hexagon_M2_mpyd_rnd_lh_s0; break;
3028
3029  case Hexagon::BI__builtin_HEXAGON_M2_mpyd_rnd_lh_s1:
3030    ID = Intrinsic::hexagon_M2_mpyd_rnd_lh_s1; break;
3031
3032  case Hexagon::BI__builtin_HEXAGON_M2_mpyd_rnd_ll_s0:
3033    ID = Intrinsic::hexagon_M2_mpyd_rnd_ll_s0; break;
3034
3035  case Hexagon::BI__builtin_HEXAGON_M2_mpyd_rnd_ll_s1:
3036    ID = Intrinsic::hexagon_M2_mpyd_rnd_ll_s1; break;
3037
3038  case Hexagon::BI__builtin_HEXAGON_M2_mpyu_acc_hh_s0:
3039    ID = Intrinsic::hexagon_M2_mpyu_acc_hh_s0; break;
3040
3041  case Hexagon::BI__builtin_HEXAGON_M2_mpyu_acc_hh_s1:
3042    ID = Intrinsic::hexagon_M2_mpyu_acc_hh_s1; break;
3043
3044  case Hexagon::BI__builtin_HEXAGON_M2_mpyu_acc_hl_s0:
3045    ID = Intrinsic::hexagon_M2_mpyu_acc_hl_s0; break;
3046
3047  case Hexagon::BI__builtin_HEXAGON_M2_mpyu_acc_hl_s1:
3048    ID = Intrinsic::hexagon_M2_mpyu_acc_hl_s1; break;
3049
3050  case Hexagon::BI__builtin_HEXAGON_M2_mpyu_acc_lh_s0:
3051    ID = Intrinsic::hexagon_M2_mpyu_acc_lh_s0; break;
3052
3053  case Hexagon::BI__builtin_HEXAGON_M2_mpyu_acc_lh_s1:
3054    ID = Intrinsic::hexagon_M2_mpyu_acc_lh_s1; break;
3055
3056  case Hexagon::BI__builtin_HEXAGON_M2_mpyu_acc_ll_s0:
3057    ID = Intrinsic::hexagon_M2_mpyu_acc_ll_s0; break;
3058
3059  case Hexagon::BI__builtin_HEXAGON_M2_mpyu_acc_ll_s1:
3060    ID = Intrinsic::hexagon_M2_mpyu_acc_ll_s1; break;
3061
3062  case Hexagon::BI__builtin_HEXAGON_M2_mpyu_nac_hh_s0:
3063    ID = Intrinsic::hexagon_M2_mpyu_nac_hh_s0; break;
3064
3065  case Hexagon::BI__builtin_HEXAGON_M2_mpyu_nac_hh_s1:
3066    ID = Intrinsic::hexagon_M2_mpyu_nac_hh_s1; break;
3067
3068  case Hexagon::BI__builtin_HEXAGON_M2_mpyu_nac_hl_s0:
3069    ID = Intrinsic::hexagon_M2_mpyu_nac_hl_s0; break;
3070
3071  case Hexagon::BI__builtin_HEXAGON_M2_mpyu_nac_hl_s1:
3072    ID = Intrinsic::hexagon_M2_mpyu_nac_hl_s1; break;
3073
3074  case Hexagon::BI__builtin_HEXAGON_M2_mpyu_nac_lh_s0:
3075    ID = Intrinsic::hexagon_M2_mpyu_nac_lh_s0; break;
3076
3077  case Hexagon::BI__builtin_HEXAGON_M2_mpyu_nac_lh_s1:
3078    ID = Intrinsic::hexagon_M2_mpyu_nac_lh_s1; break;
3079
3080  case Hexagon::BI__builtin_HEXAGON_M2_mpyu_nac_ll_s0:
3081    ID = Intrinsic::hexagon_M2_mpyu_nac_ll_s0; break;
3082
3083  case Hexagon::BI__builtin_HEXAGON_M2_mpyu_nac_ll_s1:
3084    ID = Intrinsic::hexagon_M2_mpyu_nac_ll_s1; break;
3085
3086  case Hexagon::BI__builtin_HEXAGON_M2_mpyu_hh_s0:
3087    ID = Intrinsic::hexagon_M2_mpyu_hh_s0; break;
3088
3089  case Hexagon::BI__builtin_HEXAGON_M2_mpyu_hh_s1:
3090    ID = Intrinsic::hexagon_M2_mpyu_hh_s1; break;
3091
3092  case Hexagon::BI__builtin_HEXAGON_M2_mpyu_hl_s0:
3093    ID = Intrinsic::hexagon_M2_mpyu_hl_s0; break;
3094
3095  case Hexagon::BI__builtin_HEXAGON_M2_mpyu_hl_s1:
3096    ID = Intrinsic::hexagon_M2_mpyu_hl_s1; break;
3097
3098  case Hexagon::BI__builtin_HEXAGON_M2_mpyu_lh_s0:
3099    ID = Intrinsic::hexagon_M2_mpyu_lh_s0; break;
3100
3101  case Hexagon::BI__builtin_HEXAGON_M2_mpyu_lh_s1:
3102    ID = Intrinsic::hexagon_M2_mpyu_lh_s1; break;
3103
3104  case Hexagon::BI__builtin_HEXAGON_M2_mpyu_ll_s0:
3105    ID = Intrinsic::hexagon_M2_mpyu_ll_s0; break;
3106
3107  case Hexagon::BI__builtin_HEXAGON_M2_mpyu_ll_s1:
3108    ID = Intrinsic::hexagon_M2_mpyu_ll_s1; break;
3109
3110  case Hexagon::BI__builtin_HEXAGON_M2_mpyud_acc_hh_s0:
3111    ID = Intrinsic::hexagon_M2_mpyud_acc_hh_s0; break;
3112
3113  case Hexagon::BI__builtin_HEXAGON_M2_mpyud_acc_hh_s1:
3114    ID = Intrinsic::hexagon_M2_mpyud_acc_hh_s1; break;
3115
3116  case Hexagon::BI__builtin_HEXAGON_M2_mpyud_acc_hl_s0:
3117    ID = Intrinsic::hexagon_M2_mpyud_acc_hl_s0; break;
3118
3119  case Hexagon::BI__builtin_HEXAGON_M2_mpyud_acc_hl_s1:
3120    ID = Intrinsic::hexagon_M2_mpyud_acc_hl_s1; break;
3121
3122  case Hexagon::BI__builtin_HEXAGON_M2_mpyud_acc_lh_s0:
3123    ID = Intrinsic::hexagon_M2_mpyud_acc_lh_s0; break;
3124
3125  case Hexagon::BI__builtin_HEXAGON_M2_mpyud_acc_lh_s1:
3126    ID = Intrinsic::hexagon_M2_mpyud_acc_lh_s1; break;
3127
3128  case Hexagon::BI__builtin_HEXAGON_M2_mpyud_acc_ll_s0:
3129    ID = Intrinsic::hexagon_M2_mpyud_acc_ll_s0; break;
3130
3131  case Hexagon::BI__builtin_HEXAGON_M2_mpyud_acc_ll_s1:
3132    ID = Intrinsic::hexagon_M2_mpyud_acc_ll_s1; break;
3133
3134  case Hexagon::BI__builtin_HEXAGON_M2_mpyud_nac_hh_s0:
3135    ID = Intrinsic::hexagon_M2_mpyud_nac_hh_s0; break;
3136
3137  case Hexagon::BI__builtin_HEXAGON_M2_mpyud_nac_hh_s1:
3138    ID = Intrinsic::hexagon_M2_mpyud_nac_hh_s1; break;
3139
3140  case Hexagon::BI__builtin_HEXAGON_M2_mpyud_nac_hl_s0:
3141    ID = Intrinsic::hexagon_M2_mpyud_nac_hl_s0; break;
3142
3143  case Hexagon::BI__builtin_HEXAGON_M2_mpyud_nac_hl_s1:
3144    ID = Intrinsic::hexagon_M2_mpyud_nac_hl_s1; break;
3145
3146  case Hexagon::BI__builtin_HEXAGON_M2_mpyud_nac_lh_s0:
3147    ID = Intrinsic::hexagon_M2_mpyud_nac_lh_s0; break;
3148
3149  case Hexagon::BI__builtin_HEXAGON_M2_mpyud_nac_lh_s1:
3150    ID = Intrinsic::hexagon_M2_mpyud_nac_lh_s1; break;
3151
3152  case Hexagon::BI__builtin_HEXAGON_M2_mpyud_nac_ll_s0:
3153    ID = Intrinsic::hexagon_M2_mpyud_nac_ll_s0; break;
3154
3155  case Hexagon::BI__builtin_HEXAGON_M2_mpyud_nac_ll_s1:
3156    ID = Intrinsic::hexagon_M2_mpyud_nac_ll_s1; break;
3157
3158  case Hexagon::BI__builtin_HEXAGON_M2_mpyud_hh_s0:
3159    ID = Intrinsic::hexagon_M2_mpyud_hh_s0; break;
3160
3161  case Hexagon::BI__builtin_HEXAGON_M2_mpyud_hh_s1:
3162    ID = Intrinsic::hexagon_M2_mpyud_hh_s1; break;
3163
3164  case Hexagon::BI__builtin_HEXAGON_M2_mpyud_hl_s0:
3165    ID = Intrinsic::hexagon_M2_mpyud_hl_s0; break;
3166
3167  case Hexagon::BI__builtin_HEXAGON_M2_mpyud_hl_s1:
3168    ID = Intrinsic::hexagon_M2_mpyud_hl_s1; break;
3169
3170  case Hexagon::BI__builtin_HEXAGON_M2_mpyud_lh_s0:
3171    ID = Intrinsic::hexagon_M2_mpyud_lh_s0; break;
3172
3173  case Hexagon::BI__builtin_HEXAGON_M2_mpyud_lh_s1:
3174    ID = Intrinsic::hexagon_M2_mpyud_lh_s1; break;
3175
3176  case Hexagon::BI__builtin_HEXAGON_M2_mpyud_ll_s0:
3177    ID = Intrinsic::hexagon_M2_mpyud_ll_s0; break;
3178
3179  case Hexagon::BI__builtin_HEXAGON_M2_mpyud_ll_s1:
3180    ID = Intrinsic::hexagon_M2_mpyud_ll_s1; break;
3181
3182  case Hexagon::BI__builtin_HEXAGON_M2_mpysmi:
3183    ID = Intrinsic::hexagon_M2_mpysmi; break;
3184
3185  case Hexagon::BI__builtin_HEXAGON_M2_macsip:
3186    ID = Intrinsic::hexagon_M2_macsip; break;
3187
3188  case Hexagon::BI__builtin_HEXAGON_M2_macsin:
3189    ID = Intrinsic::hexagon_M2_macsin; break;
3190
3191  case Hexagon::BI__builtin_HEXAGON_M2_dpmpyss_s0:
3192    ID = Intrinsic::hexagon_M2_dpmpyss_s0; break;
3193
3194  case Hexagon::BI__builtin_HEXAGON_M2_dpmpyss_acc_s0:
3195    ID = Intrinsic::hexagon_M2_dpmpyss_acc_s0; break;
3196
3197  case Hexagon::BI__builtin_HEXAGON_M2_dpmpyss_nac_s0:
3198    ID = Intrinsic::hexagon_M2_dpmpyss_nac_s0; break;
3199
3200  case Hexagon::BI__builtin_HEXAGON_M2_dpmpyuu_s0:
3201    ID = Intrinsic::hexagon_M2_dpmpyuu_s0; break;
3202
3203  case Hexagon::BI__builtin_HEXAGON_M2_dpmpyuu_acc_s0:
3204    ID = Intrinsic::hexagon_M2_dpmpyuu_acc_s0; break;
3205
3206  case Hexagon::BI__builtin_HEXAGON_M2_dpmpyuu_nac_s0:
3207    ID = Intrinsic::hexagon_M2_dpmpyuu_nac_s0; break;
3208
3209  case Hexagon::BI__builtin_HEXAGON_M2_mpy_up:
3210    ID = Intrinsic::hexagon_M2_mpy_up; break;
3211
3212  case Hexagon::BI__builtin_HEXAGON_M2_mpy_up_s1:
3213    ID = Intrinsic::hexagon_M2_mpy_up_s1; break;
3214
3215  case Hexagon::BI__builtin_HEXAGON_M2_mpy_up_s1_sat:
3216    ID = Intrinsic::hexagon_M2_mpy_up_s1_sat; break;
3217
3218  case Hexagon::BI__builtin_HEXAGON_M2_mpyu_up:
3219    ID = Intrinsic::hexagon_M2_mpyu_up; break;
3220
3221  case Hexagon::BI__builtin_HEXAGON_M2_mpysu_up:
3222    ID = Intrinsic::hexagon_M2_mpysu_up; break;
3223
3224  case Hexagon::BI__builtin_HEXAGON_M2_dpmpyss_rnd_s0:
3225    ID = Intrinsic::hexagon_M2_dpmpyss_rnd_s0; break;
3226
3227  case Hexagon::BI__builtin_HEXAGON_M4_mac_up_s1_sat:
3228    ID = Intrinsic::hexagon_M4_mac_up_s1_sat; break;
3229
3230  case Hexagon::BI__builtin_HEXAGON_M4_nac_up_s1_sat:
3231    ID = Intrinsic::hexagon_M4_nac_up_s1_sat; break;
3232
3233  case Hexagon::BI__builtin_HEXAGON_M2_mpyi:
3234    ID = Intrinsic::hexagon_M2_mpyi; break;
3235
3236  case Hexagon::BI__builtin_HEXAGON_M2_mpyui:
3237    ID = Intrinsic::hexagon_M2_mpyui; break;
3238
3239  case Hexagon::BI__builtin_HEXAGON_M2_maci:
3240    ID = Intrinsic::hexagon_M2_maci; break;
3241
3242  case Hexagon::BI__builtin_HEXAGON_M2_acci:
3243    ID = Intrinsic::hexagon_M2_acci; break;
3244
3245  case Hexagon::BI__builtin_HEXAGON_M2_accii:
3246    ID = Intrinsic::hexagon_M2_accii; break;
3247
3248  case Hexagon::BI__builtin_HEXAGON_M2_nacci:
3249    ID = Intrinsic::hexagon_M2_nacci; break;
3250
3251  case Hexagon::BI__builtin_HEXAGON_M2_naccii:
3252    ID = Intrinsic::hexagon_M2_naccii; break;
3253
3254  case Hexagon::BI__builtin_HEXAGON_M2_subacc:
3255    ID = Intrinsic::hexagon_M2_subacc; break;
3256
3257  case Hexagon::BI__builtin_HEXAGON_M4_mpyrr_addr:
3258    ID = Intrinsic::hexagon_M4_mpyrr_addr; break;
3259
3260  case Hexagon::BI__builtin_HEXAGON_M4_mpyri_addr_u2:
3261    ID = Intrinsic::hexagon_M4_mpyri_addr_u2; break;
3262
3263  case Hexagon::BI__builtin_HEXAGON_M4_mpyri_addr:
3264    ID = Intrinsic::hexagon_M4_mpyri_addr; break;
3265
3266  case Hexagon::BI__builtin_HEXAGON_M4_mpyri_addi:
3267    ID = Intrinsic::hexagon_M4_mpyri_addi; break;
3268
3269  case Hexagon::BI__builtin_HEXAGON_M4_mpyrr_addi:
3270    ID = Intrinsic::hexagon_M4_mpyrr_addi; break;
3271
3272  case Hexagon::BI__builtin_HEXAGON_M2_vmpy2s_s0:
3273    ID = Intrinsic::hexagon_M2_vmpy2s_s0; break;
3274
3275  case Hexagon::BI__builtin_HEXAGON_M2_vmpy2s_s1:
3276    ID = Intrinsic::hexagon_M2_vmpy2s_s1; break;
3277
3278  case Hexagon::BI__builtin_HEXAGON_M2_vmac2s_s0:
3279    ID = Intrinsic::hexagon_M2_vmac2s_s0; break;
3280
3281  case Hexagon::BI__builtin_HEXAGON_M2_vmac2s_s1:
3282    ID = Intrinsic::hexagon_M2_vmac2s_s1; break;
3283
3284  case Hexagon::BI__builtin_HEXAGON_M2_vmpy2su_s0:
3285    ID = Intrinsic::hexagon_M2_vmpy2su_s0; break;
3286
3287  case Hexagon::BI__builtin_HEXAGON_M2_vmpy2su_s1:
3288    ID = Intrinsic::hexagon_M2_vmpy2su_s1; break;
3289
3290  case Hexagon::BI__builtin_HEXAGON_M2_vmac2su_s0:
3291    ID = Intrinsic::hexagon_M2_vmac2su_s0; break;
3292
3293  case Hexagon::BI__builtin_HEXAGON_M2_vmac2su_s1:
3294    ID = Intrinsic::hexagon_M2_vmac2su_s1; break;
3295
3296  case Hexagon::BI__builtin_HEXAGON_M2_vmpy2s_s0pack:
3297    ID = Intrinsic::hexagon_M2_vmpy2s_s0pack; break;
3298
3299  case Hexagon::BI__builtin_HEXAGON_M2_vmpy2s_s1pack:
3300    ID = Intrinsic::hexagon_M2_vmpy2s_s1pack; break;
3301
3302  case Hexagon::BI__builtin_HEXAGON_M2_vmac2:
3303    ID = Intrinsic::hexagon_M2_vmac2; break;
3304
3305  case Hexagon::BI__builtin_HEXAGON_M2_vmpy2es_s0:
3306    ID = Intrinsic::hexagon_M2_vmpy2es_s0; break;
3307
3308  case Hexagon::BI__builtin_HEXAGON_M2_vmpy2es_s1:
3309    ID = Intrinsic::hexagon_M2_vmpy2es_s1; break;
3310
3311  case Hexagon::BI__builtin_HEXAGON_M2_vmac2es_s0:
3312    ID = Intrinsic::hexagon_M2_vmac2es_s0; break;
3313
3314  case Hexagon::BI__builtin_HEXAGON_M2_vmac2es_s1:
3315    ID = Intrinsic::hexagon_M2_vmac2es_s1; break;
3316
3317  case Hexagon::BI__builtin_HEXAGON_M2_vmac2es:
3318    ID = Intrinsic::hexagon_M2_vmac2es; break;
3319
3320  case Hexagon::BI__builtin_HEXAGON_M2_vrmac_s0:
3321    ID = Intrinsic::hexagon_M2_vrmac_s0; break;
3322
3323  case Hexagon::BI__builtin_HEXAGON_M2_vrmpy_s0:
3324    ID = Intrinsic::hexagon_M2_vrmpy_s0; break;
3325
3326  case Hexagon::BI__builtin_HEXAGON_M2_vdmpyrs_s0:
3327    ID = Intrinsic::hexagon_M2_vdmpyrs_s0; break;
3328
3329  case Hexagon::BI__builtin_HEXAGON_M2_vdmpyrs_s1:
3330    ID = Intrinsic::hexagon_M2_vdmpyrs_s1; break;
3331
3332  case Hexagon::BI__builtin_HEXAGON_M5_vrmpybuu:
3333    ID = Intrinsic::hexagon_M5_vrmpybuu; break;
3334
3335  case Hexagon::BI__builtin_HEXAGON_M5_vrmacbuu:
3336    ID = Intrinsic::hexagon_M5_vrmacbuu; break;
3337
3338  case Hexagon::BI__builtin_HEXAGON_M5_vrmpybsu:
3339    ID = Intrinsic::hexagon_M5_vrmpybsu; break;
3340
3341  case Hexagon::BI__builtin_HEXAGON_M5_vrmacbsu:
3342    ID = Intrinsic::hexagon_M5_vrmacbsu; break;
3343
3344  case Hexagon::BI__builtin_HEXAGON_M5_vmpybuu:
3345    ID = Intrinsic::hexagon_M5_vmpybuu; break;
3346
3347  case Hexagon::BI__builtin_HEXAGON_M5_vmpybsu:
3348    ID = Intrinsic::hexagon_M5_vmpybsu; break;
3349
3350  case Hexagon::BI__builtin_HEXAGON_M5_vmacbuu:
3351    ID = Intrinsic::hexagon_M5_vmacbuu; break;
3352
3353  case Hexagon::BI__builtin_HEXAGON_M5_vmacbsu:
3354    ID = Intrinsic::hexagon_M5_vmacbsu; break;
3355
3356  case Hexagon::BI__builtin_HEXAGON_M5_vdmpybsu:
3357    ID = Intrinsic::hexagon_M5_vdmpybsu; break;
3358
3359  case Hexagon::BI__builtin_HEXAGON_M5_vdmacbsu:
3360    ID = Intrinsic::hexagon_M5_vdmacbsu; break;
3361
3362  case Hexagon::BI__builtin_HEXAGON_M2_vdmacs_s0:
3363    ID = Intrinsic::hexagon_M2_vdmacs_s0; break;
3364
3365  case Hexagon::BI__builtin_HEXAGON_M2_vdmacs_s1:
3366    ID = Intrinsic::hexagon_M2_vdmacs_s1; break;
3367
3368  case Hexagon::BI__builtin_HEXAGON_M2_vdmpys_s0:
3369    ID = Intrinsic::hexagon_M2_vdmpys_s0; break;
3370
3371  case Hexagon::BI__builtin_HEXAGON_M2_vdmpys_s1:
3372    ID = Intrinsic::hexagon_M2_vdmpys_s1; break;
3373
3374  case Hexagon::BI__builtin_HEXAGON_M2_cmpyrs_s0:
3375    ID = Intrinsic::hexagon_M2_cmpyrs_s0; break;
3376
3377  case Hexagon::BI__builtin_HEXAGON_M2_cmpyrs_s1:
3378    ID = Intrinsic::hexagon_M2_cmpyrs_s1; break;
3379
3380  case Hexagon::BI__builtin_HEXAGON_M2_cmpyrsc_s0:
3381    ID = Intrinsic::hexagon_M2_cmpyrsc_s0; break;
3382
3383  case Hexagon::BI__builtin_HEXAGON_M2_cmpyrsc_s1:
3384    ID = Intrinsic::hexagon_M2_cmpyrsc_s1; break;
3385
3386  case Hexagon::BI__builtin_HEXAGON_M2_cmacs_s0:
3387    ID = Intrinsic::hexagon_M2_cmacs_s0; break;
3388
3389  case Hexagon::BI__builtin_HEXAGON_M2_cmacs_s1:
3390    ID = Intrinsic::hexagon_M2_cmacs_s1; break;
3391
3392  case Hexagon::BI__builtin_HEXAGON_M2_cmacsc_s0:
3393    ID = Intrinsic::hexagon_M2_cmacsc_s0; break;
3394
3395  case Hexagon::BI__builtin_HEXAGON_M2_cmacsc_s1:
3396    ID = Intrinsic::hexagon_M2_cmacsc_s1; break;
3397
3398  case Hexagon::BI__builtin_HEXAGON_M2_cmpys_s0:
3399    ID = Intrinsic::hexagon_M2_cmpys_s0; break;
3400
3401  case Hexagon::BI__builtin_HEXAGON_M2_cmpys_s1:
3402    ID = Intrinsic::hexagon_M2_cmpys_s1; break;
3403
3404  case Hexagon::BI__builtin_HEXAGON_M2_cmpysc_s0:
3405    ID = Intrinsic::hexagon_M2_cmpysc_s0; break;
3406
3407  case Hexagon::BI__builtin_HEXAGON_M2_cmpysc_s1:
3408    ID = Intrinsic::hexagon_M2_cmpysc_s1; break;
3409
3410  case Hexagon::BI__builtin_HEXAGON_M2_cnacs_s0:
3411    ID = Intrinsic::hexagon_M2_cnacs_s0; break;
3412
3413  case Hexagon::BI__builtin_HEXAGON_M2_cnacs_s1:
3414    ID = Intrinsic::hexagon_M2_cnacs_s1; break;
3415
3416  case Hexagon::BI__builtin_HEXAGON_M2_cnacsc_s0:
3417    ID = Intrinsic::hexagon_M2_cnacsc_s0; break;
3418
3419  case Hexagon::BI__builtin_HEXAGON_M2_cnacsc_s1:
3420    ID = Intrinsic::hexagon_M2_cnacsc_s1; break;
3421
3422  case Hexagon::BI__builtin_HEXAGON_M2_vrcmpys_s1:
3423    ID = Intrinsic::hexagon_M2_vrcmpys_s1; break;
3424
3425  case Hexagon::BI__builtin_HEXAGON_M2_vrcmpys_acc_s1:
3426    ID = Intrinsic::hexagon_M2_vrcmpys_acc_s1; break;
3427
3428  case Hexagon::BI__builtin_HEXAGON_M2_vrcmpys_s1rp:
3429    ID = Intrinsic::hexagon_M2_vrcmpys_s1rp; break;
3430
3431  case Hexagon::BI__builtin_HEXAGON_M2_mmacls_s0:
3432    ID = Intrinsic::hexagon_M2_mmacls_s0; break;
3433
3434  case Hexagon::BI__builtin_HEXAGON_M2_mmacls_s1:
3435    ID = Intrinsic::hexagon_M2_mmacls_s1; break;
3436
3437  case Hexagon::BI__builtin_HEXAGON_M2_mmachs_s0:
3438    ID = Intrinsic::hexagon_M2_mmachs_s0; break;
3439
3440  case Hexagon::BI__builtin_HEXAGON_M2_mmachs_s1:
3441    ID = Intrinsic::hexagon_M2_mmachs_s1; break;
3442
3443  case Hexagon::BI__builtin_HEXAGON_M2_mmpyl_s0:
3444    ID = Intrinsic::hexagon_M2_mmpyl_s0; break;
3445
3446  case Hexagon::BI__builtin_HEXAGON_M2_mmpyl_s1:
3447    ID = Intrinsic::hexagon_M2_mmpyl_s1; break;
3448
3449  case Hexagon::BI__builtin_HEXAGON_M2_mmpyh_s0:
3450    ID = Intrinsic::hexagon_M2_mmpyh_s0; break;
3451
3452  case Hexagon::BI__builtin_HEXAGON_M2_mmpyh_s1:
3453    ID = Intrinsic::hexagon_M2_mmpyh_s1; break;
3454
3455  case Hexagon::BI__builtin_HEXAGON_M2_mmacls_rs0:
3456    ID = Intrinsic::hexagon_M2_mmacls_rs0; break;
3457
3458  case Hexagon::BI__builtin_HEXAGON_M2_mmacls_rs1:
3459    ID = Intrinsic::hexagon_M2_mmacls_rs1; break;
3460
3461  case Hexagon::BI__builtin_HEXAGON_M2_mmachs_rs0:
3462    ID = Intrinsic::hexagon_M2_mmachs_rs0; break;
3463
3464  case Hexagon::BI__builtin_HEXAGON_M2_mmachs_rs1:
3465    ID = Intrinsic::hexagon_M2_mmachs_rs1; break;
3466
3467  case Hexagon::BI__builtin_HEXAGON_M2_mmpyl_rs0:
3468    ID = Intrinsic::hexagon_M2_mmpyl_rs0; break;
3469
3470  case Hexagon::BI__builtin_HEXAGON_M2_mmpyl_rs1:
3471    ID = Intrinsic::hexagon_M2_mmpyl_rs1; break;
3472
3473  case Hexagon::BI__builtin_HEXAGON_M2_mmpyh_rs0:
3474    ID = Intrinsic::hexagon_M2_mmpyh_rs0; break;
3475
3476  case Hexagon::BI__builtin_HEXAGON_M2_mmpyh_rs1:
3477    ID = Intrinsic::hexagon_M2_mmpyh_rs1; break;
3478
3479  case Hexagon::BI__builtin_HEXAGON_M4_vrmpyeh_s0:
3480    ID = Intrinsic::hexagon_M4_vrmpyeh_s0; break;
3481
3482  case Hexagon::BI__builtin_HEXAGON_M4_vrmpyeh_s1:
3483    ID = Intrinsic::hexagon_M4_vrmpyeh_s1; break;
3484
3485  case Hexagon::BI__builtin_HEXAGON_M4_vrmpyeh_acc_s0:
3486    ID = Intrinsic::hexagon_M4_vrmpyeh_acc_s0; break;
3487
3488  case Hexagon::BI__builtin_HEXAGON_M4_vrmpyeh_acc_s1:
3489    ID = Intrinsic::hexagon_M4_vrmpyeh_acc_s1; break;
3490
3491  case Hexagon::BI__builtin_HEXAGON_M4_vrmpyoh_s0:
3492    ID = Intrinsic::hexagon_M4_vrmpyoh_s0; break;
3493
3494  case Hexagon::BI__builtin_HEXAGON_M4_vrmpyoh_s1:
3495    ID = Intrinsic::hexagon_M4_vrmpyoh_s1; break;
3496
3497  case Hexagon::BI__builtin_HEXAGON_M4_vrmpyoh_acc_s0:
3498    ID = Intrinsic::hexagon_M4_vrmpyoh_acc_s0; break;
3499
3500  case Hexagon::BI__builtin_HEXAGON_M4_vrmpyoh_acc_s1:
3501    ID = Intrinsic::hexagon_M4_vrmpyoh_acc_s1; break;
3502
3503  case Hexagon::BI__builtin_HEXAGON_M2_hmmpyl_rs1:
3504    ID = Intrinsic::hexagon_M2_hmmpyl_rs1; break;
3505
3506  case Hexagon::BI__builtin_HEXAGON_M2_hmmpyh_rs1:
3507    ID = Intrinsic::hexagon_M2_hmmpyh_rs1; break;
3508
3509  case Hexagon::BI__builtin_HEXAGON_M2_hmmpyl_s1:
3510    ID = Intrinsic::hexagon_M2_hmmpyl_s1; break;
3511
3512  case Hexagon::BI__builtin_HEXAGON_M2_hmmpyh_s1:
3513    ID = Intrinsic::hexagon_M2_hmmpyh_s1; break;
3514
3515  case Hexagon::BI__builtin_HEXAGON_M2_mmaculs_s0:
3516    ID = Intrinsic::hexagon_M2_mmaculs_s0; break;
3517
3518  case Hexagon::BI__builtin_HEXAGON_M2_mmaculs_s1:
3519    ID = Intrinsic::hexagon_M2_mmaculs_s1; break;
3520
3521  case Hexagon::BI__builtin_HEXAGON_M2_mmacuhs_s0:
3522    ID = Intrinsic::hexagon_M2_mmacuhs_s0; break;
3523
3524  case Hexagon::BI__builtin_HEXAGON_M2_mmacuhs_s1:
3525    ID = Intrinsic::hexagon_M2_mmacuhs_s1; break;
3526
3527  case Hexagon::BI__builtin_HEXAGON_M2_mmpyul_s0:
3528    ID = Intrinsic::hexagon_M2_mmpyul_s0; break;
3529
3530  case Hexagon::BI__builtin_HEXAGON_M2_mmpyul_s1:
3531    ID = Intrinsic::hexagon_M2_mmpyul_s1; break;
3532
3533  case Hexagon::BI__builtin_HEXAGON_M2_mmpyuh_s0:
3534    ID = Intrinsic::hexagon_M2_mmpyuh_s0; break;
3535
3536  case Hexagon::BI__builtin_HEXAGON_M2_mmpyuh_s1:
3537    ID = Intrinsic::hexagon_M2_mmpyuh_s1; break;
3538
3539  case Hexagon::BI__builtin_HEXAGON_M2_mmaculs_rs0:
3540    ID = Intrinsic::hexagon_M2_mmaculs_rs0; break;
3541
3542  case Hexagon::BI__builtin_HEXAGON_M2_mmaculs_rs1:
3543    ID = Intrinsic::hexagon_M2_mmaculs_rs1; break;
3544
3545  case Hexagon::BI__builtin_HEXAGON_M2_mmacuhs_rs0:
3546    ID = Intrinsic::hexagon_M2_mmacuhs_rs0; break;
3547
3548  case Hexagon::BI__builtin_HEXAGON_M2_mmacuhs_rs1:
3549    ID = Intrinsic::hexagon_M2_mmacuhs_rs1; break;
3550
3551  case Hexagon::BI__builtin_HEXAGON_M2_mmpyul_rs0:
3552    ID = Intrinsic::hexagon_M2_mmpyul_rs0; break;
3553
3554  case Hexagon::BI__builtin_HEXAGON_M2_mmpyul_rs1:
3555    ID = Intrinsic::hexagon_M2_mmpyul_rs1; break;
3556
3557  case Hexagon::BI__builtin_HEXAGON_M2_mmpyuh_rs0:
3558    ID = Intrinsic::hexagon_M2_mmpyuh_rs0; break;
3559
3560  case Hexagon::BI__builtin_HEXAGON_M2_mmpyuh_rs1:
3561    ID = Intrinsic::hexagon_M2_mmpyuh_rs1; break;
3562
3563  case Hexagon::BI__builtin_HEXAGON_M2_vrcmaci_s0:
3564    ID = Intrinsic::hexagon_M2_vrcmaci_s0; break;
3565
3566  case Hexagon::BI__builtin_HEXAGON_M2_vrcmacr_s0:
3567    ID = Intrinsic::hexagon_M2_vrcmacr_s0; break;
3568
3569  case Hexagon::BI__builtin_HEXAGON_M2_vrcmaci_s0c:
3570    ID = Intrinsic::hexagon_M2_vrcmaci_s0c; break;
3571
3572  case Hexagon::BI__builtin_HEXAGON_M2_vrcmacr_s0c:
3573    ID = Intrinsic::hexagon_M2_vrcmacr_s0c; break;
3574
3575  case Hexagon::BI__builtin_HEXAGON_M2_cmaci_s0:
3576    ID = Intrinsic::hexagon_M2_cmaci_s0; break;
3577
3578  case Hexagon::BI__builtin_HEXAGON_M2_cmacr_s0:
3579    ID = Intrinsic::hexagon_M2_cmacr_s0; break;
3580
3581  case Hexagon::BI__builtin_HEXAGON_M2_vrcmpyi_s0:
3582    ID = Intrinsic::hexagon_M2_vrcmpyi_s0; break;
3583
3584  case Hexagon::BI__builtin_HEXAGON_M2_vrcmpyr_s0:
3585    ID = Intrinsic::hexagon_M2_vrcmpyr_s0; break;
3586
3587  case Hexagon::BI__builtin_HEXAGON_M2_vrcmpyi_s0c:
3588    ID = Intrinsic::hexagon_M2_vrcmpyi_s0c; break;
3589
3590  case Hexagon::BI__builtin_HEXAGON_M2_vrcmpyr_s0c:
3591    ID = Intrinsic::hexagon_M2_vrcmpyr_s0c; break;
3592
3593  case Hexagon::BI__builtin_HEXAGON_M2_cmpyi_s0:
3594    ID = Intrinsic::hexagon_M2_cmpyi_s0; break;
3595
3596  case Hexagon::BI__builtin_HEXAGON_M2_cmpyr_s0:
3597    ID = Intrinsic::hexagon_M2_cmpyr_s0; break;
3598
3599  case Hexagon::BI__builtin_HEXAGON_M4_cmpyi_wh:
3600    ID = Intrinsic::hexagon_M4_cmpyi_wh; break;
3601
3602  case Hexagon::BI__builtin_HEXAGON_M4_cmpyr_wh:
3603    ID = Intrinsic::hexagon_M4_cmpyr_wh; break;
3604
3605  case Hexagon::BI__builtin_HEXAGON_M4_cmpyi_whc:
3606    ID = Intrinsic::hexagon_M4_cmpyi_whc; break;
3607
3608  case Hexagon::BI__builtin_HEXAGON_M4_cmpyr_whc:
3609    ID = Intrinsic::hexagon_M4_cmpyr_whc; break;
3610
3611  case Hexagon::BI__builtin_HEXAGON_M2_vcmpy_s0_sat_i:
3612    ID = Intrinsic::hexagon_M2_vcmpy_s0_sat_i; break;
3613
3614  case Hexagon::BI__builtin_HEXAGON_M2_vcmpy_s0_sat_r:
3615    ID = Intrinsic::hexagon_M2_vcmpy_s0_sat_r; break;
3616
3617  case Hexagon::BI__builtin_HEXAGON_M2_vcmpy_s1_sat_i:
3618    ID = Intrinsic::hexagon_M2_vcmpy_s1_sat_i; break;
3619
3620  case Hexagon::BI__builtin_HEXAGON_M2_vcmpy_s1_sat_r:
3621    ID = Intrinsic::hexagon_M2_vcmpy_s1_sat_r; break;
3622
3623  case Hexagon::BI__builtin_HEXAGON_M2_vcmac_s0_sat_i:
3624    ID = Intrinsic::hexagon_M2_vcmac_s0_sat_i; break;
3625
3626  case Hexagon::BI__builtin_HEXAGON_M2_vcmac_s0_sat_r:
3627    ID = Intrinsic::hexagon_M2_vcmac_s0_sat_r; break;
3628
3629  case Hexagon::BI__builtin_HEXAGON_S2_vcrotate:
3630    ID = Intrinsic::hexagon_S2_vcrotate; break;
3631
3632  case Hexagon::BI__builtin_HEXAGON_S4_vrcrotate_acc:
3633    ID = Intrinsic::hexagon_S4_vrcrotate_acc; break;
3634
3635  case Hexagon::BI__builtin_HEXAGON_S4_vrcrotate:
3636    ID = Intrinsic::hexagon_S4_vrcrotate; break;
3637
3638  case Hexagon::BI__builtin_HEXAGON_S2_vcnegh:
3639    ID = Intrinsic::hexagon_S2_vcnegh; break;
3640
3641  case Hexagon::BI__builtin_HEXAGON_S2_vrcnegh:
3642    ID = Intrinsic::hexagon_S2_vrcnegh; break;
3643
3644  case Hexagon::BI__builtin_HEXAGON_M4_pmpyw:
3645    ID = Intrinsic::hexagon_M4_pmpyw; break;
3646
3647  case Hexagon::BI__builtin_HEXAGON_M4_vpmpyh:
3648    ID = Intrinsic::hexagon_M4_vpmpyh; break;
3649
3650  case Hexagon::BI__builtin_HEXAGON_M4_pmpyw_acc:
3651    ID = Intrinsic::hexagon_M4_pmpyw_acc; break;
3652
3653  case Hexagon::BI__builtin_HEXAGON_M4_vpmpyh_acc:
3654    ID = Intrinsic::hexagon_M4_vpmpyh_acc; break;
3655
3656  case Hexagon::BI__builtin_HEXAGON_A2_add:
3657    ID = Intrinsic::hexagon_A2_add; break;
3658
3659  case Hexagon::BI__builtin_HEXAGON_A2_sub:
3660    ID = Intrinsic::hexagon_A2_sub; break;
3661
3662  case Hexagon::BI__builtin_HEXAGON_A2_addsat:
3663    ID = Intrinsic::hexagon_A2_addsat; break;
3664
3665  case Hexagon::BI__builtin_HEXAGON_A2_subsat:
3666    ID = Intrinsic::hexagon_A2_subsat; break;
3667
3668  case Hexagon::BI__builtin_HEXAGON_A2_addi:
3669    ID = Intrinsic::hexagon_A2_addi; break;
3670
3671  case Hexagon::BI__builtin_HEXAGON_A2_addh_l16_ll:
3672    ID = Intrinsic::hexagon_A2_addh_l16_ll; break;
3673
3674  case Hexagon::BI__builtin_HEXAGON_A2_addh_l16_hl:
3675    ID = Intrinsic::hexagon_A2_addh_l16_hl; break;
3676
3677  case Hexagon::BI__builtin_HEXAGON_A2_addh_l16_sat_ll:
3678    ID = Intrinsic::hexagon_A2_addh_l16_sat_ll; break;
3679
3680  case Hexagon::BI__builtin_HEXAGON_A2_addh_l16_sat_hl:
3681    ID = Intrinsic::hexagon_A2_addh_l16_sat_hl; break;
3682
3683  case Hexagon::BI__builtin_HEXAGON_A2_subh_l16_ll:
3684    ID = Intrinsic::hexagon_A2_subh_l16_ll; break;
3685
3686  case Hexagon::BI__builtin_HEXAGON_A2_subh_l16_hl:
3687    ID = Intrinsic::hexagon_A2_subh_l16_hl; break;
3688
3689  case Hexagon::BI__builtin_HEXAGON_A2_subh_l16_sat_ll:
3690    ID = Intrinsic::hexagon_A2_subh_l16_sat_ll; break;
3691
3692  case Hexagon::BI__builtin_HEXAGON_A2_subh_l16_sat_hl:
3693    ID = Intrinsic::hexagon_A2_subh_l16_sat_hl; break;
3694
3695  case Hexagon::BI__builtin_HEXAGON_A2_addh_h16_ll:
3696    ID = Intrinsic::hexagon_A2_addh_h16_ll; break;
3697
3698  case Hexagon::BI__builtin_HEXAGON_A2_addh_h16_lh:
3699    ID = Intrinsic::hexagon_A2_addh_h16_lh; break;
3700
3701  case Hexagon::BI__builtin_HEXAGON_A2_addh_h16_hl:
3702    ID = Intrinsic::hexagon_A2_addh_h16_hl; break;
3703
3704  case Hexagon::BI__builtin_HEXAGON_A2_addh_h16_hh:
3705    ID = Intrinsic::hexagon_A2_addh_h16_hh; break;
3706
3707  case Hexagon::BI__builtin_HEXAGON_A2_addh_h16_sat_ll:
3708    ID = Intrinsic::hexagon_A2_addh_h16_sat_ll; break;
3709
3710  case Hexagon::BI__builtin_HEXAGON_A2_addh_h16_sat_lh:
3711    ID = Intrinsic::hexagon_A2_addh_h16_sat_lh; break;
3712
3713  case Hexagon::BI__builtin_HEXAGON_A2_addh_h16_sat_hl:
3714    ID = Intrinsic::hexagon_A2_addh_h16_sat_hl; break;
3715
3716  case Hexagon::BI__builtin_HEXAGON_A2_addh_h16_sat_hh:
3717    ID = Intrinsic::hexagon_A2_addh_h16_sat_hh; break;
3718
3719  case Hexagon::BI__builtin_HEXAGON_A2_subh_h16_ll:
3720    ID = Intrinsic::hexagon_A2_subh_h16_ll; break;
3721
3722  case Hexagon::BI__builtin_HEXAGON_A2_subh_h16_lh:
3723    ID = Intrinsic::hexagon_A2_subh_h16_lh; break;
3724
3725  case Hexagon::BI__builtin_HEXAGON_A2_subh_h16_hl:
3726    ID = Intrinsic::hexagon_A2_subh_h16_hl; break;
3727
3728  case Hexagon::BI__builtin_HEXAGON_A2_subh_h16_hh:
3729    ID = Intrinsic::hexagon_A2_subh_h16_hh; break;
3730
3731  case Hexagon::BI__builtin_HEXAGON_A2_subh_h16_sat_ll:
3732    ID = Intrinsic::hexagon_A2_subh_h16_sat_ll; break;
3733
3734  case Hexagon::BI__builtin_HEXAGON_A2_subh_h16_sat_lh:
3735    ID = Intrinsic::hexagon_A2_subh_h16_sat_lh; break;
3736
3737  case Hexagon::BI__builtin_HEXAGON_A2_subh_h16_sat_hl:
3738    ID = Intrinsic::hexagon_A2_subh_h16_sat_hl; break;
3739
3740  case Hexagon::BI__builtin_HEXAGON_A2_subh_h16_sat_hh:
3741    ID = Intrinsic::hexagon_A2_subh_h16_sat_hh; break;
3742
3743  case Hexagon::BI__builtin_HEXAGON_A2_aslh:
3744    ID = Intrinsic::hexagon_A2_aslh; break;
3745
3746  case Hexagon::BI__builtin_HEXAGON_A2_asrh:
3747    ID = Intrinsic::hexagon_A2_asrh; break;
3748
3749  case Hexagon::BI__builtin_HEXAGON_A2_addp:
3750    ID = Intrinsic::hexagon_A2_addp; break;
3751
3752  case Hexagon::BI__builtin_HEXAGON_A2_addpsat:
3753    ID = Intrinsic::hexagon_A2_addpsat; break;
3754
3755  case Hexagon::BI__builtin_HEXAGON_A2_addsp:
3756    ID = Intrinsic::hexagon_A2_addsp; break;
3757
3758  case Hexagon::BI__builtin_HEXAGON_A2_subp:
3759    ID = Intrinsic::hexagon_A2_subp; break;
3760
3761  case Hexagon::BI__builtin_HEXAGON_A2_neg:
3762    ID = Intrinsic::hexagon_A2_neg; break;
3763
3764  case Hexagon::BI__builtin_HEXAGON_A2_negsat:
3765    ID = Intrinsic::hexagon_A2_negsat; break;
3766
3767  case Hexagon::BI__builtin_HEXAGON_A2_abs:
3768    ID = Intrinsic::hexagon_A2_abs; break;
3769
3770  case Hexagon::BI__builtin_HEXAGON_A2_abssat:
3771    ID = Intrinsic::hexagon_A2_abssat; break;
3772
3773  case Hexagon::BI__builtin_HEXAGON_A2_vconj:
3774    ID = Intrinsic::hexagon_A2_vconj; break;
3775
3776  case Hexagon::BI__builtin_HEXAGON_A2_negp:
3777    ID = Intrinsic::hexagon_A2_negp; break;
3778
3779  case Hexagon::BI__builtin_HEXAGON_A2_absp:
3780    ID = Intrinsic::hexagon_A2_absp; break;
3781
3782  case Hexagon::BI__builtin_HEXAGON_A2_max:
3783    ID = Intrinsic::hexagon_A2_max; break;
3784
3785  case Hexagon::BI__builtin_HEXAGON_A2_maxu:
3786    ID = Intrinsic::hexagon_A2_maxu; break;
3787
3788  case Hexagon::BI__builtin_HEXAGON_A2_min:
3789    ID = Intrinsic::hexagon_A2_min; break;
3790
3791  case Hexagon::BI__builtin_HEXAGON_A2_minu:
3792    ID = Intrinsic::hexagon_A2_minu; break;
3793
3794  case Hexagon::BI__builtin_HEXAGON_A2_maxp:
3795    ID = Intrinsic::hexagon_A2_maxp; break;
3796
3797  case Hexagon::BI__builtin_HEXAGON_A2_maxup:
3798    ID = Intrinsic::hexagon_A2_maxup; break;
3799
3800  case Hexagon::BI__builtin_HEXAGON_A2_minp:
3801    ID = Intrinsic::hexagon_A2_minp; break;
3802
3803  case Hexagon::BI__builtin_HEXAGON_A2_minup:
3804    ID = Intrinsic::hexagon_A2_minup; break;
3805
3806  case Hexagon::BI__builtin_HEXAGON_A2_tfr:
3807    ID = Intrinsic::hexagon_A2_tfr; break;
3808
3809  case Hexagon::BI__builtin_HEXAGON_A2_tfrsi:
3810    ID = Intrinsic::hexagon_A2_tfrsi; break;
3811
3812  case Hexagon::BI__builtin_HEXAGON_A2_tfrp:
3813    ID = Intrinsic::hexagon_A2_tfrp; break;
3814
3815  case Hexagon::BI__builtin_HEXAGON_A2_tfrpi:
3816    ID = Intrinsic::hexagon_A2_tfrpi; break;
3817
3818  case Hexagon::BI__builtin_HEXAGON_A2_zxtb:
3819    ID = Intrinsic::hexagon_A2_zxtb; break;
3820
3821  case Hexagon::BI__builtin_HEXAGON_A2_sxtb:
3822    ID = Intrinsic::hexagon_A2_sxtb; break;
3823
3824  case Hexagon::BI__builtin_HEXAGON_A2_zxth:
3825    ID = Intrinsic::hexagon_A2_zxth; break;
3826
3827  case Hexagon::BI__builtin_HEXAGON_A2_sxth:
3828    ID = Intrinsic::hexagon_A2_sxth; break;
3829
3830  case Hexagon::BI__builtin_HEXAGON_A2_combinew:
3831    ID = Intrinsic::hexagon_A2_combinew; break;
3832
3833  case Hexagon::BI__builtin_HEXAGON_A4_combineri:
3834    ID = Intrinsic::hexagon_A4_combineri; break;
3835
3836  case Hexagon::BI__builtin_HEXAGON_A4_combineir:
3837    ID = Intrinsic::hexagon_A4_combineir; break;
3838
3839  case Hexagon::BI__builtin_HEXAGON_A2_combineii:
3840    ID = Intrinsic::hexagon_A2_combineii; break;
3841
3842  case Hexagon::BI__builtin_HEXAGON_A2_combine_hh:
3843    ID = Intrinsic::hexagon_A2_combine_hh; break;
3844
3845  case Hexagon::BI__builtin_HEXAGON_A2_combine_hl:
3846    ID = Intrinsic::hexagon_A2_combine_hl; break;
3847
3848  case Hexagon::BI__builtin_HEXAGON_A2_combine_lh:
3849    ID = Intrinsic::hexagon_A2_combine_lh; break;
3850
3851  case Hexagon::BI__builtin_HEXAGON_A2_combine_ll:
3852    ID = Intrinsic::hexagon_A2_combine_ll; break;
3853
3854  case Hexagon::BI__builtin_HEXAGON_A2_tfril:
3855    ID = Intrinsic::hexagon_A2_tfril; break;
3856
3857  case Hexagon::BI__builtin_HEXAGON_A2_tfrih:
3858    ID = Intrinsic::hexagon_A2_tfrih; break;
3859
3860  case Hexagon::BI__builtin_HEXAGON_A2_and:
3861    ID = Intrinsic::hexagon_A2_and; break;
3862
3863  case Hexagon::BI__builtin_HEXAGON_A2_or:
3864    ID = Intrinsic::hexagon_A2_or; break;
3865
3866  case Hexagon::BI__builtin_HEXAGON_A2_xor:
3867    ID = Intrinsic::hexagon_A2_xor; break;
3868
3869  case Hexagon::BI__builtin_HEXAGON_A2_not:
3870    ID = Intrinsic::hexagon_A2_not; break;
3871
3872  case Hexagon::BI__builtin_HEXAGON_M2_xor_xacc:
3873    ID = Intrinsic::hexagon_M2_xor_xacc; break;
3874
3875  case Hexagon::BI__builtin_HEXAGON_M4_xor_xacc:
3876    ID = Intrinsic::hexagon_M4_xor_xacc; break;
3877
3878  case Hexagon::BI__builtin_HEXAGON_A4_andn:
3879    ID = Intrinsic::hexagon_A4_andn; break;
3880
3881  case Hexagon::BI__builtin_HEXAGON_A4_orn:
3882    ID = Intrinsic::hexagon_A4_orn; break;
3883
3884  case Hexagon::BI__builtin_HEXAGON_A4_andnp:
3885    ID = Intrinsic::hexagon_A4_andnp; break;
3886
3887  case Hexagon::BI__builtin_HEXAGON_A4_ornp:
3888    ID = Intrinsic::hexagon_A4_ornp; break;
3889
3890  case Hexagon::BI__builtin_HEXAGON_S4_addaddi:
3891    ID = Intrinsic::hexagon_S4_addaddi; break;
3892
3893  case Hexagon::BI__builtin_HEXAGON_S4_subaddi:
3894    ID = Intrinsic::hexagon_S4_subaddi; break;
3895
3896  case Hexagon::BI__builtin_HEXAGON_M4_and_and:
3897    ID = Intrinsic::hexagon_M4_and_and; break;
3898
3899  case Hexagon::BI__builtin_HEXAGON_M4_and_andn:
3900    ID = Intrinsic::hexagon_M4_and_andn; break;
3901
3902  case Hexagon::BI__builtin_HEXAGON_M4_and_or:
3903    ID = Intrinsic::hexagon_M4_and_or; break;
3904
3905  case Hexagon::BI__builtin_HEXAGON_M4_and_xor:
3906    ID = Intrinsic::hexagon_M4_and_xor; break;
3907
3908  case Hexagon::BI__builtin_HEXAGON_M4_or_and:
3909    ID = Intrinsic::hexagon_M4_or_and; break;
3910
3911  case Hexagon::BI__builtin_HEXAGON_M4_or_andn:
3912    ID = Intrinsic::hexagon_M4_or_andn; break;
3913
3914  case Hexagon::BI__builtin_HEXAGON_M4_or_or:
3915    ID = Intrinsic::hexagon_M4_or_or; break;
3916
3917  case Hexagon::BI__builtin_HEXAGON_M4_or_xor:
3918    ID = Intrinsic::hexagon_M4_or_xor; break;
3919
3920  case Hexagon::BI__builtin_HEXAGON_S4_or_andix:
3921    ID = Intrinsic::hexagon_S4_or_andix; break;
3922
3923  case Hexagon::BI__builtin_HEXAGON_S4_or_andi:
3924    ID = Intrinsic::hexagon_S4_or_andi; break;
3925
3926  case Hexagon::BI__builtin_HEXAGON_S4_or_ori:
3927    ID = Intrinsic::hexagon_S4_or_ori; break;
3928
3929  case Hexagon::BI__builtin_HEXAGON_M4_xor_and:
3930    ID = Intrinsic::hexagon_M4_xor_and; break;
3931
3932  case Hexagon::BI__builtin_HEXAGON_M4_xor_or:
3933    ID = Intrinsic::hexagon_M4_xor_or; break;
3934
3935  case Hexagon::BI__builtin_HEXAGON_M4_xor_andn:
3936    ID = Intrinsic::hexagon_M4_xor_andn; break;
3937
3938  case Hexagon::BI__builtin_HEXAGON_A2_subri:
3939    ID = Intrinsic::hexagon_A2_subri; break;
3940
3941  case Hexagon::BI__builtin_HEXAGON_A2_andir:
3942    ID = Intrinsic::hexagon_A2_andir; break;
3943
3944  case Hexagon::BI__builtin_HEXAGON_A2_orir:
3945    ID = Intrinsic::hexagon_A2_orir; break;
3946
3947  case Hexagon::BI__builtin_HEXAGON_A2_andp:
3948    ID = Intrinsic::hexagon_A2_andp; break;
3949
3950  case Hexagon::BI__builtin_HEXAGON_A2_orp:
3951    ID = Intrinsic::hexagon_A2_orp; break;
3952
3953  case Hexagon::BI__builtin_HEXAGON_A2_xorp:
3954    ID = Intrinsic::hexagon_A2_xorp; break;
3955
3956  case Hexagon::BI__builtin_HEXAGON_A2_notp:
3957    ID = Intrinsic::hexagon_A2_notp; break;
3958
3959  case Hexagon::BI__builtin_HEXAGON_A2_sxtw:
3960    ID = Intrinsic::hexagon_A2_sxtw; break;
3961
3962  case Hexagon::BI__builtin_HEXAGON_A2_sat:
3963    ID = Intrinsic::hexagon_A2_sat; break;
3964
3965  case Hexagon::BI__builtin_HEXAGON_A2_roundsat:
3966    ID = Intrinsic::hexagon_A2_roundsat; break;
3967
3968  case Hexagon::BI__builtin_HEXAGON_A2_sath:
3969    ID = Intrinsic::hexagon_A2_sath; break;
3970
3971  case Hexagon::BI__builtin_HEXAGON_A2_satuh:
3972    ID = Intrinsic::hexagon_A2_satuh; break;
3973
3974  case Hexagon::BI__builtin_HEXAGON_A2_satub:
3975    ID = Intrinsic::hexagon_A2_satub; break;
3976
3977  case Hexagon::BI__builtin_HEXAGON_A2_satb:
3978    ID = Intrinsic::hexagon_A2_satb; break;
3979
3980  case Hexagon::BI__builtin_HEXAGON_A2_vaddub:
3981    ID = Intrinsic::hexagon_A2_vaddub; break;
3982
3983  case Hexagon::BI__builtin_HEXAGON_A2_vaddb_map:
3984    ID = Intrinsic::hexagon_A2_vaddb_map; break;
3985
3986  case Hexagon::BI__builtin_HEXAGON_A2_vaddubs:
3987    ID = Intrinsic::hexagon_A2_vaddubs; break;
3988
3989  case Hexagon::BI__builtin_HEXAGON_A2_vaddh:
3990    ID = Intrinsic::hexagon_A2_vaddh; break;
3991
3992  case Hexagon::BI__builtin_HEXAGON_A2_vaddhs:
3993    ID = Intrinsic::hexagon_A2_vaddhs; break;
3994
3995  case Hexagon::BI__builtin_HEXAGON_A2_vadduhs:
3996    ID = Intrinsic::hexagon_A2_vadduhs; break;
3997
3998  case Hexagon::BI__builtin_HEXAGON_A5_vaddhubs:
3999    ID = Intrinsic::hexagon_A5_vaddhubs; break;
4000
4001  case Hexagon::BI__builtin_HEXAGON_A2_vaddw:
4002    ID = Intrinsic::hexagon_A2_vaddw; break;
4003
4004  case Hexagon::BI__builtin_HEXAGON_A2_vaddws:
4005    ID = Intrinsic::hexagon_A2_vaddws; break;
4006
4007  case Hexagon::BI__builtin_HEXAGON_S4_vxaddsubw:
4008    ID = Intrinsic::hexagon_S4_vxaddsubw; break;
4009
4010  case Hexagon::BI__builtin_HEXAGON_S4_vxsubaddw:
4011    ID = Intrinsic::hexagon_S4_vxsubaddw; break;
4012
4013  case Hexagon::BI__builtin_HEXAGON_S4_vxaddsubh:
4014    ID = Intrinsic::hexagon_S4_vxaddsubh; break;
4015
4016  case Hexagon::BI__builtin_HEXAGON_S4_vxsubaddh:
4017    ID = Intrinsic::hexagon_S4_vxsubaddh; break;
4018
4019  case Hexagon::BI__builtin_HEXAGON_S4_vxaddsubhr:
4020    ID = Intrinsic::hexagon_S4_vxaddsubhr; break;
4021
4022  case Hexagon::BI__builtin_HEXAGON_S4_vxsubaddhr:
4023    ID = Intrinsic::hexagon_S4_vxsubaddhr; break;
4024
4025  case Hexagon::BI__builtin_HEXAGON_A2_svavgh:
4026    ID = Intrinsic::hexagon_A2_svavgh; break;
4027
4028  case Hexagon::BI__builtin_HEXAGON_A2_svavghs:
4029    ID = Intrinsic::hexagon_A2_svavghs; break;
4030
4031  case Hexagon::BI__builtin_HEXAGON_A2_svnavgh:
4032    ID = Intrinsic::hexagon_A2_svnavgh; break;
4033
4034  case Hexagon::BI__builtin_HEXAGON_A2_svaddh:
4035    ID = Intrinsic::hexagon_A2_svaddh; break;
4036
4037  case Hexagon::BI__builtin_HEXAGON_A2_svaddhs:
4038    ID = Intrinsic::hexagon_A2_svaddhs; break;
4039
4040  case Hexagon::BI__builtin_HEXAGON_A2_svadduhs:
4041    ID = Intrinsic::hexagon_A2_svadduhs; break;
4042
4043  case Hexagon::BI__builtin_HEXAGON_A2_svsubh:
4044    ID = Intrinsic::hexagon_A2_svsubh; break;
4045
4046  case Hexagon::BI__builtin_HEXAGON_A2_svsubhs:
4047    ID = Intrinsic::hexagon_A2_svsubhs; break;
4048
4049  case Hexagon::BI__builtin_HEXAGON_A2_svsubuhs:
4050    ID = Intrinsic::hexagon_A2_svsubuhs; break;
4051
4052  case Hexagon::BI__builtin_HEXAGON_A2_vraddub:
4053    ID = Intrinsic::hexagon_A2_vraddub; break;
4054
4055  case Hexagon::BI__builtin_HEXAGON_A2_vraddub_acc:
4056    ID = Intrinsic::hexagon_A2_vraddub_acc; break;
4057
4058  case Hexagon::BI__builtin_HEXAGON_M2_vraddh:
4059    ID = Intrinsic::hexagon_M2_vraddh; break;
4060
4061  case Hexagon::BI__builtin_HEXAGON_M2_vradduh:
4062    ID = Intrinsic::hexagon_M2_vradduh; break;
4063
4064  case Hexagon::BI__builtin_HEXAGON_A2_vsubub:
4065    ID = Intrinsic::hexagon_A2_vsubub; break;
4066
4067  case Hexagon::BI__builtin_HEXAGON_A2_vsubb_map:
4068    ID = Intrinsic::hexagon_A2_vsubb_map; break;
4069
4070  case Hexagon::BI__builtin_HEXAGON_A2_vsububs:
4071    ID = Intrinsic::hexagon_A2_vsububs; break;
4072
4073  case Hexagon::BI__builtin_HEXAGON_A2_vsubh:
4074    ID = Intrinsic::hexagon_A2_vsubh; break;
4075
4076  case Hexagon::BI__builtin_HEXAGON_A2_vsubhs:
4077    ID = Intrinsic::hexagon_A2_vsubhs; break;
4078
4079  case Hexagon::BI__builtin_HEXAGON_A2_vsubuhs:
4080    ID = Intrinsic::hexagon_A2_vsubuhs; break;
4081
4082  case Hexagon::BI__builtin_HEXAGON_A2_vsubw:
4083    ID = Intrinsic::hexagon_A2_vsubw; break;
4084
4085  case Hexagon::BI__builtin_HEXAGON_A2_vsubws:
4086    ID = Intrinsic::hexagon_A2_vsubws; break;
4087
4088  case Hexagon::BI__builtin_HEXAGON_A2_vabsh:
4089    ID = Intrinsic::hexagon_A2_vabsh; break;
4090
4091  case Hexagon::BI__builtin_HEXAGON_A2_vabshsat:
4092    ID = Intrinsic::hexagon_A2_vabshsat; break;
4093
4094  case Hexagon::BI__builtin_HEXAGON_A2_vabsw:
4095    ID = Intrinsic::hexagon_A2_vabsw; break;
4096
4097  case Hexagon::BI__builtin_HEXAGON_A2_vabswsat:
4098    ID = Intrinsic::hexagon_A2_vabswsat; break;
4099
4100  case Hexagon::BI__builtin_HEXAGON_M2_vabsdiffw:
4101    ID = Intrinsic::hexagon_M2_vabsdiffw; break;
4102
4103  case Hexagon::BI__builtin_HEXAGON_M2_vabsdiffh:
4104    ID = Intrinsic::hexagon_M2_vabsdiffh; break;
4105
4106  case Hexagon::BI__builtin_HEXAGON_A2_vrsadub:
4107    ID = Intrinsic::hexagon_A2_vrsadub; break;
4108
4109  case Hexagon::BI__builtin_HEXAGON_A2_vrsadub_acc:
4110    ID = Intrinsic::hexagon_A2_vrsadub_acc; break;
4111
4112  case Hexagon::BI__builtin_HEXAGON_A2_vavgub:
4113    ID = Intrinsic::hexagon_A2_vavgub; break;
4114
4115  case Hexagon::BI__builtin_HEXAGON_A2_vavguh:
4116    ID = Intrinsic::hexagon_A2_vavguh; break;
4117
4118  case Hexagon::BI__builtin_HEXAGON_A2_vavgh:
4119    ID = Intrinsic::hexagon_A2_vavgh; break;
4120
4121  case Hexagon::BI__builtin_HEXAGON_A2_vnavgh:
4122    ID = Intrinsic::hexagon_A2_vnavgh; break;
4123
4124  case Hexagon::BI__builtin_HEXAGON_A2_vavgw:
4125    ID = Intrinsic::hexagon_A2_vavgw; break;
4126
4127  case Hexagon::BI__builtin_HEXAGON_A2_vnavgw:
4128    ID = Intrinsic::hexagon_A2_vnavgw; break;
4129
4130  case Hexagon::BI__builtin_HEXAGON_A2_vavgwr:
4131    ID = Intrinsic::hexagon_A2_vavgwr; break;
4132
4133  case Hexagon::BI__builtin_HEXAGON_A2_vnavgwr:
4134    ID = Intrinsic::hexagon_A2_vnavgwr; break;
4135
4136  case Hexagon::BI__builtin_HEXAGON_A2_vavgwcr:
4137    ID = Intrinsic::hexagon_A2_vavgwcr; break;
4138
4139  case Hexagon::BI__builtin_HEXAGON_A2_vnavgwcr:
4140    ID = Intrinsic::hexagon_A2_vnavgwcr; break;
4141
4142  case Hexagon::BI__builtin_HEXAGON_A2_vavghcr:
4143    ID = Intrinsic::hexagon_A2_vavghcr; break;
4144
4145  case Hexagon::BI__builtin_HEXAGON_A2_vnavghcr:
4146    ID = Intrinsic::hexagon_A2_vnavghcr; break;
4147
4148  case Hexagon::BI__builtin_HEXAGON_A2_vavguw:
4149    ID = Intrinsic::hexagon_A2_vavguw; break;
4150
4151  case Hexagon::BI__builtin_HEXAGON_A2_vavguwr:
4152    ID = Intrinsic::hexagon_A2_vavguwr; break;
4153
4154  case Hexagon::BI__builtin_HEXAGON_A2_vavgubr:
4155    ID = Intrinsic::hexagon_A2_vavgubr; break;
4156
4157  case Hexagon::BI__builtin_HEXAGON_A2_vavguhr:
4158    ID = Intrinsic::hexagon_A2_vavguhr; break;
4159
4160  case Hexagon::BI__builtin_HEXAGON_A2_vavghr:
4161    ID = Intrinsic::hexagon_A2_vavghr; break;
4162
4163  case Hexagon::BI__builtin_HEXAGON_A2_vnavghr:
4164    ID = Intrinsic::hexagon_A2_vnavghr; break;
4165
4166  case Hexagon::BI__builtin_HEXAGON_A4_round_ri:
4167    ID = Intrinsic::hexagon_A4_round_ri; break;
4168
4169  case Hexagon::BI__builtin_HEXAGON_A4_round_rr:
4170    ID = Intrinsic::hexagon_A4_round_rr; break;
4171
4172  case Hexagon::BI__builtin_HEXAGON_A4_round_ri_sat:
4173    ID = Intrinsic::hexagon_A4_round_ri_sat; break;
4174
4175  case Hexagon::BI__builtin_HEXAGON_A4_round_rr_sat:
4176    ID = Intrinsic::hexagon_A4_round_rr_sat; break;
4177
4178  case Hexagon::BI__builtin_HEXAGON_A4_cround_ri:
4179    ID = Intrinsic::hexagon_A4_cround_ri; break;
4180
4181  case Hexagon::BI__builtin_HEXAGON_A4_cround_rr:
4182    ID = Intrinsic::hexagon_A4_cround_rr; break;
4183
4184  case Hexagon::BI__builtin_HEXAGON_A4_vrminh:
4185    ID = Intrinsic::hexagon_A4_vrminh; break;
4186
4187  case Hexagon::BI__builtin_HEXAGON_A4_vrmaxh:
4188    ID = Intrinsic::hexagon_A4_vrmaxh; break;
4189
4190  case Hexagon::BI__builtin_HEXAGON_A4_vrminuh:
4191    ID = Intrinsic::hexagon_A4_vrminuh; break;
4192
4193  case Hexagon::BI__builtin_HEXAGON_A4_vrmaxuh:
4194    ID = Intrinsic::hexagon_A4_vrmaxuh; break;
4195
4196  case Hexagon::BI__builtin_HEXAGON_A4_vrminw:
4197    ID = Intrinsic::hexagon_A4_vrminw; break;
4198
4199  case Hexagon::BI__builtin_HEXAGON_A4_vrmaxw:
4200    ID = Intrinsic::hexagon_A4_vrmaxw; break;
4201
4202  case Hexagon::BI__builtin_HEXAGON_A4_vrminuw:
4203    ID = Intrinsic::hexagon_A4_vrminuw; break;
4204
4205  case Hexagon::BI__builtin_HEXAGON_A4_vrmaxuw:
4206    ID = Intrinsic::hexagon_A4_vrmaxuw; break;
4207
4208  case Hexagon::BI__builtin_HEXAGON_A2_vminb:
4209    ID = Intrinsic::hexagon_A2_vminb; break;
4210
4211  case Hexagon::BI__builtin_HEXAGON_A2_vmaxb:
4212    ID = Intrinsic::hexagon_A2_vmaxb; break;
4213
4214  case Hexagon::BI__builtin_HEXAGON_A2_vminub:
4215    ID = Intrinsic::hexagon_A2_vminub; break;
4216
4217  case Hexagon::BI__builtin_HEXAGON_A2_vmaxub:
4218    ID = Intrinsic::hexagon_A2_vmaxub; break;
4219
4220  case Hexagon::BI__builtin_HEXAGON_A2_vminh:
4221    ID = Intrinsic::hexagon_A2_vminh; break;
4222
4223  case Hexagon::BI__builtin_HEXAGON_A2_vmaxh:
4224    ID = Intrinsic::hexagon_A2_vmaxh; break;
4225
4226  case Hexagon::BI__builtin_HEXAGON_A2_vminuh:
4227    ID = Intrinsic::hexagon_A2_vminuh; break;
4228
4229  case Hexagon::BI__builtin_HEXAGON_A2_vmaxuh:
4230    ID = Intrinsic::hexagon_A2_vmaxuh; break;
4231
4232  case Hexagon::BI__builtin_HEXAGON_A2_vminw:
4233    ID = Intrinsic::hexagon_A2_vminw; break;
4234
4235  case Hexagon::BI__builtin_HEXAGON_A2_vmaxw:
4236    ID = Intrinsic::hexagon_A2_vmaxw; break;
4237
4238  case Hexagon::BI__builtin_HEXAGON_A2_vminuw:
4239    ID = Intrinsic::hexagon_A2_vminuw; break;
4240
4241  case Hexagon::BI__builtin_HEXAGON_A2_vmaxuw:
4242    ID = Intrinsic::hexagon_A2_vmaxuw; break;
4243
4244  case Hexagon::BI__builtin_HEXAGON_A4_modwrapu:
4245    ID = Intrinsic::hexagon_A4_modwrapu; break;
4246
4247  case Hexagon::BI__builtin_HEXAGON_F2_sfadd:
4248    ID = Intrinsic::hexagon_F2_sfadd; break;
4249
4250  case Hexagon::BI__builtin_HEXAGON_F2_sfsub:
4251    ID = Intrinsic::hexagon_F2_sfsub; break;
4252
4253  case Hexagon::BI__builtin_HEXAGON_F2_sfmpy:
4254    ID = Intrinsic::hexagon_F2_sfmpy; break;
4255
4256  case Hexagon::BI__builtin_HEXAGON_F2_sffma:
4257    ID = Intrinsic::hexagon_F2_sffma; break;
4258
4259  case Hexagon::BI__builtin_HEXAGON_F2_sffma_sc:
4260    ID = Intrinsic::hexagon_F2_sffma_sc; break;
4261
4262  case Hexagon::BI__builtin_HEXAGON_F2_sffms:
4263    ID = Intrinsic::hexagon_F2_sffms; break;
4264
4265  case Hexagon::BI__builtin_HEXAGON_F2_sffma_lib:
4266    ID = Intrinsic::hexagon_F2_sffma_lib; break;
4267
4268  case Hexagon::BI__builtin_HEXAGON_F2_sffms_lib:
4269    ID = Intrinsic::hexagon_F2_sffms_lib; break;
4270
4271  case Hexagon::BI__builtin_HEXAGON_F2_sfcmpeq:
4272    ID = Intrinsic::hexagon_F2_sfcmpeq; break;
4273
4274  case Hexagon::BI__builtin_HEXAGON_F2_sfcmpgt:
4275    ID = Intrinsic::hexagon_F2_sfcmpgt; break;
4276
4277  case Hexagon::BI__builtin_HEXAGON_F2_sfcmpge:
4278    ID = Intrinsic::hexagon_F2_sfcmpge; break;
4279
4280  case Hexagon::BI__builtin_HEXAGON_F2_sfcmpuo:
4281    ID = Intrinsic::hexagon_F2_sfcmpuo; break;
4282
4283  case Hexagon::BI__builtin_HEXAGON_F2_sfmax:
4284    ID = Intrinsic::hexagon_F2_sfmax; break;
4285
4286  case Hexagon::BI__builtin_HEXAGON_F2_sfmin:
4287    ID = Intrinsic::hexagon_F2_sfmin; break;
4288
4289  case Hexagon::BI__builtin_HEXAGON_F2_sfclass:
4290    ID = Intrinsic::hexagon_F2_sfclass; break;
4291
4292  case Hexagon::BI__builtin_HEXAGON_F2_sfimm_p:
4293    ID = Intrinsic::hexagon_F2_sfimm_p; break;
4294
4295  case Hexagon::BI__builtin_HEXAGON_F2_sfimm_n:
4296    ID = Intrinsic::hexagon_F2_sfimm_n; break;
4297
4298  case Hexagon::BI__builtin_HEXAGON_F2_sffixupn:
4299    ID = Intrinsic::hexagon_F2_sffixupn; break;
4300
4301  case Hexagon::BI__builtin_HEXAGON_F2_sffixupd:
4302    ID = Intrinsic::hexagon_F2_sffixupd; break;
4303
4304  case Hexagon::BI__builtin_HEXAGON_F2_sffixupr:
4305    ID = Intrinsic::hexagon_F2_sffixupr; break;
4306
4307  case Hexagon::BI__builtin_HEXAGON_F2_dfadd:
4308    ID = Intrinsic::hexagon_F2_dfadd; break;
4309
4310  case Hexagon::BI__builtin_HEXAGON_F2_dfsub:
4311    ID = Intrinsic::hexagon_F2_dfsub; break;
4312
4313  case Hexagon::BI__builtin_HEXAGON_F2_dfmpy:
4314    ID = Intrinsic::hexagon_F2_dfmpy; break;
4315
4316  case Hexagon::BI__builtin_HEXAGON_F2_dffma:
4317    ID = Intrinsic::hexagon_F2_dffma; break;
4318
4319  case Hexagon::BI__builtin_HEXAGON_F2_dffms:
4320    ID = Intrinsic::hexagon_F2_dffms; break;
4321
4322  case Hexagon::BI__builtin_HEXAGON_F2_dffma_lib:
4323    ID = Intrinsic::hexagon_F2_dffma_lib; break;
4324
4325  case Hexagon::BI__builtin_HEXAGON_F2_dffms_lib:
4326    ID = Intrinsic::hexagon_F2_dffms_lib; break;
4327
4328  case Hexagon::BI__builtin_HEXAGON_F2_dffma_sc:
4329    ID = Intrinsic::hexagon_F2_dffma_sc; break;
4330
4331  case Hexagon::BI__builtin_HEXAGON_F2_dfmax:
4332    ID = Intrinsic::hexagon_F2_dfmax; break;
4333
4334  case Hexagon::BI__builtin_HEXAGON_F2_dfmin:
4335    ID = Intrinsic::hexagon_F2_dfmin; break;
4336
4337  case Hexagon::BI__builtin_HEXAGON_F2_dfcmpeq:
4338    ID = Intrinsic::hexagon_F2_dfcmpeq; break;
4339
4340  case Hexagon::BI__builtin_HEXAGON_F2_dfcmpgt:
4341    ID = Intrinsic::hexagon_F2_dfcmpgt; break;
4342
4343  case Hexagon::BI__builtin_HEXAGON_F2_dfcmpge:
4344    ID = Intrinsic::hexagon_F2_dfcmpge; break;
4345
4346  case Hexagon::BI__builtin_HEXAGON_F2_dfcmpuo:
4347    ID = Intrinsic::hexagon_F2_dfcmpuo; break;
4348
4349  case Hexagon::BI__builtin_HEXAGON_F2_dfclass:
4350    ID = Intrinsic::hexagon_F2_dfclass; break;
4351
4352  case Hexagon::BI__builtin_HEXAGON_F2_dfimm_p:
4353    ID = Intrinsic::hexagon_F2_dfimm_p; break;
4354
4355  case Hexagon::BI__builtin_HEXAGON_F2_dfimm_n:
4356    ID = Intrinsic::hexagon_F2_dfimm_n; break;
4357
4358  case Hexagon::BI__builtin_HEXAGON_F2_dffixupn:
4359    ID = Intrinsic::hexagon_F2_dffixupn; break;
4360
4361  case Hexagon::BI__builtin_HEXAGON_F2_dffixupd:
4362    ID = Intrinsic::hexagon_F2_dffixupd; break;
4363
4364  case Hexagon::BI__builtin_HEXAGON_F2_dffixupr:
4365    ID = Intrinsic::hexagon_F2_dffixupr; break;
4366
4367  case Hexagon::BI__builtin_HEXAGON_F2_conv_sf2df:
4368    ID = Intrinsic::hexagon_F2_conv_sf2df; break;
4369
4370  case Hexagon::BI__builtin_HEXAGON_F2_conv_df2sf:
4371    ID = Intrinsic::hexagon_F2_conv_df2sf; break;
4372
4373  case Hexagon::BI__builtin_HEXAGON_F2_conv_uw2sf:
4374    ID = Intrinsic::hexagon_F2_conv_uw2sf; break;
4375
4376  case Hexagon::BI__builtin_HEXAGON_F2_conv_uw2df:
4377    ID = Intrinsic::hexagon_F2_conv_uw2df; break;
4378
4379  case Hexagon::BI__builtin_HEXAGON_F2_conv_w2sf:
4380    ID = Intrinsic::hexagon_F2_conv_w2sf; break;
4381
4382  case Hexagon::BI__builtin_HEXAGON_F2_conv_w2df:
4383    ID = Intrinsic::hexagon_F2_conv_w2df; break;
4384
4385  case Hexagon::BI__builtin_HEXAGON_F2_conv_ud2sf:
4386    ID = Intrinsic::hexagon_F2_conv_ud2sf; break;
4387
4388  case Hexagon::BI__builtin_HEXAGON_F2_conv_ud2df:
4389    ID = Intrinsic::hexagon_F2_conv_ud2df; break;
4390
4391  case Hexagon::BI__builtin_HEXAGON_F2_conv_d2sf:
4392    ID = Intrinsic::hexagon_F2_conv_d2sf; break;
4393
4394  case Hexagon::BI__builtin_HEXAGON_F2_conv_d2df:
4395    ID = Intrinsic::hexagon_F2_conv_d2df; break;
4396
4397  case Hexagon::BI__builtin_HEXAGON_F2_conv_sf2uw:
4398    ID = Intrinsic::hexagon_F2_conv_sf2uw; break;
4399
4400  case Hexagon::BI__builtin_HEXAGON_F2_conv_sf2w:
4401    ID = Intrinsic::hexagon_F2_conv_sf2w; break;
4402
4403  case Hexagon::BI__builtin_HEXAGON_F2_conv_sf2ud:
4404    ID = Intrinsic::hexagon_F2_conv_sf2ud; break;
4405
4406  case Hexagon::BI__builtin_HEXAGON_F2_conv_sf2d:
4407    ID = Intrinsic::hexagon_F2_conv_sf2d; break;
4408
4409  case Hexagon::BI__builtin_HEXAGON_F2_conv_df2uw:
4410    ID = Intrinsic::hexagon_F2_conv_df2uw; break;
4411
4412  case Hexagon::BI__builtin_HEXAGON_F2_conv_df2w:
4413    ID = Intrinsic::hexagon_F2_conv_df2w; break;
4414
4415  case Hexagon::BI__builtin_HEXAGON_F2_conv_df2ud:
4416    ID = Intrinsic::hexagon_F2_conv_df2ud; break;
4417
4418  case Hexagon::BI__builtin_HEXAGON_F2_conv_df2d:
4419    ID = Intrinsic::hexagon_F2_conv_df2d; break;
4420
4421  case Hexagon::BI__builtin_HEXAGON_F2_conv_sf2uw_chop:
4422    ID = Intrinsic::hexagon_F2_conv_sf2uw_chop; break;
4423
4424  case Hexagon::BI__builtin_HEXAGON_F2_conv_sf2w_chop:
4425    ID = Intrinsic::hexagon_F2_conv_sf2w_chop; break;
4426
4427  case Hexagon::BI__builtin_HEXAGON_F2_conv_sf2ud_chop:
4428    ID = Intrinsic::hexagon_F2_conv_sf2ud_chop; break;
4429
4430  case Hexagon::BI__builtin_HEXAGON_F2_conv_sf2d_chop:
4431    ID = Intrinsic::hexagon_F2_conv_sf2d_chop; break;
4432
4433  case Hexagon::BI__builtin_HEXAGON_F2_conv_df2uw_chop:
4434    ID = Intrinsic::hexagon_F2_conv_df2uw_chop; break;
4435
4436  case Hexagon::BI__builtin_HEXAGON_F2_conv_df2w_chop:
4437    ID = Intrinsic::hexagon_F2_conv_df2w_chop; break;
4438
4439  case Hexagon::BI__builtin_HEXAGON_F2_conv_df2ud_chop:
4440    ID = Intrinsic::hexagon_F2_conv_df2ud_chop; break;
4441
4442  case Hexagon::BI__builtin_HEXAGON_F2_conv_df2d_chop:
4443    ID = Intrinsic::hexagon_F2_conv_df2d_chop; break;
4444
4445  case Hexagon::BI__builtin_HEXAGON_S2_asr_r_r:
4446    ID = Intrinsic::hexagon_S2_asr_r_r; break;
4447
4448  case Hexagon::BI__builtin_HEXAGON_S2_asl_r_r:
4449    ID = Intrinsic::hexagon_S2_asl_r_r; break;
4450
4451  case Hexagon::BI__builtin_HEXAGON_S2_lsr_r_r:
4452    ID = Intrinsic::hexagon_S2_lsr_r_r; break;
4453
4454  case Hexagon::BI__builtin_HEXAGON_S2_lsl_r_r:
4455    ID = Intrinsic::hexagon_S2_lsl_r_r; break;
4456
4457  case Hexagon::BI__builtin_HEXAGON_S2_asr_r_p:
4458    ID = Intrinsic::hexagon_S2_asr_r_p; break;
4459
4460  case Hexagon::BI__builtin_HEXAGON_S2_asl_r_p:
4461    ID = Intrinsic::hexagon_S2_asl_r_p; break;
4462
4463  case Hexagon::BI__builtin_HEXAGON_S2_lsr_r_p:
4464    ID = Intrinsic::hexagon_S2_lsr_r_p; break;
4465
4466  case Hexagon::BI__builtin_HEXAGON_S2_lsl_r_p:
4467    ID = Intrinsic::hexagon_S2_lsl_r_p; break;
4468
4469  case Hexagon::BI__builtin_HEXAGON_S2_asr_r_r_acc:
4470    ID = Intrinsic::hexagon_S2_asr_r_r_acc; break;
4471
4472  case Hexagon::BI__builtin_HEXAGON_S2_asl_r_r_acc:
4473    ID = Intrinsic::hexagon_S2_asl_r_r_acc; break;
4474
4475  case Hexagon::BI__builtin_HEXAGON_S2_lsr_r_r_acc:
4476    ID = Intrinsic::hexagon_S2_lsr_r_r_acc; break;
4477
4478  case Hexagon::BI__builtin_HEXAGON_S2_lsl_r_r_acc:
4479    ID = Intrinsic::hexagon_S2_lsl_r_r_acc; break;
4480
4481  case Hexagon::BI__builtin_HEXAGON_S2_asr_r_p_acc:
4482    ID = Intrinsic::hexagon_S2_asr_r_p_acc; break;
4483
4484  case Hexagon::BI__builtin_HEXAGON_S2_asl_r_p_acc:
4485    ID = Intrinsic::hexagon_S2_asl_r_p_acc; break;
4486
4487  case Hexagon::BI__builtin_HEXAGON_S2_lsr_r_p_acc:
4488    ID = Intrinsic::hexagon_S2_lsr_r_p_acc; break;
4489
4490  case Hexagon::BI__builtin_HEXAGON_S2_lsl_r_p_acc:
4491    ID = Intrinsic::hexagon_S2_lsl_r_p_acc; break;
4492
4493  case Hexagon::BI__builtin_HEXAGON_S2_asr_r_r_nac:
4494    ID = Intrinsic::hexagon_S2_asr_r_r_nac; break;
4495
4496  case Hexagon::BI__builtin_HEXAGON_S2_asl_r_r_nac:
4497    ID = Intrinsic::hexagon_S2_asl_r_r_nac; break;
4498
4499  case Hexagon::BI__builtin_HEXAGON_S2_lsr_r_r_nac:
4500    ID = Intrinsic::hexagon_S2_lsr_r_r_nac; break;
4501
4502  case Hexagon::BI__builtin_HEXAGON_S2_lsl_r_r_nac:
4503    ID = Intrinsic::hexagon_S2_lsl_r_r_nac; break;
4504
4505  case Hexagon::BI__builtin_HEXAGON_S2_asr_r_p_nac:
4506    ID = Intrinsic::hexagon_S2_asr_r_p_nac; break;
4507
4508  case Hexagon::BI__builtin_HEXAGON_S2_asl_r_p_nac:
4509    ID = Intrinsic::hexagon_S2_asl_r_p_nac; break;
4510
4511  case Hexagon::BI__builtin_HEXAGON_S2_lsr_r_p_nac:
4512    ID = Intrinsic::hexagon_S2_lsr_r_p_nac; break;
4513
4514  case Hexagon::BI__builtin_HEXAGON_S2_lsl_r_p_nac:
4515    ID = Intrinsic::hexagon_S2_lsl_r_p_nac; break;
4516
4517  case Hexagon::BI__builtin_HEXAGON_S2_asr_r_r_and:
4518    ID = Intrinsic::hexagon_S2_asr_r_r_and; break;
4519
4520  case Hexagon::BI__builtin_HEXAGON_S2_asl_r_r_and:
4521    ID = Intrinsic::hexagon_S2_asl_r_r_and; break;
4522
4523  case Hexagon::BI__builtin_HEXAGON_S2_lsr_r_r_and:
4524    ID = Intrinsic::hexagon_S2_lsr_r_r_and; break;
4525
4526  case Hexagon::BI__builtin_HEXAGON_S2_lsl_r_r_and:
4527    ID = Intrinsic::hexagon_S2_lsl_r_r_and; break;
4528
4529  case Hexagon::BI__builtin_HEXAGON_S2_asr_r_r_or:
4530    ID = Intrinsic::hexagon_S2_asr_r_r_or; break;
4531
4532  case Hexagon::BI__builtin_HEXAGON_S2_asl_r_r_or:
4533    ID = Intrinsic::hexagon_S2_asl_r_r_or; break;
4534
4535  case Hexagon::BI__builtin_HEXAGON_S2_lsr_r_r_or:
4536    ID = Intrinsic::hexagon_S2_lsr_r_r_or; break;
4537
4538  case Hexagon::BI__builtin_HEXAGON_S2_lsl_r_r_or:
4539    ID = Intrinsic::hexagon_S2_lsl_r_r_or; break;
4540
4541  case Hexagon::BI__builtin_HEXAGON_S2_asr_r_p_and:
4542    ID = Intrinsic::hexagon_S2_asr_r_p_and; break;
4543
4544  case Hexagon::BI__builtin_HEXAGON_S2_asl_r_p_and:
4545    ID = Intrinsic::hexagon_S2_asl_r_p_and; break;
4546
4547  case Hexagon::BI__builtin_HEXAGON_S2_lsr_r_p_and:
4548    ID = Intrinsic::hexagon_S2_lsr_r_p_and; break;
4549
4550  case Hexagon::BI__builtin_HEXAGON_S2_lsl_r_p_and:
4551    ID = Intrinsic::hexagon_S2_lsl_r_p_and; break;
4552
4553  case Hexagon::BI__builtin_HEXAGON_S2_asr_r_p_or:
4554    ID = Intrinsic::hexagon_S2_asr_r_p_or; break;
4555
4556  case Hexagon::BI__builtin_HEXAGON_S2_asl_r_p_or:
4557    ID = Intrinsic::hexagon_S2_asl_r_p_or; break;
4558
4559  case Hexagon::BI__builtin_HEXAGON_S2_lsr_r_p_or:
4560    ID = Intrinsic::hexagon_S2_lsr_r_p_or; break;
4561
4562  case Hexagon::BI__builtin_HEXAGON_S2_lsl_r_p_or:
4563    ID = Intrinsic::hexagon_S2_lsl_r_p_or; break;
4564
4565  case Hexagon::BI__builtin_HEXAGON_S2_asr_r_p_xor:
4566    ID = Intrinsic::hexagon_S2_asr_r_p_xor; break;
4567
4568  case Hexagon::BI__builtin_HEXAGON_S2_asl_r_p_xor:
4569    ID = Intrinsic::hexagon_S2_asl_r_p_xor; break;
4570
4571  case Hexagon::BI__builtin_HEXAGON_S2_lsr_r_p_xor:
4572    ID = Intrinsic::hexagon_S2_lsr_r_p_xor; break;
4573
4574  case Hexagon::BI__builtin_HEXAGON_S2_lsl_r_p_xor:
4575    ID = Intrinsic::hexagon_S2_lsl_r_p_xor; break;
4576
4577  case Hexagon::BI__builtin_HEXAGON_S2_asr_r_r_sat:
4578    ID = Intrinsic::hexagon_S2_asr_r_r_sat; break;
4579
4580  case Hexagon::BI__builtin_HEXAGON_S2_asl_r_r_sat:
4581    ID = Intrinsic::hexagon_S2_asl_r_r_sat; break;
4582
4583  case Hexagon::BI__builtin_HEXAGON_S2_asr_i_r:
4584    ID = Intrinsic::hexagon_S2_asr_i_r; break;
4585
4586  case Hexagon::BI__builtin_HEXAGON_S2_lsr_i_r:
4587    ID = Intrinsic::hexagon_S2_lsr_i_r; break;
4588
4589  case Hexagon::BI__builtin_HEXAGON_S2_asl_i_r:
4590    ID = Intrinsic::hexagon_S2_asl_i_r; break;
4591
4592  case Hexagon::BI__builtin_HEXAGON_S2_asr_i_p:
4593    ID = Intrinsic::hexagon_S2_asr_i_p; break;
4594
4595  case Hexagon::BI__builtin_HEXAGON_S2_lsr_i_p:
4596    ID = Intrinsic::hexagon_S2_lsr_i_p; break;
4597
4598  case Hexagon::BI__builtin_HEXAGON_S2_asl_i_p:
4599    ID = Intrinsic::hexagon_S2_asl_i_p; break;
4600
4601  case Hexagon::BI__builtin_HEXAGON_S2_asr_i_r_acc:
4602    ID = Intrinsic::hexagon_S2_asr_i_r_acc; break;
4603
4604  case Hexagon::BI__builtin_HEXAGON_S2_lsr_i_r_acc:
4605    ID = Intrinsic::hexagon_S2_lsr_i_r_acc; break;
4606
4607  case Hexagon::BI__builtin_HEXAGON_S2_asl_i_r_acc:
4608    ID = Intrinsic::hexagon_S2_asl_i_r_acc; break;
4609
4610  case Hexagon::BI__builtin_HEXAGON_S2_asr_i_p_acc:
4611    ID = Intrinsic::hexagon_S2_asr_i_p_acc; break;
4612
4613  case Hexagon::BI__builtin_HEXAGON_S2_lsr_i_p_acc:
4614    ID = Intrinsic::hexagon_S2_lsr_i_p_acc; break;
4615
4616  case Hexagon::BI__builtin_HEXAGON_S2_asl_i_p_acc:
4617    ID = Intrinsic::hexagon_S2_asl_i_p_acc; break;
4618
4619  case Hexagon::BI__builtin_HEXAGON_S2_asr_i_r_nac:
4620    ID = Intrinsic::hexagon_S2_asr_i_r_nac; break;
4621
4622  case Hexagon::BI__builtin_HEXAGON_S2_lsr_i_r_nac:
4623    ID = Intrinsic::hexagon_S2_lsr_i_r_nac; break;
4624
4625  case Hexagon::BI__builtin_HEXAGON_S2_asl_i_r_nac:
4626    ID = Intrinsic::hexagon_S2_asl_i_r_nac; break;
4627
4628  case Hexagon::BI__builtin_HEXAGON_S2_asr_i_p_nac:
4629    ID = Intrinsic::hexagon_S2_asr_i_p_nac; break;
4630
4631  case Hexagon::BI__builtin_HEXAGON_S2_lsr_i_p_nac:
4632    ID = Intrinsic::hexagon_S2_lsr_i_p_nac; break;
4633
4634  case Hexagon::BI__builtin_HEXAGON_S2_asl_i_p_nac:
4635    ID = Intrinsic::hexagon_S2_asl_i_p_nac; break;
4636
4637  case Hexagon::BI__builtin_HEXAGON_S2_lsr_i_r_xacc:
4638    ID = Intrinsic::hexagon_S2_lsr_i_r_xacc; break;
4639
4640  case Hexagon::BI__builtin_HEXAGON_S2_asl_i_r_xacc:
4641    ID = Intrinsic::hexagon_S2_asl_i_r_xacc; break;
4642
4643  case Hexagon::BI__builtin_HEXAGON_S2_lsr_i_p_xacc:
4644    ID = Intrinsic::hexagon_S2_lsr_i_p_xacc; break;
4645
4646  case Hexagon::BI__builtin_HEXAGON_S2_asl_i_p_xacc:
4647    ID = Intrinsic::hexagon_S2_asl_i_p_xacc; break;
4648
4649  case Hexagon::BI__builtin_HEXAGON_S2_asr_i_r_and:
4650    ID = Intrinsic::hexagon_S2_asr_i_r_and; break;
4651
4652  case Hexagon::BI__builtin_HEXAGON_S2_lsr_i_r_and:
4653    ID = Intrinsic::hexagon_S2_lsr_i_r_and; break;
4654
4655  case Hexagon::BI__builtin_HEXAGON_S2_asl_i_r_and:
4656    ID = Intrinsic::hexagon_S2_asl_i_r_and; break;
4657
4658  case Hexagon::BI__builtin_HEXAGON_S2_asr_i_r_or:
4659    ID = Intrinsic::hexagon_S2_asr_i_r_or; break;
4660
4661  case Hexagon::BI__builtin_HEXAGON_S2_lsr_i_r_or:
4662    ID = Intrinsic::hexagon_S2_lsr_i_r_or; break;
4663
4664  case Hexagon::BI__builtin_HEXAGON_S2_asl_i_r_or:
4665    ID = Intrinsic::hexagon_S2_asl_i_r_or; break;
4666
4667  case Hexagon::BI__builtin_HEXAGON_S2_asr_i_p_and:
4668    ID = Intrinsic::hexagon_S2_asr_i_p_and; break;
4669
4670  case Hexagon::BI__builtin_HEXAGON_S2_lsr_i_p_and:
4671    ID = Intrinsic::hexagon_S2_lsr_i_p_and; break;
4672
4673  case Hexagon::BI__builtin_HEXAGON_S2_asl_i_p_and:
4674    ID = Intrinsic::hexagon_S2_asl_i_p_and; break;
4675
4676  case Hexagon::BI__builtin_HEXAGON_S2_asr_i_p_or:
4677    ID = Intrinsic::hexagon_S2_asr_i_p_or; break;
4678
4679  case Hexagon::BI__builtin_HEXAGON_S2_lsr_i_p_or:
4680    ID = Intrinsic::hexagon_S2_lsr_i_p_or; break;
4681
4682  case Hexagon::BI__builtin_HEXAGON_S2_asl_i_p_or:
4683    ID = Intrinsic::hexagon_S2_asl_i_p_or; break;
4684
4685  case Hexagon::BI__builtin_HEXAGON_S2_asl_i_r_sat:
4686    ID = Intrinsic::hexagon_S2_asl_i_r_sat; break;
4687
4688  case Hexagon::BI__builtin_HEXAGON_S2_asr_i_r_rnd:
4689    ID = Intrinsic::hexagon_S2_asr_i_r_rnd; break;
4690
4691  case Hexagon::BI__builtin_HEXAGON_S2_asr_i_r_rnd_goodsyntax:
4692    ID = Intrinsic::hexagon_S2_asr_i_r_rnd_goodsyntax; break;
4693
4694  case Hexagon::BI__builtin_HEXAGON_S2_asr_i_p_rnd:
4695    ID = Intrinsic::hexagon_S2_asr_i_p_rnd; break;
4696
4697  case Hexagon::BI__builtin_HEXAGON_S2_asr_i_p_rnd_goodsyntax:
4698    ID = Intrinsic::hexagon_S2_asr_i_p_rnd_goodsyntax; break;
4699
4700  case Hexagon::BI__builtin_HEXAGON_S4_lsli:
4701    ID = Intrinsic::hexagon_S4_lsli; break;
4702
4703  case Hexagon::BI__builtin_HEXAGON_S2_addasl_rrri:
4704    ID = Intrinsic::hexagon_S2_addasl_rrri; break;
4705
4706  case Hexagon::BI__builtin_HEXAGON_S4_andi_asl_ri:
4707    ID = Intrinsic::hexagon_S4_andi_asl_ri; break;
4708
4709  case Hexagon::BI__builtin_HEXAGON_S4_ori_asl_ri:
4710    ID = Intrinsic::hexagon_S4_ori_asl_ri; break;
4711
4712  case Hexagon::BI__builtin_HEXAGON_S4_addi_asl_ri:
4713    ID = Intrinsic::hexagon_S4_addi_asl_ri; break;
4714
4715  case Hexagon::BI__builtin_HEXAGON_S4_subi_asl_ri:
4716    ID = Intrinsic::hexagon_S4_subi_asl_ri; break;
4717
4718  case Hexagon::BI__builtin_HEXAGON_S4_andi_lsr_ri:
4719    ID = Intrinsic::hexagon_S4_andi_lsr_ri; break;
4720
4721  case Hexagon::BI__builtin_HEXAGON_S4_ori_lsr_ri:
4722    ID = Intrinsic::hexagon_S4_ori_lsr_ri; break;
4723
4724  case Hexagon::BI__builtin_HEXAGON_S4_addi_lsr_ri:
4725    ID = Intrinsic::hexagon_S4_addi_lsr_ri; break;
4726
4727  case Hexagon::BI__builtin_HEXAGON_S4_subi_lsr_ri:
4728    ID = Intrinsic::hexagon_S4_subi_lsr_ri; break;
4729
4730  case Hexagon::BI__builtin_HEXAGON_S2_valignib:
4731    ID = Intrinsic::hexagon_S2_valignib; break;
4732
4733  case Hexagon::BI__builtin_HEXAGON_S2_valignrb:
4734    ID = Intrinsic::hexagon_S2_valignrb; break;
4735
4736  case Hexagon::BI__builtin_HEXAGON_S2_vspliceib:
4737    ID = Intrinsic::hexagon_S2_vspliceib; break;
4738
4739  case Hexagon::BI__builtin_HEXAGON_S2_vsplicerb:
4740    ID = Intrinsic::hexagon_S2_vsplicerb; break;
4741
4742  case Hexagon::BI__builtin_HEXAGON_S2_vsplatrh:
4743    ID = Intrinsic::hexagon_S2_vsplatrh; break;
4744
4745  case Hexagon::BI__builtin_HEXAGON_S2_vsplatrb:
4746    ID = Intrinsic::hexagon_S2_vsplatrb; break;
4747
4748  case Hexagon::BI__builtin_HEXAGON_S2_insert:
4749    ID = Intrinsic::hexagon_S2_insert; break;
4750
4751  case Hexagon::BI__builtin_HEXAGON_S2_tableidxb_goodsyntax:
4752    ID = Intrinsic::hexagon_S2_tableidxb_goodsyntax; break;
4753
4754  case Hexagon::BI__builtin_HEXAGON_S2_tableidxh_goodsyntax:
4755    ID = Intrinsic::hexagon_S2_tableidxh_goodsyntax; break;
4756
4757  case Hexagon::BI__builtin_HEXAGON_S2_tableidxw_goodsyntax:
4758    ID = Intrinsic::hexagon_S2_tableidxw_goodsyntax; break;
4759
4760  case Hexagon::BI__builtin_HEXAGON_S2_tableidxd_goodsyntax:
4761    ID = Intrinsic::hexagon_S2_tableidxd_goodsyntax; break;
4762
4763  case Hexagon::BI__builtin_HEXAGON_A4_bitspliti:
4764    ID = Intrinsic::hexagon_A4_bitspliti; break;
4765
4766  case Hexagon::BI__builtin_HEXAGON_A4_bitsplit:
4767    ID = Intrinsic::hexagon_A4_bitsplit; break;
4768
4769  case Hexagon::BI__builtin_HEXAGON_S4_extract:
4770    ID = Intrinsic::hexagon_S4_extract; break;
4771
4772  case Hexagon::BI__builtin_HEXAGON_S2_extractu:
4773    ID = Intrinsic::hexagon_S2_extractu; break;
4774
4775  case Hexagon::BI__builtin_HEXAGON_S2_insertp:
4776    ID = Intrinsic::hexagon_S2_insertp; break;
4777
4778  case Hexagon::BI__builtin_HEXAGON_S4_extractp:
4779    ID = Intrinsic::hexagon_S4_extractp; break;
4780
4781  case Hexagon::BI__builtin_HEXAGON_S2_extractup:
4782    ID = Intrinsic::hexagon_S2_extractup; break;
4783
4784  case Hexagon::BI__builtin_HEXAGON_S2_insert_rp:
4785    ID = Intrinsic::hexagon_S2_insert_rp; break;
4786
4787  case Hexagon::BI__builtin_HEXAGON_S4_extract_rp:
4788    ID = Intrinsic::hexagon_S4_extract_rp; break;
4789
4790  case Hexagon::BI__builtin_HEXAGON_S2_extractu_rp:
4791    ID = Intrinsic::hexagon_S2_extractu_rp; break;
4792
4793  case Hexagon::BI__builtin_HEXAGON_S2_insertp_rp:
4794    ID = Intrinsic::hexagon_S2_insertp_rp; break;
4795
4796  case Hexagon::BI__builtin_HEXAGON_S4_extractp_rp:
4797    ID = Intrinsic::hexagon_S4_extractp_rp; break;
4798
4799  case Hexagon::BI__builtin_HEXAGON_S2_extractup_rp:
4800    ID = Intrinsic::hexagon_S2_extractup_rp; break;
4801
4802  case Hexagon::BI__builtin_HEXAGON_S2_tstbit_i:
4803    ID = Intrinsic::hexagon_S2_tstbit_i; break;
4804
4805  case Hexagon::BI__builtin_HEXAGON_S4_ntstbit_i:
4806    ID = Intrinsic::hexagon_S4_ntstbit_i; break;
4807
4808  case Hexagon::BI__builtin_HEXAGON_S2_setbit_i:
4809    ID = Intrinsic::hexagon_S2_setbit_i; break;
4810
4811  case Hexagon::BI__builtin_HEXAGON_S2_togglebit_i:
4812    ID = Intrinsic::hexagon_S2_togglebit_i; break;
4813
4814  case Hexagon::BI__builtin_HEXAGON_S2_clrbit_i:
4815    ID = Intrinsic::hexagon_S2_clrbit_i; break;
4816
4817  case Hexagon::BI__builtin_HEXAGON_S2_tstbit_r:
4818    ID = Intrinsic::hexagon_S2_tstbit_r; break;
4819
4820  case Hexagon::BI__builtin_HEXAGON_S4_ntstbit_r:
4821    ID = Intrinsic::hexagon_S4_ntstbit_r; break;
4822
4823  case Hexagon::BI__builtin_HEXAGON_S2_setbit_r:
4824    ID = Intrinsic::hexagon_S2_setbit_r; break;
4825
4826  case Hexagon::BI__builtin_HEXAGON_S2_togglebit_r:
4827    ID = Intrinsic::hexagon_S2_togglebit_r; break;
4828
4829  case Hexagon::BI__builtin_HEXAGON_S2_clrbit_r:
4830    ID = Intrinsic::hexagon_S2_clrbit_r; break;
4831
4832  case Hexagon::BI__builtin_HEXAGON_S2_asr_i_vh:
4833    ID = Intrinsic::hexagon_S2_asr_i_vh; break;
4834
4835  case Hexagon::BI__builtin_HEXAGON_S2_lsr_i_vh:
4836    ID = Intrinsic::hexagon_S2_lsr_i_vh; break;
4837
4838  case Hexagon::BI__builtin_HEXAGON_S2_asl_i_vh:
4839    ID = Intrinsic::hexagon_S2_asl_i_vh; break;
4840
4841  case Hexagon::BI__builtin_HEXAGON_S2_asr_r_vh:
4842    ID = Intrinsic::hexagon_S2_asr_r_vh; break;
4843
4844  case Hexagon::BI__builtin_HEXAGON_S5_asrhub_rnd_sat_goodsyntax:
4845    ID = Intrinsic::hexagon_S5_asrhub_rnd_sat_goodsyntax; break;
4846
4847  case Hexagon::BI__builtin_HEXAGON_S5_asrhub_sat:
4848    ID = Intrinsic::hexagon_S5_asrhub_sat; break;
4849
4850  case Hexagon::BI__builtin_HEXAGON_S5_vasrhrnd_goodsyntax:
4851    ID = Intrinsic::hexagon_S5_vasrhrnd_goodsyntax; break;
4852
4853  case Hexagon::BI__builtin_HEXAGON_S2_asl_r_vh:
4854    ID = Intrinsic::hexagon_S2_asl_r_vh; break;
4855
4856  case Hexagon::BI__builtin_HEXAGON_S2_lsr_r_vh:
4857    ID = Intrinsic::hexagon_S2_lsr_r_vh; break;
4858
4859  case Hexagon::BI__builtin_HEXAGON_S2_lsl_r_vh:
4860    ID = Intrinsic::hexagon_S2_lsl_r_vh; break;
4861
4862  case Hexagon::BI__builtin_HEXAGON_S2_asr_i_vw:
4863    ID = Intrinsic::hexagon_S2_asr_i_vw; break;
4864
4865  case Hexagon::BI__builtin_HEXAGON_S2_asr_i_svw_trun:
4866    ID = Intrinsic::hexagon_S2_asr_i_svw_trun; break;
4867
4868  case Hexagon::BI__builtin_HEXAGON_S2_asr_r_svw_trun:
4869    ID = Intrinsic::hexagon_S2_asr_r_svw_trun; break;
4870
4871  case Hexagon::BI__builtin_HEXAGON_S2_lsr_i_vw:
4872    ID = Intrinsic::hexagon_S2_lsr_i_vw; break;
4873
4874  case Hexagon::BI__builtin_HEXAGON_S2_asl_i_vw:
4875    ID = Intrinsic::hexagon_S2_asl_i_vw; break;
4876
4877  case Hexagon::BI__builtin_HEXAGON_S2_asr_r_vw:
4878    ID = Intrinsic::hexagon_S2_asr_r_vw; break;
4879
4880  case Hexagon::BI__builtin_HEXAGON_S2_asl_r_vw:
4881    ID = Intrinsic::hexagon_S2_asl_r_vw; break;
4882
4883  case Hexagon::BI__builtin_HEXAGON_S2_lsr_r_vw:
4884    ID = Intrinsic::hexagon_S2_lsr_r_vw; break;
4885
4886  case Hexagon::BI__builtin_HEXAGON_S2_lsl_r_vw:
4887    ID = Intrinsic::hexagon_S2_lsl_r_vw; break;
4888
4889  case Hexagon::BI__builtin_HEXAGON_S2_vrndpackwh:
4890    ID = Intrinsic::hexagon_S2_vrndpackwh; break;
4891
4892  case Hexagon::BI__builtin_HEXAGON_S2_vrndpackwhs:
4893    ID = Intrinsic::hexagon_S2_vrndpackwhs; break;
4894
4895  case Hexagon::BI__builtin_HEXAGON_S2_vsxtbh:
4896    ID = Intrinsic::hexagon_S2_vsxtbh; break;
4897
4898  case Hexagon::BI__builtin_HEXAGON_S2_vzxtbh:
4899    ID = Intrinsic::hexagon_S2_vzxtbh; break;
4900
4901  case Hexagon::BI__builtin_HEXAGON_S2_vsathub:
4902    ID = Intrinsic::hexagon_S2_vsathub; break;
4903
4904  case Hexagon::BI__builtin_HEXAGON_S2_svsathub:
4905    ID = Intrinsic::hexagon_S2_svsathub; break;
4906
4907  case Hexagon::BI__builtin_HEXAGON_S2_svsathb:
4908    ID = Intrinsic::hexagon_S2_svsathb; break;
4909
4910  case Hexagon::BI__builtin_HEXAGON_S2_vsathb:
4911    ID = Intrinsic::hexagon_S2_vsathb; break;
4912
4913  case Hexagon::BI__builtin_HEXAGON_S2_vtrunohb:
4914    ID = Intrinsic::hexagon_S2_vtrunohb; break;
4915
4916  case Hexagon::BI__builtin_HEXAGON_S2_vtrunewh:
4917    ID = Intrinsic::hexagon_S2_vtrunewh; break;
4918
4919  case Hexagon::BI__builtin_HEXAGON_S2_vtrunowh:
4920    ID = Intrinsic::hexagon_S2_vtrunowh; break;
4921
4922  case Hexagon::BI__builtin_HEXAGON_S2_vtrunehb:
4923    ID = Intrinsic::hexagon_S2_vtrunehb; break;
4924
4925  case Hexagon::BI__builtin_HEXAGON_S2_vsxthw:
4926    ID = Intrinsic::hexagon_S2_vsxthw; break;
4927
4928  case Hexagon::BI__builtin_HEXAGON_S2_vzxthw:
4929    ID = Intrinsic::hexagon_S2_vzxthw; break;
4930
4931  case Hexagon::BI__builtin_HEXAGON_S2_vsatwh:
4932    ID = Intrinsic::hexagon_S2_vsatwh; break;
4933
4934  case Hexagon::BI__builtin_HEXAGON_S2_vsatwuh:
4935    ID = Intrinsic::hexagon_S2_vsatwuh; break;
4936
4937  case Hexagon::BI__builtin_HEXAGON_S2_packhl:
4938    ID = Intrinsic::hexagon_S2_packhl; break;
4939
4940  case Hexagon::BI__builtin_HEXAGON_A2_swiz:
4941    ID = Intrinsic::hexagon_A2_swiz; break;
4942
4943  case Hexagon::BI__builtin_HEXAGON_S2_vsathub_nopack:
4944    ID = Intrinsic::hexagon_S2_vsathub_nopack; break;
4945
4946  case Hexagon::BI__builtin_HEXAGON_S2_vsathb_nopack:
4947    ID = Intrinsic::hexagon_S2_vsathb_nopack; break;
4948
4949  case Hexagon::BI__builtin_HEXAGON_S2_vsatwh_nopack:
4950    ID = Intrinsic::hexagon_S2_vsatwh_nopack; break;
4951
4952  case Hexagon::BI__builtin_HEXAGON_S2_vsatwuh_nopack:
4953    ID = Intrinsic::hexagon_S2_vsatwuh_nopack; break;
4954
4955  case Hexagon::BI__builtin_HEXAGON_S2_shuffob:
4956    ID = Intrinsic::hexagon_S2_shuffob; break;
4957
4958  case Hexagon::BI__builtin_HEXAGON_S2_shuffeb:
4959    ID = Intrinsic::hexagon_S2_shuffeb; break;
4960
4961  case Hexagon::BI__builtin_HEXAGON_S2_shuffoh:
4962    ID = Intrinsic::hexagon_S2_shuffoh; break;
4963
4964  case Hexagon::BI__builtin_HEXAGON_S2_shuffeh:
4965    ID = Intrinsic::hexagon_S2_shuffeh; break;
4966
4967  case Hexagon::BI__builtin_HEXAGON_S5_popcountp:
4968    ID = Intrinsic::hexagon_S5_popcountp; break;
4969
4970  case Hexagon::BI__builtin_HEXAGON_S4_parity:
4971    ID = Intrinsic::hexagon_S4_parity; break;
4972
4973  case Hexagon::BI__builtin_HEXAGON_S2_parityp:
4974    ID = Intrinsic::hexagon_S2_parityp; break;
4975
4976  case Hexagon::BI__builtin_HEXAGON_S2_lfsp:
4977    ID = Intrinsic::hexagon_S2_lfsp; break;
4978
4979  case Hexagon::BI__builtin_HEXAGON_S2_clbnorm:
4980    ID = Intrinsic::hexagon_S2_clbnorm; break;
4981
4982  case Hexagon::BI__builtin_HEXAGON_S4_clbaddi:
4983    ID = Intrinsic::hexagon_S4_clbaddi; break;
4984
4985  case Hexagon::BI__builtin_HEXAGON_S4_clbpnorm:
4986    ID = Intrinsic::hexagon_S4_clbpnorm; break;
4987
4988  case Hexagon::BI__builtin_HEXAGON_S4_clbpaddi:
4989    ID = Intrinsic::hexagon_S4_clbpaddi; break;
4990
4991  case Hexagon::BI__builtin_HEXAGON_S2_clb:
4992    ID = Intrinsic::hexagon_S2_clb; break;
4993
4994  case Hexagon::BI__builtin_HEXAGON_S2_cl0:
4995    ID = Intrinsic::hexagon_S2_cl0; break;
4996
4997  case Hexagon::BI__builtin_HEXAGON_S2_cl1:
4998    ID = Intrinsic::hexagon_S2_cl1; break;
4999
5000  case Hexagon::BI__builtin_HEXAGON_S2_clbp:
5001    ID = Intrinsic::hexagon_S2_clbp; break;
5002
5003  case Hexagon::BI__builtin_HEXAGON_S2_cl0p:
5004    ID = Intrinsic::hexagon_S2_cl0p; break;
5005
5006  case Hexagon::BI__builtin_HEXAGON_S2_cl1p:
5007    ID = Intrinsic::hexagon_S2_cl1p; break;
5008
5009  case Hexagon::BI__builtin_HEXAGON_S2_brev:
5010    ID = Intrinsic::hexagon_S2_brev; break;
5011
5012  case Hexagon::BI__builtin_HEXAGON_S2_brevp:
5013    ID = Intrinsic::hexagon_S2_brevp; break;
5014
5015  case Hexagon::BI__builtin_HEXAGON_S2_ct0:
5016    ID = Intrinsic::hexagon_S2_ct0; break;
5017
5018  case Hexagon::BI__builtin_HEXAGON_S2_ct1:
5019    ID = Intrinsic::hexagon_S2_ct1; break;
5020
5021  case Hexagon::BI__builtin_HEXAGON_S2_ct0p:
5022    ID = Intrinsic::hexagon_S2_ct0p; break;
5023
5024  case Hexagon::BI__builtin_HEXAGON_S2_ct1p:
5025    ID = Intrinsic::hexagon_S2_ct1p; break;
5026
5027  case Hexagon::BI__builtin_HEXAGON_S2_interleave:
5028    ID = Intrinsic::hexagon_S2_interleave; break;
5029
5030  case Hexagon::BI__builtin_HEXAGON_S2_deinterleave:
5031    ID = Intrinsic::hexagon_S2_deinterleave; break;
5032  }
5033
5034  llvm::Function *F = CGM.getIntrinsic(ID);
5035  return Builder.CreateCall(F, Ops, "");
5036}
5037
5038Value *CodeGenFunction::EmitPPCBuiltinExpr(unsigned BuiltinID,
5039                                           const CallExpr *E) {
5040  SmallVector<Value*, 4> Ops;
5041
5042  for (unsigned i = 0, e = E->getNumArgs(); i != e; i++)
5043    Ops.push_back(EmitScalarExpr(E->getArg(i)));
5044
5045  Intrinsic::ID ID = Intrinsic::not_intrinsic;
5046
5047  switch (BuiltinID) {
5048  default: return 0;
5049
5050  // vec_ld, vec_lvsl, vec_lvsr
5051  case PPC::BI__builtin_altivec_lvx:
5052  case PPC::BI__builtin_altivec_lvxl:
5053  case PPC::BI__builtin_altivec_lvebx:
5054  case PPC::BI__builtin_altivec_lvehx:
5055  case PPC::BI__builtin_altivec_lvewx:
5056  case PPC::BI__builtin_altivec_lvsl:
5057  case PPC::BI__builtin_altivec_lvsr:
5058  {
5059    Ops[1] = Builder.CreateBitCast(Ops[1], Int8PtrTy);
5060
5061    Ops[0] = Builder.CreateGEP(Ops[1], Ops[0]);
5062    Ops.pop_back();
5063
5064    switch (BuiltinID) {
5065    default: llvm_unreachable("Unsupported ld/lvsl/lvsr intrinsic!");
5066    case PPC::BI__builtin_altivec_lvx:
5067      ID = Intrinsic::ppc_altivec_lvx;
5068      break;
5069    case PPC::BI__builtin_altivec_lvxl:
5070      ID = Intrinsic::ppc_altivec_lvxl;
5071      break;
5072    case PPC::BI__builtin_altivec_lvebx:
5073      ID = Intrinsic::ppc_altivec_lvebx;
5074      break;
5075    case PPC::BI__builtin_altivec_lvehx:
5076      ID = Intrinsic::ppc_altivec_lvehx;
5077      break;
5078    case PPC::BI__builtin_altivec_lvewx:
5079      ID = Intrinsic::ppc_altivec_lvewx;
5080      break;
5081    case PPC::BI__builtin_altivec_lvsl:
5082      ID = Intrinsic::ppc_altivec_lvsl;
5083      break;
5084    case PPC::BI__builtin_altivec_lvsr:
5085      ID = Intrinsic::ppc_altivec_lvsr;
5086      break;
5087    }
5088    llvm::Function *F = CGM.getIntrinsic(ID);
5089    return Builder.CreateCall(F, Ops, "");
5090  }
5091
5092  // vec_st
5093  case PPC::BI__builtin_altivec_stvx:
5094  case PPC::BI__builtin_altivec_stvxl:
5095  case PPC::BI__builtin_altivec_stvebx:
5096  case PPC::BI__builtin_altivec_stvehx:
5097  case PPC::BI__builtin_altivec_stvewx:
5098  {
5099    Ops[2] = Builder.CreateBitCast(Ops[2], Int8PtrTy);
5100    Ops[1] = Builder.CreateGEP(Ops[2], Ops[1]);
5101    Ops.pop_back();
5102
5103    switch (BuiltinID) {
5104    default: llvm_unreachable("Unsupported st intrinsic!");
5105    case PPC::BI__builtin_altivec_stvx:
5106      ID = Intrinsic::ppc_altivec_stvx;
5107      break;
5108    case PPC::BI__builtin_altivec_stvxl:
5109      ID = Intrinsic::ppc_altivec_stvxl;
5110      break;
5111    case PPC::BI__builtin_altivec_stvebx:
5112      ID = Intrinsic::ppc_altivec_stvebx;
5113      break;
5114    case PPC::BI__builtin_altivec_stvehx:
5115      ID = Intrinsic::ppc_altivec_stvehx;
5116      break;
5117    case PPC::BI__builtin_altivec_stvewx:
5118      ID = Intrinsic::ppc_altivec_stvewx;
5119      break;
5120    }
5121    llvm::Function *F = CGM.getIntrinsic(ID);
5122    return Builder.CreateCall(F, Ops, "");
5123  }
5124  }
5125}
5126