/external/llvm/lib/IR/
  Operator.cpp
    29  ConstantInt *OpC = dyn_cast<ConstantInt>(GTI.getOperand());
    30  if (!OpC)
    32  if (OpC->isZero())
    37  unsigned ElementIdx = OpC->getZExtValue();
    44  APInt Index = OpC->getValue().sextOrTrunc(Offset.getBitWidth());

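The Operator.cpp lines above, like the similar hits in Local.h, InlineCost.cpp, SROA.cpp, MemCpyOptimizer.cpp and ValueTracking.cpp further down, follow one idiom: walk a GEP's indices and fold the constant ones into a byte offset. Below is a minimal sketch of that idiom written against a recent LLVM C++ API; the helper name accumulateConstantGEPOffset and details such as getStructTypeOrNull() are illustrative assumptions, not the exact code at these revisions.

    // Sketch only: fold all-constant GEP indices into a byte offset.
    // Returns false as soon as a non-constant index is seen.
    #include "llvm/ADT/APInt.h"
    #include "llvm/IR/Constants.h"
    #include "llvm/IR/DataLayout.h"
    #include "llvm/IR/GetElementPtrTypeIterator.h"
    #include "llvm/IR/Operator.h"
    using namespace llvm;

    static bool accumulateConstantGEPOffset(const GEPOperator *GEP,
                                            const DataLayout &DL,
                                            APInt &Offset) {
      for (auto GTI = gep_type_begin(GEP), GTE = gep_type_end(GEP);
           GTI != GTE; ++GTI) {
        ConstantInt *OpC = dyn_cast<ConstantInt>(GTI.getOperand());
        if (!OpC)
          return false;                       // Non-constant index: give up.
        if (OpC->isZero())
          continue;                           // Zero index adds nothing.

        if (StructType *STy = GTI.getStructTypeOrNull()) {
          // Struct field: the byte offset comes from the struct layout.
          unsigned ElementIdx = OpC->getZExtValue();
          Offset += DL.getStructLayout(STy)->getElementOffset(ElementIdx);
        } else {
          // Array/pointer step: index * allocation size of the element type.
          APInt Index = OpC->getValue().sextOrTrunc(Offset.getBitWidth());
          APInt ElemSize(Offset.getBitWidth(),
                         DL.getTypeAllocSize(GTI.getIndexedType()));
          Offset += Index * ElemSize;
        }
      }
      return true;
    }
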
/external/llvm/lib/Target/AMDGPU/
  AMDGPUAnnotateKernelFeatures.cpp
    102  const auto *OpC = dyn_cast<Constant>(U);
    103  if (!OpC)
    106  if (!ConstantExprVisited.insert(OpC).second)
    109  Stack.push_back(OpC);
    128  const auto *OpC = dyn_cast<Constant>(U);
    129  if (!OpC)
    132  if (visitConstantExprsRecursively(OpC, ConstantExprVisited))

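The AMDGPUAnnotateKernelFeatures lines belong to a depth-first walk over constant expressions that uses a visited set so shared sub-expressions are processed only once. A hedged sketch of that traversal shape follows; the Visit callback parameter is an assumption standing in for whatever check the real pass performs on each constant.

    // Sketch only: visit every Constant reachable from EntryC exactly once,
    // using an explicit stack rather than recursing per operand.
    #include "llvm/ADT/SmallPtrSet.h"
    #include "llvm/ADT/SmallVector.h"
    #include "llvm/IR/Constants.h"
    using namespace llvm;

    static bool visitConstantExprsRecursively(
        const Constant *EntryC,
        SmallPtrSetImpl<const Constant *> &ConstantExprVisited,
        bool (*Visit)(const Constant *)) {
      if (!ConstantExprVisited.insert(EntryC).second)
        return false;                         // Already seen this constant.

      SmallVector<const Constant *, 16> Stack;
      Stack.push_back(EntryC);
      while (!Stack.empty()) {
        const Constant *C = Stack.pop_back_val();
        if (Visit(C))
          return true;                        // The caller's check fired.

        // Queue every not-yet-visited constant operand.
        for (const Use &U : C->operands()) {
          const auto *OpC = dyn_cast<Constant>(U);
          if (!OpC)
            continue;
          if (!ConstantExprVisited.insert(OpC).second)
            continue;
          Stack.push_back(OpC);
        }
      }
      return false;
    }
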
/external/llvm/include/llvm/Transforms/Utils/
  Local.h
    207  if (Constant *OpC = dyn_cast<Constant>(Op)) {
    208    if (OpC->isZeroValue())
    213    if (OpC->getType()->isVectorTy())
    214      OpC = OpC->getSplatValue();
    216    uint64_t OpValue = cast<ConstantInt>(OpC)->getZExtValue();
    226    Constant *OC = ConstantExpr::getIntegerCast(OpC, IntPtrTy, true /*SExt*/);

/external/swiftshader/third_party/LLVM/include/llvm/
  Operator.h
    147  static bool isPossiblyExactOpcode(unsigned OpC) {
    148    return OpC == Instruction::SDiv ||
    149           OpC == Instruction::UDiv ||
    150           OpC == Instruction::AShr ||
    151           OpC == Instruction::LShr;

/external/llvm/lib/Target/PowerPC/
  PPCRegisterInfo.cpp
    747  unsigned OpC = MI.getOpcode();
    749  switch (OpC) {
    803  unsigned OpC = MI.getOpcode();
    805  if ((OpC == PPC::DYNAREAOFFSET || OpC == PPC::DYNAREAOFFSET8)) {
    812  (OpC == PPC::DYNALLOC || OpC == PPC::DYNALLOC8)) {
    818  if (OpC == PPC::SPILL_CR) {
    821  } else if (OpC == PPC::RESTORE_CR) {
    824  } else if (OpC [all...]
  PPCInstrInfo.cpp
    1328  unsigned OpC = MI.getOpcode();
    1329  if (OpC == PPC::BLR || OpC == PPC::BLR8) {
    1350  } else if (OpC == PPC::B) {
    1383  } else if (OpC == PPC::BCTR || OpC == PPC::BCTR8 ||
    1384             OpC == PPC::BCTRL || OpC == PPC::BCTRL8) {
    1388  bool setLR = OpC == PPC::BCTRL || OpC [all...]

/external/llvm/include/llvm/IR/
  Operator.h
    145  static bool isPossiblyExactOpcode(unsigned OpC) {
    146    return OpC == Instruction::SDiv ||
    147           OpC == Instruction::UDiv ||
    148           OpC == Instruction::AShr ||
    149           OpC == Instruction::LShr;

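Both copies of Operator.h carry the same helper on PossiblyExactOperator: only sdiv, udiv, ashr and lshr may carry the exact flag. A trivial usage sketch; the canBeExact wrapper is hypothetical, only the isPossiblyExactOpcode call comes from the header.

    #include "llvm/IR/Instruction.h"
    #include "llvm/IR/Operator.h"
    using namespace llvm;

    // Sketch only: gate a transform on opcodes that can be marked 'exact'.
    static bool canBeExact(const Instruction &I) {
      return PossiblyExactOperator::isPossiblyExactOpcode(I.getOpcode());
    }
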
/external/llvm/lib/Transforms/Utils/
  SimplifyLibCalls.cpp
    1020  auto *OpC = dyn_cast<CallInst>(Op1);
    1021  if (OpC && OpC->hasUnsafeAlgebra() && CI->hasUnsafeAlgebra()) {
    1023  Function *OpCCallee = OpC->getCalledFunction();
    1028  Value *FMul = B.CreateFMul(OpC->getArgOperand(0), Op2, "mul");
    1124  if (SIToFPInst *OpC = dyn_cast<SIToFPInst>(Op)) {
    1125    if (OpC->getOperand(0)->getType()->getPrimitiveSizeInBits() <= 32)
    1126      LdExpArg = B.CreateSExt(OpC->getOperand(0), B.getInt32Ty());
    1127  } else if (UIToFPInst *OpC = dyn_cast<UIToFPInst>(Op)) {
    1128    if (OpC [all...]

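The second group of hits (lines 1124-1128) feeds the exp2-to-ldexp rewrite: when exp2()'s operand is an integer that was converted to floating point and fits in 32 bits, it is extended to i32 so the call can be emitted in terms of ldexp(1.0, n). The older copy under the Scalar/SimplifyLibCalls.cpp entry below shows the unsigned branch in full; the wrapper here is a hypothetical packaging of just that cast-and-extend step.

    #include "llvm/IR/IRBuilder.h"
    #include "llvm/IR/Instructions.h"
    using namespace llvm;

    // Sketch only: return Op's integer source widened to i32, or null if Op
    // is not an int-to-float conversion of a value that fits in 32 bits.
    static Value *getLdExpIntArg(Value *Op, IRBuilder<> &B) {
      Value *LdExpArg = nullptr;
      if (SIToFPInst *OpC = dyn_cast<SIToFPInst>(Op)) {
        if (OpC->getOperand(0)->getType()->getPrimitiveSizeInBits() <= 32)
          LdExpArg = B.CreateSExt(OpC->getOperand(0), B.getInt32Ty());  // signed source
      } else if (UIToFPInst *OpC = dyn_cast<UIToFPInst>(Op)) {
        if (OpC->getOperand(0)->getType()->getPrimitiveSizeInBits() < 32)
          LdExpArg = B.CreateZExt(OpC->getOperand(0), B.getInt32Ty());  // unsigned source
      }
      return LdExpArg;
    }
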
/external/swiftshader/third_party/LLVM/lib/Target/PowerPC/
  PPCRegisterInfo.cpp
    529  unsigned OpC = MI.getOpcode();
    533  (OpC == PPC::DYNALLOC || OpC == PPC::DYNALLOC8)) {
    540  if (OpC == PPC::SPILL_CR) {
    554  switch (OpC) {
    615  if (OpC != TargetOpcode::INLINEASM) {
    616  assert(ImmToIdxMap.count(OpC) &&
    618  unsigned NewOpcode = ImmToIdxMap.find(OpC)->second;
  PPCISelDAGToDAG.cpp
    783  unsigned OpC = Hi ? PPC::LIS8 : PPC::LI8;
    784  Result = CurDAG->getMachineNode(OpC, dl, MVT::i64, getI32Imm(Hi));

/external/swiftshader/third_party/LLVM/lib/Transforms/IPO/
  GlobalDCE.cpp
    197  if (Constant *OpC = dyn_cast<Constant>(*I))
    198    MarkUsedGlobalsAsNeeded(OpC);

/external/swiftshader/third_party/LLVM/lib/Transforms/InstCombine/
  InstCombineSimplifyDemanded.cpp
    33  ConstantInt *OpC = dyn_cast<ConstantInt>(I->getOperand(OpNo));
    34  if (!OpC) return false;
    37  Demanded = Demanded.zextOrTrunc(OpC->getValue().getBitWidth());
    38  if ((~Demanded & OpC->getValue()) == 0)
    42  Demanded &= OpC->getValue();
    43  I->setOperand(OpNo, ConstantInt::get(OpC->getType(), Demanded));
  InstCombineAddSub.cpp
    419  if (ConstantInt *OpC = dyn_cast<ConstantInt>(Op)) {
    420    if (OpC->isZero()) continue;
    424    Size = TD.getStructLayout(STy)->getElementOffset(OpC->getZExtValue());
    434    ConstantExpr::getIntegerCast(OpC, IntPtrTy, true /*SExt*/);

/external/llvm/lib/Analysis/
  InlineCost.cpp
    313  ConstantInt *OpC = dyn_cast<ConstantInt>(GTI.getOperand());
    314  if (!OpC)
    316  OpC = dyn_cast<ConstantInt>(SimpleOp);
    317  if (!OpC)
    319  if (OpC->isZero())
    324  unsigned ElementIdx = OpC->getZExtValue();
    331  Offset += OpC->getValue().sextOrTrunc(IntPtrWidth) * TypeSize;
  ValueTracking.cpp
    1613  ConstantInt *OpC = cast<ConstantInt>(GTI.getOperand());
    1614  unsigned ElementIdx = OpC->getZExtValue();
    1628  if (ConstantInt *OpC = dyn_cast<ConstantInt>(GTI.getOperand())) {
    1629    if (!OpC->isZero())

/external/swiftshader/third_party/LLVM/lib/Transforms/Scalar/
  MemCpyOptimizer.cpp
    51  ConstantInt *OpC = dyn_cast<ConstantInt>(GEP->getOperand(i));
    52  if (OpC == 0)
    54  if (OpC->isZero()) continue;  // No offset.
    58  Offset += TD.getStructLayout(STy)->getElementOffset(OpC->getZExtValue());
    65  Offset += Size*OpC->getSExtValue();
  SimplifyLibCalls.cpp
    914  if (SIToFPInst *OpC = dyn_cast<SIToFPInst>(Op)) {
    915    if (OpC->getOperand(0)->getType()->getPrimitiveSizeInBits() <= 32)
    916      LdExpArg = B.CreateSExt(OpC->getOperand(0), B.getInt32Ty());
    917  } else if (UIToFPInst *OpC = dyn_cast<UIToFPInst>(Op)) {
    918    if (OpC->getOperand(0)->getType()->getPrimitiveSizeInBits() < 32)
    919      LdExpArg = B.CreateZExt(OpC->getOperand(0), B.getInt32Ty());

/external/llvm/lib/Transforms/InstCombine/
  InstCombineSimplifyDemanded.cpp
    34  ConstantInt *OpC = dyn_cast<ConstantInt>(I->getOperand(OpNo));
    35  if (!OpC) return false;
    38  Demanded = Demanded.zextOrTrunc(OpC->getValue().getBitWidth());
    39  if ((~Demanded & OpC->getValue()) == 0)
    43  Demanded &= OpC->getValue();
    44  I->setOperand(OpNo, ConstantInt::get(OpC->getType(), Demanded));

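Both InstCombine copies appear to show the body of ShrinkDemandedConstant: when an instruction demands only some bits of a constant operand, the constant can be replaced by its intersection with the demanded mask. Below is a hedged reconstruction assembled from the lines above; whatever sits on the elided line numbers in between is not reproduced.

    #include "llvm/ADT/APInt.h"
    #include "llvm/IR/Constants.h"
    #include "llvm/IR/Instruction.h"
    using namespace llvm;

    // Sketch only: shrink a ConstantInt operand down to its demanded bits.
    // Returns true if the operand was actually replaced.
    static bool shrinkDemandedConstant(Instruction *I, unsigned OpNo,
                                       APInt Demanded) {
      ConstantInt *OpC = dyn_cast<ConstantInt>(I->getOperand(OpNo));
      if (!OpC)
        return false;

      // Compare at the constant's own bit width.
      Demanded = Demanded.zextOrTrunc(OpC->getValue().getBitWidth());
      if ((~Demanded & OpC->getValue()) == 0)
        return false;                 // The constant already fits the mask.

      // Keep only the demanded bits and swap the operand in place.
      Demanded &= OpC->getValue();
      I->setOperand(OpNo, ConstantInt::get(OpC->getType(), Demanded));
      return true;
    }
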
/external/llvm/lib/Target/Hexagon/
  HexagonStoreWidening.cpp
    114  unsigned OpC = MI->getOpcode();
    117  switch (OpC) {

/external/llvm/lib/Transforms/Scalar/
  MemCpyOptimizer.cpp
    49  ConstantInt *OpC = dyn_cast<ConstantInt>(GEP->getOperand(i));
    50  if (!OpC)
    52  if (OpC->isZero()) continue;  // No offset.
    56  Offset += DL.getStructLayout(STy)->getElementOffset(OpC->getZExtValue());
    63  Offset += Size*OpC->getSExtValue();
  SROA.cpp
    691  ConstantInt *OpC = dyn_cast<ConstantInt>(GTI.getOperand());
    692  if (!OpC)
    697  unsigned ElementIdx = OpC->getZExtValue();
    704  APInt Index = OpC->getValue().sextOrTrunc(Offset.getBitWidth());

/external/swiftshader/third_party/LLVM/lib/Analysis/
  ValueTracking.cpp
    1502  ConstantInt *OpC = cast<ConstantInt>(*I);
    1503  if (OpC->isZero()) continue;
    1507  Offset += TD.getStructLayout(STy)->getElementOffset(OpC->getZExtValue());
    1510  Offset += OpC->getSExtValue()*Size;
  ConstantFolding.cpp
    968  unsigned OpC =
    971  return ConstantFoldInstOperands(OpC, LHS->getType(), Ops, TD);

/external/swiftshader/third_party/LLVM/lib/Target/X86/
  X86FastISel.cpp
    1459  unsigned OpC = 0;
    1461  OpC = X86::ADD32rr;
    1463  OpC = X86::ADD64rr;
    1470  BuildMI(*FuncInfo.MBB, FuncInfo.InsertPt, DL, TII.get(OpC), ResultReg)

/external/llvm/lib/CodeGen/
  TargetInstrInfo.cpp
    699  MachineOperand &OpC = Root.getOperand(0);
    705  unsigned RegC = OpC.getReg();