LLVM: lib/CodeGen/GlobalISel/CombinerHelper.cpp File Reference
Functions | |
---|---|
static unsigned | littleEndianByteAt (const unsigned ByteWidth, const unsigned I) |
static Register | buildLogBase2 (Register V, MachineIRBuilder &MIB) |
Determines the LogBase2 value for a non-null input value using the transform: LogBase2(V) = (EltBits - 1) - ctlz(V) (see the sketch after this table). | |
static unsigned | bigEndianByteAt (const unsigned ByteWidth, const unsigned I) |
static std::optional< bool > | isBigEndian (const SmallDenseMap< int64_t, int64_t, 8 > &MemOffset2Idx, int64_t LowestIdx) |
Given a map from byte offsets in memory to indices in a load/store, determine if that map corresponds to a little or big endian byte pattern. | |
static unsigned | getExtLoadOpcForExtend (unsigned ExtOpc) |
static bool | canFoldInAddressingMode (GLoadStore *MI, const TargetLowering &TLI, MachineRegisterInfo &MRI) |
Return true if 'MI' is a load or a store whose address operand may be folded into the load/store addressing mode. | |
static unsigned | getIndexedOpc (unsigned LdStOpc) |
static APFloat | constantFoldFpUnary (const MachineInstr &MI, const MachineRegisterInfo &MRI, const APFloat &Val) |
static Register | peekThroughBitcast (Register Reg, const MachineRegisterInfo &MRI) |
static LLT | getMidVTForTruncRightShiftCombine (LLT ShiftTy, LLT TruncTy) |
static bool | isConstValidTrue (const TargetLowering &TLI, unsigned ScalarSizeBits, int64_t Cst, bool IsVector, bool IsFP) |
static std::optional< std::pair< GZExtLoad *, int64_t > > | matchLoadAndBytePosition (Register Reg, unsigned MemSizeInBits, const MachineRegisterInfo &MRI) |
Helper function for findLoadOffsetsForLoadOrCombine. | |
static bool | isContractableFMul (MachineInstr &MI, bool AllowFusionGlobally) |
Checks if MI is TargetOpcode::G_FMUL and contractable either due to global flags or MachineInstr flags. | |
static bool | hasMoreUses (const MachineInstr &MI0, const MachineInstr &MI1, const MachineRegisterInfo &MRI) |
static std::optional< unsigned > | getMinUselessShift (KnownBits ValueKB, unsigned Opcode, std::optional< int64_t > &Result) |
Return the minimum useless shift amount that results in complete loss of the source value. | |
static void | commuteMask (MutableArrayRef< int > Mask, const unsigned NumElems) |
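The two byte-index helpers and the LogBase2 transform documented above are easy to sanity-check in plain C++. A minimal sketch, not LLVM code: std::countl_zero (C++20) stands in for the G_CTLZ instruction that buildLogBase2 emits, and the helper bodies are reconstructed from their names.
```cpp
#include <bit>
#include <cassert>
#include <cstdint>

int main() {
  // Byte-index helpers: on a little-endian layout, byte I of a
  // ByteWidth-wide value lives at offset I; on big-endian, at
  // ByteWidth - 1 - I.
  auto littleEndianByteAt = [](unsigned, unsigned I) { return I; };
  auto bigEndianByteAt = [](unsigned ByteWidth, unsigned I) {
    return ByteWidth - 1 - I;
  };
  assert(littleEndianByteAt(4, 0) == 0);
  assert(bigEndianByteAt(4, 0) == 3);

  // buildLogBase2's transform: for a power-of-two V of width EltBits,
  // LogBase2(V) == (EltBits - 1) - ctlz(V).
  const std::uint32_t V = 64;
  const unsigned EltBits = 32;
  const unsigned LogBase2 = (EltBits - 1) - std::countl_zero(V);
  assert(LogBase2 == 6);
  return 0;
}
```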
◆ DEBUG_TYPE
#define DEBUG_TYPE "gi-combiner"
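DEBUG_TYPE feeds LLVM's debug-output machinery: in an assertions-enabled build, messages wrapped in LLVM_DEBUG print only under -debug or -debug-only=gi-combiner. A minimal sketch of typical use (the message text is illustrative, not from the source):
```cpp
#include "llvm/Support/Debug.h"
#include "llvm/Support/raw_ostream.h"

#define DEBUG_TYPE "gi-combiner"

void logCombineAttempt() {
  // Emitted only in debug builds, and filterable by the DEBUG_TYPE string.
  LLVM_DEBUG(llvm::dbgs() << "attempting a combine\n");
}
```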
◆ bigEndianByteAt()
◆ buildLogBase2()
◆ canFoldInAddressingMode()
◆ commuteMask()
◆ constantFoldFpUnary()
◆ getExtLoadOpcForExtend()
◆ getIndexedOpc()
◆ getMidVTForTruncRightShiftCombine()
static LLT getMidVTForTruncRightShiftCombine(LLT ShiftTy, LLT TruncTy)
◆ getMinUselessShift()
static std::optional<unsigned> getMinUselessShift(KnownBits ValueKB, unsigned Opcode, std::optional<int64_t> &Result)
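The KnownBits-based implementation is not reproduced here, but the underlying idea can be illustrated in plain C++ (a hypothetical stand-in, assuming the value's possibly-set bits are confined to a low mask): once every possibly-set bit has been shifted out, the result is known to be zero, and the smallest such amount is the "minimum useless shift".
```cpp
#include <bit>
#include <cassert>
#include <cstdint>

int main() {
  // Suppose known-bits analysis proves the value fits in its low 8 bits
  // (e.g. it was zero-extended from i8).
  const std::uint32_t PossiblySetBits = 0xFFu;
  // Any logical right shift by >= bit_width(mask) destroys the value.
  const unsigned MinUselessShift = std::bit_width(PossiblySetBits); // 8
  assert((0xABu >> MinUselessShift) == 0);
  return 0;
}
```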
◆ hasMoreUses()
◆ isBigEndian()
static std::optional<bool> isBigEndian(const SmallDenseMap<int64_t, int64_t, 8> &MemOffset2Idx, int64_t LowestIdx)
Given a map from byte offsets in memory to indices in a load/store, determine if that map corresponds to a little or big endian byte pattern.
Parameters
MemOffset2Idx | maps memory offsets to address offsets. |
---|---|
LowestIdx | is the lowest index in MemOffset2Idx. |
Returns
true if the map corresponds to a big endian byte pattern, false if it corresponds to a little endian byte pattern, and std::nullopt otherwise.
E.g. given a 32-bit type x, and x[AddrOffset], the in-memory byte patterns are as follows:
AddrOffset | Little endian | Big endian
---|---|---
0 | 0 | 3
1 | 1 | 2
2 | 2 | 1
3 | 3 | 0
Definition at line 127 of file CombinerHelper.cpp.
References assert(), bigEndianByteAt(), llvm::DenseMapBase< DerivedT, KeyT, ValueT, KeyInfoT, BucketT >::end(), llvm::DenseMapBase< DerivedT, KeyT, ValueT, KeyInfoT, BucketT >::find(), Idx, littleEndianByteAt(), and llvm::DenseMapBase< DerivedT, KeyT, ValueT, KeyInfoT, BucketT >::size().
Referenced by collectInsertionElements(), createGPRPairNodei64(), llvm::SelectionDAG::getConstant(), llvm::CombinerHelper::matchLoadOrCombine(), PerformSTORECombine(), llvm::LegalizerHelper::reduceLoadStoreWidth(), ReplaceCMP_SWAP_64Results(), and llvm::orc::OrcMips32_Base::writeResolverCode().
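A self-contained sketch of the check described above, with std::map standing in for SmallDenseMap; the body is reconstructed from this page's description rather than copied from the source:
```cpp
#include <cstdint>
#include <map>
#include <optional>

static int64_t littleEndianByteAt(int64_t /*Width*/, int64_t I) { return I; }
static int64_t bigEndianByteAt(int64_t Width, int64_t I) {
  return Width - 1 - I;
}

std::optional<bool>
isBigEndianSketch(const std::map<int64_t, int64_t> &MemOffset2Idx,
                  int64_t LowestIdx) {
  const auto Width = static_cast<int64_t>(MemOffset2Idx.size());
  bool Big = true, Little = true;
  for (int64_t MemOffset = 0; MemOffset < Width; ++MemOffset) {
    auto It = MemOffset2Idx.find(MemOffset);
    if (It == MemOffset2Idx.end())
      return std::nullopt; // a byte offset is missing: pattern unknown
    const int64_t Idx = It->second - LowestIdx; // normalise to 0..Width-1
    Little &= Idx == littleEndianByteAt(Width, MemOffset);
    Big &= Idx == bigEndianByteAt(Width, MemOffset);
    if (!Big && !Little)
      return std::nullopt; // matches neither byte pattern
  }
  return Big; // true: big endian; false: little endian
}
```
For the 4-byte table above, {0→0, 1→1, 2→2, 3→3} yields false (little endian) and {0→3, 1→2, 2→1, 3→0} yields true.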
◆ isConstValidTrue()
◆ isContractableFMul()
◆ littleEndianByteAt()
◆ matchLoadAndBytePosition()
◆ peekThroughBitcast()
◆ ForceLegalIndexing
static cl::opt<bool> ForceLegalIndexing("force-legal-indexing", cl::Hidden, cl::init(false), cl::desc("Force all indexed operations to be legal for the GlobalISel combiner"))
◆ PostIndexUseThreshold
static cl::opt<unsigned> PostIndexUseThreshold("post-index-use-threshold", cl::Hidden, cl::init(32), cl::desc("Number of uses of a base pointer to check before it is no longer considered for post-indexing."))
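Both are ordinary llvm::cl options, so any tool that parses the LLVM command line accepts them; being cl::Hidden, they are left out of the default -help listing. A minimal sketch of defining and reading such an option in a standalone tool (the option name here is hypothetical):
```cpp
#include "llvm/Support/CommandLine.h"

using namespace llvm;

// Hypothetical option, mirroring the style of ForceLegalIndexing above.
static cl::opt<bool> ExampleFlag("example-flag", cl::Hidden, cl::init(false),
                                 cl::desc("An illustrative hidden option"));

int main(int argc, char **argv) {
  cl::ParseCommandLineOptions(argc, argv);
  // A cl::opt converts implicitly to its underlying value type.
  return ExampleFlag ? 0 : 1;
}
```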