@@ -4524,40 +4524,36 @@ static SDValue lowerBUILD_VECTOR(SDValue Op, SelectionDAG &DAG,
45244524
45254525 // General case: splat the first operand and sliding other operands down one
45264526 // by one to form a vector. Alternatively, if the last operand is an
4527- // extraction from a reduction result, we can use the original vector
4527+ // extraction from element 0 of a vector, we can use the original vector
45284528 // reduction result as the start value and slide up instead of slide down.
45294529 // Such that we can avoid the splat.
45304530 SmallVector<SDValue> Operands(Op->op_begin(), Op->op_end());
4531- SDValue Reduce;
4531+ SDValue EVec;
45324532 bool SlideUp = false;
45334533 // Find the first non-undef from the tail.
45344534 auto ItLastNonUndef = find_if(Operands.rbegin(), Operands.rend(),
45354535 [](SDValue V) { return !V.isUndef(); });
45364536 if (ItLastNonUndef != Operands.rend()) {
45374537 using namespace SDPatternMatch;
4538- // Check if the last non-undef operand was extracted from a reduction.
4539- for (unsigned Opc :
4540- {RISCVISD::VECREDUCE_ADD_VL, RISCVISD::VECREDUCE_UMAX_VL,
4541- RISCVISD::VECREDUCE_SMAX_VL, RISCVISD::VECREDUCE_UMIN_VL,
4542- RISCVISD::VECREDUCE_SMIN_VL, RISCVISD::VECREDUCE_AND_VL,
4543- RISCVISD::VECREDUCE_OR_VL, RISCVISD::VECREDUCE_XOR_VL,
4544- RISCVISD::VECREDUCE_FADD_VL, RISCVISD::VECREDUCE_SEQ_FADD_VL,
4545- RISCVISD::VECREDUCE_FMAX_VL, RISCVISD::VECREDUCE_FMIN_VL}) {
4546- SlideUp = sd_match(
4547- *ItLastNonUndef,
4548- m_ExtractElt(m_AllOf(m_Opc(Opc), m_Value(Reduce)), m_Zero()));
4549- if (SlideUp)
4550- break;
4551- }
4538+ // Check if the last non-undef operand was an extraction.
4539+ SlideUp = sd_match(*ItLastNonUndef, m_ExtractElt(m_Value(EVec), m_Zero()));
45524540 }
45534541
45544542 if (SlideUp) {
4555- // Adapt Reduce's type into ContainerVT.
4556- if (Reduce.getValueType().getVectorMinNumElements() <
4543+ MVT EVecContainerVT = EVec.getSimpleValueType();
4544+ // Make sure the original vector has scalable vector type.
4545+ if (EVecContainerVT.isFixedLengthVector()) {
4546+ EVecContainerVT =
4547+ getContainerForFixedLengthVector(DAG, EVecContainerVT, Subtarget);
4548+ EVec = convertToScalableVector(EVecContainerVT, EVec, DAG, Subtarget);
4549+ }
4550+
4551+ // Adapt EVec's type into ContainerVT.
4552+ if (EVecContainerVT.getVectorMinNumElements() <
45574553 ContainerVT.getVectorMinNumElements())
4558- Reduce = DAG.getInsertSubvector(DL, DAG.getUNDEF(ContainerVT), Reduce, 0);
4554+ EVec = DAG.getInsertSubvector(DL, DAG.getUNDEF(ContainerVT), EVec, 0);
45594555 else
4560- Reduce = DAG.getExtractSubvector(DL, ContainerVT, Reduce, 0);
4556+ EVec = DAG.getExtractSubvector(DL, ContainerVT, EVec, 0);
45614557
45624558 // Reverse the elements as we're going to slide up from the last element.
45634559 for (unsigned i = 0U, N = Operands.size(), H = divideCeil(N, 2); i < H; ++i)
@@ -4577,7 +4573,7 @@ static SDValue lowerBUILD_VECTOR(SDValue Op, SelectionDAG &DAG,
45774573 // prior value of our temporary register.
45784574 if (!Vec) {
45794575 if (SlideUp) {
4580- Vec = Reduce;
4576+ Vec = EVec;
45814577 } else {
45824578 Vec = DAG.getSplatVector(VT, DL, V);
45834579 Vec = convertToScalableVector(ContainerVT, Vec, DAG, Subtarget);
0 commit comments