-
Notifications
You must be signed in to change notification settings - Fork 15.2k
[TableGen][SelectionDAG][GISel] Support IsNonExtLoad for IsAtomic PatFrags. #137401
New issue
Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.
By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.
Already on GitHub? Sign in to your account
Conversation
[TableGen][SelectionDAG][GISel] Support IsNonExtLoad for IsAtomic PatFrags. Use it for RISC-V as a demonstration. Other targets will follow.
|
@llvm/pr-subscribers-llvm-selectiondag @llvm/pr-subscribers-backend-risc-v Author: Craig Topper (topperc) Changes: Use it for RISC-V as a demonstration. Other targets will follow. Full diff: https://github.com/llvm/llvm-project/pull/137401.diff 5 Files Affected:
diff --git a/llvm/include/llvm/Target/TargetSelectionDAG.td b/llvm/include/llvm/Target/TargetSelectionDAG.td
index e5ffcceeba8a7..a53527442719a 100644
--- a/llvm/include/llvm/Target/TargetSelectionDAG.td
+++ b/llvm/include/llvm/Target/TargetSelectionDAG.td
@@ -1835,6 +1835,13 @@ defm atomic_load_uinc_wrap : binary_atomic_op<atomic_load_uinc_wrap>;
defm atomic_load_udec_wrap : binary_atomic_op<atomic_load_udec_wrap>;
defm atomic_cmp_swap : ternary_atomic_op<atomic_cmp_swap>;
+/// Atomic load which does not extend.
+def atomic_load_nonext :
+ PatFrag<(ops node:$ptr), (atomic_load node:$ptr)> {
+ let IsAtomic = true; // FIXME: Should be IsLoad and/or IsAtomic?
+ let IsNonExtLoad = true;
+}
+
/// Atomic load which zeroes the excess high bits.
def atomic_load_zext :
PatFrag<(ops node:$ptr), (atomic_load node:$ptr)> {
@@ -1876,6 +1883,7 @@ def atomic_load_32 :
let IsAtomic = true;
let MemoryVT = i32;
}
+
def atomic_load_64 :
PatFrag<(ops node:$ptr),
(atomic_load node:$ptr)> {
@@ -1883,6 +1891,30 @@ def atomic_load_64 :
let MemoryVT = i64;
}
+def atomic_load_nonext_8 :
+ PatFrag<(ops node:$ptr), (atomic_load_nonext node:$ptr)> {
+ let IsAtomic = true; // FIXME: Should be IsLoad and/or IsAtomic?
+ let MemoryVT = i8;
+}
+
+def atomic_load_nonext_16 :
+ PatFrag<(ops node:$ptr), (atomic_load_nonext node:$ptr)> {
+ let IsAtomic = true; // FIXME: Should be IsLoad and/or IsAtomic?
+ let MemoryVT = i16;
+}
+
+def atomic_load_nonext_32 :
+ PatFrag<(ops node:$ptr), (atomic_load_nonext node:$ptr)> {
+ let IsAtomic = true; // FIXME: Should be IsLoad and/or IsAtomic?
+ let MemoryVT = i32;
+}
+
+def atomic_load_nonext_64 :
+ PatFrag<(ops node:$ptr), (atomic_load_nonext node:$ptr)> {
+ let IsAtomic = true; // FIXME: Should be IsLoad and/or IsAtomic?
+ let MemoryVT = i64;
+}
+
def atomic_load_zext_8 :
PatFrag<(ops node:$ptr), (atomic_load_zext node:$ptr)> {
let IsAtomic = true; // FIXME: Should be IsLoad and/or IsAtomic?
diff --git a/llvm/lib/Target/RISCV/RISCVInstrInfoA.td b/llvm/lib/Target/RISCV/RISCVInstrInfoA.td
index b348e774d50b8..74873a66bc8c9 100644
--- a/llvm/lib/Target/RISCV/RISCVInstrInfoA.td
+++ b/llvm/lib/Target/RISCV/RISCVInstrInfoA.td
@@ -174,12 +174,12 @@ let Predicates = [HasAtomicLdSt] in {
}
let Predicates = [HasAtomicLdSt, IsRV32] in {
- def : LdPat<relaxed_load<atomic_load_32>, LW>;
+ def : LdPat<relaxed_load<atomic_load_nonext_32>, LW>;
}
let Predicates = [HasAtomicLdSt, IsRV64] in {
def : LdPat<relaxed_load<atomic_load_asext_32>, LW>;
- def : LdPat<relaxed_load<atomic_load_64>, LD, i64>;
+ def : LdPat<relaxed_load<atomic_load_nonext_64>, LD, i64>;
def : StPat<relaxed_store<atomic_store_64>, SD, GPR, i64>;
}
diff --git a/llvm/lib/Target/RISCV/RISCVInstrInfoZalasr.td b/llvm/lib/Target/RISCV/RISCVInstrInfoZalasr.td
index 837aa7f1005af..5e013b496c6b1 100644
--- a/llvm/lib/Target/RISCV/RISCVInstrInfoZalasr.td
+++ b/llvm/lib/Target/RISCV/RISCVInstrInfoZalasr.td
@@ -95,8 +95,8 @@ let Predicates = [HasStdExtZalasr] in {
} // Predicates = [HasStdExtZalasr]
let Predicates = [HasStdExtZalasr, IsRV32] in {
- def : PatLAQ<acquiring_load<atomic_load_32>, LW_AQ>;
- def : PatLAQ<seq_cst_load<atomic_load_32>, LW_AQ>;
+ def : PatLAQ<acquiring_load<atomic_load_nonext_32>, LW_AQ>;
+ def : PatLAQ<seq_cst_load<atomic_load_nonext_32>, LW_AQ>;
} // Predicates = [HasStdExtZalasr, IsRV64]
@@ -104,8 +104,8 @@ let Predicates = [HasStdExtZalasr, IsRV64] in {
def : PatLAQ<acquiring_load<atomic_load_asext_32>, LW_AQ>;
def : PatLAQ<seq_cst_load<atomic_load_asext_32>, LW_AQ>;
- def : PatLAQ<acquiring_load<atomic_load_64>, LD_AQ>;
- def : PatLAQ<seq_cst_load<atomic_load_64>, LD_AQ>;
+ def : PatLAQ<acquiring_load<atomic_load_nonext_64>, LD_AQ>;
+ def : PatLAQ<seq_cst_load<atomic_load_nonext_64>, LD_AQ>;
def : PatSRL<releasing_store<atomic_store_64>, SD_RL>;
def : PatSRL<seq_cst_store<atomic_store_64>, SD_RL>;
diff --git a/llvm/utils/TableGen/Common/CodeGenDAGPatterns.cpp b/llvm/utils/TableGen/Common/CodeGenDAGPatterns.cpp
index a4fa063ae61cb..615c077fe4bdc 100644
--- a/llvm/utils/TableGen/Common/CodeGenDAGPatterns.cpp
+++ b/llvm/utils/TableGen/Common/CodeGenDAGPatterns.cpp
@@ -933,21 +933,19 @@ std::string TreePredicateFn::getPredCode() const {
getMinAlignment() < 1)
PrintFatalError(getOrigPatFragRecord()->getRecord()->getLoc(),
"IsLoad cannot be used by itself");
- } else {
+ } else if (!isAtomic()) {
if (isNonExtLoad())
PrintFatalError(getOrigPatFragRecord()->getRecord()->getLoc(),
- "IsNonExtLoad requires IsLoad");
- if (!isAtomic()) {
- if (isAnyExtLoad())
- PrintFatalError(getOrigPatFragRecord()->getRecord()->getLoc(),
- "IsAnyExtLoad requires IsLoad or IsAtomic");
- if (isSignExtLoad())
- PrintFatalError(getOrigPatFragRecord()->getRecord()->getLoc(),
- "IsSignExtLoad requires IsLoad or IsAtomic");
- if (isZeroExtLoad())
- PrintFatalError(getOrigPatFragRecord()->getRecord()->getLoc(),
- "IsZeroExtLoad requires IsLoad or IsAtomic");
- }
+ "IsNonExtLoad requires IsLoad or IsAtomic");
+ if (isAnyExtLoad())
+ PrintFatalError(getOrigPatFragRecord()->getRecord()->getLoc(),
+ "IsAnyExtLoad requires IsLoad or IsAtomic");
+ if (isSignExtLoad())
+ PrintFatalError(getOrigPatFragRecord()->getRecord()->getLoc(),
+ "IsSignExtLoad requires IsLoad or IsAtomic");
+ if (isZeroExtLoad())
+ PrintFatalError(getOrigPatFragRecord()->getRecord()->getLoc(),
+ "IsZeroExtLoad requires IsLoad or IsAtomic");
}
if (isStore()) {
@@ -966,10 +964,10 @@ std::string TreePredicateFn::getPredCode() const {
}
if (isAtomic()) {
- if (getMemoryVT() == nullptr && !isAtomicOrderingMonotonic() &&
- getAddressSpaces() == nullptr &&
+ if (getMemoryVT() == nullptr && getAddressSpaces() == nullptr &&
// FIXME: Should atomic loads be IsLoad, IsAtomic, or both?
- !isAnyExtLoad() && !isZeroExtLoad() && !isSignExtLoad() &&
+ !isNonExtLoad() && !isAnyExtLoad() && !isZeroExtLoad() &&
+ !isSignExtLoad() && !isAtomicOrderingMonotonic() &&
!isAtomicOrderingAcquire() && !isAtomicOrderingRelease() &&
!isAtomicOrderingAcquireRelease() &&
!isAtomicOrderingSequentiallyConsistent() &&
@@ -1076,11 +1074,16 @@ std::string TreePredicateFn::getPredCode() const {
"return false;\n";
if (isAtomic()) {
- if ((isAnyExtLoad() + isSignExtLoad() + isZeroExtLoad()) > 1)
- PrintFatalError(getOrigPatFragRecord()->getRecord()->getLoc(),
- "IsAnyExtLoad, IsSignExtLoad, and IsZeroExtLoad are "
- "mutually exclusive");
+ if ((isNonExtLoad() + isAnyExtLoad() + isSignExtLoad() + isZeroExtLoad()) >
+ 1)
+ PrintFatalError(
+ getOrigPatFragRecord()->getRecord()->getLoc(),
+ "IsNonExtLoad, IsAnyExtLoad, IsSignExtLoad, and IsZeroExtLoad are "
+ "mutually exclusive");
+ if (isNonExtLoad())
+ Code += "if (cast<AtomicSDNode>(N)->getExtensionType() != "
+ "ISD::NON_EXTLOAD) return false;\n";
if (isAnyExtLoad())
Code += "if (cast<AtomicSDNode>(N)->getExtensionType() != ISD::EXTLOAD) "
"return false;\n";
diff --git a/llvm/utils/TableGen/GlobalISelEmitter.cpp b/llvm/utils/TableGen/GlobalISelEmitter.cpp
index b3bad55ebf278..b9561c137ec8b 100644
--- a/llvm/utils/TableGen/GlobalISelEmitter.cpp
+++ b/llvm/utils/TableGen/GlobalISelEmitter.cpp
@@ -622,16 +622,17 @@ Expected<InstructionMatcher &> GlobalISelEmitter::addBuiltinPredicates(
}
// G_LOAD is used for both non-extending and any-extending loads.
- if (Predicate.isLoad() && Predicate.isNonExtLoad()) {
- InsnMatcher.addPredicate<MemoryVsLLTSizePredicateMatcher>(
- 0, MemoryVsLLTSizePredicateMatcher::EqualTo, 0);
- return InsnMatcher;
- }
- if ((Predicate.isLoad() || Predicate.isAtomic()) &&
- Predicate.isAnyExtLoad()) {
- InsnMatcher.addPredicate<MemoryVsLLTSizePredicateMatcher>(
- 0, MemoryVsLLTSizePredicateMatcher::LessThan, 0);
- return InsnMatcher;
+ if (Predicate.isLoad() || Predicate.isAtomic()) {
+ if (Predicate.isNonExtLoad()) {
+ InsnMatcher.addPredicate<MemoryVsLLTSizePredicateMatcher>(
+ 0, MemoryVsLLTSizePredicateMatcher::EqualTo, 0);
+ return InsnMatcher;
+ }
+ if (Predicate.isAnyExtLoad()) {
+ InsnMatcher.addPredicate<MemoryVsLLTSizePredicateMatcher>(
+ 0, MemoryVsLLTSizePredicateMatcher::LessThan, 0);
+ return InsnMatcher;
+ }
}
if (Predicate.isStore()) {
|
|
@llvm/pr-subscribers-tablegen Author: Craig Topper (topperc) Changes: Use it for RISC-V as a demonstration. Other targets will follow. Full diff: https://github.com/llvm/llvm-project/pull/137401.diff 5 Files Affected:
diff --git a/llvm/include/llvm/Target/TargetSelectionDAG.td b/llvm/include/llvm/Target/TargetSelectionDAG.td
index e5ffcceeba8a7..a53527442719a 100644
--- a/llvm/include/llvm/Target/TargetSelectionDAG.td
+++ b/llvm/include/llvm/Target/TargetSelectionDAG.td
@@ -1835,6 +1835,13 @@ defm atomic_load_uinc_wrap : binary_atomic_op<atomic_load_uinc_wrap>;
defm atomic_load_udec_wrap : binary_atomic_op<atomic_load_udec_wrap>;
defm atomic_cmp_swap : ternary_atomic_op<atomic_cmp_swap>;
+/// Atomic load which does not extend.
+def atomic_load_nonext :
+ PatFrag<(ops node:$ptr), (atomic_load node:$ptr)> {
+ let IsAtomic = true; // FIXME: Should be IsLoad and/or IsAtomic?
+ let IsNonExtLoad = true;
+}
+
/// Atomic load which zeroes the excess high bits.
def atomic_load_zext :
PatFrag<(ops node:$ptr), (atomic_load node:$ptr)> {
@@ -1876,6 +1883,7 @@ def atomic_load_32 :
let IsAtomic = true;
let MemoryVT = i32;
}
+
def atomic_load_64 :
PatFrag<(ops node:$ptr),
(atomic_load node:$ptr)> {
@@ -1883,6 +1891,30 @@ def atomic_load_64 :
let MemoryVT = i64;
}
+def atomic_load_nonext_8 :
+ PatFrag<(ops node:$ptr), (atomic_load_nonext node:$ptr)> {
+ let IsAtomic = true; // FIXME: Should be IsLoad and/or IsAtomic?
+ let MemoryVT = i8;
+}
+
+def atomic_load_nonext_16 :
+ PatFrag<(ops node:$ptr), (atomic_load_nonext node:$ptr)> {
+ let IsAtomic = true; // FIXME: Should be IsLoad and/or IsAtomic?
+ let MemoryVT = i16;
+}
+
+def atomic_load_nonext_32 :
+ PatFrag<(ops node:$ptr), (atomic_load_nonext node:$ptr)> {
+ let IsAtomic = true; // FIXME: Should be IsLoad and/or IsAtomic?
+ let MemoryVT = i32;
+}
+
+def atomic_load_nonext_64 :
+ PatFrag<(ops node:$ptr), (atomic_load_nonext node:$ptr)> {
+ let IsAtomic = true; // FIXME: Should be IsLoad and/or IsAtomic?
+ let MemoryVT = i64;
+}
+
def atomic_load_zext_8 :
PatFrag<(ops node:$ptr), (atomic_load_zext node:$ptr)> {
let IsAtomic = true; // FIXME: Should be IsLoad and/or IsAtomic?
diff --git a/llvm/lib/Target/RISCV/RISCVInstrInfoA.td b/llvm/lib/Target/RISCV/RISCVInstrInfoA.td
index b348e774d50b8..74873a66bc8c9 100644
--- a/llvm/lib/Target/RISCV/RISCVInstrInfoA.td
+++ b/llvm/lib/Target/RISCV/RISCVInstrInfoA.td
@@ -174,12 +174,12 @@ let Predicates = [HasAtomicLdSt] in {
}
let Predicates = [HasAtomicLdSt, IsRV32] in {
- def : LdPat<relaxed_load<atomic_load_32>, LW>;
+ def : LdPat<relaxed_load<atomic_load_nonext_32>, LW>;
}
let Predicates = [HasAtomicLdSt, IsRV64] in {
def : LdPat<relaxed_load<atomic_load_asext_32>, LW>;
- def : LdPat<relaxed_load<atomic_load_64>, LD, i64>;
+ def : LdPat<relaxed_load<atomic_load_nonext_64>, LD, i64>;
def : StPat<relaxed_store<atomic_store_64>, SD, GPR, i64>;
}
diff --git a/llvm/lib/Target/RISCV/RISCVInstrInfoZalasr.td b/llvm/lib/Target/RISCV/RISCVInstrInfoZalasr.td
index 837aa7f1005af..5e013b496c6b1 100644
--- a/llvm/lib/Target/RISCV/RISCVInstrInfoZalasr.td
+++ b/llvm/lib/Target/RISCV/RISCVInstrInfoZalasr.td
@@ -95,8 +95,8 @@ let Predicates = [HasStdExtZalasr] in {
} // Predicates = [HasStdExtZalasr]
let Predicates = [HasStdExtZalasr, IsRV32] in {
- def : PatLAQ<acquiring_load<atomic_load_32>, LW_AQ>;
- def : PatLAQ<seq_cst_load<atomic_load_32>, LW_AQ>;
+ def : PatLAQ<acquiring_load<atomic_load_nonext_32>, LW_AQ>;
+ def : PatLAQ<seq_cst_load<atomic_load_nonext_32>, LW_AQ>;
} // Predicates = [HasStdExtZalasr, IsRV64]
@@ -104,8 +104,8 @@ let Predicates = [HasStdExtZalasr, IsRV64] in {
def : PatLAQ<acquiring_load<atomic_load_asext_32>, LW_AQ>;
def : PatLAQ<seq_cst_load<atomic_load_asext_32>, LW_AQ>;
- def : PatLAQ<acquiring_load<atomic_load_64>, LD_AQ>;
- def : PatLAQ<seq_cst_load<atomic_load_64>, LD_AQ>;
+ def : PatLAQ<acquiring_load<atomic_load_nonext_64>, LD_AQ>;
+ def : PatLAQ<seq_cst_load<atomic_load_nonext_64>, LD_AQ>;
def : PatSRL<releasing_store<atomic_store_64>, SD_RL>;
def : PatSRL<seq_cst_store<atomic_store_64>, SD_RL>;
diff --git a/llvm/utils/TableGen/Common/CodeGenDAGPatterns.cpp b/llvm/utils/TableGen/Common/CodeGenDAGPatterns.cpp
index a4fa063ae61cb..615c077fe4bdc 100644
--- a/llvm/utils/TableGen/Common/CodeGenDAGPatterns.cpp
+++ b/llvm/utils/TableGen/Common/CodeGenDAGPatterns.cpp
@@ -933,21 +933,19 @@ std::string TreePredicateFn::getPredCode() const {
getMinAlignment() < 1)
PrintFatalError(getOrigPatFragRecord()->getRecord()->getLoc(),
"IsLoad cannot be used by itself");
- } else {
+ } else if (!isAtomic()) {
if (isNonExtLoad())
PrintFatalError(getOrigPatFragRecord()->getRecord()->getLoc(),
- "IsNonExtLoad requires IsLoad");
- if (!isAtomic()) {
- if (isAnyExtLoad())
- PrintFatalError(getOrigPatFragRecord()->getRecord()->getLoc(),
- "IsAnyExtLoad requires IsLoad or IsAtomic");
- if (isSignExtLoad())
- PrintFatalError(getOrigPatFragRecord()->getRecord()->getLoc(),
- "IsSignExtLoad requires IsLoad or IsAtomic");
- if (isZeroExtLoad())
- PrintFatalError(getOrigPatFragRecord()->getRecord()->getLoc(),
- "IsZeroExtLoad requires IsLoad or IsAtomic");
- }
+ "IsNonExtLoad requires IsLoad or IsAtomic");
+ if (isAnyExtLoad())
+ PrintFatalError(getOrigPatFragRecord()->getRecord()->getLoc(),
+ "IsAnyExtLoad requires IsLoad or IsAtomic");
+ if (isSignExtLoad())
+ PrintFatalError(getOrigPatFragRecord()->getRecord()->getLoc(),
+ "IsSignExtLoad requires IsLoad or IsAtomic");
+ if (isZeroExtLoad())
+ PrintFatalError(getOrigPatFragRecord()->getRecord()->getLoc(),
+ "IsZeroExtLoad requires IsLoad or IsAtomic");
}
if (isStore()) {
@@ -966,10 +964,10 @@ std::string TreePredicateFn::getPredCode() const {
}
if (isAtomic()) {
- if (getMemoryVT() == nullptr && !isAtomicOrderingMonotonic() &&
- getAddressSpaces() == nullptr &&
+ if (getMemoryVT() == nullptr && getAddressSpaces() == nullptr &&
// FIXME: Should atomic loads be IsLoad, IsAtomic, or both?
- !isAnyExtLoad() && !isZeroExtLoad() && !isSignExtLoad() &&
+ !isNonExtLoad() && !isAnyExtLoad() && !isZeroExtLoad() &&
+ !isSignExtLoad() && !isAtomicOrderingMonotonic() &&
!isAtomicOrderingAcquire() && !isAtomicOrderingRelease() &&
!isAtomicOrderingAcquireRelease() &&
!isAtomicOrderingSequentiallyConsistent() &&
@@ -1076,11 +1074,16 @@ std::string TreePredicateFn::getPredCode() const {
"return false;\n";
if (isAtomic()) {
- if ((isAnyExtLoad() + isSignExtLoad() + isZeroExtLoad()) > 1)
- PrintFatalError(getOrigPatFragRecord()->getRecord()->getLoc(),
- "IsAnyExtLoad, IsSignExtLoad, and IsZeroExtLoad are "
- "mutually exclusive");
+ if ((isNonExtLoad() + isAnyExtLoad() + isSignExtLoad() + isZeroExtLoad()) >
+ 1)
+ PrintFatalError(
+ getOrigPatFragRecord()->getRecord()->getLoc(),
+ "IsNonExtLoad, IsAnyExtLoad, IsSignExtLoad, and IsZeroExtLoad are "
+ "mutually exclusive");
+ if (isNonExtLoad())
+ Code += "if (cast<AtomicSDNode>(N)->getExtensionType() != "
+ "ISD::NON_EXTLOAD) return false;\n";
if (isAnyExtLoad())
Code += "if (cast<AtomicSDNode>(N)->getExtensionType() != ISD::EXTLOAD) "
"return false;\n";
diff --git a/llvm/utils/TableGen/GlobalISelEmitter.cpp b/llvm/utils/TableGen/GlobalISelEmitter.cpp
index b3bad55ebf278..b9561c137ec8b 100644
--- a/llvm/utils/TableGen/GlobalISelEmitter.cpp
+++ b/llvm/utils/TableGen/GlobalISelEmitter.cpp
@@ -622,16 +622,17 @@ Expected<InstructionMatcher &> GlobalISelEmitter::addBuiltinPredicates(
}
// G_LOAD is used for both non-extending and any-extending loads.
- if (Predicate.isLoad() && Predicate.isNonExtLoad()) {
- InsnMatcher.addPredicate<MemoryVsLLTSizePredicateMatcher>(
- 0, MemoryVsLLTSizePredicateMatcher::EqualTo, 0);
- return InsnMatcher;
- }
- if ((Predicate.isLoad() || Predicate.isAtomic()) &&
- Predicate.isAnyExtLoad()) {
- InsnMatcher.addPredicate<MemoryVsLLTSizePredicateMatcher>(
- 0, MemoryVsLLTSizePredicateMatcher::LessThan, 0);
- return InsnMatcher;
+ if (Predicate.isLoad() || Predicate.isAtomic()) {
+ if (Predicate.isNonExtLoad()) {
+ InsnMatcher.addPredicate<MemoryVsLLTSizePredicateMatcher>(
+ 0, MemoryVsLLTSizePredicateMatcher::EqualTo, 0);
+ return InsnMatcher;
+ }
+ if (Predicate.isAnyExtLoad()) {
+ InsnMatcher.addPredicate<MemoryVsLLTSizePredicateMatcher>(
+ 0, MemoryVsLLTSizePredicateMatcher::LessThan, 0);
+ return InsnMatcher;
+ }
}
if (Predicate.isStore()) {
|
|
@llvm/pr-subscribers-llvm-globalisel Author: Craig Topper (topperc) Changes: Use it for RISC-V as a demonstration. Other targets will follow. Full diff: https://github.com/llvm/llvm-project/pull/137401.diff 5 Files Affected:
diff --git a/llvm/include/llvm/Target/TargetSelectionDAG.td b/llvm/include/llvm/Target/TargetSelectionDAG.td
index e5ffcceeba8a7..a53527442719a 100644
--- a/llvm/include/llvm/Target/TargetSelectionDAG.td
+++ b/llvm/include/llvm/Target/TargetSelectionDAG.td
@@ -1835,6 +1835,13 @@ defm atomic_load_uinc_wrap : binary_atomic_op<atomic_load_uinc_wrap>;
defm atomic_load_udec_wrap : binary_atomic_op<atomic_load_udec_wrap>;
defm atomic_cmp_swap : ternary_atomic_op<atomic_cmp_swap>;
+/// Atomic load which does not extend.
+def atomic_load_nonext :
+ PatFrag<(ops node:$ptr), (atomic_load node:$ptr)> {
+ let IsAtomic = true; // FIXME: Should be IsLoad and/or IsAtomic?
+ let IsNonExtLoad = true;
+}
+
/// Atomic load which zeroes the excess high bits.
def atomic_load_zext :
PatFrag<(ops node:$ptr), (atomic_load node:$ptr)> {
@@ -1876,6 +1883,7 @@ def atomic_load_32 :
let IsAtomic = true;
let MemoryVT = i32;
}
+
def atomic_load_64 :
PatFrag<(ops node:$ptr),
(atomic_load node:$ptr)> {
@@ -1883,6 +1891,30 @@ def atomic_load_64 :
let MemoryVT = i64;
}
+def atomic_load_nonext_8 :
+ PatFrag<(ops node:$ptr), (atomic_load_nonext node:$ptr)> {
+ let IsAtomic = true; // FIXME: Should be IsLoad and/or IsAtomic?
+ let MemoryVT = i8;
+}
+
+def atomic_load_nonext_16 :
+ PatFrag<(ops node:$ptr), (atomic_load_nonext node:$ptr)> {
+ let IsAtomic = true; // FIXME: Should be IsLoad and/or IsAtomic?
+ let MemoryVT = i16;
+}
+
+def atomic_load_nonext_32 :
+ PatFrag<(ops node:$ptr), (atomic_load_nonext node:$ptr)> {
+ let IsAtomic = true; // FIXME: Should be IsLoad and/or IsAtomic?
+ let MemoryVT = i32;
+}
+
+def atomic_load_nonext_64 :
+ PatFrag<(ops node:$ptr), (atomic_load_nonext node:$ptr)> {
+ let IsAtomic = true; // FIXME: Should be IsLoad and/or IsAtomic?
+ let MemoryVT = i64;
+}
+
def atomic_load_zext_8 :
PatFrag<(ops node:$ptr), (atomic_load_zext node:$ptr)> {
let IsAtomic = true; // FIXME: Should be IsLoad and/or IsAtomic?
diff --git a/llvm/lib/Target/RISCV/RISCVInstrInfoA.td b/llvm/lib/Target/RISCV/RISCVInstrInfoA.td
index b348e774d50b8..74873a66bc8c9 100644
--- a/llvm/lib/Target/RISCV/RISCVInstrInfoA.td
+++ b/llvm/lib/Target/RISCV/RISCVInstrInfoA.td
@@ -174,12 +174,12 @@ let Predicates = [HasAtomicLdSt] in {
}
let Predicates = [HasAtomicLdSt, IsRV32] in {
- def : LdPat<relaxed_load<atomic_load_32>, LW>;
+ def : LdPat<relaxed_load<atomic_load_nonext_32>, LW>;
}
let Predicates = [HasAtomicLdSt, IsRV64] in {
def : LdPat<relaxed_load<atomic_load_asext_32>, LW>;
- def : LdPat<relaxed_load<atomic_load_64>, LD, i64>;
+ def : LdPat<relaxed_load<atomic_load_nonext_64>, LD, i64>;
def : StPat<relaxed_store<atomic_store_64>, SD, GPR, i64>;
}
diff --git a/llvm/lib/Target/RISCV/RISCVInstrInfoZalasr.td b/llvm/lib/Target/RISCV/RISCVInstrInfoZalasr.td
index 837aa7f1005af..5e013b496c6b1 100644
--- a/llvm/lib/Target/RISCV/RISCVInstrInfoZalasr.td
+++ b/llvm/lib/Target/RISCV/RISCVInstrInfoZalasr.td
@@ -95,8 +95,8 @@ let Predicates = [HasStdExtZalasr] in {
} // Predicates = [HasStdExtZalasr]
let Predicates = [HasStdExtZalasr, IsRV32] in {
- def : PatLAQ<acquiring_load<atomic_load_32>, LW_AQ>;
- def : PatLAQ<seq_cst_load<atomic_load_32>, LW_AQ>;
+ def : PatLAQ<acquiring_load<atomic_load_nonext_32>, LW_AQ>;
+ def : PatLAQ<seq_cst_load<atomic_load_nonext_32>, LW_AQ>;
} // Predicates = [HasStdExtZalasr, IsRV64]
@@ -104,8 +104,8 @@ let Predicates = [HasStdExtZalasr, IsRV64] in {
def : PatLAQ<acquiring_load<atomic_load_asext_32>, LW_AQ>;
def : PatLAQ<seq_cst_load<atomic_load_asext_32>, LW_AQ>;
- def : PatLAQ<acquiring_load<atomic_load_64>, LD_AQ>;
- def : PatLAQ<seq_cst_load<atomic_load_64>, LD_AQ>;
+ def : PatLAQ<acquiring_load<atomic_load_nonext_64>, LD_AQ>;
+ def : PatLAQ<seq_cst_load<atomic_load_nonext_64>, LD_AQ>;
def : PatSRL<releasing_store<atomic_store_64>, SD_RL>;
def : PatSRL<seq_cst_store<atomic_store_64>, SD_RL>;
diff --git a/llvm/utils/TableGen/Common/CodeGenDAGPatterns.cpp b/llvm/utils/TableGen/Common/CodeGenDAGPatterns.cpp
index a4fa063ae61cb..615c077fe4bdc 100644
--- a/llvm/utils/TableGen/Common/CodeGenDAGPatterns.cpp
+++ b/llvm/utils/TableGen/Common/CodeGenDAGPatterns.cpp
@@ -933,21 +933,19 @@ std::string TreePredicateFn::getPredCode() const {
getMinAlignment() < 1)
PrintFatalError(getOrigPatFragRecord()->getRecord()->getLoc(),
"IsLoad cannot be used by itself");
- } else {
+ } else if (!isAtomic()) {
if (isNonExtLoad())
PrintFatalError(getOrigPatFragRecord()->getRecord()->getLoc(),
- "IsNonExtLoad requires IsLoad");
- if (!isAtomic()) {
- if (isAnyExtLoad())
- PrintFatalError(getOrigPatFragRecord()->getRecord()->getLoc(),
- "IsAnyExtLoad requires IsLoad or IsAtomic");
- if (isSignExtLoad())
- PrintFatalError(getOrigPatFragRecord()->getRecord()->getLoc(),
- "IsSignExtLoad requires IsLoad or IsAtomic");
- if (isZeroExtLoad())
- PrintFatalError(getOrigPatFragRecord()->getRecord()->getLoc(),
- "IsZeroExtLoad requires IsLoad or IsAtomic");
- }
+ "IsNonExtLoad requires IsLoad or IsAtomic");
+ if (isAnyExtLoad())
+ PrintFatalError(getOrigPatFragRecord()->getRecord()->getLoc(),
+ "IsAnyExtLoad requires IsLoad or IsAtomic");
+ if (isSignExtLoad())
+ PrintFatalError(getOrigPatFragRecord()->getRecord()->getLoc(),
+ "IsSignExtLoad requires IsLoad or IsAtomic");
+ if (isZeroExtLoad())
+ PrintFatalError(getOrigPatFragRecord()->getRecord()->getLoc(),
+ "IsZeroExtLoad requires IsLoad or IsAtomic");
}
if (isStore()) {
@@ -966,10 +964,10 @@ std::string TreePredicateFn::getPredCode() const {
}
if (isAtomic()) {
- if (getMemoryVT() == nullptr && !isAtomicOrderingMonotonic() &&
- getAddressSpaces() == nullptr &&
+ if (getMemoryVT() == nullptr && getAddressSpaces() == nullptr &&
// FIXME: Should atomic loads be IsLoad, IsAtomic, or both?
- !isAnyExtLoad() && !isZeroExtLoad() && !isSignExtLoad() &&
+ !isNonExtLoad() && !isAnyExtLoad() && !isZeroExtLoad() &&
+ !isSignExtLoad() && !isAtomicOrderingMonotonic() &&
!isAtomicOrderingAcquire() && !isAtomicOrderingRelease() &&
!isAtomicOrderingAcquireRelease() &&
!isAtomicOrderingSequentiallyConsistent() &&
@@ -1076,11 +1074,16 @@ std::string TreePredicateFn::getPredCode() const {
"return false;\n";
if (isAtomic()) {
- if ((isAnyExtLoad() + isSignExtLoad() + isZeroExtLoad()) > 1)
- PrintFatalError(getOrigPatFragRecord()->getRecord()->getLoc(),
- "IsAnyExtLoad, IsSignExtLoad, and IsZeroExtLoad are "
- "mutually exclusive");
+ if ((isNonExtLoad() + isAnyExtLoad() + isSignExtLoad() + isZeroExtLoad()) >
+ 1)
+ PrintFatalError(
+ getOrigPatFragRecord()->getRecord()->getLoc(),
+ "IsNonExtLoad, IsAnyExtLoad, IsSignExtLoad, and IsZeroExtLoad are "
+ "mutually exclusive");
+ if (isNonExtLoad())
+ Code += "if (cast<AtomicSDNode>(N)->getExtensionType() != "
+ "ISD::NON_EXTLOAD) return false;\n";
if (isAnyExtLoad())
Code += "if (cast<AtomicSDNode>(N)->getExtensionType() != ISD::EXTLOAD) "
"return false;\n";
diff --git a/llvm/utils/TableGen/GlobalISelEmitter.cpp b/llvm/utils/TableGen/GlobalISelEmitter.cpp
index b3bad55ebf278..b9561c137ec8b 100644
--- a/llvm/utils/TableGen/GlobalISelEmitter.cpp
+++ b/llvm/utils/TableGen/GlobalISelEmitter.cpp
@@ -622,16 +622,17 @@ Expected<InstructionMatcher &> GlobalISelEmitter::addBuiltinPredicates(
}
// G_LOAD is used for both non-extending and any-extending loads.
- if (Predicate.isLoad() && Predicate.isNonExtLoad()) {
- InsnMatcher.addPredicate<MemoryVsLLTSizePredicateMatcher>(
- 0, MemoryVsLLTSizePredicateMatcher::EqualTo, 0);
- return InsnMatcher;
- }
- if ((Predicate.isLoad() || Predicate.isAtomic()) &&
- Predicate.isAnyExtLoad()) {
- InsnMatcher.addPredicate<MemoryVsLLTSizePredicateMatcher>(
- 0, MemoryVsLLTSizePredicateMatcher::LessThan, 0);
- return InsnMatcher;
+ if (Predicate.isLoad() || Predicate.isAtomic()) {
+ if (Predicate.isNonExtLoad()) {
+ InsnMatcher.addPredicate<MemoryVsLLTSizePredicateMatcher>(
+ 0, MemoryVsLLTSizePredicateMatcher::EqualTo, 0);
+ return InsnMatcher;
+ }
+ if (Predicate.isAnyExtLoad()) {
+ InsnMatcher.addPredicate<MemoryVsLLTSizePredicateMatcher>(
+ 0, MemoryVsLLTSizePredicateMatcher::LessThan, 0);
+ return InsnMatcher;
+ }
}
if (Predicate.isStore()) {
|
[TableGen][SelectionDAG][GISel] Support IsNonExtLoad for IsAtomic PatFrags. (llvm#137401) Use it for RISC-V as a demonstration. Other targets will follow.
Use it for RISC-V as a demonstration. Other targets will follow.