Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
32 changes: 32 additions & 0 deletions llvm/include/llvm/Target/TargetSelectionDAG.td
Original file line number Diff line number Diff line change
Expand Up @@ -1835,6 +1835,13 @@ defm atomic_load_uinc_wrap : binary_atomic_op<atomic_load_uinc_wrap>;
defm atomic_load_udec_wrap : binary_atomic_op<atomic_load_udec_wrap>;
defm atomic_cmp_swap : ternary_atomic_op<atomic_cmp_swap>;

/// Atomic load with no implicit extension of the loaded value.
def atomic_load_nonext
  : PatFrag<(ops node:$ptr), (atomic_load node:$ptr)> {
  // FIXME: Should be IsLoad and/or IsAtomic?
  let IsAtomic = true;
  let IsNonExtLoad = true;
}

/// Atomic load which zeroes the excess high bits.
def atomic_load_zext :
PatFrag<(ops node:$ptr), (atomic_load node:$ptr)> {
Expand Down Expand Up @@ -1876,13 +1883,38 @@ def atomic_load_32 :
let IsAtomic = true;
let MemoryVT = i32;
}

/// 64-bit atomic load.
def atomic_load_64
  : PatFrag<(ops node:$ptr), (atomic_load node:$ptr)> {
  let IsAtomic = true;
  let MemoryVT = i64;
}

/// 8-bit atomic load with no implicit extension.
def atomic_load_nonext_8
  : PatFrag<(ops node:$ptr), (atomic_load_nonext node:$ptr)> {
  // FIXME: Should be IsLoad and/or IsAtomic?
  let IsAtomic = true;
  let MemoryVT = i8;
}

/// 16-bit atomic load with no implicit extension.
def atomic_load_nonext_16
  : PatFrag<(ops node:$ptr), (atomic_load_nonext node:$ptr)> {
  // FIXME: Should be IsLoad and/or IsAtomic?
  let IsAtomic = true;
  let MemoryVT = i16;
}

/// 32-bit atomic load with no implicit extension.
def atomic_load_nonext_32
  : PatFrag<(ops node:$ptr), (atomic_load_nonext node:$ptr)> {
  // FIXME: Should be IsLoad and/or IsAtomic?
  let IsAtomic = true;
  let MemoryVT = i32;
}

/// 64-bit atomic load with no implicit extension.
def atomic_load_nonext_64
  : PatFrag<(ops node:$ptr), (atomic_load_nonext node:$ptr)> {
  // FIXME: Should be IsLoad and/or IsAtomic?
  let IsAtomic = true;
  let MemoryVT = i64;
}

def atomic_load_zext_8 :
PatFrag<(ops node:$ptr), (atomic_load_zext node:$ptr)> {
let IsAtomic = true; // FIXME: Should be IsLoad and/or IsAtomic?
Expand Down
4 changes: 2 additions & 2 deletions llvm/lib/Target/RISCV/RISCVInstrInfoA.td
Original file line number Diff line number Diff line change
Expand Up @@ -174,12 +174,12 @@ let Predicates = [HasAtomicLdSt] in {
}

let Predicates = [HasAtomicLdSt, IsRV32] in {
def : LdPat<relaxed_load<atomic_load_32>, LW>;
def : LdPat<relaxed_load<atomic_load_nonext_32>, LW>;
}

let Predicates = [HasAtomicLdSt, IsRV64] in {
def : LdPat<relaxed_load<atomic_load_asext_32>, LW>;
def : LdPat<relaxed_load<atomic_load_64>, LD, i64>;
def : LdPat<relaxed_load<atomic_load_nonext_64>, LD, i64>;
def : StPat<relaxed_store<atomic_store_64>, SD, GPR, i64>;
}

Expand Down
8 changes: 4 additions & 4 deletions llvm/lib/Target/RISCV/RISCVInstrInfoZalasr.td
Original file line number Diff line number Diff line change
Expand Up @@ -95,17 +95,17 @@ let Predicates = [HasStdExtZalasr] in {
} // Predicates = [HasStdExtZalasr]

let Predicates = [HasStdExtZalasr, IsRV32] in {
def : PatLAQ<acquiring_load<atomic_load_32>, LW_AQ>;
def : PatLAQ<seq_cst_load<atomic_load_32>, LW_AQ>;
def : PatLAQ<acquiring_load<atomic_load_nonext_32>, LW_AQ>;
def : PatLAQ<seq_cst_load<atomic_load_nonext_32>, LW_AQ>;

} // Predicates = [HasStdExtZalasr, IsRV32]

let Predicates = [HasStdExtZalasr, IsRV64] in {
def : PatLAQ<acquiring_load<atomic_load_asext_32>, LW_AQ>;
def : PatLAQ<seq_cst_load<atomic_load_asext_32>, LW_AQ>;

def : PatLAQ<acquiring_load<atomic_load_64>, LD_AQ>;
def : PatLAQ<seq_cst_load<atomic_load_64>, LD_AQ>;
def : PatLAQ<acquiring_load<atomic_load_nonext_64>, LD_AQ>;
def : PatLAQ<seq_cst_load<atomic_load_nonext_64>, LD_AQ>;

def : PatSRL<releasing_store<atomic_store_64>, SD_RL>;
def : PatSRL<seq_cst_store<atomic_store_64>, SD_RL>;
Expand Down
43 changes: 23 additions & 20 deletions llvm/utils/TableGen/Common/CodeGenDAGPatterns.cpp
Original file line number Diff line number Diff line change
Expand Up @@ -933,21 +933,19 @@ std::string TreePredicateFn::getPredCode() const {
getMinAlignment() < 1)
PrintFatalError(getOrigPatFragRecord()->getRecord()->getLoc(),
"IsLoad cannot be used by itself");
} else {
} else if (!isAtomic()) {
if (isNonExtLoad())
PrintFatalError(getOrigPatFragRecord()->getRecord()->getLoc(),
"IsNonExtLoad requires IsLoad");
if (!isAtomic()) {
if (isAnyExtLoad())
PrintFatalError(getOrigPatFragRecord()->getRecord()->getLoc(),
"IsAnyExtLoad requires IsLoad or IsAtomic");
if (isSignExtLoad())
PrintFatalError(getOrigPatFragRecord()->getRecord()->getLoc(),
"IsSignExtLoad requires IsLoad or IsAtomic");
if (isZeroExtLoad())
PrintFatalError(getOrigPatFragRecord()->getRecord()->getLoc(),
"IsZeroExtLoad requires IsLoad or IsAtomic");
}
"IsNonExtLoad requires IsLoad or IsAtomic");
if (isAnyExtLoad())
PrintFatalError(getOrigPatFragRecord()->getRecord()->getLoc(),
"IsAnyExtLoad requires IsLoad or IsAtomic");
if (isSignExtLoad())
PrintFatalError(getOrigPatFragRecord()->getRecord()->getLoc(),
"IsSignExtLoad requires IsLoad or IsAtomic");
if (isZeroExtLoad())
PrintFatalError(getOrigPatFragRecord()->getRecord()->getLoc(),
"IsZeroExtLoad requires IsLoad or IsAtomic");
}

if (isStore()) {
Expand All @@ -966,10 +964,10 @@ std::string TreePredicateFn::getPredCode() const {
}

if (isAtomic()) {
if (getMemoryVT() == nullptr && !isAtomicOrderingMonotonic() &&
getAddressSpaces() == nullptr &&
if (getMemoryVT() == nullptr && getAddressSpaces() == nullptr &&
// FIXME: Should atomic loads be IsLoad, IsAtomic, or both?
!isAnyExtLoad() && !isZeroExtLoad() && !isSignExtLoad() &&
!isNonExtLoad() && !isAnyExtLoad() && !isZeroExtLoad() &&
!isSignExtLoad() && !isAtomicOrderingMonotonic() &&
!isAtomicOrderingAcquire() && !isAtomicOrderingRelease() &&
!isAtomicOrderingAcquireRelease() &&
!isAtomicOrderingSequentiallyConsistent() &&
Expand Down Expand Up @@ -1076,11 +1074,16 @@ std::string TreePredicateFn::getPredCode() const {
"return false;\n";

if (isAtomic()) {
if ((isAnyExtLoad() + isSignExtLoad() + isZeroExtLoad()) > 1)
PrintFatalError(getOrigPatFragRecord()->getRecord()->getLoc(),
"IsAnyExtLoad, IsSignExtLoad, and IsZeroExtLoad are "
"mutually exclusive");
if ((isNonExtLoad() + isAnyExtLoad() + isSignExtLoad() + isZeroExtLoad()) >
1)
PrintFatalError(
getOrigPatFragRecord()->getRecord()->getLoc(),
"IsNonExtLoad, IsAnyExtLoad, IsSignExtLoad, and IsZeroExtLoad are "
"mutually exclusive");

if (isNonExtLoad())
Code += "if (cast<AtomicSDNode>(N)->getExtensionType() != "
"ISD::NON_EXTLOAD) return false;\n";
if (isAnyExtLoad())
Code += "if (cast<AtomicSDNode>(N)->getExtensionType() != ISD::EXTLOAD) "
"return false;\n";
Expand Down
21 changes: 11 additions & 10 deletions llvm/utils/TableGen/GlobalISelEmitter.cpp
Original file line number Diff line number Diff line change
Expand Up @@ -622,16 +622,17 @@ Expected<InstructionMatcher &> GlobalISelEmitter::addBuiltinPredicates(
}

// G_LOAD is used for both non-extending and any-extending loads.
if (Predicate.isLoad() && Predicate.isNonExtLoad()) {
InsnMatcher.addPredicate<MemoryVsLLTSizePredicateMatcher>(
0, MemoryVsLLTSizePredicateMatcher::EqualTo, 0);
return InsnMatcher;
}
if ((Predicate.isLoad() || Predicate.isAtomic()) &&
Predicate.isAnyExtLoad()) {
InsnMatcher.addPredicate<MemoryVsLLTSizePredicateMatcher>(
0, MemoryVsLLTSizePredicateMatcher::LessThan, 0);
return InsnMatcher;
if (Predicate.isLoad() || Predicate.isAtomic()) {
if (Predicate.isNonExtLoad()) {
InsnMatcher.addPredicate<MemoryVsLLTSizePredicateMatcher>(
0, MemoryVsLLTSizePredicateMatcher::EqualTo, 0);
return InsnMatcher;
}
if (Predicate.isAnyExtLoad()) {
InsnMatcher.addPredicate<MemoryVsLLTSizePredicateMatcher>(
0, MemoryVsLLTSizePredicateMatcher::LessThan, 0);
return InsnMatcher;
}
}

if (Predicate.isStore()) {
Expand Down
Loading