@@ -35,7 +35,7 @@ use crate::attributes;
 use crate::common::Funclet;
 use crate::context::{CodegenCx, FullCx, GenericCx, SCx};
 use crate::llvm::{
-    self, AtomicOrdering, AtomicRmwBinOp, BasicBlock, False, GEPNoWrapFlags, Metadata, True,
+    self, AtomicOrdering, AtomicRmwBinOp, BasicBlock, GEPNoWrapFlags, Metadata, TRUE, ToLlvmBool,
 };
 use crate::type_::Type;
 use crate::type_of::LayoutLlvmExt;
@@ -493,8 +493,8 @@ impl<'a, 'll, 'tcx> BuilderMethods<'a, 'tcx> for Builder<'a, 'll, 'tcx> {
         unsafe {
             let add = llvm::LLVMBuildAdd(self.llbuilder, a, b, UNNAMED);
             if llvm::LLVMIsAInstruction(add).is_some() {
-                llvm::LLVMSetNUW(add, True);
-                llvm::LLVMSetNSW(add, True);
+                llvm::LLVMSetNUW(add, TRUE);
+                llvm::LLVMSetNSW(add, TRUE);
             }
             add
         }
@@ -503,8 +503,8 @@ impl<'a, 'll, 'tcx> BuilderMethods<'a, 'tcx> for Builder<'a, 'll, 'tcx> {
         unsafe {
             let sub = llvm::LLVMBuildSub(self.llbuilder, a, b, UNNAMED);
             if llvm::LLVMIsAInstruction(sub).is_some() {
-                llvm::LLVMSetNUW(sub, True);
-                llvm::LLVMSetNSW(sub, True);
+                llvm::LLVMSetNUW(sub, TRUE);
+                llvm::LLVMSetNSW(sub, TRUE);
             }
             sub
         }
@@ -513,8 +513,8 @@ impl<'a, 'll, 'tcx> BuilderMethods<'a, 'tcx> for Builder<'a, 'll, 'tcx> {
         unsafe {
             let mul = llvm::LLVMBuildMul(self.llbuilder, a, b, UNNAMED);
             if llvm::LLVMIsAInstruction(mul).is_some() {
-                llvm::LLVMSetNUW(mul, True);
-                llvm::LLVMSetNSW(mul, True);
+                llvm::LLVMSetNUW(mul, TRUE);
+                llvm::LLVMSetNSW(mul, TRUE);
             }
             mul
         }
@@ -528,7 +528,7 @@ impl<'a, 'll, 'tcx> BuilderMethods<'a, 'tcx> for Builder<'a, 'll, 'tcx> {
             // an instruction, so we need to check before setting the flag.
             // (See also `LLVMBuildNUWNeg` which also needs a check.)
             if llvm::LLVMIsAInstruction(or).is_some() {
-                llvm::LLVMSetIsDisjoint(or, True);
+                llvm::LLVMSetIsDisjoint(or, TRUE);
             }
             or
         }
@@ -629,7 +629,7 @@ impl<'a, 'll, 'tcx> BuilderMethods<'a, 'tcx> for Builder<'a, 'll, 'tcx> {
     fn volatile_load(&mut self, ty: &'ll Type, ptr: &'ll Value) -> &'ll Value {
         unsafe {
             let load = llvm::LLVMBuildLoad2(self.llbuilder, ty, ptr, UNNAMED);
-            llvm::LLVMSetVolatile(load, llvm::True);
+            llvm::LLVMSetVolatile(load, llvm::TRUE);
             load
         }
     }
@@ -717,7 +717,7 @@ impl<'a, 'll, 'tcx> BuilderMethods<'a, 'tcx> for Builder<'a, 'll, 'tcx> {
             let mut const_llval = None;
             let llty = place.layout.llvm_type(self);
             if let Some(global) = llvm::LLVMIsAGlobalVariable(place.val.llval) {
-                if llvm::LLVMIsGlobalConstant(global) == llvm::True {
+                if llvm::LLVMIsGlobalConstant(global).is_true() {
                     if let Some(init) = llvm::LLVMGetInitializer(global) {
                         if self.val_ty(init) == llty {
                             const_llval = Some(init);
@@ -838,7 +838,7 @@ impl<'a, 'll, 'tcx> BuilderMethods<'a, 'tcx> for Builder<'a, 'll, 'tcx> {
                 if flags.contains(MemFlags::UNALIGNED) { 1 } else { align.bytes() as c_uint };
             llvm::LLVMSetAlignment(store, align);
             if flags.contains(MemFlags::VOLATILE) {
-                llvm::LLVMSetVolatile(store, llvm::True);
+                llvm::LLVMSetVolatile(store, llvm::TRUE);
             }
             if flags.contains(MemFlags::NONTEMPORAL) {
                 // Make sure that the current target architectures supports "sane" non-temporal
@@ -956,7 +956,7 @@ impl<'a, 'll, 'tcx> BuilderMethods<'a, 'tcx> for Builder<'a, 'll, 'tcx> {
         let trunc = self.trunc(val, dest_ty);
         unsafe {
             if llvm::LLVMIsAInstruction(trunc).is_some() {
-                llvm::LLVMSetNUW(trunc, True);
+                llvm::LLVMSetNUW(trunc, TRUE);
             }
         }
         trunc
@@ -968,7 +968,7 @@ impl<'a, 'll, 'tcx> BuilderMethods<'a, 'tcx> for Builder<'a, 'll, 'tcx> {
         let trunc = self.trunc(val, dest_ty);
         unsafe {
             if llvm::LLVMIsAInstruction(trunc).is_some() {
-                llvm::LLVMSetNSW(trunc, True);
+                llvm::LLVMSetNSW(trunc, TRUE);
             }
         }
         trunc
@@ -1067,13 +1067,7 @@ impl<'a, 'll, 'tcx> BuilderMethods<'a, 'tcx> for Builder<'a, 'll, 'tcx> {
 
     fn intcast(&mut self, val: &'ll Value, dest_ty: &'ll Type, is_signed: bool) -> &'ll Value {
         unsafe {
-            llvm::LLVMBuildIntCast2(
-                self.llbuilder,
-                val,
-                dest_ty,
-                if is_signed { True } else { False },
-                UNNAMED,
-            )
+            llvm::LLVMBuildIntCast2(self.llbuilder, val, dest_ty, is_signed.to_llvm_bool(), UNNAMED)
         }
     }
 
@@ -1229,7 +1223,7 @@ impl<'a, 'll, 'tcx> BuilderMethods<'a, 'tcx> for Builder<'a, 'll, 'tcx> {
         let ty = self.type_struct(&[self.type_ptr(), self.type_i32()], false);
         let landing_pad = self.landing_pad(ty, pers_fn, 0);
         unsafe {
-            llvm::LLVMSetCleanup(landing_pad, llvm::True);
+            llvm::LLVMSetCleanup(landing_pad, llvm::TRUE);
         }
         (self.extract_value(landing_pad, 0), self.extract_value(landing_pad, 1))
     }
@@ -1317,7 +1311,6 @@ impl<'a, 'll, 'tcx> BuilderMethods<'a, 'tcx> for Builder<'a, 'll, 'tcx> {
         failure_order: rustc_middle::ty::AtomicOrdering,
         weak: bool,
     ) -> (&'ll Value, &'ll Value) {
-        let weak = if weak { llvm::True } else { llvm::False };
         unsafe {
             let value = llvm::LLVMBuildAtomicCmpXchg(
                 self.llbuilder,
@@ -1326,9 +1319,9 @@ impl<'a, 'll, 'tcx> BuilderMethods<'a, 'tcx> for Builder<'a, 'll, 'tcx> {
                 src,
                 AtomicOrdering::from_generic(order),
                 AtomicOrdering::from_generic(failure_order),
-                llvm::False, // SingleThreaded
+                llvm::FALSE, // SingleThreaded
             );
-            llvm::LLVMSetWeak(value, weak);
+            llvm::LLVMSetWeak(value, weak.to_llvm_bool());
             let val = self.extract_value(value, 0);
             let success = self.extract_value(value, 1);
             (val, success)
@@ -1353,7 +1346,7 @@ impl<'a, 'll, 'tcx> BuilderMethods<'a, 'tcx> for Builder<'a, 'll, 'tcx> {
                 dst,
                 src,
                 AtomicOrdering::from_generic(order),
-                llvm::False, // SingleThreaded
+                llvm::FALSE, // SingleThreaded
             )
         };
         if ret_ptr && self.val_ty(res) != self.type_ptr() {
@@ -1368,14 +1361,14 @@ impl<'a, 'll, 'tcx> BuilderMethods<'a, 'tcx> for Builder<'a, 'll, 'tcx> {
         scope: SynchronizationScope,
     ) {
         let single_threaded = match scope {
-            SynchronizationScope::SingleThread => llvm::True,
-            SynchronizationScope::CrossThread => llvm::False,
+            SynchronizationScope::SingleThread => true,
+            SynchronizationScope::CrossThread => false,
         };
         unsafe {
             llvm::LLVMBuildFence(
                 self.llbuilder,
                 AtomicOrdering::from_generic(order),
-                single_threaded,
+                single_threaded.to_llvm_bool(),
                 UNNAMED,
             );
         }
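
Note: the `TRUE`/`FALSE` constants, the `ToLlvmBool` trait behind `.to_llvm_bool()`, and the `.is_true()` helper are defined in `crate::llvm` and are not part of this diff. As a rough, non-authoritative sketch of what such helpers could look like (assuming `Bool` stays a plain `c_uint` alias mirroring `LLVMBool`; the `LlvmBoolExt` name and the exact definitions are illustrative, not the actual implementation):

```rust
use std::ffi::c_uint;

/// Mirror of `LLVMBool` from the C API (assumed representation).
pub type Bool = c_uint;

pub const TRUE: Bool = 1;
pub const FALSE: Bool = 0;

/// Assumed shape of the conversion trait used by `.to_llvm_bool()` above.
pub trait ToLlvmBool {
    fn to_llvm_bool(self) -> Bool;
}

impl ToLlvmBool for bool {
    fn to_llvm_bool(self) -> Bool {
        if self { TRUE } else { FALSE }
    }
}

/// Hypothetical extension trait providing the `.is_true()` call seen in the
/// `LLVMIsGlobalConstant` check; sketched as a trait because `Bool` is a
/// plain type alias here.
pub trait LlvmBoolExt {
    fn is_true(self) -> bool;
}

impl LlvmBoolExt for Bool {
    fn is_true(self) -> bool {
        self != FALSE
    }
}

fn main() {
    // Usage mirroring the call sites in the diff.
    assert_eq!(true.to_llvm_bool(), TRUE);
    assert_eq!(false.to_llvm_bool(), FALSE);
    assert!(TRUE.is_true());
}
```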