diff --git a/.vsts-dotnet-ci.yml b/.vsts-dotnet-ci.yml
index cd05b1802e..9ad25d0b1a 100644
--- a/.vsts-dotnet-ci.yml
+++ b/.vsts-dotnet-ci.yml
@@ -19,7 +19,7 @@ trigger:
resources:
containers:
- container: CentosContainer
- image: mcr.microsoft.com/dotnet-buildtools/prereqs:centos-7-mlnet
+ image: mcr.microsoft.com/dotnet-buildtools/prereqs:centos-stream8-mlnet
- container: UbuntuContainer
image: mcr.microsoft.com/dotnet-buildtools/prereqs:ubuntu-18.04-mlnet
@@ -86,7 +86,7 @@ jobs:
pool:
name: NetCore-Public
demands: ImageOverride -equals build.ubuntu.1804.amd64.open
- helixQueue: Ubuntu.1804.Amd64.Open@mcr.microsoft.com/dotnet-buildtools/prereqs:centos-7-mlnet-helix
+ helixQueue: Ubuntu.1804.Amd64.Open@mcr.microsoft.com/dotnet-buildtools/prereqs:centos-stream8-mlnet-helix
- template: /build/ci/job-template.yml
parameters:
diff --git a/Directory.Build.props b/Directory.Build.props
index 443f4258f2..1fe944cfee 100644
--- a/Directory.Build.props
+++ b/Directory.Build.props
@@ -23,6 +23,9 @@
$(TargetArchitecture.ToLower())
$(Platform).$(Configuration)
Open
+
+
+    <NoWarn>$(NoWarn);NETSDK1206</NoWarn>
@@ -54,8 +57,7 @@
true
-
+
true
- snupkg
diff --git a/NuGet.config b/NuGet.config
index eed95518c1..cf1fc2312f 100644
--- a/NuGet.config
+++ b/NuGet.config
@@ -1,4 +1,4 @@
-
+
diff --git a/build/.night-build.yml b/build/.night-build.yml
index 83c7645cee..d17393bc46 100644
--- a/build/.night-build.yml
+++ b/build/.night-build.yml
@@ -22,7 +22,7 @@ schedules:
resources:
containers:
- container: CentosContainer
- image: mcr.microsoft.com/dotnet-buildtools/prereqs:centos-7-mlnet
+ image: mcr.microsoft.com/dotnet-buildtools/prereqs:centos-stream8-mlnet
- container: UbuntuContainer
image: mcr.microsoft.com/dotnet-buildtools/prereqs:ubuntu-18.04-mlnet
diff --git a/build/.outer-loop-build.yml b/build/.outer-loop-build.yml
index 27f1b935ab..2c58e6aacd 100644
--- a/build/.outer-loop-build.yml
+++ b/build/.outer-loop-build.yml
@@ -23,7 +23,7 @@ schedules:
resources:
containers:
- container: CentosContainer
- image: mcr.microsoft.com/dotnet-buildtools/prereqs:centos-7-mlnet
+ image: mcr.microsoft.com/dotnet-buildtools/prereqs:centos-stream8-mlnet
- container: UbuntuContainer
image: mcr.microsoft.com/dotnet-buildtools/prereqs:ubuntu-18.04-mlnet
diff --git a/build/vsts-ci.yml b/build/vsts-ci.yml
index 9487484d14..85d6cfc5ff 100644
--- a/build/vsts-ci.yml
+++ b/build/vsts-ci.yml
@@ -5,7 +5,7 @@
resources:
containers:
- container: CentosContainer
- image: mcr.microsoft.com/dotnet-buildtools/prereqs:centos-7-mlnet
+ image: mcr.microsoft.com/dotnet-buildtools/prereqs:centos-stream8-mlnet
- container: UbuntuCrossArmContainer
image: mcr.microsoft.com/dotnet-buildtools/prereqs:ubuntu-18.04-mlnet-cross-arm
diff --git a/eng/Version.Details.xml b/eng/Version.Details.xml
index 0ae3fc4712..b12195cacd 100644
--- a/eng/Version.Details.xml
+++ b/eng/Version.Details.xml
@@ -7,25 +7,25 @@
-
+
https://github.com/dotnet/arcade
- 2d8d59065b5e090584a8e90c4371fc06ed60bdc5
+ 9e5fd26c0d1a187dfa08242d83e341634d4b1b07
-
+
https://github.com/dotnet/arcade
- 2d8d59065b5e090584a8e90c4371fc06ed60bdc5
+ 9e5fd26c0d1a187dfa08242d83e341634d4b1b07
-
+
https://github.com/dotnet/arcade
- 2d8d59065b5e090584a8e90c4371fc06ed60bdc5
+ 9e5fd26c0d1a187dfa08242d83e341634d4b1b07
-
+
https://github.com/dotnet/arcade
- 2d8d59065b5e090584a8e90c4371fc06ed60bdc5
+ 9e5fd26c0d1a187dfa08242d83e341634d4b1b07
-
+
https://github.com/dotnet/arcade
- 2d8d59065b5e090584a8e90c4371fc06ed60bdc5
+ 9e5fd26c0d1a187dfa08242d83e341634d4b1b07
https://github.com/dotnet/arcade-services
@@ -39,9 +39,9 @@
https://github.com/dotnet/xharness
89cb4b1d368e0f15b4df8e02a176dd1f1c33958b
-
+
https://github.com/dotnet/arcade
- 2d8d59065b5e090584a8e90c4371fc06ed60bdc5
+ 9e5fd26c0d1a187dfa08242d83e341634d4b1b07
https://github.com/dotnet/roslyn
@@ -63,9 +63,9 @@
https://github.com/dotnet/symreader-converter
c5ba7c88f92e2dde156c324a8c8edc04d9fa4fe0
-
- https://github.com/dotnet/xliff-tasks
- 397ff033b467003d51619f9ac3928e02a4d4178f
+
+ https://github.com/dotnet/arcade
+ 9e5fd26c0d1a187dfa08242d83e341634d4b1b07
diff --git a/eng/Versions.props b/eng/Versions.props
index 0e19fc2e16..664669e5b8 100644
--- a/eng/Versions.props
+++ b/eng/Versions.props
@@ -81,7 +81,7 @@
8.0.0
5.10.2
1.1.2-beta1.23431.1
- 8.0.0-beta.23265.1
+ 9.0.0-beta.23628.1
2.1.0
3.0.1
0.0.6-test
@@ -91,7 +91,7 @@
4.8.5
1.0.118
1.2.7
- 2.4.2
+ 2.6.3
false
diff --git a/eng/common/SetupNugetSources.ps1 b/eng/common/SetupNugetSources.ps1
index 6e99723945..6c65e81925 100644
--- a/eng/common/SetupNugetSources.ps1
+++ b/eng/common/SetupNugetSources.ps1
@@ -153,7 +153,7 @@ if ($dotnet31Source -ne $null) {
AddPackageSource -Sources $sources -SourceName "dotnet3.1-internal-transport" -SourceEndPoint "https://pkgs.dev.azure.com/dnceng/_packaging/dotnet3.1-internal-transport/nuget/v2" -Creds $creds -Username $userName -Password $Password
}
-$dotnetVersions = @('5','6','7')
+$dotnetVersions = @('5','6','7','8')
foreach ($dotnetVersion in $dotnetVersions) {
$feedPrefix = "dotnet" + $dotnetVersion;
diff --git a/eng/common/SetupNugetSources.sh b/eng/common/SetupNugetSources.sh
index 8af7d899db..d387c7eac9 100755
--- a/eng/common/SetupNugetSources.sh
+++ b/eng/common/SetupNugetSources.sh
@@ -105,7 +105,7 @@ if [ "$?" == "0" ]; then
PackageSources+=('dotnet3.1-internal-transport')
fi
-DotNetVersions=('5' '6' '7')
+DotNetVersions=('5' '6' '7' '8')
for DotNetVersion in ${DotNetVersions[@]} ; do
FeedPrefix="dotnet${DotNetVersion}";
diff --git a/eng/common/build.cmd b/eng/common/build.cmd
new file mode 100644
index 0000000000..99daf368ab
--- /dev/null
+++ b/eng/common/build.cmd
@@ -0,0 +1,3 @@
+@echo off
+powershell -ExecutionPolicy ByPass -NoProfile -command "& """%~dp0build.ps1""" %*"
+exit /b %ErrorLevel%
diff --git a/eng/common/build.ps1 b/eng/common/build.ps1
index 33a6f2d0e2..066044f62f 100644
--- a/eng/common/build.ps1
+++ b/eng/common/build.ps1
@@ -19,6 +19,7 @@ Param(
[switch] $pack,
[switch] $publish,
[switch] $clean,
+ [switch] $verticalBuild,
[switch][Alias('bl')]$binaryLog,
[switch][Alias('nobl')]$excludeCIBinarylog,
[switch] $ci,
@@ -58,6 +59,7 @@ function Print-Usage() {
Write-Host " -sign Sign build outputs"
Write-Host " -publish Publish artifacts (e.g. symbols)"
Write-Host " -clean Clean the solution"
+ Write-Host " -verticalBuild Run in 'vertical build' infra mode."
Write-Host ""
Write-Host "Advanced settings:"
@@ -120,6 +122,7 @@ function Build {
/p:Deploy=$deploy `
/p:Test=$test `
/p:Pack=$pack `
+ /p:ArcadeBuildVertical=$verticalBuild `
/p:IntegrationTest=$integrationTest `
/p:PerformanceTest=$performanceTest `
/p:Sign=$sign `
diff --git a/eng/common/build.sh b/eng/common/build.sh
index 50af40cdd2..5ce01dd161 100755
--- a/eng/common/build.sh
+++ b/eng/common/build.sh
@@ -59,6 +59,7 @@ scriptroot="$( cd -P "$( dirname "$source" )" && pwd )"
restore=false
build=false
source_build=false
+vertical_build=false
rebuild=false
test=false
integration_test=false
@@ -105,7 +106,7 @@ while [[ $# > 0 ]]; do
-binarylog|-bl)
binary_log=true
;;
- -excludeCIBinarylog|-nobl)
+ -excludecibinarylog|-nobl)
exclude_ci_binary_log=true
;;
-pipelineslog|-pl)
@@ -129,6 +130,12 @@ while [[ $# > 0 ]]; do
restore=true
pack=true
;;
+ -verticalbuild|-vb)
+ build=true
+ vertical_build=true
+ restore=true
+ pack=true
+ ;;
-test|-t)
test=true
;;
@@ -220,6 +227,7 @@ function Build {
/p:Restore=$restore \
/p:Build=$build \
/p:ArcadeBuildFromSource=$source_build \
+ /p:ArcadeBuildVertical=$vertical_build \
/p:Rebuild=$rebuild \
/p:Test=$test \
/p:Pack=$pack \
diff --git a/eng/common/cross/build-rootfs.sh b/eng/common/cross/build-rootfs.sh
index 9caf9b021d..4228f202e5 100755
--- a/eng/common/cross/build-rootfs.sh
+++ b/eng/common/cross/build-rootfs.sh
@@ -487,7 +487,7 @@ if [[ "$__CodeName" == "alpine" ]]; then
-X "http://dl-cdn.alpinelinux.org/alpine/$version/main" \
-X "http://dl-cdn.alpinelinux.org/alpine/$version/community" \
-U $__ApkSignatureArg --root "$__RootfsDir" --arch "$__AlpineArch" \
- search 'llvm*-libs' | sort | tail -1 | sed 's/-[^-]*//2g')"
+ search 'llvm*-libs' | grep -E '^llvm' | sort | tail -1 | sed 's/-[^-]*//2g')"
fi
# install all packages in one go
diff --git a/eng/common/cross/riscv64/tizen/tizen.patch b/eng/common/cross/riscv64/tizen/tizen.patch
new file mode 100644
index 0000000000..eb6d1c0747
--- /dev/null
+++ b/eng/common/cross/riscv64/tizen/tizen.patch
@@ -0,0 +1,9 @@
+diff -u -r a/usr/lib/libc.so b/usr/lib/libc.so
+--- a/usr/lib64/libc.so 2016-12-30 23:00:08.284951863 +0900
++++ b/usr/lib64/libc.so 2016-12-30 23:00:32.140951815 +0900
+@@ -2,4 +2,4 @@
+ Use the shared library, but some functions are only in
+ the static library, so try that secondarily. */
+ OUTPUT_FORMAT(elf64-littleriscv)
+-GROUP ( /lib64/libc.so.6 /usr/lib64/libc_nonshared.a AS_NEEDED ( /lib64/ld-linux-riscv64-lp64d.so.1 ) )
++GROUP ( libc.so.6 libc_nonshared.a AS_NEEDED ( ld-linux-riscv64-lp64d.so.1 ) )
diff --git a/eng/common/cross/tizen-build-rootfs.sh b/eng/common/cross/tizen-build-rootfs.sh
index ac84173d44..ba31c93285 100644
--- a/eng/common/cross/tizen-build-rootfs.sh
+++ b/eng/common/cross/tizen-build-rootfs.sh
@@ -22,6 +22,10 @@ case "$ARCH" in
TIZEN_ARCH="x86_64"
LINK_ARCH="x86"
;;
+ riscv64)
+ TIZEN_ARCH="riscv64"
+ LINK_ARCH="riscv"
+ ;;
*)
echo "Unsupported architecture for tizen: $ARCH"
exit 1
@@ -58,4 +62,21 @@ rm -rf $TIZEN_TMP_DIR
echo ">>Start configuring Tizen rootfs"
ln -sfn asm-${LINK_ARCH} ./usr/include/asm
patch -p1 < $__TIZEN_CROSSDIR/tizen.patch
+if [[ "$TIZEN_ARCH" == "riscv64" ]]; then
+ echo "Fixing broken symlinks in $PWD"
+ rm ./usr/lib64/libresolv.so
+ ln -s ../../lib64/libresolv.so.2 ./usr/lib64/libresolv.so
+ rm ./usr/lib64/libpthread.so
+ ln -s ../../lib64/libpthread.so.0 ./usr/lib64/libpthread.so
+ rm ./usr/lib64/libdl.so
+ ln -s ../../lib64/libdl.so.2 ./usr/lib64/libdl.so
+ rm ./usr/lib64/libutil.so
+ ln -s ../../lib64/libutil.so.1 ./usr/lib64/libutil.so
+ rm ./usr/lib64/libm.so
+ ln -s ../../lib64/libm.so.6 ./usr/lib64/libm.so
+ rm ./usr/lib64/librt.so
+ ln -s ../../lib64/librt.so.1 ./usr/lib64/librt.so
+ rm ./lib/ld-linux-riscv64-lp64d.so.1
+ ln -s ../lib64/ld-linux-riscv64-lp64d.so.1 ./lib/ld-linux-riscv64-lp64d.so.1
+fi
echo "< 0 ]]; do
diff --git a/eng/common/dotnet-install.sh b/eng/common/dotnet-install.sh
index abd045a324..7e69e3a9e2 100755
--- a/eng/common/dotnet-install.sh
+++ b/eng/common/dotnet-install.sh
@@ -54,6 +54,10 @@ cpuname=$(uname -m)
case $cpuname in
arm64|aarch64)
buildarch=arm64
+ if [ "$(getconf LONG_BIT)" -lt 64 ]; then
+ # This is 32-bit OS running on 64-bit CPU (for example Raspberry Pi OS)
+ buildarch=arm
+ fi
;;
loongarch64)
buildarch=loongarch64
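Illustrative sketch, not part of the patch: the new getconf check in isolation. On a 32-bit Raspberry Pi OS userland running on a 64-bit CPU, uname -m reports aarch64 while getconf LONG_BIT reports 32, so the arm payload is selected.
cpuname=$(uname -m)
case $cpuname in
  arm64|aarch64)
    buildarch=arm64
    # 32-bit OS on a 64-bit CPU still needs the 32-bit arm build
    if [ "$(getconf LONG_BIT)" -lt 64 ]; then
      buildarch=arm
    fi
    ;;
  *)
    buildarch=$cpuname
    ;;
esac
echo "buildarch=$buildarch"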
diff --git a/eng/common/loc/P22DotNetHtmlLocalization.lss b/eng/common/loc/P22DotNetHtmlLocalization.lss
index 6661fed566..5d892d6193 100644
Binary files a/eng/common/loc/P22DotNetHtmlLocalization.lss and b/eng/common/loc/P22DotNetHtmlLocalization.lss differ
diff --git a/eng/common/native/init-compiler.sh b/eng/common/native/init-compiler.sh
index 517401b688..f5c1ec7eaf 100644
--- a/eng/common/native/init-compiler.sh
+++ b/eng/common/native/init-compiler.sh
@@ -63,7 +63,7 @@ if [ -z "$CLR_CC" ]; then
# Set default versions
if [ -z "$majorVersion" ]; then
# note: gcc (all versions) and clang versions higher than 6 do not have minor version in file name, if it is zero.
- if [ "$compiler" = "clang" ]; then versions="16 15 14 13 12 11 10 9 8 7 6.0 5.0 4.0 3.9 3.8 3.7 3.6 3.5"
+ if [ "$compiler" = "clang" ]; then versions="17 16 15 14 13 12 11 10 9 8 7 6.0 5.0 4.0 3.9 3.8 3.7 3.6 3.5"
elif [ "$compiler" = "gcc" ]; then versions="13 12 11 10 9 8 7 6 5 4.9"; fi
for version in $versions; do
diff --git a/eng/common/native/init-distro-rid.sh b/eng/common/native/init-distro-rid.sh
new file mode 100644
index 0000000000..de1687b2cc
--- /dev/null
+++ b/eng/common/native/init-distro-rid.sh
@@ -0,0 +1,130 @@
+#!/usr/bin/env bash
+
+# getNonPortableDistroRid
+#
+# Input:
+# targetOs: (str)
+# targetArch: (str)
+# rootfsDir: (str)
+#
+# Return:
+# non-portable rid
+getNonPortableDistroRid()
+{
+ local targetOs="$1"
+ local targetArch="$2"
+ local rootfsDir="$3"
+ local nonPortableRid=""
+
+ if [ "$targetOs" = "linux" ]; then
+ if [ -e "${rootfsDir}/etc/os-release" ]; then
+ source "${rootfsDir}/etc/os-release"
+
+ if [[ "${ID}" == "rhel" || "${ID}" == "rocky" || "${ID}" == "alpine" ]]; then
+ # remove the last version digit
+ VERSION_ID="${VERSION_ID%.*}"
+ fi
+
+ if [[ "${VERSION_ID:-}" =~ ^([[:digit:]]|\.)+$ ]]; then
+ nonPortableRid="${ID}.${VERSION_ID}-${targetArch}"
+ else
+ # Rolling release distros either do not set VERSION_ID, set it as blank or
+ # set it to non-version looking string (such as TEMPLATE_VERSION_ID on ArchLinux);
+ # so omit it here to be consistent with everything else.
+ nonPortableRid="${ID}-${targetArch}"
+ fi
+
+ elif [ -e "${rootfsDir}/android_platform" ]; then
+ source "$rootfsDir"/android_platform
+ nonPortableRid="$RID"
+ fi
+ fi
+
+ if [ "$targetOs" = "freebsd" ]; then
+ # $rootfsDir can be empty. freebsd-version is shell script and it should always work.
+ __freebsd_major_version=$($rootfsDir/bin/freebsd-version | { read v; echo "${v%%.*}"; })
+ nonPortableRid="freebsd.$__freebsd_major_version-${targetArch}"
+ elif command -v getprop && getprop ro.product.system.model 2>&1 | grep -qi android; then
+ __android_sdk_version=$(getprop ro.build.version.sdk)
+ nonPortableRid="android.$__android_sdk_version-${targetArch}"
+ elif [ "$targetOs" = "illumos" ]; then
+ __uname_version=$(uname -v)
+ case "$__uname_version" in
+ omnios-*)
+ __omnios_major_version=$(echo "${__uname_version:8:2}")
+ nonPortableRid=omnios."$__omnios_major_version"-"$targetArch"
+ ;;
+ joyent_*)
+ __smartos_major_version=$(echo "${__uname_version:7:4}")
+ nonPortableRid=smartos."$__smartos_major_version"-"$targetArch"
+ ;;
+ illumos_*)
+ nonPortableRid=openindiana-"$targetArch"
+ ;;
+ esac
+ elif [ "$targetOs" = "solaris" ]; then
+ __uname_version=$(uname -v)
+ __solaris_major_version=$(echo "${__uname_version%.*}")
+ nonPortableRid=solaris."$__solaris_major_version"-"$targetArch"
+ elif [ "$targetOs" = "haiku" ]; then
+ __uname_release=$(uname -r)
+ nonPortableRid=haiku.r"$__uname_release"-"$targetArch"
+ fi
+
+ echo "$(echo $nonPortableRid | tr '[:upper:]' '[:lower:]')"
+}
+
+# initDistroRidGlobal
+#
+# Input:
+# os: (str)
+# arch: (str)
+# rootfsDir?: (nullable:string)
+#
+# Return:
+# None
+#
+# Notes:
+#
+# It is important to note that the function does not return anything, but it
+# exports the following variables on success:
+#
+# __DistroRid : Non-portable rid of the target platform.
+# __PortableTargetOS : OS-part of the portable rid that corresponds to the target platform.
+#
+initDistroRidGlobal()
+{
+ local targetOs="$1"
+ local targetArch="$2"
+ local rootfsDir=""
+ if [ "$#" -ge 3 ]; then
+ rootfsDir="$3"
+ fi
+
+ if [ -n "${rootfsDir}" ]; then
+ # We may have a cross build. Check for the existence of the rootfsDir
+ if [ ! -e "${rootfsDir}" ]; then
+ echo "Error rootfsDir has been passed, but the location is not valid."
+ exit 1
+ fi
+ fi
+
+ __DistroRid=$(getNonPortableDistroRid "${targetOs}" "${targetArch}" "${rootfsDir}")
+
+ if [ -z "${__PortableTargetOS:-}" ]; then
+ __PortableTargetOS="$targetOs"
+
+ STRINGS="$(command -v strings || true)"
+ if [ -z "$STRINGS" ]; then
+ STRINGS="$(command -v llvm-strings || true)"
+ fi
+
+ # Check for musl-based distros (e.g Alpine Linux, Void Linux).
+ if "${rootfsDir}/usr/bin/ldd" --version 2>&1 | grep -q musl ||
+ ( [ -n "$STRINGS" ] && "$STRINGS" "${rootfsDir}/usr/bin/ldd" 2>&1 | grep -q musl ); then
+ __PortableTargetOS="linux-musl"
+ fi
+ fi
+
+ export __DistroRid __PortableTargetOS
+}
diff --git a/eng/common/native/init-os-and-arch.sh b/eng/common/native/init-os-and-arch.sh
new file mode 100644
index 0000000000..e693617a6c
--- /dev/null
+++ b/eng/common/native/init-os-and-arch.sh
@@ -0,0 +1,80 @@
+#!/usr/bin/env bash
+
+# Use uname to determine what the OS is.
+OSName=$(uname -s | tr '[:upper:]' '[:lower:]')
+
+if command -v getprop && getprop ro.product.system.model 2>&1 | grep -qi android; then
+ OSName="android"
+fi
+
+case "$OSName" in
+freebsd|linux|netbsd|openbsd|sunos|android|haiku)
+ os="$OSName" ;;
+darwin)
+ os=osx ;;
+*)
+ echo "Unsupported OS $OSName detected!"
+ exit 1 ;;
+esac
+
+# On Solaris, `uname -m` is discouraged, see https://docs.oracle.com/cd/E36784_01/html/E36870/uname-1.html
+# and `uname -p` returns processor type (e.g. i386 on amd64).
+# The appropriate tool to determine CPU is isainfo(1) https://docs.oracle.com/cd/E36784_01/html/E36870/isainfo-1.html.
+if [ "$os" = "sunos" ]; then
+ if uname -o 2>&1 | grep -q illumos; then
+ os="illumos"
+ else
+ os="solaris"
+ fi
+ CPUName=$(isainfo -n)
+else
+ # For the rest of the operating systems, use uname(1) to determine what the CPU is.
+ CPUName=$(uname -m)
+fi
+
+case "$CPUName" in
+ arm64|aarch64)
+ arch=arm64
+ ;;
+
+ loongarch64)
+ arch=loongarch64
+ ;;
+
+ riscv64)
+ arch=riscv64
+ ;;
+
+ amd64|x86_64)
+ arch=x64
+ ;;
+
+ armv7l|armv8l)
+ if (NAME=""; . /etc/os-release; test "$NAME" = "Tizen"); then
+ arch=armel
+ else
+ arch=arm
+ fi
+ ;;
+
+ armv6l)
+ arch=armv6
+ ;;
+
+ i[3-6]86)
+ echo "Unsupported CPU $CPUName detected, build might not succeed!"
+ arch=x86
+ ;;
+
+ s390x)
+ arch=s390x
+ ;;
+
+ ppc64le)
+ arch=ppc64le
+ ;;
+ *)
+ echo "Unknown CPU $CPUName detected!"
+ exit 1
+ ;;
+esac
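Hedged usage sketch for the two new helper scripts above, not part of the patch; it assumes both files are sourced from a repo checkout and that ROOTFS_DIR is set only for cross builds.
#!/usr/bin/env bash
set -euo pipefail
# init-os-and-arch.sh populates $os and $arch; init-distro-rid.sh then
# exports __DistroRid and __PortableTargetOS based on them.
source ./eng/common/native/init-os-and-arch.sh
source ./eng/common/native/init-distro-rid.sh
initDistroRidGlobal "$os" "$arch" "${ROOTFS_DIR:-}"
echo "non-portable RID: $__DistroRid"
echo "portable target OS: $__PortableTargetOS"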
diff --git a/eng/common/post-build/add-build-to-channel.ps1 b/eng/common/post-build/add-build-to-channel.ps1
index de2d957922..49938f0c89 100644
--- a/eng/common/post-build/add-build-to-channel.ps1
+++ b/eng/common/post-build/add-build-to-channel.ps1
@@ -2,7 +2,7 @@ param(
[Parameter(Mandatory=$true)][int] $BuildId,
[Parameter(Mandatory=$true)][int] $ChannelId,
[Parameter(Mandatory=$true)][string] $MaestroApiAccessToken,
- [Parameter(Mandatory=$false)][string] $MaestroApiEndPoint = 'https://maestro-prod.westus2.cloudapp.azure.com',
+ [Parameter(Mandatory=$false)][string] $MaestroApiEndPoint = 'https://maestro.dot.net',
[Parameter(Mandatory=$false)][string] $MaestroApiVersion = '2019-01-16'
)
diff --git a/eng/common/post-build/check-channel-consistency.ps1 b/eng/common/post-build/check-channel-consistency.ps1
index 63f3464c98..1728f035a9 100644
--- a/eng/common/post-build/check-channel-consistency.ps1
+++ b/eng/common/post-build/check-channel-consistency.ps1
@@ -7,7 +7,7 @@ try {
. $PSScriptRoot\post-build-utils.ps1
if ($PromoteToChannels -eq "") {
- Write-PipelineTaskError -Type 'warning' -Message "This build won't publish assets as it's not configured to any Maestro channel. If that wasn't intended use Darc to configure a default channel using add-default-channel for this branch or to promote it to a channel using add-build-to-channel. See https://github.com/dotnet/arcade/blob/master/Documentation/Darc.md#assigning-an-individual-build-to-a-channel for more info."
+ Write-PipelineTaskError -Type 'warning' -Message "This build won't publish assets as it's not configured to any Maestro channel. If that wasn't intended use Darc to configure a default channel using add-default-channel for this branch or to promote it to a channel using add-build-to-channel. See https://github.com/dotnet/arcade/blob/main/Documentation/Darc.md#assigning-an-individual-build-to-a-channel for more info."
ExitWithExitCode 0
}
diff --git a/eng/common/post-build/publish-using-darc.ps1 b/eng/common/post-build/publish-using-darc.ps1
index 8508397d77..1e779fec4d 100644
--- a/eng/common/post-build/publish-using-darc.ps1
+++ b/eng/common/post-build/publish-using-darc.ps1
@@ -3,7 +3,7 @@ param(
[Parameter(Mandatory=$true)][int] $PublishingInfraVersion,
[Parameter(Mandatory=$true)][string] $AzdoToken,
[Parameter(Mandatory=$true)][string] $MaestroToken,
- [Parameter(Mandatory=$false)][string] $MaestroApiEndPoint = 'https://maestro-prod.westus2.cloudapp.azure.com',
+ [Parameter(Mandatory=$false)][string] $MaestroApiEndPoint = 'https://maestro.dot.net',
[Parameter(Mandatory=$true)][string] $WaitPublishingFinish,
[Parameter(Mandatory=$false)][string] $ArtifactsPublishingAdditionalParameters,
[Parameter(Mandatory=$false)][string] $SymbolPublishingAdditionalParameters
diff --git a/eng/common/post-build/redact-logs.ps1 b/eng/common/post-build/redact-logs.ps1
new file mode 100644
index 0000000000..82d91f6fd0
--- /dev/null
+++ b/eng/common/post-build/redact-logs.ps1
@@ -0,0 +1,81 @@
+[CmdletBinding(PositionalBinding=$False)]
+param(
+ [Parameter(Mandatory=$true, Position=0)][string] $InputPath,
+ [Parameter(Mandatory=$true)][string] $BinlogToolVersion,
+ [Parameter(Mandatory=$false)][string] $DotnetPath,
+ [Parameter(Mandatory=$false)][string] $PackageFeed = 'https://pkgs.dev.azure.com/dnceng/public/_packaging/dotnet-public/nuget/v3/index.json',
+ # File with strings to redact - separated by newlines.
+ # For comments start the line with '# ' - such lines are ignored
+ [Parameter(Mandatory=$false)][string] $TokensFilePath,
+ [Parameter(ValueFromRemainingArguments=$true)][String[]]$TokensToRedact
+)
+
+try {
+ . $PSScriptRoot\post-build-utils.ps1
+
+ $packageName = 'binlogtool'
+
+ $dotnet = $DotnetPath
+
+ if (!$dotnet) {
+ $dotnetRoot = InitializeDotNetCli -install:$true
+ $dotnet = "$dotnetRoot\dotnet.exe"
+ }
+
+ $toolList = & "$dotnet" tool list -g
+
+ if ($toolList -like "*$packageName*") {
+ & "$dotnet" tool uninstall $packageName -g
+ }
+
+ $toolPath = "$PSScriptRoot\..\..\..\.tools"
+ $verbosity = 'minimal'
+
+ New-Item -ItemType Directory -Force -Path $toolPath
+
+ Push-Location -Path $toolPath
+
+ try {
+ Write-Host "Installing Binlog redactor CLI..."
+ Write-Host "'$dotnet' new tool-manifest"
+ & "$dotnet" new tool-manifest
+ Write-Host "'$dotnet' tool install $packageName --local --add-source '$PackageFeed' -v $verbosity --version $BinlogToolVersion"
+ & "$dotnet" tool install $packageName --local --add-source "$PackageFeed" -v $verbosity --version $BinlogToolVersion
+
+ if (Test-Path $TokensFilePath) {
+ Write-Host "Adding additional sensitive data for redaction from file: " $TokensFilePath
+ $TokensToRedact += Get-Content -Path $TokensFilePath | Foreach {$_.Trim()} | Where { $_ -notmatch "^# " }
+ }
+
+ $optionalParams = [System.Collections.ArrayList]::new()
+
+ Foreach ($p in $TokensToRedact)
+ {
+ if($p -match '^\$\(.*\)$')
+ {
+ Write-Host ("Ignoring token {0} as it is probably unexpanded AzDO variable" -f $p)
+ }
+ elseif($p)
+ {
+ $optionalParams.Add("-p:" + $p) | Out-Null
+ }
+ }
+
+ & $dotnet binlogtool redact --input:$InputPath --recurse --in-place `
+ @optionalParams
+
+ if ($LastExitCode -ne 0) {
+ Write-PipelineTelemetryError -Category 'Redactor' -Type 'warning' -Message "Problems using Redactor tool (exit code: $LastExitCode). But ignoring them now."
+ }
+ }
+ finally {
+ Pop-Location
+ }
+
+ Write-Host 'done.'
+}
+catch {
+ Write-Host $_
+ Write-PipelineTelemetryError -Category 'Redactor' -Message "There was an error while trying to redact logs. Error: $_"
+ ExitWithExitCode 1
+}
diff --git a/eng/common/post-build/trigger-subscriptions.ps1 b/eng/common/post-build/trigger-subscriptions.ps1
index 55dea518ac..ac9a95778f 100644
--- a/eng/common/post-build/trigger-subscriptions.ps1
+++ b/eng/common/post-build/trigger-subscriptions.ps1
@@ -2,7 +2,7 @@ param(
[Parameter(Mandatory=$true)][string] $SourceRepo,
[Parameter(Mandatory=$true)][int] $ChannelId,
[Parameter(Mandatory=$true)][string] $MaestroApiAccessToken,
- [Parameter(Mandatory=$false)][string] $MaestroApiEndPoint = 'https://maestro-prod.westus2.cloudapp.azure.com',
+ [Parameter(Mandatory=$false)][string] $MaestroApiEndPoint = 'https://maestro.dot.net',
[Parameter(Mandatory=$false)][string] $MaestroApiVersion = '2019-01-16'
)
diff --git a/eng/common/sdk-task.ps1 b/eng/common/sdk-task.ps1
index e10a596879..73828dd30d 100644
--- a/eng/common/sdk-task.ps1
+++ b/eng/common/sdk-task.ps1
@@ -64,7 +64,7 @@ try {
$GlobalJson.tools | Add-Member -Name "vs" -Value (ConvertFrom-Json "{ `"version`": `"16.5`" }") -MemberType NoteProperty
}
if( -not ($GlobalJson.tools.PSObject.Properties.Name -match "xcopy-msbuild" )) {
- $GlobalJson.tools | Add-Member -Name "xcopy-msbuild" -Value "17.4.1" -MemberType NoteProperty
+ $GlobalJson.tools | Add-Member -Name "xcopy-msbuild" -Value "17.8.1-2" -MemberType NoteProperty
}
if ($GlobalJson.tools."xcopy-msbuild".Trim() -ine "none") {
$xcopyMSBuildToolsFolder = InitializeXCopyMSBuild $GlobalJson.tools."xcopy-msbuild" -install $true
diff --git a/eng/common/sdl/configure-sdl-tool.ps1 b/eng/common/sdl/configure-sdl-tool.ps1
index bdbf49e6c7..27f5a4115f 100644
--- a/eng/common/sdl/configure-sdl-tool.ps1
+++ b/eng/common/sdl/configure-sdl-tool.ps1
@@ -17,7 +17,9 @@ Param(
# Optional: Additional params to add to any tool using PoliCheck.
[string[]] $PoliCheckAdditionalRunConfigParams,
# Optional: Additional params to add to any tool using CodeQL/Semmle.
- [string[]] $CodeQLAdditionalRunConfigParams
+ [string[]] $CodeQLAdditionalRunConfigParams,
+ # Optional: Additional params to add to any tool using Binskim.
+ [string[]] $BinskimAdditionalRunConfigParams
)
$ErrorActionPreference = 'Stop'
@@ -69,22 +71,34 @@ try {
$gdnConfigFile = Join-Path $gdnConfigPath "$toolConfigName-configure.gdnconfig"
# For some tools, add default and automatic args.
- if ($tool.Name -eq 'credscan') {
- if ($targetDirectory) {
- $tool.Args += "`"TargetDirectory < $TargetDirectory`""
+ switch -Exact ($tool.Name) {
+ 'credscan' {
+ if ($targetDirectory) {
+ $tool.Args += "`"TargetDirectory < $TargetDirectory`""
+ }
+ $tool.Args += "`"OutputType < pre`""
+ $tool.Args += $CrScanAdditionalRunConfigParams
}
- $tool.Args += "`"OutputType < pre`""
- $tool.Args += $CrScanAdditionalRunConfigParams
- } elseif ($tool.Name -eq 'policheck') {
- if ($targetDirectory) {
- $tool.Args += "`"Target < $TargetDirectory`""
+ 'policheck' {
+ if ($targetDirectory) {
+ $tool.Args += "`"Target < $TargetDirectory`""
+ }
+ $tool.Args += $PoliCheckAdditionalRunConfigParams
}
- $tool.Args += $PoliCheckAdditionalRunConfigParams
- } elseif ($tool.Name -eq 'semmle' -or $tool.Name -eq 'codeql') {
- if ($targetDirectory) {
- $tool.Args += "`"SourceCodeDirectory < $TargetDirectory`""
+ {$_ -in 'semmle', 'codeql'} {
+ if ($targetDirectory) {
+ $tool.Args += "`"SourceCodeDirectory < $TargetDirectory`""
+ }
+ $tool.Args += $CodeQLAdditionalRunConfigParams
+ }
+ 'binskim' {
+ if ($targetDirectory) {
+ # Binskim crashes due to specific PDBs. GitHub issue: https://github.com/microsoft/binskim/issues/924.
+ # We are excluding all `_.pdb` files from the scan.
+ $tool.Args += "`"Target < $TargetDirectory\**;-:file|$TargetDirectory\**\_.pdb`""
+ }
+ $tool.Args += $BinskimAdditionalRunConfigParams
}
- $tool.Args += $CodeQLAdditionalRunConfigParams
}
# Create variable pointing to the args array directly so we can use splat syntax later.
diff --git a/eng/common/sdl/execute-all-sdl-tools.ps1 b/eng/common/sdl/execute-all-sdl-tools.ps1
index 4797e012c7..4715d75e97 100644
--- a/eng/common/sdl/execute-all-sdl-tools.ps1
+++ b/eng/common/sdl/execute-all-sdl-tools.ps1
@@ -35,6 +35,7 @@ Param(
[string[]] $CrScanAdditionalRunConfigParams, # Optional: Additional Params to custom build a CredScan run config in the format @("xyz:abc","sdf:1")
[string[]] $PoliCheckAdditionalRunConfigParams, # Optional: Additional Params to custom build a Policheck run config in the format @("xyz:abc","sdf:1")
[string[]] $CodeQLAdditionalRunConfigParams, # Optional: Additional Params to custom build a Semmle/CodeQL run config in the format @("xyz < abc","sdf < 1")
+ [string[]] $BinskimAdditionalRunConfigParams, # Optional: Additional Params to custom build a Binskim run config in the format @("xyz < abc","sdf < 1")
[bool] $BreakOnFailure=$False # Optional: Fail the build if there were errors during the run
)
@@ -107,7 +108,8 @@ try {
-GuardianLoggerLevel $GuardianLoggerLevel `
-CrScanAdditionalRunConfigParams $CrScanAdditionalRunConfigParams `
-PoliCheckAdditionalRunConfigParams $PoliCheckAdditionalRunConfigParams `
- -CodeQLAdditionalRunConfigParams $CodeQLAdditionalRunConfigParams
+ -CodeQLAdditionalRunConfigParams $CodeQLAdditionalRunConfigParams `
+ -BinskimAdditionalRunConfigParams $BinskimAdditionalRunConfigParams
if ($BreakOnFailure) {
Exit-IfNZEC "Sdl"
}
diff --git a/eng/common/sdl/extract-artifact-packages.ps1 b/eng/common/sdl/extract-artifact-packages.ps1
index 7f28d9c59e..f031ed5b25 100644
--- a/eng/common/sdl/extract-artifact-packages.ps1
+++ b/eng/common/sdl/extract-artifact-packages.ps1
@@ -35,31 +35,33 @@ try {
param(
[string] $PackagePath # Full path to a NuGet package
)
-
+
if (!(Test-Path $PackagePath)) {
Write-PipelineTelemetryError -Category 'Build' -Message "Input file does not exist: $PackagePath"
ExitWithExitCode 1
}
-
+
$RelevantExtensions = @('.dll', '.exe', '.pdb')
Write-Host -NoNewLine 'Extracting ' ([System.IO.Path]::GetFileName($PackagePath)) '...'
-
+
$PackageId = [System.IO.Path]::GetFileNameWithoutExtension($PackagePath)
$ExtractPath = Join-Path -Path $using:ExtractPath -ChildPath $PackageId
-
+
Add-Type -AssemblyName System.IO.Compression.FileSystem
-
+
[System.IO.Directory]::CreateDirectory($ExtractPath);
-
+
try {
$zip = [System.IO.Compression.ZipFile]::OpenRead($PackagePath)
$zip.Entries |
Where-Object {$RelevantExtensions -contains [System.IO.Path]::GetExtension($_.Name)} |
ForEach-Object {
- $TargetFile = Join-Path -Path $ExtractPath -ChildPath $_.Name
-
- [System.IO.Compression.ZipFileExtensions]::ExtractToFile($_, $TargetFile, $true)
+ $TargetPath = Join-Path -Path $ExtractPath -ChildPath (Split-Path -Path $_.FullName)
+ [System.IO.Directory]::CreateDirectory($TargetPath);
+
+ $TargetFile = Join-Path -Path $ExtractPath -ChildPath $_.FullName
+ [System.IO.Compression.ZipFileExtensions]::ExtractToFile($_, $TargetFile)
}
}
catch {
diff --git a/eng/common/sdl/trim-assets-version.ps1 b/eng/common/sdl/trim-assets-version.ps1
new file mode 100644
index 0000000000..0daa2a9e94
--- /dev/null
+++ b/eng/common/sdl/trim-assets-version.ps1
@@ -0,0 +1,75 @@
+<#
+.SYNOPSIS
+Install and run the 'Microsoft.DotNet.VersionTools.Cli' tool with the 'trim-artifacts-version' command to trim the version from the NuGet assets file name.
+
+.PARAMETER InputPath
+Full path to directory where artifact packages are stored
+
+.PARAMETER Recursive
+Search for NuGet packages recursively
+
+#>
+
+Param(
+ [string] $InputPath,
+ [bool] $Recursive = $true
+)
+
+$CliToolName = "Microsoft.DotNet.VersionTools.Cli"
+
+function Install-VersionTools-Cli {
+ param(
+ [Parameter(Mandatory=$true)][string]$Version
+ )
+
+ Write-Host "Installing the package '$CliToolName' with a version of '$version' ..."
+ $feed = "https://pkgs.dev.azure.com/dnceng/public/_packaging/dotnet-eng/nuget/v3/index.json"
+
+ $argumentList = @("tool", "install", "--local", "$CliToolName", "--add-source $feed", "--no-cache", "--version $Version", "--create-manifest-if-needed")
+ Start-Process "$dotnet" -Verbose -ArgumentList $argumentList -NoNewWindow -Wait
+}
+
+# -------------------------------------------------------------------
+
+if (!(Test-Path $InputPath)) {
+ Write-Host "Input Path '$InputPath' does not exist"
+ ExitWithExitCode 1
+}
+
+$ErrorActionPreference = 'Stop'
+Set-StrictMode -Version 2.0
+
+$disableConfigureToolsetImport = $true
+$global:LASTEXITCODE = 0
+
+# `tools.ps1` checks $ci to perform some actions. Since the SDL
+# scripts don't necessarily execute on the same agent that runs the
+# build.ps1/sh script, this variable isn't automatically set.
+$ci = $true
+. $PSScriptRoot\..\tools.ps1
+
+try {
+ $dotnetRoot = InitializeDotNetCli -install:$true
+ $dotnet = "$dotnetRoot\dotnet.exe"
+
+ $toolsetVersion = Read-ArcadeSdkVersion
+ Install-VersionTools-Cli -Version $toolsetVersion
+
+ $cliToolFound = (& "$dotnet" tool list --local | Where-Object {$_.Split(' ')[0] -eq $CliToolName})
+ if ($null -eq $cliToolFound) {
+ Write-PipelineTelemetryError -Force -Category 'Sdl' -Message "The '$CliToolName' tool is not installed."
+ ExitWithExitCode 1
+ }
+
+ Exec-BlockVerbosely {
+ & "$dotnet" $CliToolName trim-assets-version `
+ --assets-path $InputPath `
+ --recursive $Recursive
+ Exit-IfNZEC "Sdl"
+ }
+}
+catch {
+ Write-Host $_
+ Write-PipelineTelemetryError -Force -Category 'Sdl' -Message $_
+ ExitWithExitCode 1
+}
diff --git a/eng/common/templates/job/execute-sdl.yml b/eng/common/templates/job/execute-sdl.yml
index 7aabaa1801..7870f93bc1 100644
--- a/eng/common/templates/job/execute-sdl.yml
+++ b/eng/common/templates/job/execute-sdl.yml
@@ -105,6 +105,11 @@ jobs:
downloadPath: $(Build.ArtifactStagingDirectory)\artifacts
checkDownloadedFiles: true
+ - powershell: eng/common/sdl/trim-assets-version.ps1
+ -InputPath $(Build.ArtifactStagingDirectory)\artifacts
+ displayName: Trim the version from the NuGet packages
+ continueOnError: ${{ parameters.sdlContinueOnError }}
+
- powershell: eng/common/sdl/extract-artifact-packages.ps1
-InputPath $(Build.ArtifactStagingDirectory)\artifacts\BlobArtifacts
-ExtractPath $(Build.ArtifactStagingDirectory)\artifacts\BlobArtifacts
diff --git a/eng/common/templates/job/job.yml b/eng/common/templates/job/job.yml
index 44ad26abf5..01c0dd995e 100644
--- a/eng/common/templates/job/job.yml
+++ b/eng/common/templates/job/job.yml
@@ -75,6 +75,10 @@ jobs:
- ${{ if eq(parameters.enableRichCodeNavigation, 'true') }}:
- name: EnableRichCodeNavigation
value: 'true'
+ # Retry signature validation up to three times, waiting 2 seconds between attempts.
+ # See https://learn.microsoft.com/en-us/nuget/reference/errors-and-warnings/nu3028#retry-untrusted-root-failures
+ - name: NUGET_EXPERIMENTAL_CHAIN_BUILD_RETRY_POLICY
+ value: 3,2000
- ${{ each variable in parameters.variables }}:
# handle name-value variable syntax
# example:
@@ -83,7 +87,7 @@ jobs:
- ${{ if ne(variable.name, '') }}:
- name: ${{ variable.name }}
value: ${{ variable.value }}
-
+
# handle variable groups
- ${{ if ne(variable.group, '') }}:
- group: ${{ variable.group }}
@@ -132,7 +136,7 @@ jobs:
condition: and(succeeded(), in(variables['_SignType'], 'real', 'test'), eq(variables['Agent.Os'], 'Windows_NT'))
- ${{ if and(eq(parameters.runAsPublic, 'false'), eq(variables['System.TeamProject'], 'internal')) }}:
- - task: NuGetAuthenticate@0
+ - task: NuGetAuthenticate@1
- ${{ if and(ne(parameters.artifacts.download, 'false'), ne(parameters.artifacts.download, '')) }}:
- task: DownloadPipelineArtifact@2
@@ -150,7 +154,7 @@ jobs:
displayName: RichCodeNav Upload
inputs:
languages: ${{ coalesce(parameters.richCodeNavigationLanguage, 'csharp') }}
- environment: ${{ coalesce(parameters.richCodeNavigationEnvironment, 'production') }}
+ environment: ${{ coalesce(parameters.richCodeNavigationEnvironment, 'internal') }}
richNavLogOutputDirectory: $(Build.SourcesDirectory)/artifacts/bin
uploadRichNavArtifacts: ${{ coalesce(parameters.richCodeNavigationUploadArtifacts, false) }}
continueOnError: true
@@ -169,7 +173,7 @@ jobs:
- ${{ if eq(parameters.enableMicrobuild, 'true') }}:
- ${{ if and(eq(parameters.runAsPublic, 'false'), ne(variables['System.TeamProject'], 'public'), notin(variables['Build.Reason'], 'PullRequest')) }}:
- task: MicroBuildCleanup@1
- displayName: Execute Microbuild cleanup tasks
+ displayName: Execute Microbuild cleanup tasks
condition: and(always(), in(variables['_SignType'], 'real', 'test'), eq(variables['Agent.Os'], 'Windows_NT'))
continueOnError: ${{ parameters.continueOnError }}
env:
@@ -219,7 +223,7 @@ jobs:
displayName: Publish XUnit Test Results
inputs:
testResultsFormat: 'xUnit'
- testResultsFiles: '*.xml'
+ testResultsFiles: '*.xml'
searchFolder: '$(Build.SourcesDirectory)/artifacts/TestResults/$(_BuildConfig)'
testRunTitle: ${{ coalesce(parameters.testRunTitle, parameters.name, '$(System.JobName)') }}-xunit
mergeTestResults: ${{ parameters.mergeTestResults }}
@@ -230,7 +234,7 @@ jobs:
displayName: Publish TRX Test Results
inputs:
testResultsFormat: 'VSTest'
- testResultsFiles: '*.trx'
+ testResultsFiles: '*.trx'
searchFolder: '$(Build.SourcesDirectory)/artifacts/TestResults/$(_BuildConfig)'
testRunTitle: ${{ coalesce(parameters.testRunTitle, parameters.name, '$(System.JobName)') }}-trx
mergeTestResults: ${{ parameters.mergeTestResults }}
diff --git a/eng/common/templates/job/publish-build-assets.yml b/eng/common/templates/job/publish-build-assets.yml
index 42017109f3..d9c5b4aaf9 100644
--- a/eng/common/templates/job/publish-build-assets.yml
+++ b/eng/common/templates/job/publish-build-assets.yml
@@ -48,8 +48,8 @@ jobs:
- group: AzureDevOps-Artifact-Feeds-Pats
- name: runCodesignValidationInjection
value: false
- - ${{ if eq(parameters.publishAssetsImmediately, 'true') }}:
- - template: /eng/common/templates/post-build/common-variables.yml
+ # unconditional - needed for logs publishing (redactor tool version)
+ - template: /eng/common/templates/post-build/common-variables.yml
pool:
# We don't use the collection uri here because it might vary (.visualstudio.com vs. dev.azure.com)
@@ -72,7 +72,7 @@ jobs:
condition: ${{ parameters.condition }}
continueOnError: ${{ parameters.continueOnError }}
- - task: NuGetAuthenticate@0
+ - task: NuGetAuthenticate@1
- task: PowerShell@2
displayName: Publish Build Assets
@@ -81,7 +81,7 @@ jobs:
arguments: -task PublishBuildAssets -restore -msbuildEngine dotnet
/p:ManifestsPath='$(Build.StagingDirectory)/Download/AssetManifests'
/p:BuildAssetRegistryToken=$(MaestroAccessToken)
- /p:MaestroApiEndpoint=https://maestro-prod.westus2.cloudapp.azure.com
+ /p:MaestroApiEndpoint=https://maestro.dot.net
/p:PublishUsingPipelines=${{ parameters.publishUsingPipelines }}
/p:OfficialBuildId=$(Build.BuildNumber)
condition: ${{ parameters.condition }}
diff --git a/eng/common/templates/job/source-index-stage1.yml b/eng/common/templates/job/source-index-stage1.yml
index b98202aa02..7952336626 100644
--- a/eng/common/templates/job/source-index-stage1.yml
+++ b/eng/common/templates/job/source-index-stage1.yml
@@ -1,6 +1,6 @@
parameters:
runAsPublic: false
- sourceIndexPackageVersion: 1.0.1-20230228.2
+ sourceIndexPackageVersion: 1.0.1-20231213.4
sourceIndexPackageSource: https://pkgs.dev.azure.com/dnceng/public/_packaging/dotnet-tools/nuget/v3/index.json
sourceIndexBuildCommand: powershell -NoLogo -NoProfile -ExecutionPolicy Bypass -Command "eng/common/build.ps1 -restore -build -binarylog -ci"
preSteps: []
@@ -30,20 +30,20 @@ jobs:
pool:
${{ if eq(variables['System.TeamProject'], 'public') }}:
name: $(DncEngPublicBuildPool)
- demands: ImageOverride -equals windows.vs2019.amd64.open
+ demands: ImageOverride -equals windows.vs2022.amd64.open
${{ if eq(variables['System.TeamProject'], 'internal') }}:
name: $(DncEngInternalBuildPool)
- demands: ImageOverride -equals windows.vs2019.amd64
+ demands: ImageOverride -equals windows.vs2022.amd64
steps:
- ${{ each preStep in parameters.preSteps }}:
- ${{ preStep }}
- task: UseDotNet@2
- displayName: Use .NET Core SDK 6
+ displayName: Use .NET 8 SDK
inputs:
packageType: sdk
- version: 6.0.x
+ version: 8.0.x
installationPath: $(Agent.TempDirectory)/dotnet
workingDirectory: $(Agent.TempDirectory)
diff --git a/eng/common/templates/post-build/common-variables.yml b/eng/common/templates/post-build/common-variables.yml
index c24193acfc..d7bf5c6e35 100644
--- a/eng/common/templates/post-build/common-variables.yml
+++ b/eng/common/templates/post-build/common-variables.yml
@@ -7,7 +7,7 @@ variables:
# Default Maestro++ API Endpoint and API Version
- name: MaestroApiEndPoint
- value: "https://maestro-prod.westus2.cloudapp.azure.com"
+ value: "https://maestro.dot.net"
- name: MaestroApiAccessToken
value: $(MaestroAccessToken)
- name: MaestroApiVersion
@@ -17,6 +17,8 @@ variables:
value: 3.0.0
- name: SymbolToolVersion
value: 1.0.1
+ - name: BinlogToolVersion
+ value: 1.0.9
- name: runCodesignValidationInjection
value: false
diff --git a/eng/common/templates/post-build/post-build.yml b/eng/common/templates/post-build/post-build.yml
index ef720f9d78..bbc010fe73 100644
--- a/eng/common/templates/post-build/post-build.yml
+++ b/eng/common/templates/post-build/post-build.yml
@@ -169,7 +169,7 @@ stages:
# This is necessary whenever we want to publish/restore to an AzDO private feed
# Since sdk-task.ps1 tries to restore packages we need to do this authentication here
# otherwise it'll complain about accessing a private feed.
- - task: NuGetAuthenticate@0
+ - task: NuGetAuthenticate@1
displayName: 'Authenticate to AzDO Feeds'
# Signing validation will optionally work with the buildmanifest file which is downloaded from
@@ -187,6 +187,7 @@ stages:
parameters:
StageLabel: 'Validation'
JobLabel: 'Signing'
+ BinlogToolVersion: $(BinlogToolVersion)
- job:
displayName: SourceLink Validation
@@ -266,7 +267,7 @@ stages:
BARBuildId: ${{ parameters.BARBuildId }}
PromoteToChannelIds: ${{ parameters.PromoteToChannelIds }}
- - task: NuGetAuthenticate@0
+ - task: NuGetAuthenticate@1
- task: PowerShell@2
displayName: Publish Using Darc
diff --git a/eng/common/templates/steps/execute-sdl.yml b/eng/common/templates/steps/execute-sdl.yml
index 9dd5709f66..07426fde05 100644
--- a/eng/common/templates/steps/execute-sdl.yml
+++ b/eng/common/templates/steps/execute-sdl.yml
@@ -33,7 +33,7 @@ steps:
- ${{ if ne(parameters.overrideParameters, '') }}:
- powershell: ${{ parameters.executeAllSdlToolsScript }} ${{ parameters.overrideParameters }}
- displayName: Execute SDL
+ displayName: Execute SDL (Overridden)
continueOnError: ${{ parameters.sdlContinueOnError }}
condition: ${{ parameters.condition }}
diff --git a/eng/common/templates/steps/publish-logs.yml b/eng/common/templates/steps/publish-logs.yml
index 88f238f36b..835e527512 100644
--- a/eng/common/templates/steps/publish-logs.yml
+++ b/eng/common/templates/steps/publish-logs.yml
@@ -1,6 +1,9 @@
parameters:
StageLabel: ''
JobLabel: ''
+ CustomSensitiveDataList: ''
+ # A default - in case value from eng/common/templates/post-build/common-variables.yml is not passed
+ BinlogToolVersion: '1.0.9'
steps:
- task: Powershell@2
@@ -12,7 +15,30 @@ steps:
Move-Item -Path $(Build.SourcesDirectory)/artifacts/log/Debug/* $(Build.SourcesDirectory)/PostBuildLogs/${{parameters.StageLabel}}/${{parameters.JobLabel}}/
continueOnError: true
condition: always()
-
+
+- task: PowerShell@2
+ displayName: Redact Logs
+ inputs:
+ filePath: $(Build.SourcesDirectory)/eng/common/post-build/redact-logs.ps1
+    # For now this needs an explicit list of all sensitive data. Taken from eng/publishing/v3/publish.yml
+    # Sensitive data can also be added to $(Build.SourcesDirectory)/eng/BinlogSecretsRedactionFile.txt
+ # If the file exists - sensitive data for redaction will be sourced from it
+ # (single entry per line, lines starting with '# ' are considered comments and skipped)
+ arguments: -InputPath '$(Build.SourcesDirectory)/PostBuildLogs'
+ -BinlogToolVersion ${{parameters.BinlogToolVersion}}
+ -TokensFilePath '$(Build.SourcesDirectory)/eng/BinlogSecretsRedactionFile.txt'
+ '$(publishing-dnceng-devdiv-code-r-build-re)'
+ '$(MaestroAccessToken)'
+ '$(dn-bot-all-orgs-artifact-feeds-rw)'
+ '$(akams-client-id)'
+ '$(akams-client-secret)'
+ '$(microsoft-symbol-server-pat)'
+ '$(symweb-symbol-server-pat)'
+ '$(dn-bot-all-orgs-build-rw-code-rw)'
+ ${{parameters.CustomSensitiveDataList}}
+ continueOnError: true
+ condition: always()
+
- task: PublishBuildArtifacts@1
displayName: Publish Logs
inputs:
diff --git a/eng/common/templates/steps/source-build.yml b/eng/common/templates/steps/source-build.yml
index a97a185a36..1d79797363 100644
--- a/eng/common/templates/steps/source-build.yml
+++ b/eng/common/templates/steps/source-build.yml
@@ -68,6 +68,11 @@ steps:
runtimeOsArgs='/p:RuntimeOS=${{ parameters.platform.runtimeOS }}'
fi
+ baseOsArgs=
+ if [ '${{ parameters.platform.baseOS }}' != '' ]; then
+ baseOsArgs='/p:BaseOS=${{ parameters.platform.baseOS }}'
+ fi
+
publishArgs=
if [ '${{ parameters.platform.skipPublishValidation }}' != 'true' ]; then
publishArgs='--publish'
@@ -86,6 +91,7 @@ steps:
$internalRestoreArgs \
$targetRidArgs \
$runtimeOsArgs \
+ $baseOsArgs \
/p:SourceBuildNonPortable=${{ parameters.platform.nonPortable }} \
/p:ArcadeBuildFromSource=true \
/p:AssetManifestFileName=$assetManifestFileName
@@ -99,7 +105,7 @@ steps:
Contents: |
**/*.log
**/*.binlog
- artifacts/source-build/self/prebuilt-report/**
+ artifacts/sb/prebuilt-report/**
TargetFolder: '$(Build.StagingDirectory)/BuildLogs'
CleanTargetFolder: true
continueOnError: true
@@ -112,3 +118,12 @@ steps:
artifactName: BuildLogs_SourceBuild_${{ parameters.platform.name }}_Attempt$(System.JobAttempt)
continueOnError: true
condition: succeededOrFailed()
+
+# Manually inject component detection so that we can ignore the source build upstream cache, which contains
+# a nupkg cache of input packages (a local feed).
+# This path must match the upstream cache path in property 'CurrentRepoSourceBuiltNupkgCacheDir'
+# in src\Microsoft.DotNet.Arcade.Sdk\tools\SourceBuild\SourceBuildArcade.targets
+- task: ComponentGovernanceComponentDetection@0
+ displayName: Component Detection (Exclude upstream cache)
+ inputs:
+ ignoreDirectories: '$(Build.SourcesDirectory)/artifacts/sb/src/artifacts/obj/source-built-upstream-cache'
diff --git a/eng/common/tools.ps1 b/eng/common/tools.ps1
index 38cf94ff88..9048d60324 100644
--- a/eng/common/tools.ps1
+++ b/eng/common/tools.ps1
@@ -158,18 +158,13 @@ function InitializeDotNetCli([bool]$install, [bool]$createSdkLocationFile) {
$env:DOTNET_MULTILEVEL_LOOKUP=0
# Disable first run since we do not need all ASP.NET packages restored.
- $env:DOTNET_SKIP_FIRST_TIME_EXPERIENCE=1
+ $env:DOTNET_NOLOGO=1
# Disable telemetry on CI.
if ($ci) {
$env:DOTNET_CLI_TELEMETRY_OPTOUT=1
}
- # Source Build uses DotNetCoreSdkDir variable
- if ($env:DotNetCoreSdkDir -ne $null) {
- $env:DOTNET_INSTALL_DIR = $env:DotNetCoreSdkDir
- }
-
# Find the first path on %PATH% that contains the dotnet.exe
if ($useInstalledDotNetCli -and (-not $globalJsonHasRuntimes) -and ($env:DOTNET_INSTALL_DIR -eq $null)) {
$dotnetExecutable = GetExecutableFileName 'dotnet'
@@ -228,7 +223,7 @@ function InitializeDotNetCli([bool]$install, [bool]$createSdkLocationFile) {
Write-PipelinePrependPath -Path $dotnetRoot
Write-PipelineSetVariable -Name 'DOTNET_MULTILEVEL_LOOKUP' -Value '0'
- Write-PipelineSetVariable -Name 'DOTNET_SKIP_FIRST_TIME_EXPERIENCE' -Value '1'
+ Write-PipelineSetVariable -Name 'DOTNET_NOLOGO' -Value '1'
return $global:_DotNetInstallDir = $dotnetRoot
}
@@ -379,13 +374,13 @@ function InitializeVisualStudioMSBuild([bool]$install, [object]$vsRequirements =
}
# Minimum VS version to require.
- $vsMinVersionReqdStr = '16.8'
+ $vsMinVersionReqdStr = '17.7'
$vsMinVersionReqd = [Version]::new($vsMinVersionReqdStr)
# If the version of msbuild is going to be xcopied,
# use this version. Version matches a package here:
- # https://dev.azure.com/dnceng/public/_packaging?_a=package&feed=dotnet-eng&package=RoslynTools.MSBuild&protocolType=NuGet&version=17.4.1&view=overview
- $defaultXCopyMSBuildVersion = '17.4.1'
+ # https://dev.azure.com/dnceng/public/_artifacts/feed/dotnet-eng/NuGet/RoslynTools.MSBuild/versions/17.8.1-2
+ $defaultXCopyMSBuildVersion = '17.8.1-2'
if (!$vsRequirements) {
if (Get-Member -InputObject $GlobalJson.tools -Name 'vs') {
@@ -601,7 +596,7 @@ function InitializeBuildTool() {
ExitWithExitCode 1
}
$dotnetPath = Join-Path $dotnetRoot (GetExecutableFileName 'dotnet')
- $buildTool = @{ Path = $dotnetPath; Command = 'msbuild'; Tool = 'dotnet'; Framework = 'net8.0' }
+ $buildTool = @{ Path = $dotnetPath; Command = 'msbuild'; Tool = 'dotnet'; Framework = 'net9.0' }
} elseif ($msbuildEngine -eq "vs") {
try {
$msbuildPath = InitializeVisualStudioMSBuild -install:$restore
@@ -671,20 +666,30 @@ function InitializeNativeTools() {
}
}
+function Read-ArcadeSdkVersion() {
+ return $GlobalJson.'msbuild-sdks'.'Microsoft.DotNet.Arcade.Sdk'
+}
+
function InitializeToolset() {
- if (Test-Path variable:global:_ToolsetBuildProj) {
- return $global:_ToolsetBuildProj
+ # For Unified Build/Source-build support, check whether the environment variable is
+ # set. If it is, then use this as the toolset build project.
+ if ($env:_InitializeToolset -ne $null) {
+ return $global:_InitializeToolset = $env:_InitializeToolset
+ }
+
+ if (Test-Path variable:global:_InitializeToolset) {
+ return $global:_InitializeToolset
}
$nugetCache = GetNuGetPackageCachePath
- $toolsetVersion = $GlobalJson.'msbuild-sdks'.'Microsoft.DotNet.Arcade.Sdk'
+ $toolsetVersion = Read-ArcadeSdkVersion
$toolsetLocationFile = Join-Path $ToolsetDir "$toolsetVersion.txt"
if (Test-Path $toolsetLocationFile) {
$path = Get-Content $toolsetLocationFile -TotalCount 1
if (Test-Path $path) {
- return $global:_ToolsetBuildProj = $path
+ return $global:_InitializeToolset = $path
}
}
@@ -707,7 +712,7 @@ function InitializeToolset() {
throw "Invalid toolset path: $path"
}
- return $global:_ToolsetBuildProj = $path
+ return $global:_InitializeToolset = $path
}
function ExitWithExitCode([int] $exitCode) {
@@ -759,12 +764,10 @@ function MSBuild() {
# new scripts need to work with old packages, so we need to look for the old names/versions
(Join-Path $basePath (Join-Path $buildTool.Framework 'Microsoft.DotNet.ArcadeLogging.dll')),
(Join-Path $basePath (Join-Path $buildTool.Framework 'Microsoft.DotNet.Arcade.Sdk.dll')),
- (Join-Path $basePath (Join-Path netcoreapp2.1 'Microsoft.DotNet.ArcadeLogging.dll')),
- (Join-Path $basePath (Join-Path netcoreapp2.1 'Microsoft.DotNet.Arcade.Sdk.dll'))
- (Join-Path $basePath (Join-Path netcoreapp3.1 'Microsoft.DotNet.ArcadeLogging.dll')),
- (Join-Path $basePath (Join-Path netcoreapp3.1 'Microsoft.DotNet.Arcade.Sdk.dll'))
(Join-Path $basePath (Join-Path net7.0 'Microsoft.DotNet.ArcadeLogging.dll')),
- (Join-Path $basePath (Join-Path net7.0 'Microsoft.DotNet.Arcade.Sdk.dll'))
+ (Join-Path $basePath (Join-Path net7.0 'Microsoft.DotNet.Arcade.Sdk.dll')),
+ (Join-Path $basePath (Join-Path net8.0 'Microsoft.DotNet.ArcadeLogging.dll')),
+ (Join-Path $basePath (Join-Path net8.0 'Microsoft.DotNet.Arcade.Sdk.dll'))
)
$selectedPath = $null
foreach ($path in $possiblePaths) {
@@ -823,7 +826,8 @@ function MSBuild-Core() {
}
}
- $env:ARCADE_BUILD_TOOL_COMMAND = "$($buildTool.Path) $cmdArgs"
+  # Be sure to quote the path in case there are spaces in the dotnet installation location.
+ $env:ARCADE_BUILD_TOOL_COMMAND = "`"$($buildTool.Path)`" $cmdArgs"
$exitCode = Exec-Process $buildTool.Path $cmdArgs
diff --git a/eng/common/tools.sh b/eng/common/tools.sh
index e8d4789433..ece4b73079 100755
--- a/eng/common/tools.sh
+++ b/eng/common/tools.sh
@@ -112,7 +112,7 @@ function InitializeDotNetCli {
export DOTNET_MULTILEVEL_LOOKUP=0
# Disable first run since we want to control all package sources
- export DOTNET_SKIP_FIRST_TIME_EXPERIENCE=1
+ export DOTNET_NOLOGO=1
# Disable telemetry on CI
if [[ $ci == true ]]; then
@@ -123,11 +123,6 @@ function InitializeDotNetCli {
# so it doesn't output warnings to the console.
export LTTNG_HOME="$HOME"
- # Source Build uses DotNetCoreSdkDir variable
- if [[ -n "${DotNetCoreSdkDir:-}" ]]; then
- export DOTNET_INSTALL_DIR="$DotNetCoreSdkDir"
- fi
-
# Find the first path on $PATH that contains the dotnet.exe
if [[ "$use_installed_dotnet_cli" == true && $global_json_has_runtimes == false && -z "${DOTNET_INSTALL_DIR:-}" ]]; then
local dotnet_path=`command -v dotnet`
@@ -165,7 +160,7 @@ function InitializeDotNetCli {
Write-PipelinePrependPath -path "$dotnet_root"
Write-PipelineSetVariable -name "DOTNET_MULTILEVEL_LOOKUP" -value "0"
- Write-PipelineSetVariable -name "DOTNET_SKIP_FIRST_TIME_EXPERIENCE" -value "1"
+ Write-PipelineSetVariable -name "DOTNET_NOLOGO" -value "1"
# return value
_InitializeDotNetCli="$dotnet_root"
@@ -310,7 +305,7 @@ function GetDotNetInstallScript {
curl "$install_script_url" -sSL --retry 10 --create-dirs -o "$install_script" || {
if command -v openssl &> /dev/null; then
echo "Curl failed; dumping some information about dotnet.microsoft.com for later investigation"
- echo | openssl s_client -showcerts -servername dotnet.microsoft.com -connect dotnet.microsoft.com:443
+ echo | openssl s_client -showcerts -servername dotnet.microsoft.com -connect dotnet.microsoft.com:443 || true
fi
echo "Will now retry the same URL with verbose logging."
with_retries curl "$install_script_url" -sSL --verbose --retry 10 --create-dirs -o "$install_script" || {
@@ -341,7 +336,12 @@ function InitializeBuildTool {
# return values
_InitializeBuildTool="$_InitializeDotNetCli/dotnet"
_InitializeBuildToolCommand="msbuild"
- _InitializeBuildToolFramework="net8.0"
+ # use override if it exists - commonly set by source-build
+ if [[ "${_OverrideArcadeInitializeBuildToolFramework:-x}" == "x" ]]; then
+ _InitializeBuildToolFramework="net9.0"
+ else
+ _InitializeBuildToolFramework="${_OverrideArcadeInitializeBuildToolFramework}"
+ fi
}
# Set RestoreNoCache as a workaround for https://github.com/NuGet/Home/issues/3116
@@ -453,12 +453,10 @@ function MSBuild {
local possiblePaths=()
possiblePaths+=( "$toolset_dir/$_InitializeBuildToolFramework/Microsoft.DotNet.ArcadeLogging.dll" )
possiblePaths+=( "$toolset_dir/$_InitializeBuildToolFramework/Microsoft.DotNet.Arcade.Sdk.dll" )
- possiblePaths+=( "$toolset_dir/netcoreapp2.1/Microsoft.DotNet.ArcadeLogging.dll" )
- possiblePaths+=( "$toolset_dir/netcoreapp2.1/Microsoft.DotNet.Arcade.Sdk.dll" )
- possiblePaths+=( "$toolset_dir/netcoreapp3.1/Microsoft.DotNet.ArcadeLogging.dll" )
- possiblePaths+=( "$toolset_dir/netcoreapp3.1/Microsoft.DotNet.Arcade.Sdk.dll" )
possiblePaths+=( "$toolset_dir/net7.0/Microsoft.DotNet.ArcadeLogging.dll" )
possiblePaths+=( "$toolset_dir/net7.0/Microsoft.DotNet.Arcade.Sdk.dll" )
+ possiblePaths+=( "$toolset_dir/net8.0/Microsoft.DotNet.ArcadeLogging.dll" )
+ possiblePaths+=( "$toolset_dir/net8.0/Microsoft.DotNet.Arcade.Sdk.dll" )
for path in "${possiblePaths[@]}"; do
if [[ -f $path ]]; then
selectedPath=$path
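Illustrative only, not part of the patch: the _OverrideArcadeInitializeBuildToolFramework hook added to InitializeBuildTool above can be exercised from a source-build style wrapper; the exported value (net8.0) and the invocation below are a hypothetical example.
# Force the Arcade toolset to load from the net8.0 folder instead of the
# new net9.0 default when driving eng/common/build.sh:
export _OverrideArcadeInitializeBuildToolFramework=net8.0
./eng/common/build.sh --restore --build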
diff --git a/global.json b/global.json
index 25df1701da..40b88952ee 100644
--- a/global.json
+++ b/global.json
@@ -1,18 +1,20 @@
{
"tools": {
- "dotnet": "8.0.100-preview.3.23178.7",
+ "dotnet": "9.0.100-alpha.1.23615.4",
"runtimes": {
"dotnet/x64": [
- "$(DotNetRuntime60Version)"
+ "$(DotNetRuntime60Version)",
+ "$(DotNetRuntime80Version)"
],
"dotnet/x86": [
- "$(DotNetRuntime60Version)"
+ "$(DotNetRuntime60Version)",
+ "$(DotNetRuntime80Version)"
]
}
},
"msbuild-sdks": {
- "Microsoft.DotNet.Arcade.Sdk": "8.0.0-beta.23265.1",
- "Microsoft.DotNet.Helix.Sdk": "8.0.0-beta.23265.1",
+ "Microsoft.DotNet.Arcade.Sdk": "9.0.0-beta.23628.1",
+ "Microsoft.DotNet.Helix.Sdk": "9.0.0-beta.23628.1",
"Microsoft.Build.Traversal": "3.2.0",
"Microsoft.SourceLink.GitHub": "1.1.0-beta-20206-02",
"Microsoft.SourceLink.Common": "1.1.0-beta-20206-02"
diff --git a/src/Microsoft.ML.Mkl.Redist/CompatibilitySuppressions.xml b/src/Microsoft.ML.Mkl.Redist/CompatibilitySuppressions.xml
deleted file mode 100644
index 90523af626..0000000000
--- a/src/Microsoft.ML.Mkl.Redist/CompatibilitySuppressions.xml
+++ /dev/null
@@ -1,8 +0,0 @@
-
-
-
-
- PKV006
- .NETStandard,Version=v2.0
-
-
\ No newline at end of file
diff --git a/src/Native/Stdafx.h b/src/Native/Stdafx.h
index 4008ebd012..e1b37c7092 100644
--- a/src/Native/Stdafx.h
+++ b/src/Native/Stdafx.h
@@ -7,6 +7,7 @@
#include
#include
#include
+#include
#define UNUSED(x) (void)(x)
#define DEBUG_ONLY(x) (void)(x)
diff --git a/test/Microsoft.Data.Analysis.Tests/ArrowIntegrationTests.cs b/test/Microsoft.Data.Analysis.Tests/ArrowIntegrationTests.cs
index 185ab835bb..8961b724d9 100644
--- a/test/Microsoft.Data.Analysis.Tests/ArrowIntegrationTests.cs
+++ b/test/Microsoft.Data.Analysis.Tests/ArrowIntegrationTests.cs
@@ -128,7 +128,7 @@ RecordBatch CreateRecordBatch(string prependColumnNamesWith = "")
}
[Fact]
- public void TestEmptyDataFrameRecordBatch()
+ public async void TestEmptyDataFrameRecordBatch()
{
PrimitiveDataFrameColumn ageColumn = new PrimitiveDataFrameColumn("Age");
PrimitiveDataFrameColumn lengthColumn = new PrimitiveDataFrameColumn("CharCount");
@@ -142,7 +142,7 @@ public void TestEmptyDataFrameRecordBatch()
foundARecordBatch = true;
MemoryStream stream = new MemoryStream();
ArrowStreamWriter writer = new ArrowStreamWriter(stream, recordBatch.Schema);
- writer.WriteRecordBatchAsync(recordBatch).GetAwaiter().GetResult();
+ await writer.WriteRecordBatchAsync(recordBatch);
stream.Position = 0;
ArrowStreamReader reader = new ArrowStreamReader(stream);
diff --git a/test/Microsoft.Data.Analysis.Tests/DataFrameJoinExtensionsTests.cs b/test/Microsoft.Data.Analysis.Tests/DataFrameJoinExtensionsTests.cs
index 6a20bdee47..ed39dff49f 100644
--- a/test/Microsoft.Data.Analysis.Tests/DataFrameJoinExtensionsTests.cs
+++ b/test/Microsoft.Data.Analysis.Tests/DataFrameJoinExtensionsTests.cs
@@ -24,7 +24,7 @@ public void GetSortedListsIntersection_EmptyCollections_EmptyResult()
// Assert
- Assert.Equal(0, intersection.Count);
+ Assert.Empty(intersection);
}
[Fact]
@@ -47,7 +47,7 @@ public void GetSortedListsIntersection_EmptyCollections_FirstIsNotEmpty_EmptyRes
// Assert
- Assert.Equal(0, intersection.Count);
+ Assert.Empty(intersection);
}
[Fact]
@@ -70,7 +70,7 @@ public void GetSortedListsIntersection_EmptyCollections_SecondIsNotEmpty_EmptyRe
// Assert
- Assert.Equal(0, intersection.Count);
+ Assert.Empty(intersection);
}
[Fact]
@@ -101,7 +101,7 @@ public void GetSortedListsIntersection_SortedCollections_WithoutIntersection_Suc
// Assert
- Assert.Equal(0, intersection.Count);
+ Assert.Empty(intersection);
}
[Fact]
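The assertions above move from Assert.Equal(0, collection.Count) to Assert.Empty(collection), which is what the xUnit analyzers recommend and which prints the unexpected items on failure. A small sketch, illustrative only:

using System.Collections.Generic;
using Xunit;

public class EmptyAssertionSketch
{
    [Fact]
    public void PrefersAssertEmpty()
    {
        var intersection = new List<long>();

        // Before: Assert.Equal(0, intersection.Count);
        Assert.Empty(intersection);   // failure output lists the offending elements
    }
}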
diff --git a/test/Microsoft.ML.AutoML.Tests/ColumnInferenceTests.cs b/test/Microsoft.ML.AutoML.Tests/ColumnInferenceTests.cs
index 558e5d6788..09d0aaffd7 100644
--- a/test/Microsoft.ML.AutoML.Tests/ColumnInferenceTests.cs
+++ b/test/Microsoft.ML.AutoML.Tests/ColumnInferenceTests.cs
@@ -223,8 +223,8 @@ public void InferColumnsFromMultilineInputFile()
// File has 3 columns: "id", "description" and "animal"
Assert.NotNull(result.ColumnInformation.LabelColumnName);
- Assert.Equal(1, result.ColumnInformation.TextColumnNames.Count);
- Assert.Equal(1, result.ColumnInformation.CategoricalColumnNames.Count);
+ Assert.Single(result.ColumnInformation.TextColumnNames);
+ Assert.Single(result.ColumnInformation.CategoricalColumnNames);
Assert.Equal("id", result.ColumnInformation.LabelColumnName);
Assert.Equal("description", result.ColumnInformation.TextColumnNames.First());
diff --git a/test/Microsoft.ML.AutoML.Tests/SuggestedPipelineBuilderTests.cs b/test/Microsoft.ML.AutoML.Tests/SuggestedPipelineBuilderTests.cs
index be8d75ff8a..63cca8f878 100644
--- a/test/Microsoft.ML.AutoML.Tests/SuggestedPipelineBuilderTests.cs
+++ b/test/Microsoft.ML.AutoML.Tests/SuggestedPipelineBuilderTests.cs
@@ -46,7 +46,7 @@ public void TrainerNeedsNormalization()
public void TrainerNotNeedNormalization()
{
var pipeline = BuildSuggestedPipeline(BuildLightGbmTrainer());
- Assert.Equal(0, pipeline.Transforms.Count);
+ Assert.Empty(pipeline.Transforms);
}
private static void TestPipelineBuilderCaching(
diff --git a/test/Microsoft.ML.Core.Tests/UnitTests/ColumnTypes.cs b/test/Microsoft.ML.Core.Tests/UnitTests/ColumnTypes.cs
index e50fa4ddee..8b59faf6bf 100644
--- a/test/Microsoft.ML.Core.Tests/UnitTests/ColumnTypes.cs
+++ b/test/Microsoft.ML.Core.Tests/UnitTests/ColumnTypes.cs
@@ -32,19 +32,19 @@ public void TestEqualAndGetHashCode()
{
var tmp = type;
if (dict.ContainsKey(tmp) && dict[tmp] != tmp.ToString())
- Assert.True(false, dict[tmp] + " and " + tmp.ToString() + " are duplicates.");
+ Assert.Fail(dict[tmp] + " and " + tmp.ToString() + " are duplicates.");
dict[tmp] = tmp.ToString();
for (int size = 0; size < 5; size++)
{
tmp1 = new VectorDataViewType(tmp, size);
if (dict.ContainsKey(tmp1) && dict[tmp1] != tmp1.ToString())
- Assert.True(false, dict[tmp1] + " and " + tmp1.ToString() + " are duplicates.");
+ Assert.Fail(dict[tmp1] + " and " + tmp1.ToString() + " are duplicates.");
dict[tmp1] = tmp1.ToString();
for (int size1 = 0; size1 < 5; size1++)
{
tmp2 = new VectorDataViewType(tmp, size, size1);
if (dict.ContainsKey(tmp2) && dict[tmp2] != tmp2.ToString())
- Assert.True(false, dict[tmp2] + " and " + tmp2.ToString() + " are duplicates.");
+ Assert.Fail(dict[tmp2] + " and " + tmp2.ToString() + " are duplicates.");
dict[tmp2] = tmp2.ToString();
}
}
@@ -59,19 +59,19 @@ public void TestEqualAndGetHashCode()
{
tmp = new KeyDataViewType(rawType, count);
if (dict.ContainsKey(tmp) && dict[tmp] != tmp.ToString())
- Assert.True(false, dict[tmp] + " and " + tmp.ToString() + " are duplicates.");
+ Assert.Fail(dict[tmp] + " and " + tmp.ToString() + " are duplicates.");
dict[tmp] = tmp.ToString();
for (int size = 0; size < 5; size++)
{
tmp1 = new VectorDataViewType(tmp, size);
if (dict.ContainsKey(tmp1) && dict[tmp1] != tmp1.ToString())
- Assert.True(false, dict[tmp1] + " and " + tmp1.ToString() + " are duplicates.");
+ Assert.Fail(dict[tmp1] + " and " + tmp1.ToString() + " are duplicates.");
dict[tmp1] = tmp1.ToString();
for (int size1 = 0; size1 < 5; size1++)
{
tmp2 = new VectorDataViewType(tmp, size, size1);
if (dict.ContainsKey(tmp2) && dict[tmp2] != tmp2.ToString())
- Assert.True(false, dict[tmp2] + " and " + tmp2.ToString() + " are duplicates.");
+ Assert.Fail(dict[tmp2] + " and " + tmp2.ToString() + " are duplicates.");
dict[tmp2] = tmp2.ToString();
}
}
@@ -79,19 +79,19 @@ public void TestEqualAndGetHashCode()
Assert.True(rawType.TryGetDataKind(out var kind));
tmp = new KeyDataViewType(rawType, kind.ToMaxInt());
if (dict.ContainsKey(tmp) && dict[tmp] != tmp.ToString())
- Assert.True(false, dict[tmp] + " and " + tmp.ToString() + " are duplicates.");
+ Assert.Fail(dict[tmp] + " and " + tmp.ToString() + " are duplicates.");
dict[tmp] = tmp.ToString();
for (int size = 0; size < 5; size++)
{
tmp1 = new VectorDataViewType(tmp, size);
if (dict.ContainsKey(tmp1) && dict[tmp1] != tmp1.ToString())
- Assert.True(false, dict[tmp1] + " and " + tmp1.ToString() + " are duplicates.");
+ Assert.Fail(dict[tmp1] + " and " + tmp1.ToString() + " are duplicates.");
dict[tmp1] = tmp1.ToString();
for (int size1 = 0; size1 < 5; size1++)
{
tmp2 = new VectorDataViewType(tmp, size, size1);
if (dict.ContainsKey(tmp2) && dict[tmp2] != tmp2.ToString())
- Assert.True(false, dict[tmp2] + " and " + tmp2.ToString() + " are duplicates.");
+ Assert.Fail(dict[tmp2] + " and " + tmp2.ToString() + " are duplicates.");
dict[tmp2] = tmp2.ToString();
}
}
@@ -104,7 +104,7 @@ public void TestEqualAndGetHashCode()
{
var tmp4 = new ImageDataViewType(height, width);
if (dict.ContainsKey(tmp4))
- Assert.True(false, dict[tmp4] + " and " + tmp4.ToString() + " are duplicates.");
+ Assert.Fail(dict[tmp4] + " and " + tmp4.ToString() + " are duplicates.");
dict[tmp4] = tmp4.ToString();
}
}
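Assert.Fail(message) was added in xunit.assert 2.5 as the supported way to fail a test unconditionally, replacing the Assert.True(false, message) idiom rewritten throughout this file. A sketch of the duplicate-detection shape used above, with simplified types, illustrative only:

using System.Collections.Generic;
using Xunit;

public class FailAssertionSketch
{
    [Fact]
    public void ReportsDuplicatesExplicitly()
    {
        var dict = new Dictionary<string, string>();
        var key = "VectorDataViewType";

        if (dict.ContainsKey(key) && dict[key] != key)
            Assert.Fail(dict[key] + " and " + key + " are duplicates.");   // was: Assert.True(false, ...)
        dict[key] = key;
    }
}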
diff --git a/test/Microsoft.ML.FSharp.Tests/SmokeTests.fs b/test/Microsoft.ML.FSharp.Tests/SmokeTests.fs
index 7e186b13e0..0f129eacfc 100644
--- a/test/Microsoft.ML.FSharp.Tests/SmokeTests.fs
+++ b/test/Microsoft.ML.FSharp.Tests/SmokeTests.fs
@@ -11,31 +11,31 @@
// environment, for example, see https://github.com/isaacabraham/ml-test-experiment/, but
// here we list them explicitly to avoid the dependency on a package loader,
//
-// You should build Microsoft.ML.FSharp.Tests in Debug mode for framework net461
+// You should build Microsoft.ML.FSharp.Tests in Debug mode for framework net462
// before running this as a script with F# Interactive by editing the project
// file to have:
-//    <TargetFrameworks>net6.0; net461</TargetFrameworks>
+//    <TargetFrameworks>net6.0; net462</TargetFrameworks>
#if INTERACTIVE
#r "netstandard"
-#r @"../../bin/AnyCPU.Debug/Microsoft.ML.FSharp.Tests/net461/Microsoft.ML.Core.dll"
-#r @"../../bin/AnyCPU.Debug/Microsoft.ML.FSharp.Tests/net461/Google.Protobuf.dll"
-#r @"../../bin/AnyCPU.Debug/Microsoft.ML.FSharp.Tests/net461/Newtonsoft.Json.dll"
-#r @"../../bin/AnyCPU.Debug/Microsoft.ML.FSharp.Tests/net461/System.CodeDom.dll"
-#r @"../../bin/AnyCPU.Debug/Microsoft.ML.FSharp.Tests/net461/Microsoft.ML.CpuMath.dll"
-#r @"../../bin/AnyCPU.Debug/Microsoft.ML.FSharp.Tests/net461/Microsoft.ML.Data.dll"
-#r @"../../bin/AnyCPU.Debug/Microsoft.ML.FSharp.Tests/net461/Microsoft.ML.Transforms.dll"
-#r @"../../bin/AnyCPU.Debug/Microsoft.ML.FSharp.Tests/net461/Microsoft.ML.ResultProcessor.dll"
-#r @"../../bin/AnyCPU.Debug/Microsoft.ML.FSharp.Tests/net461/Microsoft.ML.PCA.dll"
-#r @"../../bin/AnyCPU.Debug/Microsoft.ML.FSharp.Tests/net461/Microsoft.ML.KMeansClustering.dll"
-#r @"../../bin/AnyCPU.Debug/Microsoft.ML.FSharp.Tests/net461/Microsoft.ML.FastTree.dll"
-#r @"../../bin/AnyCPU.Debug/Microsoft.ML.FSharp.Tests/net461/Microsoft.ML.Api.dll"
-#r @"../../bin/AnyCPU.Debug/Microsoft.ML.FSharp.Tests/net461/Microsoft.ML.Sweeper.dll"
-#r @"../../bin/AnyCPU.Debug/Microsoft.ML.FSharp.Tests/net461/Microsoft.ML.dll"
-#r @"../../bin/AnyCPU.Debug/Microsoft.ML.FSharp.Tests/net461/Microsoft.ML.StandardTrainers.dll"
-#r @"../../bin/AnyCPU.Debug/Microsoft.ML.FSharp.Tests/net461/Microsoft.ML.PipelineInference.dll"
-#r @"../../bin/AnyCPU.Debug/Microsoft.ML.FSharp.Tests/net461/xunit.core.dll"
-#r @"../../bin/AnyCPU.Debug/Microsoft.ML.FSharp.Tests/net461/xunit.assert.dll"
+#r @"../../bin/AnyCPU.Debug/Microsoft.ML.FSharp.Tests/net462/Microsoft.ML.Core.dll"
+#r @"../../bin/AnyCPU.Debug/Microsoft.ML.FSharp.Tests/net462/Google.Protobuf.dll"
+#r @"../../bin/AnyCPU.Debug/Microsoft.ML.FSharp.Tests/net462/Newtonsoft.Json.dll"
+#r @"../../bin/AnyCPU.Debug/Microsoft.ML.FSharp.Tests/net462/System.CodeDom.dll"
+#r @"../../bin/AnyCPU.Debug/Microsoft.ML.FSharp.Tests/net462/Microsoft.ML.CpuMath.dll"
+#r @"../../bin/AnyCPU.Debug/Microsoft.ML.FSharp.Tests/net462/Microsoft.ML.Data.dll"
+#r @"../../bin/AnyCPU.Debug/Microsoft.ML.FSharp.Tests/net462/Microsoft.ML.Transforms.dll"
+#r @"../../bin/AnyCPU.Debug/Microsoft.ML.FSharp.Tests/net462/Microsoft.ML.ResultProcessor.dll"
+#r @"../../bin/AnyCPU.Debug/Microsoft.ML.FSharp.Tests/net462/Microsoft.ML.PCA.dll"
+#r @"../../bin/AnyCPU.Debug/Microsoft.ML.FSharp.Tests/net462/Microsoft.ML.KMeansClustering.dll"
+#r @"../../bin/AnyCPU.Debug/Microsoft.ML.FSharp.Tests/net462/Microsoft.ML.FastTree.dll"
+#r @"../../bin/AnyCPU.Debug/Microsoft.ML.FSharp.Tests/net462/Microsoft.ML.Api.dll"
+#r @"../../bin/AnyCPU.Debug/Microsoft.ML.FSharp.Tests/net462/Microsoft.ML.Sweeper.dll"
+#r @"../../bin/AnyCPU.Debug/Microsoft.ML.FSharp.Tests/net462/Microsoft.ML.dll"
+#r @"../../bin/AnyCPU.Debug/Microsoft.ML.FSharp.Tests/net462/Microsoft.ML.StandardTrainers.dll"
+#r @"../../bin/AnyCPU.Debug/Microsoft.ML.FSharp.Tests/net462/Microsoft.ML.PipelineInference.dll"
+#r @"../../bin/AnyCPU.Debug/Microsoft.ML.FSharp.Tests/net462/xunit.core.dll"
+#r @"../../bin/AnyCPU.Debug/Microsoft.ML.FSharp.Tests/net462/xunit.assert.dll"
#r "System"
#r "System.Core"
#r "System.Xml.Linq"
diff --git a/test/Microsoft.ML.IntegrationTests/IntrospectiveTraining.cs b/test/Microsoft.ML.IntegrationTests/IntrospectiveTraining.cs
index e58498b42c..ddbf66ba01 100644
--- a/test/Microsoft.ML.IntegrationTests/IntrospectiveTraining.cs
+++ b/test/Microsoft.ML.IntegrationTests/IntrospectiveTraining.cs
@@ -59,8 +59,8 @@ public void InspectFastForestRegresionTrees()
Assert.Equal(tree.SplitGains.Count, tree.NumberOfNodes);
Assert.Equal(tree.NumericalSplitThresholds.Count, tree.NumberOfNodes);
Assert.All(tree.CategoricalSplitFlags, flag => Assert.False(flag));
- Assert.Equal(0, tree.GetCategoricalSplitFeaturesAt(0).Count);
- Assert.Equal(0, tree.GetCategoricalCategoricalSplitFeatureRangeAt(0).Count);
+ Assert.Empty(tree.GetCategoricalSplitFeaturesAt(0));
+ Assert.Empty(tree.GetCategoricalCategoricalSplitFeatureRangeAt(0));
});
}
@@ -103,8 +103,8 @@ public void InspectFastTreeModelParameters()
Assert.Equal(tree.SplitGains.Count, tree.NumberOfNodes);
Assert.Equal(tree.NumericalSplitThresholds.Count, tree.NumberOfNodes);
Assert.All(tree.CategoricalSplitFlags, flag => Assert.False(flag));
- Assert.Equal(0, tree.GetCategoricalSplitFeaturesAt(0).Count);
- Assert.Equal(0, tree.GetCategoricalCategoricalSplitFeatureRangeAt(0).Count);
+ Assert.Empty(tree.GetCategoricalSplitFeaturesAt(0));
+ Assert.Empty(tree.GetCategoricalCategoricalSplitFeatureRangeAt(0));
});
// Add baselines for the model.
diff --git a/test/Microsoft.ML.PerformanceTests/Harness/Configs.cs b/test/Microsoft.ML.PerformanceTests/Harness/Configs.cs
index c7dd80c90b..8f069980d6 100644
--- a/test/Microsoft.ML.PerformanceTests/Harness/Configs.cs
+++ b/test/Microsoft.ML.PerformanceTests/Harness/Configs.cs
@@ -41,7 +41,7 @@ private IToolchain CreateToolchain()
TimeSpan timeout = TimeSpan.FromMinutes(5);
#if NETFRAMEWORK
- var tfm = "net461";
+ var tfm = "net462";
var csProj = CsProjClassicNetToolchain.From(tfm, timeout: timeout);
#else
var frameworkName = new FrameworkName(AppContext.TargetFrameworkName);
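The benchmark harness keeps the same toolchain-selection logic but targets net462 when built for the full framework. A minimal sketch of that selection, assuming the BenchmarkDotNet package and its CsProj toolchain types (the surrounding harness code is not reproduced), illustrative only:

using System;
using BenchmarkDotNet.Toolchains;
using BenchmarkDotNet.Toolchains.CsProj;
using BenchmarkDotNet.Toolchains.DotNetCli;

internal static class ToolchainSketch
{
    internal static IToolchain Create()
    {
#if NETFRAMEWORK
        // net461 is out of support; net462 is the lowest full-framework TFM current tooling accepts.
        return CsProjClassicNetToolchain.From("net462", timeout: TimeSpan.FromMinutes(5));
#else
        return CsProjCoreToolchain.From(NetCoreAppSettings.NetCoreApp60);
#endif
    }
}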
diff --git a/test/Microsoft.ML.Sweeper.Tests/TestSweeper.cs b/test/Microsoft.ML.Sweeper.Tests/TestSweeper.cs
index b9c8894584..b0abbc9086 100644
--- a/test/Microsoft.ML.Sweeper.Tests/TestSweeper.cs
+++ b/test/Microsoft.ML.Sweeper.Tests/TestSweeper.cs
@@ -120,14 +120,14 @@ public void TestRandomSweeper()
}
else
{
- Assert.True(false, "Wrong parameter");
+ Assert.Fail("Wrong parameter");
}
}
}
}
[Fact]
- public void TestSimpleSweeperAsync()
+ public async Task TestSimpleSweeperAsync()
{
var random = new Random(42);
var env = new MLContext(42);
@@ -146,10 +146,11 @@ public void TestSimpleSweeperAsync()
for (int i = 0; i < sweeps; i++)
{
var task = sweeper.ProposeAsync();
+ var tResult = await task;
Assert.True(task.IsCompleted);
- paramSets.Add(task.Result.ParameterSet);
- var result = new RunResult(task.Result.ParameterSet, random.NextDouble(), true);
- sweeper.Update(task.Result.Id, result);
+ paramSets.Add(tResult.ParameterSet);
+ var result = new RunResult(tResult.ParameterSet, random.NextDouble(), true);
+ sweeper.Update(tResult.Id, result);
}
Assert.Equal(sweeps, paramSets.Count);
CheckAsyncSweeperResult(paramSets);
@@ -167,8 +168,9 @@ public void TestSimpleSweeperAsync()
for (int i = 0; i < sweeps; i++)
{
var task = gridSweeper.ProposeAsync();
+ var tResult = await task;
Assert.True(task.IsCompleted);
- paramSets.Add(task.Result.ParameterSet);
+ paramSets.Add(tResult.ParameterSet);
}
Assert.Equal(sweeps, paramSets.Count);
CheckAsyncSweeperResult(paramSets);
@@ -326,12 +328,12 @@ public void TestDeterministicSweeperAsyncParallel()
int[] sleeps = new int[sweeps];
for (int i = 0; i < sleeps.Length; i++)
sleeps[i] = random.Next(10, 100);
- var r = Task.Run(() => Parallel.For(0, sweeps, options, (int i) =>
+ var r = Task.Run(() => Parallel.For(0, sweeps, options, async (int i) =>
{
var task = sweeper.ProposeAsync();
- task.Wait();
+ var tResult = await task;
Assert.Equal(TaskStatus.RanToCompletion, task.Status);
- var paramWithId = task.Result;
+ var paramWithId = tResult;
if (paramWithId == null)
return;
Thread.Sleep(sleeps[i]);
@@ -417,7 +419,7 @@ private void CheckAsyncSweeperResult(List paramSets)
}
else
{
- Assert.True(false, "Wrong parameter");
+ Assert.Fail("Wrong parameter");
}
}
}
@@ -464,7 +466,7 @@ public void TestRandomGridSweeper()
}
else
{
- Assert.True(false, "Wrong parameter");
+ Assert.Fail("Wrong parameter");
}
}
Assert.False(gridPoint[i][j]);
@@ -491,7 +493,7 @@ public void TestRandomGridSweeper()
}
else
{
- Assert.True(false, "Wrong parameter");
+ Assert.Fail("Wrong parameter");
}
}
Assert.False(gridPoint[i][j]);
@@ -523,7 +525,7 @@ public void TestRandomGridSweeper()
}
else
{
- Assert.True(false, "Wrong parameter");
+ Assert.Fail("Wrong parameter");
}
}
Assert.False(gridPoint[i][j]);
@@ -577,7 +579,7 @@ public void TestNelderMeadSweeper()
}
else
{
- Assert.True(false, "Wrong parameter");
+ Assert.Fail("Wrong parameter");
}
}
results.Add(new RunResult(parameterSet, random.NextDouble(), true));
@@ -625,7 +627,7 @@ public void TestNelderMeadSweeperWithDefaultFirstBatchSweeper()
}
else
{
- Assert.True(false, "Wrong parameter");
+ Assert.Fail("Wrong parameter");
}
}
results.Add(new RunResult(parameterSet, random.NextDouble(), true));
@@ -676,7 +678,7 @@ public void TestSmacSweeper()
}
else
{
- Assert.True(false, "Wrong parameter");
+ Assert.Fail("Wrong parameter");
}
}
results.Add(new RunResult(parameterSet, random.NextDouble(), true));
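Two notes on the sweeper test changes: awaiting ProposeAsync directly removes the blocking task.Wait()/task.Result pattern, but an async lambda handed to Parallel.For compiles to an async void delegate, so Parallel.For returns before the awaited work finishes and exceptions go unobserved. A hedged alternative sketch using Task.WhenAll, with a stand-in for the sweeper rather than the repository's API, illustrative only:

using System.Collections.Generic;
using System.Linq;
using System.Threading.Tasks;

internal static class ParallelProposeSketch
{
    // Hypothetical stand-in for sweeper.ProposeAsync().
    private static async Task<int> ProposeAsync(int i)
    {
        await Task.Delay(10);
        return i;
    }

    // Collecting the tasks and awaiting Task.WhenAll keeps completion and failures observable,
    // unlike Parallel.For over an async (void) lambda.
    internal static async Task<int[]> ProposeAllAsync(int sweeps)
    {
        IEnumerable<Task<int>> tasks = Enumerable.Range(0, sweeps).Select(ProposeAsync);
        return await Task.WhenAll(tasks);
    }
}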
diff --git a/test/Microsoft.ML.TestFramework/Attributes/BenchmarkTheoryAttribute.cs b/test/Microsoft.ML.TestFramework/Attributes/BenchmarkTheoryAttribute.cs
index 982ec4e580..28d58bd829 100644
--- a/test/Microsoft.ML.TestFramework/Attributes/BenchmarkTheoryAttribute.cs
+++ b/test/Microsoft.ML.TestFramework/Attributes/BenchmarkTheoryAttribute.cs
@@ -12,7 +12,7 @@ public sealed class BenchmarkTheoryAttribute : EnvironmentSpecificTheoryAttribut
#if DEBUG
private const string SkipMessage = "BenchmarkDotNet does not allow running the benchmarks in Debug, so this test is disabled for DEBUG";
private readonly bool _isEnvironmentSupported = false;
-#elif NET461
+#elif NETFRAMEWORK
private const string SkipMessage = "We are currently not running Benchmarks for FullFramework";
private readonly bool _isEnvironmentSupported = false;
#else
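NET461 stops being defined once the project targets net462, while NETFRAMEWORK is defined for every net4x TFM, so the guard keeps working after retargeting. A small sketch of the implicit symbols, illustrative only:

internal static class TfmSymbolsSketch
{
    internal static string Describe()
    {
#if NETFRAMEWORK
        // Defined for net461, net462, net48, ... so it survives TFM bumps.
        return "full framework";
#elif NET6_0_OR_GREATER
        return "modern .NET";
#else
        return "other target";
#endif
    }
}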
diff --git a/test/Microsoft.ML.TestFramework/DataPipe/TestDataPipe.cs b/test/Microsoft.ML.TestFramework/DataPipe/TestDataPipe.cs
index 50281944d6..ee94efc676 100644
--- a/test/Microsoft.ML.TestFramework/DataPipe/TestDataPipe.cs
+++ b/test/Microsoft.ML.TestFramework/DataPipe/TestDataPipe.cs
@@ -1611,7 +1611,7 @@ public void TestLdaTransformerEmptyDocumentException()
return;
}
- Assert.True(false, "The LDA transform does not throw expected error on empty documents.");
+ Assert.Fail("The LDA transform does not throw expected error on empty documents.");
}
}
}
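The try/catch plus Assert.Fail shape above is needed because the failing call sits several statements deep; when the throwing operation can be invoked directly, Assert.Throws states the same intent more compactly. A sketch with a hypothetical Transform helper, illustrative only:

using System;
using Xunit;

public class ExpectedExceptionSketch
{
    // Hypothetical operation that rejects empty documents, standing in for the LDA transform.
    private static void Transform(string doc)
    {
        if (string.IsNullOrEmpty(doc))
            throw new InvalidOperationException("The dataset contains no valid document.");
    }

    [Fact]
    public void ThrowsOnEmptyDocument()
    {
        Assert.Throws<InvalidOperationException>(() => Transform(string.Empty));
    }
}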
diff --git a/test/Microsoft.ML.TestFramework/GlobalBase.cs b/test/Microsoft.ML.TestFramework/GlobalBase.cs
index d1404e11ff..016e3dfd9b 100644
--- a/test/Microsoft.ML.TestFramework/GlobalBase.cs
+++ b/test/Microsoft.ML.TestFramework/GlobalBase.cs
@@ -77,7 +77,7 @@ private static void AssertHandler(string msg, IExceptionContext ectx)
else
#endif
{
- Assert.True(false, $"Assert failed: {msg}");
+ Assert.Fail($"Assert failed: {msg}");
}
}
diff --git a/test/Microsoft.ML.TestFramework/RemoteExecutor.cs b/test/Microsoft.ML.TestFramework/RemoteExecutor.cs
index 365ce0d7c4..097de7ad8e 100644
--- a/test/Microsoft.ML.TestFramework/RemoteExecutor.cs
+++ b/test/Microsoft.ML.TestFramework/RemoteExecutor.cs
@@ -155,7 +155,7 @@ private static void CheckProcess(Process process, RemoteInvokeOptions options)
private sealed class RemoteExecutionException : XunitException
{
- internal RemoteExecutionException(string stackTrace) : base("Remote process failed with an unhandled exception.", stackTrace) { }
+ internal RemoteExecutionException(string stackTrace) : base($"Remote process failed with an unhandled exception. {stackTrace}") { }
}
private static MethodInfo GetMethodInfo(Delegate d)
diff --git a/test/Microsoft.ML.TestFramework/TestCommandBase.cs b/test/Microsoft.ML.TestFramework/TestCommandBase.cs
index d990aa673e..f0ba55340d 100644
--- a/test/Microsoft.ML.TestFramework/TestCommandBase.cs
+++ b/test/Microsoft.ML.TestFramework/TestCommandBase.cs
@@ -985,7 +985,7 @@ public void CommandCrossValidationAndSave()
// FastTree internally fails if we try to run it simultaneously and if this happens we wouldn't get model file for training.
[TestCategory(Cat)]
[Fact]
- public void CommandTrainFastTreeInDifferentThreads()
+ public async Task CommandTrainFastTreeInDifferentThreads()
{
var dataPath = GetDataPath(TestDatasets.adult.testFilename);
var firstModelOutPath = DeleteOutputPath("TreeTransform-model2.zip");
@@ -1001,10 +1001,11 @@ public void CommandTrainFastTreeInDifferentThreads()
t[1] = new Task(() => MainForTest(secondTrainArgs));
t[0].Start();
t[1].Start();
- Task.WaitAll(t);
+ var t0 = await t[0];
+ var t1 = await t[1];
- Assert.Equal(0, t[0].Result);
- Assert.Equal(0, t[1].Result);
+ Assert.Equal(0, t0);
+ Assert.Equal(0, t1);
}
[TestCategory(Cat), TestCategory("FastTree")]
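Awaiting the two started tasks removes the Task.WaitAll/.Result blocking from the test body. A minimal sketch of the same shape with a hypothetical MainForTest stand-in, illustrative only:

using System.Threading.Tasks;
using Xunit;

public class ParallelTrainingSketch
{
    // Hypothetical stand-in for the command-line entry point used by the real test.
    private static int MainForTest(string args) => 0;

    [Fact]
    public async Task RunsTwoTrainingsConcurrently()
    {
        var first = Task.Run(() => MainForTest("first args"));
        var second = Task.Run(() => MainForTest("second args"));

        var exitCodes = await Task.WhenAll(first, second);   // no Task.WaitAll / .Result blocking

        Assert.All(exitCodes, code => Assert.Equal(0, code));
    }
}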
diff --git a/test/Microsoft.ML.Tests/Scenarios/Api/TestApi.cs b/test/Microsoft.ML.Tests/Scenarios/Api/TestApi.cs
index d701aa9876..1e9410cf16 100644
--- a/test/Microsoft.ML.Tests/Scenarios/Api/TestApi.cs
+++ b/test/Microsoft.ML.Tests/Scenarios/Api/TestApi.cs
@@ -91,7 +91,7 @@ public void CursorChannelExposedInMapTransform()
try
{
filter2.GetRowCursorForAllColumns().MoveNext();
- Assert.True(false, "Throw an error if attribute is applied to a field that is not an IChannel.");
+ Assert.Fail("Throw an error if attribute is applied to a field that is not an IChannel.");
}
catch (InvalidOperationException ex)
{
@@ -114,7 +114,7 @@ public void CursorChannelExposedInMapTransform()
try
{
filter3.GetRowCursorForAllColumns().MoveNext();
- Assert.True(false, "Throw an error if attribute is applied to a field that is not an IChannel.");
+ Assert.Fail("Throw an error if attribute is applied to a field that is not an IChannel.");
}
catch (InvalidOperationException ex)
{
diff --git a/test/Microsoft.ML.Tests/TextLoaderTests.cs b/test/Microsoft.ML.Tests/TextLoaderTests.cs
index 66e50efd9c..7242b69a8b 100644
--- a/test/Microsoft.ML.Tests/TextLoaderTests.cs
+++ b/test/Microsoft.ML.Tests/TextLoaderTests.cs
@@ -109,7 +109,7 @@ public void TestTextLoaderInvalidLongMin()
return;
}
- Assert.True(false, "Test failed.");
+ Assert.Fail("Test failed.");
}
[Fact]
@@ -133,7 +133,7 @@ public void TestTextLoaderInvalidLongMax()
return;
}
- Assert.True(false, "Test failed.");
+ Assert.Fail("Test failed.");
}
}