
Commit 687b13a

Add smoke test for binary package
1 parent 219fab7 commit 687b13a

File tree

1 file changed: +193 -0 lines changed


windows/internal/smoke_test.bat

Lines changed: 193 additions & 0 deletions
@@ -0,0 +1,193 @@
set SRC_DIR=%~dp0
pushd %SRC_DIR%\..

set "ORIG_PATH=%PATH%"

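:: Check for an NVIDIA GPU via WMIC; the CUDA checks below are skipped when none is found.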
setlocal EnableDelayedExpansion
set NVIDIA_GPU_EXISTS=0
for /F "delims=" %%i in ('wmic path win32_VideoController get name') do (
    set GPUS=%%i
    if not "x!GPUS:NVIDIA=!" == "x!GPUS!" (
        SET NVIDIA_GPU_EXISTS=1
        goto gpu_check_end
    )
)
:gpu_check_end
endlocal & set NVIDIA_GPU_EXISTS=%NVIDIA_GPU_EXISTS%

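:: Dispatch to the install steps for the package type under test.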
if "%PACKAGE_TYPE%" == "wheel" goto wheel
if "%PACKAGE_TYPE%" == "conda" goto conda
if "%PACKAGE_TYPE%" == "libtorch" goto libtorch

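:: Wheel package: install a standalone CPython from python.org, then pip install the built wheel.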
:wheel
echo "install wheel package"

set PYTHON_INSTALLER_URL=
if "%DESIRED_PYTHON%" == "3.8" set "PYTHON_INSTALLER_URL=https://www.python.org/ftp/python/3.8.2/python-3.8.2-amd64.exe"
if "%DESIRED_PYTHON%" == "3.7" set "PYTHON_INSTALLER_URL=https://www.python.org/ftp/python/3.7.7/python-3.7.7-amd64.exe"
if "%DESIRED_PYTHON%" == "3.6" set "PYTHON_INSTALLER_URL=https://www.python.org/ftp/python/3.6.8/python-3.6.8-amd64.exe"
if "%PYTHON_INSTALLER_URL%" == "" (
    echo Python %DESIRED_PYTHON% not supported yet
    exit /b 1
)

del python-amd64.exe
curl --retry 3 -kL "%PYTHON_INSTALLER_URL%" --output python-amd64.exe
if errorlevel 1 exit /b 1

start /wait "" python-amd64.exe /quiet InstallAllUsers=1 PrependPath=1 Include_test=0 TargetDir=%CD%\Python%PYTHON_VERSION%
if errorlevel 1 exit /b 1

set "PATH=%CD%\Python%PYTHON_VERSION%\Scripts;%CD%\Python%PYTHON_VERSION%;%PATH%"

for /F "delims=" %%i in ('where /R "%PYTORCH_FINAL_PACKAGE_DIR:/=\%" *.whl') do pip install "%%i"
pip install -q future numpy protobuf six "mkl>=2019"
goto smoke_test

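:: Conda package: install Miniconda, create a fresh test environment, and conda install the built package offline.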
:conda
echo "install conda package"

:: Install Miniconda3
set "CONDA_HOME=%CD%\conda"
set "tmp_conda=%CONDA_HOME%"
set "miniconda_exe=%CD%\miniconda.exe"

rmdir /s /q conda
del miniconda.exe
curl -k https://repo.anaconda.com/miniconda/Miniconda3-latest-Windows-x86_64.exe -o "%miniconda_exe%"
call ..\conda\install_conda.bat
if ERRORLEVEL 1 exit /b 1

set "PATH=%CONDA_HOME%;%CONDA_HOME%\scripts;%CONDA_HOME%\Library\bin;%PATH%"

conda create -qyn testenv python=%DESIRED_PYTHON%
call activate testenv
for /F "delims=" %%i in ('where /R "%PYTORCH_FINAL_PACKAGE_DIR:/=\%" *.tar.bz2') do conda install -y "%%i" --offline
conda install -yq future numpy protobuf six

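:: CUDA_VERSION comes in packed form (e.g. 101); split off the last digit to build the dotted version string (10.1) for cudatoolkit.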
set /a CUDA_VER=%CUDA_VERSION%
set CUDA_VER_MAJOR=%CUDA_VERSION:~0,-1%
set CUDA_VER_MINOR=%CUDA_VERSION:~-1,1%
set CUDA_VERSION_STR=%CUDA_VER_MAJOR%.%CUDA_VER_MINOR%
conda install -yq -c pytorch "cudatoolkit=%CUDA_VERSION_STR%"

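:: Shared smoke test for wheel and conda: import torch and caffe2, verify MKL, and run the CUDA checks when a GPU is present.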
:smoke_test
python -c "import torch"
if ERRORLEVEL 1 exit /b 1

python -c "from caffe2.python import core"
if ERRORLEVEL 1 exit /b 1

echo Checking that MKL is available
python -c "import torch; exit(0 if torch.backends.mkl.is_available() else 1)"
if ERRORLEVEL 1 exit /b 1

if "%NVIDIA_GPU_EXISTS%" == "0" (
    echo "Skip CUDA tests for machines without an NVIDIA GPU card"
    goto end
)

echo Checking that CUDA archs are setup correctly
python -c "import torch; torch.randn([3,5]).cuda()"
if ERRORLEVEL 1 exit /b 1

echo Checking that magma is available
python -c "import torch; torch.rand(1).cuda(); exit(0 if torch.cuda.has_magma else 1)"
if ERRORLEVEL 1 exit /b 1

echo Checking that CuDNN is available
python -c "import torch; exit(0 if torch.backends.cudnn.is_available() else 1)"
if ERRORLEVEL 1 exit /b 1

goto end

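:: Libtorch package: install the MSVC toolchain, unzip the archive, and build small C++ programs against it.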
:libtorch
echo "install and test libtorch"

powershell internal/vs_install.ps1
if ERRORLEVEL 1 exit /b 1

for /F "delims=" %%i in ('where /R "%PYTORCH_FINAL_PACKAGE_DIR:/=\%" *-latest.zip') do unzip "%%i" -d tmp
pushd tmp\libtorch

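:: Locate a Visual Studio 2017 (v15.x) installation that ships vcvarsall.bat, using vswhere.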
for /f "usebackq tokens=*" %%i in (`"%ProgramFiles(x86)%\Microsoft Visual Studio\Installer\vswhere.exe" -legacy -products * -version [15^,16^) -property installationPath`) do (
    if exist "%%i" if exist "%%i\VC\Auxiliary\Build\vcvarsall.bat" (
        set "VS15INSTALLDIR=%%i"
        set "VS15VCVARSALL=%%i\VC\Auxiliary\Build\vcvarsall.bat"
        goto vswhere
    )
)

:vswhere
IF "%VS15VCVARSALL%"=="" (
    echo Visual Studio 2017 C++ BuildTools is required to compile PyTorch on Windows
    exit /b 1
)
call "%VS15VCVARSALL%" x64

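:: Point the compiler and linker at the unpacked libtorch headers and libraries.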
set install_root=%CD%
set INCLUDE=%INCLUDE%;%install_root%/include;%install_root%/include/torch/csrc/api/include
set LIB=%LIB%;%install_root%/lib
set PATH=%PATH%;%install_root%/lib

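:: Generate and build a minimal program that links c10 and torch_cpu to prove the CPU library works.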
>example-app.cpp (
    echo #include ^<torch/torch.h^>
    echo int main(int argc, const char* argv[]^) {
    echo TORCH_WARN("Simple test passed!"^);
    echo return 0;
    echo }
)

del example-app.exe
cl example-app.cpp c10.lib torch_cpu.lib /EHsc
.\example-app.exe
if ERRORLEVEL 1 exit /b 1

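:: Same pattern, asserting MKL support through torch::hasMKL().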
echo Checking that MKL is available
del example-app.cpp
>example-app.cpp (
    echo #include ^<torch/torch.h^>
    echo int main(int argc, const char* argv[]^) {
    echo TORCH_CHECK(torch::hasMKL(^), "MKL is not available"^);
    echo return 0;
    echo }
)

del example-app.exe
cl example-app.cpp c10.lib torch_cpu.lib /EHsc
.\example-app.exe
if ERRORLEVEL 1 exit /b 1

if "%NVIDIA_GPU_EXISTS%" == "0" (
    echo "Skip CUDA tests for machines without an NVIDIA GPU card"
    goto end
)

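:: CUDA build: the /INCLUDE linker flag forces a torch_cuda symbol to be referenced so the CUDA library is actually linked in; the program then checks CUDA tensors, MAGMA, and cuDNN.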
del example-app.cpp
>example-app.cpp (
    echo #include ^<torch/torch.h^>
    echo int main(int argc, const char* argv[]^) {
    echo std::cout ^<^< "Checking that CUDA archs are setup correctly" ^<^< std::endl;
    echo TORCH_CHECK(torch::rand({3, 5}, torch::Device(torch::kCUDA^)^).defined(^), "CUDA archs are not setup correctly"^);
    echo.
    echo // These have to run after CUDA is initialized
    echo.
    echo std::cout ^<^< "Checking that magma is available" ^<^< std::endl;
    echo TORCH_CHECK(torch::hasMAGMA(^), "MAGMA is not available"^);
    echo.
    echo std::cout ^<^< "Checking that CuDNN is available" ^<^< std::endl;
    echo TORCH_CHECK(torch::cuda::cudnn_is_available(^), "CuDNN is not available"^);
    echo std::cout.flush(^);
    echo return 0;
    echo }
)

del example-app.exe
cl example-app.cpp torch_cpu.lib c10.lib torch_cuda.lib /EHsc /link /INCLUDE:?warp_size@cuda@at@@YAHXZ
.\example-app.exe
if ERRORLEVEL 1 exit /b 1

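:: Leave tmp\libtorch, restore the original PATH, and return to the caller's directory.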
popd

:end
set "PATH=%ORIG_PATH%"
popd
