
Commit cb42869

Add smoke test for binary package

1 parent 219fab7 commit cb42869

File tree

1 file changed: +209 additions, 0 deletions

windows/internal/smoke_test.bat

Lines changed: 209 additions & 0 deletions
@@ -0,0 +1,209 @@
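:: smoke_test.bat sanity-checks a freshly built PyTorch binary package
:: (wheel, conda, or libtorch) on Windows. It first saves the original PATH
:: and uses WMI to detect whether an NVIDIA GPU is present, so CUDA checks
:: can be skipped on CPU-only machines.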
set SRC_DIR=%~dp0

pushd %SRC_DIR%\..

set "ORIG_PATH=%PATH%"

setlocal EnableDelayedExpansion
set NVIDIA_GPU_EXISTS=0
for /F "delims=" %%i in ('wmic path win32_VideoController get name') do (
    set GPUS=%%i
    if not "x!GPUS:NVIDIA=!" == "x!GPUS!" (
        SET NVIDIA_GPU_EXISTS=1
        goto gpu_check_end
    )
)
:gpu_check_end
endlocal & set NVIDIA_GPU_EXISTS=%NVIDIA_GPU_EXISTS%
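
:: Dispatch on PACKAGE_TYPE; anything other than wheel, conda, or libtorch
:: is an error.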
if "%PACKAGE_TYPE%" == "wheel" goto wheel
if "%PACKAGE_TYPE%" == "conda" goto conda
if "%PACKAGE_TYPE%" == "libtorch" goto libtorch

echo "unknown package type"
exit /b 1
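
:: --- wheel: install a matching CPython release, then pip-install the built
:: wheel plus its test-time dependencies.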
:wheel
echo "install wheel package"

set PYTHON_INSTALLER_URL=
if "%DESIRED_PYTHON%" == "3.8" set "PYTHON_INSTALLER_URL=https://www.python.org/ftp/python/3.8.2/python-3.8.2-amd64.exe"
if "%DESIRED_PYTHON%" == "3.7" set "PYTHON_INSTALLER_URL=https://www.python.org/ftp/python/3.7.7/python-3.7.7-amd64.exe"
if "%DESIRED_PYTHON%" == "3.6" set "PYTHON_INSTALLER_URL=https://www.python.org/ftp/python/3.6.8/python-3.6.8-amd64.exe"
if "%PYTHON_INSTALLER_URL%" == "" (
    echo Python %DESIRED_PYTHON% not supported yet
    exit /b 1
)

del python-amd64.exe
curl --retry 3 -kL "%PYTHON_INSTALLER_URL%" --output python-amd64.exe
if errorlevel 1 exit /b 1

start /wait "" python-amd64.exe /quiet InstallAllUsers=1 PrependPath=1 Include_test=0 TargetDir=%CD%\Python%PYTHON_VERSION%
if errorlevel 1 exit /b 1

set "PATH=%CD%\Python%PYTHON_VERSION%\Scripts;%CD%\Python%PYTHON_VERSION%;%PATH%"

for /F "delims=" %%i in ('where /R "%PYTORCH_FINAL_PACKAGE_DIR:/=\%" *.whl') do pip install "%%i"
if errorlevel 1 exit /b 1

pip install -q future numpy protobuf six "mkl>=2019"
if errorlevel 1 exit /b 1

goto smoke_test
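
:: --- conda: install Miniconda, create a test environment, and install the
:: built conda package offline, plus cudatoolkit for CUDA builds.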
:conda
echo "install conda package"

:: Install Miniconda3
set "CONDA_HOME=%CD%\conda"
set "tmp_conda=%CONDA_HOME%"
set "miniconda_exe=%CD%\miniconda.exe"

rmdir /s /q conda
del miniconda.exe
curl -k https://repo.anaconda.com/miniconda/Miniconda3-latest-Windows-x86_64.exe -o "%miniconda_exe%"
call ..\conda\install_conda.bat
if ERRORLEVEL 1 exit /b 1

set "PATH=%CONDA_HOME%;%CONDA_HOME%\scripts;%CONDA_HOME%\Library\bin;%PATH%"

conda create -qyn testenv python=%DESIRED_PYTHON%
if errorlevel 1 exit /b 1

call %CONDA_HOME%\condabin\activate.bat testenv
if errorlevel 1 exit /b 1

for /F "delims=" %%i in ('where /R "%PYTORCH_FINAL_PACKAGE_DIR:/=\%" *.tar.bz2') do call conda install -y "%%i" --offline
if ERRORLEVEL 1 exit /b 1

call conda install -yq future numpy protobuf six
if ERRORLEVEL 1 exit /b 1

:: Derive the dotted CUDA version (e.g. 101 -> 10.1) before the if block, so
:: the plain %var% expansions inside the block see the final values without
:: needing delayed expansion.
set CUDA_VER_MAJOR=%CUDA_VERSION:~0,-1%
set CUDA_VER_MINOR=%CUDA_VERSION:~-1,1%
set CUDA_VERSION_STR=%CUDA_VER_MAJOR%.%CUDA_VER_MINOR%

if "%CUDA_VERSION%" == "cpu" (
    call conda install -y cpuonly -c pytorch
    if ERRORLEVEL 1 exit /b 1
) else (
    set /a CUDA_VER=%CUDA_VERSION%
    call conda install -yq -c pytorch "cudatoolkit=%CUDA_VERSION_STR%"
    if ERRORLEVEL 1 exit /b 1
)
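
:: --- smoke_test: import torch and caffe2 from Python and check that MKL is
:: available.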
:smoke_test
python -c "import torch"
if ERRORLEVEL 1 exit /b 1

python -c "from caffe2.python import core"
if ERRORLEVEL 1 exit /b 1

echo Checking that MKL is available
python -c "import torch; exit(0 if torch.backends.mkl.is_available() else 1)"
if ERRORLEVEL 1 exit /b 1
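
:: The remaining Python checks need CUDA, so skip them without an NVIDIA GPU.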
if "%NVIDIA_GPU_EXISTS%" == "0" (
106+
echo "Skip CUDA tests for machines without a Nvidia GPU card"
107+
goto end
108+
)
109+
110+
echo Checking that CUDA archs are setup correctly
111+
python -c "import torch; torch.randn([3,5]).cuda()"
112+
if ERRORLEVEL 1 exit /b 1
113+
114+
echo Checking that magma is available
115+
python -c "import torch; torch.rand(1).cuda(); exit(0 if torch.cuda.has_magma else 1)"
116+
if ERRORLEVEL 1 exit /b 1
117+
118+
echo Checking that CuDNN is available
119+
python -c "import torch; exit(0 if torch.backends.cudnn.is_available() else 1)"
120+
if ERRORLEVEL 1 exit /b 1
121+
122+
goto end
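
:: --- libtorch: run internal/vs_install.ps1, unpack the libtorch zip, locate
:: VS 2017 Build Tools via vswhere, and compile small C++ test programs
:: against the shipped libraries.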
:libtorch
echo "install and test libtorch"

powershell internal/vs_install.ps1
if ERRORLEVEL 1 exit /b 1

for /F "delims=" %%i in ('where /R "%PYTORCH_FINAL_PACKAGE_DIR:/=\%" *-latest.zip') do 7z x "%%i" -otmp
pushd tmp\libtorch

for /f "usebackq tokens=*" %%i in (`"%ProgramFiles(x86)%\Microsoft Visual Studio\Installer\vswhere.exe" -legacy -products * -version [15^,16^) -property installationPath`) do (
    if exist "%%i" if exist "%%i\VC\Auxiliary\Build\vcvarsall.bat" (
        set "VS15INSTALLDIR=%%i"
        set "VS15VCVARSALL=%%i\VC\Auxiliary\Build\vcvarsall.bat"
        goto vswhere
    )
)

:vswhere
IF "%VS15VCVARSALL%"=="" (
    echo Visual Studio 2017 C++ BuildTools is required to compile PyTorch test on Windows
    exit /b 1
)
call "%VS15VCVARSALL%" x64

set install_root=%CD%
set INCLUDE=%INCLUDE%;%install_root%/include;%install_root%/include/torch/csrc/api/include
set LIB=%LIB%;%install_root%/lib
set PATH=%PATH%;%install_root%/lib
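
:: Write a minimal program that links against c10/torch_cpu, compile it with
:: cl, and run it; then do the same for an MKL-availability check.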
>simple-torch-test.cpp (
    echo #include ^<torch/torch.h^>
    echo int main(int argc, const char* argv[]^) {
    echo TORCH_WARN("Simple test passed!"^);
    echo return 0;
    echo }
)

cl simple-torch-test.cpp c10.lib torch_cpu.lib /EHsc
.\simple-torch-test.exe
if ERRORLEVEL 1 exit /b 1

echo "Checking that MKL is available"
>check-torch-mkl.cpp (
    echo #include ^<torch/torch.h^>
    echo int main(int argc, const char* argv[]^) {
    echo TORCH_CHECK(torch::hasMKL(^), "MKL is not available"^);
    echo return 0;
    echo }
)

cl check-torch-mkl.cpp c10.lib torch_cpu.lib /EHsc
.\check-torch-mkl.exe
if ERRORLEVEL 1 exit /b 1
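
:: The C++ CUDA checks also require an NVIDIA GPU.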
if "%NVIDIA_GPU_EXISTS%" == "0" (
    echo "Skip CUDA tests for machines without a Nvidia GPU card"
    goto end
)
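
:: Build and run a CUDA smoke test. The /INCLUDE:?warp_size@cuda@at@@YAHXZ
:: linker flag forces a reference into torch_cuda.lib so the linker does not
:: discard the otherwise-unreferenced CUDA library.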
>check-torch-cuda.cpp (
    echo #include ^<torch/torch.h^>
    echo int main(int argc, const char* argv[]^) {
    echo std::cout ^<^< "Checking that CUDA archs are setup correctly" ^<^< std::endl;
    echo TORCH_CHECK(torch::rand({3, 5}, torch::Device(torch::kCUDA^)^).defined(^), "CUDA archs are not setup correctly"^);
    echo.
    echo // These have to run after CUDA is initialized
    echo.
    echo std::cout ^<^< "Checking that magma is available" ^<^< std::endl;
    echo TORCH_CHECK(torch::hasMAGMA(^), "MAGMA is not available"^);
    echo.
    echo std::cout ^<^< "Checking that CuDNN is available" ^<^< std::endl;
    echo TORCH_CHECK(torch::cuda::cudnn_is_available(^), "CuDNN is not available"^);
    echo std::cout.flush(^);
    echo return 0;
    echo }
)

cl check-torch-cuda.cpp torch_cpu.lib c10.lib torch_cuda.lib /EHsc /link /INCLUDE:?warp_size@cuda@at@@YAHXZ
.\check-torch-cuda.exe
if ERRORLEVEL 1 exit /b 1

popd

:end
set "PATH=%ORIG_PATH%"
popd
