diff --git a/.github/workflows/test_docker.yml b/.github/workflows/test_docker.yml
index 6470e81fe8a..b66a6437a37 100644
--- a/.github/workflows/test_docker.yml
+++ b/.github/workflows/test_docker.yml
@@ -33,13 +33,19 @@ env:
 
 jobs:
   docker_tests:
-    name: "Build and Test On Docker"
-    runs-on: ${{ matrix.os }}
+    name: "Build and Test On Docker on ${{ matrix.platform.os }}"
+    runs-on: ${{ matrix.platform.os }}
     strategy:
       fail-fast: false
       matrix:
         python-version: ["3.10"]
-        os: ["ubuntu-latest"]
+        platform:
+          - os: "ubuntu-latest"
+            image-name: ghcr.io/ansys-dpf/dpf-standalone:linux-26.1
+          - os: "windows-latest"
+            image-name: ghcr.io/ansys-dpf/dpf-standalone:windows-26.1
+    env:
+      DPF_DOCKER: ${{ matrix.platform.image-name }}
 
     steps:
       - uses: actions/checkout@v4
@@ -58,7 +64,7 @@ jobs:
       - name: "Build the wheel"
         shell: bash
         run: |
-          if [ ${{ matrix.os }} == "ubuntu-latest" ]; then
+          if [ ${{ matrix.platform.os }} == "ubuntu-latest" ]; then
            export platform="manylinux_2_17"
          else
            export platform="win"
@@ -79,13 +85,34 @@ jobs:
         run: |
           pip install dist/${{ steps.wheel.outputs.wheel_name }}[graphics]
 
-      - name: "Install DPF"
-        id: set-server-path
-        uses: ansys/pydpf-actions/install-dpf-docker@v2.3
+      # - name: "Install DPF"
+      #   id: set-server-path
+      #   uses: ansys/pydpf-actions/install-dpf-docker@v2.3
+      #   with:
+      #     dpf-standalone-TOKEN: ${{secrets.PYANSYS_CI_BOT_TOKEN}}
+      #     standalone_suffix: ${{ inputs.standalone_suffix }}
+      #     ANSYS_VERSION : ${{ inputs.ANSYS_VERSION || vars.ANSYS_VERSION_DEFAULT }}
+
+      - name: "Login to Github Container Registry"
+        uses: docker/login-action@5e57cd118135c172c3672efd75eb46360885c0ef # v3.6.0
         with:
-          dpf-standalone-TOKEN: ${{secrets.PYANSYS_CI_BOT_TOKEN}}
-          standalone_suffix: ${{ inputs.standalone_suffix }}
-          ANSYS_VERSION : ${{ inputs.ANSYS_VERSION || vars.ANSYS_VERSION_DEFAULT }}
+          registry: ghcr.io
+          username: ${{ github.actor }}
+          password: ${{ secrets.PYANSYS_CI_BOT_TOKEN }}
+
+      - name: Download DPF docker container
+        if: runner.os == 'Linux'
+        env:
+          IMAGE_NAME: ${{ env.DPF_DOCKER }}
+        run: |
+          docker pull "$IMAGE_NAME"
+
+      - name: Download DPF docker container
+        if: runner.os == 'Windows'
+        env:
+          IMAGE_NAME: ${{ env.DPF_DOCKER }}
+        run: |
+          docker pull "$env:IMAGE_NAME"
 
       - name: "Check licences of packages"
         uses: ansys/pydpf-actions/check-licenses@v2.3
@@ -118,7 +145,7 @@ jobs:
       - name: "Test API"
         uses: nick-fields/retry@v3
         with:
-          timeout_minutes: 10
+          timeout_minutes: 20
           max_attempts: 2
           shell: bash
           command: |
@@ -127,7 +154,7 @@ jobs:
       - name: "Test API test_launcher"
         uses: nick-fields/retry@v3
         with:
-          timeout_minutes: 2
+          timeout_minutes: 20
           max_attempts: 2
           shell: bash
           command: |
@@ -136,7 +163,7 @@ jobs:
       - name: "Test API test_server"
         uses: nick-fields/retry@v3
         with:
-          timeout_minutes: 8
+          timeout_minutes: 20
           max_attempts: 2
           shell: bash
           command: |
@@ -145,7 +172,7 @@ jobs:
       - name: "Test API test_local_server"
         uses: nick-fields/retry@v3
         with:
-          timeout_minutes: 2
+          timeout_minutes: 20
           max_attempts: 2
           shell: bash
           command: |
@@ -154,7 +181,7 @@ jobs:
       - name: "Test API test_multi_server"
         uses: nick-fields/retry@v3
         with:
-          timeout_minutes: 5
+          timeout_minutes: 20
           max_attempts: 2
           shell: bash
           command: |
@@ -163,7 +190,7 @@ jobs:
       - name: "Test API test_remote_workflow"
         uses: nick-fields/retry@v3
         with:
-          timeout_minutes: 2
+          timeout_minutes: 20
           max_attempts: 3
           shell: bash
           command: |
@@ -172,7 +199,7 @@ jobs:
       - name: "Test API test_remote_operator"
         uses: nick-fields/retry@v3
         with:
-          timeout_minutes: 2
+          timeout_minutes: 20
           max_attempts: 2
           shell: bash
           command: |
@@ -181,7 +208,7 @@ jobs:
       - name: "Test API test_workflow"
         uses: nick-fields/retry@v3
         with:
-          timeout_minutes: 3
+          timeout_minutes: 20
           max_attempts: 3
           shell: bash
           command: |
@@ -190,7 +217,7 @@ jobs:
       - name: "Test API test_service"
         uses: nick-fields/retry@v3
         with:
-          timeout_minutes: 3
+          timeout_minutes: 20
           max_attempts: 2
           shell: bash
           command: |
@@ -199,7 +226,7 @@ jobs:
       - name: "Test Operators"
         uses: nick-fields/retry@v3
         with:
-          timeout_minutes: 3
+          timeout_minutes: 20
           max_attempts: 2
           shell: bash
           command: |
@@ -208,7 +235,7 @@ jobs:
       - name: "Test Documentation"
         uses: nick-fields/retry@v3
         with:
-          timeout_minutes: 8
+          timeout_minutes: 20
           max_attempts: 2
           shell: bash
           command: |
@@ -226,7 +253,7 @@ jobs:
       - name: "Upload Test Results"
         uses: actions/upload-artifact@v4
         with:
-          name: ${{ env.PACKAGE_NAME }}_${{ matrix.python-version }}_${{ matrix.os }}_pytest_${{ inputs.ANSYS_VERSION || vars.ANSYS_VERSION_DEFAULT }}_docker
+          name: ${{ env.PACKAGE_NAME }}_${{ matrix.python-version }}_${{ matrix.platform.os }}_pytest_${{ inputs.ANSYS_VERSION || vars.ANSYS_VERSION_DEFAULT }}_docker
           path: tests/junit/test-results.xml
         timeout-minutes: 5
@@ -234,13 +261,13 @@ jobs:
         uses: codecov/codecov-action@v4
         with:
           token: ${{ secrets.CODECOV_TOKEN }} # required
-          name: ${{ env.PACKAGE_NAME }}_${{ matrix.python-version }}_${{ matrix.os }}_pytest_${{ inputs.ANSYS_VERSION || vars.ANSYS_VERSION_DEFAULT }}_docker.xml
-          flags: docker,${{ inputs.ANSYS_VERSION || vars.ANSYS_VERSION_DEFAULT }},${{ matrix.os }},${{ matrix.python-version }}
+          name: ${{ env.PACKAGE_NAME }}_${{ matrix.python-version }}_${{ matrix.platform.os }}_pytest_${{ inputs.ANSYS_VERSION || vars.ANSYS_VERSION_DEFAULT }}_docker.xml
+          flags: docker,${{ inputs.ANSYS_VERSION || vars.ANSYS_VERSION_DEFAULT }},${{ matrix.platform.os }},${{ matrix.python-version }}
 
       - name: "Upload test analytics results to Codecov"
         if: ${{ !cancelled() }}
         uses: codecov/test-results-action@v1
         with:
           token: ${{ secrets.CODECOV_TOKEN }}
-          name: test_results_${{ env.PACKAGE_NAME }}_${{ matrix.python-version }}_${{ matrix.os }}_${{ inputs.ANSYS_VERSION || vars.ANSYS_VERSION_DEFAULT }}${{ inputs.test_any == 'true' && '_any' || '' }}
-          flags: ${{ inputs.ANSYS_VERSION || vars.ANSYS_VERSION_DEFAULT }},${{ matrix.os }},${{ matrix.python-version }}${{ inputs.test_any == 'true' && ',any' || '' }}
+          name: test_results_${{ env.PACKAGE_NAME }}_${{ matrix.python-version }}_${{ matrix.platform.os }}_${{ inputs.ANSYS_VERSION || vars.ANSYS_VERSION_DEFAULT }}${{ inputs.test_any == 'true' && '_any' || '' }}
+          flags: ${{ inputs.ANSYS_VERSION || vars.ANSYS_VERSION_DEFAULT }},${{ matrix.platform.os }},${{ matrix.python-version }}${{ inputs.test_any == 'true' && ',any' || '' }}
diff --git a/src/ansys/dpf/core/core.py b/src/ansys/dpf/core/core.py
index b479cdb0e3f..317c2c4136d 100644
--- a/src/ansys/dpf/core/core.py
+++ b/src/ansys/dpf/core/core.py
@@ -345,7 +345,7 @@ class BaseService:
     --------
     Connect to an existing DPF server
 
     >>> from ansys.dpf import core as dpf
-    >>> #server = dpf.connect_to_server(ip='127.0.0.1', port = 50054, as_global=False)
+    >>> #server = dpf.connect_to_server(ip='127.0.0.1', port = 60054, as_global=False)
     >>> #base = dpf.BaseService(server=server)
     """
diff --git a/src/ansys/dpf/core/server.py b/src/ansys/dpf/core/server.py
index fc14a0ac595..d4dc04cfa8e 100644
--- a/src/ansys/dpf/core/server.py
+++ b/src/ansys/dpf/core/server.py
@@ -182,7 +182,7 @@ def start_local_server(
         default is ``"LOCALHOST"``.
     port : int, optional
         Port to connect to the remote instance on. The default is
-        ``"DPF_DEFAULT_PORT"``, which is 50054.
+        ``"DPF_DEFAULT_PORT"``, which is 60054.
     ansys_path : str or os.PathLike, optional
         Root path for the Ansys installation directory. For example,
         ``"/ansys_inc/v212/"``. The default is the latest Ansys installation.
@@ -326,7 +326,7 @@ def connect_to_server(
         default is ``"LOCALHOST"``.
     port : int
         Port to connect to the remote instance on. The default is
-        ``"DPF_DEFAULT_PORT"``, which is 50054.
+        ``"DPF_DEFAULT_PORT"``, which is 60054.
     as_global : bool, optional
         Global variable that stores the IP address and port for the DPF
         module. All DPF objects created in this Python session will
diff --git a/src/ansys/dpf/core/server_factory.py b/src/ansys/dpf/core/server_factory.py
index 2be2f8a069d..56c1c8d7207 100644
--- a/src/ansys/dpf/core/server_factory.py
+++ b/src/ansys/dpf/core/server_factory.py
@@ -28,6 +28,7 @@
 """
 
 import io
+import json
 import logging
 import os
 import subprocess
@@ -90,8 +91,23 @@ def __init__(
     ):
         from ansys.dpf.core import LOCAL_DOWNLOADED_EXAMPLES_PATH
 
-        if mounted_volumes is None:
-            mounted_volumes = {LOCAL_DOWNLOADED_EXAMPLES_PATH: "/tmp/downloaded_examples"}
+        if use_docker:
+            args = ["docker", "inspect", "-f", "json", docker_name]
+            inspect_docker_image = subprocess.run(args, capture_output=True)
+            if inspect_docker_image.stderr:
+                raise Exception(
+                    f"Specified docker image not found. Verify that the image name '{docker_name}' is valid and the image file is available locally."
+                )
+
+            output = json.loads(inspect_docker_image.stdout)
+            image_os = output[0]["Os"]
+            if mounted_volumes is None:
+                if image_os == "linux":
+                    mounted_volumes = {LOCAL_DOWNLOADED_EXAMPLES_PATH: "/tmp/downloaded_examples"}
+                else:  # image is windows
+                    mounted_volumes = {
+                        LOCAL_DOWNLOADED_EXAMPLES_PATH: "C:\\Users\\ContainerAdministrator\\AppData\\Local\\Temp\\downloaded_examples"
+                    }
 
         self._use_docker = use_docker
         self._docker_name = docker_name
diff --git a/src/ansys/dpf/core/server_types.py b/src/ansys/dpf/core/server_types.py
index 05964171502..3526f247769 100644
--- a/src/ansys/dpf/core/server_types.py
+++ b/src/ansys/dpf/core/server_types.py
@@ -60,7 +60,7 @@
 LOG = logging.getLogger(__name__)
 LOG.setLevel("DEBUG")
 
-DPF_DEFAULT_PORT = int(os.environ.get("DPF_PORT", 50054))
+DPF_DEFAULT_PORT = int(os.environ.get("DPF_PORT", 60054))
 LOCALHOST = os.environ.get("DPF_IP", "127.0.0.1")
 
 RUNNING_DOCKER = server_factory.create_default_docker_config()
@@ -236,7 +236,7 @@ def launch_dpf(
         default is ``"LOCALHOST"``.
     port : int
         Port to connect to the remote instance on. The default is
-        ``"DPF_DEFAULT_PORT"``, which is 50054.
+        ``"DPF_DEFAULT_PORT"``, which is 60054.
     timeout : float, optional
         Maximum number of seconds for the initialization attempt.
         The default is ``10``. Once the specified number of seconds
@@ -258,7 +258,7 @@ def launch_dpf_on_docker(
     ansys_path=None,
     ip=LOCALHOST,
     port=DPF_DEFAULT_PORT,
-    timeout=10.0,
+    timeout=120.0,
 ):
     """Launch Ansys DPF.
 
@@ -274,10 +274,10 @@ def launch_dpf_on_docker(
         default is ``"LOCALHOST"``.
     port : int
         Port to connect to the remote instance on. The default is
-        ``"DPF_DEFAULT_PORT"``, which is 50054.
+        ``"DPF_DEFAULT_PORT"``, which is 60054.
     timeout : float, optional
         Maximum number of seconds for the initialization attempt.
-        The default is ``10``. Once the specified number of seconds
+        The default is ``120``. Once the specified number of seconds
         passes, the connection fails.
""" @@ -836,7 +836,6 @@ def __init__( ansys_path=ansys_path, ip=ip, port=port, - timeout=timeout, ) else: launch_dpf(ansys_path, ip, port, timeout=timeout, context=context) @@ -1196,7 +1195,7 @@ class LegacyGrpcServer(BaseServer): default is ``"LOCALHOST"``. port : int Port to connect to the remote instance on. The default is - ``"DPF_DEFAULT_PORT"``, which is 50054. + ``"DPF_DEFAULT_PORT"``, which is 60054. timeout : float, optional Maximum number of seconds for the initialization attempt. The default is ``10``. Once the specified number of seconds @@ -1271,7 +1270,6 @@ def __init__( ansys_path=ansys_path, ip=ip, port=port, - timeout=timeout, ) else: launch_dpf(ansys_path, ip, port, timeout=timeout, context=context) diff --git a/tests/conftest.py b/tests/conftest.py index f241ee23aa4..6e812aa9d9d 100644 --- a/tests/conftest.py +++ b/tests/conftest.py @@ -27,8 +27,10 @@ """ import functools +import json import os from pathlib import Path +import subprocess import warnings import psutil @@ -67,9 +69,23 @@ def _get_test_files_directory(): ssl._create_default_https_context = ssl._create_unverified_context if running_docker: - ansys.dpf.core.server_types.RUNNING_DOCKER.mounted_volumes[_get_test_files_directory()] = ( - "/tmp/test_files" - ) + docker_name = ansys.dpf.core.server_types.RUNNING_DOCKER.docker_name + args = ["docker", "inspect", "-f", "json", docker_name] + inspect_docker_image = subprocess.run(args, capture_output=True) + if inspect_docker_image.stderr: + raise Exception( + f"Specified docker image not found. Verify that the image name '{docker_name}' is valid and the image file is available locally." + ) + output = json.loads(inspect_docker_image.stdout) + image_os = output[0]["Os"] + if image_os == "linux": + ansys.dpf.core.server_types.RUNNING_DOCKER.mounted_volumes[_get_test_files_directory()] = ( + "/tmp/test_files" + ) + else: # image is windows + ansys.dpf.core.server_types.RUNNING_DOCKER.mounted_volumes[_get_test_files_directory()] = ( + "C:\\Users\\ContainerAdministrator\\AppData\\Local\\Temp\\test_files" + ) @pytest.hookimpl() diff --git a/tests/entry/conftest.py b/tests/entry/conftest.py index 01e1a0bf70a..12148e284b5 100644 --- a/tests/entry/conftest.py +++ b/tests/entry/conftest.py @@ -29,8 +29,10 @@ """ import functools +import json import os from pathlib import Path +import subprocess import pytest @@ -67,9 +69,23 @@ def _get_test_files_directory(): ssl._create_default_https_context = ssl._create_unverified_context if running_docker: - ansys.dpf.core.server_types.RUNNING_DOCKER.mounted_volumes[_get_test_files_directory()] = ( - "/tmp/test_files" - ) + docker_name = ansys.dpf.core.server_types.RUNNING_DOCKER.docker_name + args = ["docker", "inspect", "-f", "json", docker_name] + inspect_docker_image = subprocess.run(args, capture_output=True) + if inspect_docker_image.stderr: + raise Exception( + f"Specified docker image not found. Verify that the image name '{docker_name}' is valid and the image file is available locally." 
+        )
+    output = json.loads(inspect_docker_image.stdout)
+    image_os = output[0]["Os"]
+    if image_os == "linux":
+        ansys.dpf.core.server_types.RUNNING_DOCKER.mounted_volumes[_get_test_files_directory()] = (
+            "/tmp/test_files"
+        )
+    else:  # image is windows
+        ansys.dpf.core.server_types.RUNNING_DOCKER.mounted_volumes[_get_test_files_directory()] = (
+            "C:\\Users\\ContainerAdministrator\\AppData\\Local\\Temp\\test_files"
+        )
 
 SERVERS_VERSION_GREATER_THAN_OR_EQUAL_TO_8_1 = meets_version(
     get_server_version(core._global_server()), "8.1"
diff --git a/tests/test_plotter.py b/tests/test_plotter.py
index 743f09755f9..69131e924bd 100644
--- a/tests/test_plotter.py
+++ b/tests/test_plotter.py
@@ -20,7 +20,9 @@
 # OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
 # SOFTWARE.
 
+import json
 from pathlib import Path
+import subprocess
 
 import pytest
 
@@ -28,6 +30,7 @@
 from ansys.dpf import core
 from ansys.dpf.core import Model, Operator, element_types, errors as dpf_errors, misc
 from ansys.dpf.core.plotter import plot_chart
+import ansys.dpf.core.server_types
 from conftest import (
     SERVERS_VERSION_GREATER_THAN_OR_EQUAL_TO_5_0,
     SERVERS_VERSION_GREATER_THAN_OR_EQUAL_TO_7_0,
@@ -43,6 +46,21 @@
 else:
     HAS_PYVISTA = False
 
+running_docker = ansys.dpf.core.server_types.RUNNING_DOCKER.use_docker
+
+# Determine image os if running docker
+image_os = None
+if running_docker:
+    docker_name = ansys.dpf.core.server_types.RUNNING_DOCKER.docker_name
+    args = ["docker", "inspect", "-f", "json", docker_name]
+    inspect_docker_image = subprocess.run(args, capture_output=True)
+    if inspect_docker_image.stderr:
+        raise Exception(
+            f"Specified docker image not found. Verify that the image name '{docker_name}' is valid and the image file is available locally."
+        )
+    output = json.loads(inspect_docker_image.stdout)
+    image_os = output[0]["Os"]
+
 
 def remove_picture(picture):
     if Path.cwd().joinpath(picture).exists():
@@ -234,6 +252,10 @@ def test_field_nodal_plot(allkindofcomplexity):
 
 
 @pytest.mark.skipif(not HAS_PYVISTA, reason="Please install pyvista")
+# @pytest.mark.skipif(
+#     running_docker and image_os == "windows",
+#     reason="Test fails when running DPF server on a windows container",
+# )
 def test_field_elemental_nodal_plot_simple(simple_bar):
     model = Model(simple_bar)
     stress = model.results.element_nodal_forces()
@@ -255,6 +277,10 @@ def test_field_elemental_nodal_plot_scoped(simple_bar):
 
 
 @pytest.mark.skipif(not HAS_PYVISTA, reason="Please install pyvista")
+# @pytest.mark.skipif(
+#     running_docker and image_os == "windows",
+#     reason="Test fails when running DPF server on a windows container",
+# )
 def test_field_elemental_nodal_plot_multiple_solid_types():
     from ansys.dpf.core import examples
 
@@ -266,6 +292,10 @@ def test_field_elemental_nodal_plot_multiple_solid_types():
 
 
 @pytest.mark.skipif(not HAS_PYVISTA, reason="Please install pyvista")
+# @pytest.mark.skipif(
+#     running_docker and image_os == "windows",
+#     reason="Test fails when running DPF server on a windows container",
+# )
 def test_field_elemental_nodal_plot_shells():
     from ansys.dpf.core import examples
 
@@ -290,6 +320,10 @@ def test_field_elemental_nodal_plot_multi_shells(multishells):
 
 @pytest.mark.skipif(not HAS_PYVISTA, reason="Please install pyvista")
 @pytest.mark.skipif(not SERVERS_VERSION_GREATER_THAN_OR_EQUAL_TO_10_0, reason="Old bug before 25R2")
+# @pytest.mark.skipif(
+#     running_docker and image_os == "windows",
+#     reason="Test fails when running DPF server on a windows container",
+# )
 def test_dpf_plotter_add_field_elemental_nodal_multi_shells(multishells):
     fc: core.FieldsContainer = core.operators.result.stress(
         data_sources=core.DataSources(multishells),
@@ -413,6 +447,10 @@ def test_dpf_plotter_add_field_elemental_nodal_plot_simple(simple_bar):
 
 
 @pytest.mark.skipif(not HAS_PYVISTA, reason="Please install pyvista")
+# @pytest.mark.skipif(
+#     running_docker and image_os == "windows",
+#     reason="Test fails when running DPF server on a windows container",
+# )
 def test_dpf_plotter_add_field_elemental_nodal_plot_scoped(simple_bar):
     mesh_scoping = dpf.core.mesh_scoping_factory.elemental_scoping(
         element_ids=list(range(1501, 3001))
diff --git a/tests/test_python_plugins.py b/tests/test_python_plugins.py
index 3abfe1a33c2..ea55c6f7910 100644
--- a/tests/test_python_plugins.py
+++ b/tests/test_python_plugins.py
@@ -37,6 +37,7 @@
     PinSpecification,
     SpecificationProperties,
 )
+import ansys.dpf.core.server_types
 import conftest
 from conftest import (
     SERVERS_VERSION_GREATER_THAN_OR_EQUAL_TO_4_0,
@@ -54,6 +55,10 @@
 if platform.system() == "Linux":
     pytest.skip("Known failures for the Ubuntu-latest GitHub pipelines", allow_module_level=True)
 
+running_docker = ansys.dpf.core.server_types.RUNNING_DOCKER.use_docker
+if running_docker:
+    pytest.skip("Skip python plugins tests in docker", allow_module_level=True)
+
 update_virtual_environment_for_custom_operators(restore_original=True)
 update_virtual_environment_for_custom_operators()
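
Note (illustrative sketch, not part of the patch): the container-OS probe used above is repeated in server_factory.py, tests/conftest.py, tests/entry/conftest.py, and tests/test_plotter.py. A minimal, hypothetical standalone version of that probe, assuming the Docker CLI is on PATH and the image has already been pulled locally, mirroring the same `docker inspect -f json` call and error message as the patch:

# Hypothetical helper, not part of the patch: mirrors the docker-inspect probe above.
# Assumes the Docker CLI is available and the image is already present locally.
import json
import subprocess


def get_docker_image_os(docker_name: str) -> str:
    """Return "linux" or "windows" for a locally available DPF docker image."""
    args = ["docker", "inspect", "-f", "json", docker_name]
    inspect_docker_image = subprocess.run(args, capture_output=True)
    if inspect_docker_image.stderr:
        raise Exception(
            f"Specified docker image not found. Verify that the image name "
            f"'{docker_name}' is valid and the image file is available locally."
        )
    output = json.loads(inspect_docker_image.stdout)
    return output[0]["Os"]


# Example (image name taken from the workflow matrix above):
# get_docker_image_os("ghcr.io/ansys-dpf/dpf-standalone:windows-26.1")  # -> "windows"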