Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
3 changes: 3 additions & 0 deletions .github/workflows/ci.yml
Original file line number Diff line number Diff line change
Expand Up @@ -170,6 +170,9 @@ jobs:
- name: Install pyvistaqt requirements
run: make install-pyvistaqt-requirements

- name: Install post requirements
run: make install-post

- name: Unit Testing
run: make unittest

Expand Down
72 changes: 55 additions & 17 deletions ansys/fluent/core/meta.py
Original file line number Diff line number Diff line change
Expand Up @@ -8,6 +8,16 @@
from ansys.fluent.core.services.datamodel_tui import PyMenu


class LocalObjectDataExtractor:
    """Lazily expose session-level services for a local object.

    Every attribute is a zero-argument callable: the session is looked up
    at call time by walking up to the object's top-most parent, so the
    extractor stays valid even if the parent chain's session changes.
    """

    def __init__(self, obj):
        def _session():
            # Resolve on each call — the top-most parent owns the session.
            return obj._get_top_most_parent().session

        self.field_info = lambda: _session().field_info
        self.field_data = lambda: _session().field_data
        self.surface_api = lambda: _session().tui.solver.surface
        self.id = lambda: _session().id


class Attribute:
VALID_NAMES = ["range", "allowed_values"]

Expand Down Expand Up @@ -87,7 +97,40 @@ def __new__(cls, name, bases, attrs):
return super(PyMenuMeta, cls).__new__(cls, name, bases, attrs)


class PyLocalPropertyMeta(type):
class PyLocalBaseMeta(type):
    """Base metaclass that injects parent-traversal helpers.

    Classes built with this metaclass gain two methods:

    ``_get_parent_by_type(obj_type, obj=None)``
        Nearest ancestor (via ``_parent`` links) that is an instance of
        ``obj_type``, or ``None`` if there is no such ancestor.

    ``_get_top_most_parent(obj=None)``
        The root of the ``_parent`` chain (the object itself when it has
        no parent).
    """

    @staticmethod
    def _make_get_parent_by_type():
        def _get_parent_by_type(self, obj_type, obj=None):
            # Walk up the _parent chain until a matching ancestor is found.
            node = self if obj is None else obj
            ancestor = getattr(node, "_parent", None)
            while ancestor:
                if isinstance(ancestor, obj_type):
                    return ancestor
                ancestor = getattr(ancestor, "_parent", None)
            return None

        return _get_parent_by_type

    @staticmethod
    def _make_get_top_most_parent():
        def _get_top_most_parent(self, obj=None):
            # Follow _parent links to the root of the chain.
            node = self if obj is None else obj
            while getattr(node, "_parent", None):
                node = node._parent
            return node

        return _get_top_most_parent

    def __new__(cls, name, bases, attrs):
        attrs["_get_parent_by_type"] = cls._make_get_parent_by_type()
        attrs["_get_top_most_parent"] = cls._make_get_top_most_parent()
        return super().__new__(cls, name, bases, attrs)


class PyLocalPropertyMeta(PyLocalBaseMeta):
"""Metaclass for local property classes."""

@classmethod
Expand Down Expand Up @@ -126,14 +169,8 @@ def wrapper(self, value):
@classmethod
def __create_init(cls):
def wrapper(self, parent):
def get_top_most_parent(obj):
parent = obj
if getattr(obj, "parent", None):
parent = get_top_most_parent(obj.parent)
return parent

self.get_session = lambda: get_top_most_parent(self).session
self.parent = parent
self._data_extractor = LocalObjectDataExtractor(self)
self._parent = parent
self._on_change_cbs = []
annotations = self.__class__.__dict__.get("__annotations__")
if isinstance(getattr(self.__class__, "value", None), property):
Expand Down Expand Up @@ -194,17 +231,17 @@ def __new__(cls, name, bases, attrs):
attrs["_validate"] = cls.__create_validate()
attrs["_register_on_change_cb"] = cls.__create_register_on_change()
attrs["set_state"] = cls.__create_set_state()
attrs["parent"] = None
return super(PyLocalPropertyMeta, cls).__new__(cls, name, bases, attrs)


class PyLocalObjectMeta(type):
class PyLocalObjectMeta(PyLocalBaseMeta):
"""Metaclass for local object classes."""

@classmethod
def __create_init(cls):
def wrapper(self, parent):
self.parent = parent
self._parent = parent
self._data_extractor = LocalObjectDataExtractor(self)

def update(clss):
for name, cls in clss.__dict__.items():
Expand Down Expand Up @@ -260,7 +297,8 @@ def wrapper(self, value):
obj.set_state(val)
else:
obj.update(val)
wrapper.__doc__ = "Update method."

wrapper.__doc__ = "Update object."
return wrapper

# graphics = ansys.fluent.postprocessing.pyvista.Graphics(session1)
Expand Down Expand Up @@ -330,7 +368,6 @@ def __new__(cls, name, bases, attrs):
attrs["__setattr__"] = cls.__create_setattr()
attrs["__repr__"] = cls.__create_repr()
attrs["update"] = cls.__create_updateitem()
attrs["parent"] = None
return super(PyLocalObjectMeta, cls).__new__(cls, name, bases, attrs)


Expand All @@ -340,8 +377,9 @@ class PyLocalNamedObjectMeta(PyLocalObjectMeta):
@classmethod
def __create_init(cls):
def wrapper(self, name, parent):
self.__name = name
self.parent = parent
self._name = name
self._data_extractor = LocalObjectDataExtractor(self)
self._parent = parent

def update(clss):
for name, cls in clss.__dict__.items():
Expand Down Expand Up @@ -382,7 +420,7 @@ class PyLocalContainer(MutableMapping):
"""Local container for named objects."""

def __init__(self, parent, object_class):
self.parent = parent
self._parent = parent
self.__object_class = object_class
self.__collection: dict = {}

Expand Down
81 changes: 47 additions & 34 deletions ansys/fluent/core/services/field_data.py
Original file line number Diff line number Diff line change
Expand Up @@ -43,9 +43,9 @@ def get_fields(self, request):
return self.__stub.GetFields(request, metadata=self.__metadata)


class FieldData:
class FieldInfo:
"""
Provide the field data.
Provides access to Fluent field info.

Methods
-------
Expand All @@ -62,39 +62,8 @@ class FieldData:
get_surfaces_info(self) -> dict
Get surfaces information i.e. surface name, id and type.

get_surfaces(surface_ids: List[int], overset_mesh: bool) -> Dict[int, Dict]
Get surfaces data i.e. coordinates and connectivity.

get_scalar_field(
surface_ids: List[int],
scalar_field: str,
node_value: Optional[bool] = True,
boundary_value: Optional[bool] = False,
) -> Dict[int, Dict]:
Get scalar field data i.e. surface data and associated
scalar field values.

get_vector_field(
surface_ids: List[int],
vector_field: Optional[str] = "velocity",
scalar_field: Optional[str] = "",
node_value: Optional[bool] = False,
) -> Dict[int, Dict]:
Get vector field data i.e. surface data and associated
scalar and vector field values.

"""

# data mapping
_proto_field_type_to_np_data_type = {
FieldDataProtoModule.FieldType.INT_ARRAY: np.int32,
FieldDataProtoModule.FieldType.LONG_ARRAY: np.int64,
FieldDataProtoModule.FieldType.FLOAT_ARRAY: np.float32,
FieldDataProtoModule.FieldType.DOUBLE_ARRAY: np.float64,
}
_chunk_size = 256 * 1024
_bytes_stream = True

def __init__(self, service: FieldDataService):
self.__service = service

Expand Down Expand Up @@ -137,7 +106,7 @@ def get_vector_fields_info(self) -> dict:
def get_surfaces_info(self) -> dict:
request = FieldDataProtoModule.GetSurfacesInfoResponse()
response = self.__service.get_surfaces_info(request)
return {
info = {
surface_info.surfaceName: {
"surface_id": [surf.id for surf in surface_info.surfaceId],
"zone_id": surface_info.zoneId.id,
Expand All @@ -146,6 +115,50 @@ def get_surfaces_info(self) -> dict:
}
for surface_info in response.surfaceInfo
}
return info


class FieldData:
"""
Provides access to Fluent field data on surfaces.

Methods
-------
get_surfaces(surface_ids: List[int], overset_mesh: bool) -> Dict[int, Dict]
Get surfaces data i.e. coordinates and connectivity.

get_scalar_field(
surface_ids: List[int],
scalar_field: str,
node_value: Optional[bool] = True,
boundary_value: Optional[bool] = False,
) -> Dict[int, Dict]:
Get scalar field data i.e. surface data and associated
scalar field values.

get_vector_field(
surface_ids: List[int],
vector_field: Optional[str] = "velocity",
scalar_field: Optional[str] = "",
node_value: Optional[bool] = False,
) -> Dict[int, Dict]:
Get vector field data i.e. surface data and associated
scalar and vector field values.

"""

# data mapping
_proto_field_type_to_np_data_type = {
FieldDataProtoModule.FieldType.INT_ARRAY: np.int32,
FieldDataProtoModule.FieldType.LONG_ARRAY: np.int64,
FieldDataProtoModule.FieldType.FLOAT_ARRAY: np.float32,
FieldDataProtoModule.FieldType.DOUBLE_ARRAY: np.float64,
}
_chunk_size = 256 * 1024
_bytes_stream = True

def __init__(self, service: FieldDataService):
self.__service = service

def _extract_fields(self, chunk_iterator):
def _extract_field(field_datatype, field_size, chunk_iterator):
Expand Down
16 changes: 12 additions & 4 deletions ansys/fluent/core/session.py
Original file line number Diff line number Diff line change
Expand Up @@ -17,17 +17,23 @@
DatamodelService as DatamodelService_TUI,
)
from ansys.fluent.core.services.datamodel_tui import PyMenu as PyMenu_TUI
from ansys.fluent.core.services.field_data import FieldData, FieldDataService
from ansys.fluent.core.services.field_data import (
FieldInfo,
FieldData,
FieldDataService,
)
from ansys.fluent.core.services.health_check import HealthCheckService
from ansys.fluent.core.services.scheme_eval import (
SchemeEval, SchemeEvalService
SchemeEval,
SchemeEvalService,
)
from ansys.fluent.core.services.settings import SettingsService
from ansys.fluent.core.services.transcript import TranscriptService
from ansys.fluent.core.solver.flobject import get_root as settings_get_root
from ansys.fluent.core.services.events import EventsService
from ansys.fluent.core.solver.events_manager import EventsManager


def _parse_server_info_file(filename: str):
with open(filename, encoding="utf-8") as f:
lines = f.readlines()
Expand Down Expand Up @@ -144,8 +150,9 @@ def __init__(
if not port:
port = os.getenv("PYFLUENT_FLUENT_PORT")
if not port:
raise RuntimeError("The port to connect to Fluent "
"session is not provided.")
raise ValueError(
"The port to connect to Fluent session is not provided."
)
self._channel = grpc.insecure_channel(f"{ip}:{port}")
self._metadata: List[Tuple[str, str]] = []
self._id = f"session-{next(Session._id_iter)}"
Expand All @@ -170,6 +177,7 @@ def __init__(
self._field_data_service = FieldDataService(
self._channel, self._metadata
)
self.field_info = FieldInfo(self._field_data_service)
self.field_data = FieldData(self._field_data_service)
self.tui = Session.Tui(self._datamodel_service_tui)

Expand Down
86 changes: 86 additions & 0 deletions ansys/fluent/core/utils/dump_session_data.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,86 @@
"""Module providing dump session data functionality."""
import pickle


def dump_session_data(
    session, file_path: str, fields: list = None, surfaces: list = None
):
    """
    Dump session field data to a pickle file.

    Parameters
    ----------
    session :
        Session object exposing ``field_info`` and ``field_data`` services.
    file_path : str
        Path of the pickle file to write.
    fields : list, optional
        List of fields to write. If empty or None then all fields will be
        written.
    surfaces : list, optional
        List of surfaces to write. If empty or None then all surfaces will
        be written.
    """
    # Avoid mutable default arguments; empty list keeps the original
    # "no filter" semantics.
    fields = list(fields) if fields else []
    surfaces = list(surfaces) if surfaces else []

    session_data = {}
    session_data["scalar_fields_info"] = {
        name: info
        for name, info in session.field_info.get_fields_info().items()
        if not fields or info["solver_name"] in fields
    }
    session_data["surfaces_info"] = {
        name: info
        for name, info in session.field_info.get_surfaces_info().items()
        if not surfaces or name in surfaces
    }
    session_data[
        "vector_fields_info"
    ] = session.field_info.get_vector_fields_info()

    if not fields:
        fields = [
            info["solver_name"]
            for info in session_data["scalar_fields_info"].values()
        ]
    # First surface id of each selected surface drives all per-surface dumps.
    surfaces_id = [
        info["surface_id"][0]
        for info in session_data["surfaces_info"].values()
    ]

    # Min/max ranges per field and surface, for node and cell locations.
    session_data["range"] = {
        field: {
            surface: {
                "node_value": session.field_info.get_range(
                    field, True, [surface]
                ),
                "cell_value": session.field_info.get_range(
                    field, False, [surface]
                ),
            }
            for surface in surfaces_id
        }
        for field in fields
    }

    # Scalar field values per field and surface, node- and cell-located.
    session_data["scalar-field"] = {
        field: {
            surface: {
                "node_value": session.field_data.get_scalar_field(
                    [surface], field, True
                )[surface],
                "cell_value": session.field_data.get_scalar_field(
                    [surface], field, False
                )[surface],
            }
            for surface in surfaces_id
        }
        for field in fields
    }

    # Surface geometry (coordinates and connectivity).
    session_data["surfaces"] = {
        surface: session.field_data.get_surfaces([surface])[surface]
        for surface in surfaces_id
    }

    # Vector field values per surface.
    session_data["vector-field"] = {
        surface: session.field_data.get_vector_field([surface])[surface]
        for surface in surfaces_id
    }

    # Context manager guarantees the file is closed even if pickling fails.
    with open(file_path, "wb") as pickle_file:
        pickle.dump(session_data, pickle_file)
1 change: 0 additions & 1 deletion ansys/fluent/core/utils/generic.py
Original file line number Diff line number Diff line change
Expand Up @@ -49,7 +49,6 @@ def cb(*args, **kwargs):

def in_notebook():
"""Function to check if application is running in notebook."""

try:
from IPython import get_ipython

Expand Down
Loading