15 changes: 15 additions & 0 deletions pyoptsparse/pyOpt_optimization.py
@@ -756,10 +756,25 @@ def finalize(self):
This is a helper function which will only finalize the optProb if it's not already finalized.
"""
if not self.finalized:
self._finalizeObjectives()
self._finalizeDesignVariables()
self._finalizeConstraints()
self.finalized = True

def _finalizeObjectives(self):
"""
Communicate objectives potentially from different
processors.

Warnings
--------
This should not be called directly. Instead, call self.finalize()
to ensure that both design variables and constraints are properly finalized.
"""

# Determine the consistent set of objectives from all processors.
self.objectives = self._reduceDict(self.objectives)

def _finalizeDesignVariables(self):
"""
Communicate design variables potentially from different
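The new _finalizeObjectives follows the same pattern as the existing _finalizeDesignVariables and _finalizeConstraints helpers: it hands self.objectives to _reduceDict, whose implementation is not shown in this diff. Purely as an illustration of that kind of allgather-and-merge (a sketch under assumed behavior, not the actual pyOptSparse implementation), it could look something like this:

# Illustrative sketch only; the real pyOptSparse _reduceDict may differ.
from collections import OrderedDict


def reduce_dict_sketch(comm, localDict):
    """Merge per-rank OrderedDicts so every rank ends up with the same keys."""
    if comm is None or comm.size == 1:
        return localDict
    # Gather every rank's dict (entries must be picklable), then merge in rank
    # order so the result is identical, including key order, on all processors.
    merged = OrderedDict()
    for procDict in comm.allgather(localDict):
        for key, value in procDict.items():
            merged.setdefault(key, value)
    return merged

Merging in rank order keeps the final key ordering deterministic across processors, which is the property the new test in test_optProb.py checks.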
54 changes: 46 additions & 8 deletions tests/test_optProb.py
@@ -8,14 +8,26 @@
import numpy as np
from numpy.testing import assert_allclose

try:
    # External modules
    from mpi4py import MPI

    comm = MPI.COMM_WORLD
except ImportError:
    comm = None

# First party modules
from pyoptsparse import OPT, Optimization
from pyoptsparse.testing.pyOpt_testing import assert_optProb_size


class TestOptProb(unittest.TestCase):
N_PROCS = 2
tol = 1e-12

def setUp(self):
self.rng = np.random.default_rng(12345)

def objfunc(self, xdict):
"""
This is a simple quadratic test function with linear constraints.
@@ -58,9 +70,9 @@ def setup_optProb(self, nObj=1, nDV=[4], nCon=[2], xScale=[1.0], objScale=[1.0],
# Design Variables
for iDV in range(len(nDV)):
n = nDV[iDV]
- lower = np.random.uniform(-5, 2, n)
- upper = np.random.uniform(5, 20, n)
- x0 = np.random.uniform(lower, upper)
+ lower = self.rng.uniform(-5, 2, n)
+ upper = self.rng.uniform(5, 20, n)
+ x0 = self.rng.uniform(lower, upper)
dvName = f"x{iDV}"
self.x0[dvName] = x0
self.optProb.addVarGroup(
@@ -76,8 +88,8 @@ def setup_optProb(self, nObj=1, nDV=[4], nCon=[2], xScale=[1.0], objScale=[1.0],
# Constraints
for iCon in range(len(nCon)):
nc = nCon[iCon]
- lower = np.random.uniform(-5, 2, nc)
- upper = np.random.uniform(5, 6, nc)
+ lower = self.rng.uniform(-5, 2, nc)
+ upper = self.rng.uniform(5, 6, nc)
self.optProb.addConGroup(
f"con_{iCon}",
nc,
@@ -155,10 +167,10 @@ def test_mappings(self):
nObj=1,
nDV=nDV,
nCon=nCon,
- xScale=[np.random.rand(i) for i in nDV],
+ xScale=[self.rng.random(i) for i in nDV],
objScale=[0.3],
- conScale=[np.random.rand(i) for i in nCon],
- offset=[np.random.rand(i) * np.arange(i) for i in nDV],
+ conScale=[self.rng.random(i) for i in nCon],
+ offset=[self.rng.random(i) * np.arange(i) for i in nDV],
)

# first test X
@@ -256,6 +268,32 @@ def test_finalize(self):
self.optProb.addCon("CON2")
assert_optProb_size(self.optProb, 2, 13, 6)

def test_parallel_add(self):
"""Check that when different procs add different variables/constraints/objectives, they are all collected
properly when finalized.
"""
if comm is None:
raise unittest.SkipTest("mpi4py not available, skipping test.")
if comm.size != self.N_PROCS:
Review thread on the comm.size check:

Collaborator: When does this condition happen?

A-CGray (Member Author, Oct 19, 2025): If you have mpi4py installed but you run the test through something other than testflo (e.g. unittest or pytest).

Collaborator: IMO this sort of thing does not need to be handled; we explicitly say in the docs to run tests with testflo, so I don't think we need to support running raw unittest.

A-CGray (Oct 20, 2025): True, but whenever I want to interactively debug a failing test I have to run it with unittest, and I'd rather not have to comment this test out every time I want to do that. I guess I could also try changing how the test works so it won't fail if run in serial, but it seemed easier to just do this.

Collaborator: I'll approve, but I don't think we should do things like this in general, because we have tons of MPI-based tests (here or in other repos) and we should not have to add these checks for each one. Maybe in the longer term we should move away from testflo-based testing to something more standard like pytest; it may not be sufficient for packages like ADflow, but the parallel portions of pyOptSparse are minimal and easy to test.
raise unittest.SkipTest("Not running with %d MPI procs, skipping test." % self.N_PROCS)
objNames = [f"parallel-obj{i}" for i in range(comm.size)]
dvNames = [f"parallel-x{i}" for i in range(comm.size)]
conNames = [f"parallel-con{i}" for i in range(comm.size)]

self.setup_optProb(nObj=1, nDV=[4, 8], nCon=[2, 3], xScale=[1.0, 1.0], conScale=[1.0, 1.0], offset=[0, 0])
self.optProb.addObj(objNames[comm.rank])
self.optProb.addVar(dvNames[comm.rank])
self.optProb.addCon(conNames[comm.rank])
self.optProb.finalize()

# Get variables/constraints/objectives from each proc and check they are all the same
allObjNames = comm.allgather(list(self.optProb.objectives.keys()))
self.assertEqual(allObjNames[0], allObjNames[1])
allDVNames = comm.allgather(list(self.optProb.variables.keys()))
self.assertEqual(allDVNames[0], allDVNames[1])
allConNames = comm.allgather(list(self.optProb.constraints.keys()))
self.assertEqual(allConNames[0], allConNames[1])


if __name__ == "__main__":
unittest.main()
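The review thread above mentions the alternative of making the test tolerate a serial run instead of skipping it. Purely as a sketch of that idea (not part of this PR, using only the public calls already exercised by the test, with illustrative names), a standalone serial-tolerant version of the same consistency check might look like:

# Hypothetical serial-tolerant sketch of the consistency check in
# test_parallel_add; not part of this PR.
import numpy as np

from pyoptsparse import Optimization

try:
    from mpi4py import MPI

    comm = MPI.COMM_WORLD
except ImportError:
    comm = None

rank = comm.rank if comm is not None else 0


def objfunc(xdict):
    # Dummy objective; only the bookkeeping of names matters for this check.
    return {"obj": np.sum(xdict["x"] ** 2)}, False


optProb = Optimization("parallel add sketch", objfunc)
optProb.addVarGroup("x", 4, lower=-10, upper=10, value=0.0)
optProb.addObj("obj")

# Each rank contributes its own uniquely named objects, as in test_parallel_add.
optProb.addObj(f"parallel-obj{rank}")
optProb.addVar(f"parallel-x{rank}")
optProb.addCon(f"parallel-con{rank}")
optProb.finalize()

# After finalize() every rank should hold the same merged keys; with a single
# process the allgather degenerates to a one-element list and the check passes
# trivially instead of failing.
for attr in ("objectives", "variables", "constraints"):
    keys = list(getattr(optProb, attr).keys())
    allKeys = comm.allgather(keys) if comm is not None else [keys]
    assert all(k == allKeys[0] for k in allKeys), f"Inconsistent {attr} across ranks"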