pyoptsparse/__init__.py (2 changes: 1 addition & 1 deletion)

@@ -1,4 +1,4 @@
-__version__ = "2.14.4"
+__version__ = "2.14.5"
 
 from .pyOpt_history import History
 from .pyOpt_variable import Variable

pyoptsparse/pyIPOPT/pyIPOPT.py (10 changes: 8 additions & 2 deletions)

@@ -11,6 +11,7 @@
 import numpy as np
 
 # Local modules
+from ..pyOpt_error import pyOptSparseWarning
 from ..pyOpt_optimizer import Optimizer
 from ..pyOpt_solution import SolutionInform
 from ..pyOpt_utils import ICOL, INFINITY, IROW, convertToCOO, extractRows, import_module, scaleRows

@@ -281,12 +282,17 @@ def intermediate(self_cyipopt, alg_mod, iter_count, obj_value, inf_pr, inf_du, m
 
             # Find pyoptsparse call counters for objective and constraints calls at current x.
            # IPOPT calls objective and constraints separately, so we find two call counters and append iterDict to both counters.
-            call_counter_1 = self.hist._searchCallCounter(self.cache["x"])
-            call_counter_2 = self.hist._searchCallCounter(self.cache["x"], last=call_counter_1 - 1)
+            call_counter_1 = self.hist._searchCallCounter(self.optProb._mapXtoUser(self.cache["x"]))

Review discussion on this line:

Collaborator:
Actually, I wonder if we should have some checks in case even with scaling we fail to find the call counter. In such cases we should be able to continue gracefully without raising errors.

Collaborator:
I am not sure whether, from a user's perspective, it would be better to continue with a warning or to exit, but I agree we should probably add a check (try/except?) to make the error more understandable.

Collaborator:
I don't think we should exit. The correct behaviour is to warn if needed and then continue without saving these additional variables. An issue in saving some extra parameters during the optimization should not kill it; it is very important that we continue the optimization loop wherever possible.

Contributor (author):
Modified here to keep going and print a warning if the matching call counter cannot be found.

+            if call_counter_1 is None:
+                call_counter_2 = None
+            else:
+                call_counter_2 = self.hist._searchCallCounter(self.optProb._mapXtoUser(self.cache["x"]), last=call_counter_1 - 1)
 
             for call_counter in [call_counter_2, call_counter_1]:
                 if call_counter is not None:
                     self.hist.write(call_counter, iterDict)
+                else:
+                    pyOptSparseWarning("Failed to find a corresponding call counter at current x. Skipping writing to history file.")
 
             if self.userRequestedTermination is True:
                 return False
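
The pattern the reviewers converged on here — map IPOPT's optimizer-space x back to user space before searching the history, and fall back to a warning instead of an exception when no entry matches — can be sketched in isolation. Everything below is a hypothetical stand-in for illustration: find_call_counter and record_ipopt_iteration are not pyoptsparse APIs, and the mapping x_user = x_scaled / scale + offset is an assumption consistent with the scaling relation the tests below assert.

import warnings

import numpy as np


def find_call_counter(history, x_user, last=None):
    """Toy stand-in for History._searchCallCounter: scan recorded
    call counters (newest first) for one whose stored x matches x_user."""
    counters = sorted(c for c in history if last is None or c <= last)
    for counter in reversed(counters):
        if np.allclose(history[counter]["xuser"], x_user):
            return counter
    return None  # no match; callers must degrade gracefully


def record_ipopt_iteration(history, x_scaled, scale, offset, iter_dict):
    # Map the optimizer-space point back to user space before searching,
    # mirroring what optProb._mapXtoUser does in the PR (assumed relation:
    # x_user = x_scaled / scale + offset).
    x_user = np.asarray(x_scaled) / scale + offset

    # IPOPT evaluates the objective and constraints separately, so two
    # consecutive call counters can share the same x.
    cc1 = find_call_counter(history, x_user)
    cc2 = None if cc1 is None else find_call_counter(history, x_user, last=cc1 - 1)

    for cc in (cc2, cc1):
        if cc is not None:
            history[cc].update(iter_dict)  # append IPOPT's iteration data
        else:
            # Warn and continue: a bookkeeping miss should not abort the run.
            warnings.warn("No matching call counter at current x; skipping history write.")


# Minimal demo: two history entries recorded at the same user-space point.
hist = {0: {"xuser": np.array([1.0, 4.7])}, 1: {"xuser": np.array([1.0, 4.7])}}
record_ipopt_iteration(hist, x_scaled=np.array([0.0, 3.7]), scale=1.0, offset=1.0, iter_dict={"inf_pr": 1e-6})
assert "inf_pr" in hist[0] and "inf_pr" in hist[1]

Writing to the two most recent matching counters mirrors the PR's [call_counter_2, call_counter_1] loop, since IPOPT's separate objective and constraint evaluations produce two history entries at the same x.
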
tests/test_hs071.py (18 changes: 11 additions & 7 deletions)

@@ -117,24 +117,28 @@ def test_snopt_setDVFromHist(self):
         # since we restarted from the optimum
         self.assertEqual(second["xvars"].shape, (1, 4))
 
-    def test_slsqp_scaling_offset_optProb(self):
+    @parameterized.expand(["SLSQP", "IPOPT"])
+    def test_scaling_offset_optProb(self, optName):
         """
         Test that scaling and offset works as expected
         Also test optProb stored in the history file is correct
         """
-        self.optName = "SLSQP"
-        histFileName = "hs071_SLSQP_scaling_offset.hst"
+        self.optName = optName
+        histFileName = f"hs071_{optName}_scaling_offset.hst"
         objScale = 4.2
         xScale = [2, 3, 4, 5]
         conScale = [0.6, 1.7]
         offset = [1, -2, 40, 2.5]
         self.setup_optProb(objScale=objScale, xScale=xScale, conScale=conScale, offset=offset)
         sol = self.optimize(storeHistory=histFileName)
-        self.assert_solution_allclose(sol, self.tol["SLSQP"])
+        lambda_sign = -1.0 if optName == "IPOPT" else 1.0
+        self.assert_solution_allclose(sol, self.tol[optName], lambda_sign=lambda_sign)
         # now we retrieve the history file, and check the scale=True option is indeed
         # scaling things correctly
+        # IPOPT calls gradient and jacobian at first, so set callCounter = 2 for the first non-derivative call
+        callCounter = "2" if optName == "IPOPT" else "0"
         hist = History(histFileName, flag="r")
-        orig_values = hist.getValues(callCounters="0", scale=False)
+        orig_values = hist.getValues(callCounters=callCounter, scale=False)
         optProb = hist.getOptProb()
 
         # check that the scales are stored properly
@@ -147,13 +151,13 @@ def test_slsqp_scaling_offset_optProb(self):
             assert_allclose(objScale, optProb.objectives[obj].scale, atol=1e-12, rtol=1e-12)
 
         # verify the scale option in getValues
-        scaled_values = hist.getValues(callCounters="0", scale=True, stack=False)
+        scaled_values = hist.getValues(callCounters=callCounter, scale=True, stack=False)
         x = orig_values["xvars"][0]
         x_scaled = scaled_values["xvars"][0]
         assert_allclose(x_scaled, (x - offset) * xScale, atol=1e-12, rtol=1e-12)
 
         # now do the same but with stack=True
-        stacked_values = hist.getValues(callCounters="0", scale=True, stack=True)
+        stacked_values = hist.getValues(callCounters=callCounter, scale=True, stack=True)
         x_scaled = stacked_values["xuser"][0]
         assert_allclose(x_scaled, (x - offset) * xScale, atol=1e-12, rtol=1e-12)

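As a footnote to these asserts: the scaling convention they encode is x_scaled = (x_user - offset) * xScale, whose inverse, x_user = x_scaled / xScale + offset, is the user-space mapping the pyIPOPT change relies on when matching history entries. A standalone round-trip check using the test's own scale and offset values (the x_user vector is the well-known approximate HS071 optimum, used here only as sample data):

import numpy as np

xScale = np.array([2.0, 3.0, 4.0, 5.0])
offset = np.array([1.0, -2.0, 40.0, 2.5])
x_user = np.array([1.0, 4.743, 3.821, 1.379])  # approximate HS071 optimum

# Forward map: what History.getValues(scale=True) is checked against.
x_scaled = (x_user - offset) * xScale

# Inverse map back to user space recovers the original point.
assert np.allclose(x_scaled / xScale + offset, x_user, atol=1e-12)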