I'm trying to pass options to an NLP I'm solving in cyipopt.
These options would affect the objective in the same way in every iteration. For example, the tutorial problem is to minimize
x_1 * x_4 * (x_1 + x_2 + x_3) + x_3
subject to some constraints (see https://pythonhosted.org/ipopt/tutorial.html).
I'd like to solve the related problem
scale * x_1 * x_4 * (x_1 + x_2 + x_3) + x_3
where scale is a parameter that is set before optimization. The code below shows how to set up the problem in cyipopt, but scale is hardcoded as 2. How can I pass it in as an option so that it can be changed flexibly?
import ipopt
import numpy as np


class hs071(object):
    def __init__(self):
        pass

    def objective(self, x):
        # The callback for calculating the objective
        scale = 2  # hardcoded; this is the value I want to make configurable
        return scale * x[0] * x[3] * np.sum(x[0:3]) + x[2]

    def gradient(self, x):
        # The callback for calculating the gradient
        scale = 2  # hardcoded here as well
        return np.array([
            scale * x[0] * x[3] + scale * x[3] * np.sum(x[0:3]),
            scale * x[0] * x[3],
            scale * x[0] * x[3] + 1.0,
            scale * x[0] * np.sum(x[0:3])
        ])

    def constraints(self, x):
        # The callback for calculating the constraints
        return np.array((np.prod(x), np.dot(x, x)))

    def jacobian(self, x):
        # The callback for calculating the Jacobian
        return np.concatenate((np.prod(x) / x, 2 * x))


x0 = [1.0, 5.0, 5.0, 1.0]
lb = [1.0, 1.0, 1.0, 1.0]
ub = [5.0, 5.0, 5.0, 5.0]
cl = [25.0, 40.0]
cu = [2.0e19, 40.0]

nlp = ipopt.problem(
    n=len(x0),
    m=len(cl),
    problem_obj=hs071(),
    lb=lb,
    ub=ub,
    cl=cl,
    cu=cu
)
x, info = nlp.solve(x0)
NB: defining globals works but is sloppy. There must be a cleaner way to do this, since this is also how you would attach problem data to an optimization problem.
Add them to the problem class itself. The callbacks are methods, so they can read any attribute you set on the instance:
import ipopt
import numpy as np


class hs071(object):
    def __init__(self):
        pass

    def objective(self, x):
        # The callback for calculating the objective
        scale = self.scale
        return scale * x[0] * x[3] * np.sum(x[0:3]) + x[2]

    def gradient(self, x):
        # The callback for calculating the gradient
        scale = self.scale
        return np.array([
            scale * x[0] * x[3] + scale * x[3] * np.sum(x[0:3]),
            scale * x[0] * x[3],
            scale * x[0] * x[3] + 1.0,
            scale * x[0] * np.sum(x[0:3])
        ])

    def constraints(self, x):
        # The callback for calculating the constraints
        return np.array((np.prod(x), np.dot(x, x)))

    def jacobian(self, x):
        # The callback for calculating the Jacobian
        return np.concatenate((np.prod(x) / x, 2 * x))


x0 = [1.0, 5.0, 5.0, 1.0]
lb = [1.0, 1.0, 1.0, 1.0]
ub = [5.0, 5.0, 5.0, 5.0]
cl = [25.0, 40.0]
cu = [2.0e19, 40.0]

model = hs071()
model.scale = 2
nlp = ipopt.problem(
    n=len(x0),
    m=len(cl),
    problem_obj=model,
    lb=lb,
    ub=ub,
    cl=cl,
    cu=cu
)
x, info = nlp.solve(x0)
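If you would rather fix the parameter at construction time than assign the attribute afterwards, a small variant is to accept it in __init__. This is just a sketch of the same idea; the subclass name hs071_scaled is only for illustration, and all the callbacks are inherited unchanged from the class above.

class hs071_scaled(hs071):
    def __init__(self, scale=1.0):
        # Store problem data/parameters on the instance so that
        # every callback can read them through self.
        self.scale = scale

model = hs071_scaled(scale=2)
nlp = ipopt.problem(
    n=len(x0),
    m=len(cl),
    problem_obj=model,
    lb=lb,
    ub=ub,
    cl=cl,
    cu=cu
)
x, info = nlp.solve(x0)

Either way, the key point is the same: anything the callbacks need beyond x lives on the problem object, so no globals are required and you can build several instances with different parameter values.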