I am trying to do a simple minimisation as below using SciPy optimise, but the expected results are NOT matching the optimiser output:
Python
# Minimise the sample std-dev of the portfolio series df @ x, subject to
# sum(x) == 1, per-class allocation limits, and box bounds on each weight.
import numpy as np
import pandas as pd
import scipy.optimize as sp

x0 = [0.2, 0.2, 0.2, 0.4]
x_expected = np.array([0., 0., 0., 1])
bounds = ((0, 1), (0, 1), (0, 1), (0, 1))
df = pd.DataFrame(
    {'t1': [1, 2, 3, 2, 1], 't2': [3, 4, 5, 6, 2], 't3': [2, 3, 2, 3, 5], 'c': [1, 1.1, 1.2, 1.3, 1.4]}).to_numpy()

factors = pd.DataFrame(
    {"tk": ["t1", "t2", "t3", "c"], "class": ["x", "y", "x", "z"]})
min_max_class = pd.DataFrame(
    {"class": ["x", "y", "z"], "min_allocation": [0., 0., 0.], "max_allocation": [0.0001, 0.5, 1.0000]})

# Map each factor to its class limits.
allocation = factors.join(min_max_class.set_index('class'), on='class')
# Renamed from `min`/`max`: don't shadow the builtins.
min_alloc = allocation['min_allocation'].to_numpy()
max_alloc = allocation['max_allocation'].to_numpy()
# Non-negative exactly when every weight lies inside [min_alloc, max_alloc].
ineq1 = {'type': 'ineq',
         "fun": lambda x: np.greater_equal(x, min_alloc).sum() + np.less_equal(x, max_alloc).sum() - 2 * df.shape[1]}
# Zero exactly when the weights sum to one.
eq1 = {'type': 'eq', "fun": lambda x: 1 - np.sum(x)}

result = sp.minimize(fun=lambda x: np.std(np.dot(df, x.T), ddof=1), x0=x0, bounds=bounds, method='SLSQP',
                     options={'disp': True}, constraints=[eq1, ineq1])

print(f"final result : {np.round(result.x,2)}, objective value: {result.fun}")
# Bug fix: use ddof=1 here too so the manual value is comparable with result.fun
# (the original used NumPy's default ddof=0, an apples-to-oranges comparison).
print(f" Manual : {x_expected}, objective value: {np.std(np.dot(df, x_expected.T), ddof=1)}")
I would expect the final result to be close to `x_expected`, but that is not the case. Any ideas?
Advertisement
Answer
The SLSQP solver failed to find the optimal value for your problem.
Python
def obj(x):
    """Objective: sample standard deviation (ddof=1) of the weighted series df @ x."""
    return np.std(df @ x, ddof=1)


def ineq(x):
    """Inequality constraint: non-negative iff all weights lie within [min, max]."""
    at_or_above_floor = (x >= min).sum()
    at_or_below_cap = (x <= max).sum()
    return at_or_above_floor + at_or_below_cap - 2 * df.shape[1]


def eq(x):
    """Equality constraint: zero iff the weights sum to one."""
    return 1 - np.sum(x)


result = scipy.optimize.minimize(fun=obj, x0=x0, bounds=bounds, method='SLSQP',
                                 options={'disp': True}, constraints=[eq1, ineq1])
> "Positive directional derivative for linesearch (Exit mode 8)"
Therefore, the inequality constraint does not hold at the returned solution.
Python
# Round the optimiser's solution for display, then compare both candidates
# on the objective and both constraints.
x_opt = result.x.round(2)

for label, candidate in (("opt. result ", x_opt), ("manual result ", x_expected)):
    print(f"{label}: {candidate}, objective: {obj(candidate)}, ineq: {ineq(candidate)}, eq: {eq(candidate)}")
Output:
opt. result : [0.03 0. 0. 0.97], objective: 0.1554107460891942, ineq: -1, eq: 0.0
manual result : [0. 0. 0. 1.], objective: 0.15811388300841892, ineq: 0, eq: 0.0
The initial guess `[0, 0, 0, 0]` can help the solver find the optimal value for your problem.
Python
# Re-run SLSQP from the all-zero initial guess; from this start the solver
# reaches the true optimum [0, 0, 0, 1].
x0 = [0.0, 0.0, 0.0, 0.0]
bounds = ((0, 1), (0, 1), (0, 1), (0, 1))
ineq1 = {'type': 'ineq', "fun": ineq}
eq1 = {'type': 'eq', "fun": eq}

result = scipy.optimize.minimize(fun=obj, x0=x0, bounds=bounds, method='SLSQP',
                                 options={'disp': True}, constraints=[eq1, ineq1])
# > Optimization terminated successfully (Exit mode 0)

# Bug fix: recompute x_opt from the NEW result before printing — the original
# snippet printed the stale x_opt left over from the first (failed) run.
x_opt = np.round(result.x, 2)
print(f"opt. result : {x_opt}, objective: {obj(x_opt)}, ineq: {ineq(x_opt)}, eq: {eq(x_opt)}")
# > opt. result : [0. 0. 0. 1.], objective: 0.15811388300841892, ineq: 0, eq: 0.0