Double inequality constraint in Gekko

I have an optimization problem in which some of the inequality constraints can either be equal to 0 or be greater than a certain value. For example, in the code below, qtde and c1 are lists, while mm and pp are 2-D numpy arrays.

import numpy as np
from gekko import GEKKO

qtde = [7, 2, 2, 12, 2, 7, 1.5, 8, 4, 16, 2, 1, 3, 0.2, 3, 1, 1, 10, 8, 5, 3, 2.5, 5, 2.5, 10, 3, 1, 6, 12, 2, 6, 1, 4, 1, 2, 10, 1, 1, 1, 1]
c1 = [26.0, 150.0, 300.0, 110.0, 400.0, 500.0, 200.0, 200.0, 27.0, 150.0, 50.0, 200.0, 75.0, 0.0, 250.0, 22.8, 300.0, 22.8, 22.8, 150.0, 300.0, 150.0, 100.0, 100.0, 1000.0, 150.0, 150.0, 200.0, 31.2, 100.0, 100.0, 50.0, 23.0, 300.0, 200.0, 300.0, 0.0, 300.0, 30.0, 26.0, 300.0, 300.0, 250.0, 100.0, 100.0, 200.0, 400.0, 21.2, 200.0, 500.0, 0.0]

mm = [[4,0,0,0,0,0,0,0,0,0,9,0,0,0,0,0,5,0,2,0,0,0,7,0,0,0,6,0,0,0,8,0,0,0,0,0,0,0,0,0,3,0,1,0,0,0,0,0,0,0,0],
[0,0,0,0,0,0,0,0,0,0,13,0,0,0,0,0,0,0,0,0,0,0,12,0,0,0,14,0,0,0,0,0,0,0,0,0,0,0,0,0,11,0,10,0,0,0,0,0,0,0,0],
[0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,19,0,0,0,0,0,0,17,15,0,0,16,0,0,18,0,0,0,0,0,0,0,0,0,0],
[26,0,0,0,0,0,0,0,0,0,27,0,0,0,0,0,0,0,21,0,0,0,25,0,0,0,23,0,0,0,22,0,0,0,0,0,0,0,0,0,24,0,20,0,0,0,0,0,0,0,0],
[29,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,34,0,0,0,0,0,0,0,30,0,0,31,0,0,0,0,0,0,0,32,0,0,33,0,28,0,0,0,0,0,0,0,0],
[0,0,0,0,0,0,0,0,0,0,40,0,0,0,0,0,0,0,37,0,0,0,36,0,0,0,38,0,0,0,39,0,0,0,0,0,0,0,0,0,0,0,35,0,0,0,0,0,0,0,0],
[42,0,0,0,0,0,0,0,0,0,48,0,0,0,0,0,44,0,43,0,0,0,49,0,0,0,46,0,0,0,47,0,0,0,0,0,0,0,0,0,45,0,41,0,0,0,0,0,0,0,0],
[0,0,0,0,0,0,0,0,0,0,54,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,53,0,0,0,52,0,0,0,0,0,0,0,0,0,51,0,50,0,0,0,0,0,0,0,0],
[0,0,0,0,0,0,0,0,0,0,60,0,0,0,0,0,0,0,56,0,0,0,59,0,0,0,57,0,0,0,58,0,0,0,0,0,0,0,0,0,0,0,55,0,0,0,0,0,0,0,0],
[69,0,0,0,0,0,0,0,0,0,68,0,0,0,0,0,61,0,0,0,0,0,64,0,0,0,63,0,0,0,65,0,0,0,0,0,0,67,0,0,62,0,66,0,0,0,0,0,0,0,0],
[0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,71,0,70,0,0,0,0,0,0,0,0],
[0,0,0,0,0,0,0,0,0,0,78,0,0,0,0,0,77,0,0,0,0,0,73,0,0,0,76,0,0,0,75,0,0,0,0,0,0,0,0,0,74,0,72,0,0,0,0,0,0,0,0],
[0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,80,0,0,0,79,0,0,0,82,0,0,0,0,0,0,0,0,0,83,0,81,0,0,0,0,0,0,0,0],
[0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,86,0,0,0,84,0,0,0,0,0,0,0,0,0,85,0,87,0,0,0,0,0,0,0,0],
[93,0,0,0,0,0,0,0,0,0,95,0,0,0,0,0,94,0,92,0,0,0,90,0,0,0,91,0,0,0,96,0,0,0,0,0,0,0,0,0,89,0,88,0,0,0,0,0,0,0,0],
[104,0,0,0,0,0,0,0,0,0,100,0,0,0,0,0,99,0,98,0,0,0,103,0,0,0,101,0,0,0,102,0,0,0,0,0,0,0,0,0,0,0,97,0,0,0,0,0,0,0,0],
[112,0,0,0,0,0,0,0,0,0,108,0,0,0,0,0,110,0,107,0,0,0,111,0,0,0,109,0,0,0,113,0,0,0,0,0,0,0,0,0,106,0,105,0,0,0,0,0,0,0,0],
[114,0,0,0,0,0,0,0,0,0,116,0,0,0,0,0,117,0,119,0,0,0,115,0,0,0,118,0,0,0,120,0,0,0,0,0,0,0,0,0,121,0,122,0,0,0,0,0,0,0,0],
[0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,123,0,0,0,0,0,0,0,0],
[0,129,0,0,0,0,126,0,0,0,0,0,0,128,0,0,0,0,0,0,0,0,0,0,0,0,0,127,125,0,0,0,0,0,0,0,0,0,0,130,0,0,0,0,0,124,0,131,0,0,0],
[0,133,0,0,0,0,136,0,0,0,0,0,0,135,0,0,0,0,0,0,0,0,0,0,0,0,0,132,0,0,0,0,0,0,0,0,0,0,134,0,0,0,0,0,0,0,0,0,0,0,0],
[0,0,0,0,0,0,0,0,0,0,0,0,0,138,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,137,0,0,0,0,0],
[0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,139,0,0,0,0,0,0,0,0,0,0,0,0,140,0,0,0,0,0,0,0,0,0,0,0,0,0,141],
[0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,142,0,143,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0],
[0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,144,0,0,0,150,0,146,0,149,0,0,0,0,0,0,152,0,0,0,145,0,0,0,0,147,0,0,151,0,0,0,0,0,148],
[0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,154,0,0,0,0,0,153,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0],
[0,0,0,0,155,0,0,0,157,0,0,156,0,0,0,158,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,160,0,0,0,0,0,0,0,0,0,0,0,0,0,159,0],
[0,0,0,0,0,0,0,0,0,0,0,161,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0],
[0,0,0,0,0,0,0,0,0,0,0,0,164,0,0,163,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,162,0],
[0,0,165,0,0,0,0,0,0,166,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0],
[0,0,167,169,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,170,0,0,0,0,0,0,0,0,0,0,168,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0],
[0,0,0,0,173,0,0,0,0,0,0,175,177,0,0,171,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,176,0,0,0,0,0,0,0,0,0,0,0,0,174,172,0],
[0,0,0,0,0,0,0,0,0,0,0,0,180,0,0,178,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,179,0],
[0,0,0,0,182,184,0,186,0,0,0,183,185,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,181,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0],
[0,0,0,0,0,0,0,0,0,0,0,190,191,0,0,187,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,189,0,0,0,0,0,0,0,0,0,0,0,0,0,188,0],
[0,0,193,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,192,0,0,0,0],
[0,0,197,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,196,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,195,0,0,194,0,0,0,0],
[0,0,0,0,0,0,0,0,0,199,0,0,0,0,201,0,0,0,0,0,0,0,200,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,198,0,0,0,0,0,0,0,0],
[0,0,0,0,0,0,0,0,0,203,0,0,0,0,204,0,0,0,0,0,0,0,0,0,0,0,0,0,0,202,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0],
[0,0,0,0,0,0,0,0,0,0,0,0,0,0,205,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0]]

mm = np.array(mm)
#
pp = [[5.00,0.00,0.00,0.00,0.00,0.00,0.00,0.00,0.00,0.00,7.90,0.00,0.00,0.00,0.00,0.00,5.49,0.00,2.89,0.00,0.00,0.00,5.98,0.00,0.00,0.00,5.94,0.00,0.00,0.00,6.21,0.00,0.00,0.00,0.00,0.00,0.00,0.00,0.00,0.00,3.55,0.00,2.89,0.00,0.00,0.00,0.00,0.00,0.00,0.00,0.00],
[0.00,0.00,0.00,0.00,0.00,0.00,0.00,0.00,0.00,0.00,5.70,0.00,0.00,0.00,0.00,0.00,0.00,0.00,0.00,0.00,0.00,0.00,3.61,0.00,0.00,0.00,5.80,0.00,0.00,0.00,0.00,0.00,0.00,0.00,0.00,0.00,0.00,0.00,0.00,0.00,3.15,0.00,3.15,0.00,0.00,0.00,0.00,0.00,0.00,0.00,0.00],
[0.00,0.00,0.00,0.00,0.00,0.00,0.00,0.00,0.00,0.00,0.00,0.00,0.00,0.00,0.00,0.00,0.00,0.00,0.00,0.00,0.00,0.00,0.00,0.00,0.00,0.00,15.95,0.00,0.00,0.00,0.00,0.00,0.00,14.00,11.95,0.00,0.00,12.36,0.00,0.00,14.18,0.00,0.00,0.00,0.00,0.00,0.00,0.00,0.00,0.00,0.00],
[3.00,0.00,0.00,0.00,0.00,0.00,0.00,0.00,0.00,0.00,3.00,0.00,0.00,0.00,0.00,0.00,0.00,0.00,2.20,0.00,0.00,0.00,2.80,0.00,0.00,0.00,2.29,0.00,0.00,0.00,2.27,0.00,0.00,0.00,0.00,0.00,0.00,0.00,0.00,0.00,2.61,0.00,2.20,0.00,0.00,0.00,0.00,0.00,0.00,0.00,0.00],
[3.00,0.00,0.00,0.00,0.00,0.00,0.00,0.00,0.00,0.00,0.00,0.00,0.00,0.00,0.00,0.00,0.00,0.00,9.76,0.00,0.00,0.00,0.00,0.00,0.00,0.00,5.70,0.00,0.00,6.47,0.00,0.00,0.00,0.00,0.00,0.00,0.00,7.47,0.00,0.00,8.51,0.00,3.00,0.00,0.00,0.00,0.00,0.00,0.00,0.00,0.00],
[0.00,0.00,0.00,0.00,0.00,0.00,0.00,0.00,0.00,0.00,10.50,0.00,0.00,0.00,0.00,0.00,0.00,0.00,9.52,0.00,0.00,0.00,9.10,0.00,0.00,0.00,9.57,0.00,0.00,0.00,9.62,0.00,0.00,0.00,0.00,0.00,0.00,0.00,0.00,0.00,0.00,0.00,9.10,0.00,0.00,0.00,0.00,0.00,0.00,0.00,0.00],
[6.75,0.00,0.00,0.00,0.00,0.00,0.00,0.00,0.00,0.00,9.50,0.00,0.00,0.00,0.00,0.00,7.98,0.00,6.99,0.00,0.00,0.00,11.05,0.00,0.00,0.00,8.55,0.00,0.00,0.00,8.88,0.00,0.00,0.00,0.00,0.00,0.00,0.00,0.00,0.00,8.27,0.00,6.75,0.00,0.00,0.00,0.00,0.00,0.00,0.00,0.00],
[0.00,0.00,0.00,0.00,0.00,0.00,0.00,0.00,0.00,0.00,11.20,0.00,0.00,0.00,0.00,0.00,0.00,0.00,0.00,0.00,0.00,0.00,0.00,0.00,0.00,0.00,10.95,0.00,0.00,0.00,9.75,0.00,0.00,0.00,0.00,0.00,0.00,0.00,0.00,0.00,9.63,0.00,9.16,0.00,0.00,0.00,0.00,0.00,0.00,0.00,0.00],
[0.00,0.00,0.00,0.00,0.00,0.00,0.00,0.00,0.00,0.00,3.00,0.00,0.00,0.00,0.00,0.00,0.00,0.00,1.69,0.00,0.00,0.00,1.98,0.00,0.00,0.00,1.77,0.00,0.00,0.00,1.96,0.00,0.00,0.00,0.00,0.00,0.00,0.00,0.00,0.00,0.00,0.00,1.69,0.00,0.00,0.00,0.00,0.00,0.00,0.00,0.00],
[10.00,0.00,0.00,0.00,0.00,0.00,0.00,0.00,0.00,0.00,7.10,0.00,0.00,0.00,0.00,0.00,1.59,0.00,0.00,0.00,0.00,0.00,1.95,0.00,0.00,0.00,1.74,0.00,0.00,0.00,2.09,0.00,0.00,0.00,0.00,0.00,0.00,6.43,0.00,0.00,1.70,0.00,2.83,0.00,0.00,0.00,0.00,0.00,0.00,0.00,0.00],
[0.00,0.00,0.00,0.00,0.00,0.00,0.00,0.00,0.00,0.00,0.00,0.00,0.00,0.00,0.00,0.00,0.00,0.00,0.00,0.00,0.00,0.00,0.00,0.00,0.00,0.00,0.00,0.00,0.00,0.00,0.00,0.00,0.00,0.00,0.00,0.00,0.00,0.00,0.00,0.00,9.93,0.00,9.93,0.00,0.00,0.00,0.00,0.00,0.00,0.00,0.00],
[0.00,0.00,0.00,0.00,0.00,0.00,0.00,0.00,0.00,0.00,18.40,0.00,0.00,0.00,0.00,0.00,14.49,0.00,0.00,0.00,0.00,0.00,12.89,0.00,0.00,0.00,14.36,0.00,0.00,0.00,13.76,0.00,0.00,0.00,0.00,0.00,0.00,0.00,0.00,0.00,13.48,0.00,11.91,0.00,0.00,0.00,0.00,0.00,0.00,0.00,0.00],
[0.00,0.00,0.00,0.00,0.00,0.00,0.00,0.00,0.00,0.00,0.00,0.00,0.00,0.00,0.00,0.00,0.00,0.00,0.00,0.00,0.00,0.00,9.39,0.00,0.00,0.00,7.97,0.00,0.00,0.00,9.57,0.00,0.00,0.00,0.00,0.00,0.00,0.00,0.00,0.00,10.24,0.00,9.49,0.00,0.00,0.00,0.00,0.00,0.00,0.00,0.00],
[0.00,0.00,0.00,0.00,0.00,0.00,0.00,0.00,0.00,0.00,0.00,0.00,0.00,0.00,0.00,0.00,0.00,0.00,0.00,0.00,0.00,0.00,0.00,0.00,0.00,0.00,33.35,0.00,0.00,0.00,14.80,0.00,0.00,0.00,0.00,0.00,0.00,0.00,0.00,0.00,18.00,0.00,72.90,0.00,0.00,0.00,0.00,0.00,0.00,0.00,0.00],
[5.70,0.00,0.00,0.00,0.00,0.00,0.00,0.00,0.00,0.00,6.00,0.00,0.00,0.00,0.00,0.00,5.78,0.00,4.50,0.00,0.00,0.00,3.90,0.00,0.00,0.00,4.06,0.00,0.00,0.00,6.46,0.00,0.00,0.00,0.00,0.00,0.00,0.00,0.00,0.00,3.55,0.00,3.55,0.00,0.00,0.00,0.00,0.00,0.00,0.00,0.00],
[4.50,0.00,0.00,0.00,0.00,0.00,0.00,0.00,0.00,0.00,3.60,0.00,0.00,0.00,0.00,0.00,3.19,0.00,2.69,0.00,0.00,0.00,4.12,0.00,0.00,0.00,3.75,0.00,0.00,0.00,4.00,0.00,0.00,0.00,0.00,0.00,0.00,0.00,0.00,0.00,0.00,0.00,2.69,0.00,0.00,0.00,0.00,0.00,0.00,0.00,0.00],
[5.70,0.00,0.00,0.00,0.00,0.00,0.00,0.00,0.00,0.00,3.80,0.00,0.00,0.00,0.00,0.00,4.65,0.00,3.69,0.00,0.00,0.00,5.42,0.00,0.00,0.00,4.50,0.00,0.00,0.00,6.40,0.00,0.00,0.00,0.00,0.00,0.00,0.00,0.00,0.00,3.55,0.00,3.55,0.00,0.00,0.00,0.00,0.00,0.00,0.00,0.00],
[4.00,0.00,0.00,0.00,0.00,0.00,0.00,0.00,0.00,0.00,5.40,0.00,0.00,0.00,0.00,0.00,5.49,0.00,6.60,0.00,0.00,0.00,4.33,0.00,0.00,0.00,6.38,0.00,0.00,0.00,6.92,0.00,0.00,0.00,0.00,0.00,0.00,0.00,0.00,0.00,7.09,0.00,8.68,0.00,0.00,0.00,0.00,0.00,0.00,0.00,0.00],
[0.00,0.00,0.00,0.00,0.00,0.00,0.00,0.00,0.00,0.00,0.00,0.00,0.00,0.00,0.00,0.00,0.00,0.00,0.00,0.00,0.00,0.00,0.00,0.00,0.00,0.00,0.00,0.00,0.00,0.00,0.00,0.00,0.00,0.00,0.00,0.00,0.00,0.00,0.00,0.00,0.00,0.00,8.68,0.00,0.00,0.00,0.00,0.00,0.00,0.00,0.00],
[0.00,18.99,0.00,0.00,0.00,0.00,16.98,0.00,0.00,0.00,0.00,0.00,0.00,17.80,0.00,0.00,0.00,0.00,0.00,0.00,0.00,0.00,0.00,0.00,0.00,0.00,0.00,17.20,16.00,0.00,0.00,0.00,0.00,0.00,0.00,0.00,0.00,0.00,0.00,28.58,0.00,0.00,0.00,0.00,0.00,13.99,0.00,30.45,0.00,0.00,0.00],
[0.00,9.49,0.00,0.00,0.00,0.00,34.98,0.00,0.00,0.00,0.00,0.00,0.00,18.90,0.00,0.00,0.00,0.00,0.00,0.00,0.00,0.00,0.00,0.00,0.00,0.00,0.00,8.77,0.00,0.00,0.00,0.00,0.00,0.00,0.00,0.00,0.00,0.00,15.90,0.00,0.00,0.00,0.00,0.00,0.00,0.00,0.00,0.00,0.00,0.00,0.00],
[0.00,0.00,0.00,0.00,0.00,0.00,0.00,0.00,0.00,0.00,0.00,0.00,0.00,47.90,0.00,0.00,0.00,0.00,0.00,0.00,0.00,0.00,0.00,0.00,0.00,0.00,0.00,0.00,0.00,0.00,0.00,0.00,0.00,0.00,0.00,0.00,0.00,0.00,0.00,0.00,0.00,0.00,0.00,0.00,0.00,38.39,0.00,0.00,0.00,0.00,0.00],
[0.00,0.00,0.00,0.00,0.00,0.00,0.00,0.00,0.00,0.00,0.00,0.00,0.00,0.00,0.00,0.00,0.00,0.00,0.00,0.00,0.00,0.00,0.00,89.00,0.00,0.00,0.00,0.00,0.00,0.00,0.00,0.00,0.00,0.00,0.00,0.00,91.00,0.00,0.00,0.00,0.00,0.00,0.00,0.00,0.00,0.00,0.00,0.00,0.00,0.00,92.00],
[0.00,0.00,0.00,0.00,0.00,0.00,0.00,0.00,0.00,0.00,0.00,0.00,0.00,0.00,0.00,0.00,0.00,0.00,0.00,0.00,0.00,66.89,0.00,79.00,0.00,0.00,0.00,0.00,0.00,0.00,0.00,0.00,0.00,0.00,0.00,0.00,0.00,0.00,0.00,0.00,0.00,0.00,0.00,0.00,0.00,0.00,0.00,0.00,0.00,0.00,0.00],
[0.00,0.00,0.00,0.00,0.00,0.00,0.00,0.00,0.00,0.00,0.00,0.00,0.00,0.00,0.00,0.00,0.00,27.30,0.00,0.00,0.00,36.90,0.00,29.50,0.00,36.00,0.00,0.00,0.00,0.00,0.00,0.00,49.90,0.00,0.00,0.00,28.90,0.00,0.00,0.00,0.00,31.99,0.00,0.00,42.00,0.00,0.00,0.00,0.00,0.00,33.50],
[0.00,0.00,0.00,0.00,0.00,0.00,0.00,0.00,0.00,0.00,0.00,0.00,0.00,0.00,0.00,0.00,0.00,0.00,0.00,65.00,0.00,0.00,0.00,0.00,0.00,23.00,0.00,0.00,0.00,0.00,0.00,0.00,0.00,0.00,0.00,0.00,0.00,0.00,0.00,0.00,0.00,0.00,0.00,0.00,0.00,0.00,0.00,0.00,0.00,0.00,0.00],
[0.00,0.00,0.00,0.00,12.89,0.00,0.00,0.00,13.99,0.00,0.00,13.90,0.00,0.00,0.00,14.32,0.00,0.00,0.00,0.00,0.00,0.00,0.00,0.00,0.00,0.00,0.00,0.00,0.00,0.00,0.00,0.00,0.00,0.00,0.00,16.50,0.00,0.00,0.00,0.00,0.00,0.00,0.00,0.00,0.00,0.00,0.00,0.00,0.00,15.57,0.00],
[0.00,0.00,0.00,0.00,0.00,0.00,0.00,0.00,0.00,0.00,0.00,36.75,0.00,0.00,0.00,0.00,0.00,0.00,0.00,0.00,0.00,0.00,0.00,0.00,0.00,0.00,0.00,0.00,0.00,0.00,0.00,0.00,0.00,0.00,0.00,0.00,0.00,0.00,0.00,0.00,0.00,0.00,0.00,0.00,0.00,0.00,0.00,0.00,0.00,0.00,0.00],
[0.00,0.00,0.00,0.00,0.00,0.00,0.00,0.00,0.00,0.00,0.00,0.00,58.73,0.00,0.00,53.43,0.00,0.00,0.00,0.00,0.00,0.00,0.00,0.00,0.00,0.00,0.00,0.00,0.00,0.00,0.00,0.00,0.00,0.00,0.00,0.00,0.00,0.00,0.00,0.00,0.00,0.00,0.00,0.00,0.00,0.00,0.00,0.00,0.00,51.85,0.00],
[0.00,0.00,5.39,0.00,0.00,0.00,0.00,0.00,0.00,6.90,0.00,0.00,0.00,0.00,0.00,0.00,0.00,0.00,0.00,0.00,0.00,0.00,0.00,0.00,0.00,0.00,0.00,0.00,0.00,0.00,0.00,0.00,0.00,0.00,0.00,0.00,0.00,0.00,0.00,0.00,0.00,0.00,0.00,0.00,0.00,0.00,0.00,0.00,0.00,0.00,0.00],
[0.00,0.00,12.36,14.63,0.00,0.00,0.00,0.00,0.00,0.00,0.00,0.00,0.00,0.00,0.00,0.00,0.00,0.00,0.00,0.00,0.00,0.00,0.00,0.00,18.76,0.00,0.00,0.00,0.00,0.00,0.00,0.00,0.00,0.00,0.00,12.90,0.00,0.00,0.00,0.00,0.00,0.00,0.00,0.00,0.00,0.00,0.00,0.00,0.00,0.00,0.00],
[0.00,0.00,0.00,0.00,86.00,0.00,0.00,0.00,0.00,0.00,0.00,89.90,97.30,0.00,0.00,81.60,0.00,0.00,0.00,0.00,0.00,0.00,0.00,0.00,0.00,0.00,0.00,0.00,0.00,0.00,0.00,0.00,0.00,0.00,0.00,96.70,0.00,0.00,0.00,0.00,0.00,0.00,0.00,0.00,0.00,0.00,0.00,0.00,89.00,83.77,0.00],
[0.00,0.00,0.00,0.00,0.00,0.00,0.00,0.00,0.00,0.00,0.00,0.00,64.28,0.00,0.00,49.46,0.00,0.00,0.00,0.00,0.00,0.00,0.00,0.00,0.00,0.00,0.00,0.00,0.00,0.00,0.00,0.00,0.00,0.00,0.00,0.00,0.00,0.00,0.00,0.00,0.00,0.00,0.00,0.00,0.00,0.00,0.00,0.00,0.00,52.34,0.00],
[0.00,0.00,0.00,0.00,79.90,89.00,0.00,124.00,0.00,0.00,0.00,85.00,104.47,0.00,0.00,0.00,0.00,0.00,0.00,0.00,0.00,0.00,0.00,0.00,0.00,0.00,0.00,0.00,0.00,0.00,0.00,67.20,0.00,0.00,0.00,0.00,0.00,0.00,0.00,0.00,0.00,0.00,0.00,0.00,0.00,0.00,0.00,0.00,0.00,0.00,0.00],
[0.00,0.00,0.00,0.00,0.00,0.00,0.00,0.00,0.00,0.00,0.00,91.00,91.11,0.00,0.00,73.61,0.00,0.00,0.00,0.00,0.00,0.00,0.00,0.00,0.00,0.00,0.00,0.00,0.00,0.00,0.00,0.00,0.00,0.00,0.00,81.50,0.00,0.00,0.00,0.00,0.00,0.00,0.00,0.00,0.00,0.00,0.00,0.00,0.00,80.60,0.00],
[0.00,0.00,2.47,0.00,0.00,0.00,0.00,0.00,0.00,0.00,0.00,0.00,0.00,0.00,0.00,0.00,0.00,0.00,0.00,0.00,0.00,0.00,0.00,0.00,0.00,0.00,0.00,0.00,0.00,0.00,0.00,0.00,0.00,0.00,0.00,0.00,0.00,0.00,0.00,0.00,0.00,0.00,0.00,0.00,0.00,0.00,2.44,0.00,0.00,0.00,0.00],
[0.00,0.00,28.44,0.00,0.00,0.00,0.00,0.00,0.00,0.00,0.00,0.00,0.00,0.00,0.00,0.00,0.00,0.00,0.00,0.00,15.90,0.00,0.00,0.00,0.00,0.00,0.00,0.00,0.00,0.00,0.00,0.00,0.00,0.00,0.00,0.00,0.00,0.00,0.00,0.00,0.00,0.00,0.00,15.10,0.00,0.00,13.00,0.00,0.00,0.00,0.00],
[0.00,0.00,0.00,0.00,0.00,0.00,0.00,0.00,0.00,22.00,0.00,0.00,0.00,0.00,31.92,0.00,0.00,0.00,0.00,0.00,0.00,0.00,28.00,0.00,0.00,0.00,0.00,0.00,0.00,0.00,0.00,0.00,0.00,0.00,0.00,0.00,0.00,0.00,0.00,0.00,0.00,0.00,22.00,0.00,0.00,0.00,0.00,0.00,0.00,0.00,0.00],
[0.00,0.00,0.00,0.00,0.00,0.00,0.00,0.00,0.00,8.55,0.00,0.00,0.00,0.00,62.70,0.00,0.00,0.00,0.00,0.00,0.00,0.00,0.00,0.00,0.00,0.00,0.00,0.00,0.00,8.30,0.00,0.00,0.00,0.00,0.00,0.00,0.00,0.00,0.00,0.00,0.00,0.00,0.00,0.00,0.00,0.00,0.00,0.00,0.00,0.00,0.00],
[0.00,0.00,0.00,0.00,0.00,0.00,0.00,0.00,0.00,0.00,0.00,0.00,0.00,0.00,62.70,0.00,0.00,0.00,0.00,0.00,0.00,0.00,0.00,0.00,0.00,0.00,0.00,0.00,0.00,0.00,0.00,0.00,0.00,0.00,0.00,0.00,0.00,0.00,0.00,0.00,0.00,0.00,0.00,0.00,0.00,0.00,0.00,0.00,0.00,0.00,0.00]]
pp = np.array(pp)

m = GEKKO()
ni = 40   # number of rows in mm and pp
nj = 51   # number of columns in mm and pp

x = [[m.Var(lb=0,integer=True) for j in range(nj)] for i in range(ni)]   # one nonnegative integer variable per (i, j) cell

# expr[i] is the total of row i; the objective below minimizes the sum of these row totals
expr = []
for i in range(ni):
    expr.append(sum(x[i]))


# x[i][j] is forced to zero wherever mm[i][j] == 0
for i in range(ni):
    for j in range(nj):
        if mm[i][j] == 0:
            m.Equation(x[i][j] == 0)



# each row total must meet the required quantity qtde[i]
for i in range(ni):
    m.Equation(sum([x[i][j] for j in range(nj)]) >= qtde[i])



# either-or constraint per column: b[j] = 1 forces iv[j] up to the
# threshold c1[j], and b[j] = 0 forces iv[j] to zero
b = m.Array(m.Var,nj,integer=True,lb=0,ub=1)
iv = [None]*nj
for j in range(nj):
    iv[j] = m.sum([pp[i][j]*x[i][j] for i in range(ni)])
    m.Equation(iv[j] >= b[j]*c1[j])
    m.Equation((1 - b[j])*iv[j] == 0)

m.Obj(m.sum(expr))   # minimize the total quantity assigned


m.options.SOLVER=1  # switch to APOPT
m.solver_options = ['minlp_gap_tol 1.0e-1',\
                    'minlp_maximum_iterations 10000',\
                    'minlp_max_iter_with_int_sol 1000',\
                    'minlp_branch_method 1',\
                    'minlp_integer_leaves 2']

m.solve()    

Edit: I changed the formulation of the last constraint as John Hedengren suggested (below). However, after inserting the binary variable, the code now returns an error before any iterations start. How can I avoid this?


asked by donut, 20.04.2020


Answers (1)


You can use a binary variable (0 = equipment off, 1 = equipment on and above the threshold value) and equations such as:

b = m.Array(m.Var,nj,integer=True,lb=0,ub=1)   # one on/off switch per column
iv = [None]*nj

for j in range(nj):
    iv[j] = m.sum([pp[i][j]*x[i][j] for i in range(ni)])
    m.Equation(iv[j] >= b[j]*c1[j])   # on: iv[j] must reach the threshold c1[j]
    m.Equation((1-b[j])*iv[j] <= 0)   # off: iv[j] is forced to zero

m.options.SOLVER = 1  # Change to MINLP solver

You can split the summation out into the intermediate variable iv because it is used in two equations. Another recommendation is to use m.sum() instead of sum; the Gekko summation is typically faster. There are other ways to pose the problem, but this one may be the most reliable. I can't test this solution because some of the input data is missing from your script. In future posts, it helps to reduce the problem to a minimal, reproducible example so that solutions can be verified. There is additional information available on logical conditions in optimization problems.
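As an aside, Gekko also provides m.Intermediate() for exactly this pattern, defining a shared expression once in the model instead of repeating it. Below is a minimal, self-contained sketch of the same on/off threshold construct; the 3-variable toy data, the weights w, and the threshold value are illustrative only, not taken from the question.

from gekko import GEKKO

m = GEKKO(remote=False)
n = 3
x = [m.Var(lb=0, integer=True) for _ in range(n)]
w = [2.0, 1.0, 3.0]
threshold = 10.0

# define the shared weighted sum once and reuse it in both equations
iv = m.Intermediate(m.sum([w[i]*x[i] for i in range(n)]))
b = m.Var(integer=True, lb=0, ub=1)

m.Equation(iv >= b*threshold)   # b = 1 forces iv up to the threshold
m.Equation((1-b)*iv <= 0)       # b = 0 forces iv down to zero
m.Equation(m.sum(x) >= 2)       # a toy demand so the switch must turn on

m.Obj(iv)                       # minimize the weighted total
m.options.SOLVER = 1            # APOPT handles the integer variables
m.solve(disp=False)
print(b.value[0], iv.value[0])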

Response to the edit

The MINLP does not converge quickly because there are nj x ni = 2040 binary variables. That is 2^2040 potential solutions. You can tune the solver options to help it find at least one feasible solution.

m.options.SOLVER=3
m.solve()  # sometimes it helps to solve with IPOPT first

m.options.SOLVER=1  # switch to APOPT
m.solver_options = ['minlp_gap_tol 1.0e-2',\
                    'minlp_maximum_iterations 10000',\
                    'minlp_max_iter_with_int_sol 500',\
                    'minlp_branch_method 1',\
                    'minlp_integer_leaves 2']
m.solve()

Additional descriptions of the solver options are available on the APOPT website.

Response to the edit

The error on the first MINLP iteration comes from the problem being infeasible. If you switch on the solver option minlp_as_nlp 1, you will see that the first NLP problem does not converge. You can also see this with the IPOPT solver by switching to m.options.SOLVER=3.
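A short sketch of both checks, assuming the model m from the question is already built (minlp_as_nlp is one of the documented APOPT solver options):

# solve only the relaxed NLP: if this fails, the MINLP root problem
# is already infeasible before any branching starts
m.options.SOLVER = 1
m.solver_options = ['minlp_as_nlp 1']
m.solve()

# alternatively, test the continuous relaxation with IPOPT
m.options.SOLVER = 3
m.solve()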

EXIT: Converged to a point of local infeasibility.
Problem may be infeasible.

If you solve locally with m=GEKKO(remote=False) and open the run folder before the solve command with m.open_folder(), you will see a file infeasibilities.txt that helps you identify the infeasible equations. I suspect the infeasibility comes from the interaction of the equations m.Equation(m.sum([x[i][j] for j in range(nj)]) >= qtde[i]) and m.Equation(x[i][j] == 0). You can also try to locate the infeasible equations with m.options.COLDSTART=2. Additional help with troubleshooting applications is in exercise 18 of the Gekko tutorials.
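A minimal sketch of that diagnostic workflow (the model-building code from the question is elided):

from gekko import GEKKO

m = GEKKO(remote=False)   # a local solve keeps the run folder on disk
# ... build the variables, equations, and objective as in the question ...
m.open_folder()           # open the run directory before solving
try:
    m.solve()
except Exception:
    # on an infeasible exit, inspect infeasibilities.txt in the opened
    # folder to see which equations cannot be satisfied
    raise

The m.options.COLDSTART=2 alternative is set the same way, just before the call to m.solve().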

answered by John Hedengren, 20.04.2020
Thanks for your answer. I tried it in the code but did not find a solution. I have edited my question so that it includes all the variables. Could you check whether I made a mistake, or whether the problem really has no solution? - donut; 20.04.2020
Oh, I'm so sorry. I have edited my question to include all the variables, and I would be very grateful if you could still help me. - donut; 21.04.2020
You have 2^2040 different possible solutions. MINLP problems can take much longer than NLPs, but there are options you can explore with the MINLP solver. Here are several of them: apopt.com/download.php I have added some additional comments to my answer. APOPT found an integer solution after 832 NLP iterations. - John Hedengren; 22.04.2020
Thank you very much for your help and time. I have one more question. Even with the changes to the solver options, the case where I added the binary variable did not find any solution. Is there anything else I can do to find a solution, or should I accept that there is no solution with the constraints I have? - donut; 22.04.2020
There may be a solution, but the solver may need too much time to evaluate all the options. With the options I showed and your data, a solution was found in 832 iterations. If you changed the problem, then it may take more iterations to find a solution. You cannot say that no solution exists until the solver has evaluated all the options, but that may take too long. There are many methods for approximating an MINLP solution without integer variables. Here is one example: mdpi.com/2227-9717/7/12/929 - John Hedengren; 22.04.2020
Sorry to take up your time again; I changed the formulation of the last constraint as suggested (the question now shows the new code). However, after inserting the binary variable, the code now returns an error before any iterations start (before adding the binary variable, the code ran through every iteration and then, after reaching the maximum, returned an error after several minutes of processing; now it runs for about a minute, shows no iterations, and then returns an error). How can I avoid this? - donut; 24.04.2020