9个参数的非线性优化问题,调用Ipopt没问题,调用NLopt不报错但只返回初始值。由于相同操作要重复几万次,但Ipopt不是thread-safe,所以最终还是想用NLopt实现。请教下哪里出错了?
NLopt版本:
using JuMP
using NLopt
using LinearAlgebra
ask()
function ask()
lb = [-1, -1, -1, -1, -1, -1, -1, -1, -1]
ub = [ 1, 1, 1, 1, 1, 1, 1, 1, 1]
x0 = [0.7, 0.7, 0.07, -0.4, 0.5, -0.7, -0.5, 0.5, 0.7]
model = Model(NLopt.Optimizer)
set_optimizer_attribute(model, "algorithm", :LD_SLSQP)
@variable(model, lb[1] ≤ x1 ≤ ub[1], start = x0[1])
@variable(model, lb[2] ≤ x2 ≤ ub[2], start = x0[2])
@variable(model, lb[3] ≤ x3 ≤ ub[3], start = x0[3])
@variable(model, lb[4] ≤ x4 ≤ ub[4], start = x0[4])
@variable(model, lb[5] ≤ x5 ≤ ub[5], start = x0[5])
@variable(model, lb[6] ≤ x6 ≤ ub[6], start = x0[6])
@variable(model, lb[7] ≤ x7 ≤ ub[7], start = x0[7])
@variable(model, lb[8] ≤ x8 ≤ ub[8], start = x0[8])
@variable(model, lb[9] ≤ x9 ≤ ub[9], start = x0[9])
@NLobjective(model, Min, x1^2+x2+x3^2+x4+x5^2+x6+x7^2+x8+x9^2)
@constraint(model, [x1 x2 x3; x4 x5 x6; x7 x8 x9]*[x1 x2 x3; x4 x5 x6; x7 x8 x9]' .== Matrix{Float64}(I, 3, 3))
JuMP.optimize!(model)
println("got ", objective_value(model), " at ")
println(" x1: ", value(x1))
println(" x2: ", value(x2))
println(" x3: ", value(x3))
println(" x4: ", value(x4))
println(" x5: ", value(x5))
println(" x6: ", value(x6))
println(" x7: ", value(x7))
println(" x8: ", value(x8))
println(" x9: ", value(x9))
end
got 1.5848999999999998 at
x1: 0.7
x2: 0.7
x3: 0.07
x4: -0.4
x5: 0.5
x6: -0.7
x7: -0.5
x8: 0.5
x9: 0.7
Ipopt版本:
using JuMP
using Ipopt
using LinearAlgebra
ask()
function ask()
lb = [-1, -1, -1, -1, -1, -1, -1, -1, -1]
ub = [ 1, 1, 1, 1, 1, 1, 1, 1, 1]
x0 = [0.7, 0.7, 0.07, -0.4, 0.5, -0.7, -0.5, 0.5, 0.7]
model = Model(Ipopt.Optimizer)
set_optimizer_attribute(model, "max_cpu_time", 60.0)
set_optimizer_attribute(model, "print_level", 0)
set_silent(model)
@variable(model, lb[1] ≤ x1 ≤ ub[1], start = x0[1])
@variable(model, lb[2] ≤ x2 ≤ ub[2], start = x0[2])
@variable(model, lb[3] ≤ x3 ≤ ub[3], start = x0[3])
@variable(model, lb[4] ≤ x4 ≤ ub[4], start = x0[4])
@variable(model, lb[5] ≤ x5 ≤ ub[5], start = x0[5])
@variable(model, lb[6] ≤ x6 ≤ ub[6], start = x0[6])
@variable(model, lb[7] ≤ x7 ≤ ub[7], start = x0[7])
@variable(model, lb[8] ≤ x8 ≤ ub[8], start = x0[8])
@variable(model, lb[9] ≤ x9 ≤ ub[9], start = x0[9])
@NLobjective(model, Min, x1^2+x2+x3^2+x4+x5^2+x6+x7^2+x8+x9^2)
@constraint(model, [x1 x2 x3; x4 x5 x6; x7 x8 x9]*[x1 x2 x3; x4 x5 x6; x7 x8 x9]' .== Matrix{Float64}(I, 3, 3))
JuMP.optimize!(model)
println("got ", objective_value(model), " at ")
println(" x1: ", value(x1))
println(" x2: ", value(x2))
println(" x3: ", value(x3))
println(" x4: ", value(x4))
println(" x5: ", value(x5))
println(" x6: ", value(x6))
println(" x7: ", value(x7))
println(" x8: ", value(x8))
println(" x9: ", value(x9))
end
got -1.8284271244830348 at
x1: -0.500000014712519
x2: -0.7071067831367339
x3: 0.49999998301859216
x4: -0.7071067591675974
x5: 5.788661909852154e-13
x6: -0.707106803904326
x7: 0.5000000173810248
x8: -0.7071067792414213
x9: -0.49999998585490696