Skip to content

Commit

Permalink
Use objgrad on tron
Browse files Browse the repository at this point in the history
The keyword option `use_only_objgrad` is also included for models without individual `obj` functions.
  • Loading branch information
abelsiqueira committed Aug 19, 2020
1 parent c09754a commit 75f36c7
Show file tree
Hide file tree
Showing 3 changed files with 53 additions and 6 deletions.
20 changes: 14 additions & 6 deletions src/tron.jl
Original file line number Diff line number Diff line change
Expand Up @@ -28,6 +28,7 @@ function tron(::Val{:Newton},
max_eval :: Int=-1,
max_time :: Real=30.0,
max_cgiter :: Int=nlp.meta.nvar,
use_only_objgrad :: Bool=false,
cgtol :: Real=eltype(x)(0.1),
atol :: Real=eps(eltype(x)),
rtol :: Real=eps(eltype(x)),
Expand All @@ -42,8 +43,6 @@ function tron(::Val{:Newton},
T = eltype(x)
ℓ = T.(nlp.meta.lvar)
u = T.(nlp.meta.uvar)
f(x) = obj(nlp, x)
g(x) = grad(nlp, x)
n = nlp.meta.nvar

iter = 0
Expand All @@ -57,8 +56,9 @@ function tron(::Val{:Newton},
Hs = zeros(T, n)

x .= max.(ℓ, min.(x, u))
fx = f(x)
gx = g(x)
gx = zeros(T, n)
fx, _ = objgrad!(nlp, x, gx)
gt = use_only_objgrad ? zeros(T, n) : T[]
num_success_iters = 0

# Optimality measure
Expand Down Expand Up @@ -101,7 +101,11 @@ function tron(::Val{:Newton},
end
slope = dot(n, gx, s)
qs = dot(n, s, Hs) / 2 + slope
fx = f(x)
fx = if use_only_objgrad
objgrad!(nlp, x, gt)[1]
else
obj(nlp, x)
end

ared, pred, quad_min = aredpred(tr, nlp, fc, fx, qs, x, s, slope)
if pred ≥ 0
Expand All @@ -123,7 +127,11 @@ function tron(::Val{:Newton},

if acceptable(tr)
num_success_iters += 1
gx = g(x)
if use_only_objgrad
gx .= gt
else
grad!(nlp, x, gx)
end
project_step!(gpx, x, gx, ℓ, u, -one(T))
πx = nrm2(n, gpx)

Expand Down
37 changes: 37 additions & 0 deletions test/objgrad-on-tron.jl
Original file line number Diff line number Diff line change
@@ -0,0 +1,37 @@
@testset "objgrad on tron" begin
    # A minimal NLPModel exposing only a fused `objgrad!` (plus `hprod!`):
    # it deliberately has no stand-alone `obj`/`grad` methods, so tron must
    # be driven through the `use_only_objgrad` keyword.
    struct MyProblem <: AbstractNLPModel
        meta :: NLPModelMeta
        counters :: Counters
    end

    # Two-variable box-constrained problem: 0 ≤ x ≤ 1, starting at (0.1, 0.1).
    function MyProblem()
        problem_meta = NLPModelMeta(
            2, # nvar
            x0 = [0.1; 0.1],
            lvar = zeros(2),
            uvar = ones(2),
        )
        return MyProblem(problem_meta, Counters())
    end

    # Rosenbrock objective and gradient, computed together in one call.
    function NLPModels.objgrad!(:: MyProblem, x :: AbstractVector, g :: AbstractVector)
        fx = (x[1] - 1)^2 + 100 * (x[2] - x[1]^2)^2
        g[1] = 2 * (x[1] - 1) - 400 * x[1] * (x[2] - x[1]^2)
        g[2] = 200 * (x[2] - x[1]^2)
        return fx, g
    end

    # Exact Hessian-vector product for the Rosenbrock function.
    function NLPModels.hprod!(:: MyProblem, x :: AbstractVector, v :: AbstractVector, Hv :: AbstractVector; obj_weight=1.0)
        Hv[1] = obj_weight * (2 - 400 * (x[2] - x[1]^2) + 800 * x[1]^2) * v[1] - 400 * obj_weight * x[1] * v[2]
        Hv[2] = 200 * obj_weight * v[2] - 400 * obj_weight * x[1] * v[1]
        return Hv
    end

    model = MyProblem()

    # With use_only_objgrad=true the solver should converge to the
    # unconstrained minimizer (1, 1), which lies on the boundary of the box.
    result = tron(model, use_only_objgrad = true)
    @test isapprox(result.solution, ones(2), rtol = 1e-4)
    @test result.dual_feas < 1e-4
    @test result.objective < 1e-4

    # Without the keyword, tron falls back to `obj`/`grad`, which this model
    # does not define — so the call must fail with a MethodError.
    @test_throws MethodError tron(model, use_only_objgrad = false)
end
2 changes: 2 additions & 0 deletions test/runtests.jl
Original file line number Diff line number Diff line change
Expand Up @@ -23,3 +23,5 @@ for solver in [trunk]
solver(nlp, max_eval=20)
reset!(nlp)
end

include("objgrad-on-tron.jl")

0 comments on commit 75f36c7

Please sign in to comment.