Skip to content

Commit

Permalink
Merge pull request #732 from ParasPuneetSingh/master
Browse files Browse the repository at this point in the history
Added struct and constructors, updated OptimizationProblem for MOO
  • Loading branch information
Vaibhavdixit02 authored Aug 4, 2024
2 parents c7df225 + 739fe11 commit 56ba881
Show file tree
Hide file tree
Showing 4 changed files with 89 additions and 7 deletions.
2 changes: 1 addition & 1 deletion src/SciMLBase.jl
Original file line number Diff line number Diff line change
Expand Up @@ -811,7 +811,7 @@ export ODEFunction, DiscreteFunction, ImplicitDiscreteFunction, SplitFunction, D
IncrementingODEFunction, NonlinearFunction, IntervalNonlinearFunction, BVPFunction,
DynamicalBVPFunction, IntegralFunction, BatchIntegralFunction

export OptimizationFunction
export OptimizationFunction, MultiObjectiveOptimizationFunction

export EnsembleThreads, EnsembleDistributed, EnsembleSplitThreads, EnsembleSerial

Expand Down
4 changes: 2 additions & 2 deletions src/problems/optimization_problems.jl
Original file line number Diff line number Diff line change
Expand Up @@ -103,7 +103,7 @@ struct OptimizationProblem{iip, F, uType, P, LB, UB, I, LC, UC, S, K} <:
ucons::UC
sense::S
kwargs::K
@add_kwonly function OptimizationProblem{iip}(f::OptimizationFunction{iip}, u0,
@add_kwonly function OptimizationProblem{iip}(f::Union{OptimizationFunction{iip}, MultiObjectiveOptimizationFunction{iip}}, u0,
p = NullParameters();
lb = nothing, ub = nothing, int = nothing,
lcons = nothing, ucons = nothing,
Expand All @@ -119,7 +119,7 @@ struct OptimizationProblem{iip, F, uType, P, LB, UB, I, LC, UC, S, K} <:
end
end

function OptimizationProblem(f::OptimizationFunction, args...; kwargs...)
function OptimizationProblem(f::Union{OptimizationFunction, MultiObjectiveOptimizationFunction}, args...; kwargs...)
OptimizationProblem{isinplace(f)}(f, args...; kwargs...)
end
function OptimizationProblem(f, args...; kwargs...)
Expand Down
82 changes: 82 additions & 0 deletions src/scimlfunctions.jl
Original file line number Diff line number Diff line change
Expand Up @@ -1925,6 +1925,38 @@ struct OptimizationFunction{iip, AD, F, G, H, HV, C, CJ, CJV, CVJ, CH, HP, CJP,
lag_hess_colorvec::LHCV
end

"""
$(TYPEDEF)

A representation of a multi-objective optimization function `f`.

Mirrors the fields of `OptimizationFunction`, with two visible differences:
`jac` stores a Jacobian of `f` in place of a gradient, and `hess` is a
`Vector{H}` holding multiple Hessians (per the constructor, one entry per
objective).
"""

struct MultiObjectiveOptimizationFunction{iip, AD, F, J, H, HV, C, CJ, CJV, CVJ, CH, HP, CJP, CHP, O,
EX, CEX, SYS, LH, LHP, HCV, CJCV, CHCV, LHCV} <:
AbstractOptimizationFunction{iip}
f::F # the multi-objective function to optimize
adtype::AD # automatic differentiation backend (an `AbstractADType` per the kw constructor)
jac::J # Replacing grad with jac for the Jacobian
hess::Vector{H} # Hess will be a vector of type H (one Hessian per objective)
hv::HV # Hessian-vector product operator
cons::C # constraint function
cons_j::CJ # constraint Jacobian
cons_jvp::CJV # constraint Jacobian-vector product
cons_vjp::CVJ # constraint vector-Jacobian product
cons_h::CH # constraint Hessians
hess_prototype::HP # structure/sparsity prototype for the Hessians
cons_jac_prototype::CJP # structure/sparsity prototype for the constraint Jacobian
cons_hess_prototype::CHP # structure/sparsity prototype for the constraint Hessians
observed::O # observed-variables function (defaults to DEFAULT_OBSERVED_NO_TIME in the kw constructor)
expr::EX # presumably a symbolic expression of the objective — mirrors OptimizationFunction
cons_expr::CEX # presumably symbolic expressions of the constraints — mirrors OptimizationFunction
sys::SYS # associated symbolic system / symbol cache (see `sys_or_symbolcache` in the kw constructor)
lag_h::LH # Lagrangian Hessian
lag_hess_prototype::LHP # structure/sparsity prototype for the Lagrangian Hessian
hess_colorvec::HCV # coloring vector for Hessian sparsity
cons_jac_colorvec::CJCV # coloring vector for constraint-Jacobian sparsity
cons_hess_colorvec::CHCV # coloring vector for constraint-Hessian sparsity
lag_hess_colorvec::LHCV # coloring vector for Lagrangian-Hessian sparsity
end

"""
$(TYPEDEF)
"""
Expand Down Expand Up @@ -3819,6 +3851,56 @@ function OptimizationFunction{iip}(f, adtype::AbstractADType = NoAD();
cons_hess_colorvec, lag_hess_colorvec)
end

# Make `MultiObjectiveOptimizationFunction` callable: evaluating the wrapper
# forwards all arguments straight to the wrapped objective `f.f`.
function (f::MultiObjectiveOptimizationFunction)(args...)
    return f.f(args...)
end

# Convenience constructor: without an explicit in-place flag, default to the
# in-place (`iip = true`) keyword constructor.
function MultiObjectiveOptimizationFunction(args...; kwargs...)
    return MultiObjectiveOptimizationFunction{true}(args...; kwargs...)
end

# Keyword constructor for `MultiObjectiveOptimizationFunction`.
#
# Mirrors `OptimizationFunction{iip}`: validates the in-place signature of `f`,
# resolves the symbolic system from `sys`/`syms`/`paramsyms`, then builds the
# fully-parameterized struct. `jac` takes the place of `grad`, and `hess` is a
# vector of Hessians (defaulting to an empty `Vector{Nothing}`).
function MultiObjectiveOptimizationFunction{iip}(f, adtype::AbstractADType = NoAD();
        jac = nothing, hess = Vector{Nothing}(undef, 0), hv = nothing,
        cons = nothing, cons_j = nothing, cons_jvp = nothing,
        cons_vjp = nothing, cons_h = nothing,
        hess_prototype = nothing,
        cons_jac_prototype = __has_jac_prototype(f) ?
                             f.jac_prototype : nothing,
        cons_hess_prototype = nothing,
        syms = nothing,
        paramsyms = nothing,
        observed = __has_observed(f) ? f.observed :
                   DEFAULT_OBSERVED_NO_TIME,
        expr = nothing, cons_expr = nothing,
        sys = __has_sys(f) ? f.sys : nothing,
        lag_h = nothing, lag_hess_prototype = nothing,
        hess_colorvec = __has_colorvec(f) ? f.colorvec : nothing,
        cons_jac_colorvec = __has_colorvec(f) ? f.colorvec :
                            nothing,
        cons_hess_colorvec = __has_colorvec(f) ? f.colorvec :
                             nothing,
        lag_hess_colorvec = nothing) where {iip}
    # Validates the call signature of `f`; result intentionally discarded,
    # matching the `OptimizationFunction{iip}` constructor.
    isinplace(f, 2; has_two_dispatches = false, isoptimization = true)
    sys = sys_or_symbolcache(sys, syms, paramsyms)
    # BUG FIX: the struct field is `hess::Vector{H}`, so the `H` type parameter
    # must be the *element* type of `hess`. Passing `typeof(hess)` (a
    # `Vector{...}`) made the field type `Vector{Vector{...}}`, which only
    # converted by accident for the empty default and failed for any
    # non-empty `hess`. Use `eltype(hess)` instead.
    MultiObjectiveOptimizationFunction{iip, typeof(adtype), typeof(f), typeof(jac),
        eltype(hess),
        typeof(hv),
        typeof(cons), typeof(cons_j), typeof(cons_jvp),
        typeof(cons_vjp), typeof(cons_h),
        typeof(hess_prototype),
        typeof(cons_jac_prototype), typeof(cons_hess_prototype),
        typeof(observed),
        typeof(expr), typeof(cons_expr), typeof(sys), typeof(lag_h),
        typeof(lag_hess_prototype), typeof(hess_colorvec),
        typeof(cons_jac_colorvec), typeof(cons_hess_colorvec),
        typeof(lag_hess_colorvec)
    }(f, adtype, jac, hess,
        hv, cons, cons_j, cons_jvp,
        cons_vjp, cons_h,
        hess_prototype, cons_jac_prototype,
        cons_hess_prototype, observed, expr, cons_expr, sys,
        lag_h, lag_hess_prototype, hess_colorvec, cons_jac_colorvec,
        cons_hess_colorvec, lag_hess_colorvec)
end

function BVPFunction{iip, specialize, twopoint}(f, bc;
mass_matrix = __has_mass_matrix(f) ? f.mass_matrix : I,
analytic = __has_analytic(f) ? f.analytic : nothing,
Expand Down
8 changes: 4 additions & 4 deletions src/solve.jl
Original file line number Diff line number Diff line change
Expand Up @@ -116,13 +116,13 @@ function _check_opt_alg(prob::OptimizationProblem, alg; kwargs...)
throw(IncompatibleOptimizerError("The algorithm $(typeof(alg)) requires constraints, pass them with the `cons` kwarg in `OptimizationFunction`."))
!allowscallback(alg) && haskey(kwargs, :callback) &&
throw(IncompatibleOptimizerError("The algorithm $(typeof(alg)) does not support callbacks, remove the `callback` keyword argument from the `solve` call."))
!requiresgradient(alg) && !(prob.f isa OptimizationFunction) &&
requiresgradient(alg) && !(prob.f isa AbstractOptimizationFunction) &&
throw(IncompatibleOptimizerError("The algorithm $(typeof(alg)) requires gradients, hence use `OptimizationFunction` to generate them with an automatic differentiation backend e.g. `OptimizationFunction(f, AutoForwardDiff())` or pass it in with `grad` kwarg."))
!requireshessian(alg) && !(prob.f isa OptimizationFunction) &&
requireshessian(alg) && !(prob.f isa AbstractOptimizationFunction) &&
throw(IncompatibleOptimizerError("The algorithm $(typeof(alg)) requires hessians, hence use `OptimizationFunction` to generate them with an automatic differentiation backend e.g. `OptimizationFunction(f, AutoFiniteDiff(); kwargs...)` or pass them in with `hess` kwarg."))
!requiresconsjac(alg) && !(prob.f isa OptimizationFunction) &&
requiresconsjac(alg) && !(prob.f isa AbstractOptimizationFunction) &&
throw(IncompatibleOptimizerError("The algorithm $(typeof(alg)) requires constraint jacobians, hence use `OptimizationFunction` to generate them with an automatic differentiation backend e.g. `OptimizationFunction(f, AutoFiniteDiff(); kwargs...)` or pass them in with `cons` kwarg."))
!requiresconshess(alg) && !(prob.f isa OptimizationFunction) &&
requiresconshess(alg) && !(prob.f isa AbstractOptimizationFunction) &&
throw(IncompatibleOptimizerError("The algorithm $(typeof(alg)) requires constraint hessians, hence use `OptimizationFunction` to generate them with an automatic differentiation backend e.g. `OptimizationFunction(f, AutoFiniteDiff(), AutoFiniteDiff(hess=true); kwargs...)` or pass them in with `cons` kwarg."))
return
end
Expand Down

0 comments on commit 56ba881

Please sign in to comment.