Skip to content

Commit 181cd63

Browse files
blegat and kellertuer authored
Add support for Riemannian objective through JuMP (#448)
* Add support for Riemannian objective through JuMP * Fix no objective case * Add entry to Changelog * Use zero_vector * Fix Changelog * Stop using scaled objective * Fix doc --------- Co-authored-by: Ronny Bergmann <git@ronnybergmann.net>
1 parent 992ccae commit 181cd63

3 files changed

Lines changed: 52 additions & 38 deletions

File tree

Changelog.md

Lines changed: 6 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -6,6 +6,12 @@ The file was started with Version `0.4`.
66
The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/),
77
and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0.html).
88

9+
## [0.5.13] unreleased
10+
11+
### Added
12+
13+
* Allow setting `AbstractManifoldObjective` through JuMP
14+
915
## [0.5.12] April 13, 2025
1016

1117
### Added

docs/src/extensions.md

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -109,7 +109,7 @@ MOI.get(model::Manopt.JuMP_Optimizer, ::MOI.NumberOfVariables)
109109
MOI.supports(::Manopt.JuMP_Optimizer, ::MOI.VariablePrimalStart, ::Type{MOI.VariableIndex})
110110
MOI.set(::Manopt.JuMP_Optimizer, ::MOI.VariablePrimalStart, ::MOI.VariableIndex, ::Union{Real,Nothing})
111111
MOI.set(::Manopt.JuMP_Optimizer, ::MOI.ObjectiveSense, ::MOI.OptimizationSense)
112-
MOI.set(::Manopt.JuMP_Optimizer, ::MOI.ObjectiveFunction{F}, ::F) where {F}
112+
MOI.set(::Manopt.JuMP_Optimizer, ::MOI.ObjectiveFunction, func::MOI.AbstractScalarFunction)
113113
MOI.supports(::Manopt.JuMP_Optimizer, ::Union{MOI.ObjectiveSense,MOI.ObjectiveFunction})
114114
JuMP.build_variable(::Function, ::Any, ::Manopt.AbstractManifold)
115115
MOI.get(::Manopt.JuMP_Optimizer, ::MOI.ResultCount)

ext/ManoptJuMPExt.jl

Lines changed: 45 additions & 37 deletions
Original file line numberDiff line numberDiff line change
@@ -29,6 +29,10 @@ function MOI.dimension(set::VectorizedManifold)
2929
return prod(ManifoldsBase.representation_size(set.manifold))
3030
end
3131

32+
struct RiemannianFunction{MO<:Manopt.AbstractManifoldObjective} <: MOI.AbstractScalarFunction
33+
func::MO
34+
end
35+
3236
mutable struct Optimizer <: MOI.AbstractOptimizer
3337
# Manifold in which all the decision variables leave
3438
manifold::Union{Nothing,ManifoldsBase.AbstractManifold}
@@ -40,8 +44,8 @@ mutable struct Optimizer <: MOI.AbstractOptimizer
4044
variable_primal_start::Vector{Union{Nothing,Float64}}
4145
# Sense of the optimization, that is whether it is for example min, max or no objective
4246
sense::MOI.OptimizationSense
43-
# Model used to compute gradient of the objective function with AD
44-
nlp_model::MOI.Nonlinear.Model
47+
# Objective function of the optimization
48+
objective::Union{Nothing,Manopt.AbstractManifoldObjective}
4549
# Solver parameters set with `MOI.RawOptimizerAttribute`
4650
options::Dict{String,Any}
4751
function Optimizer()
@@ -51,7 +55,7 @@ mutable struct Optimizer <: MOI.AbstractOptimizer
5155
nothing,
5256
Union{Nothing,Float64}[],
5357
MOI.FEASIBILITY_SENSE,
54-
MOI.Nonlinear.Model(),
58+
nothing,
5559
Dict{String,Any}(DESCENT_STATE_TYPE => Manopt.GradientDescentState),
5660
)
5761
end
@@ -68,7 +72,7 @@ MOI.get(::Optimizer, ::MOI.SolverVersion) = "0.4.37"
6872
function MOI.is_empty(model::Optimizer)
6973
return isnothing(model.manifold) &&
7074
isempty(model.variable_primal_start) &&
71-
MOI.is_empty(model.nlp_model) &&
75+
isnothing(model.objective) &&
7276
model.sense == MOI.FEASIBILITY_SENSE
7377
end
7478

@@ -83,7 +87,7 @@ function MOI.empty!(model::Optimizer)
8387
model.state = nothing
8488
empty!(model.variable_primal_start)
8589
model.sense = MOI.FEASIBILITY_SENSE
86-
MOI.empty!(model.nlp_model)
90+
model.objective = nothing
8791
return nothing
8892
end
8993

@@ -270,7 +274,7 @@ function MOI.set(model::Optimizer, ::MOI.ObjectiveSense, sense::MOI.Optimization
270274
end
271275

272276
"""
273-
MOI.set(model::Optimizer, ::MOI.ObjectiveSense, sense::MOI.OptimizationSense)
277+
MOI.get(model::Optimizer, ::MOI.ObjectiveSense)
274278
275279
Return the objective sense, defaults to `MOI.FEASIBILITY_SENSE` if no sense has
276280
already been set.
@@ -282,9 +286,38 @@ MOI.get(model::Optimizer, ::MOI.ObjectiveSense) = model.sense
282286
283287
Set the objective function as `func` for `model`.
284288
"""
285-
function MOI.set(model::Optimizer, ::MOI.ObjectiveFunction{F}, func::F) where {F}
289+
function MOI.set(model::Optimizer, attr::MOI.ObjectiveFunction, func::MOI.AbstractScalarFunction)
290+
backend = MOI.Nonlinear.SparseReverseMode()
291+
vars = [MOI.VariableIndex(i) for i in eachindex(model.variable_primal_start)]
292+
nlp_model = MOI.Nonlinear.Model()
286293
nl = convert(MOI.ScalarNonlinearFunction, func)
287-
MOI.Nonlinear.set_objective(model.nlp_model, nl)
294+
MOI.Nonlinear.set_objective(nlp_model, nl)
295+
evaluator = MOI.Nonlinear.Evaluator(nlp_model, backend, vars)
296+
MOI.initialize(evaluator, [:Grad])
297+
function eval_f_cb(M, x)
298+
val = MOI.eval_objective(evaluator, JuMP.vectorize(x, _shape(model.manifold)))
299+
if model.sense == MOI.MAX_SENSE
300+
val = -val
301+
end
302+
return val
303+
end
304+
function eval_grad_f_cb(M, X)
305+
x = JuMP.vectorize(X, _shape(model.manifold))
306+
grad_f = zeros(length(x))
307+
MOI.eval_objective_gradient(evaluator, grad_f, x)
308+
if model.sense == MOI.MAX_SENSE
309+
LinearAlgebra.rmul!(grad_f, -1)
310+
end
311+
reshaped_grad_f = JuMP.reshape_vector(grad_f, _shape(model.manifold))
312+
return ManifoldDiff.riemannian_gradient(model.manifold, X, reshaped_grad_f)
313+
end
314+
objective = RiemannianFunction(Manopt.ManifoldGradientObjective(eval_f_cb, eval_grad_f_cb))
315+
MOI.set(model, MOI.ObjectiveFunction{typeof(objective)}(), objective)
316+
return nothing
317+
end
318+
319+
function MOI.set(model::Optimizer, ::MOI.ObjectiveFunction, func::RiemannianFunction)
320+
model.objective = func.func
288321
model.problem = nothing
289322
model.state = nothing
290323
return nothing
@@ -304,36 +337,11 @@ function MOI.optimize!(model::Optimizer)
304337
model.variable_primal_start[i]
305338
end for i in eachindex(model.variable_primal_start)
306339
]
307-
backend = MOI.Nonlinear.SparseReverseMode()
308-
vars = [MOI.VariableIndex(i) for i in eachindex(model.variable_primal_start)]
309-
evaluator = MOI.Nonlinear.Evaluator(model.nlp_model, backend, vars)
310-
MOI.initialize(evaluator, [:Grad])
311-
function eval_f_cb(M, x)
312-
if model.sense == MOI.FEASIBILITY_SENSE
313-
return 0.0
314-
end
315-
obj = MOI.eval_objective(evaluator, JuMP.vectorize(x, _shape(model.manifold)))
316-
if model.sense == MOI.MAX_SENSE
317-
obj = -obj
318-
end
319-
return obj
320-
end
321-
function eval_grad_f_cb(M, X)
322-
x = JuMP.vectorize(X, _shape(model.manifold))
323-
grad_f = zeros(length(x))
324-
if model.sense == MOI.FEASIBILITY_SENSE
325-
grad_f .= zero(eltype(grad_f))
326-
else
327-
MOI.eval_objective_gradient(evaluator, grad_f, x)
328-
end
329-
if model.sense == MOI.MAX_SENSE
330-
LinearAlgebra.rmul!(grad_f, -1)
331-
end
332-
reshaped_grad_f = JuMP.reshape_vector(grad_f, _shape(model.manifold))
333-
return ManifoldDiff.riemannian_gradient(model.manifold, X, reshaped_grad_f)
340+
objective = model.objective
341+
if model.sense == MOI.FEASIBILITY_SENSE
342+
objective = Manopt.ManifoldGradientObjective((_, _) -> 0.0, ManifoldsBase.zero_vector)
334343
end
335-
mgo = Manopt.ManifoldGradientObjective(eval_f_cb, eval_grad_f_cb)
336-
dmgo = decorate_objective!(model.manifold, mgo)
344+
dmgo = decorate_objective!(model.manifold, objective)
337345
model.problem = DefaultManoptProblem(model.manifold, dmgo)
338346
reshaped_start = JuMP.reshape_vector(start, _shape(model.manifold))
339347
descent_state_type = model.options[DESCENT_STATE_TYPE]

0 commit comments

Comments (0)