@@ -29,6 +29,10 @@ function MOI.dimension(set::VectorizedManifold)
2929 return prod (ManifoldsBase. representation_size (set. manifold))
3030end
3131
"""
    RiemannianFunction{O} <: MOI.AbstractScalarFunction

Scalar-function wrapper that lets a ready-made Manopt objective
(`Manopt.AbstractManifoldObjective`) be passed through the MOI
`ObjectiveFunction` attribute interface and stored on the `Optimizer`.
"""
struct RiemannianFunction{O<:Manopt.AbstractManifoldObjective} <: MOI.AbstractScalarFunction
    func::O
end
35+
3236mutable struct Optimizer <: MOI.AbstractOptimizer
3337 # Manifold in which all the decision variables leave
3438 manifold:: Union{Nothing,ManifoldsBase.AbstractManifold}
@@ -40,8 +44,8 @@ mutable struct Optimizer <: MOI.AbstractOptimizer
4044 variable_primal_start:: Vector{Union{Nothing,Float64}}
4145 # Sense of the optimization, that is whether it is for example min, max or no objective
4246 sense:: MOI.OptimizationSense
43- # Model used to compute gradient of the objective function with AD
44- nlp_model :: MOI.Nonlinear.Model
47+ # Objective function of the optimization
48+ objective :: Union{Nothing,Manopt.AbstractManifoldObjective}
4549 # Solver parameters set with `MOI.RawOptimizerAttribute`
4650 options:: Dict{String,Any}
4751 function Optimizer ()
@@ -51,7 +55,7 @@ mutable struct Optimizer <: MOI.AbstractOptimizer
5155 nothing ,
5256 Union{Nothing,Float64}[],
5357 MOI. FEASIBILITY_SENSE,
54- MOI . Nonlinear . Model () ,
58+ nothing ,
5559 Dict {String,Any} (DESCENT_STATE_TYPE => Manopt. GradientDescentState),
5660 )
5761 end
@@ -68,7 +72,7 @@ MOI.get(::Optimizer, ::MOI.SolverVersion) = "0.4.37"
6872function MOI. is_empty (model:: Optimizer )
6973 return isnothing (model. manifold) &&
7074 isempty (model. variable_primal_start) &&
71- MOI . is_empty (model. nlp_model ) &&
75+ isnothing (model. objective ) &&
7276 model. sense == MOI. FEASIBILITY_SENSE
7377end
7478
@@ -83,7 +87,7 @@ function MOI.empty!(model::Optimizer)
8387 model. state = nothing
8488 empty! (model. variable_primal_start)
8589 model. sense = MOI. FEASIBILITY_SENSE
86- MOI . empty! ( model. nlp_model)
90+ model. objective = nothing
8791 return nothing
8892end
8993
@@ -270,7 +274,7 @@ function MOI.set(model::Optimizer, ::MOI.ObjectiveSense, sense::MOI.Optimization
270274end
271275
272276"""
273- MOI.set (model::Optimizer, ::MOI.ObjectiveSense, sense::MOI.OptimizationSense )
277+ MOI.get (model::Optimizer, ::MOI.ObjectiveSense)
274278
275279Return the objective sense, defaults to `MOI.FEASIBILITY_SENSE` if no sense has
276280already been set.
@@ -282,9 +286,38 @@ MOI.get(model::Optimizer, ::MOI.ObjectiveSense) = model.sense
282286
283287Set the objective function as `func` for `model`.
284288"""
285- function MOI. set (model:: Optimizer , :: MOI.ObjectiveFunction{F} , func:: F ) where {F}
function MOI.set(
    model::Optimizer, attr::MOI.ObjectiveFunction, func::MOI.AbstractScalarFunction
)
    # Build a throwaway nonlinear model holding just this objective, and an AD
    # evaluator (sparse reverse mode) over the model's decision variables.
    nlp_model = MOI.Nonlinear.Model()
    MOI.Nonlinear.set_objective(nlp_model, convert(MOI.ScalarNonlinearFunction, func))
    variables = [MOI.VariableIndex(i) for i in eachindex(model.variable_primal_start)]
    evaluator = MOI.Nonlinear.Evaluator(
        nlp_model, MOI.Nonlinear.SparseReverseMode(), variables
    )
    MOI.initialize(evaluator, [:Grad])
    # Objective value callback; Manopt minimizes, so flip the sign for MAX_SENSE.
    function value_cb(M, x)
        value = MOI.eval_objective(evaluator, JuMP.vectorize(x, _shape(model.manifold)))
        return model.sense == MOI.MAX_SENSE ? -value : value
    end
    # Gradient callback: evaluate the Euclidean AD gradient, flip its sign for
    # MAX_SENSE, then convert it to a Riemannian gradient on the manifold.
    function gradient_cb(M, X)
        flat_x = JuMP.vectorize(X, _shape(model.manifold))
        euclidean_grad = zeros(length(flat_x))
        MOI.eval_objective_gradient(evaluator, euclidean_grad, flat_x)
        if model.sense == MOI.MAX_SENSE
            LinearAlgebra.rmul!(euclidean_grad, -1)
        end
        egrad = JuMP.reshape_vector(euclidean_grad, _shape(model.manifold))
        return ManifoldDiff.riemannian_gradient(model.manifold, X, egrad)
    end
    # Wrap the callbacks in a Manopt objective and re-set it through the
    # `RiemannianFunction` method, which stores it and invalidates cached state.
    wrapped = RiemannianFunction(Manopt.ManifoldGradientObjective(value_cb, gradient_cb))
    MOI.set(model, MOI.ObjectiveFunction{typeof(wrapped)}(), wrapped)
    return nothing
end
318+
319+ function MOI. set (model:: Optimizer , :: MOI.ObjectiveFunction , func:: RiemannianFunction )
320+ model. objective = func. func
288321 model. problem = nothing
289322 model. state = nothing
290323 return nothing
@@ -304,36 +337,11 @@ function MOI.optimize!(model::Optimizer)
304337 model. variable_primal_start[i]
305338 end for i in eachindex (model. variable_primal_start)
306339 ]
307- backend = MOI. Nonlinear. SparseReverseMode ()
308- vars = [MOI. VariableIndex (i) for i in eachindex (model. variable_primal_start)]
309- evaluator = MOI. Nonlinear. Evaluator (model. nlp_model, backend, vars)
310- MOI. initialize (evaluator, [:Grad ])
311- function eval_f_cb (M, x)
312- if model. sense == MOI. FEASIBILITY_SENSE
313- return 0.0
314- end
315- obj = MOI. eval_objective (evaluator, JuMP. vectorize (x, _shape (model. manifold)))
316- if model. sense == MOI. MAX_SENSE
317- obj = - obj
318- end
319- return obj
320- end
321- function eval_grad_f_cb (M, X)
322- x = JuMP. vectorize (X, _shape (model. manifold))
323- grad_f = zeros (length (x))
324- if model. sense == MOI. FEASIBILITY_SENSE
325- grad_f .= zero (eltype (grad_f))
326- else
327- MOI. eval_objective_gradient (evaluator, grad_f, x)
328- end
329- if model. sense == MOI. MAX_SENSE
330- LinearAlgebra. rmul! (grad_f, - 1 )
331- end
332- reshaped_grad_f = JuMP. reshape_vector (grad_f, _shape (model. manifold))
333- return ManifoldDiff. riemannian_gradient (model. manifold, X, reshaped_grad_f)
340+ objective = model. objective
341+ if model. sense == MOI. FEASIBILITY_SENSE
342+ objective = Manopt. ManifoldGradientObjective ((_, _) -> 0.0 , ManifoldsBase. zero_vector)
334343 end
335- mgo = Manopt. ManifoldGradientObjective (eval_f_cb, eval_grad_f_cb)
336- dmgo = decorate_objective! (model. manifold, mgo)
344+ dmgo = decorate_objective! (model. manifold, objective)
337345 model. problem = DefaultManoptProblem (model. manifold, dmgo)
338346 reshaped_start = JuMP. reshape_vector (start, _shape (model. manifold))
339347 descent_state_type = model. options[DESCENT_STATE_TYPE]