diff --git a/src/MOI_wrapper.jl b/src/MOI_wrapper.jl
index 726d658..2a112af 100644
--- a/src/MOI_wrapper.jl
+++ b/src/MOI_wrapper.jl
@@ -624,6 +624,10 @@ MOI.get(model::Optimizer, ::MOI.NLPBlockDualStart) = model.nlp_dual_start
 
 MOI.supports(::Optimizer, ::MOI.NLPBlock) = true
 
+# This may also be set by `optimize!` and contain the block created from
+# ScalarNonlinearFunction
+MOI.get(model::Optimizer, ::MOI.NLPBlock) = model.nlp_data
+
 function MOI.set(model::Optimizer, ::MOI.NLPBlock, nlp_data::MOI.NLPBlockData)
     model.nlp_data = nlp_data
     model.inner = nothing
diff --git a/test/MOI_wrapper.jl b/test/MOI_wrapper.jl
index aff2178..fbbb9a5 100644
--- a/test/MOI_wrapper.jl
+++ b/test/MOI_wrapper.jl
@@ -168,6 +168,7 @@ function test_check_derivatives_for_naninf()
     # MOI.set(model, MOI.RawOptimizerAttribute("check_derivatives_for_naninf"), "no")
     MOI.optimize!(model)
     @test MOI.get(model, MOI.TerminationStatus()) == MOI.INVALID_MODEL
+    @test MOI.get(model, MOI.NLPBlock()) isa MOI.NLPBlockData
     return
 end
 
@@ -359,6 +360,18 @@ function test_scalar_nonlinear_function_is_valid()
     return
 end
 
+function test_scalar_nonlinear_function_nlp_block()
+    model = Ipopt.Optimizer()
+    x = MOI.add_variable(model)
+    f = MOI.ScalarNonlinearFunction(:^, Any[x, 4])
+    MOI.add_constraint(model, f, MOI.LessThan(1.0))
+    MOI.optimize!(model)
+    block = MOI.get(model, MOI.NLPBlock())
+    @test !block.has_objective
+    @test block.evaluator isa MOI.Nonlinear.Evaluator
+    return
+end
+
 function test_parameter()
     model = Ipopt.Optimizer()
     MOI.set(model, MOI.Silent(), true)