From 2f7fcc79aeb8462afc0cbedcd6b9440389e689f0 Mon Sep 17 00:00:00 2001 From: joaquimg Date: Wed, 12 Nov 2025 08:01:56 -0300 Subject: [PATCH 1/3] WIP obj sensib fallbacks for conic and quad --- src/ConicProgram/ConicProgram.jl | 8 +- src/QuadraticProgram/QuadraticProgram.jl | 8 +- src/jump_wrapper.jl | 22 +++++ src/moi_wrapper.jl | 105 ++++++++++++++++++++++- test/jump_wrapper.jl | 97 +++++++++++++++++++++ 5 files changed, 230 insertions(+), 10 deletions(-) diff --git a/src/ConicProgram/ConicProgram.jl b/src/ConicProgram/ConicProgram.jl index abf21231a..8c317600d 100644 --- a/src/ConicProgram/ConicProgram.jl +++ b/src/ConicProgram/ConicProgram.jl @@ -451,14 +451,14 @@ function MOI.get( end function MOI.get(::Model, ::DiffOpt.ForwardObjectiveSensitivity) - return error( - "ForwardObjectiveSensitivity is not implemented for the Conic Optimization backend", + return throw( + MOI.UnsupportedAttribute(DiffOpt.ForwardObjectiveSensitivity()), ) end function MOI.set(::Model, ::DiffOpt.ReverseObjectiveSensitivity, val) - return error( - "ReverseObjectiveSensitivity is not implemented for the Conic Optimization backend", + return throw( + MOI.UnsupportedAttribute(DiffOpt.ReverseObjectiveSensitivity()), ) end diff --git a/src/QuadraticProgram/QuadraticProgram.jl b/src/QuadraticProgram/QuadraticProgram.jl index 1ed588878..cf2da7886 100644 --- a/src/QuadraticProgram/QuadraticProgram.jl +++ b/src/QuadraticProgram/QuadraticProgram.jl @@ -502,14 +502,14 @@ function MOI.set(model::Model, ::LinearAlgebraSolver, linear_solver) end function MOI.get(::Model, ::DiffOpt.ForwardObjectiveSensitivity) - return error( - "ForwardObjectiveSensitivity is not implemented for the Quadratic Optimization backend", + return throw( + MOI.UnsupportedAttribute(DiffOpt.ForwardObjectiveSensitivity()), ) end function MOI.set(::Model, ::DiffOpt.ReverseObjectiveSensitivity, val) - return error( - "ReverseObjectiveSensitivity is not implemented for the Quadratic Optimization backend", + return throw( + 
MOI.UnsupportedAttribute(DiffOpt.ReverseObjectiveSensitivity()), ) end diff --git a/src/jump_wrapper.jl b/src/jump_wrapper.jl index ce26c4a5c..b88f6fd04 100644 --- a/src/jump_wrapper.jl +++ b/src/jump_wrapper.jl @@ -143,3 +143,25 @@ Get the value of a variable output sensitivity for forward mode. function get_forward_variable(model::JuMP.Model, variable::JuMP.VariableRef) return MOI.get(model, ForwardVariablePrimal(), variable) end + + +""" + set_reverse_objective(model::JuMP.Model, value::Number) + +Set the value of the objective input sensitivity for reverse mode. +""" +function set_reverse_objective( + model::JuMP.Model, + value::Number, +) + return MOI.set(model, ReverseObjectiveSensitivity(), value) +end + +""" + get_forward_objective(model::JuMP.Model) + +Get the value of the objective output sensitivity for forward mode. +""" +function get_forward_objective(model::JuMP.Model) + return MOI.get(model, ForwardObjectiveSensitivity()) +end diff --git a/src/moi_wrapper.jl b/src/moi_wrapper.jl index 56cd2096c..d876388a5 100644 --- a/src/moi_wrapper.jl +++ b/src/moi_wrapper.jl @@ -574,11 +574,61 @@ function reverse_differentiate!(model::Optimizer) MOI.set(diff, ReverseConstraintDual(), model.index_map[vi], value) end if !iszero(model.input_cache.dobj) - MOI.set(diff, ReverseObjectiveSensitivity(), model.input_cache.dobj) + if !isempty(model.input_cache.dx) + error( + "Cannot compute the reverse differentiation with both solution sensitivities and objective sensitivities.", + ) + end + try + MOI.set(diff, ReverseObjectiveSensitivity(), model.input_cache.dobj) + catch e + if e isa MOI.UnsupportedAttribute + _fallback_set_reverse_objective_sensitivity(model, model.input_cache.dobj) + else + rethrow(e) + end + end end return reverse_differentiate!(diff) end +function _fallback_set_reverse_objective_sensitivity(model::Optimizer, val) + diff = _diff(model) + obj_type = MOI.get( + model, + MOI.ObjectiveFunctionType(), + ) + obj_func = MOI.get( + model, + 
MOI.ObjectiveFunction{obj_type}(),
+    )
+    for xi in MOI.Nonlinear.SymbolicAD.variables(obj_func)
+        df_dx = MOI.Nonlinear.SymbolicAD.simplify!(
+            MOI.Nonlinear.SymbolicAD.derivative(obj_func, xi),
+        )
+        if iszero(df_dx)
+            continue
+        end
+        dd = 0.0
+        if df_dx isa Number
+            dd = df_dx * val
+        elseif df_dx isa MOI.ScalarAffineFunction{Float64}
+            for term in df_dx.terms
+                xj_val = MOI.get(model, MOI.VariablePrimal(), term.variable)
+                dd += term.coefficient * xj_val * val
+            end
+            dd += df_dx.constant * val
+        else
+            error(
+                "Cannot compute reverse objective sensitivity fallback: " *
+                "unsupported derivative found.",
+            )
+        end
+        MOI.set(diff, ReverseVariablePrimal(), model.index_map[xi], dd)
+    end
+    return
+end
+
 function _copy_forward_in_constraint(diff, index_map, con_map, constraints)
     for (index, value) in constraints
         MOI.set(
@@ -830,7 +880,58 @@ end
 
 function MOI.get(model::Optimizer, attr::ForwardObjectiveSensitivity)
-    return MOI.get(_checked_diff(model, attr, :forward_differentiate!), attr)
+    diff_model = _checked_diff(model, attr, :forward_differentiate!)
+ val = 0.0 + try + val = MOI.get(diff_model, attr) + catch e + if e isa MOI.UnsupportedAttribute + val = _fallback_get_forward_objective_sensitivity(model) + else + rethrow(e) + end + end + return val +end + +function _fallback_get_forward_objective_sensitivity(model::Optimizer) + ret = 0.0 + obj_type = MOI.get( + model, + MOI.ObjectiveFunctionType(), + ) + obj_func = MOI.get( + model, + MOI.ObjectiveFunction{obj_type}(), + ) + for xi in MOI.Nonlinear.SymbolicAD.variables(obj_func) + df_dx = MOI.Nonlinear.SymbolicAD.simplify!( + MOI.Nonlinear.SymbolicAD.derivative(obj_func, xi), + ) + if iszero(df_dx) + continue + end + dx_dp = MOI.get( + model, + ForwardVariablePrimal(), + xi, + ) + if df_dx isa Number + ret += df_dx * dx_dp + elseif df_dx isa MOI.ScalarAffineFunction{Float64} + for term in df_dx.terms + xj_val = MOI.get(model, MOI.VariablePrimal(), term.variable) + ret += term.coefficient * xj_val * dx_dp + end + ret += df_dx.constant * dx_dp + else + error( + "Cannot compute forward objective sensitivity fallback: " * + "unsupported derivative found.", + ) + end + end + return ret end function MOI.supports( diff --git a/test/jump_wrapper.jl b/test/jump_wrapper.jl index 6588ab0c9..489843f25 100644 --- a/test/jump_wrapper.jl +++ b/test/jump_wrapper.jl @@ -29,6 +29,103 @@ function runtests() return end +function test_obj() + + for (MODEL, SOLVER) in [ + (DiffOpt.diff_model, HiGHS.Optimizer), + # (DiffOpt.diff_model, SCS.Optimizer), + # (DiffOpt.diff_model, Ipopt.Optimizer), + # (DiffOpt.quadratic_diff_model, HiGHS.Optimizer), + # (DiffOpt.quadratic_diff_model, SCS.Optimizer), + # (DiffOpt.quadratic_diff_model, Ipopt.Optimizer), + # (DiffOpt.conic_diff_model, HiGHS.Optimizer), + # (DiffOpt.conic_diff_model, SCS.Optimizer), + # (DiffOpt.conic_diff_model, Ipopt.Optimizer), + # (DiffOpt.nonlinear_diff_model, HiGHS.Optimizer), + # (DiffOpt.nonlinear_diff_model, SCS.Optimizer), + # (DiffOpt.nonlinear_diff_model, Ipopt.Optimizer), + ], + # ineq in [true, false], + # _min 
in [true, false], + # flip in [true, false], + with_bridge_type in [Float64, nothing] + + if isnothing(with_bridge_type) && SOLVER === SCS.Optimizer + continue + end + + + MODEL = DiffOpt.diff_model + SOLVER = HiGHS.Optimizer + with_bridge_type = Float64 + ineq = false + _min = true + flip = false + + @testset "$(MODEL) with: $(SOLVER), $(ineq ? "ineqs" : "eqs"), $(_min ? "Min" : "Max"), $(flip ? "geq" : "leq") bridge:$with_bridge_type" begin + model = MODEL(SOLVER; with_bridge_type) + set_silent(model) + + p_val = 4.0 + pc_val = 2.0 + @variable(model, x) + @variable(model, p in Parameter(p_val)) + @variable(model, pc in Parameter(pc_val)) + # if ineq + # if !flip + # cons = @constraint(model, con, pc * x >= 3 * p) + # else + # cons = @constraint(model, con, pc * x <= 3 * p) + # end + # else + cons = @constraint(model, con, pc * x == 3 * p) + # end + # sign = flip ? -1 : 1 + # if _min + # @objective(model, Min, 2x * sign) + # else + # @objective(model, Max, -2x * sign) + # end + + for obj_coef in [-3, 2, 5] + @objective(model, Min, obj_coef * x) + + optimize!(model) + @test value(x) ≈ 3 * p_val / pc_val atol = ATOL rtol = RTOL + + DiffOpt.empty_input_sensitivities!(model) + direction_obj = 2.0 + DiffOpt.set_reverse_objective(model, direction_obj) + DiffOpt.reverse_differentiate!(model) + @test DiffOpt.get_reverse_parameter(model, p) ≈ obj_coef * direction_obj * 3 / pc_val atol = ATOL rtol = RTOL + @test DiffOpt.get_reverse_parameter(model, pc) ≈ -obj_coef * direction_obj * 3 * p_val / (pc_val^2) atol = ATOL rtol = RTOL + + DiffOpt.empty_input_sensitivities!(model) + direction_p = 3.0 + DiffOpt.set_forward_parameter(model, p, direction_p) + DiffOpt.forward_differentiate!(model) + @test DiffOpt.get_forward_objective(model) ≈ obj_coef * direction_p * 3 / pc_val atol = ATOL rtol = RTOL + + # stop differentiating with respect to p + DiffOpt.empty_input_sensitivities!(model) + # differentiate w.r.t. 
pc + direction_pc = 10.0 + DiffOpt.set_forward_parameter(model, pc, direction_pc) + DiffOpt.forward_differentiate!(model) + @test DiffOpt.get_forward_objective(model) ≈ + - obj_coef * direction_pc * 3 * p_val / pc_val^2 atol = ATOL rtol = RTOL + + end + + + end + end + + return +end + +# TODO test quadratic obj + function test_jump_api() for (MODEL, SOLVER) in [ (DiffOpt.diff_model, HiGHS.Optimizer), From 351fdd4a55c499b8e9d22c77032ee17099995f27 Mon Sep 17 00:00:00 2001 From: joaquimg Date: Mon, 1 Dec 2025 02:31:34 -0300 Subject: [PATCH 2/3] adjust tests --- test/jump_wrapper.jl | 133 ++++++++++++++++++++++++++++--------------- 1 file changed, 86 insertions(+), 47 deletions(-) diff --git a/test/jump_wrapper.jl b/test/jump_wrapper.jl index 489843f25..adbb8f1fd 100644 --- a/test/jump_wrapper.jl +++ b/test/jump_wrapper.jl @@ -29,38 +29,79 @@ function runtests() return end -function test_obj() - - for (MODEL, SOLVER) in [ +function test_obj_simple() + + for (MODEL, SOLVER) in [ (DiffOpt.diff_model, HiGHS.Optimizer), - # (DiffOpt.diff_model, SCS.Optimizer), - # (DiffOpt.diff_model, Ipopt.Optimizer), - # (DiffOpt.quadratic_diff_model, HiGHS.Optimizer), - # (DiffOpt.quadratic_diff_model, SCS.Optimizer), - # (DiffOpt.quadratic_diff_model, Ipopt.Optimizer), - # (DiffOpt.conic_diff_model, HiGHS.Optimizer), - # (DiffOpt.conic_diff_model, SCS.Optimizer), - # (DiffOpt.conic_diff_model, Ipopt.Optimizer), - # (DiffOpt.nonlinear_diff_model, HiGHS.Optimizer), - # (DiffOpt.nonlinear_diff_model, SCS.Optimizer), - # (DiffOpt.nonlinear_diff_model, Ipopt.Optimizer), + (DiffOpt.diff_model, SCS.Optimizer), + (DiffOpt.diff_model, Ipopt.Optimizer), ], - # ineq in [true, false], - # _min in [true, false], - # flip in [true, false], + sign in [+1, -1], + sign_p in [-1, +1], + sense in [:Min, :Max], with_bridge_type in [Float64, nothing] if isnothing(with_bridge_type) && SOLVER === SCS.Optimizer continue end + @testset "$(MODEL) with: $(SOLVER), bridge:$with_bridge_type, sign:$sign, sense: 
$sense, sign_p: $sign_p" begin + model = MODEL(SOLVER; with_bridge_type) + set_silent(model) - MODEL = DiffOpt.diff_model - SOLVER = HiGHS.Optimizer - with_bridge_type = Float64 - ineq = false - _min = true - flip = false + p_val = 4.0 + @variable(model, x) + @variable(model, p in Parameter(p_val)) + @constraint(model, con, x == 3 * sign_p * p) + @objective(model, Min, 2 * sign * x) + if sense == :Max + @objective(model, Max, 2 * sign * x) + end + optimize!(model) + @test value(x) ≈ sign_p * 3 * p_val atol = ATOL rtol = RTOL + + DiffOpt.empty_input_sensitivities!(model) + direction_obj = 2.0 + DiffOpt.set_reverse_objective(model, direction_obj) + DiffOpt.reverse_differentiate!(model) + @test DiffOpt.get_reverse_parameter(model, p) ≈ sign_p * sign * 6 * direction_obj atol = ATOL rtol = RTOL + + DiffOpt.empty_input_sensitivities!(model) + direction_p = 3.0 + DiffOpt.set_forward_parameter(model, p, direction_p) + DiffOpt.forward_differentiate!(model) + @test DiffOpt.get_forward_objective(model) ≈ sign_p * sign * 6 * direction_p atol = ATOL rtol = RTOL + + end + end + + return +end + +function test_obj() + + for (MODEL, SOLVER) in [ + (DiffOpt.diff_model, HiGHS.Optimizer), + (DiffOpt.diff_model, SCS.Optimizer), + (DiffOpt.diff_model, Ipopt.Optimizer), + (DiffOpt.quadratic_diff_model, HiGHS.Optimizer), + (DiffOpt.quadratic_diff_model, SCS.Optimizer), + (DiffOpt.quadratic_diff_model, Ipopt.Optimizer), + (DiffOpt.conic_diff_model, HiGHS.Optimizer), + (DiffOpt.conic_diff_model, SCS.Optimizer), + (DiffOpt.conic_diff_model, Ipopt.Optimizer), + (DiffOpt.nonlinear_diff_model, HiGHS.Optimizer), + (DiffOpt.nonlinear_diff_model, SCS.Optimizer), + (DiffOpt.nonlinear_diff_model, Ipopt.Optimizer), + ], + ineq in [true, false], + _min in [true, false], + flip in [true, false], + with_bridge_type in [Float64, nothing] + + if isnothing(with_bridge_type) && SOLVER === SCS.Optimizer + continue + end @testset "$(MODEL) with: $(SOLVER), $(ineq ? "ineqs" : "eqs"), $(_min ? 
"Min" : "Max"), $(flip ? "geq" : "leq") bridge:$with_bridge_type" begin model = MODEL(SOLVER; with_bridge_type) @@ -71,24 +112,25 @@ function test_obj() @variable(model, x) @variable(model, p in Parameter(p_val)) @variable(model, pc in Parameter(pc_val)) - # if ineq - # if !flip - # cons = @constraint(model, con, pc * x >= 3 * p) - # else - # cons = @constraint(model, con, pc * x <= 3 * p) - # end - # else + if ineq + if !flip + cons = @constraint(model, con, pc * x >= 3 * p) + else + cons = @constraint(model, con, pc * x <= 3 * p) + end + else cons = @constraint(model, con, pc * x == 3 * p) - # end - # sign = flip ? -1 : 1 - # if _min - # @objective(model, Min, 2x * sign) - # else - # @objective(model, Max, -2x * sign) - # end + end - for obj_coef in [-3, 2, 5] - @objective(model, Min, obj_coef * x) + for obj_coef in [2, 5] + + sign = flip ? -1 : 1 + dir = _min ? 1 : -1 + if _min + @objective(model, Min, dir * obj_coef * x * sign) + else + @objective(model, Max, dir * obj_coef * x * sign) + end optimize!(model) @test value(x) ≈ 3 * p_val / pc_val atol = ATOL rtol = RTOL @@ -97,27 +139,24 @@ function test_obj() direction_obj = 2.0 DiffOpt.set_reverse_objective(model, direction_obj) DiffOpt.reverse_differentiate!(model) - @test DiffOpt.get_reverse_parameter(model, p) ≈ obj_coef * direction_obj * 3 / pc_val atol = ATOL rtol = RTOL - @test DiffOpt.get_reverse_parameter(model, pc) ≈ -obj_coef * direction_obj * 3 * p_val / (pc_val^2) atol = ATOL rtol = RTOL + @test DiffOpt.get_reverse_parameter(model, p) ≈ dir * sign * obj_coef * direction_obj * 3 / pc_val atol = ATOL rtol = RTOL + @test DiffOpt.get_reverse_parameter(model, pc) ≈ - dir * sign * obj_coef * direction_obj * 3 * p_val / (pc_val^2) atol = ATOL rtol = RTOL DiffOpt.empty_input_sensitivities!(model) direction_p = 3.0 DiffOpt.set_forward_parameter(model, p, direction_p) DiffOpt.forward_differentiate!(model) - @test DiffOpt.get_forward_objective(model) ≈ obj_coef * direction_p * 3 / pc_val atol = ATOL rtol = RTOL 
+ @test DiffOpt.get_forward_objective(model) ≈ dir * sign * obj_coef * direction_p * 3 / pc_val atol = ATOL rtol = RTOL - # stop differentiating with respect to p + # stop differentiating with respect to p DiffOpt.empty_input_sensitivities!(model) # differentiate w.r.t. pc direction_pc = 10.0 DiffOpt.set_forward_parameter(model, pc, direction_pc) DiffOpt.forward_differentiate!(model) @test DiffOpt.get_forward_objective(model) ≈ - - obj_coef * direction_pc * 3 * p_val / pc_val^2 atol = ATOL rtol = RTOL - + - dir * sign * obj_coef * direction_pc * 3 * p_val / pc_val^2 atol = ATOL rtol = RTOL end - - end end From b9d2e2bb5865e23084895001c80764b78ff2aef6 Mon Sep 17 00:00:00 2001 From: joaquimg Date: Mon, 1 Dec 2025 02:36:20 -0300 Subject: [PATCH 3/3] format --- src/jump_wrapper.jl | 6 +----- src/moi_wrapper.jl | 31 +++++++++---------------------- test/jump_wrapper.jl | 25 +++++++++++++++---------- 3 files changed, 25 insertions(+), 37 deletions(-) diff --git a/src/jump_wrapper.jl b/src/jump_wrapper.jl index b88f6fd04..f78efd748 100644 --- a/src/jump_wrapper.jl +++ b/src/jump_wrapper.jl @@ -144,16 +144,12 @@ function get_forward_variable(model::JuMP.Model, variable::JuMP.VariableRef) return MOI.get(model, ForwardVariablePrimal(), variable) end - """ set_reverse_objective(model::JuMP.Model, value::Number) Set the value of the objective input sensitivity for reverse mode. 
""" -function set_reverse_objective( - model::JuMP.Model, - value::Number, -) +function set_reverse_objective(model::JuMP.Model, value::Number) return MOI.set(model, ReverseObjectiveSensitivity(), value) end diff --git a/src/moi_wrapper.jl b/src/moi_wrapper.jl index d876388a5..59a7f4db0 100644 --- a/src/moi_wrapper.jl +++ b/src/moi_wrapper.jl @@ -583,7 +583,10 @@ function reverse_differentiate!(model::Optimizer) MOI.set(diff, ReverseObjectiveSensitivity(), model.input_cache.dobj) catch e if e isa MOI.UnsupportedAttribute - _fallback_set_reverse_objective_sensitivity(model, model.input_cache.dobj) + _fallback_set_reverse_objective_sensitivity( + model, + model.input_cache.dobj, + ) else rethrow(e) end @@ -594,14 +597,8 @@ end function _fallback_set_reverse_objective_sensitivity(model::Optimizer, val) diff = _diff(model) - obj_type = MOI.get( - model, - MOI.ObjectiveFunctionType(), - ) - obj_func = MOI.get( - model, - MOI.ObjectiveFunction{obj_type}(), - ) + obj_type = MOI.get(model, MOI.ObjectiveFunctionType()) + obj_func = MOI.get(model, MOI.ObjectiveFunction{obj_type}()) for xi in MOI.Nonlinear.SymbolicAD.variables(obj_func) df_dx = MOI.Nonlinear.SymbolicAD.simplify!( MOI.Nonlinear.SymbolicAD.derivative(obj_func, xi), @@ -896,14 +893,8 @@ end function _fallback_get_forward_objective_sensitivity(model::Optimizer) ret = 0.0 - obj_type = MOI.get( - model, - MOI.ObjectiveFunctionType(), - ) - obj_func = MOI.get( - model, - MOI.ObjectiveFunction{obj_type}(), - ) + obj_type = MOI.get(model, MOI.ObjectiveFunctionType()) + obj_func = MOI.get(model, MOI.ObjectiveFunction{obj_type}()) for xi in MOI.Nonlinear.SymbolicAD.variables(obj_func) df_dx = MOI.Nonlinear.SymbolicAD.simplify!( MOI.Nonlinear.SymbolicAD.derivative(obj_func, xi), @@ -911,11 +902,7 @@ function _fallback_get_forward_objective_sensitivity(model::Optimizer) if iszero(df_dx) continue end - dx_dp = MOI.get( - model, - ForwardVariablePrimal(), - xi, - ) + dx_dp = MOI.get(model, ForwardVariablePrimal(), xi) if 
df_dx isa Number ret += df_dx * dx_dp elseif df_dx isa MOI.ScalarAffineFunction{Float64} diff --git a/test/jump_wrapper.jl b/test/jump_wrapper.jl index adbb8f1fd..8e48e06dd 100644 --- a/test/jump_wrapper.jl +++ b/test/jump_wrapper.jl @@ -30,7 +30,6 @@ function runtests() end function test_obj_simple() - for (MODEL, SOLVER) in [ (DiffOpt.diff_model, HiGHS.Optimizer), (DiffOpt.diff_model, SCS.Optimizer), @@ -64,14 +63,15 @@ function test_obj_simple() direction_obj = 2.0 DiffOpt.set_reverse_objective(model, direction_obj) DiffOpt.reverse_differentiate!(model) - @test DiffOpt.get_reverse_parameter(model, p) ≈ sign_p * sign * 6 * direction_obj atol = ATOL rtol = RTOL + @test DiffOpt.get_reverse_parameter(model, p) ≈ + sign_p * sign * 6 * direction_obj atol = ATOL rtol = RTOL DiffOpt.empty_input_sensitivities!(model) direction_p = 3.0 DiffOpt.set_forward_parameter(model, p, direction_p) DiffOpt.forward_differentiate!(model) - @test DiffOpt.get_forward_objective(model) ≈ sign_p * sign * 6 * direction_p atol = ATOL rtol = RTOL - + @test DiffOpt.get_forward_objective(model) ≈ + sign_p * sign * 6 * direction_p atol = ATOL rtol = RTOL end end @@ -79,7 +79,6 @@ function test_obj_simple() end function test_obj() - for (MODEL, SOLVER) in [ (DiffOpt.diff_model, HiGHS.Optimizer), (DiffOpt.diff_model, SCS.Optimizer), @@ -123,7 +122,6 @@ function test_obj() end for obj_coef in [2, 5] - sign = flip ? -1 : 1 dir = _min ? 
1 : -1 if _min @@ -139,14 +137,20 @@ function test_obj() direction_obj = 2.0 DiffOpt.set_reverse_objective(model, direction_obj) DiffOpt.reverse_differentiate!(model) - @test DiffOpt.get_reverse_parameter(model, p) ≈ dir * sign * obj_coef * direction_obj * 3 / pc_val atol = ATOL rtol = RTOL - @test DiffOpt.get_reverse_parameter(model, pc) ≈ - dir * sign * obj_coef * direction_obj * 3 * p_val / (pc_val^2) atol = ATOL rtol = RTOL + @test DiffOpt.get_reverse_parameter(model, p) ≈ + dir * sign * obj_coef * direction_obj * 3 / pc_val atol = + ATOL rtol = RTOL + @test DiffOpt.get_reverse_parameter(model, pc) ≈ + - dir * sign * obj_coef * direction_obj * 3 * p_val / + (pc_val^2) atol = ATOL rtol = RTOL DiffOpt.empty_input_sensitivities!(model) direction_p = 3.0 DiffOpt.set_forward_parameter(model, p, direction_p) DiffOpt.forward_differentiate!(model) - @test DiffOpt.get_forward_objective(model) ≈ dir * sign * obj_coef * direction_p * 3 / pc_val atol = ATOL rtol = RTOL + @test DiffOpt.get_forward_objective(model) ≈ + dir * sign * obj_coef * direction_p * 3 / pc_val atol = + ATOL rtol = RTOL # stop differentiating with respect to p DiffOpt.empty_input_sensitivities!(model) @@ -155,7 +159,8 @@ function test_obj() DiffOpt.set_forward_parameter(model, pc, direction_pc) DiffOpt.forward_differentiate!(model) @test DiffOpt.get_forward_objective(model) ≈ - - dir * sign * obj_coef * direction_pc * 3 * p_val / pc_val^2 atol = ATOL rtol = RTOL + - dir * sign * obj_coef * direction_pc * 3 * p_val / + pc_val^2 atol = ATOL rtol = RTOL end end end