diff --git a/ext/OptimizationEnzymeExt.jl b/ext/OptimizationEnzymeExt.jl index 6b49863..7adae59 100644 --- a/ext/OptimizationEnzymeExt.jl +++ b/ext/OptimizationEnzymeExt.jl @@ -58,7 +58,8 @@ function inner_cons(x, fcons::Function, p::Union{SciMLBase.NullParameters, Nothi end function cons_f2(x, dx, fcons, p, num_cons, i) - Enzyme.autodiff_deferred(Enzyme.Reverse, Const(inner_cons), Active, Enzyme.Duplicated(x, dx), + Enzyme.autodiff_deferred( + Enzyme.Reverse, Const(inner_cons), Active, Enzyme.Duplicated(x, dx), Const(fcons), Const(p), Const(num_cons), Const(i)) return nothing end @@ -83,7 +84,8 @@ function lagrangian(x, _f::Function, cons::Function, p, λ, σ = one(eltype(x))) end function lag_grad(x, dx, lagrangian::Function, _f::Function, cons::Function, p, σ, λ) - Enzyme.autodiff_deferred(Enzyme.Reverse, Const(lagrangian), Active, Enzyme.Duplicated(x, dx), + Enzyme.autodiff_deferred( + Enzyme.Reverse, Const(lagrangian), Active, Enzyme.Duplicated(x, dx), Const(_f), Const(cons), Const(p), Const(λ), Const(σ)) return nothing end diff --git a/ext/OptimizationZygoteExt.jl b/ext/OptimizationZygoteExt.jl index 09185f9..f31be4b 100644 --- a/ext/OptimizationZygoteExt.jl +++ b/ext/OptimizationZygoteExt.jl @@ -24,7 +24,6 @@ function OptimizationBase.instantiate_function( g = false, h = false, hv = false, fg = false, fgh = false, cons_j = false, cons_vjp = false, cons_jvp = false, cons_h = false, lag_h = false) - adtype, soadtype = OptimizationBase.generate_adtype(adtype) if g == true && f.grad === nothing @@ -83,12 +82,14 @@ function OptimizationBase.instantiate_function( if fgh == true && f.fgh === nothing function fgh!(G, H, θ) - (y, _, _) = value_derivative_and_second_derivative!(f.f, G, H, prep_hess, soadtype, θ, Constant(p)) + (y, _, _) = value_derivative_and_second_derivative!( + f.f, G, H, prep_hess, soadtype, θ, Constant(p)) return y end if p !== SciMLBase.NullParameters() && p !== nothing function fgh!(G, H, θ, p) - (y, _, _) = 
value_derivative_and_second_derivative!(f.f, G, H, prep_hess, soadtype, θ, Constant(p)) + (y, _, _) = value_derivative_and_second_derivative!( + f.f, G, H, prep_hess, soadtype, θ, Constant(p)) return y end end @@ -180,7 +181,8 @@ function OptimizationBase.instantiate_function( conshess_sparsity = f.cons_hess_prototype conshess_colors = f.cons_hess_colorvec if cons !== nothing && cons_h == true && f.cons_h === nothing - prep_cons_hess = [prepare_hessian(cons_oop, soadtype, x, Constant(i)) for i in 1:num_cons] + prep_cons_hess = [prepare_hessian(cons_oop, soadtype, x, Constant(i)) + for i in 1:num_cons] function cons_h!(H, θ) for i in 1:num_cons @@ -197,7 +199,8 @@ function OptimizationBase.instantiate_function( if f.lag_h === nothing && cons !== nothing && lag_h == true lag_extras = prepare_hessian( - lagrangian, soadtype, x, Constant(one(eltype(x))), Constant(ones(eltype(x), num_cons)), Constant(p)) + lagrangian, soadtype, x, Constant(one(eltype(x))), + Constant(ones(eltype(x), num_cons)), Constant(p)) lag_hess_prototype = zeros(Bool, num_cons, length(x)) function lag_h!(H::AbstractMatrix, θ, σ, λ) @@ -205,12 +208,14 @@ function OptimizationBase.instantiate_function( cons_h!(H, θ) H *= λ else - hessian!(lagrangian, H, lag_extras, soadtype, θ, Constant(σ), Constant(λ), Constant(p)) + hessian!(lagrangian, H, lag_extras, soadtype, θ, + Constant(σ), Constant(λ), Constant(p)) end end function lag_h!(h::AbstractVector, θ, σ, λ) - H = hessian(lagrangian, lag_extras, soadtype, θ, Constant(σ), Constant(λ), Constant(p)) + H = hessian( + lagrangian, lag_extras, soadtype, θ, Constant(σ), Constant(λ), Constant(p)) k = 0 for i in 1:length(θ) for j in 1:i @@ -226,12 +231,14 @@ function OptimizationBase.instantiate_function( cons_h(H, θ) H *= λ else - hessian!(lagrangian, H, lag_extras, soadtype, θ, Constant(σ), Constant(λ), Constant(p)) + hessian!(lagrangian, H, lag_extras, soadtype, θ, + Constant(σ), Constant(λ), Constant(p)) end end function lag_h!(h::AbstractVector, θ, σ, λ, 
p) - H = hessian(lagrangian, lag_extras, soadtype, θ, Constant(σ), Constant(λ), Constant(p)) + H = hessian(lagrangian, lag_extras, soadtype, θ, + Constant(σ), Constant(λ), Constant(p)) k = 0 for i in 1:length(θ) for j in 1:i @@ -303,12 +310,14 @@ function OptimizationBase.instantiate_function( extras_grad = prepare_gradient(_f, adtype.dense_ad, x, Constant(p)) end function fg!(res, θ) - (y, _) = value_and_gradient!(_f, res, extras_grad, adtype.dense_ad, θ, Constant(p)) + (y, _) = value_and_gradient!( + _f, res, extras_grad, adtype.dense_ad, θ, Constant(p)) return y end if p !== SciMLBase.NullParameters() && p !== nothing function fg!(res, θ, p) - (y, _) = value_and_gradient!(_f, res, extras_grad, adtype.dense_ad, θ, Constant(p)) + (y, _) = value_and_gradient!( + _f, res, extras_grad, adtype.dense_ad, θ, Constant(p)) return y end end @@ -341,13 +350,15 @@ function OptimizationBase.instantiate_function( if fgh == true && f.fgh === nothing function fgh!(G, H, θ) - (y, _, _) = value_derivative_and_second_derivative!(_f, G, H, θ, prep_hess, soadtype, Constant(p)) + (y, _, _) = value_derivative_and_second_derivative!( + _f, G, H, prep_hess, soadtype, θ, Constant(p)) return y end if p !== SciMLBase.NullParameters() && p !== nothing function fgh!(G, H, θ, p) - (y, _, _) = value_derivative_and_second_derivative!(_f, G, H, θ, prep_hess, soadtype, Constant(p)) + (y, _, _) = value_derivative_and_second_derivative!( + _f, G, H, prep_hess, soadtype, θ, Constant(p)) return y end end @@ -419,7 +430,8 @@ function OptimizationBase.instantiate_function( extras_pullback = prepare_pullback( cons_oop, adtype.dense_ad, x, (ones(eltype(x), num_cons),), Constant(p)) function cons_vjp!(J, θ, v) - pullback!(cons_oop, (J,), extras_pullback, adtype.dense_ad, θ, (v,), Constant(p)) + pullback!( + cons_oop, (J,), extras_pullback, adtype.dense_ad, θ, (v,), Constant(p)) end elseif cons_vjp == true cons_vjp!
= (J, θ, v) -> f.cons_vjp(J, θ, v, p) @@ -431,7 +443,8 @@ function OptimizationBase.instantiate_function( extras_pushforward = prepare_pushforward( cons_oop, adtype.dense_ad, x, (ones(eltype(x), length(x)),), Constant(p)) function cons_jvp!(J, θ, v) - pushforward!(cons_oop, (J,), extras_pushforward, adtype.dense_ad, θ, (v,), Constant(p)) + pushforward!( + cons_oop, (J,), extras_pushforward, adtype.dense_ad, θ, (v,), Constant(p)) end elseif cons_jvp == true cons_jvp! = (J, θ, v) -> f.cons_jvp(J, θ, v, p) @@ -442,7 +455,8 @@ function OptimizationBase.instantiate_function( conshess_sparsity = f.cons_hess_prototype conshess_colors = f.cons_hess_colorvec if cons !== nothing && f.cons_h === nothing && cons_h == true - prep_cons_hess = [prepare_hessian(cons_oop, soadtype, x, Constant(i)) for i in 1:num_cons] + prep_cons_hess = [prepare_hessian(cons_oop, soadtype, x, Constant(i)) + for i in 1:num_cons] colores = getfield.(prep_cons_hess, :coloring_result) conshess_sparsity = getfield.(colores, :S) conshess_colors = getfield.(colores, :color) @@ -461,7 +475,8 @@ function OptimizationBase.instantiate_function( lag_hess_colors = f.lag_hess_colorvec if cons !== nothing && f.lag_h === nothing && lag_h == true lag_extras = prepare_hessian( - lagrangian, soadtype, x, Constant(one(eltype(x))), Constant(ones(eltype(x), num_cons)), Constant(p)) + lagrangian, soadtype, x, Constant(one(eltype(x))), + Constant(ones(eltype(x), num_cons)), Constant(p)) lag_hess_prototype = lag_extras.coloring_result.S lag_hess_colors = lag_extras.coloring_result.color @@ -470,12 +485,14 @@ function OptimizationBase.instantiate_function( cons_h(H, θ) H *= λ else - hessian!(lagrangian, H, lag_extras, soadtype, θ, Constant(σ), Constant(λ), Constant(p)) + hessian!(lagrangian, H, lag_extras, soadtype, θ, + Constant(σ), Constant(λ), Constant(p)) end end function lag_h!(h, θ, σ, λ) - H = hessian(lagrangian, lag_extras, soadtype, θ, Constant(σ), Constant(λ), Constant(p)) + H = hessian( + lagrangian, lag_extras, 
soadtype, θ, Constant(σ), Constant(λ), Constant(p)) k = 0 rows, cols, _ = findnz(H) for (i, j) in zip(rows, cols) @@ -492,12 +509,14 @@ function OptimizationBase.instantiate_function( cons_h!(H, θ) H *= λ else - hessian!(lagrangian, H, lag_extras, soadtype, θ, Constant(σ), Constant(λ), Constant(p)) + hessian!(lagrangian, H, lag_extras, soadtype, θ, + Constant(σ), Constant(λ), Constant(p)) end end function lag_h!(h::AbstractVector, θ, σ, λ, p) - H = hessian(lagrangian, lag_extras, soadtype, θ, Constant(σ), Constant(λ), Constant(p)) + H = hessian(lagrangian, lag_extras, soadtype, θ, + Constant(σ), Constant(λ), Constant(p)) k = 0 for i in 1:length(θ) for j in 1:i diff --git a/src/OptimizationDIExt.jl b/src/OptimizationDIExt.jl index 3cdbba9..1a19570 100644 --- a/src/OptimizationDIExt.jl +++ b/src/OptimizationDIExt.jl @@ -31,11 +31,10 @@ function instantiate_function( g = false, h = false, hv = false, fg = false, fgh = false, cons_j = false, cons_vjp = false, cons_jvp = false, cons_h = false, lag_h = false) - adtype, soadtype = generate_adtype(adtype) if g == true && f.grad === nothing - prep_grad = prepare_gradient(f.f, adtype, x, Constant(p)) + prep_grad = prepare_gradient(f.f, adtype, x, Constant(p)) function grad(res, θ) gradient!(f.f, res, prep_grad, adtype, θ, Constant(p)) end @@ -183,7 +182,8 @@ function instantiate_function( conshess_sparsity = f.cons_hess_prototype conshess_colors = f.cons_hess_colorvec if f.cons !== nothing && f.cons_h === nothing && cons_h == true - prep_cons_hess = [prepare_hessian(cons_oop, soadtype, x, Constant(i)) for i in 1:num_cons] + prep_cons_hess = [prepare_hessian(cons_oop, soadtype, x, Constant(i)) + for i in 1:num_cons] function cons_h!(H, θ) for i in 1:num_cons @@ -200,7 +200,8 @@ function instantiate_function( if f.cons !== nothing && lag_h == true && f.lag_h === nothing lag_prep = prepare_hessian( - lagrangian, soadtype, x, Constant(one(eltype(x))), Constant(ones(eltype(x), num_cons)), Constant(p)) + lagrangian, soadtype, x, 
Constant(one(eltype(x))), + Constant(ones(eltype(x), num_cons)), Constant(p)) lag_hess_prototype = zeros(Bool, num_cons, length(x)) function lag_h!(H::AbstractMatrix, θ, σ, λ) @@ -208,12 +209,14 @@ function instantiate_function( cons_h!(H, θ) H *= λ else - hessian!(lagrangian, H, lag_prep, soadtype, θ, Constant(σ), Constant(λ), Constant(p)) + hessian!(lagrangian, H, lag_prep, soadtype, θ, + Constant(σ), Constant(λ), Constant(p)) end end function lag_h!(h::AbstractVector, θ, σ, λ) - H = hessian(lagrangian, lag_prep, soadtype, θ, Constant(σ), Constant(λ), Constant(p)) + H = hessian( + lagrangian, lag_prep, soadtype, θ, Constant(σ), Constant(λ), Constant(p)) k = 0 for i in 1:length(θ) for j in 1:i @@ -229,12 +232,14 @@ function instantiate_function( cons_h!(H, θ) H *= λ else - hessian!(lagrangian, H, lag_prep, soadtype, θ, Constant(σ), Constant(λ), Constant(p)) + hessian!(lagrangian, H, lag_prep, soadtype, θ, + Constant(σ), Constant(λ), Constant(p)) end end function lag_h!(h::AbstractVector, θ, σ, λ, p) - H = hessian(lagrangian, lag_prep, soadtype, θ, Constant(σ), Constant(λ), Constant(p)) + H = hessian(lagrangian, lag_prep, soadtype, θ, + Constant(σ), Constant(λ), Constant(p)) k = 0 for i in 1:length(θ) for j in 1:i @@ -341,12 +346,14 @@ function instantiate_function( if fgh == true && f.fgh === nothing function fgh!(θ) - (y, G, H) = value_derivative_and_second_derivative(f.f, prep_hess, adtype, θ, Constant(p)) + (y, G, H) = value_derivative_and_second_derivative( + f.f, prep_hess, adtype, θ, Constant(p)) return y, G, H end if p !== SciMLBase.NullParameters() && p !== nothing function fgh!(θ, p) - (y, G, H) = value_derivative_and_second_derivative(f.f, prep_hess, adtype, θ, Constant(p)) + (y, G, H) = value_derivative_and_second_derivative( + f.f, prep_hess, adtype, θ, Constant(p)) return y, G, H end end @@ -396,7 +403,8 @@ function instantiate_function( end if f.cons_vjp === nothing && cons_vjp == true && f.cons !== nothing - prep_pullback = prepare_pullback(f.cons, 
adtype, x, (ones(eltype(x), num_cons),), Constant(p)) + prep_pullback = prepare_pullback( + f.cons, adtype, x, (ones(eltype(x), num_cons),), Constant(p)) function cons_vjp!(θ, v) return only(pullback(f.cons, prep_pullback, adtype, θ, (v,), Constant(p))) end @@ -424,7 +432,8 @@ function instantiate_function( function cons_i(x, i) return f.cons(x, p)[i] end - prep_cons_hess = [prepare_hessian(cons_i, soadtype, x, Constant(i)) for i in 1:num_cons] + prep_cons_hess = [prepare_hessian(cons_i, soadtype, x, Constant(i)) + for i in 1:num_cons] function cons_h!(θ) H = map(1:num_cons) do i @@ -442,14 +451,16 @@ function instantiate_function( if f.cons !== nothing && lag_h == true && f.lag_h === nothing lag_prep = prepare_hessian( - lagrangian, soadtype, x, Constant(one(eltype(x))), Constant(ones(eltype(x), num_cons)), Constant(p)) + lagrangian, soadtype, x, Constant(one(eltype(x))), + Constant(ones(eltype(x), num_cons)), Constant(p)) lag_hess_prototype = zeros(Bool, num_cons, length(x)) function lag_h!(θ, σ, λ) if σ == zero(eltype(θ)) return λ .* cons_h(θ) else - return hessian(lagrangian, lag_prep, soadtype, θ, Constant(σ), Constant(λ), Constant(p)) + return hessian(lagrangian, lag_prep, soadtype, θ, + Constant(σ), Constant(λ), Constant(p)) end end @@ -458,7 +469,8 @@ function instantiate_function( if σ == zero(eltype(θ)) return λ .* cons_h(θ) else - return hessian(lagrangian, lag_prep, soadtype, θ, Constant(σ), Constant(λ), Constant(p)) + return hessian(lagrangian, lag_prep, soadtype, θ, + Constant(σ), Constant(λ), Constant(p)) end end end diff --git a/src/OptimizationDISparseExt.jl b/src/OptimizationDISparseExt.jl index 59e89c1..5736012 100644 --- a/src/OptimizationDISparseExt.jl +++ b/src/OptimizationDISparseExt.jl @@ -131,12 +131,14 @@ function instantiate_function( prep_grad = prepare_gradient(f.f, adtype.dense_ad, x, Constant(p)) end function fg!(res, θ) - (y, _) = value_and_gradient!(f.f, res, prep_grad, adtype.dense_ad, θ, Constant(p)) + (y, _) = 
value_and_gradient!( + f.f, res, prep_grad, adtype.dense_ad, θ, Constant(p)) return y end if p !== SciMLBase.NullParameters() function fg!(res, θ, p) - (y, _) = value_and_gradient!(f.f, res, prep_grad, adtype.dense_ad, θ, Constant(p)) + (y, _) = value_and_gradient!( + f.f, res, prep_grad, adtype.dense_ad, θ, Constant(p)) return y end end @@ -187,7 +189,8 @@ function instantiate_function( end if hv == true && f.hv === nothing - prep_hvp = prepare_hvp(f.f, soadtype.dense_ad, x, (zeros(eltype(x), size(x)),), Constant(p)) + prep_hvp = prepare_hvp( + f.f, soadtype.dense_ad, x, (zeros(eltype(x), size(x)),), Constant(p)) function hv!(H, θ, v) only(hvp!(f.f, (H,), prep_hvp, soadtype.dense_ad, θ, (v,), Constant(p))) end @@ -265,7 +268,8 @@ function instantiate_function( conshess_sparsity = f.cons_hess_prototype conshess_colors = f.cons_hess_colorvec if f.cons !== nothing && f.cons_h === nothing && cons_h == true - prep_cons_hess = [prepare_hessian(cons_oop, soadtype, x, Constant(i)) for i in 1:num_cons] + prep_cons_hess = [prepare_hessian(cons_oop, soadtype, x, Constant(i)) + for i in 1:num_cons] colores = getfield.(prep_cons_hess, :coloring_result) conshess_sparsity = getfield.(colores, :S) conshess_colors = getfield.(colores, :color) @@ -284,7 +288,8 @@ function instantiate_function( lag_hess_colors = f.lag_hess_colorvec if f.cons !== nothing && lag_h == true && f.lag_h === nothing lag_prep = prepare_hessian( - lagrangian, soadtype, x, Constant(one(eltype(x))), Constant(ones(eltype(x), num_cons)), Constant(p)) + lagrangian, soadtype, x, Constant(one(eltype(x))), + Constant(ones(eltype(x), num_cons)), Constant(p)) lag_hess_prototype = lag_prep.coloring_result.S lag_hess_colors = lag_prep.coloring_result.color @@ -293,12 +298,14 @@ function instantiate_function( cons_h!(H, θ) H *= λ else - hessian!(lagrangian, H, lag_prep, soadtype, θ, Constant(σ), Constant(λ), Constant(p)) + hessian!(lagrangian, H, lag_prep, soadtype, θ, + Constant(σ), Constant(λ), Constant(p)) end end 
function lag_h!(h, θ, σ, λ) - H = hessian(lagrangian, lag_prep, soadtype, θ, Constant(σ), Constant(λ), Constant(p)) + H = hessian( + lagrangian, lag_prep, soadtype, θ, Constant(σ), Constant(λ), Constant(p)) k = 0 rows, cols, _ = findnz(H) for (i, j) in zip(rows, cols) @@ -315,12 +322,14 @@ function instantiate_function( cons_h(H, θ) H *= λ else - hessian!(lagrangian, H, lag_prep, soadtype, θ, Constant(σ), Constant(λ), Constant(p)) + hessian!(lagrangian, H, lag_prep, soadtype, θ, + Constant(σ), Constant(λ), Constant(p)) end end function lag_h!(h, θ, σ, λ, p) - H = hessian(lagrangian, lag_prep, soadtype, θ, Constant(σ), Constant(λ), Constant(p)) + H = hessian(lagrangian, lag_prep, soadtype, θ, + Constant(σ), Constant(λ), Constant(p)) k = 0 rows, cols, _ = findnz(H) for (i, j) in zip(rows, cols) @@ -369,7 +378,6 @@ function instantiate_function( g = false, h = false, hv = false, fg = false, fgh = false, cons_j = false, cons_vjp = false, cons_jvp = false, cons_h = false, lag_h = false) - adtype, soadtype = generate_sparse_adtype(adtype) if g == true && f.grad === nothing @@ -410,7 +418,8 @@ function instantiate_function( if fgh == true && f.fgh === nothing function fgh!(θ) - (y, G, H) = value_derivative_and_second_derivative(f.f, prep_hess, soadtype, θ, Constant(p)) + (y, G, H) = value_derivative_and_second_derivative( + f.f, prep_hess, soadtype, θ, Constant(p)) return y, G, H end @@ -449,7 +458,8 @@ function instantiate_function( end if hv == true && f.hv === nothing - prep_hvp = prepare_hvp(f.f, soadtype.dense_ad, x, (zeros(eltype(x), size(x)),), Constant(p)) + prep_hvp = prepare_hvp( + f.f, soadtype.dense_ad, x, (zeros(eltype(x), size(x)),), Constant(p)) function hv!(θ, v) only(hvp(f.f, prep_hvp, soadtype.dense_ad, θ, (v,), Constant(p))) end @@ -506,7 +516,8 @@ function instantiate_function( prep_pushforward = prepare_pushforward( f.cons, adtype.dense_ad, x, (ones(eltype(x), length(x)),), Constant(p)) function cons_jvp!(θ, v) - only(pushforward(f.cons, 
prep_pushforward, adtype.dense_ad, θ, (v,), Constant(p))) + only(pushforward( + f.cons, prep_pushforward, adtype.dense_ad, θ, (v,), Constant(p))) end elseif cons_jvp === true && f.cons !== nothing cons_jvp! = (θ, v) -> f.cons_jvp(θ, v, p) @@ -520,7 +531,8 @@ function instantiate_function( function cons_i(x, i) f.cons(x, p)[i] end - prep_cons_hess = [prepare_hessian(cons_i, soadtype, x, Constant(i)) for i in 1:num_cons] + prep_cons_hess = [prepare_hessian(cons_i, soadtype, x, Constant(i)) + for i in 1:num_cons] function cons_h!(θ) H = map(1:num_cons) do i @@ -541,12 +553,14 @@ function instantiate_function( lag_hess_colors = f.lag_hess_colorvec if f.cons !== nothing && lag_h == true && f.lag_h === nothing lag_prep = prepare_hessian( - lagrangian, soadtype, x, Constant(one(eltype(x))), Constant(ones(eltype(x), num_cons)), Constant(p)) + lagrangian, soadtype, x, Constant(one(eltype(x))), + Constant(ones(eltype(x), num_cons)), Constant(p)) function lag_h!(θ, σ, λ) if σ == zero(eltype(θ)) return λ .* cons_h!(θ) else - hess = hessian(lagrangian, lag_prep, soadtype, θ, Constant(σ), Constant(λ), Constant(p)) + hess = hessian(lagrangian, lag_prep, soadtype, θ, + Constant(σ), Constant(λ), Constant(p)) return hess end end @@ -558,7 +572,8 @@ function instantiate_function( if σ == zero(eltype(θ)) return λ .* cons_h!(θ) else - hess = hessian(lagrangian, lag_prep, θ, Constant(σ), Constant(λ), Constant(p)) + hess = hessian( + lagrangian, lag_prep, soadtype, θ, Constant(σ), Constant(λ), Constant(p)) return hess end end @@ -570,7 +585,7 @@ function instantiate_function( end return OptimizationFunction{false}(f.f, adtype; grad = grad, fg = fg!, hess = hess, hv = hv!, fgh = fgh!, - cons = Base.Fix2(f.cons, p), cons_j = cons_j!, cons_h = cons_h!, + cons = Base.Fix2(f.cons, p), cons_j = cons_j!, cons_h = cons_h!, cons_vjp = cons_vjp!, cons_jvp = cons_jvp!, hess_prototype = hess_sparsity, hess_colorvec = hess_colors, diff --git a/test/adtests.jl b/test/adtests.jl index 56aa9fa..2872cf1 100644 ---
a/test/adtests.jl +++ b/test/adtests.jl @@ -1110,7 +1110,7 @@ using MLUtils optf = OptimizationBase.instantiate_function( optf, rand(3), AutoForwardDiff(), iterate(data)[1], g = true, fg = true) G0 = zeros(3) - optf.grad(G0, ones(3), (x0,y0)) + optf.grad(G0, ones(3), (x0, y0)) stochgrads = [] i = 0 for (x, y) in data @@ -1159,7 +1159,7 @@ using MLUtils optf = OptimizationBase.instantiate_function( optf, rand(3), AutoEnzyme(), iterate(data)[1], g = true, fg = true) G0 = zeros(3) - @test_broken optf.grad(G0, ones(3), (x,y)) + @test_broken optf.grad(G0, ones(3), (x, y)) stochgrads = [] # for (x,y) in data # G = zeros(3)