🤖 Format .jl files (#250)
Co-authored-by: amontoison <[email protected]>
github-actions[bot] and amontoison authored Jun 18, 2024
1 parent c1b0919 commit aed73fc
Showing 2 changed files with 12 additions and 13 deletions.
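This is an automated run of JuliaFormatter over the repository's .jl files. As a minimal sketch (assuming the package's default formatter settings), the bot's output can be reproduced locally:

using JuliaFormatter

# Rewrites every .jl file under benchmark/ in place and returns
# true when all files were already formatted.
format("benchmark")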
benchmark/gradient/benchmarks_gradient.jl (9 changes: 4 additions & 5 deletions)
@@ -7,7 +7,7 @@ We test here the function `grad!` for ADNLPModels with different backends:
 - ADNLPModels.EnzymeADGradient (use Enzyme.jl);
 - ADNLPModels.ZygoteADGradient (use Zygote.jl).
 =#
-using ReverseDiff, Zygote, ForwardDiff, Enzyme
+using ReverseDiff, Zygote, ForwardDiff, Enzyme

 include("additional_backends.jl")

@@ -29,9 +29,7 @@ benchmarked_generic_gradient_backend = Dict(
 get_backend_list(::Val{:generic}) = keys(benchmarked_generic_gradient_backend)
 get_backend(::Val{:generic}, b::String) = benchmarked_generic_gradient_backend[b]

-problem_sets = Dict(
-  "scalable" => scalable_problems,
-)
+problem_sets = Dict("scalable" => scalable_problems)
 nscal = 1000

 @info "Initialize grad! benchmark"
@@ -51,7 +49,8 @@ for f in [:optimized, :generic]
       m = eval(Meta.parse("OptimizationProblems.get_" * pb * "_ncon(n = $(nscal))"))
       @info " $(pb): $T with $n vars and $m cons"
       g = zeros(T, n)
-      SUITE["grad!"][f][T][s][b][pb] = @benchmarkable grad!(nlp, get_x0(nlp), $g) setup=(nlp = set_adnlp($pb, "gradient_backend", $(backend), $nscal, $T))
+      SUITE["grad!"][f][T][s][b][pb] = @benchmarkable grad!(nlp, get_x0(nlp), $g) setup =
+        (nlp = set_adnlp($pb, "gradient_backend", $(backend), $nscal, $T))
     end
   end
 end
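The change above is pure line wrapping: `setup` is an option of BenchmarkTools' `@benchmarkable`, and moving the parenthesized expression to the next line does not change what is measured. A toy sketch of the same pattern (the vector `g` and its size are illustrative, not taken from the suite):

using BenchmarkTools

g = zeros(100)
# `setup` runs before the measurement and binds `x` for the benchmark
# body, just as `nlp` is bound via `set_adnlp` in the suite above.
b = @benchmarkable copyto!($g, x) setup = (x = rand(100))
run(b)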
benchmark/problems_sets.jl (16 changes: 8 additions & 8 deletions)
@@ -51,8 +51,8 @@ function set_adnlp(
       push!(backend_structure, k => all_backend_structure[k])
     end
   end
-  return OptimizationProblems.ADNLPProblems.eval(pbs)(
-    ;type = Val(T),
+  return OptimizationProblems.ADNLPProblems.eval(pbs)(;
+    type = Val(T),
     n = n,
     gradient_backend = backend_structure["gradient_backend"],
     hprod_backend = backend_structure["hprod_backend"],
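Here the formatter only moves the semicolon that introduces keyword arguments onto the opening line of the call. Both spellings parse identically; a small sketch with a hypothetical function (`build` is made up, not part of the package):

# Made-up example, illustrating only the semicolon placement.
build(; type = Float64, n = 10) = zeros(type, n)

# Equivalent calls: the semicolon just marks where keyword
# arguments begin, so `(;` can sit on the opening line.
build(; type = Float32, n = 5)
build(;
  type = Float32,
  n = 5,
)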
@@ -65,12 +65,12 @@
 end

 function set_problem(
-    pb::String,
-    test_back::String,
-    backend::String,
-    s::String,
-    n::Integer = nn,
-    T::DataType = Float64,
+  pb::String,
+  test_back::String,
+  backend::String,
+  s::String,
+  n::Integer = nn,
+  T::DataType = Float64,
 )
   nlp = if backend == "jump"
     model = OptimizationProblems.PureJuMP.eval(Meta.parse(pb))(n = n)
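This last hunk is an indentation-only rewrite of the argument list. A sketch of the normalization using JuliaFormatter's `format_text` on an isolated snippet (the pre-format indentation shown is a guess; the diff above does not preserve whitespace):

using JuliaFormatter

src = """
function set_problem(
        pb::String,
        n::Integer = 100,
)
  return pb, n
end
"""
# Returns the source rewritten with the 2-space argument
# indentation applied throughout this commit.
print(format_text(src))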
