Skip to content

Commit

Permalink
Merge pull request #277 from avik-pal/ap/approx_sparsity
Browse files Browse the repository at this point in the history
Patch to use FiniteDiff for ApproximateSparsityDetection
  • Loading branch information
ChrisRackauckas authored Dec 16, 2023
2 parents 84de29e + b6553b8 commit 001a75f
Show file tree
Hide file tree
Showing 6 changed files with 82 additions and 16 deletions.
2 changes: 1 addition & 1 deletion Project.toml
Original file line number Diff line number Diff line change
@@ -1,7 +1,7 @@
name = "SparseDiffTools"
uuid = "47a9eef4-7e08-11e9-0b38-333d64bd3804"
authors = ["Pankaj Mishra <[email protected]>", "Chris Rackauckas <[email protected]>"]
version = "2.14.0"
version = "2.15.0"

[deps]
ADTypes = "47edcb42-4c32-4615-8424-f2b9edc5f35b"
Expand Down
6 changes: 4 additions & 2 deletions ext/SparseDiffToolsEnzymeExt.jl
Original file line number Diff line number Diff line change
@@ -1,14 +1,16 @@
module SparseDiffToolsEnzymeExt

import ArrayInterface: fast_scalar_indexing
import SparseDiffTools: __f̂,
__maybe_copy_x, __jacobian!, __gradient, __gradient!, AutoSparseEnzyme
import SparseDiffTools: __f̂, __maybe_copy_x, __jacobian!, __gradient, __gradient!,
AutoSparseEnzyme, __test_backend_loaded
# FIXME: For Enzyme we currently assume reverse mode
import ADTypes: AutoEnzyme
using Enzyme

using ForwardDiff

# Enzyme is loaded whenever this extension is active, so the backend-loaded
# guard (which errors in the fallback method) becomes a no-op for Enzyme backends.
@inline __test_backend_loaded(::Union{AutoSparseEnzyme, AutoEnzyme}) = nothing

## Satisfying High-Level Interface for Sparse Jacobians
function __gradient(::Union{AutoSparseEnzyme, AutoEnzyme}, f, x, cols)
dx = zero(x)
Expand Down
4 changes: 3 additions & 1 deletion ext/SparseDiffToolsZygoteExt.jl
Original file line number Diff line number Diff line change
@@ -1,7 +1,7 @@
module SparseDiffToolsZygoteExt

using ADTypes, LinearAlgebra, Zygote
import SparseDiffTools: SparseDiffTools, DeivVecTag, AutoDiffVJP
import SparseDiffTools: SparseDiffTools, DeivVecTag, AutoDiffVJP, __test_backend_loaded
import ForwardDiff: ForwardDiff, Dual, partials
import SciMLOperators: update_coefficients, update_coefficients!
import Setfield: @set!
Expand All @@ -12,6 +12,8 @@ import SparseDiffTools: numback_hesvec!,
import SparseDiffTools: __f̂, __jacobian!, __gradient, __gradient!
import ADTypes: AutoZygote, AutoSparseZygote

# Zygote is loaded whenever this extension is active, so the backend-loaded
# guard (which errors in the fallback method) becomes a no-op for Zygote backends.
@inline __test_backend_loaded(::Union{AutoSparseZygote, AutoZygote}) = nothing

## Satisfying High-Level Interface for Sparse Jacobians
function __gradient(::Union{AutoSparseZygote, AutoZygote}, f::F, x, cols) where {F}
_, ∂x, _ = Zygote.gradient(__f̂, f, x, cols)
Expand Down
58 changes: 51 additions & 7 deletions src/highlevel/coloring.jl
Original file line number Diff line number Diff line change
Expand Up @@ -34,28 +34,72 @@ end
## Right now we hardcode it to use `ForwardDiff`
# Approximate the sparsity pattern of the Jacobian of the out-of-place function `f`
# by accumulating `ntrials` dense ForwardDiff Jacobians at random probe points and
# treating every entry that was ever nonzero as structurally nonzero. The detected
# pattern is then handed to `JacPrototypeSparsityDetection` for coloring.
# NOTE: only ForwardDiff is implemented here; other sparse AD choices fall through
# with a one-time warning.
function (alg::ApproximateJacobianSparsity)(ad::AbstractSparseADType, f::F, x; fx = nothing,
        kwargs...) where {F}
    if !(ad isa AutoSparseForwardDiff)
        @warn "$(ad) support for approximate jacobian not implemented. Using ForwardDiff instead." maxlog=1
    end
    @unpack ntrials, rng = alg
    fx = fx === nothing ? f(x) : fx
    cfg = ForwardDiff.JacobianConfig(f, x)
    # Accumulator for |J| over all trials; was accidentally allocated twice in the
    # merged diff — a single allocation suffices.
    J = fill!(similar(fx, length(fx), length(x)), 0)
    J_cache = similar(J)
    x_ = similar(x)
    for _ in 1:ntrials
        randn!(rng, x_)
        ForwardDiff.jacobian!(J_cache, f, x_, cfg)
        @. J += abs(J_cache)
    end
    return (JacPrototypeSparsityDetection(; jac_prototype = sparse(J), alg.alg))(ad, f, x;
        fx, kwargs...)
end

# In-place variant of the approximate sparsity detection: `f` writes its result
# into `fx`. The pattern is the union of nonzeros of `ntrials` dense ForwardDiff
# Jacobians evaluated at random probe points, forwarded to
# `JacPrototypeSparsityDetection` for coloring.
function (alg::ApproximateJacobianSparsity)(ad::AbstractSparseADType, f::F, fx, x;
        kwargs...) where {F}
    if !(ad isa AutoSparseForwardDiff)
        @warn "$(ad) support for approximate jacobian not implemented. Using ForwardDiff instead." maxlog=1
    end
    @unpack ntrials, rng = alg
    jac_config = ForwardDiff.JacobianConfig(f, fx, x)
    # Accumulate absolute Jacobian values across all probe points.
    J_accum = fill!(similar(fx, length(fx), length(x)), 0)
    J_trial = similar(J_accum)
    x_probe = similar(x)
    for _ in 1:ntrials
        randn!(rng, x_probe)
        ForwardDiff.jacobian!(J_trial, f, fx, x_probe, jac_config)
        @. J_accum += abs(J_trial)
    end
    detection = JacPrototypeSparsityDetection(; jac_prototype = sparse(J_accum), alg.alg)
    return detection(ad, f, x; fx, kwargs...)
end

# FiniteDiff specialization of approximate sparsity detection (out-of-place `f`).
# Because finite differences are noisy, entries are thresholded: anything with
# |J| < ε is treated as a structural zero. ε comes from `alg.epsilon`, defaulting
# to `100 * eps(eltype(x))` when unset (see the `ApproximateJacobianSparsity` docs).
function (alg::ApproximateJacobianSparsity)(ad::AutoSparseFiniteDiff, f::F, x; fx = nothing,
        kwargs...) where {F}
    @unpack ntrials, rng = alg
    fx = fx === nothing ? f(x) : fx
    cache = FiniteDiff.JacobianCache(x, fx)
    J = fill!(similar(fx, length(fx), length(x)), 0)
    x_ = similar(x)
    ε = ifelse(alg.epsilon === nothing, eps(eltype(x)) * 100, alg.epsilon)
    for _ in 1:ntrials
        randn!(rng, x_)
        # BUGFIX: probe at the random point `x_`, not the fixed input `x` —
        # otherwise every trial evaluates the same Jacobian and the multi-trial
        # sampling is useless.
        J_cache = FiniteDiff.finite_difference_jacobian(f, x_, cache)
        @. J += (abs(J_cache) ≥ ε) # hedge against numerical issues
    end
    return (JacPrototypeSparsityDetection(; jac_prototype = sparse(J), alg.alg))(ad, f, x;
        fx, kwargs...)
end

# FiniteDiff specialization of approximate sparsity detection (in-place `f!`,
# writing into `fx`). Entries with |J| < ε are treated as structural zeros to
# guard against finite-difference noise; ε defaults to `100 * eps(eltype(x))`
# when `alg.epsilon === nothing`.
function (alg::ApproximateJacobianSparsity)(ad::AutoSparseFiniteDiff, f!::F, fx, x;
        kwargs...) where {F}
    @unpack ntrials, rng = alg
    cache = FiniteDiff.JacobianCache(x, fx)
    J = fill!(similar(fx, length(fx), length(x)), 0)
    J_cache = similar(J)
    # Probe buffer is hoisted out of the loop; `randn!` refills it each trial.
    x_ = similar(x)
    ε = ifelse(alg.epsilon === nothing, eps(eltype(x)) * 100, alg.epsilon)
    for _ in 1:ntrials
        randn!(rng, x_)
        FiniteDiff.finite_difference_jacobian!(J_cache, f!, x_, cache)
        @. J += (abs(J_cache) ≥ ε) # hedge against numerical issues
    end
    return (JacPrototypeSparsityDetection(; jac_prototype = sparse(J), alg.alg))(ad, f!, fx,
        x; kwargs...)
end
Expand Down
26 changes: 21 additions & 5 deletions src/highlevel/common.jl
Original file line number Diff line number Diff line change
Expand Up @@ -114,7 +114,7 @@ end

"""
ApproximateJacobianSparsity(; ntrials = 3, rng = Random.default_rng(),
    epsilon = nothing, alg = GreedyD1Color())
Use `ntrials` random vectors to compute the sparsity pattern of the Jacobian. This is an
approximate method and the sparsity pattern may not be exact.
Expand All @@ -124,17 +124,21 @@ approximate method and the sparsity pattern may not be exact.
- `ntrials`: The number of random vectors to use for computing the sparsity pattern
- `rng`: The random number generator used for generating the random vectors
- `alg`: The algorithm used for computing the matrix colors
- `epsilon`: For Finite Differencing based Jacobian Approximations, any number smaller
than `epsilon` is considered to be zero. If `nothing` is specified, then this value
is calculated as `100 * eps(eltype(x))`
"""
# Sparsity detection algorithm that estimates the Jacobian structure from random
# probe evaluations; configured through the keyword constructor documented above.
struct ApproximateJacobianSparsity{R <: AbstractRNG, A <: ArrayInterface.ColoringAlgorithm,
    E} <: AbstractSparsityDetection
    ntrials::Int  # number of random probe points used to detect nonzeros
    rng::R        # RNG drawing the probe points
    alg::A        # coloring algorithm applied to the detected pattern
    epsilon::E    # finite-diff zero threshold; `nothing` => 100 * eps(eltype(x))
end

# Keyword constructor with the documented defaults. `epsilon = nothing` defers
# the zero-threshold choice to the FiniteDiff detection methods.
function ApproximateJacobianSparsity(; ntrials::Int = 3, epsilon = nothing,
        rng::AbstractRNG = Random.default_rng(), alg = GreedyD1Color())
    return ApproximateJacobianSparsity(ntrials, rng, alg, epsilon)
end

# No one should be using this currently
Expand Down Expand Up @@ -332,3 +336,15 @@ init_jacobian(J::SparseMatrixCSC, ::Type{T}, fx, x; kwargs...) where {T} = T.(J)

__maybe_copy_x(_, x) = x
__maybe_copy_x(_, ::Nothing) = nothing

# Check Backend has been loaded
## We pay a small compile time cost for this, but it avoids cryptic error messages
# Fallback: fail loudly when a backend is requested whose package extension has
# not been loaded. Loaded extensions (e.g. Enzyme, Zygote) shadow this with a
# no-op method for their own backend types.
@inline function __test_backend_loaded(ad::AbstractADType)
    error("$(ad) requires $(__backend(ad)).jl to be loaded. Please load it.")
end

# Map an ADTypes backend to the package it needs. The `nothing` fallback covers
# backends with no known extension package (the error message is then less
# specific, but still raised).
@inline __backend(ad) = nothing
@inline __backend(::Union{AutoEnzyme, AutoSparseEnzyme}) = :Enzyme
@inline __backend(::Union{AutoZygote, AutoSparseZygote}) = :Zygote
@inline __backend(::Union{AutoForwardDiff, AutoSparseForwardDiff}) = :ForwardDiff
@inline __backend(::Union{AutoFiniteDiff, AutoSparseFiniteDiff}) = :FiniteDiff
2 changes: 2 additions & 0 deletions src/highlevel/reverse_mode.jl
Original file line number Diff line number Diff line change
Expand Up @@ -28,6 +28,7 @@ end

function sparse_jacobian!(J::AbstractMatrix, ad, cache::ReverseModeJacobianCache, args...)
if cache.coloring isa NoMatrixColoring
__test_backend_loaded(ad)
return __jacobian!(J, ad, args...)
else
return __sparse_jacobian_reverse_impl!(J, ad, cache.idx_vec, cache.coloring,
Expand All @@ -43,6 +44,7 @@ end
function __sparse_jacobian_reverse_impl!(J::AbstractMatrix, ad, idx_vec,
cache::MatrixColoringResult, f::F, fx, x) where {F}
# If `fx` is `nothing` then assume `f` is not in-place
__test_backend_loaded(ad)
x_ = __maybe_copy_x(ad, x)
fx_ = __maybe_copy_x(ad, fx)
@unpack colorvec, nz_rows, nz_cols = cache
Expand Down

0 comments on commit 001a75f

Please sign in to comment.