
Commit dbd8a73

Upgrade ADTypes to v1
1 parent f537806 commit dbd8a73

18 files changed: 81 additions, 88 deletions

.github/workflows/CI.yml

Lines changed: 1 addition & 1 deletion
@@ -17,7 +17,7 @@ jobs:
           - InterfaceI
         version:
           - '1' # Latest Release
-          - '1.6' # Current LTS
+          - '1.10' # future LTS
     steps:
       - uses: actions/checkout@v4
       - uses: julia-actions/setup-julia@v2

.github/workflows/Downstream.yml

Lines changed: 1 addition & 1 deletion
@@ -14,7 +14,7 @@ jobs:
     strategy:
       fail-fast: false
       matrix:
-        julia-version: [1,1.6]
+        julia-version: [1,1.10]
         os: [ubuntu-latest]
         package:
           - {user: SciML, repo: OrdinaryDiffEq.jl, group: InterfaceII}

Project.toml

Lines changed: 3 additions & 3 deletions
@@ -1,7 +1,7 @@
 name = "SparseDiffTools"
 uuid = "47a9eef4-7e08-11e9-0b38-333d64bd3804"
 authors = ["Pankaj Mishra <pankajmishra1511@gmail.com>", "Chris Rackauckas <contact@chrisrackauckas.com>"]
-version = "2.18.0"
+version = "2.19.0"

 [deps]
 ADTypes = "47edcb42-4c32-4615-8424-f2b9edc5f35b"
@@ -40,7 +40,7 @@ SparseDiffToolsSymbolicsExt = "Symbolics"
 SparseDiffToolsZygoteExt = "Zygote"

 [compat]
-ADTypes = "0.2.6"
+ADTypes = "1.0.0"
 Adapt = "3, 4"
 ArrayInterface = "7.4.2"
 Compat = "4"
@@ -65,7 +65,7 @@ Tricks = "0.1.6"
 UnPack = "1"
 VertexSafeGraphs = "0.2"
 Zygote = "0.6"
-julia = "1.6"
+julia = "1.10"

 [extras]
 BandedMatrices = "aae01518-5342-5314-be14-df237901396f"

README.md

Lines changed: 3 additions & 3 deletions
@@ -59,8 +59,8 @@ We need to perform the following steps to utilize SparseDiffTools:
    the sparsity pattern. (Note that `Symbolics.jl` must be explicitly loaded before
    using this functionality.)
 2. Now choose an AD backend from `ADTypes.jl`:
-   1. If using a Non `*Sparse*` type, then we will not use sparsity detection.
-   2. All other sparse AD types will internally compute the proper sparsity pattern, and
+   1. If using a standard type like `AutoForwardDiff()`, then we will not use sparsity detection.
+   2. If you wrap it inside `AutoSparse(AutoForwardDiff())`, then we will internally compute the proper sparsity pattern, and
       try to exploit that.
 3. Now there are 2 options:
    1. Precompute the cache using `sparse_jacobian_cache` and use the `sparse_jacobian` or
@@ -73,7 +73,7 @@ We need to perform the following steps to utilize SparseDiffTools:
 using Symbolics

 sd = SymbolicsSparsityDetection()
-adtype = AutoSparseFiniteDiff()
+adtype = AutoSparse(AutoFiniteDiff())
 x = rand(30)
 y = similar(x)
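
For reference, a minimal sketch of the updated workflow under the new names; the in-place function `fdiff` below is illustrative (not part of this commit), and the cache/Jacobian calls follow the `(ad, sd, f!, fx, x)` signatures visible in the extension diffs further down:

```julia
using SparseDiffTools, Symbolics

# Illustrative in-place test function with a simple sparsity structure
fdiff(y, x) = (y[1:(end - 1)] .= x[1:(end - 1)] .^ 2 .- x[2:end] .^ 2; y[end] = x[end]^2; y)

x = rand(30)
y = similar(x)

sd = SymbolicsSparsityDetection()      # symbolic sparsity detection (requires Symbolics to be loaded)
adtype = AutoSparse(AutoFiniteDiff())  # ADTypes v1: wrap the dense backend instead of AutoSparseFiniteDiff()

# Precompute the sparsity pattern and coloring once, then reuse the cache
cache = sparse_jacobian_cache(adtype, sd, fdiff, y, x)
J = sparse_jacobian(adtype, cache, fdiff, y, x)
```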

docs/src/index.md

Lines changed: 1 addition & 1 deletion
@@ -63,7 +63,7 @@ We need to perform the following steps to utilize SparseDiffTools:
 using Symbolics

 sd = SymbolicsSparsityDetection()
-adtype = AutoSparseFiniteDiff()
+adtype = AutoSparse(AutoFiniteDiff())
 x = rand(30)
 y = similar(x)

ext/SparseDiffToolsEnzymeExt.jl

Lines changed: 8 additions & 8 deletions
@@ -2,36 +2,36 @@ module SparseDiffToolsEnzymeExt

 import ArrayInterface: fast_scalar_indexing
 import SparseDiffTools: __f̂, __maybe_copy_x, __jacobian!, __gradient, __gradient!,
-                        AutoSparseEnzyme, __test_backend_loaded
+                        __test_backend_loaded
 # FIXME: For Enzyme we currently assume reverse mode
-import ADTypes: AutoEnzyme
+import ADTypes: AutoSparse, AutoEnzyme
 using Enzyme

 using ForwardDiff

-@inline __test_backend_loaded(::Union{AutoSparseEnzyme, AutoEnzyme}) = nothing
+@inline __test_backend_loaded(::Union{AutoSparse{<:AutoEnzyme}, AutoEnzyme}) = nothing

 ## Satisfying High-Level Interface for Sparse Jacobians
-function __gradient(::Union{AutoSparseEnzyme, AutoEnzyme}, f, x, cols)
+function __gradient(::Union{AutoSparse{<:AutoEnzyme}, AutoEnzyme}, f, x, cols)
     dx = zero(x)
     autodiff(Reverse, __f̂, Const(f), Duplicated(x, dx), Const(cols))
     return vec(dx)
 end

-function __gradient!(::Union{AutoSparseEnzyme, AutoEnzyme}, f!, fx, x, cols)
+function __gradient!(::Union{AutoSparse{<:AutoEnzyme}, AutoEnzyme}, f!, fx, x, cols)
     dx = zero(x)
     dfx = zero(fx)
     autodiff(Reverse, __f̂, Active, Const(f!), Duplicated(fx, dfx), Duplicated(x, dx),
         Const(cols))
     return dx
 end

-function __jacobian!(J::AbstractMatrix, ::Union{AutoSparseEnzyme, AutoEnzyme}, f, x)
+function __jacobian!(J::AbstractMatrix, ::Union{AutoSparse{<:AutoEnzyme}, AutoEnzyme}, f, x)
     J .= jacobian(Reverse, f, x, Val(size(J, 1)))
     return J
 end

-@views function __jacobian!(J, ad::Union{AutoSparseEnzyme, AutoEnzyme}, f!, fx, x)
+@views function __jacobian!(J, ad::Union{AutoSparse{<:AutoEnzyme}, AutoEnzyme}, f!, fx, x)
     # This version is slowish not sure how to do jacobians for inplace functions
     @warn "Current code for computing jacobian for inplace functions in Enzyme is slow." maxlog=1
     dfx = zero(fx)
@@ -58,6 +58,6 @@ end
     return J
 end

-__maybe_copy_x(::Union{AutoSparseEnzyme, AutoEnzyme}, x::SubArray) = copy(x)
+__maybe_copy_x(::Union{AutoSparse{<:AutoEnzyme}, AutoEnzyme}, x::SubArray) = copy(x)

 end
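
The mechanical change in this and the other extensions is the same: ADTypes v1 removed the dedicated `AutoSparseEnzyme`/`AutoSparseZygote` types, so methods now dispatch on the parametric wrapper `AutoSparse{<:AutoEnzyme}` instead. A minimal sketch of how that wrapper behaves, using plain ADTypes v1 (nothing here is specific to this commit):

```julia
using ADTypes

backend = AutoSparse(AutoEnzyme())      # wrap any dense backend to request sparse handling

backend isa AutoSparse{<:AutoEnzyme}    # true, so Union{AutoSparse{<:AutoEnzyme}, AutoEnzyme} covers both cases
ADTypes.dense_ad(backend)               # recovers the wrapped AutoEnzyme() backend
```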

ext/SparseDiffToolsPolyesterForwardDiffExt.jl

Lines changed: 4 additions & 4 deletions
@@ -18,7 +18,7 @@ struct PolyesterForwardDiffJacobianCache{CO, CA, J, FX, X} <:
 end

 function sparse_jacobian_cache(
-        ad::Union{AutoSparsePolyesterForwardDiff, AutoPolyesterForwardDiff},
+        ad::Union{AutoSparse{<:AutoPolyesterForwardDiff}, AutoPolyesterForwardDiff},
         sd::AbstractMaybeSparsityDetection, f::F, x; fx = nothing) where {F}
     coloring_result = sd(ad, f, x)
     fx = fx === nothing ? similar(f(x)) : fx
@@ -36,7 +36,7 @@ function sparse_jacobian_cache(
 end

 function sparse_jacobian_cache(
-        ad::Union{AutoSparsePolyesterForwardDiff, AutoPolyesterForwardDiff},
+        ad::Union{AutoSparse{<:AutoPolyesterForwardDiff}, AutoPolyesterForwardDiff},
         sd::AbstractMaybeSparsityDetection, f!::F, fx, x) where {F}
     coloring_result = sd(ad, f!, fx, x)
     if coloring_result isa NoMatrixColoring
@@ -77,7 +77,7 @@ end

 ## Approximate Sparsity Detection
 function (alg::ApproximateJacobianSparsity)(
-        ad::AutoSparsePolyesterForwardDiff, f::F, x; fx = nothing, kwargs...) where {F}
+        ad::AutoSparse{<:AutoPolyesterForwardDiff}, f::F, x; fx = nothing, kwargs...) where {F}
     @unpack ntrials, rng = alg
     fx = fx === nothing ? f(x) : fx
     ck = __chunksize(ad, x)
@@ -94,7 +94,7 @@ function (alg::ApproximateJacobianSparsity)(
 end

 function (alg::ApproximateJacobianSparsity)(
-        ad::AutoSparsePolyesterForwardDiff, f::F, fx, x;
+        ad::AutoSparse{<:AutoPolyesterForwardDiff}, f::F, fx, x;
         kwargs...) where {F}
     @unpack ntrials, rng = alg
     ck = __chunksize(ad, x)

ext/SparseDiffToolsSymbolicsExt.jl

Lines changed: 3 additions & 3 deletions
@@ -1,9 +1,9 @@
 module SparseDiffToolsSymbolicsExt

 using SparseDiffTools, Symbolics
-import SparseDiffTools: AbstractSparseADType
+import SparseDiffTools: AutoSparse

-function (alg::SymbolicsSparsityDetection)(ad::AbstractSparseADType, f, x; fx = nothing,
+function (alg::SymbolicsSparsityDetection)(ad::AutoSparse, f, x; fx = nothing,
         kwargs...)
     fx = fx === nothing ? similar(f(x)) : dx
     f!(y, x) = (y .= f(x))
@@ -12,7 +12,7 @@ function (alg::SymbolicsSparsityDetection)(ad::AbstractSparseADType, f, x; fx =
     return _alg(ad, f, x; fx, kwargs...)
 end

-function (alg::SymbolicsSparsityDetection)(ad::AbstractSparseADType, f!, fx, x; kwargs...)
+function (alg::SymbolicsSparsityDetection)(ad::AutoSparse, f!, fx, x; kwargs...)
     J = Symbolics.jacobian_sparsity(f!, fx, x)
     _alg = JacPrototypeSparsityDetection(J, alg.alg)
     return _alg(ad, f!, fx, x; kwargs...)

ext/SparseDiffToolsZygoteExt.jl

Lines changed: 6 additions & 6 deletions
@@ -11,25 +11,25 @@ import SparseDiffTools: numback_hesvec!,
                         numback_hesvec, autoback_hesvec!, autoback_hesvec, auto_vecjac!,
                         auto_vecjac
 import SparseDiffTools: __f̂, __jacobian!, __gradient, __gradient!
-import ADTypes: AutoZygote, AutoSparseZygote
+import ADTypes: AutoZygote, AutoSparse

-@inline __test_backend_loaded(::Union{AutoSparseZygote, AutoZygote}) = nothing
+@inline __test_backend_loaded(::Union{AutoSparse{<:AutoZygote}, AutoZygote}) = nothing

 ## Satisfying High-Level Interface for Sparse Jacobians
-function __gradient(::Union{AutoSparseZygote, AutoZygote}, f::F, x, cols) where {F}
+function __gradient(::Union{AutoSparse{<:AutoZygote}, AutoZygote}, f::F, x, cols) where {F}
     _, ∂x, _ = Zygote.gradient(__f̂, f, x, cols)
     return vec(∂x)
 end

-function __gradient!(::Union{AutoSparseZygote, AutoZygote}, f!::F, fx, x, cols) where {F}
+function __gradient!(::Union{AutoSparse{<:AutoZygote}, AutoZygote}, f!::F, fx, x, cols) where {F}
     return error("Zygote.jl cannot differentiate in-place (mutating) functions.")
 end

 # Zygote doesn't provide a way to accumulate directly into `J`. So we modify the code from
 # https://github.com/FluxML/Zygote.jl/blob/82c7a000bae7fb0999275e62cc53ddb61aed94c7/src/lib/grad.jl#L140-L157C4
 import Zygote: _jvec, _eyelike, _gradcopy!

-@views function __jacobian!(J::AbstractMatrix, ::Union{AutoSparseZygote, AutoZygote}, f::F,
+@views function __jacobian!(J::AbstractMatrix, ::Union{AutoSparse{<:AutoZygote}, AutoZygote}, f::F,
         x) where {F}
     y, back = Zygote.pullback(_jvec ∘ f, x)
     δ = _eyelike(y)
@@ -40,7 +40,7 @@ import Zygote: _jvec, _eyelike, _gradcopy!
     return J
 end

-function __jacobian!(_, ::Union{AutoSparseZygote, AutoZygote}, f!::F, fx, x) where {F}
+function __jacobian!(_, ::Union{AutoSparse{<:AutoZygote}, AutoZygote}, f!::F, fx, x) where {F}
     return error("Zygote.jl cannot differentiate in-place (mutating) functions.")
 end

src/SparseDiffTools.jl

Lines changed: 2 additions & 4 deletions
@@ -9,9 +9,8 @@ import Graphs: SimpleGraph
 # Differentiation
 using FiniteDiff, ForwardDiff
 @reexport using ADTypes
-import ADTypes: AbstractADType, AutoSparseZygote, AbstractSparseForwardMode,
-                AbstractSparseReverseMode, AbstractSparseFiniteDifferences,
-                AbstractReverseMode
+import ADTypes: AbstractADType, AutoSparse, ForwardMode, ForwardOrReverseMode, ReverseMode,
+                SymbolicMode, mode
 import ForwardDiff: Dual, jacobian, partials, DEFAULT_CHUNK_THRESHOLD
 # Array Packages
 using ArrayInterface, SparseArrays
@@ -90,7 +89,6 @@ export JacVec, HesVec, HesVecGrad, VecJac
 export update_coefficients, update_coefficients!, value!

 # High Level Interface: sparse_jacobian
-export AutoSparseEnzyme

 export NoSparsityDetection, SymbolicsSparsityDetection, JacPrototypeSparsityDetection,
     PrecomputedJacobianColorvec, ApproximateJacobianSparsity, AutoSparsityDetection
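
The new imports reflect that ADTypes v1 also replaced the old abstract hierarchy (`AbstractSparseForwardMode`, `AbstractSparseReverseMode`, ...) with a `mode` trait queried per backend. A small sketch of that trait, again using plain ADTypes v1 and independent of SparseDiffTools:

```julia
using ADTypes
import ADTypes: mode, ForwardMode, ReverseMode, SymbolicMode

mode(AutoForwardDiff()) isa ForwardMode            # true
mode(AutoZygote()) isa ReverseMode                 # true
mode(AutoSymbolics()) isa SymbolicMode             # true
mode(ADTypes.dense_ad(AutoSparse(AutoZygote())))   # ReverseMode(), queried via the wrapped backend
```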
