
Commit 4662c4c

safer gradients, less piracy (#2098)
1 parent b2b0748 commit 4662c4c

3 files changed: +3 -6 lines

Project.toml

Lines changed: 1 addition & 3 deletions
@@ -1,10 +1,9 @@
 name = "Flux"
 uuid = "587475ba-b771-5e3f-ad9e-33799f191a9c"
-version = "0.13.7"
+version = "0.13.8"

 [deps]
 Adapt = "79e6a3ab-5dfb-504d-930d-738a2a938a0e"
-ArrayInterface = "4fba245c-0d91-5ea0-9b3e-6abc04ee57a9"
 CUDA = "052768ef-5323-5732-b1bb-66c8b64840ba"
 ChainRulesCore = "d360d2e6-b24c-11e9-a2a3-2a2ae2dbcce4"
 Functors = "d9f16b24-f501-4c13-a1f2-28368ffc5196"

@@ -27,7 +26,6 @@ Zygote = "e88e6eb3-aa80-5325-afca-941959d7151f"

 [compat]
 Adapt = "3.0"
-ArrayInterface = "3.1, 4, 5, 6"
 CUDA = "3"
 ChainRulesCore = "1.12"
 Functors = "0.3"

src/optimise/Optimise.jl

Lines changed: 0 additions & 1 deletion
@@ -1,7 +1,6 @@
 module Optimise

 using LinearAlgebra
-import ArrayInterface

 export train!, update!,
   Descent, Adam, Momentum, Nesterov, RMSProp,

src/optimise/train.jl

Lines changed: 2 additions & 2 deletions
@@ -13,8 +13,8 @@ As a result, the parameters are mutated and the optimizer's internal state may change.
 The gradient could be mutated as well.
 """
 function update!(opt::AbstractOptimiser, x, x̄)
-  x̄r = ArrayInterface.restructure(x, x̄) # address some cases where Zygote's
-                                        # output are not mutable, see #1510
+  x̄r = copyto!(similar(x̄), x̄) # Flux.Optimise assumes it can mutate the gradient. This is not
+                              # safe due to aliasing, nor guaranteed to be possible, e.g. Fill.
   x .-= apply!(opt, x, x̄r)
 end
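For context on the new line: Zygote can return immutable gradients such as FillArrays.Fill (the situation behind #1510), while Flux.Optimise assumes it may scale the gradient in place. copyto!(similar(x̄), x̄) builds that mutable buffer explicitly and guarantees it shares no memory with x, which also covers the aliasing concern in the new comment. A minimal sketch of the behaviour (not taken from the changed files), assuming FillArrays is installed and that similar(::Fill) falls back to an ordinary Array:

using FillArrays              # assumed available; provides the lazy, immutable Fill type
                              # that Zygote can return as a gradient (see Flux issue #1510)

x  = rand(3)                  # parameters
x̄  = Fill(1.0, 3)             # hand-built stand-in for an immutable gradient
# x̄ .*= 0.1                   # would throw: Fill has no setindex!, so in-place updates fail

x̄r = copyto!(similar(x̄), x̄)   # similar(::Fill) yields a plain Array, so x̄r is a fresh
                              # mutable copy sharing no memory with x or x̄
x̄r .*= 0.1                    # the kind of in-place scaling apply! performs is now safe
x .-= x̄r                      # parameter step, mirroring "x .-= apply!(opt, x, x̄r)" above

Dropping the restructure call also removes the only remaining use of ArrayInterface, which is what permits the dependency and import deletions in Project.toml and src/optimise/Optimise.jl above.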
