Skip to content

Commit bf9dada

Browse files
committed
fix tests
1 parent 5eb28c1 commit bf9dada

File tree

2 files changed

+2
-11
lines changed

2 files changed

+2
-11
lines changed

test/layers/recurrent.jl

Lines changed: 0 additions & 9 deletions
Original file line numberDiff line numberDiff line change
@@ -90,15 +90,6 @@ end
9090
end
9191
end
9292

93-
@testset "RNN-input-state-eltypes" begin
94-
@testset for R in [RNN, GRU, LSTM, GRUv3]
95-
m = R(3 => 5)
96-
x = rand(Float64, 3, 1)
97-
Flux.reset!(m)
98-
@test_throws MethodError m(x)
99-
end
100-
end
101-
10293
@testset "multigate" begin
10394
x = rand(6, 5)
10495
res, (dx,) = Flux.withgradient(x) do x

test/utils.jl

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -290,8 +290,8 @@ end
290290
x32 = rand(Float32, 10)
291291
@test eltype(m[1].weight) == Float32
292292
@test eltype(m(x32)) == Float32
293-
@test eltype(m(x64)) == Float64
294-
@test eltype(f64(m)(x32)) == Float64
293+
@test eltype(m(x64)) == Float32 # fixed by _match_eltype
294+
@test eltype(f64(m)(x32)) == Float64 # _match_eltype promotes, Julia would too
295295
@test eltype(f64(m)(x64)) == Float64
296296
@test eltype(f64(m)[1].weight) == Float64
297297
@test eltype(f32(f64(m))[1].weight) == Float32

0 commit comments

Comments (0)