@@ -19,7 +19,7 @@ julia> Flux.mae(y_model, 1:3)
 ```
 """
 function mae(ŷ, y; agg = mean)
-    match_sizes(ŷ, y)
+    _check_sizes(ŷ, y)
     agg(abs.(ŷ .- y))
 end

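As a quick sanity check on the body above, here is a plain-Julia sketch of what it computes with the default `agg = mean` (the input values are made up):

```julia
using Statistics  # `mean` is the default `agg`

ŷ, y = Float32[1.1, 1.9, 3.1], Float32[1, 2, 3]
mean(abs.(ŷ .- y))  # ≈ 0.1f0, the value mae(ŷ, y) returns
```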
@@ -43,7 +43,7 @@ julia> Flux.mse(y_model, y_true)
 ```
 """
 function mse(ŷ, y; agg = mean)
-    match_sizes(ŷ, y)
+    _check_sizes(ŷ, y)
     agg((ŷ .- y) .^ 2)
 end

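`mse` is the same pattern with the error squared; a minimal stand-alone check on illustrative values:

```julia
using Statistics

ŷ, y = Float32[1.1, 1.9, 3.1], Float32[1, 2, 3]
mean((ŷ .- y) .^ 2)  # ≈ 0.01f0, the value mse(ŷ, y) returns
```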
@@ -67,7 +67,7 @@ julia> Flux.msle(Float32[0.9, 1.8, 2.7], 1:3)
 ```
 """
 function msle(ŷ, y; agg = mean, ϵ = epseltype(ŷ))
-    match_sizes(ŷ, y)
+    _check_sizes(ŷ, y)
     agg((log.((ŷ .+ ϵ) ./ (y .+ ϵ))) .^ 2)
 end

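`msle` penalises the squared log of the ratio, so it measures relative rather than absolute error. A sketch reproducing the docstring example, with a small `ϵ` standing in for `epseltype(ŷ)`:

```julia
using Statistics

ŷ, y, ϵ = Float32[0.9, 1.8, 2.7], 1:3, eps(Float32)
mean((log.((ŷ .+ ϵ) ./ (y .+ ϵ))) .^ 2)  # ≈ 0.0111; every prediction is off by the same 10% ratio
```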
@@ -82,7 +82,7 @@ given the prediction `ŷ` and true values `y`.
              | δ * (|ŷ - y| - 0.5 * δ), otherwise
 """
 function huber_loss(ŷ, y; agg = mean, δ = ofeltype(ŷ, 1))
-    match_sizes(ŷ, y)
+    _check_sizes(ŷ, y)
     abs_error = abs.(ŷ .- y)
     # TODO: remove dropgrad when Zygote can handle this function with CuArrays
     temp = Zygote.dropgrad(abs_error .< δ)
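The hunk cuts off after the `temp` mask, but the docstring's piecewise definition (quadratic below `δ`, linear above it) can be checked directly; a sketch with made-up values, using the docstring's `<=` branch condition rather than the mask's strict `<`:

```julia
using Statistics

δ = 1.0
err = abs.([0.5, 2.0] .- 0.0)  # |ŷ - y| for two toy predictions against y = 0
mean(ifelse.(err .<= δ, 0.5 .* err .^ 2, δ .* (err .- 0.5δ)))
# (0.125 + 1.5) / 2 = 0.8125: small errors are squared, large ones grow linearly
```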
@@ -212,7 +212,7 @@ julia> Flux.crossentropy(y_model, y_smooth)
 ```
 """
 function crossentropy(ŷ, y; dims = 1, agg = mean, ϵ = epseltype(ŷ))
-    match_sizes(ŷ, y)
+    _check_sizes(ŷ, y)
     agg(.-sum(xlogy.(y, ŷ .+ ϵ); dims = dims))
 end

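Here `xlogy(y, ŷ)` is `y * log(ŷ)` with the `y == 0` case defined as zero. For strictly positive probabilities the naive broadcast computes the same thing; an illustrative check with one-hot columns:

```julia
using Statistics

y = [1 0; 0 1; 0 0]                 # one-hot targets, one column per sample
ŷ = [0.9 0.2; 0.05 0.7; 0.05 0.1]   # probability columns (e.g. softmax output)
mean(-sum(y .* log.(ŷ); dims = 1))  # ≈ 0.231; safe here because ŷ > 0 everywhere
```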
@@ -251,7 +251,7 @@ julia> Flux.crossentropy(softmax(y_model), y_label)
 ```
 """
 function logitcrossentropy(ŷ, y; dims = 1, agg = mean)
-    match_sizes(ŷ, y)
+    _check_sizes(ŷ, y)
     agg(.-sum(y .* logsoftmax(ŷ; dims = dims); dims = dims))
 end

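`logsoftmax` comes from NNlib; a naive stand-in (unlike NNlib's version it does not subtract the maximum first, so it overflows for large logits) shows what the body computes on raw scores:

```julia
using Statistics

logsm(x; dims = 1) = x .- log.(sum(exp.(x); dims = dims))  # naive logsoftmax stand-in
y = [1 0; 0 1]                       # one-hot columns
ŷ = [2.0 0.0; 0.0 3.0]               # raw logits, no softmax applied
mean(-sum(y .* logsm(ŷ); dims = 1))  # ≈ 0.0878
```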
@@ -300,7 +300,7 @@ julia> Flux.crossentropy(y_prob, y_hot)
 ```
 """
 function binarycrossentropy(ŷ, y; agg = mean, ϵ = epseltype(ŷ))
-    match_sizes(ŷ, y)
+    _check_sizes(ŷ, y)
     agg(@.(-xlogy(y, ŷ + ϵ) - xlogy(1 - y, 1 - ŷ + ϵ)))
 end

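With probabilities strictly inside (0, 1) the `xlogy` guard is not needed and the familiar binary cross entropy formula applies directly; a sketch on made-up values:

```julia
using Statistics

y = [1, 0, 1]           # binary labels
ŷ = [0.9, 0.2, 0.8]     # probabilities, e.g. from a sigmoid
mean(@. -y * log(ŷ) - (1 - y) * log(1 - ŷ))  # ≈ 0.184
```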
@@ -330,7 +330,7 @@ julia> Flux.binarycrossentropy(sigmoid.(y_model), y_bin)
 ```
 """
 function logitbinarycrossentropy(ŷ, y; agg = mean)
-    match_sizes(ŷ, y)
+    _check_sizes(ŷ, y)
     agg(@.((1 - y) * ŷ - logσ(ŷ)))
 end

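`logσ` is the log-sigmoid from NNlib; with a naive stand-in (fine for moderate logits) the body agrees algebraically with `binarycrossentropy(sigmoid.(ŷ), y)` while avoiding the intermediate sigmoid:

```julia
using Statistics

logσ(x) = -log1p(exp(-x))        # naive log-sigmoid stand-in
y = [1, 0, 1]
ŷ = [2.0, -1.0, 0.5]             # raw logits
mean(@. (1 - y) * ŷ - logσ(ŷ))   # ≈ 0.305
```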
@@ -370,7 +370,7 @@
 ```
 """
 function kldivergence(ŷ, y; dims = 1, agg = mean, ϵ = epseltype(ŷ))
-    match_sizes(ŷ, y)
+    _check_sizes(ŷ, y)
     entropy = agg(sum(xlogx.(y), dims = dims))
     cross_entropy = crossentropy(ŷ, y; dims = dims, agg = agg, ϵ = ϵ)
     return entropy + cross_entropy
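For strictly positive distributions, `entropy + cross_entropy` collapses to the usual `sum(y .* log.(y ./ ŷ))` form of the KL divergence, which makes a direct check easy on illustrative columns:

```julia
using Statistics

y = [0.3 0.5; 0.7 0.5]                  # each column is a probability distribution
ŷ = [0.4 0.5; 0.6 0.5]
mean(sum(y .* log.(y ./ ŷ); dims = 1))  # ≈ 0.0108; exactly 0 where ŷ == y (second column)
```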
@@ -385,7 +385,7 @@
 [More information.](https://peltarion.com/knowledge-center/documentation/modeling-view/build-an-ai-model/loss-functions/poisson).
 """
 function poisson_loss(ŷ, y; agg = mean)
-    match_sizes(ŷ, y)
+    _check_sizes(ŷ, y)
     agg(ŷ .- xlogy.(y, ŷ))
 end

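The body is the negative Poisson log-likelihood up to a `y`-only constant; for positive rates the `xlogy` call is just `y * log(ŷ)`. A sketch with made-up counts and rates:

```julia
using Statistics

y = [1.0, 2.0, 3.0]          # observed counts
ŷ = [0.9, 2.1, 2.8]          # predicted Poisson rates
mean(ŷ .- y .* log.(ŷ))      # ≈ 0.444
```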
@@ -399,7 +399,7 @@ prediction `ŷ` and true labels `y` (containing 1 or -1); calculated as
 See also: [`squared_hinge_loss`](@ref)
 """
 function hinge_loss(ŷ, y; agg = mean)
-    match_sizes(ŷ, y)
+    _check_sizes(ŷ, y)
     agg(max.(0, 1 .- ŷ .* y))
 end

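A quick check with labels in {-1, 1}: correct predictions with margin at least 1 contribute nothing, everything else contributes linearly.

```julia
using Statistics

y = [1, -1, 1]               # labels in {-1, 1}
ŷ = [0.7, -2.0, -0.5]        # raw scores
mean(max.(0, 1 .- ŷ .* y))   # (0.3 + 0 + 1.5) / 3 = 0.6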
@@ -412,7 +412,7 @@ Return the squared hinge_loss loss given the prediction `ŷ` and true labels `y
 See also: [`hinge_loss`](@ref)
 """
 function squared_hinge_loss(ŷ, y; agg = mean)
-    match_sizes(ŷ, y)
+    _check_sizes(ŷ, y)
     agg((max.(0, 1 .- ŷ .* y)) .^ 2)
 end

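The same inputs under the squared variant, which punishes large margin violations more heavily:

```julia
using Statistics

y = [1, -1, 1]
ŷ = [0.7, -2.0, -0.5]
mean(max.(0, 1 .- ŷ .* y) .^ 2)  # (0.09 + 0 + 2.25) / 3 = 0.78
```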
@@ -427,7 +427,7 @@ Similar to the F1_score. Calculated as:
     1 - 2*sum(|ŷ .* y| + smooth) / (sum(ŷ.^2) + sum(y.^2) + smooth)
 """
 function dice_coeff_loss(ŷ, y; smooth = ofeltype(ŷ, 1.0))
-    match_sizes(ŷ, y)
+    _check_sizes(ŷ, y)
     1 - (2 * sum(y .* ŷ) + smooth) / (sum(y .^ 2) + sum(ŷ .^ 2) + smooth) # TODO agg
 end

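Note this body reduces over the whole array (no `agg`, per the TODO). A sketch on a toy segmentation mask:

```julia
y = [1.0, 1.0, 0.0, 0.0]     # ground-truth mask
ŷ = [0.9, 0.8, 0.1, 0.0]     # predicted mask
s = 1.0                      # the `smooth` term
1 - (2 * sum(y .* ŷ) + s) / (sum(y .^ 2) + sum(ŷ .^ 2) + s)  # ≈ 0.0135
```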
@@ -441,7 +441,7 @@ Calculated as:
     1 - sum(|y .* ŷ| + 1) / (sum(y .* ŷ + β*(1 .- y) .* ŷ + (1 - β)*y .* (1 .- ŷ)) + 1)
 """
 function tversky_loss(ŷ, y; β = ofeltype(ŷ, 0.7))
-    match_sizes(ŷ, y)
+    _check_sizes(ŷ, y)
     # TODO add agg
     num = sum(y .* ŷ) + 1
     den = sum(y .* ŷ + β * (1 .- y) .* ŷ + (1 - β) * y .* (1 .- ŷ)) + 1
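In the denominator, `β` weights false positives and `1 - β` weights false negatives, so the default `β = 0.7` punishes false positives harder. The same toy mask as above, computed step by step:

```julia
y = [1.0, 1.0, 0.0, 0.0]
ŷ = [0.9, 0.8, 0.1, 0.0]
β = 0.7                      # weight on false positives
num = sum(y .* ŷ) + 1
den = sum(y .* ŷ .+ β .* (1 .- y) .* ŷ .+ (1 - β) .* y .* (1 .- ŷ)) + 1
1 - num / den                # ≈ 0.0559
```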
@@ -478,7 +478,7 @@ See also: [`Losses.focal_loss`](@ref) for multi-class setting

 """
 function binary_focal_loss(ŷ, y; agg=mean, γ=2, ϵ=epseltype(ŷ))
-    match_sizes(ŷ, y)
+    _check_sizes(ŷ, y)
     ŷ = ŷ .+ ϵ
     p_t = y .* ŷ + (1 .- y) .* (1 .- ŷ)
     ce = -log.(p_t)
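The hunk cuts off before the final line, but in the standard focal-loss formulation the cross entropy `ce` is scaled by `(1 - p_t)^γ` before aggregating, which suppresses the contribution of already well-classified examples. A sketch under that assumption:

```julia
using Statistics

y = [1.0, 0.0, 1.0]
ŷ = [0.9, 0.2, 0.6]                     # probabilities
γ = 2
p_t = @. y * ŷ + (1 - y) * (1 - ŷ)      # probability assigned to the true class
mean(@. (1 - p_t)^γ * (-log(p_t)))      # ≈ 0.0306; confident examples barely count
```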
@@ -522,7 +522,7 @@ See also: [`Losses.binary_focal_loss`](@ref) for binary (not one-hot) labels

 """
 function focal_loss(ŷ, y; dims=1, agg=mean, γ=2, ϵ=epseltype(ŷ))
-    match_sizes(ŷ, y)
+    _check_sizes(ŷ, y)
     ŷ = ŷ .+ ϵ
     agg(sum(@. -y * (1 - ŷ)^γ * log(ŷ); dims=dims))
 end
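The multi-class body is the same idea with one-hot columns: only the true-class probability contributes, down-weighted by `(1 - ŷ)^γ`. A self-contained check on illustrative columns:

```julia
using Statistics

y = [1 0; 0 1]                # one-hot columns
ŷ = [0.9 0.3; 0.1 0.7]        # probability columns
γ = 2
mean(sum(@.(-y * (1 - ŷ)^γ * log(ŷ)); dims = 1))  # ≈ 0.0166
```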