This repository was archived by the owner on Mar 12, 2021. It is now read-only.

Avoid exponential of positive numbers in softplus implementation #518

Merged 3 commits on Jan 17, 2020.

src/nnlib.jl: 2 changes (1 addition, 1 deletion)
@@ -28,4 +28,4 @@ end
   λ * ifelse(x > 0, x/1, α * (exp(x) - 1))
 end

-@cufunc softplus(x) = log1p(exp(x))
+@cufunc softplus(x) = ifelse(x > 0, x + log1p(exp(-x)), log1p(exp(x)))
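
The rewrite rests on the identity log(1 + e^x) = x + log(1 + e^(-x)), obtained by factoring e^x out of the sum, so whichever branch ifelse selects, exp only ever sees a non-positive argument and cannot overflow. A minimal CPU sketch of the effect (the names naive_softplus and stable_softplus are illustrative, not part of this PR):

naive_softplus(x)  = log1p(exp(x))                                     # old definition
stable_softplus(x) = ifelse(x > 0, x + log1p(exp(-x)), log1p(exp(x)))  # new definition

naive_softplus(1000.0)    # Inf, because exp(1000.0) overflows Float64
stable_softplus(1000.0)   # 1000.0, because exp(-1000.0) underflows harmlessly to 0.0
stable_softplus(-1000.0)  # 0.0, identical to the naive form for x <= 0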
test/dnn.jl: 10 changes (10 additions, 0 deletions)
@@ -78,6 +78,16 @@ end
   @test testf(CuArrays.CUDNN.cudnnAddTensor, cu(rand(Float64, 10, 10, 3, 1)), cu(rand(Float64, 10, 10, 3, 1)))
   @test testf(CuArrays.CUDNN.cudnnActivationForward, cu(rand(Float64, 10, 10, 3, 1)), cu(rand(Float64, 10, 10, 3, 1)))
   @test testf(CuArrays.CUDNN.cudnnActivationBackward, cu(rand(Float64, 10, 10, 3, 1)), cu(rand(Float64, 10, 10, 3, 1)), cu(rand(Float64, 10, 10, 3, 1)), cu(rand(Float64, 10, 10, 3, 1)))
+
+  # activations defined in src/nnlib.jl
+  for dims in ((5,5), (5,))
+    for f in (σ, logσ, elu, swish, gelu, selu, softplus)
+      @test testf(x -> f.(x), rand(Float64, dims))
+    end
+  end
+  # softplus does not give `Inf` for large arguments
+  x = cu([1000.])
+  @test all(softplus.(x) .== x)
 end

 @testset "Batchnorm" begin
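
The new loop runs every activation defined in src/nnlib.jl through testf on the GPU, and the final assertion pins down the overflow fix: at x = 1000 the stable branch returns x exactly, since exp(-1000) underflows to zero. A sketch of that arithmetic in Float32 (cu typically stores Float64 input as Float32 on the device; that default is an assumption here, not something the diff states):

x = 1000f0               # Float32 scalar, mirroring an element of cu([1000.])
exp(-x)                  # 0.0f0: underflow, not overflow
log1p(exp(-x))           # 0.0f0
x + log1p(exp(-x)) == x  # true, which is why `softplus.(x) .== x` holds elementwise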