From 0636977738e30486cf1c4b0a3b5452f8e65f1d0e Mon Sep 17 00:00:00 2001
From: schillic
Date: Thu, 11 Apr 2024 23:21:33 +0200
Subject: [PATCH 1/2] better printing

---
 src/Architecture/ActivationFunction.jl | 10 ++++++++++
 src/Architecture/DenseLayerOp.jl       |  2 +-
 src/Architecture/FeedforwardNetwork.jl |  2 +-
 3 files changed, 12 insertions(+), 2 deletions(-)

diff --git a/src/Architecture/ActivationFunction.jl b/src/Architecture/ActivationFunction.jl
index dce62fd..f16f097 100644
--- a/src/Architecture/ActivationFunction.jl
+++ b/src/Architecture/ActivationFunction.jl
@@ -18,6 +18,8 @@ struct Id <: ActivationFunction end
 
 (::Id)(x) = x
 
+Base.show(io::IO, ::Id) = print(io, Id)
+
 """
     ReLU
 
@@ -31,6 +33,8 @@ struct ReLU <: ActivationFunction end
 
 (::ReLU)(x) = max.(x, zero(eltype(x)))
 
+Base.show(io::IO, ::ReLU) = print(io, ReLU)
+
 """
     Sigmoid
 
@@ -44,6 +48,8 @@ struct Sigmoid <: ActivationFunction end
 
 (::Sigmoid)(x) = @. 1 / (1 + exp(-x))
 
+Base.show(io::IO, ::Sigmoid) = print(io, Sigmoid)
+
 """
     Tanh
 
@@ -57,6 +63,8 @@ struct Tanh <: ActivationFunction end
 
 (::Tanh)(x) = tanh.(x)
 
+Base.show(io::IO, ::Tanh) = print(io, Tanh)
+
 """
     LeakyReLU{N<:Number}
 
@@ -78,6 +86,8 @@ end
 (lr::LeakyReLU)(x::Number) = x >= zero(x) ? x : lr.slope * x
 (lr::LeakyReLU)(x::AbstractVector) = lr.(x)
 
+Base.show(io::IO, lr::LeakyReLU) = print(io, "$LeakyReLU($(lr.slope))")
+
 # constant instances of each activation function
 const _id = Id()
 const _relu = ReLU()
diff --git a/src/Architecture/DenseLayerOp.jl b/src/Architecture/DenseLayerOp.jl
index 40dd1fe..c32609b 100644
--- a/src/Architecture/DenseLayerOp.jl
+++ b/src/Architecture/DenseLayerOp.jl
@@ -64,7 +64,7 @@ function Base.:isapprox(L1::DenseLayerOp, L2::DenseLayerOp; atol::Real=0,
 end
 
 function Base.show(io::IO, L::DenseLayerOp)
-    str = "$(string(DenseLayerOp)) with $(dim_in(L)) inputs, $(dim_out(L)) " *
+    str = "$DenseLayerOp with $(dim_in(L)) inputs, $(dim_out(L)) " *
           "outputs, and $(L.activation) activation"
     return print(io, str)
 end
diff --git a/src/Architecture/FeedforwardNetwork.jl b/src/Architecture/FeedforwardNetwork.jl
index 71096c6..a9f95ba 100644
--- a/src/Architecture/FeedforwardNetwork.jl
+++ b/src/Architecture/FeedforwardNetwork.jl
@@ -57,7 +57,7 @@ function load_Flux_convert_network()
 end
 
 function Base.show(io::IO, N::FeedforwardNetwork)
-    str = "$(string(FeedforwardNetwork)) with $(dim_in(N)) inputs, " *
+    str = "$FeedforwardNetwork with $(dim_in(N)) inputs, " *
           "$(dim_out(N)) outputs, and $(length(N)) layers:"
     for l in layers(N)
         str *= "\n- $l"

From b34e7f4ce9c551eaadd9173797ab3b67e2d65044 Mon Sep 17 00:00:00 2001
From: schillic
Date: Sat, 25 May 2024 12:50:40 +0200
Subject: [PATCH 2/2] add tests

---
 test/Architecture/ActivationFunction.jl | 5 +++++
 test/runtests.jl                        | 3 +++
 2 files changed, 8 insertions(+)
 create mode 100644 test/Architecture/ActivationFunction.jl

diff --git a/test/Architecture/ActivationFunction.jl b/test/Architecture/ActivationFunction.jl
new file mode 100644
index 0000000..aecb7de
--- /dev/null
+++ b/test/Architecture/ActivationFunction.jl
@@ -0,0 +1,5 @@
+# printing
+io = IOBuffer()
+for act in (Id(), ReLU(), Sigmoid(), Tanh(), LeakyReLU(0.1))
+    println(io, act)
+end
diff --git a/test/runtests.jl b/test/runtests.jl
index 5a06c1d..06fefff 100644
--- a/test/runtests.jl
+++ b/test/runtests.jl
@@ -7,6 +7,9 @@ import Flux, MAT, ONNX, YAML
 struct TestActivation <: ActivationFunction end
 
 @testset "Architecture" begin
+    @testset "ActivationFunction" begin
+        include("Architecture/ActivationFunction.jl")
+    end
     @testset "AbstractLayerOp" begin
         include("Architecture/AbstractLayerOp.jl")
     end