From 91ab6ec5f58a71fb6ed6e7421636411964f9b174 Mon Sep 17 00:00:00 2001
From: schillic
Date: Sun, 7 Apr 2024 16:28:41 +0200
Subject: [PATCH 1/7] reorder Architecture module into folders

---
 src/Architecture/Architecture.jl                   | 12 ++++++------
 src/Architecture/{ => LayerOps}/AbstractLayerOp.jl |  0
 src/Architecture/{ => LayerOps}/DenseLayerOp.jl    |  0
 .../{ => NeuralNetworks}/AbstractNeuralNetwork.jl  |  0
 .../{ => NeuralNetworks}/FeedforwardNetwork.jl     |  0
 5 files changed, 6 insertions(+), 6 deletions(-)
 rename src/Architecture/{ => LayerOps}/AbstractLayerOp.jl (100%)
 rename src/Architecture/{ => LayerOps}/DenseLayerOp.jl (100%)
 rename src/Architecture/{ => NeuralNetworks}/AbstractNeuralNetwork.jl (100%)
 rename src/Architecture/{ => NeuralNetworks}/FeedforwardNetwork.jl (100%)

diff --git a/src/Architecture/Architecture.jl b/src/Architecture/Architecture.jl
index 21f225f..7027862 100644
--- a/src/Architecture/Architecture.jl
+++ b/src/Architecture/Architecture.jl
@@ -7,16 +7,16 @@ module Architecture
 
 using Requires
 
-export AbstractNeuralNetwork, AbstractLayerOp,
-       FeedforwardNetwork, DenseLayerOp,
+export AbstractNeuralNetwork, FeedforwardNetwork,
+       AbstractLayerOp, DenseLayerOp,
        layers, dim_in, dim_out,
        ActivationFunction, Id, ReLU, Sigmoid, Tanh, LeakyReLU
 
-include("AbstractNeuralNetwork.jl")
-include("AbstractLayerOp.jl")
 include("ActivationFunction.jl")
-include("DenseLayerOp.jl")
-include("FeedforwardNetwork.jl")
+include("LayerOps/AbstractLayerOp.jl")
+include("LayerOps/DenseLayerOp.jl")
+include("NeuralNetworks/AbstractNeuralNetwork.jl")
+include("NeuralNetworks/FeedforwardNetwork.jl")
 
 include("init.jl")
 
diff --git a/src/Architecture/AbstractLayerOp.jl b/src/Architecture/LayerOps/AbstractLayerOp.jl
similarity index 100%
rename from src/Architecture/AbstractLayerOp.jl
rename to src/Architecture/LayerOps/AbstractLayerOp.jl
diff --git a/src/Architecture/DenseLayerOp.jl b/src/Architecture/LayerOps/DenseLayerOp.jl
similarity index 100%
rename from src/Architecture/DenseLayerOp.jl
rename to src/Architecture/LayerOps/DenseLayerOp.jl
diff --git a/src/Architecture/AbstractNeuralNetwork.jl b/src/Architecture/NeuralNetworks/AbstractNeuralNetwork.jl
similarity index 100%
rename from src/Architecture/AbstractNeuralNetwork.jl
rename to src/Architecture/NeuralNetworks/AbstractNeuralNetwork.jl
diff --git a/src/Architecture/FeedforwardNetwork.jl b/src/Architecture/NeuralNetworks/FeedforwardNetwork.jl
similarity index 100%
rename from src/Architecture/FeedforwardNetwork.jl
rename to src/Architecture/NeuralNetworks/FeedforwardNetwork.jl

From 48c7dad0df8d57973f042bff39c7395108c7f2a4 Mon Sep 17 00:00:00 2001
From: schillic
Date: Sun, 7 Apr 2024 16:52:06 +0200
Subject: [PATCH 2/7] remove type restriction of DenseLayerOp

---
 src/Architecture/LayerOps/DenseLayerOp.jl | 12 ++++++------
 1 file changed, 6 insertions(+), 6 deletions(-)

diff --git a/src/Architecture/LayerOps/DenseLayerOp.jl b/src/Architecture/LayerOps/DenseLayerOp.jl
index c32609b..9ddcfc1 100644
--- a/src/Architecture/LayerOps/DenseLayerOp.jl
+++ b/src/Architecture/LayerOps/DenseLayerOp.jl
@@ -1,5 +1,5 @@
 """
-    DenseLayerOp{F, M<:AbstractMatrix, B} <: AbstractLayerOp
+    DenseLayerOp{F, M, B} <: AbstractLayerOp
 
 A dense layer operation is an affine map followed by an activation function.
 
@@ -13,19 +13,19 @@ A dense layer operation is an affine map followed by an activation function.
 
 Conversion from a `Flux.Dense` is supported.
 """
-struct DenseLayerOp{F,M<:AbstractMatrix,B} <: AbstractLayerOp
-    weights::M
+struct DenseLayerOp{F,W,B} <: AbstractLayerOp
+    weights::W
     bias::B
     activation::F
 
-    function DenseLayerOp(weights::M, bias::B, activation::F;
-                          validate=Val(true)) where {F,M<:AbstractMatrix,B}
+    function DenseLayerOp(weights::W, bias::B, activation::F;
+                          validate=Val(true)) where {F,W,B}
         if validate isa Val{true} && !_isconsistent(weights, bias)
             throw(ArgumentError("inconsistent dimensions of weights " *
                                 "($(size(weights, 1))) and bias ($(length(bias)))"))
         end
 
-        return new{F,M,B}(weights, bias, activation)
+        return new{F,W,B}(weights, bias, activation)
     end
 end
 

From 431134618638adb82be4408175beb671b217767b Mon Sep 17 00:00:00 2001
From: schillic
Date: Sun, 7 Apr 2024 17:46:22 +0200
Subject: [PATCH 3/7] rename function

---
 src/Architecture/LayerOps/DenseLayerOp.jl | 4 ++--
 1 file changed, 2 insertions(+), 2 deletions(-)

diff --git a/src/Architecture/LayerOps/DenseLayerOp.jl b/src/Architecture/LayerOps/DenseLayerOp.jl
index 9ddcfc1..a35f597 100644
--- a/src/Architecture/LayerOps/DenseLayerOp.jl
+++ b/src/Architecture/LayerOps/DenseLayerOp.jl
@@ -20,7 +20,7 @@ struct DenseLayerOp{F,W,B} <: AbstractLayerOp
 
     function DenseLayerOp(weights::W, bias::B, activation::F;
                           validate=Val(true)) where {F,W,B}
-        if validate isa Val{true} && !_isconsistent(weights, bias)
+        if validate isa Val{true} && !_isconsistent_DenseLayerOp(weights, bias)
             throw(ArgumentError("inconsistent dimensions of weights " *
                                 "($(size(weights, 1))) and bias ($(length(bias)))"))
         end
@@ -29,7 +29,7 @@ struct DenseLayerOp{F,W,B} <: AbstractLayerOp
     end
 end
 
-function _isconsistent(weights, bias)
+function _isconsistent_DenseLayerOp(weights, bias)
     return size(weights, 1) == length(bias)
 end
 

From 12c70f56a339a6ad8e4e5ce10e12f79a01b1267e Mon Sep 17 00:00:00 2001
From: schillic
Date: Sun, 7 Apr 2024 21:48:37 +0200
Subject: [PATCH 4/7] outsource test

---
 test/Architecture/ActivationFunction.jl | 4 ++++
 test/Architecture/DenseLayerOp.jl       | 4 ----
 2 files changed, 4 insertions(+), 4 deletions(-)

diff --git a/test/Architecture/ActivationFunction.jl b/test/Architecture/ActivationFunction.jl
index aecb7de..bb753cf 100644
--- a/test/Architecture/ActivationFunction.jl
+++ b/test/Architecture/ActivationFunction.jl
@@ -3,3 +3,7 @@ io = IOBuffer()
 for act in (Id(), ReLU(), Sigmoid(), Tanh(), LeakyReLU(0.1))
     println(io, act)
 end
+
+# leaky ReLU on a vector
+act = LeakyReLU(0.01)
+@test act([-1.0, 0, 1, -100]) == [-0.01, 0, 1, -1]
diff --git a/test/Architecture/DenseLayerOp.jl b/test/Architecture/DenseLayerOp.jl
index 46989d9..92e851f 100644
--- a/test/Architecture/DenseLayerOp.jl
+++ b/test/Architecture/DenseLayerOp.jl
@@ -79,7 +79,3 @@ for act in subtypes(ActivationFunction)
     end
     test_layer(DenseLayerOp(W, b, act_inst))
 end
-
-# leaky ReLU on a vector
-act = LeakyReLU(0.01)
-@test act([-1.0, 0, 1, -100]) == [-0.01, 0, 1, -1]

From fbba3c6b5817f0b4b373aa121bc98e20f561ace1 Mon Sep 17 00:00:00 2001
From: schillic
Date: Sun, 7 Apr 2024 22:35:35 +0200
Subject: [PATCH 5/7] rename variables

---
 test/Architecture/Flux.jl | 36 ++++++++++++++++++------------------
 1 file changed, 18 insertions(+), 18 deletions(-)

diff --git a/test/Architecture/Flux.jl b/test/Architecture/Flux.jl
index c3dd777..f4c2318 100644
--- a/test/Architecture/Flux.jl
+++ b/test/Architecture/Flux.jl
@@ -1,37 +1,37 @@
 import Flux
 
-l1 = Flux.Dense(1, 2, Flux.relu)
-l1.weight .= 1, 2
-l1.bias .= 3, 4
+L1 = Flux.Dense(1, 2, Flux.relu)
+L1.weight .= 1, 2
+L1.bias .= 3, 4
 
-l2 = Flux.Dense(2, 3, Flux.sigmoid)
-l2.weight .= [1 2; 3 4; 5 6]
+L2 = Flux.Dense(2, 3, Flux.sigmoid)
+L2.weight .= [1 2; 3 4; 5 6]
 
-l3 = Flux.Dense(3, 1)
-l3.weight .= [1 2 3;]
+L3 = Flux.Dense(3, 1)
+L3.weight .= [1 2 3;]
 
-l_unsupported = Flux.Dense(1 => 1, Flux.trelu)
+L_unsupported = Flux.Dense(1 => 1, Flux.trelu)
 
-c = Flux.Chain(l1, l2, l3)
+c = Flux.Chain(L1, L2, L3)
 
 activations = [ReLU(), Sigmoid(), Id()]
 
 # `==` is not defined for Flux types
-function compare_Flux_layer(l1, l2)
-    return l1.weight == l2.weight && l1.bias == l2.bias && l1.σ == l2.σ
+function compare_Flux_layer(L1, L2)
+    return L1.weight == L2.weight && L1.bias == L2.bias && L1.σ == L2.σ
 end
 
 # layer conversion
-for (i, l) in enumerate(c.layers)
-    op = convert(DenseLayerOp, l)
-    @test op.weights == l.weight
-    @test op.bias == l.bias
+for (i, L) in enumerate(c.layers)
+    op = convert(DenseLayerOp, L)
+    @test op.weights == L.weight
+    @test op.bias == L.bias
     @test op.activation == activations[i]
-    l_back = convert(Flux.Dense, op)
-    @test compare_Flux_layer(l, l_back)
+    L_back = convert(Flux.Dense, op)
+    @test compare_Flux_layer(L, L_back)
 end
 
-@test_throws ArgumentError convert(DenseLayerOp, l_unsupported)
+@test_throws ArgumentError convert(DenseLayerOp, L_unsupported)
 
 # network conversion
 net = convert(FeedforwardNetwork, c)

From d31df1b0ebfbb609cdd9d62aafa6c779b5d473ec Mon Sep 17 00:00:00 2001
From: schillic
Date: Sun, 7 Apr 2024 22:36:01 +0200
Subject: [PATCH 6/7] rename function

---
 src/Architecture/LayerOps/DenseLayerOp.jl | 2 +-
 src/Architecture/init.jl                  | 2 +-
 2 files changed, 2 insertions(+), 2 deletions(-)

diff --git a/src/Architecture/LayerOps/DenseLayerOp.jl b/src/Architecture/LayerOps/DenseLayerOp.jl
index a35f597..cd2a535 100644
--- a/src/Architecture/LayerOps/DenseLayerOp.jl
+++ b/src/Architecture/LayerOps/DenseLayerOp.jl
@@ -73,7 +73,7 @@ dim_in(L::DenseLayerOp) = size(L.weights, 2)
 
 dim_out(L::DenseLayerOp) = length(L.bias)
 
-function load_Flux_convert_layer()
+function load_Flux_convert_Dense_layer()
     return quote
         function Base.convert(::Type{DenseLayerOp}, layer::Flux.Dense)
             act = get(activations_Flux, layer.σ, nothing)
diff --git a/src/Architecture/init.jl b/src/Architecture/init.jl
index e2fecde..a62c2b7 100644
--- a/src/Architecture/init.jl
+++ b/src/Architecture/init.jl
@@ -2,7 +2,7 @@
 function __init__()
     @require Flux = "587475ba-b771-5e3f-ad9e-33799f191a9c" begin
         eval(load_Flux_activations())
-        eval(load_Flux_convert_layer())
+        eval(load_Flux_convert_Dense_layer())
         eval(load_Flux_convert_network())
     end
 end

From 1f2c97cc8315f289f2872ba3a1115ffa6d45b5b8 Mon Sep 17 00:00:00 2001
From: schillic
Date: Sat, 25 May 2024 13:18:23 +0200
Subject: [PATCH 7/7] remove error line from coverage

---
 src/FileFormats/ONNX.jl | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/src/FileFormats/ONNX.jl b/src/FileFormats/ONNX.jl
index 2705858..3682e37 100644
--- a/src/FileFormats/ONNX.jl
+++ b/src/FileFormats/ONNX.jl
@@ -109,7 +109,7 @@ function read_ONNX(filename::String; input_dimension=nothing)
             @assert args[2]._op.id == idx - 1
             act = args[1]
         else
-            @assert false "cannot parse activation $op"
+            throw(ArgumentError("cannot parse activation $op"))  # COV_EXCL_LINE
         end
         a = available_activations[string(act)]
         idx += 1
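Usage sketch (illustrative only, not part of the patch series above; it assumes the package defining the `Architecture` module is loaded and exports `DenseLayerOp`, `ReLU`, `dim_in`, and `dim_out`, as in patch 1/7): after patch 2/7, the weights are no longer constrained to `AbstractMatrix`, and the only property exercised by the validation renamed in patch 3/7 to `_isconsistent_DenseLayerOp` is that `size(weights, 1)` equals `length(bias)`.

    W = [1.0 2.0; 3.0 4.0; 5.0 6.0]   # 3x2 weight matrix
    b = [0.1, 0.2, 0.3]               # length(b) == size(W, 1), so validation passes
    L = DenseLayerOp(W, b, ReLU())
    dim_in(L)    # 2 (= size(W, 2))
    dim_out(L)   # 3 (= length(b))
    # a mismatched bias raises an ArgumentError:
    #   DenseLayerOp(W, [0.0], ReLU())
    # the dimension check can be skipped explicitly:
    #   DenseLayerOp(W, [0.0], ReLU(); validate=Val(false))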