Skip to content

Commit

Permalink
Merge pull request #35 from JuliaReach/schillic/refactor
Browse files Browse the repository at this point in the history
Refactor test to separate file
  • Loading branch information
schillic authored Apr 5, 2024
2 parents 456d5f8 + b79d4ed commit 1854042
Show file tree
Hide file tree
Showing 4 changed files with 10 additions and 7 deletions.
3 changes: 2 additions & 1 deletion src/Architecture/DenseLayerOp.jl
Original file line number Diff line number Diff line change
Expand Up @@ -33,7 +33,8 @@ function _isconsistent(weights, bias)
return size(weights, 1) == length(bias)
end

(l::DenseLayerOp)(x) = l.activation.(l.weights * x .+ l.bias)
# Apply the layer to an input vector `x`: affine map followed by the
# element-wise activation function.
function (N::DenseLayerOp)(x)
    y = N.weights * x .+ N.bias
    return N.activation.(y)
end

Base.length(L::DenseLayerOp) = length(L.bias)

Expand Down
5 changes: 5 additions & 0 deletions test/Architecture/AbstractLayerOp.jl
Original file line number Diff line number Diff line change
@@ -0,0 +1,5 @@
# Minimal AbstractLayerOp implementation, used as a smoke test for the
# interface's generic methods.
struct TestLayerOp <: AbstractLayerOp end
L = TestLayerOp()
# Exercise the generic dimension queries on a layer type that defines no
# methods of its own; results are discarded.
# NOTE(review): presumably these hit AbstractLayerOp fallbacks and return
# a default (e.g. `nothing`) rather than throw — confirm against the package.
dim_in(L)
dim_out(L)
6 changes: 0 additions & 6 deletions test/Architecture/DenseLayerOp.jl
Original file line number Diff line number Diff line change
@@ -1,11 +1,5 @@
using ReachabilityBase.Subtypes: subtypes

# AbstractLayerOp implementation
struct TestLayerOp <: AbstractLayerOp end
L = TestLayerOp()
dim_in(L)
dim_out(L)

# 2D input vector and a layer mapping 2 inputs to 3 neurons
# (W is 3x2: one row of weights per neuron).
x = [1.0, 1]
# The original wrapped the matrix literal in `hcat`, which is a no-op on a
# single matrix argument; the bare literal is equivalent and clearer.
W = [1 0.5; -0.5 0.5; -1 -0.5]
Expand Down
3 changes: 3 additions & 0 deletions test/runtests.jl
Original file line number Diff line number Diff line change
Expand Up @@ -7,6 +7,9 @@ import Flux, MAT, ONNX, YAML
struct TestActivation <: ActivationFunction end

@testset "Architecture" begin
# Tests for the generic AbstractLayerOp interface (moved to its own file
# in this commit).
@testset "AbstractLayerOp" begin
include("Architecture/AbstractLayerOp.jl")
end
# Tests for the concrete DenseLayerOp layer type.
@testset "DenseLayerOp" begin
include("Architecture/DenseLayerOp.jl")
end
Expand Down

0 comments on commit 1854042

Please sign in to comment.