remove unused variables
schillic committed Mar 1, 2024
1 parent 2c74297 commit b28310a
Showing 3 changed files with 8 additions and 8 deletions.
4 changes: 2 additions & 2 deletions src/FileFormats/NNet.jl
```diff
@@ -51,7 +51,7 @@ function read_NNet(filename::String)
     layer_sizes = parse.(Int, split(readline(io), ",")[1:(n_layer_ops + 1)])

     # five lines of irrelevant information
-    for i in 1:5
+    for _ in 1:5
         line = readline(io)
     end

```

```diff
@@ -120,7 +120,7 @@ function write_NNet(N::FeedforwardNetwork, filename::String)
     println(io)

     # five lines of irrelevant information
-    for i in 1:5
+    for _ in 1:5
         println(io, "0")
     end

```
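As a side note, `_` is Julia's convention for a binding that is written but never read; the language even forbids reading `_` back, so the "unused" claim is enforced by the compiler. A minimal standalone sketch of the idiom (the file name is made up for illustration):

```julia
# Skip five header lines whose contents are irrelevant; naming the
# loop variable `_` documents that only the iteration count matters.
open("example.txt") do io
    for _ in 1:5
        readline(io)  # read and discard one line
    end
    println(readline(io))  # the first line we actually care about
end
```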
4 changes: 2 additions & 2 deletions src/FileFormats/ONNX.jl
```diff
@@ -35,14 +35,14 @@ function read_ONNX(filename::String; input_dimension=nothing)

     # parse input dimension if not provided
     if isnothing(input_dimension)
-        open(filename) do io
+        input_dimension = open(filename) do io
             onnx_raw_model = ONNX.decode(ONNX.ProtoDecoder(io), ONNX.ModelProto)
             input = onnx_raw_model.graph.input
             @assert input isa Vector{ONNX.ValueInfoProto} && length(input) == 1
             dimensions = input[1].var"#type".value.value.shape.dim
             @assert dimensions isa Vector{ONNX.var"TensorShapeProto.Dimension"} &&
                     length(dimensions) == 2 && dimensions[1].value.value == 1
-            return input_dimension = dimensions[2].value.value
+            return dimensions[2].value.value
         end
     end

```
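The ONNX.jl change relies on `open(filename) do io ... end` returning the value of the `do` block, so the result can be bound to `input_dimension` outside the closure instead of being assigned from inside it. A minimal sketch of the pattern (file name and contents are hypothetical):

```julia
# `open` with a do-block returns whatever the block returns,
# and closes the file afterwards even if an error is thrown.
first_field = open("data.csv") do io
    header = readline(io)
    return split(header, ",")[1]
end
```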
8 changes: 4 additions & 4 deletions src/FileFormats/Sherlock.jl
```diff
@@ -19,11 +19,11 @@ All layers including the output layer implicitly use a ReLU activation function.
 """
 function read_Sherlock(filename::String)
     # activation functions are not read from file because they are always ReLU
-    read_activations(io, n_layer_ops) = i -> Architecture._relu
+    read_activations(::IO, ::Int) = _ -> Architecture._relu

     layer_type = DenseLayerOp{ReLU,Matrix{Float32},Vector{Float32}}

-    read_end(io) = nothing
+    read_end(::IO) = nothing

     return _read_Sherlock_POLAR(filename, read_activations, layer_type, read_end)
 end
```
```diff
@@ -107,7 +107,7 @@ format.
 The Sherlock format requires that all activation functions are ReLU.
 """
 function write_Sherlock(N::FeedforwardNetwork, filename::String)
-    write_end(io) = nothing
+    write_end(::IO) = nothing
     return _write_Sherlock_POLAR(N, filename, _write_activation_Sherlock, write_end)
 end

```

```diff
@@ -142,7 +142,7 @@ function _write_Sherlock_POLAR(N::FeedforwardNetwork, filename::String,
     return nothing
 end

-function _write_activation_Sherlock(io, layer)
+function _write_activation_Sherlock(::IO, layer)
     @assert layer.activation isa ReLU "the Sherlock format requires ReLU " *
                                       "activations everywhere, but the network contains a " *
                                       "`$(typeof(layer.activation))` activation"
```
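The Sherlock.jl changes combine two related Julia idioms: an argument that a method must accept but never reads can be declared by type alone (e.g. `::IO`), and an anonymous function that ignores its input can be written `_ -> ...`. A standalone sketch with made-up names, mirroring the `read_activations` definition above:

```julia
# An argument needed only to match a call signature can be declared
# by its type alone; omitting the name marks it as intentionally unused.
make_activation(::IO, ::Int) = _ -> "relu"

f = make_activation(stdin, 3)
println(f(42))  # prints "relu", regardless of the input
```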
