From b28310a717979c436da463c9a705da941ff0fc34 Mon Sep 17 00:00:00 2001
From: schillic
Date: Fri, 1 Mar 2024 23:13:43 +0100
Subject: [PATCH] remove unused variables

---
 src/FileFormats/NNet.jl     | 4 ++--
 src/FileFormats/ONNX.jl     | 4 ++--
 src/FileFormats/Sherlock.jl | 8 ++++----
 3 files changed, 8 insertions(+), 8 deletions(-)

diff --git a/src/FileFormats/NNet.jl b/src/FileFormats/NNet.jl
index fcbfbc3..231cfaf 100644
--- a/src/FileFormats/NNet.jl
+++ b/src/FileFormats/NNet.jl
@@ -51,7 +51,7 @@ function read_NNet(filename::String)
     layer_sizes = parse.(Int, split(readline(io), ",")[1:(n_layer_ops + 1)])
 
     # five lines of irrelevant information
-    for i in 1:5
+    for _ in 1:5
         line = readline(io)
     end
 
@@ -120,7 +120,7 @@ function write_NNet(N::FeedforwardNetwork, filename::String)
     println(io)
 
     # five lines of irrelevant information
-    for i in 1:5
+    for _ in 1:5
         println(io, "0")
     end
 
diff --git a/src/FileFormats/ONNX.jl b/src/FileFormats/ONNX.jl
index 4951ec6..bdcb47f 100644
--- a/src/FileFormats/ONNX.jl
+++ b/src/FileFormats/ONNX.jl
@@ -35,14 +35,14 @@ function read_ONNX(filename::String; input_dimension=nothing)
 
     # parse input dimension if not provided
     if isnothing(input_dimension)
-        open(filename) do io
+        input_dimension = open(filename) do io
             onnx_raw_model = ONNX.decode(ONNX.ProtoDecoder(io), ONNX.ModelProto)
             input = onnx_raw_model.graph.input
             @assert input isa Vector{ONNX.ValueInfoProto} && length(input) == 1
             dimensions = input[1].var"#type".value.value.shape.dim
             @assert dimensions isa Vector{ONNX.var"TensorShapeProto.Dimension"} &&
                     length(dimensions) == 2 && dimensions[1].value.value == 1
-            return input_dimension = dimensions[2].value.value
+            return dimensions[2].value.value
         end
     end
 
diff --git a/src/FileFormats/Sherlock.jl b/src/FileFormats/Sherlock.jl
index 1f03bec..526da38 100644
--- a/src/FileFormats/Sherlock.jl
+++ b/src/FileFormats/Sherlock.jl
@@ -19,11 +19,11 @@ All layers including the output layer implicitly use a ReLU activation function.
 """
 function read_Sherlock(filename::String)
     # activation functions are not read from file because they are always ReLU
-    read_activations(io, n_layer_ops) = i -> Architecture._relu
+    read_activations(::IO, ::Int) = _ -> Architecture._relu
 
     layer_type = DenseLayerOp{ReLU,Matrix{Float32},Vector{Float32}}
 
-    read_end(io) = nothing
+    read_end(::IO) = nothing
 
     return _read_Sherlock_POLAR(filename, read_activations, layer_type, read_end)
 end
@@ -107,7 +107,7 @@ format.
 The Sherlock format requires that all activation functions are ReLU.
 """
 function write_Sherlock(N::FeedforwardNetwork, filename::String)
-    write_end(io) = nothing
+    write_end(::IO) = nothing
 
     return _write_Sherlock_POLAR(N, filename, _write_activation_Sherlock, write_end)
 end
@@ -142,7 +142,7 @@ function _write_Sherlock_POLAR(N::FeedforwardNetwork, filename::String,
     return nothing
 end
 
-function _write_activation_Sherlock(io, layer)
+function _write_activation_Sherlock(::IO, layer)
     @assert layer.activation isa ReLU "the Sherlock format requires ReLU " *
             "activations everywhere, but the network contains a " *
             "`$(typeof(layer.activation))` activation"
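
Note on the ONNX.jl hunk: in Julia, `open(filename) do io ... end` returns the
value of the `do` block, so the parsed dimension can be assigned at the call
site instead of relying on the closure assigning the outer `input_dimension`
binding from inside the block. Below is a minimal standalone sketch of the two
idioms this patch applies; the file name and contents are hypothetical and not
from the repository.

    # `open` with a do-block forwards the block's return value,
    # so the result can be bound where it is needed:
    write("example.txt", "42\n")           # hypothetical input file
    value = open("example.txt") do io
        return parse(Int, readline(io))    # returned by `open` itself
    end
    @assert value == 42

    # Unused names: `_` for throwaway loop/lambda variables, and a bare
    # `::Type` annotation for arguments that are never used in the body:
    skip_lines(io::IO, n::Int) = foreach(_ -> readline(io), 1:n)
    read_end(::IO) = nothing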