Commit c2f6420

Merge pull request #44 from JuliaReach/schillic/refactor
Refactoring and remove type restriction of `DenseLayerOp`
2 parents: 1e98daa + 1f2c97c

10 files changed: +39 −39 lines

src/Architecture/Architecture.jl

Lines changed: 6 additions & 6 deletions
@@ -7,16 +7,16 @@ module Architecture

 using Requires

-export AbstractNeuralNetwork, AbstractLayerOp,
-       FeedforwardNetwork, DenseLayerOp,
+export AbstractNeuralNetwork, FeedforwardNetwork,
+       AbstractLayerOp, DenseLayerOp,
        layers, dim_in, dim_out,
        ActivationFunction, Id, ReLU, Sigmoid, Tanh, LeakyReLU

-include("AbstractNeuralNetwork.jl")
-include("AbstractLayerOp.jl")
 include("ActivationFunction.jl")
-include("DenseLayerOp.jl")
-include("FeedforwardNetwork.jl")
+include("LayerOps/AbstractLayerOp.jl")
+include("LayerOps/DenseLayerOp.jl")
+include("NeuralNetworks/AbstractNeuralNetwork.jl")
+include("NeuralNetworks/FeedforwardNetwork.jl")

 include("init.jl")
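
Note: the reordered `export` list now groups network-level names before layer-level names, and the new `include` paths imply the following directory layout after the refactoring (reconstructed from the paths in this hunk only; other package files are omitted):

src/Architecture/
├── Architecture.jl
├── ActivationFunction.jl
├── init.jl
├── LayerOps/
│   ├── AbstractLayerOp.jl
│   └── DenseLayerOp.jl
└── NeuralNetworks/
    ├── AbstractNeuralNetwork.jl
    └── FeedforwardNetwork.jl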

src/Architecture/DenseLayerOp.jl renamed to src/Architecture/LayerOps/DenseLayerOp.jl

Lines changed: 9 additions & 9 deletions
@@ -1,5 +1,5 @@
 """
-    DenseLayerOp{F, M<:AbstractMatrix, B} <: AbstractLayerOp
+    DenseLayerOp{F, M, B} <: AbstractLayerOp

 A dense layer operation is an affine map followed by an activation function.

@@ -13,23 +13,23 @@ A dense layer operation is an affine map followed by an activation function.

 Conversion from a `Flux.Dense` is supported.
 """
-struct DenseLayerOp{F,M<:AbstractMatrix,B} <: AbstractLayerOp
-    weights::M
+struct DenseLayerOp{F,W,B} <: AbstractLayerOp
+    weights::W
     bias::B
     activation::F

-    function DenseLayerOp(weights::M, bias::B, activation::F;
-                          validate=Val(true)) where {F,M<:AbstractMatrix,B}
-        if validate isa Val{true} && !_isconsistent(weights, bias)
+    function DenseLayerOp(weights::W, bias::B, activation::F;
+                          validate=Val(true)) where {F,W,B}
+        if validate isa Val{true} && !_isconsistent_DenseLayerOp(weights, bias)
             throw(ArgumentError("inconsistent dimensions of weights " *
                                 "($(size(weights, 1))) and bias ($(length(bias)))"))
         end

-        return new{F,M,B}(weights, bias, activation)
+        return new{F,W,B}(weights, bias, activation)
     end
 end

-function _isconsistent(weights, bias)
+function _isconsistent_DenseLayerOp(weights, bias)
     return size(weights, 1) == length(bias)
 end

@@ -73,7 +73,7 @@ dim_in(L::DenseLayerOp) = size(L.weights, 2)

 dim_out(L::DenseLayerOp) = length(L.bias)

-function load_Flux_convert_layer()
+function load_Flux_convert_Dense_layer()
     return quote
         function Base.convert(::Type{DenseLayerOp}, layer::Flux.Dense)
             act = get(activations_Flux, layer.σ, nothing)
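
Note: the substantive change in this file is dropping the `M<:AbstractMatrix` restriction (the type parameter is renamed from `M` to `W`), so the weights are no longer required to be an `AbstractMatrix` subtype. A minimal sketch of the constructor after the change, assuming the `Architecture` exports (`DenseLayerOp`, `ReLU`, `Id`, `dim_in`, `dim_out`) are in scope; the result comments are illustrative, not output from this commit:

W = [1.0 2.0; 3.0 4.0; 5.0 6.0]   # 3×2 weight matrix
b = [0.1, 0.2, 0.3]               # bias of length 3 == size(W, 1)
L = DenseLayerOp(W, b, ReLU())

dim_in(L)   # 2
dim_out(L)  # 3

# The dimension check size(W, 1) == length(b) still runs by default;
# it can be skipped explicitly via the keyword argument:
# DenseLayerOp(W, [0.1], Id())                       # throws ArgumentError
# DenseLayerOp(W, [0.1], Id(); validate=Val(false))  # check skipped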

src/Architecture/init.jl

Lines changed: 1 addition & 1 deletion
@@ -2,7 +2,7 @@
 function __init__()
     @require Flux = "587475ba-b771-5e3f-ad9e-33799f191a9c" begin
         eval(load_Flux_activations())
-        eval(load_Flux_convert_layer())
+        eval(load_Flux_convert_Dense_layer())
         eval(load_Flux_convert_network())
     end
 end
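
Note: this file wires up the optional Flux integration through Requires.jl, so the conversion code is only evaluated once Flux is loaded; the rename of the loader function therefore has to be mirrored here. A hedged sketch of what this enables (the layer below is an arbitrary example, not taken from this commit):

import Flux  # triggers the @require block above, which defines the convert methods

d = Flux.Dense(2 => 3, Flux.relu)
op = convert(DenseLayerOp, d)   # DenseLayerOp with ReLU() activation
d2 = convert(Flux.Dense, op)    # and back to a Flux.Dense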

src/FileFormats/ONNX.jl

Lines changed: 1 addition & 1 deletion
@@ -109,7 +109,7 @@ function read_ONNX(filename::String; input_dimension=nothing)
         @assert args[2]._op.id == idx - 1
         act = args[1]
     else
-        @assert false "cannot parse activation $op"
+        throw(ArgumentError("cannot parse activation $op"))  # COV_EXCL_LINE
     end
     a = available_activations[string(act)]
     idx += 1
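
Note: replacing `@assert false` with a thrown `ArgumentError` turns the failure into a catchable, documented error rather than an `AssertionError` (which may be elided when assertions are disabled). A small caller-side sketch, where the file name is a placeholder for a model with an activation the parser does not recognize:

try
    net = read_ONNX("model_with_unsupported_activation.onnx")
catch e
    e isa ArgumentError || rethrow()
    @warn "could not parse the network" exception = e
end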

test/Architecture/ActivationFunction.jl

Lines changed: 4 additions & 0 deletions
@@ -3,3 +3,7 @@ io = IOBuffer()
 for act in (Id(), ReLU(), Sigmoid(), Tanh(), LeakyReLU(0.1))
     println(io, act)
 end
+
+# leaky ReLU on a vector
+act = LeakyReLU(0.01)
+@test act([-1.0, 0, 1, -100]) == [-0.01, 0, 1, -1]

test/Architecture/DenseLayerOp.jl

Lines changed: 0 additions & 4 deletions
@@ -79,7 +79,3 @@ for act in subtypes(ActivationFunction)
     end
     test_layer(DenseLayerOp(W, b, act_inst))
 end
-
-# leaky ReLU on a vector
-act = LeakyReLU(0.01)
-@test act([-1.0, 0, 1, -100]) == [-0.01, 0, 1, -1]

test/Architecture/Flux.jl

Lines changed: 18 additions & 18 deletions
@@ -1,37 +1,37 @@
 import Flux

-l1 = Flux.Dense(1, 2, Flux.relu)
-l1.weight .= 1, 2
-l1.bias .= 3, 4
+L1 = Flux.Dense(1, 2, Flux.relu)
+L1.weight .= 1, 2
+L1.bias .= 3, 4

-l2 = Flux.Dense(2, 3, Flux.sigmoid)
-l2.weight .= [1 2; 3 4; 5 6]
+L2 = Flux.Dense(2, 3, Flux.sigmoid)
+L2.weight .= [1 2; 3 4; 5 6]

-l3 = Flux.Dense(3, 1)
-l3.weight .= [1 2 3;]
+L3 = Flux.Dense(3, 1)
+L3.weight .= [1 2 3;]

-l_unsupported = Flux.Dense(1 => 1, Flux.trelu)
+L_unsupported = Flux.Dense(1 => 1, Flux.trelu)

-c = Flux.Chain(l1, l2, l3)
+c = Flux.Chain(L1, L2, L3)

 activations = [ReLU(), Sigmoid(), Id()]

 # `==` is not defined for Flux types
-function compare_Flux_layer(l1, l2)
-    return l1.weight == l2.weight && l1.bias == l2.bias && l1.σ == l2.σ
+function compare_Flux_layer(L1, L2)
+    return L1.weight == L2.weight && L1.bias == L2.bias && L1.σ == L2.σ
 end

 # layer conversion
-for (i, l) in enumerate(c.layers)
-    op = convert(DenseLayerOp, l)
-    @test op.weights == l.weight
-    @test op.bias == l.bias
+for (i, L) in enumerate(c.layers)
+    op = convert(DenseLayerOp, L)
+    @test op.weights == L.weight
+    @test op.bias == L.bias
     @test op.activation == activations[i]

-    l_back = convert(Flux.Dense, op)
-    @test compare_Flux_layer(l, l_back)
+    L_back = convert(Flux.Dense, op)
+    @test compare_Flux_layer(L, L_back)
 end
-@test_throws ArgumentError convert(DenseLayerOp, l_unsupported)
+@test_throws ArgumentError convert(DenseLayerOp, L_unsupported)

 # network conversion
 net = convert(FeedforwardNetwork, c)
