Skip to content

Commit

Permalink
Added tests; note that they do not work yet
Browse files Browse the repository at this point in the history
  • Loading branch information
ricor07 committed Feb 16, 2025
1 parent b5e7f74 commit cf2caf6
Show file tree
Hide file tree
Showing 5 changed files with 169 additions and 6 deletions.
4 changes: 4 additions & 0 deletions src/nf/nf_layer_submodule.f90
Original file line number Diff line number Diff line change
Expand Up @@ -340,6 +340,10 @@ impure elemental module subroutine init(self, input)
self % layer_shape = shape(this_layer % output)
type is(maxpool2d_layer)
self % layer_shape = shape(this_layer % output)
type is(locally_connected_1d_layer)
self % layer_shape = shape(this_layer % output)
type is(maxpool1d_layer)
self % layer_shape = shape(this_layer % output)
type is(flatten_layer)
self % layer_shape = shape(this_layer % output)
end select
Expand Down
15 changes: 9 additions & 6 deletions src/nf/nf_network_submodule.f90
Original file line number Diff line number Diff line change
Expand Up @@ -73,18 +73,21 @@ module function network_from_layers(layers) result(res)
type is(conv2d_layer)
res % layers = [res % layers(:n-1), flatten(), res % layers(n:)]
n = n + 1
!type is(locally_connected_1d_layer)
!res % layers = [res % layers(:n-1), flatten(), res % layers(n:)]
!n = n + 1
type is(maxpool2d_layer)
res % layers = [res % layers(:n-1), flatten(), res % layers(n:)]
n = n + 1
type is(reshape3d_layer)
res % layers = [res % layers(:n-1), flatten(), res % layers(n:)]
n = n + 1
type is(maxpool1d_layer)
res % layers = [res % layers(:n-1), flatten(), res % layers(n:)]
n = n + 1
type is(reshape2d_layer)
res % layers = [res % layers(:n-1), flatten(), res % layers(n:)]
n = n + 1
!type is(maxpool1d_layer)
! res % layers = [res % layers(:n-1), flatten(), res % layers(n:)]
! n = n + 1
!type is(reshape2d_layer)
! res % layers = [res % layers(:n-1), flatten(), res % layers(n:)]
! n = n + 1
class default
n = n + 1
end select
Expand Down
2 changes: 2 additions & 0 deletions test/CMakeLists.txt
Original file line number Diff line number Diff line change
Expand Up @@ -5,10 +5,12 @@ foreach(execid
parametric_activation
dense_layer
conv2d_layer
maxpool1d_layer
maxpool2d_layer
flatten_layer
insert_flatten
reshape_layer
reshape2d_layer
dense_network
get_set_network_params
conv2d_network
Expand Down
100 changes: 100 additions & 0 deletions test/test_maxpool1d_layer.f90
Original file line number Diff line number Diff line change
@@ -0,0 +1,100 @@
program test_maxpool1d_layer

  !! Exercises the maxpool1d layer end to end: construction, initialization
  !! against an input layer, the forward pass (max selection), and the
  !! backward pass (gradient routed only to the winning positions).

  use iso_fortran_env, only: stderr => error_unit
  use nf, only: maxpool1d, input, layer
  use nf_input2d_layer, only: input2d_layer
  use nf_maxpool1d_layer, only: maxpool1d_layer

  implicit none

  type(layer) :: maxpool_layer, input_layer
  integer, parameter :: pool_size = 2, stride = 2
  integer, parameter :: channels = 3, length = 32
  integer, parameter :: input_shape(2) = [channels, length]
  integer, parameter :: output_shape(2) = [channels, length / 2]
  real, allocatable :: sample_input(:,:), output(:,:), gradient(:,:)
  integer :: pos
  logical :: ok = .true., gradient_ok = .true.

  maxpool_layer = maxpool1d(pool_size)

  ! The constructor alone should set the layer name ...
  if (maxpool_layer % name /= 'maxpool1d') then
    ok = .false.
    write(stderr, '(a)') 'maxpool1d layer has its name set correctly.. failed'
  end if

  ! ... but not mark the layer as initialized.
  if (maxpool_layer % initialized) then
    ok = .false.
    write(stderr, '(a)') 'maxpool1d layer should not be marked as initialized yet.. failed'
  end if

  input_layer = input(channels, length)
  call maxpool_layer % init(input_layer)

  if (.not. maxpool_layer % initialized) then
    ok = .false.
    write(stderr, '(a)') 'maxpool1d layer should now be marked as initialized.. failed'
  end if

  if (.not. all(maxpool_layer % input_layer_shape == input_shape)) then
    ok = .false.
    write(stderr, '(a)') 'maxpool1d layer input layer shape should be correct.. failed'
  end if

  if (.not. all(maxpool_layer % layer_shape == output_shape)) then
    ok = .false.
    write(stderr, '(a)') 'maxpool1d layer output layer shape should be correct.. failed'
  end if

  ! Fill every channel at position pos with the value pos, so the max of each
  ! pool window [2*pos-1, 2*pos] is known in advance (the even index).
  allocate(sample_input(channels, length))
  do pos = 1, length
    sample_input(:, pos) = real(pos)
  end do

  select type(this_layer => input_layer % p); type is(input2d_layer)
    call this_layer % set(sample_input)
  end select

  call maxpool_layer % forward(input_layer)
  call maxpool_layer % get_output(output)

  do pos = 1, length / 2
    ! With input value pos at position pos, the pooled max is stride * pos.
    if (any(output(:, pos) /= stride * pos)) then
      ok = .false.
      write(stderr, '(a)') 'maxpool1d layer forward pass correctly propagates the max value.. failed'
    end if
  end do

  ! Backward pass: seed the downstream gradient with the forward output.
  allocate(gradient, source=output)

  call maxpool_layer % backward(input_layer, gradient)

  select type(this_layer => maxpool_layer % p); type is(maxpool1d_layer)
    do pos = 1, length
      if (mod(pos, 2) == 0) then
        ! Even positions held the window maxima and receive the gradient.
        if (any(this_layer % gradient(:, pos) /= sample_input(:, pos))) gradient_ok = .false.
      else
        ! Odd positions lost the max comparison; their gradient must be zero.
        if (any(this_layer % gradient(:, pos) /= 0)) gradient_ok = .false.
      end if
    end do
  end select

  if (.not. gradient_ok) then
    ok = .false.
    write(stderr, '(a)') 'maxpool1d layer backward pass produces the correct dL/dx.. failed'
  end if

  if (ok) then
    print '(a)', 'test_maxpool1d_layer: All tests passed.'
  else
    write(stderr, '(a)') 'test_maxpool1d_layer: One or more tests failed.'
    stop 1
  end if

end program test_maxpool1d_layer

54 changes: 54 additions & 0 deletions test/test_reshape2d_layer.f90
Original file line number Diff line number Diff line change
@@ -0,0 +1,54 @@
program test_reshape2d_layer

  !! Verifies that a reshape2d layer inside a network reshapes a flat input
  !! vector into the requested 2-d shape in Fortran (column-major) order.
  !!
  !! Fix: removed dead code copied from the 3-d reshape test — the unused
  !! `use nf_datasets` import and the unused `keras_reshape_path` and
  !! `file_exists` locals (no Keras file is downloaded or read here).

  use iso_fortran_env, only: stderr => error_unit
  use nf, only: input, network, reshape2d_layer => reshape2d

  implicit none

  type(network) :: net
  real, allocatable :: sample_input(:), output(:,:)
  integer, parameter :: output_shape(2) = [32, 32]
  integer, parameter :: input_size = product(output_shape)
  logical :: ok = .true.

  ! Create the network: a flat input feeding a 2-d reshape.
  net = network([ &
    input(input_size), &
    reshape2d_layer(output_shape) &
  ])

  if (.not. size(net % layers) == 2) then
    write(stderr, '(a)') 'the network should have 2 layers.. failed'
    ok = .false.
  end if

  ! Initialize test data with random values; exact values don't matter,
  ! only that they survive the reshape unchanged and in order.
  allocate(sample_input(input_size))
  call random_number(sample_input)

  ! Propagate forward and get the output of the reshape layer.
  call net % forward(sample_input)
  call net % layers(2) % get_output(output)

  if (.not. all(shape(output) == output_shape)) then
    write(stderr, '(a)') 'the reshape layer produces expected output shape.. failed'
    ok = .false.
  end if

  ! The layer must agree element-for-element with the intrinsic reshape.
  if (.not. all(reshape(sample_input, output_shape) == output)) then
    write(stderr, '(a)') 'the reshape layer produces expected output values.. failed'
    ok = .false.
  end if

  if (ok) then
    print '(a)', 'test_reshape2d_layer: All tests passed.'
  else
    write(stderr, '(a)') 'test_reshape2d_layer: One or more tests failed.'
    stop 1
  end if

end program test_reshape2d_layer

0 comments on commit cf2caf6

Please sign in to comment.