-
Notifications
You must be signed in to change notification settings - Fork 87
Commit
This commit does not belong to any branch on this repository, and may belong to a fork outside of the repository.
added tests; note that they don't work yet
- Loading branch information
Showing
5 changed files
with
169 additions
and
6 deletions.
There are no files selected for viewing
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -0,0 +1,100 @@ | ||
program test_maxpool1d_layer

  ! Exercises the maxpool1d layer: construction defaults, initialization
  ! against an input layer, forward max propagation, and backward routing
  ! of the downstream gradient to the max locations.

  use iso_fortran_env, only: stderr => error_unit
  use nf, only: maxpool1d, input, layer
  use nf_input2d_layer, only: input2d_layer
  use nf_maxpool1d_layer, only: maxpool1d_layer

  implicit none

  type(layer) :: maxpool_layer, input_layer
  integer, parameter :: pool_size = 2, stride = 2
  integer, parameter :: channels = 3, length = 32
  integer, parameter :: input_shape(2) = [channels, length]
  integer, parameter :: output_shape(2) = [channels, length / 2]
  real, allocatable :: sample_input(:,:), output(:,:), gradient(:,:)
  integer :: pos
  logical :: ok = .true., gradient_ok = .true.

  maxpool_layer = maxpool1d(pool_size)

  ! A freshly constructed layer reports its name but is not yet initialized.
  if (maxpool_layer % name /= 'maxpool1d') then
    ok = .false.
    write(stderr, '(a)') 'maxpool1d layer has its name set correctly.. failed'
  end if

  if (maxpool_layer % initialized) then
    ok = .false.
    write(stderr, '(a)') 'maxpool1d layer should not be marked as initialized yet.. failed'
  end if

  ! Wiring the layer to an input layer initializes it and fixes its shapes.
  input_layer = input(channels, length)
  call maxpool_layer % init(input_layer)

  if (.not. maxpool_layer % initialized) then
    ok = .false.
    write(stderr, '(a)') 'maxpool1d layer should now be marked as initialized.. failed'
  end if

  if (any(maxpool_layer % input_layer_shape /= input_shape)) then
    ok = .false.
    write(stderr, '(a)') 'maxpool1d layer input layer shape should be correct.. failed'
  end if

  if (any(maxpool_layer % layer_shape /= output_shape)) then
    ok = .false.
    write(stderr, '(a)') 'maxpool1d layer output layer shape should be correct.. failed'
  end if

  ! Fill every channel at position pos with the value pos, so the max of
  ! each pooling window is known in closed form.
  allocate(sample_input(channels, length))
  do pos = 1, length
    sample_input(:,pos) = pos
  end do

  select type(this_layer => input_layer % p); type is(input2d_layer)
    call this_layer % set(sample_input)
  end select

  call maxpool_layer % forward(input_layer)
  call maxpool_layer % get_output(output)

  do pos = 1, length / 2
    ! Since the input at position i equals i, each maxpool1d output
    ! window must contain its right edge, i.e. stride * pos.
    if (any(output(:,pos) /= stride * pos)) then
      ok = .false.
      write(stderr, '(a)') 'maxpool1d layer forward pass correctly propagates the max value.. failed'
    end if
  end do

  ! Use the forward output itself as the downstream gradient and check
  ! that the backward pass routes it only to the winning positions.
  gradient = output

  call maxpool_layer % backward(input_layer, gradient)

  select type(this_layer => maxpool_layer % p); type is(maxpool1d_layer)
    do pos = 1, length
      if (mod(pos, 2) == 0) then
        ! Even positions won their pooling window; they receive the gradient,
        ! which by construction equals the input value at that position.
        if (any(this_layer % gradient(:,pos) /= sample_input(:,pos))) gradient_ok = .false.
      else
        ! Odd positions lost; their gradient must remain zero.
        if (any(this_layer % gradient(:,pos) /= 0)) gradient_ok = .false.
      end if
    end do
  end select

  if (.not. gradient_ok) then
    ok = .false.
    write(stderr, '(a)') 'maxpool1d layer backward pass produces the correct dL/dx.. failed'
  end if

  if (ok) then
    print '(a)', 'test_maxpool1d_layer: All tests passed.'
  else
    write(stderr, '(a)') 'test_maxpool1d_layer: One or more tests failed.'
    stop 1
  end if

end program test_maxpool1d_layer
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -0,0 +1,54 @@ | ||
program test_reshape2d_layer

  ! Verifies that a reshape2d layer embedded in a network reshapes a flat
  ! input vector into the requested 2-d shape without altering the values.
  !
  ! Fix: removed dead code left over from a copied test — the unused
  ! `use nf_datasets` import and the never-referenced locals
  ! `keras_reshape_path` and `file_exists`.

  use iso_fortran_env, only: stderr => error_unit
  use nf, only: input, network, reshape2d_layer => reshape2d

  implicit none

  type(network) :: net
  real, allocatable :: sample_input(:), output(:,:)
  integer, parameter :: output_shape(2) = [32, 32]
  integer, parameter :: input_size = product(output_shape)
  logical :: ok = .true.

  ! Create the network: a flat input feeding a reshape layer.
  net = network([ &
    input(input_size), &
    reshape2d_layer(output_shape) &
  ])

  if (.not. size(net % layers) == 2) then
    write(stderr, '(a)') 'the network should have 2 layers.. failed'
    ok = .false.
  end if

  ! Initialize test data with random values.
  allocate(sample_input(input_size))
  call random_number(sample_input)

  ! Propagate forward and get the output of the reshape layer.
  call net % forward(sample_input)
  call net % layers(2) % get_output(output)

  if (.not. all(shape(output) == output_shape)) then
    write(stderr, '(a)') 'the reshape layer produces expected output shape.. failed'
    ok = .false.
  end if

  ! Exact equality is appropriate here: reshaping only rearranges storage,
  ! so every element must match the intrinsic reshape bit-for-bit.
  if (.not. all(reshape(sample_input, output_shape) == output)) then
    write(stderr, '(a)') 'the reshape layer produces expected output values.. failed'
    ok = .false.
  end if

  if (ok) then
    print '(a)', 'test_reshape2d_layer: All tests passed.'
  else
    write(stderr, '(a)') 'test_reshape2d_layer: One or more tests failed.'
    stop 1
  end if

end program test_reshape2d_layer