forked from modern-fortran/neural-fortran
-
Notifications
You must be signed in to change notification settings - Fork 2
/
Copy pathtest_maxpool2d_layer.f90
103 lines (83 loc) · 3.26 KB
/
test_maxpool2d_layer.f90
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
program test_maxpool2d_layer

  ! Unit tests for the maxpool2d layer:
  !  * construction (name, uninitialized state)
  !  * initialization against an input layer (input/output shapes)
  !  * forward pass (max value propagation over 2x2 windows)
  !  * backward pass (gradient routed only to max locations)
  !
  ! Exits with status 1 (stop 1) if any check fails.

  use iso_fortran_env, only: stderr => error_unit
  use nf, only: maxpool2d, input, layer
  use nf_input3d_layer, only: input3d_layer
  use nf_maxpool2d_layer, only: maxpool2d_layer

  implicit none

  type(layer) :: maxpool_layer, input_layer
  integer, parameter :: pool_size = 2, stride = 2
  integer, parameter :: channels = 3, width = 32
  integer, parameter :: input_shape(3) = [channels, width, width]
  ! A 2x2 pool with stride 2 halves both spatial dimensions.
  integer, parameter :: output_shape(3) = [channels, width / 2, width / 2]
  real, allocatable :: sample_input(:,:,:), output(:,:,:), gradient(:,:,:)
  integer :: i, j
  logical :: ok = .true., gradient_ok = .true.

  ! Construction: the layer must report its name and start uninitialized.
  maxpool_layer = maxpool2d(pool_size)

  if (.not. maxpool_layer % name == 'maxpool2d') then
    ok = .false.
    write(stderr, '(a)') 'maxpool2d layer has its name set correctly.. failed'
  end if

  if (maxpool_layer % initialized) then
    ok = .false.
    write(stderr, '(a)') 'maxpool2d layer should not be marked as initialized yet.. failed'
  end if

  ! Initialization against a (channels, width, width) input layer.
  input_layer = input(channels, width, width)
  call maxpool_layer % init(input_layer)

  if (.not. maxpool_layer % initialized) then
    ok = .false.
    write(stderr, '(a)') 'maxpool2d layer should now be marked as initialized.. failed'
  end if

  if (.not. all(maxpool_layer % input_layer_shape == input_shape)) then
    ok = .false.
    write(stderr, '(a)') 'maxpool2d layer input layer shape should be correct.. failed'
  end if

  if (.not. all(maxpool_layer % layer_shape == output_shape)) then
    ok = .false.
    write(stderr, '(a)') 'maxpool2d layer output layer shape should be correct.. failed'
  end if

  ! Allocate and initialize sample input data; value at (i, j) is i*j for
  ! every channel, so the max of each 2x2 window is at its bottom-right
  ! corner (even i and j) and equals (stride*i) * (stride*j) in output space.
  allocate(sample_input(channels, width, width))
  do concurrent(i = 1:width, j = 1:width)
    sample_input(:,i,j) = i * j
  end do

  select type(this_layer => input_layer % p); type is(input3d_layer)
    call this_layer % set(sample_input)
  end select

  ! Forward pass: each output cell must hold the window maximum.
  call maxpool_layer % forward(input_layer)
  call maxpool_layer % get_output(output)

  do j = 1, width / 2
    do i = 1, width / 2
      ! Since input is i*j, maxpool2d output must be stride*i * stride*j
      if (.not. all(output(:,i,j) == stride**2 * i * j)) then
        ok = .false.
        write(stderr, '(a)') 'maxpool2d layer forward pass correctly propagates the max value.. failed'
      end if
    end do
  end do

  ! Test the backward pass
  ! Allocate and initialize the downstream gradient field
  allocate(gradient, source=output)

  ! Make a backward pass
  call maxpool_layer % backward(input_layer, gradient)

  ! The gradient must flow back only to the max locations (even i and j,
  ! where it equals the forward input value); all other cells must be zero.
  select type(this_layer => maxpool_layer % p); type is(maxpool2d_layer)
    do j = 1, width
      do i = 1, width
        if (mod(i,2) == 0 .and. mod(j,2) == 0) then
          if (.not. all(sample_input(:,i,j) == this_layer % gradient(:,i,j))) gradient_ok = .false.
        else
          if (.not. all(this_layer % gradient(:,i,j) == 0)) gradient_ok = .false.
        end if
      end do
    end do
  end select

  if (.not. gradient_ok) then
    ok = .false.
    write(stderr, '(a)') 'maxpool2d layer backward pass produces the correct dL/dx.. failed'
  end if

  if (ok) then
    print '(a)', 'test_maxpool2d_layer: All tests passed.'
  else
    write(stderr, '(a)') 'test_maxpool2d_layer: One or more tests failed.'
    stop 1
  end if

end program test_maxpool2d_layer