-
Notifications
You must be signed in to change notification settings - Fork 86
Commit
This commit does not belong to any branch on this repository, and may belong to a fork outside of the repository.
Merge pull request #75 from milancurcic/flatten-layer
Implement a flatten layer
- Loading branch information
Showing 13 changed files with 439 additions and 80 deletions.
There are no files selected for viewing
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -0,0 +1,32 @@ | ||
program cnn

  !! Example driver: build a small convolutional network with the
  !! neural-fortran high-level API and run a single forward pass on
  !! random input. The backward pass is not implemented yet, so no
  !! training happens here.

  use nf, only: conv2d, dense, flatten, input, maxpool2d, network

  implicit none

  type(network) :: net
  real, allocatable :: x(:,:,:)

  print '("Creating a CNN and doing a forward pass")'
  print '("(backward pass not implemented yet)")'
  print '(60("="))'

  ! The output shape of each layer is noted in the trailing comments.
  net = network([ &
    input([3, 32, 32]), &
    conv2d(filters=16, kernel_size=3, activation='relu'), & ! (16, 30, 30)
    maxpool2d(pool_size=2), & ! (16, 15, 15)
    conv2d(filters=32, kernel_size=3, activation='relu'), & ! (32, 13, 13)
    maxpool2d(pool_size=2), & ! (32, 6, 6)
    flatten(), &
    dense(10) &
  ])

  ! Print a network summary to the screen
  call net % print_info()

  ! Random input in [0, 1); shape must match the input layer above.
  allocate(x(3,32,32))
  call random_number(x)

  print *, 'Output:', net % output(x)

end program cnn
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -0,0 +1,75 @@ | ||
module nf_flatten_layer

  !! This module provides the concrete flatten layer type.
  !! It is used internally by the layer type.
  !! It is not intended to be used directly by the user.

  use nf_base_layer, only: base_layer

  implicit none

  private
  public :: flatten_layer

  type, extends(base_layer) :: flatten_layer

    !! Concrete implementation of a flatten (3-d to 1-d) layer.
    !! Forward reshapes a rank-3 input into a rank-1 output; backward
    !! reshapes the rank-1 gradient back into the rank-3 input shape.

    integer, allocatable :: input_shape(:)
    integer :: output_size

    real, allocatable :: gradient(:,:,:)
    real, allocatable :: output(:)

  contains

    procedure :: backward
    procedure :: forward
    procedure :: init

  end type flatten_layer

  interface flatten_layer
    elemental module function flatten_layer_cons() result(res)
      !! This function returns the `flatten_layer` instance.
      type(flatten_layer) :: res
        !! `flatten_layer` instance
    end function flatten_layer_cons
  end interface flatten_layer

  interface

    pure module subroutine backward(self, input, gradient)
      !! Apply the backward pass to the flatten layer.
      !! This is a reshape operation from 1-d gradient to 3-d input.
      class(flatten_layer), intent(in out) :: self
        !! Flatten layer instance
      real, intent(in) :: input(:,:,:)
        !! Input from the previous layer
      real, intent(in) :: gradient(:)
        !! Gradient from the next layer
    end subroutine backward

    pure module subroutine forward(self, input)
      !! Propagate forward the layer.
      !! Calling this subroutine updates the values of a few data components
      !! of `flatten_layer` that are needed for the backward pass.
      class(flatten_layer), intent(in out) :: self
        !! Flatten layer instance
      real, intent(in) :: input(:,:,:)
        !! Input from the previous layer
    end subroutine forward

    module subroutine init(self, input_shape)
      !! Initialize the layer data structures.
      !!
      !! This is a deferred procedure from the `base_layer` abstract type.
      class(flatten_layer), intent(in out) :: self
        !! Flatten layer instance
      integer, intent(in) :: input_shape(:)
        !! Shape of the input layer
    end subroutine init

  end interface

end module nf_flatten_layer
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -0,0 +1,48 @@ | ||
submodule(nf_flatten_layer) nf_flatten_layer_submodule

  !! Implementations of the flatten layer procedures whose interfaces
  !! are declared in `nf_flatten_layer`. Used internally by the layer
  !! type; not intended for direct use.

  use nf_base_layer, only: base_layer

  implicit none

contains

  elemental module function flatten_layer_cons() result(res)
    ! A default-initialized instance; data components are set up in init.
    type(flatten_layer) :: res
  end function flatten_layer_cons


  pure module subroutine backward(self, input, gradient)
    class(flatten_layer), intent(in out) :: self
    real, intent(in) :: input(:,:,:)
    real, intent(in) :: gradient(:)
    ! Fold the flat gradient back into the 3-d shape of the input.
    self % gradient = &
      reshape(gradient, [size(input, 1), size(input, 2), size(input, 3)])
  end subroutine backward


  pure module subroutine forward(self, input)
    class(flatten_layer), intent(in out) :: self
    real, intent(in) :: input(:,:,:)
    ! Flatten the 3-d input into a 1-d vector in array element order.
    self % output = reshape(input, [size(input)])
  end subroutine forward


  module subroutine init(self, input_shape)
    class(flatten_layer), intent(in out) :: self
    integer, intent(in) :: input_shape(:)

    self % input_shape = input_shape
    self % output_size = product(input_shape)

    ! Gradient mirrors the input shape; output holds the flattened values.
    allocate(self % gradient(input_shape(1), input_shape(2), input_shape(3)))
    allocate(self % output(self % output_size))

    self % gradient = 0
    self % output = 0

  end subroutine init

end submodule nf_flatten_layer_submodule
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Oops, something went wrong.