Implement the reshape layer (#97)
* Interface and constructor for the reshape3d layer

* Use reshape for the constructor function and reshape3d for the internal layer implementation

* Add the submodule for the concrete reshape3d_layer

* Forward and backward passes for the reshape3d layer

* Add type guards for reshape layer to forward and backward subroutines

* Test that the resulting shape and values of a reshape layer are correct

* Bump version to 0.8.0 (unreleased)

* Add reshape layer to list of features

* Update CMake build for the reshape layer

* Ignore submodule files

* Enable reading reshape layers from Keras h5
milancurcic authored Sep 28, 2022
1 parent 956c28a commit e9af5b4
Showing 17 changed files with 350 additions and 25 deletions.
5 changes: 3 additions & 2 deletions .gitignore
@@ -1,7 +1,8 @@
*.gz
*.o
*.mod
*.smod
*.dat
*.h5
build
doc
/build
/doc
2 changes: 2 additions & 0 deletions CMakeLists.txt
@@ -58,6 +58,8 @@ add_library(neural
src/nf/nf_parallel_submodule.f90
src/nf/nf_random.f90
src/nf/nf_random_submodule.f90
src/nf/nf_reshape_layer.f90
src/nf/nf_reshape_layer_submodule.f90
src/nf/io/nf_io_binary.f90
src/nf/io/nf_io_binary_submodule.f90
src/nf/io/nf_io_hdf5.f90
11 changes: 6 additions & 5 deletions README.md
@@ -18,7 +18,7 @@ Read the paper [here](https://arxiv.org/abs/1902.06714).

* Dense, fully connected neural layers
* Convolutional and max-pooling layers (experimental, forward propagation only)
* Flatten layers (forward and backward pass)
* Flatten and reshape layers (forward and backward passes)
* Loading dense and convolutional models from Keras h5 files
* Stochastic and mini-batch gradient descent for back-propagation
* Data-based parallelism
@@ -29,10 +29,11 @@ Read the paper [here](https://arxiv.org/abs/1902.06714).
| Layer type | Constructor name | Supported input layers | Rank of output array | Forward pass | Backward pass |
|------------|------------------|------------------------|----------------------|--------------|---------------|
| Input (1-d and 3-d) | `input` | n/a | 1, 3 | n/a | n/a |
| Dense (fully-connected) | `dense` | `input` (1-d) | 1 |||
| Convolutional (2-d) | `conv2d` | `input` (3-d), `conv2d`, `maxpool2d` | 3 |||
| Max-pooling (2-d) | `maxpool2d` | `input` (3-d), `conv2d`, `maxpool2d` | 3 |||
| Flatten | `flatten` | `input` (3-d), `conv2d`, `maxpool2d` | 1 |||
| Dense (fully-connected) | `dense` | `input1d` | 1 |||
| Convolutional (2-d) | `conv2d` | `input3d`, `conv2d`, `maxpool2d` | 3 |||
| Max-pooling (2-d) | `maxpool2d` | `input3d`, `conv2d`, `maxpool2d` | 3 |||
| Flatten | `flatten` | `input3d`, `conv2d`, `maxpool2d` | 1 |||
| Reshape (1-d to 3-d) | `reshape` | `input1d`, `dense`, `flatten` | 3 |||

## Getting started

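A minimal usage sketch of the new `reshape` constructor described in the feature table above; the layer sizes and keyword names here are illustrative assumptions, not code from this commit:

```fortran
program reshape_usage
  ! Sketch: connect a 1-d input to a conv2d layer via reshape.
  ! Sizes are arbitrary; only the constructor names come from the
  ! public nf module as updated in this commit.
  use nf, only: network, input, reshape, conv2d, flatten, dense
  implicit none
  type(network) :: net

  net = network([ &
    input(784), &            ! rank-1 input, e.g. a flattened 28x28 image
    reshape([1, 28, 28]), &  ! rank-1 -> rank-3 (Fortran order: channels first)
    conv2d(filters=8, kernel_size=3), &
    flatten(), &
    dense(10) &
  ])
end program reshape_usage
```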
2 changes: 1 addition & 1 deletion fpm.toml
@@ -1,5 +1,5 @@
name = "neural-fortran"
version = "0.7.0"
version = "0.8.0"
license = "MIT"
author = "Milan Curcic"
maintainer = "[email protected]"
3 changes: 2 additions & 1 deletion src/nf.f90
@@ -2,7 +2,8 @@ module nf
!! User API: everything an application needs to reference directly
use nf_datasets_mnist, only: label_digits, load_mnist
use nf_layer, only: layer
use nf_layer_constructors, only: conv2d, dense, flatten, input, maxpool2d
use nf_layer_constructors, only: &
conv2d, dense, flatten, input, maxpool2d, reshape
use nf_network, only: network
use nf_optimizers, only: sgd
end module nf
3 changes: 3 additions & 0 deletions src/nf/nf_datasets.f90
@@ -12,6 +12,7 @@ module nf_datasets
download_and_unpack, &
keras_cnn_mnist_url, &
keras_dense_mnist_url, &
keras_reshape_url, &
mnist_url

character(*), parameter :: keras_snippets_baseurl = &
@@ -22,6 +23,8 @@ module nf_datasets
keras_snippets_baseurl // '/8892585/keras_cnn_mnist.tar.gz'
character(*), parameter :: keras_dense_mnist_url = &
keras_snippets_baseurl // '/8788739/keras_dense_mnist.tar.gz'
character(*), parameter :: keras_reshape_url = &
keras_snippets_baseurl // '/9667603/keras_reshape.tar.gz'
character(*), parameter :: mnist_url = &
neural_fortran_baseurl // '/8498876/mnist.tar.gz'

3 changes: 3 additions & 0 deletions src/nf/nf_keras.f90
@@ -29,6 +29,9 @@ module nf_keras
integer, allocatable :: pool_size(:)
integer, allocatable :: strides(:)

! Reshape
integer, allocatable :: target_shape(:)

end type keras_layer

interface
7 changes: 7 additions & 0 deletions src/nf/nf_keras_submodule.f90
@@ -82,6 +82,13 @@ module function get_keras_h5_layers(filename) result(res)
res(n) % pool_size = reverse(res(n) % pool_size)
res(n) % strides = reverse(res(n) % strides)

case('Reshape')
! Only read target shape
call json % get(layer_config_json, &
'target_shape', res(n) % target_shape, found)
! Reverse to account for C -> Fortran order
res(n) % target_shape = reverse(res(n) % target_shape)

case default
error stop 'This Keras layer is not supported'

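The `reverse` calls in this file account for Keras storing shapes in C (row-major) order while Fortran uses column-major order; a Keras `target_shape` of (28, 28, 1), for example, becomes (1, 28, 28) on the Fortran side. A minimal sketch of what such a helper reduces to (the actual `reverse` ships elsewhere in neural-fortran; this one is illustrative only):

```fortran
! Illustrative stand-in for the reverse helper used above.
pure function reverse(x) result(res)
  integer, intent(in) :: x(:)
  integer :: res(size(x))
  res = x(size(x):1:-1)  ! e.g. [28, 28, 1] -> [1, 28, 28]
end function reverse
```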
30 changes: 24 additions & 6 deletions src/nf/nf_layer.f90
@@ -24,24 +24,25 @@ module nf_layer

contains

procedure :: backward
procedure :: forward
procedure :: init
procedure :: print_info
procedure :: update

! Specific output subroutines for different array ranks,
! available via generic `get_output`.
! Specific subroutines for different array ranks
procedure, private :: backward_1d
procedure, private :: backward_3d
procedure, private :: get_output_1d
procedure, private :: get_output_3d

generic :: backward => backward_1d, backward_3d
generic :: get_output => get_output_1d, get_output_3d

end type layer

interface
interface backward

pure module subroutine backward(self, previous, gradient)
pure module subroutine backward_1d(self, previous, gradient)
!! Apply a backward pass on the layer.
!! This changes the internal state of the layer.
!! This is normally called internally by the `network % backward`
@@ -52,7 +53,24 @@ pure module subroutine backward(self, previous, gradient)
!! Previous layer instance
real, intent(in) :: gradient(:)
!! Array of gradient values from the next layer
end subroutine backward
end subroutine backward_1d

pure module subroutine backward_3d(self, previous, gradient)
!! Apply a backward pass on the layer.
!! This changes the internal state of the layer.
!! This is normally called internally by the `network % backward`
!! method.
class(layer), intent(in out) :: self
!! Layer instance
class(layer), intent(in) :: previous
!! Previous layer instance
real, intent(in) :: gradient(:,:,:)
!! Array of gradient values from the next layer
end subroutine backward_3d

end interface backward

interface

pure module subroutine forward(self, input)
!! Apply a forward pass on the layer.
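Since `backward` is now a generic binding over `backward_1d` and `backward_3d`, the compiler picks the specific procedure from the rank of the gradient argument. A self-contained toy demo of that resolution pattern (the type and names below are stand-ins, not neural-fortran's):

```fortran
module dispatch_demo
  ! Toy illustration of the generic-binding pattern used in nf_layer:
  ! one generic name, resolved at compile time by argument rank.
  implicit none
  type :: toy_layer
  contains
    procedure, private :: backward_1d, backward_3d
    generic :: backward => backward_1d, backward_3d
  end type toy_layer
contains
  subroutine backward_1d(self, gradient)
    class(toy_layer), intent(in out) :: self
    real, intent(in) :: gradient(:)
    print *, 'rank-1 gradient, size', size(gradient)
  end subroutine backward_1d
  subroutine backward_3d(self, gradient)
    class(toy_layer), intent(in out) :: self
    real, intent(in) :: gradient(:,:,:)
    print *, 'rank-3 gradient, shape', shape(gradient)
  end subroutine backward_3d
end module dispatch_demo

program demo
  use dispatch_demo
  implicit none
  type(toy_layer) :: l
  real :: g1(10), g3(2,3,4)
  call l % backward(g1)  ! resolves to backward_1d
  call l % backward(g3)  ! resolves to backward_3d
end program demo
```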
13 changes: 12 additions & 1 deletion src/nf/nf_layer_constructors.f90
@@ -7,7 +7,7 @@ module nf_layer_constructors
implicit none

private
public :: conv2d, dense, flatten, input, maxpool2d
public :: conv2d, dense, flatten, input, maxpool2d, reshape

interface input

@@ -154,6 +154,17 @@ pure module function maxpool2d(pool_size, stride) result(res)
!! Resulting layer instance
end function maxpool2d

pure module function reshape(output_shape) result(res)
!! Rank-1 to rank-any reshape layer constructor.
!! Currently only a rank-3 output is implemented.
!!
!! This layer is for connecting 1-d inputs to conv2d or similar layers.
integer, intent(in) :: output_shape(:)
!! Shape of the output
type(layer) :: res
!! Resulting layer instance
end function reshape

end interface

end module nf_layer_constructors
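A quick sketch of the constructor contract (compiled against the library; the shape values are arbitrary):

```fortran
program reshape_ctor_demo
  ! Sketch: only rank-3 output shapes are accepted for now; anything
  ! else hits the error stop in the submodule below.
  use nf_layer, only: layer
  use nf_layer_constructors, only: reshape
  implicit none
  type(layer) :: rs
  rs = reshape([3, 32, 32])   ! ok: size(output_shape) == 3
  ! rs = reshape([784])       ! would stop: shape must have 3 elements
end program reshape_ctor_demo
```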
15 changes: 15 additions & 0 deletions src/nf/nf_layer_constructors_submodule.f90
@@ -7,6 +7,7 @@
use nf_input1d_layer, only: input1d_layer
use nf_input3d_layer, only: input3d_layer
use nf_maxpool2d_layer, only: maxpool2d_layer
use nf_reshape_layer, only: reshape3d_layer

implicit none

@@ -109,4 +110,18 @@ pure module function maxpool2d(pool_size, stride) result(res)

end function maxpool2d

pure module function reshape(output_shape) result(res)
integer, intent(in) :: output_shape(:)
type(layer) :: res

res % name = 'reshape'

if (size(output_shape) == 3) then
allocate(res % p, source=reshape3d_layer(output_shape))
else
error stop 'size(output_shape) of the reshape layer must == 3'
end if

end function reshape

end submodule nf_layer_constructors_submodule
65 changes: 57 additions & 8 deletions src/nf/nf_layer_submodule.f90
@@ -6,16 +6,18 @@
use nf_input1d_layer, only: input1d_layer
use nf_input3d_layer, only: input3d_layer
use nf_maxpool2d_layer, only: maxpool2d_layer
use nf_reshape_layer, only: reshape3d_layer

contains

pure module subroutine backward(self, previous, gradient)
pure module subroutine backward_1d(self, previous, gradient)
implicit none
class(layer), intent(in out) :: self
class(layer), intent(in) :: previous
real, intent(in) :: gradient(:)

! Backward pass currently implemented only for dense and flatten layers
! Backward pass from a 1-d layer downstream currently implemented
! only for dense and flatten layers
select type(this_layer => self % p)

type is(dense_layer)
@@ -32,7 +34,7 @@ pure module subroutine backward(self, previous, gradient)

type is(flatten_layer)

! Downstream layers permitted: input3d, conv2d, maxpool2d
! Upstream layers permitted: input3d, conv2d, maxpool2d
select type(prev_layer => previous % p)
type is(input3d_layer)
call this_layer % backward(prev_layer % output, gradient)
@@ -44,7 +46,34 @@ pure module subroutine backward(self, previous, gradient)

end select

end subroutine backward
end subroutine backward_1d


pure module subroutine backward_3d(self, previous, gradient)
implicit none
class(layer), intent(in out) :: self
class(layer), intent(in) :: previous
real, intent(in) :: gradient(:,:,:)

! Backward pass from a 3-d layer downstream currently implemented
! only for reshape3d layer
select type(this_layer => self % p)

type is(reshape3d_layer)

! Upstream layers permitted: input1d, dense, flatten
select type(prev_layer => previous % p)
type is(input1d_layer)
call this_layer % backward(prev_layer % output, gradient)
type is(dense_layer)
call this_layer % backward(prev_layer % output, gradient)
type is(flatten_layer)
call this_layer % backward(prev_layer % output, gradient)
end select

end select

end subroutine backward_3d


pure module subroutine forward(self, input)
@@ -68,38 +97,56 @@ pure module subroutine forward(self, input)

type is(conv2d_layer)

! Upstream layers permitted: input3d, conv2d, maxpool2d
! Upstream layers permitted: input3d, conv2d, maxpool2d, reshape3d
select type(prev_layer => input % p)
type is(input3d_layer)
call this_layer % forward(prev_layer % output)
type is(conv2d_layer)
call this_layer % forward(prev_layer % output)
type is(maxpool2d_layer)
call this_layer % forward(prev_layer % output)
type is(reshape3d_layer)
call this_layer % forward(prev_layer % output)
end select

type is(maxpool2d_layer)

! Upstream layers permitted: input3d, conv2d, maxpool2d
! Upstream layers permitted: input3d, conv2d, maxpool2d, reshape3d
select type(prev_layer => input % p)
type is(input3d_layer)
call this_layer % forward(prev_layer % output)
type is(conv2d_layer)
call this_layer % forward(prev_layer % output)
type is(maxpool2d_layer)
call this_layer % forward(prev_layer % output)
type is(reshape3d_layer)
call this_layer % forward(prev_layer % output)
end select

type is(flatten_layer)

! Upstream layers permitted: input3d, conv2d, maxpool2d
! Upstream layers permitted: input3d, conv2d, maxpool2d, reshape3d
select type(prev_layer => input % p)
type is(input3d_layer)
call this_layer % forward(prev_layer % output)
type is(conv2d_layer)
call this_layer % forward(prev_layer % output)
type is(maxpool2d_layer)
call this_layer % forward(prev_layer % output)
type is(reshape3d_layer)
call this_layer % forward(prev_layer % output)
end select

type is(reshape3d_layer)

! Upstream layers permitted: input1d, dense, flatten
select type(prev_layer => input % p)
type is(input1d_layer)
call this_layer % forward(prev_layer % output)
type is(dense_layer)
call this_layer % forward(prev_layer % output)
type is(flatten_layer)
call this_layer % forward(prev_layer % output)
end select

end select
@@ -141,8 +188,10 @@ pure module subroutine get_output_3d(self, output)
allocate(output, source=this_layer % output)
type is(maxpool2d_layer)
allocate(output, source=this_layer % output)
type is(reshape3d_layer)
allocate(output, source=this_layer % output)
class default
error stop '3-d output can only be read from an input3d, conv2d, or maxpool2d layer.'
error stop '3-d output can only be read from a conv2d, input3d, maxpool2d, or reshape3d layer.'

end select

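The concrete passes live in `src/nf/nf_reshape_layer_submodule.f90`, one of the files not shown on this page. As a rough, hypothetical sketch of what a rank-1-to-rank-3 reshape's forward and backward passes reduce to (component and procedure names are assumptions, not code from this commit):

```fortran
! Hypothetical sketch only; the real implementation is in
! src/nf/nf_reshape_layer_submodule.f90, which is not shown above.
module reshape3d_sketch
  implicit none
  type :: reshape3d_layer
    real, allocatable :: output(:,:,:)  ! assumed allocated to the target shape at init
    real, allocatable :: gradient(:)    ! rank-1 gradient passed upstream
  contains
    procedure :: forward, backward
  end type reshape3d_layer
contains
  pure subroutine forward(self, input)
    ! Lay the 1-d input out as a 3-d array of the target shape.
    class(reshape3d_layer), intent(in out) :: self
    real, intent(in) :: input(:)
    self % output = reshape(input, shape(self % output))
  end subroutine forward
  pure subroutine backward(self, input, gradient)
    ! A reshape's gradient is the same values, flattened back to 1-d.
    class(reshape3d_layer), intent(in out) :: self
    real, intent(in) :: input(:)
    real, intent(in) :: gradient(:,:,:)
    self % gradient = pack(gradient, .true.)
  end subroutine backward
end module reshape3d_sketch
```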
10 changes: 9 additions & 1 deletion src/nf/nf_network_submodule.f90
@@ -6,10 +6,11 @@
use nf_input1d_layer, only: input1d_layer
use nf_input3d_layer, only: input3d_layer
use nf_maxpool2d_layer, only: maxpool2d_layer
use nf_reshape_layer, only: reshape3d_layer
use nf_io_hdf5, only: get_hdf5_dataset
use nf_keras, only: get_keras_h5_layers, keras_layer
use nf_layer, only: layer
use nf_layer_constructors, only: conv2d, dense, flatten, input, maxpool2d
use nf_layer_constructors, only: conv2d, dense, flatten, input, maxpool2d, reshape
use nf_loss, only: quadratic_derivative
use nf_optimizers, only: sgd
use nf_parallel, only: tile_indices
@@ -117,6 +118,9 @@ module function network_from_keras(filename) result(res)
keras_layers(n) % strides(1) &
)

case('Reshape')
layers(n) = reshape(keras_layers(n) % target_shape)

case default
error stop 'This Keras layer is not supported'

@@ -165,6 +169,10 @@ module function network_from_keras(filename) result(res)
! Nothing to do
continue

type is(reshape3d_layer)
! Nothing to do
continue

class default
error stop 'Internal error in network_from_keras(); ' &
// 'mismatch in layer types between the Keras and ' &
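With these branches in place, an h5 model containing a Reshape layer loads like any other supported Keras model. A usage sketch, assuming the generic `network` constructor dispatches to `network_from_keras` and that a file such as one from the `keras_reshape` dataset added above sits in the working directory (the file name is an assumption):

```fortran
program load_keras_reshape
  ! Sketch: construct a network from a Keras h5 file whose
  ! architecture includes a Reshape layer.
  use nf, only: network
  implicit none
  type(network) :: net
  net = network('keras_reshape.h5')  ! assumed file name
end program load_keras_reshape
```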