Merge pull request #330 from leondavi/flatten_layer
[CNN] Flatten Layer is added to Nerlplanner
leondavi authored May 17, 2024
2 parents eaa87f8 + 86885a8 commit a7797f1
Showing 16 changed files with 56 additions and 21 deletions.
11 changes: 11 additions & 0 deletions src_cpp/common/nerlLayer.h
@@ -39,6 +39,17 @@ class NerlLayer
int _layer_functionality;
};

class NerlLayerFlatten : public NerlLayer
{
//TODO Ori continue implementation of Flatten layer
public:

NerlLayerFlatten(int layer_type, std::vector<int> &layers_dims, int layer_functionality);
~NerlLayerFlatten();

private:
};


class NerlLayerPooling : public NerlLayer
{
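The header only declares NerlLayerFlatten, and the inline TODO notes that the implementation is still pending. Below is a minimal sketch of what the matching definitions could look like, assuming the class lives in the nerlnet namespace used elsewhere in this diff and that the NerlLayer base class exposes a constructor with the same three parameters (neither assumption is confirmed by this hunk):

// Illustrative sketch only, not the repository's implementation.
// Assumes a base constructor NerlLayer(int, std::vector<int>&, int).
#include "nerlLayer.h"

namespace nerlnet
{

NerlLayerFlatten::NerlLayerFlatten(int layer_type, std::vector<int> &layers_dims, int layer_functionality)
                 : NerlLayer(layer_type, layers_dims, layer_functionality)
{
    // A flatten layer has no trainable parameters; it only reshapes its
    // input, so no extra members need to be initialized here.
}

NerlLayerFlatten::~NerlLayerFlatten()
{
}

} // namespace nerlnet

Since the layer type, dimensions, and functionality code are already carried by the base class, the derived class can stay empty until flattening logic is wired into the worker.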
5 changes: 3 additions & 2 deletions src_cpp/common/worker_definitions_ag.h
@@ -1,14 +1,15 @@
#pragma once

// This file was auto generated
// Generated by Nerlplanner version: 1.0.1
// Generated by Nerlplanner version: 1.0.2

namespace nerlnet {

enum LayerTypeEnum{LAYER_TYPE_DEFAULT=0,LAYER_TYPE_SCALING=1,LAYER_TYPE_CONV=2,LAYER_TYPE_PERCEPTRON=3,LAYER_TYPE_POOLING=4,LAYER_TYPE_PROBABILISTIC=5,LAYER_TYPE_LSTM=6,LAYER_TYPE_RECCURRENT=7,LAYER_TYPE_UNSCALING=8,LAYER_TYPE_BOUNDING=9};
enum LayerTypeEnum{LAYER_TYPE_DEFAULT=0,LAYER_TYPE_SCALING=1,LAYER_TYPE_CONV=2,LAYER_TYPE_PERCEPTRON=3,LAYER_TYPE_POOLING=4,LAYER_TYPE_PROBABILISTIC=5,LAYER_TYPE_LSTM=6,LAYER_TYPE_RECCURRENT=7,LAYER_TYPE_UNSCALING=8,LAYER_TYPE_FLATTEN=9,LAYER_TYPE_BOUNDING=10};
enum ProbabilisticActivationEnum{PROBABILISTIC_ACTIVATION_BINARY=1,PROBABILISTIC_ACTIVATION_LOGISTIC=2,PROBABILISTIC_ACTIVATION_COMPETITIVE=3,PROBABILISTIC_ACTIVATION_SOFTMAX=4};
enum ScalingEnum{SCALING_NONE=1,SCALING_MINMAX=2,SCALING_MEANSTD=3,SCALING_STD=4,SCALING_LOG=5};
enum BoundingEnum{BOUNDING_NONE=1,BOUNDING_BOUNDING=2};
enum FlattenEnum{FLATTEN_FLATTEN=0};
enum UnscalingEnum{UNSCALING_NONE=1,UNSCALING_MINMAX=2,UNSCALING_MEANSTD=3,UNSCALING_STD=4,UNSCALING_LOG=5};
enum PoolingEnum{POOLING_NONE=1,POOLING_MAX=2,POOLING_AVG=3};
enum ActivationEnum{ACTIVATION_THRESHOLD=1,ACTIVATION_SIGN=2,ACTIVATION_LOGISTIC=3,ACTIVATION_TANH=4,ACTIVATION_LINEAR=5,ACTIVATION_RELU=6,ACTIVATION_ELU=7,ACTIVATION_SELU=8,ACTIVATION_SOFT_PLUS=9,ACTIVATION_SOFT_SIGN=10,ACTIVATION_HARD_SIGMOID=11};
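Inserting LAYER_TYPE_FLATTEN at value 9 shifts LAYER_TYPE_BOUNDING from 9 to 10, which is why the Erlang and Python definition files further down are regenerated in the same commit: an artifact produced by an older Nerlplanner that still encodes a bounding layer as "9" would now be read as a flatten layer. A hedged sketch of a decoder for these string codes (the helper is hypothetical and not part of this diff):

#include <stdexcept>
#include <string>
#include "worker_definitions_ag.h"  // the regenerated header shown above

// Hypothetical helper for illustration: converts the string layer-type code
// written by Nerlplanner into the enum above, rejecting anything outside the
// regenerated range 0..10.
nerlnet::LayerTypeEnum layer_type_from_code(const std::string &code)
{
    int value = std::stoi(code);
    if (value < nerlnet::LAYER_TYPE_DEFAULT || value > nerlnet::LAYER_TYPE_BOUNDING)
        throw std::out_of_range("unknown layer type code: " + code);
    // After this commit, "9" resolves to LAYER_TYPE_FLATTEN and "10" to LAYER_TYPE_BOUNDING.
    return static_cast<nerlnet::LayerTypeEnum>(value);
}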
4 changes: 3 additions & 1 deletion src_cpp/opennnBridge/nerlWorkerOpenNN.cpp
@@ -511,7 +511,7 @@ namespace nerlnet
case LAYER_TYPE_LSTM: { res = translate_activation_function_int(layer_functionality); break;}
case LAYER_TYPE_RECCURRENT: { res = translate_activation_function_int(layer_functionality); break;}
case LAYER_TYPE_UNSCALING: { res = translate_unscaling_method_int(layer_functionality); break;}
case LAYER_TYPE_BOUNDING: { res = translate_activation_function_int(layer_functionality); break;}
case LAYER_TYPE_BOUNDING: { res = translate_activation_function_int(layer_functionality); break;} // TODO Ori this is an error - a bounding layer should not have an activation function
}
return res;
}
@@ -529,6 +529,8 @@ namespace nerlnet
case LAYER_TYPE_PROBABILISTIC:{ res = (int)opennn::Layer::Type::Probabilistic; break;}
case LAYER_TYPE_LSTM: { res = (int)opennn::Layer::Type::LongShortTermMemory; break;}
case LAYER_TYPE_RECCURRENT: { res = (int)opennn::Layer::Type::Recurrent; break;}
case LAYER_TYPE_UNSCALING: { res = (int)opennn::Layer::Type::Unscaling; break;}
case LAYER_TYPE_FLATTEN: { res = (int)opennn::Layer::Type::Flatten; break;}
case LAYER_TYPE_BOUNDING: { res = (int)opennn::Layer::Type::Bounding; break;}
}
return res;
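The second hunk extends the translation from Nerlnet layer-type codes to OpenNN layer types; alongside the new Flatten case it also adds a previously missing Unscaling case. Restated as a self-contained sketch (the enclosing Nerlnet function is truncated above, so the function name and signature below are illustrative, and the snippet assumes an OpenNN build that exposes Layer::Type::Flatten):

#include "worker_definitions_ag.h"
#include "opennn.h"  // assumption: adjust the OpenNN include path to your build

// Illustrative restatement of the mapping in the hunk above; the real Nerlnet
// function name and signature are not visible in this diff.
int opennn_layer_type_from_nerlnet(int layer_type)
{
    int res = -1;  // -1 marks a code with no OpenNN counterpart
    switch (layer_type)
    {
        case nerlnet::LAYER_TYPE_PROBABILISTIC: res = (int)opennn::Layer::Type::Probabilistic;       break;
        case nerlnet::LAYER_TYPE_LSTM:          res = (int)opennn::Layer::Type::LongShortTermMemory; break;
        case nerlnet::LAYER_TYPE_RECCURRENT:    res = (int)opennn::Layer::Type::Recurrent;           break;
        case nerlnet::LAYER_TYPE_UNSCALING:     res = (int)opennn::Layer::Type::Unscaling;           break;
        case nerlnet::LAYER_TYPE_FLATTEN:       res = (int)opennn::Layer::Type::Flatten;             break;
        case nerlnet::LAYER_TYPE_BOUNDING:      res = (int)opennn::Layer::Type::Bounding;            break;
        default: break;
    }
    return res;
}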
2 changes: 1 addition & 1 deletion src_erl/NerlnetApp/rebar.config
@@ -8,7 +8,7 @@
{apps, [nerlnetApp]}
]}.
{base_dir, "/usr/local/lib/nerlnet-lib/NErlNet/build/rebar"}.
{relx, [{release, {nerlnetApp, "1.2.2"}, [nerlnetApp,cowboy,jsx,kernel,stdlib,inets]},
{relx, [{release, {nerlnetApp, "1.4.3"}, [nerlnetApp,cowboy,jsx,kernel,stdlib,inets]},
{dev_mode, true},
{include_erts, true},
%{include_src, true},
5 changes: 3 additions & 2 deletions src_erl/NerlnetApp/src/Bridge/layers_types_ag.hrl
@@ -1,5 +1,5 @@
% This is an auto generated .hrl file
% DC Fields Generated by Nerlplanner version: 1.0.1
% DC Fields Generated by Nerlplanner version: 1.0.2

-define(LAYERS_TYPE_DEFAULT_IDX,"0").
-define(LAYERS_TYPE_SCALING_IDX,"1").
@@ -10,4 +10,5 @@
-define(LAYERS_TYPE_LSTM_IDX,"6").
-define(LAYERS_TYPE_RECCURRENT_IDX,"7").
-define(LAYERS_TYPE_UNSCALING_IDX,"8").
-define(LAYERS_TYPE_BOUNDING_IDX,"9").
-define(LAYERS_TYPE_FLATTEN_IDX,"9").
-define(LAYERS_TYPE_BOUNDING_IDX,"10").
2 changes: 1 addition & 1 deletion src_erl/NerlnetApp/src/Bridge/models_types_ag.hrl
@@ -1,5 +1,5 @@
% This is an auto generated .hrl file
% DC Fields Generated by Nerlplanner version: 1.0.1
% DC Fields Generated by Nerlplanner version: 1.0.2

-define(MODEL_TYPE_NN_IDX,"0").
-define(MODEL_TYPE_APPROXIMATION_IDX,"1").
2 changes: 1 addition & 1 deletion src_erl/NerlnetApp/src/dc_definitions_ag.hrl
@@ -1,5 +1,5 @@
% This is an auto generated .hrl file
% DC Fields Generated by Nerlplanner version: 1.0.1
% DC Fields Generated by Nerlplanner version: 1.0.2

-define(DC_KEY_NERLNET_SETTINGS_ATOM,nerlnetSettings).
-define(DC_KEY_FREQUENCY_ATOM,frequency).
4 changes: 2 additions & 2 deletions src_erl/NerlnetApp/src/nerlnetApp_app.erl
@@ -20,8 +20,8 @@
-behaviour(application).
-include("nerl_tools.hrl").

-define(NERLNET_APP_VERSION, "1.4.2").
-define(NERLPLANNER_TESTED_VERSION,"1.0.1").
-define(NERLNET_APP_VERSION, "1.4.3").
-define(NERLPLANNER_TESTED_VERSION,"1.0.2").

-export([start/2, stop/1]).

2 changes: 1 addition & 1 deletion src_erl/NerlnetApp/src/router_definitions_ag.hrl
@@ -1,5 +1,5 @@
% This is an auto generated .hrl file
% Source Fields Generated by Nerlplanner version: 1.0.1
% Source Fields Generated by Nerlplanner version: 1.0.2

-define(ROUTER_POLICY_ROUTINGTABLE_IDX,"0").

2 changes: 1 addition & 1 deletion src_erl/NerlnetApp/src/source_definitions_ag.hrl
@@ -1,5 +1,5 @@
% This is an auto generated .hrl file
% Source Fields Generated by Nerlplanner version: 1.0.1
% Source Fields Generated by Nerlplanner version: 1.0.2

-define(SOURCE_POLICY_CASTING_IDX,"0").
-define(SOURCE_POLICY_ROUNDROBIN_IDX,"1").
2 changes: 1 addition & 1 deletion src_erl/NerlnetApp/src/worker_definitions_ag.hrl
@@ -1,5 +1,5 @@
% This is an auto generated .hrl file
% Worker Fields Generated by Nerlplanner version: 1.0.1
% Worker Fields Generated by Nerlplanner version: 1.0.2

-define(WORKER_FIELD_KEY_MODEL_TYPE,modelType).
-define(WORKER_FIELD_KEY_MODEL_ARGS,modelArgs).
5 changes: 4 additions & 1 deletion src_py/nerlPlanner/CppHeadersExporter.py
@@ -30,7 +30,9 @@ def gen_header_worker_parameters_definitions(header_path : str, debug : bool = F
scaling_enums = EnumType('ScalingEnum', ScalingMethodMap, True, 'SCALING')
gen_header_exporter_logger(scaling_enums.generate_code())
bounding_enums = EnumType('BoundingEnum', BoundingMethodMap, True, 'BOUNDING')
gen_header_exporter_logger(bounding_enums.generate_code())
gen_header_exporter_logger(bounding_enums.generate_code()),
flatten_enums = EnumType('FlattenEnum', FlattenMethodMap, True, 'FLATTEN')
gen_header_exporter_logger(flatten_enums.generate_code())
unscaling_enums = EnumType('UnscalingEnum', UnScalingMethodMap, True, 'UNSCALING')
gen_header_exporter_logger(unscaling_enums.generate_code())
pooling_enums = EnumType('PoolingEnum', PoolingMethodMap, True, 'POOLING')
@@ -65,6 +67,7 @@ def gen_header_worker_parameters_definitions(header_path : str, debug : bool = F
f.write(probabilistic_activation_enums.generate_code())
f.write(scaling_enums.generate_code())
f.write(bounding_enums.generate_code())
f.write(flatten_enums.generate_code())
f.write(unscaling_enums.generate_code())
f.write(pooling_enums.generate_code())
f.write(activation_enums.generate_code())
2 changes: 1 addition & 1 deletion src_py/nerlPlanner/Definitions.py
@@ -1,7 +1,7 @@
import subprocess
from logger import *

VERSION = "1.0.1"
VERSION = "1.0.2"
NERLNET_VERSION_TESTED_WITH = "1.4.2"
NERLNET_TMP_PATH = "/tmp/nerlnet"
NERLNET_GRAPHVIZ_OUTPUT_DIR = f"{NERLNET_TMP_PATH}/nerlplanner"
8 changes: 7 additions & 1 deletion src_py/nerlPlanner/JsonElementWorkerDefinitions.py
@@ -15,7 +15,9 @@
("LSTM" , "6"),
("Reccurrent" , "7"),
("Unscaling" , "8"),
("Bounding" , "9")]
("Flatten" , "9"),
("Bounding" , "10"),
]
)

ProbabilisticActivationFunctionMap = OrderedDict(
@@ -30,6 +32,10 @@
("bounding" , "2")]
)

FlattenMethodMap = OrderedDict(
[("flatten" , "0")]
)

ScalingMethodMap = OrderedDict(
[("none" , "1"),
("MinMax" , "2"),
20 changes: 15 additions & 5 deletions src_py/nerlPlanner/WinWorkerDialog.py
@@ -163,9 +163,10 @@ def ui_update_all_values(WorkerWindow):
ActivationDictStr = f'Activation:\n{pretty_print_dict(ActivationFunctionsMap)}'
PoolingDictStr = f'Pooling:\n{pretty_print_dict(PoolingMethodMap)}'
ScalerDictStr = f'Scaler:\n{pretty_print_dict(ScalingMethodMap)}'
FlattenDictStr = f'Flatten:\n{pretty_print_dict(FlattenMethodMap)}'
BoundingDictStr = f'Bounding:\n{pretty_print_dict(BoundingMethodMap)}'
ProbabilisticDictStr = f'Probabilistic:\n{pretty_print_dict(ProbabilisticActivationFunctionMap)}'
sg.popup_ok(f"Layer Functions Codes:\n{ActivationDictStr}\n{PoolingDictStr}\n{ScalerDictStr}\n{BoundingDictStr}\n{ProbabilisticDictStr}", keep_on_top=True, title="Layer Type Codes")
sg.popup_ok(f"Layer Functions Codes:\n{ActivationDictStr}\n{PoolingDictStr}\n{ScalerDictStr}\n{FlattenDictStr}\n{BoundingDictStr}\n{ProbabilisticDictStr}", keep_on_top=True, title="Layer Type Codes")

if event == KEY_LEARNING_RATE_INPUT:
LearningRate = values[event]
@@ -259,15 +260,19 @@ def ui_update_all_values(WorkerWindow):
def LayerMethodSelection():
global global_layer_method_selection_code

layout = [[sg.Text("Activation",expand_x=True), sg.Text('Pooling', expand_x=True), sg.Text('Scaler', expand_x=True),sg.Text('Bounding' , expand_x=True), sg.Text('Probabilistic', expand_x=True)],
layout = [[sg.Text("Activation",expand_x=True), sg.Text('Pooling', expand_x=True), sg.Text('Scaler', expand_x=True),],
[sg.Listbox(list(ActivationFunctionsMap.keys()), size=(20,15), enable_events=True, key=KEY_LAYER_METHOD_SELECTION_DIALOG_LISTBOX_ACTIVATION),
sg.Listbox(list(PoolingMethodMap.keys()),size=(20,15), enable_events=True, key=KEY_LAYER_METHOD_SELECTION_DIALOG_LISTBOX_POOLING),
sg.Listbox(list(ScalingMethodMap.keys()),size=(20,15), enable_events=True, key=KEY_LAYER_METHOD_SELECTION_DIALOG_LISTBOX_SCALER),
sg.Listbox(list(ScalingMethodMap.keys()),size=(20,15), enable_events=True, key=KEY_LAYER_METHOD_SELECTION_DIALOG_LISTBOX_SCALER)],
[ sg.Text("Bounding",expand_x=True), sg.Text('Flatten', expand_x=True), sg.Text('Probabilistic', expand_x=True)],
[
sg.Listbox(list(BoundingMethodMap.keys()),size=(20,15), enable_events=True, key=KEY_LAYER_METHOD_SELECTION_DIALOG_LISTBOX_BOUNDING),
sg.Listbox(list(ProbabilisticActivationFunctionMap.keys()),size=(20,15), enable_events=True, key=KEY_LAYER_METHOD_SELECTION_DIALOG_LISTBOX_PROBABILISTIC)],
sg.Listbox(list(FlattenMethodMap.keys()),size=(20,15), enable_events=True, key=KEY_LAYER_METHOD_SELECTION_DIALOG_LISTBOX_FLATTEN),
sg.Listbox(list(ProbabilisticActivationFunctionMap.keys()),size=(20,15), enable_events=True, key=KEY_LAYER_METHOD_SELECTION_DIALOG_LISTBOX_PROBABILISTIC)
],
[sg.Text('Selection', expand_x=True, enable_events=True, key=KEY_LAYER_METHOD_SELECTION_TEXT),sg.Button('Select', expand_x=True, key=KEY_LAYER_METHOD_SELECTION_BUTTON)]]

layer_selection_win = sg.Window(title="Layer Method Selection", layout=layout, modal=True)
layer_selection_win = sg.Window(title="Layer Method Selection", layout=layout, modal=True, keep_on_top=True)


while True:
@@ -298,6 +303,11 @@ def LayerMethodSelection():
global_layer_method_selection_code = BoundingMethodMap[layer_method_selection]
layer_selection_win[KEY_LAYER_METHOD_SELECTION_TEXT].update(f'Selected {layer_method_selection} code: {global_layer_method_selection_code}')

if event == KEY_LAYER_METHOD_SELECTION_DIALOG_LISTBOX_FLATTEN:
layer_method_selection = values[KEY_LAYER_METHOD_SELECTION_DIALOG_LISTBOX_FLATTEN][0]
global_layer_method_selection_code = FlattenMethodMap[layer_method_selection]
layer_selection_win[KEY_LAYER_METHOD_SELECTION_TEXT].update(f'Selected {layer_method_selection} code: {global_layer_method_selection_code}')

if event == KEY_LAYER_METHOD_SELECTION_BUTTON:
break

1 change: 1 addition & 0 deletions src_py/nerlPlanner/WinWorkerDialogDefnitions.py
@@ -53,6 +53,7 @@
KEY_LAYER_METHOD_SELECTION_DIALOG_LISTBOX_ACTIVATION = '-LAYER-METHOD-SELECTION-DIALOG-LISTBOX-ACTIVATION-'
KEY_LAYER_METHOD_SELECTION_DIALOG_LISTBOX_POOLING = '-LAYER-METHOD-SELECTION-DIALOG-LISTBOX-POOLING-'
KEY_LAYER_METHOD_SELECTION_DIALOG_LISTBOX_SCALER = '-LAYER-METHOD-SELECTION-DIALOG-LISTBOX-SCALER-'
KEY_LAYER_METHOD_SELECTION_DIALOG_LISTBOX_FLATTEN = '-LAYER-METHOD-SELECTION-DIALOG-LISTBOX-FLATTEN-'
KEY_LAYER_METHOD_SELECTION_DIALOG_LISTBOX_BOUNDING = '-LAYER-METHOD-SELECTION-DIALOG-LISTBOX-BOUNDING-'
KEY_LAYER_METHOD_SELECTION_DIALOG_LISTBOX_PROBABILISTIC = '-LAYER-METHOD-SELECTION-DIALOG-LISTBOX-PROBABILISTIC-'
KEY_LAYER_METHOD_SELECTION_TEXT = '-LAYER-METHOD-SELECTION-TEXT-'