Skip to content

Commit

Permalink
Merge pull request #265 from leondavi/nerlplanner_cpp_headers
Browse files Browse the repository at this point in the history
[NERLPLANNER] Autogenerated cpp headers for worker definitions
  • Loading branch information
leondavi authored Nov 10, 2023
2 parents fded6a3 + 1ab6a91 commit d34b828
Show file tree
Hide file tree
Showing 6 changed files with 189 additions and 37 deletions.
13 changes: 12 additions & 1 deletion NerlnetBuild.sh
Original file line number Diff line number Diff line change
Expand Up @@ -139,7 +139,6 @@ parse_commandline "$@"

NERLNET_BUILD_PREFIX="[Nerlnet Build] "


OPTION="add_compile_definitions(EIGEN_MAX_ALIGN_BYTES=8)"
is_rasp="$(grep -c raspbian /etc/os-release)"
if [ $is_rasp -gt "0" ]; then
Expand All @@ -150,6 +149,18 @@ else
sed -i "s/^.*\(${OPTION}.*$\)/#\1/" CMakeLists.txt
fi

if command -v python3 >/dev/null 2>&1; then
    echo "$NERLNET_BUILD_PREFIX Python 3 is installed"
    # Generate auto-generated files.
    # $(...) over legacy backticks; quote the expansion so the path survives
    # word splitting / globbing if the checkout directory contains spaces.
    AUTOGENERATED_WORKER_DEFINITIONS_PATH="$(pwd)/src_cpp/opennnBridge/worker_definitions_ag.h"
    echo "$NERLNET_BUILD_PREFIX Generate auto-generated files"
    python3 src_py/nerlPlanner/CppHeadersExporter.py --output "$AUTOGENERATED_WORKER_DEFINITIONS_PATH" #--debug
else
    echo "$NERLNET_BUILD_PREFIX Python 3 is not installed"
    echo "Autogenerated files will not be generated"
    echo "These files are based on last generated files brought from the repository"
fi

echo "$NERLNET_BUILD_PREFIX Building Nerlnet Library"
echo "$NERLNET_BUILD_PREFIX Cmake command of Nerlnet NIFPP"
set -e
Expand Down
15 changes: 15 additions & 0 deletions src_cpp/opennnBridge/worker_definitions_ag.h
Original file line number Diff line number Diff line change
@@ -0,0 +1,15 @@
#pragma once

// This file was auto generated
// NOTE(review): the duplicate LayerTypeEnum definition was removed here —
// redefining an enum is ill-formed C++ and fails compilation.
#define NERLPLANNER_VERSION "1.0.0"

enum LayerTypeEnum{LAYER_TYPE_DEFAULT=0,LAYER_TYPE_SCALING=1,LAYER_TYPE_CNN=2,LAYER_TYPE_PERCEPTRON=3,LAYER_TYPE_POOLING=4,LAYER_TYPE_PROBABILISTIC=5,LAYER_TYPE_LSTM=6,LAYER_TYPE_RECCURRENT=7,LAYER_TYPE_UNSCALING=8,LAYER_TYPE_BOUNDING=9};
enum ProbabilisticActivationEnum{PROBABILISTIC_ACTIVATION_BINARY=1,PROBABILISTIC_ACTIVATION_LOGISTIC=2,PROBABILISTIC_ACTIVATION_COMPETITIVE=3,PROBABILISTIC_ACTIVATION_SOFTMAX=4};
enum ScalingEnum{SCALING_NONE=1,SCALING_MINMAX=2,SCALING_MEANSTD=3,SCALING_STD=4,SCALING_LOG=5};
enum UnscalingEnum{UNSCALING_NONE=1,UNSCALING_MINMAX=2,UNSCALING_MEANSTD=3,UNSCALING_STD=4,UNSCALING_LOG=5};
enum PoolingEnum{POOLING_NONE=1,POOLING_MAX=2,POOLING_AVG=3};
enum ActivationEnum{ACTIVATION_THRESHOLD=1,ACTIVATION_SIGN=2,ACTIVATION_LOGISTIC=3,ACTIVATION_TANH=4,ACTIVATION_LINEAR=5,ACTIVATION_RELU=6,ACTIVATION_ELU=7,ACTIVATION_SELU=8,ACTIVATION_SOFT_PLUS=9,ACTIVATION_SOFT_SIGN=10,ACTIVATION_HARD_SIGMOID=11};
enum ModelTypeEnum{MODEL_TYPE_APPROXIMATION=1,MODEL_TYPE_CLASSIFICATION=2,MODEL_TYPE_FORECASTING=3,MODEL_TYPE_ENCODER_DECODER=4,MODEL_TYPE_NN=5,MODEL_TYPE_AUTOENCODER=6,MODEL_TYPE_AE_CLASSIFIER=7,MODEL_TYPE_FED_CLIENT=8,MODEL_TYPE_FED_SERVER=9};
enum OptimizerEnum{OPTIMIZER_NONE=0,OPTIMIZER_SGD=1,OPTIMIZER_MINI_BATCH=2,OPTIMIZER_MOMENTUM=3,OPTIMIZER_NAG=4,OPTIMIZER_ADAGRAD=5,OPTIMIZER_ADAM=6};
enum LossMethodEnum{LOSS_METHOD_SSE=1,LOSS_METHOD_MSE=2,LOSS_METHOD_NSE=3,LOSS_METHOD_MINKOWSKIE=4,LOSS_METHOD_WSE=5,LOSS_METHOD_CEE=6};
8 changes: 6 additions & 2 deletions src_py/apiServer/experiment_flow_test.py
Original file line number Diff line number Diff line change
Expand Up @@ -5,6 +5,8 @@
from logger import *
from stats import Stats

TEST_ACCEPTABLE_MARGIN_OF_ERROR = 0.02

def print_test(in_str : str):
    """Log a message through LOG_INFO with the Nerlnet test prefix."""
    prefix = "[NERLNET-TEST] "
    LOG_INFO(f"{prefix} {in_str}")
Expand Down Expand Up @@ -62,7 +64,7 @@ def print_test(in_str : str):

exp_stats = Stats(experiment_inst)
data = exp_stats.get_loss_min()
print("min loss of each worker")
print_test("min loss of each worker")
print(data)

conf = exp_stats.get_confusion_matrices()
Expand All @@ -73,7 +75,9 @@ def print_test(in_str : str):
for j in acc_stats[worker].keys():
diff = abs(acc_stats[worker][j]["F1"] - baseline_acc_stats[worker][str(j)]["F1"])
diff_from_baseline.append(diff/baseline_acc_stats[worker][str(j)]["F1"])
anomaly_detected = not all([x < 0.01 for x in diff_from_baseline])
anomaly_detected = not all([x < TEST_ACCEPTABLE_MARGIN_OF_ERROR for x in diff_from_baseline])
if anomaly_detected:
print_test("Anomaly failure detected")
print_test(f"diff_from_baseline: {diff_from_baseline}")
exit(1)

77 changes: 77 additions & 0 deletions src_py/nerlPlanner/CppHeadersExporter.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,77 @@
import argparse
import os
from CppHeadersExporterDefs import *
from JsonElementWorkerDefinitions import *
from Definitions import VERSION as NERLPLANNER_VERSION

DEBUG = False

def gen_header_exporter_logger(message : str):
    """Print *message* with the generator's debug prefix when DEBUG mode is on."""
    if not DEBUG:
        return
    print(f'[NERLPLANNER][AUTO_HEADER_GENERATOR][DEBUG] {message}')

def gen_header_worker_parameters_definitions(header_path : str, debug : bool = False):
    """Generate the auto-generated C++ worker-definitions header.

    Writes ``#pragma once``, an auto-generated banner, the NERLPLANNER_VERSION
    define, and one C++ enum per worker-definition map to *header_path*.

    Args:
        header_path: destination path of the generated ``.h`` file; parent
            directories are created if missing.
        debug: when True, every generated fragment is echoed via
            gen_header_exporter_logger (sets the module-level DEBUG flag).
    """
    global DEBUG
    DEBUG = debug

    empty_line = '\n'
    pragma_once = PragmaOnce()
    gen_header_exporter_logger(pragma_once.generate_code())
    auto_generated_header = AutoGeneratedHeader()
    gen_header_exporter_logger(auto_generated_header.generate_code())
    nerlplanner_version = Definition('NERLPLANNER_VERSION', f'"{NERLPLANNER_VERSION}"')

    # (enum name, source map, macro prefix) — one entry per generated C++ enum.
    # LayerTypeEnum appears exactly once: the previous code generated and wrote
    # it twice, emitting a duplicate enum definition (a C++ compile error).
    enum_specs = [
        ('LayerTypeEnum', LayerTypeMap, 'LAYER_TYPE'),
        ('ProbabilisticActivationEnum', ProbabilisticActivationFunctionMap, 'PROBABILISTIC_ACTIVATION'),
        ('ScalingEnum', ScalingMethodMap, 'SCALING'),
        ('UnscalingEnum', UnScalingMethodMap, 'UNSCALING'),
        ('PoolingEnum', PoolingMethodMap, 'POOLING'),
        ('ActivationEnum', ActivationFunctionsMap, 'ACTIVATION'),
        ('ModelTypeEnum', ModelTypeMapping, 'MODEL_TYPE'),
        ('OptimizerEnum', OptimizerTypeMapping, 'OPTIMIZER'),
        ('LossMethodEnum', LossMethodMapping, 'LOSS_METHOD'),
    ]
    enums = [EnumType(name, mapping, True, prefix) for name, mapping, prefix in enum_specs]
    for enum in enums:
        gen_header_exporter_logger(enum.generate_code())

    # Create the destination directory only when the path actually has one.
    if os.path.dirname(header_path):
        os.makedirs(os.path.dirname(header_path), exist_ok=True)

    with open(header_path, 'w') as f:
        f.write(pragma_once.generate_code())
        f.write(empty_line)
        f.write(auto_generated_header.generate_code())
        f.write(nerlplanner_version.generate_code())
        f.write(empty_line)
        for enum in enums:
            f.write(enum.generate_code())




def main():
    """CLI entry point: parse the output path and debug flag, then generate."""
    arg_parser = argparse.ArgumentParser(description='Generate C++ header file for nerlPlanner')
    arg_parser.add_argument('-o', '--output', help='output header file path', required=True)
    arg_parser.add_argument('-d', '--debug', help='debug mode', action='store_true')
    parsed = arg_parser.parse_args()
    gen_header_worker_parameters_definitions(parsed.output, parsed.debug)

if __name__ == "__main__":
    main()

49 changes: 49 additions & 0 deletions src_py/nerlPlanner/CppHeadersExporterDefs.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,49 @@
from collections import OrderedDict

class AutoGeneratedHeader:
    """Emits the banner comment marking a header file as auto generated."""

    def generate_code(self):
        """Return the banner comment line (trailing newline included)."""
        # Plain literal: the original used an f-string with no placeholders
        # and a no-op __init__; both removed.
        return '// This file was auto generated\n'

class PragmaOnce:
    """Emits the '#pragma once' include guard.

    ``header_name`` is stored for API symmetry with classic include guards
    but does not affect the output.
    """

    def __init__(self, header_name: str = '') -> None:
        self.header_name = header_name

    def generate_code(self):
        """Return the include-guard line (trailing newline included)."""
        # Plain literal: the original f-string had no placeholders.
        return '#pragma once\n'

class EnumType:
    """Renders a C++ enum definition from an ordered name->value mapping.

    Hyphens anywhere in the rendered enum name or enumerators are replaced
    with underscores so the output is valid C++.
    """

    def __init__(self, enum_name : str, in_ordered_dict : OrderedDict, all_caps : bool = False, prefix = '') -> None:
        self.enum_name = enum_name
        self.ordered_dict = in_ordered_dict  # insertion order defines enumerator order
        self.prefix = prefix                 # prepended to every enumerator name
        self.all_caps = all_caps             # upper-case the enumerator names

    def generate_code(self):
        """Return 'enum Name{PREFIX_KEY=value,...};\\n' for the mapping."""
        # Join the enumerators in one pass; the original recomputed the last
        # key via list(keys())[-1] on every iteration (O(n^2) per enum).
        enumerators = ','.join(
            f'{self.prefix}_{key.upper() if self.all_caps else key}={value}'
            for key, value in self.ordered_dict.items()
        )
        code = f'enum {self.enum_name}' + '{' + enumerators
        return code.replace('-', '_') + '};\n'

class Definition:
    """Emits a C preprocessor '#define NAME VALUE' line."""

    def __init__(self, definition, value ) -> None:
        self.definition = definition  # macro name
        self.value = value            # macro replacement text

    def generate_code(self):
        """Return the formatted #define line (trailing newline included)."""
        return '#define {} {}\n'.format(self.definition, self.value)

class ConstExpression:
    """Emits a C++ 'constexpr <type> <variable> = <value>;' line.

    Only a small whitelist of C++ types is supported.
    """

    # Types the exporter knows how to emit as constexpr.
    _SUPPORTED_TYPES = ('int', 'float', 'double', 'char', 'std::string')

    def __init__(self, type, variable, value) -> None:
        self.type = type
        self.variable = variable
        self.value = value

    def generate_code(self):
        """Return the constexpr line (trailing newline included).

        Raises:
            ValueError: if ``type`` is not supported. The original used a bare
                ``assert``, which is silently stripped under ``python -O``.
        """
        if self.type not in self._SUPPORTED_TYPES:
            raise ValueError(f'unsupported constexpr type: {self.type!r}')
        return f'constexpr {self.type} {self.variable} = {self.value};\n'
64 changes: 30 additions & 34 deletions src_py/nerlPlanner/JsonElementWorkerDefinitions.py
Original file line number Diff line number Diff line change
@@ -1,9 +1,5 @@

# Maps are based on src_cpp/opennnBridge/definitionsNN.h

LAYER_SPECIAL_TYPE_IDX_SCALING = "1"
LAYER_SPECIAL_TYPE_IDX_POOLING = "2"

from collections import OrderedDict

LayerTypeMap = OrderedDict([
Expand Down Expand Up @@ -75,36 +71,36 @@
("Bounding" , None)]
)

ModelTypeMapping = {
"approximation" : "1",
"classification" : "2",
"forecasting" : "3",
"encoder_decoder" : "4",
"nn" : "5",
"autoencoder" : "6",
"ae-classifier" : "7",
"fed-client": "8",
"fed-server": "9"
}

OptimizerTypeMapping = {
"none" : "0",
"SGD" : "1",
"Mini-Batch" : "2",
"Momentum" : "3",
"NAG" : "4",
"Adagrad" : "5",
"ADAM" : "6"
}

LossMethodMapping = {
"SSE" : "1", # Sum squared Error
"MSE" : "2", # Mean Squared Error
"NSE" : "3", # Normalized Squared Error
"Minkowski-E" : "4", # Minkowski Error
"WSE" : "5", # Weighted Squared Error
"CEE" : "6", # Cross Entropy Error
}
# Model types supported by a worker; string-encoded ids. Insertion order
# mirrors the ModelTypeEnum emitted into the auto-generated C++ header.
ModelTypeMapping = OrderedDict([
("approximation" , "1"),
("classification" , "2"),
("forecasting" , "3"),
("encoder_decoder" , "4"),
("nn" , "5"),
("autoencoder" , "6"),
("ae-classifier" , "7"),
("fed-client", "8"),
("fed-server", "9")
])

# Optimizer choices; string-encoded ids. Insertion order mirrors the
# OptimizerEnum emitted into the auto-generated C++ header.
OptimizerTypeMapping = OrderedDict([
("none" , "0"),
("SGD" , "1"),
("Mini-Batch" , "2"),
("Momentum" , "3"),
("NAG" , "4"),
("Adagrad" , "5"),
("ADAM" , "6")
])

# Loss methods; string-encoded ids. Insertion order mirrors the
# LossMethodEnum emitted into the auto-generated C++ header.
LossMethodMapping = OrderedDict([
("SSE" , "1"), # Sum squared Error
("MSE" , "2"), # Mean Squared Error
("NSE" , "3"), # Normalized Squared Error
("MinkowskiE" , "4"), # Minkowski Error
("WSE" , "5"), # Weighted Squared Error
("CEE" , "6"), # Cross Entropy Error
])

def get_key_by_value(in_map : dict, value):
list_of_values = list(in_map.values())
Expand Down

0 comments on commit d34b828

Please sign in to comment.