From 46dd5a75d5fb0532cce70d28e59d95c8470fe351 Mon Sep 17 00:00:00 2001
From: Semyon1104
Date: Mon, 28 Oct 2024 10:36:02 +0300
Subject: [PATCH] add LayerType

---
 app/Graph/graph_build.cpp | 34 ++++++++++++++++++++++++----------
 include/graph/graph.hpp   | 30 +++++++++++++++++++++++++++---
 include/layers/Layer.hpp  |  1 +
 3 files changed, 52 insertions(+), 13 deletions(-)

diff --git a/app/Graph/graph_build.cpp b/app/Graph/graph_build.cpp
index a1d474c7..322bc63a 100644
--- a/app/Graph/graph_build.cpp
+++ b/app/Graph/graph_build.cpp
@@ -43,9 +43,10 @@ void build_graph(Tensor input, Tensor output) {
       Tensor tmp_values = tensor;
       Tensor tmp_bias = make_tensor(tensor.get_bias());
-
-      layers.push_back(
-          std::make_shared<ConvLayer>(1, 0, 0, tmp_values, tmp_bias));
+      auto conv_layer =
+          std::make_shared<ConvLayer>(1, 0, 0, tmp_values, tmp_bias);
+      conv_layer->setName(kConvolution);
+      layers.push_back(conv_layer);
       std::cout << "ConvLayer added to layers." << std::endl;
     }
 
@@ -53,7 +54,9 @@
       Tensor tmp_values = tensor;
       Tensor tmp_bias = make_tensor(tensor.get_bias());
 
-      layers.push_back(std::make_shared<FCLayer>(tmp_values, tmp_bias));
+      auto fc_layer = std::make_shared<FCLayer>(tmp_values, tmp_bias);
+      fc_layer->setName(kFullyConnected);
+      layers.push_back(fc_layer);
       std::cout << "DenseLayer added to layers." << std::endl;
     }
 
@@ -61,25 +64,31 @@
       Shape shape = {2, 2};
       std::cout << "PoolingLayer shape: " << shape[0] << "x" << shape[1]
                 << std::endl;
-
-      layers.push_back(std::make_shared<PoolingLayer>(shape));
+      auto pool_layer = std::make_shared<PoolingLayer>(shape);
+      pool_layer->setName(kPooling);
+      layers.push_back(pool_layer);
       std::cout << "PoolingLayer added to layers." << std::endl;
     }
 
     if (layer_type.find("Flatten") != std::string::npos) {
-      layers.emplace_back(std::make_shared<FlattenLayer>());
+      auto flatten_layer = std::make_shared<FlattenLayer>();
+      flatten_layer->setName(kFlatten);
+      layers.push_back(flatten_layer);
       std::cout << "FlattenLayer added to layers."
                 << std::endl;
     }
 
     if (layer_type.find("Dropout") != std::string::npos) {
-      layers.emplace_back(std::make_shared<DropOutLayer>(0.5));
+      auto dropout_layer = std::make_shared<DropOutLayer>(0.5);
+      dropout_layer->setName(kDropout);
+      layers.push_back(dropout_layer);
       std::cout << "DropOutLayer added to layers with probability 0.5."
                 << std::endl;
     }
   }
-
+  std::cout << "number of layers - " << layers.size() + 1 << std::endl;
   Graph graph(static_cast<int>(layers.size()));
   InputLayer a1(kNhwc, kNchw, 1, 2);
+  std::cout << "InputLayer created." << std::endl;
 
   graph.setInput(a1, input);
 
@@ -91,10 +100,15 @@
 
   for (size_t i = 0; i < layers.size() - 1; ++i) {
     graph.makeConnection(*layers[i], *layers[i + 1]);
-    std::cout << "Connection made between layer " << i << " and layer " << i + 1
+    std::cout << "Connection made between layer " << i << " ("
+              << layerTypeToString(layers[i]->getName()) << ")"
+              << " and layer " << i + 1 << " ("
+              << layerTypeToString(layers[i + 1]->getName()) << ")"
               << std::endl;
   }
+
+  graph.setOutput(*layers.back(), output);
 
   std::cout << "Output set in graph."
             << std::endl;
 
diff --git a/include/graph/graph.hpp b/include/graph/graph.hpp
index d72e70e5..5c1a18a9 100644
--- a/include/graph/graph.hpp
+++ b/include/graph/graph.hpp
@@ -10,7 +10,30 @@
 #include "layers/Layer.hpp"
 
 namespace itlab_2023 {
-
+std::string layerTypeToString(LayerType type) {
+  switch (type) {
+    case kInput:
+      return "Input";
+    case kPooling:
+      return "Pooling";
+    case kNormalization:
+      return "Normalization";
+    case kDropout:
+      return "Dropout";
+    case kElementWise:
+      return "ElementWise";
+    case kConvolution:
+      return "Convolution";
+    case kFullyConnected:
+      return "FullyConnected";
+    case kFlatten:
+      return "Flatten";
+    case kOutput:
+      return "Output";
+    default:
+      return "Unknown";
+  }
+}
 class Graph {
   int BiggestSize_;
   int V_;
@@ -108,8 +131,9 @@
 #ifdef ENABLE_STATISTIC_TIME
         auto start = std::chrono::high_resolution_clock::now();
 #endif
-        std::cout << "Running layer " << i
-                  << " with input shape: " << inten_.get_shape()
+        std::cout << "Running layer " << i << " ("
+                  << layerTypeToString(layers_[i]->getName()) << ") "
+                  << "with input shape: " << inten_.get_shape()
                   << ", output shape: " << outten_->get_shape() << std::endl;
         layers_[i]->run(inten_, *outten_);
diff --git a/include/layers/Layer.hpp b/include/layers/Layer.hpp
index 5c84a7c7..1238f0ef 100644
--- a/include/layers/Layer.hpp
+++ b/include/layers/Layer.hpp
@@ -18,6 +18,7 @@ enum LayerType {
   kElementWise,
   kConvolution,
   kFullyConnected,
+  kFlatten,
   kOutput,
 };