Skip to content

Commit

Permalink
add LayerType
Browse files Browse the repository at this point in the history
  • Loading branch information
Semyon1104 committed Oct 28, 2024
1 parent b1c8f56 commit 46dd5a7
Show file tree
Hide file tree
Showing 3 changed files with 52 additions and 13 deletions.
34 changes: 24 additions & 10 deletions app/Graph/graph_build.cpp
Original file line number Diff line number Diff line change
Expand Up @@ -43,43 +43,52 @@ void build_graph(Tensor input, Tensor output) {
Tensor tmp_values = tensor;
Tensor tmp_bias = make_tensor(tensor.get_bias());


layers.push_back(
std::make_shared<ConvolutionalLayer>(1, 0, 0, tmp_values, tmp_bias));
auto conv_layer =
std::make_shared<ConvolutionalLayer>(1, 0, 0, tmp_values, tmp_bias);
conv_layer->setName(kConvolution);
layers.push_back(conv_layer);
std::cout << "ConvLayer added to layers." << std::endl;
}

if (layer_type.find("Dense") != std::string::npos) {
Tensor tmp_values = tensor;
Tensor tmp_bias = make_tensor(tensor.get_bias());

layers.push_back(std::make_shared<FCLayer>(tmp_values, tmp_bias));
auto fc_layer = std::make_shared<FCLayer>(tmp_values, tmp_bias);
fc_layer->setName(kFullyConnected);
layers.push_back(fc_layer);
std::cout << "DenseLayer added to layers." << std::endl;
}

if (layer_type.find("Pool") != std::string::npos) {
Shape shape = {2, 2};
std::cout << "PoolingLayer shape: " << shape[0] << "x" << shape[1]
<< std::endl;

layers.push_back(std::make_shared<PoolingLayer>(shape));
auto pool_layer = std::make_shared<PoolingLayer>(shape);
pool_layer->setName(kPooling);
layers.push_back(pool_layer);
std::cout << "PoolingLayer added to layers." << std::endl;
}

if (layer_type.find("Flatten") != std::string::npos) {
layers.emplace_back(std::make_shared<FlattenLayer>());
auto flatten_layer = std::make_shared<FlattenLayer>();
flatten_layer->setName(kFlatten);
layers.push_back(flatten_layer);
std::cout << "FlattenLayer added to layers." << std::endl;
}

if (layer_type.find("Dropout") != std::string::npos) {
layers.emplace_back(std::make_shared<DropOutLayer>(0.5));
auto dropout_layer = std::make_shared<DropOutLayer>(0.5);
dropout_layer->setName(kDropout);
layers.push_back(dropout_layer);
std::cout << "DropOutLayer added to layers with probability 0.5."
<< std::endl;
}
}

std::cout << "number of layers - " << layers.size() + 1<< std::endl;
Graph graph(static_cast<int>(layers.size()));
InputLayer a1(kNhwc, kNchw, 1, 2);

std::cout << "InputLayer created." << std::endl;

graph.setInput(a1, input);
Expand All @@ -91,10 +100,15 @@ void build_graph(Tensor input, Tensor output) {

for (size_t i = 0; i < layers.size() - 1; ++i) {
graph.makeConnection(*layers[i], *layers[i + 1]);
std::cout << "Connection made between layer " << i << " and layer " << i + 1
std::cout << "Connection made between layer " << i << " ("
<< layerTypeToString(layers[i]->getName()) << ")"
<< " and layer " << i + 1 << " ("
<< layerTypeToString(layers[i + 1]->getName()) << ")"
<< std::endl;
}



graph.setOutput(*layers.back(), output);
std::cout << "Output set in graph." << std::endl;

Expand Down
30 changes: 27 additions & 3 deletions include/graph/graph.hpp
Original file line number Diff line number Diff line change
Expand Up @@ -10,7 +10,30 @@
#include "layers/Layer.hpp"

namespace itlab_2023 {

/// @brief Maps a LayerType enumerator to a human-readable name for logging.
/// @param type The layer kind to describe.
/// @return The layer's name (e.g. "Convolution"), or "Unknown" for any
///         enumerator not handled below.
///
/// NOTE: `inline` is required here. This function is defined in a header
/// (graph.hpp); without `inline`, every translation unit that includes the
/// header emits its own external definition, violating the One Definition
/// Rule and causing multiple-definition linker errors.
inline std::string layerTypeToString(LayerType type) {
  switch (type) {
    case kInput:
      return "Input";
    case kPooling:
      return "Pooling";
    case kNormalization:
      return "Normalization";
    case kDropout:
      return "Dropout";
    case kElementWise:
      return "ElementWise";
    case kConvolution:
      return "Convolution";
    case kFullyConnected:
      return "FullyConnected";
    case kFlatten:
      return "Flatten";
    case kOutput:
      return "Output";
    default:
      return "Unknown";
  }
}
class Graph {
int BiggestSize_;
int V_;
Expand Down Expand Up @@ -108,8 +131,9 @@ class Graph {
#ifdef ENABLE_STATISTIC_TIME
auto start = std::chrono::high_resolution_clock::now();
#endif
std::cout << "Running layer " << i
<< " with input shape: " << inten_.get_shape()
std::cout << "Running layer " << i << " ("
<< layerTypeToString(layers_[i]->getName()) << ") "
<< "with input shape: " << inten_.get_shape()
<< ", output shape: " << outten_->get_shape() << std::endl;

layers_[i]->run(inten_, *outten_);
Expand Down
1 change: 1 addition & 0 deletions include/layers/Layer.hpp
Original file line number Diff line number Diff line change
Expand Up @@ -18,6 +18,7 @@ enum LayerType {
kElementWise,
kConvolution,
kFullyConnected,
kFlatten,
kOutput,
};

Expand Down

0 comments on commit 46dd5a7

Please sign in to comment.