Skip to content

Commit

Permalink
debug info inference
Browse files Browse the repository at this point in the history
  • Loading branch information
Semyon1104 committed Oct 27, 2024
2 parents e912fe7 + b6c2736 commit b1c8f56
Show file tree
Hide file tree
Showing 14 changed files with 639 additions and 76 deletions.
2 changes: 1 addition & 1 deletion .github/workflows/static-analysis.yml
Original file line number Diff line number Diff line change
Expand Up @@ -43,4 +43,4 @@ jobs:
cmake --build build --parallel
- name: Run clang-tidy
run: |
clang-tidy app/**/*.cpp src/**/*.cpp -format-style=file -header-filter=$PWD/.* -p build
clang-tidy app/**/*.cpp src/**/*.cpp -format-style=file -header-filter="($PWD/include/.*|$PWD/src/.*|$PWD/app/.*)" -p build
19 changes: 11 additions & 8 deletions app/Graph/CMakeLists.txt
Original file line number Diff line number Diff line change
Expand Up @@ -8,19 +8,22 @@ execute_process(
COMMAND ${CMAKE_COMMAND} --build "${CMAKE_SOURCE_DIR}/3rdparty/opencv/build" --config "${CMAKE_BUILD_TYPE}"
WORKING_DIRECTORY "${CMAKE_SOURCE_DIR}/3rdparty/opencv/build"
)

set(INCLUDE_HEADERS "${CMAKE_CURRENT_SOURCE_DIR}/build.hpp")
set(SRC_FILES "${CMAKE_CURRENT_SOURCE_DIR}/build.cpp")
add_library(BuildGraph STATIC ${INCLUDE_HEADERS} ${SRC_FILES})

set_target_properties(ReadLib PROPERTIES LINKER_LANGUAGE CXX)
set_target_properties(BuildGraph PROPERTIES LINKER_LANGUAGE CXX)

find_package(OpenCV REQUIRED PATHS "${CMAKE_SOURCE_DIR}/3rdparty/opencv/build")
include_directories(${OpenCV_INCLUDE_DIRS})

target_link_libraries(BuildGraph PUBLIC ${OpenCV_LIBS})
target_link_libraries(BuildGraph PUBLIC reader_lib)
target_link_libraries(BuildGraph PUBLIC TBB::tbb)
target_link_libraries(BuildGraph PUBLIC layers_lib)
target_link_libraries(BuildGraph PUBLIC gtest_main)

find_package( OpenCV REQUIRED PATHS "${CMAKE_SOURCE_DIR}/3rdparty/opencv/build" )
include_directories( ${OpenCV_INCLUDE_DIRS} )
target_link_libraries( BuildGraph ${OpenCV_LIBS} )
target_link_libraries( BuildGraph TBB::tbb)
target_link_libraries( BuildGraph layers_lib)
target_link_libraries( BuildGraph gtest_main)
target_include_directories(BuildGraph PUBLIC ${CMAKE_SOURCE_DIR}/3rdparty/Json/include)

add_executable(Graph_Build graph_build.cpp)
target_link_libraries(Graph_Build BuildGraph)
Expand Down
49 changes: 41 additions & 8 deletions app/Graph/graph_build.cpp
Original file line number Diff line number Diff line change
Expand Up @@ -7,8 +7,10 @@
#include "build.hpp"
#include "graph/graph.hpp"
#include "layers/ConvLayer.hpp"
#include "layers/DropOutLayer.hpp"
#include "layers/EWLayer.hpp"
#include "layers/FCLayer.hpp"
#include "layers/FlattenLayer.hpp"
#include "layers/InputLayer.hpp"
#include "layers/OutputLayer.hpp"
#include "layers/PoolingLayer.hpp"
Expand All @@ -21,53 +23,84 @@ void build_graph(Tensor input, Tensor output) {
std::string json_file = MODEL_PATH;
json model_data = read_json(json_file);

for (const auto& layer_data : model_data) {
std::cout << "Loaded model data from JSON." << std::endl;

for (const auto& layer_data : model_data) {
std::string layer_type = layer_data["type"];
std::cout << "Processing layer of type: " << layer_type << std::endl;

Tensor tensor =
create_tensor_from_json(layer_data["weights"], Type::kFloat);

if (layer_type.find("Conv") != std::string::npos) {
Shape shape = tensor.get_shape();
Tensor tmp_values = make_tensor(tensor.get_values(), shape);
std::cout << "PoolingLayer shape: ";
for (size_t i = 0; i < shape.dims(); ++i) {
std::cout << shape[i] << " ";
}
std::cout << std::endl;

Tensor tmp_values = tensor;
Tensor tmp_bias = make_tensor(tensor.get_bias());
layers.push_back(


layers.push_back(
std::make_shared<ConvolutionalLayer>(1, 0, 0, tmp_values, tmp_bias));
std::cout << "ConvLayer added to layers." << std::endl;
}

if (layer_type.find("Dense") != std::string::npos) {
Tensor tmp_values = make_tensor(tensor.get_values(), tensor.get_shape());
Tensor tmp_values = tensor;
Tensor tmp_bias = make_tensor(tensor.get_bias());

layers.push_back(std::make_shared<FCLayer>(tmp_values, tmp_bias));
std::cout << "DenseLayer added to layers." << std::endl;
}

if (layer_type.find("Pool") != std::string::npos) {
Shape shape = {2, 2};
std::cout << "PoolingLayer shape: " << shape[0] << "x" << shape[1]
<< std::endl;

layers.push_back(std::make_shared<PoolingLayer>(shape));
std::cout << "PoolingLayer added to layers." << std::endl;
}

if (layer_type.find("Flatten") != std::string::npos) {
layers.emplace_back(/*construcrtor of flatten*/);
layers.emplace_back(std::make_shared<FlattenLayer>());
std::cout << "FlattenLayer added to layers." << std::endl;
}

if (layer_type.find("Dropout") != std::string::npos) {
layers.emplace_back(/*construcrtor of dropout*/);
layers.emplace_back(std::make_shared<DropOutLayer>(0.5));
std::cout << "DropOutLayer added to layers with probability 0.5."
<< std::endl;
}
}
Graph graph(layers.size());

Graph graph(static_cast<int>(layers.size()));
InputLayer a1(kNhwc, kNchw, 1, 2);
std::cout << "InputLayer created." << std::endl;

graph.setInput(a1, input);
std::cout << "Input set in graph." << std::endl;

graph.makeConnection(a1, *layers[0]);
std::cout << "Connection made between InputLayer and first layer."
<< std::endl;

for (size_t i = 0; i < layers.size() - 1; ++i) {
graph.makeConnection(*layers[i], *layers[i + 1]);
std::cout << "Connection made between layer " << i << " and layer " << i + 1
<< std::endl;
}

graph.setOutput(*layers.back(), output);
std::cout << "Output set in graph." << std::endl;

std::cout << "Starting inference..." << std::endl;
graph.inference();
std::cout << "Inference completed." << std::endl;

std::vector<float> tmp = *output.as<float>();
std::vector<float> tmp_output = softmax<float>(*output.as<float>());
Expand Down Expand Up @@ -112,4 +145,4 @@ int main() {
Tensor output = make_tensor(vec, sh1);

build_graph(input, output);
}
}
15 changes: 15 additions & 0 deletions include/graph/graph.hpp
Original file line number Diff line number Diff line change
Expand Up @@ -79,9 +79,11 @@ class Graph {
std::vector<int> traversal;
q.push(start_);
visited[start_] = true;

while (!q.empty()) {
int current = q.front();
q.pop();

if (current == end_) {
int node = current;
while (node != -1) {
Expand All @@ -91,6 +93,7 @@ class Graph {
std::reverse(traversal.begin(), traversal.end());
break;
}

for (int ind = arrayV_[current]; ind < arrayV_[current + 1]; ind++) {
int neighbor = arrayE_[ind];
if (!visited[neighbor]) {
Expand All @@ -100,27 +103,39 @@ class Graph {
}
}
}

for (int i : traversal) {
#ifdef ENABLE_STATISTIC_TIME
auto start = std::chrono::high_resolution_clock::now();
#endif
std::cout << "Running layer " << i
<< " with input shape: " << inten_.get_shape()
<< ", output shape: " << outten_->get_shape() << std::endl;

layers_[i]->run(inten_, *outten_);

#ifdef ENABLE_STATISTIC_TENSORS
tensors_.push_back(inten_);
tensors_.push_back(*outten_);
#endif

#ifdef ENABLE_STATISTIC_WEIGHTS
weights_.push_back(layers_[i]->get_weights());
#endif

inten_ = *outten_;

#ifdef ENABLE_STATISTIC_TIME
auto end = std::chrono::high_resolution_clock::now();
auto elapsed =
std::chrono::duration_cast<std::chrono::milliseconds>(end - start);
time_.push_back(static_cast<int>(elapsed.count()));
std::cout << "Layer " << i << " execution time: " << elapsed.count()
<< " ms" << std::endl;
#endif
}
}

void setOutput(const Layer& lay, Tensor& vec) {
end_ = lay.getID();
outten_ = &vec;
Expand Down
19 changes: 19 additions & 0 deletions include/layers/DropOutLayer.hpp
Original file line number Diff line number Diff line change
@@ -0,0 +1,19 @@
#pragma once
#include <string>

#include "layers/Layer.hpp"

namespace itlab_2023 {

/// Dropout layer configured with a drop probability.
/// NOTE(review): presumably zeroes activations with probability drop_rate_
/// at run time — confirm against the implementation in DropOutLayer.cpp.
class DropOutLayer : public Layer {
 private:
  // Drop probability, expected in [0, 1]. Default-initialized to 0.0 so a
  // default-constructed layer has a well-defined (no-op) rate instead of an
  // indeterminate value.
  double drop_rate_ = 0.0;

 public:
  DropOutLayer() = default;
  // explicit: a plain double should not silently convert to a DropOutLayer.
  // Member-init list instead of assignment in the constructor body.
  explicit DropOutLayer(double drop_rate) : drop_rate_(drop_rate) {}
  static std::string get_name() { return "DropOut layer"; }
  // Runs the layer on `input`, writing the result into `output`
  // (defined in the corresponding .cpp file).
  void run(const Tensor& input, Tensor& output) override;
};

} // namespace itlab_2023
15 changes: 15 additions & 0 deletions include/layers/FlattenLayer.hpp
Original file line number Diff line number Diff line change
@@ -0,0 +1,15 @@
#pragma once
#include <string>

#include "layers/Layer.hpp"

namespace itlab_2023 {

/// Flatten layer: a stateless Layer with no configuration of its own.
/// NOTE(review): the name suggests it reshapes its input into a
/// one-dimensional tensor — confirm against FlattenLayer.cpp.
class FlattenLayer : public Layer {
 public:
  FlattenLayer() = default;
  // Human-readable layer name used for identification/logging.
  static std::string get_name() { return "Flatten layer"; }
  // Runs the layer on `input`, writing the result into `output`
  // (defined in the corresponding .cpp file).
  void run(const Tensor& input, Tensor& output) override;
};

} // namespace itlab_2023
5 changes: 4 additions & 1 deletion include/layers/Shape.hpp
Original file line number Diff line number Diff line change
@@ -1,5 +1,7 @@
#pragma once

#include <ostream>
#include <iostream>
#include <algorithm>
#include <initializer_list>
#include <numeric>
Expand Down Expand Up @@ -37,8 +39,9 @@ class Shape {
}
size_t dims() const noexcept { return dims_.size(); }
size_t get_index(const std::vector<size_t>& coords) const;

friend std::ostream& operator<<(std::ostream& os, const Shape& shape);
private:
std::vector<size_t> dims_;
};

} // namespace itlab_2023
Loading

0 comments on commit b1c8f56

Please sign in to comment.