
Moving autograd from a header-only lib to a compiled lib
pavanky committed Jul 5, 2017
1 parent 7bb0b6c commit 563c428
Showing 6 changed files with 267 additions and 176 deletions.
45 changes: 43 additions & 2 deletions CMakeLists.txt
@@ -1,9 +1,50 @@
-cmake_minimum_required(VERSION 3.5.2)
+cmake_minimum_required(VERSION 3.5.1)
 
 project(ArrayFireML
         VERSION 0.1.0
         LANGUAGES C CXX)
 
 find_package(ArrayFire REQUIRED)
-set(ArrayFireML_INCLUDE_DIRS ${CMAKE_CURRENT_SOURCE_DIR}/include)
+
+add_library(afml SHARED "")
+
+target_sources(afml
+  PRIVATE
+    src/autograd/Variable.cpp
+    src/autograd/Functions.cpp
+)
+
+target_include_directories(afml
+  PUBLIC
+    ${ArrayFire_INCLUDE_DIRS}
+    ${CMAKE_CURRENT_SOURCE_DIR}/include
+)
+
+target_compile_features(afml
+  PRIVATE
+    cxx_range_for
+    cxx_auto_type
+    cxx_lambdas
+    cxx_long_long_type
+    cxx_nullptr
+    cxx_raw_string_literals
+    cxx_right_angle_brackets
+    cxx_static_assert
+    cxx_thread_local
+    cxx_uniform_initialization
+    cxx_variadic_templates
+)
+
+target_link_libraries(afml
+  PUBLIC
+    af
+)
+
+set_target_properties(afml
+  PROPERTIES
+    VERSION "${ArrayFireML_VERSION}"
+    SOVERSION "${ArrayFireML_VERSION_MAJOR}"
+)
+
+
 add_subdirectory(examples)
7 changes: 1 addition & 6 deletions examples/CMakeLists.txt
@@ -2,14 +2,9 @@ function(build_example SRC)
   get_filename_component(src_name ${SRC} NAME_WE)
   set(target "${src_name}")
   add_executable(${target} ${SRC})
-  target_include_directories(${target}
-    PRIVATE
-      ${ArrayFire_INCLUDE_DIRS}
-      ${ArrayFireML_INCLUDE_DIRS}
-  )
   target_link_libraries(${target}
     PRIVATE
-      af
+      afml
   )
   target_compile_features(${target}
     PRIVATE cxx_range_for)
25 changes: 3 additions & 22 deletions include/af/autograd/Functions.hpp
@@ -8,31 +8,12 @@
  ********************************************************/
 #pragma once
 
-#include <af/autograd/Variable.hpp>
-
 namespace af {
     namespace autograd {
 
-        Variable operator +(const Variable lhs, const Variable rhs)
-        {
-            auto result = lhs.array() + rhs.array();
-            auto grad_func = [](std::vector<Variable> inputs, Variable grad_output) {
-                inputs[0].addGrad(grad_output);
-                inputs[1].addGrad(grad_output);
-            };
-            return Variable(result, {lhs, rhs}, grad_func);
-        }
-
-        Variable operator *(const Variable lhs, const Variable rhs)
-        {
-            auto result = lhs.array() * rhs.array();
-            auto grad_func = [](std::vector<Variable> inputs, Variable grad_output) {
-                inputs[0].addGrad(grad_output * inputs[1]);
-                inputs[1].addGrad(grad_output * inputs[0]);
-            };
-            return Variable(result, {lhs, rhs}, grad_func);
-        }
+        class Variable;
+
+        Variable operator +(const Variable lhs, const Variable rhs);
+        Variable operator *(const Variable lhs, const Variable rhs);
     }
     namespace ag = autograd;
 }
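
Note: the header can drop its include of af/autograd/Variable.hpp because a function declaration only needs its parameter and return types to be declared, not defined, even when they are passed by value. A minimal standalone sketch of the same pattern (Widget is an illustrative name, not part of this commit):

    class Widget;                       // forward declaration: Widget is incomplete here

    Widget combine(Widget a, Widget b); // fine: a declaration may use an incomplete type,
                                        // even for by-value parameters and return values

    // Defining or calling combine() requires the complete type, so the definition
    // lives in a source file that includes the full class -- the same split this
    // commit makes between Functions.hpp and Functions.cpp.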
172 changes: 26 additions & 146 deletions include/af/autograd/Variable.hpp
@@ -14,17 +14,11 @@
 #include <memory>
 #include <vector>
 #include <unordered_map>
-#include <stdexcept>
 
 #include <arrayfire.h>
 
 namespace af {
     namespace autograd {
-
-        // Forward declare the function
-        class Variable;
-        Variable operator +(const Variable lhs, const Variable rhs);
-
         class Variable
         {
         public:
@@ -34,32 +28,12 @@ namespace af {
 
         private:
             struct Shared {
-                Shared() :
-                    m_calc_grad(true),
-                    m_data(),
-                    m_inputs(),
-                    m_grads(),
-                    m_grad_func(nullptr)
-                {}
-
-                Shared(af::array data, bool calc_grad) :
-                    m_calc_grad(calc_grad),
-                    m_data(data),
-                    m_inputs(),
-                    m_grads(),
-                    m_grad_func(nullptr)
-                {}
-
+                Shared();
+                Shared(af::array data, bool calc_grad);
                 Shared(af::array data,
                        std::vector<Variable> inputs,
                        GradFunc_t grad_func,
-                       bool calc_grad) :
-                    m_calc_grad(calc_grad),
-                    m_data(data),
-                    m_inputs(inputs.begin(), inputs.end()),
-                    m_grads(),
-                    m_grad_func(grad_func)
-                {}
+                       bool calc_grad);
 
                 bool m_calc_grad;
                 af::array m_data;
@@ -70,127 +44,33 @@
 
         public:
 
-            Variable() :
-                m_shared(new Shared())
-            {
-            }
-
-            Variable(af::array data, bool calc_grad) :
-                m_shared(new Shared(data, calc_grad))
-            {}
-
+            Variable();
+            Variable(af::array data, bool calc_grad);
             Variable(af::array data,
                      std::vector<Variable> inputs,
-                     GradFunc_t grad_func) :
-                m_shared(nullptr)
-            {
-                bool calc_grad = false;
-                for (auto input : inputs) {
-                    calc_grad |= input.isCalcGrad();
-                }
-                if (calc_grad) {
-                    m_shared = std::shared_ptr<Shared>(new Shared(data, inputs, grad_func, true));
-                } else {
-                    m_shared = std::shared_ptr<Shared>(new Shared(data, false));
-                }
-            }
-
-            af::array array() const
-            {
-                return m_shared->m_data;
-            }
-
-            Variable grad() const
-            {
-                if (!m_shared->m_calc_grad) {
-                    throw af::exception("Gradient calculation disabled.");
-                }
-                if (m_shared->m_grads.size() == 0) {
-                    throw af::exception("Gradient hasn't been calculated yet.");
-                }
-                return m_shared->m_grads[0];
-            }
-
-            bool isCalcGrad()
-            {
-                return m_shared->m_calc_grad;
-            }
-
-            void setCalcGrad(bool calc_grad)
-            {
-                m_shared->m_calc_grad = calc_grad;
-                if (!calc_grad) {
-                    m_shared->m_grad_func = nullptr;
-                    m_shared->m_inputs.clear();
-                    m_shared->m_grads.clear();
-                }
-            }
-
-            void addGrad(Variable child_grad)
-            {
-                if (m_shared->m_calc_grad) {
-                    m_shared->m_grads.push_back(child_grad);
-                }
-            }
-
-            std::vector<Variable> getInputs() const
-            {
-                return m_shared->m_inputs;
-            }
-
-            void evalGrad()
-            {
-                // Return early if gradient calculation is disabled
-                if (!m_shared->m_calc_grad) return;
-                Variable grad = m_shared->m_grads[0];
-                for (unsigned i = 1; i < m_shared->m_grads.size(); i++) {
-                    grad = grad + m_shared->m_grads[i];
-                }
-                grad.array().eval();
-                m_shared->m_grads.clear();
-                m_shared->m_grads.push_back(grad);
-            }
-
-            void calcGradInputs()
-            {
-                evalGrad();
-                if (m_shared->m_grad_func) {
-                    m_shared->m_grad_func(m_shared->m_inputs, m_shared->m_grads[0]);
-                }
-            }
-
-            void backward(Variable grad)
-            {
-                this->addGrad(grad);
-                DAG_t dag = this->build();
-                for (auto iter = dag.rbegin(); iter != dag.rend(); iter++) {
-                    iter->calcGradInputs();
-                }
-            }
-
-            DAG_t build()
-            {
-                Cache_t cache;
-                DAG_t dag;
-                this->buildSubGraph(cache, dag);
-                return dag;
-            }
-
-            void buildSubGraph(Cache_t &cache, DAG_t &dag)
-            {
-                std::ptrdiff_t id = (std::ptrdiff_t)m_shared.get();
-                if (cache.find(id) != cache.end()) {
-                    return;
-                }
-                for (auto input : m_shared->m_inputs) {
-                    input.buildSubGraph(cache, dag);
-                }
-                cache[id] = true;
-                dag.push_back(*this);
-            }
+                     GradFunc_t grad_func);
+
+            af::array array() const;
+
+            Variable grad() const;
+
+            bool isCalcGrad();
+
+            void setCalcGrad(bool calc_grad);
+
+            void addGrad(Variable child_grad);
+
+            void evalGrad();
+
+            void calcGradInputs();
+
+            void backward(Variable grad);
+
+            DAG_t build();
+
+            void buildSubGraph(Cache_t &cache, DAG_t &dag);
         private:
             std::shared_ptr<Shared> m_shared;
         };
     }
     namespace ag = autograd;
 }
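
For context, a minimal sketch of how the Variable API declared above is driven once afml is built and linked (the values and variable names here are illustrative, not part of the commit):

    #include <af/autograd/Variable.hpp>
    #include <af/autograd/Functions.hpp>
    #include <arrayfire.h>

    int main()
    {
        using af::autograd::Variable;

        // Leaf variables; 'true' asks autograd to track gradients
        Variable x(af::randu(5), true);
        Variable y(af::randu(5), true);

        // Building an expression records the inputs and a grad_func
        Variable z = x + y;

        // Seed the backward pass with dz/dz = 1; backward() builds the DAG
        // (build()/buildSubGraph()) and calls each node's grad_func in
        // reverse topological order
        z.backward(Variable(af::constant(1.0, 5), false));

        af_print(x.grad().array()); // all ones for '+'
        af_print(y.grad().array()); // all ones for '+'
        return 0;
    }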
37 changes: 37 additions & 0 deletions src/autograd/Functions.cpp
@@ -0,0 +1,37 @@
+/*******************************************************
+ * Copyright (c) 2017, ArrayFire
+ * All rights reserved.
+ *
+ * This file is distributed under 3-clause BSD license.
+ * The complete license agreement can be obtained at:
+ * http://arrayfire.com/licenses/BSD-3-Clause
+ ********************************************************/
+
+#include <af/autograd/Variable.hpp>
+#include <af/autograd/Functions.hpp>
+
+namespace af {
+    namespace autograd {
+
+        Variable operator +(const Variable lhs, const Variable rhs)
+        {
+            auto result = lhs.array() + rhs.array();
+            auto grad_func = [](std::vector<Variable> inputs, Variable grad_output) {
+                inputs[0].addGrad(grad_output);
+                inputs[1].addGrad(grad_output);
+            };
+            return Variable(result, {lhs, rhs}, grad_func);
+        }
+
+        Variable operator *(const Variable lhs, const Variable rhs)
+        {
+            auto result = lhs.array() * rhs.array();
+            auto grad_func = [](std::vector<Variable> inputs, Variable grad_output) {
+                inputs[0].addGrad(grad_output * inputs[1]);
+                inputs[1].addGrad(grad_output * inputs[0]);
+            };
+            return Variable(result, {lhs, rhs}, grad_func);
+        }
+
+    }
+}
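
The grad_func captured by operator * encodes the product rule: for z = x * y, dz/dx = grad_output * y and dz/dy = grad_output * x. A small sketch checking that behavior (again illustrative, not part of the commit):

    #include <af/autograd/Variable.hpp>
    #include <af/autograd/Functions.hpp>
    #include <arrayfire.h>

    int main()
    {
        using af::autograd::Variable;

        Variable x(af::constant(3.0, 1), true);
        Variable y(af::constant(4.0, 1), true);

        Variable z = x * y;                 // forward: z = 12
        z.backward(Variable(af::constant(1.0, 1), false));

        af_print(x.grad().array());         // 4 == y, from grad_output * inputs[1]
        af_print(y.grad().array());         // 3 == x, from grad_output * inputs[0]
        return 0;
    }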
157 changes: 157 additions & 0 deletions src/autograd/Variable.cpp
Large diffs are not rendered by default.
