-
Notifications
You must be signed in to change notification settings - Fork 23
Commit
This commit does not belong to any branch on this repository, and may belong to a fork outside of the repository.
FEAT: Adding SGD, Adam, and RMSProp optimizers
- Moved zeroGrad to be part of Optimizer class - Renamed perceptron.cpp to xor.cpp - Modified xor example to run with SGD, Adam, or RMSProp optimizers
- Loading branch information
Showing
8 changed files
with
346 additions
and
25 deletions.
There are no files selected for viewing
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -0,0 +1,9 @@ | ||
/******************************************************* | ||
* Copyright (c) 2017, ArrayFire | ||
* All rights reserved. | ||
* | ||
* This file is distributed under 3-clause BSD license. | ||
* The complete license agreement can be obtained at: | ||
* http://arrayfire.com/licenses/BSD-3-Clause | ||
********************************************************/ | ||
#include <af/optim/Optimizers.hpp> |
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -0,0 +1,90 @@ | ||
/******************************************************* | ||
* Copyright (c) 2017, ArrayFire | ||
* All rights reserved. | ||
* | ||
* This file is distributed under 3-clause BSD license. | ||
* The complete license agreement can be obtained at: | ||
* http://arrayfire.com/licenses/BSD-3-Clause | ||
********************************************************/ | ||
|
||
#pragma once | ||
|
||
#include <af/autograd/Variable.hpp> | ||
#include <arrayfire.h> | ||
|
||
#include <vector> | ||
|
||
namespace af | ||
{ | ||
namespace optim | ||
{ | ||
|
||
class Optimizer | ||
{ | ||
protected: | ||
std::vector<autograd::Variable> m_parameters; | ||
public: | ||
|
||
Optimizer(const std::vector<autograd::Variable> ¶meters); | ||
|
||
virtual void update() = 0; | ||
|
||
void zeroGrad(); | ||
}; | ||
|
||
/**
 * Stochastic gradient descent with optional momentum, weight decay and
 * Nesterov acceleration.
 *
 * Declaration only — the update rule lives in the corresponding .cpp.
 */
class SGDOptimizer : public Optimizer
{
    bool m_use_nesterov;                 // if true, use Nesterov-style momentum
    double m_lr;                         // learning rate
    double m_mu;                         // momentum coefficient (0 disables momentum)
    double m_wd;                         // weight decay (L2) coefficient
    std::vector<af::array> m_velocities; // per-parameter momentum buffers
public:
    /// @param parameters   Variables to optimize.
    /// @param learning_rate Step size.
    /// @param momentum     Momentum coefficient (default 0 = plain SGD).
    /// @param weight_decay L2 penalty coefficient (default 0 = off).
    /// @param use_nesterov Enable Nesterov momentum (default false).
    SGDOptimizer(const std::vector<autograd::Variable> &parameters,
                 double learning_rate, double momentum = 0,
                 double weight_decay = 0,
                 bool use_nesterov = false);
    /// One SGD step over all parameters.
    void update();
};
|
||
/**
 * Adam optimizer: adaptive per-parameter learning rates from exponentially
 * decayed first and second moment estimates of the gradients.
 *
 * Declaration only — the update rule lives in the corresponding .cpp.
 */
class AdamOptimizer : public Optimizer
{
    double m_lr;    // learning rate
    double m_beta1; // decay rate for the first-moment estimate
    double m_beta2; // decay rate for the second-moment estimate
    double m_eps;   // small constant added for numerical stability
    double m_wd;    // weight decay (L2) coefficient
    // Step counter — presumably used for Adam's bias correction of the
    // moment estimates; confirm against the implementation.
    int m_count;
    std::vector<af::array> m_biased_first;  // per-parameter first-moment buffers
    std::vector<af::array> m_biased_second; // per-parameter second-moment buffers
public:
    /// @param parameters   Variables to optimize.
    /// @param learning_rate Step size.
    /// @param beta1        First-moment decay rate (default 0.9).
    /// @param beta2        Second-moment decay rate (default 0.999).
    /// @param epsilon      Numerical-stability term (default 1e-8).
    /// @param weight_decay L2 penalty coefficient (default 0 = off).
    AdamOptimizer(const std::vector<autograd::Variable> &parameters,
                  double learning_rate,
                  double beta1 = 0.9,
                  double beta2 = 0.999,
                  double epsilon = 1E-8,
                  double weight_decay = 0);
    /// One Adam step over all parameters.
    void update();
};
|
||
/**
 * RMSProp optimizer: scales each parameter's step by a running average of
 * squared gradients; optionally also tracks a first-moment average
 * (centered variant) when `use_first` is set.
 *
 * Declaration only — the update rule lives in the corresponding .cpp.
 */
class RMSPropOptimizer : public Optimizer
{
    // If true, additionally maintain m_first — presumably the centered
    // RMSProp variant; confirm against the implementation.
    bool m_use_first;
    double m_lr;  // learning rate
    double m_rho; // decay rate of the running average(s)
    double m_eps; // small constant added for numerical stability
    double m_wd;  // weight decay (L2) coefficient
    std::vector<af::array> m_first;  // per-parameter first-moment buffers (only if m_use_first)
    std::vector<af::array> m_second; // per-parameter squared-gradient buffers
public:
    /// @param parameters   Variables to optimize.
    /// @param learning_rate Step size.
    /// @param rho          Running-average decay rate (default 0.99).
    /// @param epsilon      Numerical-stability term (default 1e-8).
    /// @param weight_decay L2 penalty coefficient (default 0 = off).
    /// @param use_first    Also track the first moment (default false).
    RMSPropOptimizer(const std::vector<autograd::Variable> &parameters,
                     double learning_rate,
                     double rho = 0.99,
                     double epsilon = 1E-8,
                     double weight_decay = 0,
                     bool use_first = false);
    /// One RMSProp step over all parameters.
    void update();
};
|
||
} | ||
} |
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Oops, something went wrong.