Properly initialize the hidden and output layers.

Relintai 2023-02-06 14:24:43 +01:00
parent 879464fe0d
commit f9b998d5d0
4 changed files with 107 additions and 1 deletion

View File

@@ -16,6 +16,7 @@ int MLPPHiddenLayer::get_n_hidden() const {
}
void MLPPHiddenLayer::set_n_hidden(const int val) {
n_hidden = val;
_initialized = false;
}
MLPPActivation::ActivationFunction MLPPHiddenLayer::get_activation() const {
@@ -23,6 +24,7 @@ MLPPActivation::ActivationFunction MLPPHiddenLayer::get_activation() const {
}
void MLPPHiddenLayer::set_activation(const MLPPActivation::ActivationFunction val) {
activation = val;
_initialized = false;
}
Ref<MLPPMatrix> MLPPHiddenLayer::get_input() {
@@ -30,6 +32,7 @@ Ref<MLPPMatrix> MLPPHiddenLayer::get_input() {
}
void MLPPHiddenLayer::set_input(const Ref<MLPPMatrix> &val) {
input = val;
_initialized = false;
}
Ref<MLPPMatrix> MLPPHiddenLayer::get_weights() {
@@ -37,6 +40,7 @@ Ref<MLPPMatrix> MLPPHiddenLayer::get_weights() {
}
void MLPPHiddenLayer::set_weights(const Ref<MLPPMatrix> &val) {
weights = val;
_initialized = false;
}
Ref<MLPPVector> MLPPHiddenLayer::MLPPHiddenLayer::get_bias() {
@@ -44,6 +48,7 @@ Ref<MLPPVector> MLPPHiddenLayer::MLPPHiddenLayer::get_bias() {
}
void MLPPHiddenLayer::set_bias(const Ref<MLPPVector> &val) {
bias = val;
_initialized = false;
}
Ref<MLPPMatrix> MLPPHiddenLayer::get_z() {
@@ -51,6 +56,7 @@ Ref<MLPPMatrix> MLPPHiddenLayer::get_z() {
}
void MLPPHiddenLayer::set_z(const Ref<MLPPMatrix> &val) {
z = val;
_initialized = false;
}
Ref<MLPPMatrix> MLPPHiddenLayer::get_a() {
@@ -58,6 +64,7 @@ Ref<MLPPMatrix> MLPPHiddenLayer::get_a() {
}
void MLPPHiddenLayer::set_a(const Ref<MLPPMatrix> &val) {
a = val;
_initialized = false;
}
Ref<MLPPVector> MLPPHiddenLayer::get_z_test() {
@@ -65,6 +72,7 @@ Ref<MLPPVector> MLPPHiddenLayer::get_z_test() {
}
void MLPPHiddenLayer::set_z_test(const Ref<MLPPVector> &val) {
z_test = val;
_initialized = false;
}
Ref<MLPPVector> MLPPHiddenLayer::get_a_test() {
@@ -72,6 +80,7 @@ Ref<MLPPVector> MLPPHiddenLayer::get_a_test() {
}
void MLPPHiddenLayer::set_a_test(const Ref<MLPPVector> &val) {
a_test = val;
_initialized = false;
}
Ref<MLPPMatrix> MLPPHiddenLayer::get_delta() {
@@ -79,6 +88,7 @@ Ref<MLPPMatrix> MLPPHiddenLayer::get_delta() {
}
void MLPPHiddenLayer::set_delta(const Ref<MLPPMatrix> &val) {
delta = val;
_initialized = false;
}
MLPPReg::RegularizationType MLPPHiddenLayer::get_reg() const {
@@ -86,6 +96,7 @@ MLPPReg::RegularizationType MLPPHiddenLayer::get_reg() const {
}
void MLPPHiddenLayer::set_reg(const MLPPReg::RegularizationType val) {
reg = val;
_initialized = false;
}
real_t MLPPHiddenLayer::get_lambda() const {
@@ -93,6 +104,7 @@ real_t MLPPHiddenLayer::get_lambda() const {
}
void MLPPHiddenLayer::set_lambda(const real_t val) {
lambda = val;
_initialized = false;
}
real_t MLPPHiddenLayer::get_alpha() const {
@@ -100,6 +112,7 @@ real_t MLPPHiddenLayer::get_alpha() const {
}
void MLPPHiddenLayer::set_alpha(const real_t val) {
alpha = val;
_initialized = false;
}
MLPPUtilities::WeightDistributionType MLPPHiddenLayer::get_weight_init() const {
@@ -107,9 +120,33 @@ MLPPUtilities::WeightDistributionType MLPPHiddenLayer::get_weight_init() const {
}
void MLPPHiddenLayer::set_weight_init(const MLPPUtilities::WeightDistributionType val) {
weight_init = val;
_initialized = false;
}
bool MLPPHiddenLayer::is_initialized() {
return _initialized;
}
void MLPPHiddenLayer::initialize() {
if (_initialized) {
return;
}
weights->resize(Size2i(n_hidden, input->size().x));
bias->resize(n_hidden);
MLPPUtilities utils;
utils.weight_initializationm(weights, weight_init);
utils.bias_initializationv(bias);
_initialized = true;
}
void MLPPHiddenLayer::forward_pass() {
if (!_initialized) {
initialize();
}
MLPPLinAlg alg;
MLPPActivation avn;
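
Taken together, these hunks implement a dirty-flag lazy-initialization scheme: every setter clears _initialized, initialize() returns early once the buffers exist, and forward_pass() (like test() below) re-initializes on demand, so a reconfigured layer can never run with stale weight shapes. A minimal standalone sketch of the same pattern, with illustrative names standing in for the MLPP types:

    #include <vector>

    class LazyLayer {
    public:
        void set_n_hidden(int val) {
            n_hidden = val;
            _initialized = false; // any configuration change invalidates the buffers
        }

        bool is_initialized() const { return _initialized; }

        void initialize() {
            if (_initialized) {
                return; // idempotent: repeated calls are cheap
            }
            weights.assign(n_hidden, 0.0); // stand-in for resize + weight init
            _initialized = true;
        }

        void forward_pass() {
            if (!_initialized) {
                initialize(); // self-healing entry point
            }
            // ... compute z and a here ...
        }

    private:
        int n_hidden = 0;
        std::vector<double> weights;
        bool _initialized = false;
    };

    int main() {
        LazyLayer layer;
        layer.set_n_hidden(16); // marks the layer dirty
        layer.forward_pass();   // triggers initialize() on first use
        return layer.is_initialized() ? 0 : 1;
    }
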
@@ -118,6 +155,10 @@ void MLPPHiddenLayer::forward_pass() {
}
void MLPPHiddenLayer::test(const Ref<MLPPVector> &x) {
if (!_initialized) {
initialize();
}
MLPPLinAlg alg;
MLPPActivation avn;
@@ -149,13 +190,15 @@ MLPPHiddenLayer::MLPPHiddenLayer(int p_n_hidden, MLPPActivation::ActivationFunct
weights.instance();
bias.instance();
-	weights->resize(Size2i(input->size().x, n_hidden));
+	weights->resize(Size2i(n_hidden, input->size().x));
bias->resize(n_hidden);
MLPPUtilities utils;
utils.weight_initializationm(weights, weight_init);
utils.bias_initializationv(bias);
_initialized = true;
}
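
The single deletion in this commit is the resize above: the weight matrix had been allocated transposed. Assuming Size2i packs (columns, rows), following Godot's (width, height) convention, and that the forward pass computes Z = input * weights, the weights need one row per input feature (input->size().x) and one column per hidden unit, which is what the replacement line produces; initialize() above uses the same corrected order. A small shape-check sketch (all names illustrative) of why the argument order matters:

    #include <cassert>

    struct Shape {
        int cols; // Size2i.x
        int rows; // Size2i.y
    };

    // Z = X * W is only defined when X.cols == W.rows; Z is W.cols wide, X.rows tall.
    Shape matmul_shape(Shape x, Shape w) {
        assert(x.cols == w.rows);
        return Shape{ w.cols, x.rows };
    }

    int main() {
        Shape input{ /*cols = features*/ 4, /*rows = samples*/ 32 };
        int n_hidden = 8;
        Shape weights{ n_hidden, input.cols }; // new order: Size2i(n_hidden, input->size().x)
        Shape z = matmul_shape(input, weights); // fine: 8 columns x 32 rows
        // The old order, Size2i(input->size().x, n_hidden), gives weights n_hidden
        // rows, which fails the check whenever n_hidden differs from the feature count.
        (void)z;
        return 0;
    }
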
MLPPHiddenLayer::MLPPHiddenLayer() {
@@ -179,6 +222,8 @@ MLPPHiddenLayer::MLPPHiddenLayer() {
weights.instance();
bias.instance();
_initialized = false;
}
MLPPHiddenLayer::~MLPPHiddenLayer() {
}
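
Note how the two constructors end in opposite states: the parameterized constructor above knows its dimensions, so it sizes and initializes its buffers immediately and sets _initialized to true, while the default constructor has nothing to size yet and leaves the flag false for forward_pass() to pick up later. A compressed sketch of that lifecycle split (illustrative, not the module's real class):

    struct SketchLayer {
        bool _initialized;

        // Parameterized: dimensions are known, so allocate and flag ready now.
        explicit SketchLayer(int n_hidden) : _initialized(true) {
            (void)n_hidden; // buffers would be sized and initialized here
        }

        // Default: nothing is known yet; setters clear the flag anyway, and
        // forward_pass() initializes on first use.
        SketchLayer() : _initialized(false) {}
    };

    int main() {
        SketchLayer eager(8);
        SketchLayer deferred;
        return (eager._initialized && !deferred._initialized) ? 0 : 1;
    }
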
@@ -240,6 +285,9 @@ void MLPPHiddenLayer::_bind_methods() {
ClassDB::bind_method(D_METHOD("set_weight_init", "val"), &MLPPHiddenLayer::set_weight_init);
ADD_PROPERTY(PropertyInfo(Variant::INT, "set_weight_init"), "set_weight_init", "get_weight_init");
ClassDB::bind_method(D_METHOD("is_initialized"), &MLPPHiddenLayer::is_initialized);
ClassDB::bind_method(D_METHOD("initialize"), &MLPPHiddenLayer::initialize);
ClassDB::bind_method(D_METHOD("forward_pass"), &MLPPHiddenLayer::forward_pass);
ClassDB::bind_method(D_METHOD("test", "x"), &MLPPHiddenLayer::test);
}
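
Registering is_initialized, initialize, forward_pass, and test through ClassDB also makes the new lifecycle callable dynamically by name. A hedged fragment (it assumes the engine context this module compiles in and the usual Object::call dispatch; this is not code from the commit):

    Ref<MLPPHiddenLayer> layer;
    layer.instance();
    layer->call("set_n_hidden", 16);       // dispatched through the ClassDB binding
    layer->call("initialize");
    bool ready = layer->call("is_initialized");
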
@@ -338,6 +386,7 @@ MLPPOldHiddenLayer::MLPPOldHiddenLayer(int p_n_hidden, std::string p_activation,
void MLPPOldHiddenLayer::forwardPass() {
MLPPLinAlg alg;
MLPPActivation avn;
z = alg.mat_vec_add(alg.matmult(input, weights), bias);
a = (avn.*activation_map[activation])(z, false);
}
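
For reference, the computation these guards protect is the usual affine map spelled out by the old-style forwardPass() above: z = input * weights + bias, with the bias vector added to every row, followed by an elementwise activation. A standalone sketch of what matmult plus mat_vec_add compute, using plain row-major std::vector matrices and tanh standing in for the configured activation:

    #include <cmath>
    #include <vector>

    using Mat = std::vector<std::vector<double>>;

    // a = activation(X * W + b), with b broadcast across the rows of X * W.
    Mat forward(const Mat &X, const Mat &W, const std::vector<double> &b) {
        const size_t n = X.size();    // samples
        const size_t d = W.size();    // input features
        const size_t h = W[0].size(); // hidden units
        Mat A(n, std::vector<double>(h, 0.0));
        for (size_t i = 0; i < n; ++i) {
            for (size_t j = 0; j < h; ++j) {
                double z = b[j];            // mat_vec_add: per-column bias
                for (size_t k = 0; k < d; ++k) {
                    z += X[i][k] * W[k][j]; // matmult
                }
                A[i][j] = std::tanh(z);     // activation(z), deriv = false
            }
        }
        return A;
    }

    int main() {
        Mat X = { { 1.0, 0.0 } };                  // 1 sample, 2 features
        Mat W = { { 0.5, -0.5 }, { 0.25, 0.75 } }; // 2 features x 2 hidden units
        std::vector<double> b = { 0.0, 0.1 };
        Mat A = forward(X, W, b); // A[0] = { tanh(0.5), tanh(-0.4) }
        return A[0].size() == 2 ? 0 : 1;
    }
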

View File

@@ -70,6 +70,9 @@ public:
MLPPUtilities::WeightDistributionType get_weight_init() const;
void set_weight_init(const MLPPUtilities::WeightDistributionType val);
bool is_initialized();
void initialize();
void forward_pass();
void test(const Ref<MLPPVector> &x);
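
These four declarations are the public lifecycle this commit adds to the header. A hypothetical call sequence (the data matrix and the explicit initialize() call are illustrative; forward_pass() would self-initialize anyway):

    Ref<MLPPMatrix> data; // stands in for a real dataset matrix
    data.instance();

    Ref<MLPPHiddenLayer> layer;
    layer.instance();
    layer->set_n_hidden(16); // any setter marks the layer dirty
    layer->set_input(data);
    if (!layer->is_initialized()) {
        layer->initialize(); // explicit here, but forward_pass() self-initializes too
    }
    layer->forward_pass();
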
@@ -103,6 +106,8 @@ protected:
real_t alpha; /* This is the controlling param for Elastic Net*/
MLPPUtilities::WeightDistributionType weight_init;
bool _initialized;
};
class MLPPOldHiddenLayer {

View File

@@ -16,6 +16,7 @@ int MLPPOutputLayer::get_n_hidden() {
}
void MLPPOutputLayer::set_n_hidden(const int val) {
n_hidden = val;
_initialized = false;
}
MLPPActivation::ActivationFunction MLPPOutputLayer::get_activation() {
@@ -23,6 +24,7 @@ MLPPActivation::ActivationFunction MLPPOutputLayer::get_activation() {
}
void MLPPOutputLayer::set_activation(const MLPPActivation::ActivationFunction val) {
activation = val;
_initialized = false;
}
MLPPCost::CostTypes MLPPOutputLayer::get_cost() {
@@ -30,6 +32,7 @@ MLPPCost::CostTypes MLPPOutputLayer::get_cost() {
}
void MLPPOutputLayer::set_cost(const MLPPCost::CostTypes val) {
cost = val;
_initialized = false;
}
Ref<MLPPMatrix> MLPPOutputLayer::get_input() {
@@ -37,6 +40,7 @@ Ref<MLPPMatrix> MLPPOutputLayer::get_input() {
}
void MLPPOutputLayer::set_input(const Ref<MLPPMatrix> &val) {
input = val;
_initialized = false;
}
Ref<MLPPVector> MLPPOutputLayer::get_weights() {
@@ -44,6 +48,7 @@ Ref<MLPPVector> MLPPOutputLayer::get_weights() {
}
void MLPPOutputLayer::set_weights(const Ref<MLPPVector> &val) {
weights = val;
_initialized = false;
}
real_t MLPPOutputLayer::MLPPOutputLayer::get_bias() {
@@ -51,6 +56,7 @@ real_t MLPPOutputLayer::MLPPOutputLayer::get_bias() {
}
void MLPPOutputLayer::set_bias(const real_t val) {
bias = val;
_initialized = false;
}
Ref<MLPPVector> MLPPOutputLayer::get_z() {
@@ -58,6 +64,7 @@ Ref<MLPPVector> MLPPOutputLayer::get_z() {
}
void MLPPOutputLayer::set_z(const Ref<MLPPVector> &val) {
z = val;
_initialized = false;
}
Ref<MLPPVector> MLPPOutputLayer::get_a() {
@@ -65,6 +72,7 @@ Ref<MLPPVector> MLPPOutputLayer::get_a() {
}
void MLPPOutputLayer::set_a(const Ref<MLPPVector> &val) {
a = val;
_initialized = false;
}
Ref<MLPPVector> MLPPOutputLayer::get_z_test() {
@@ -72,6 +80,7 @@ Ref<MLPPVector> MLPPOutputLayer::get_z_test() {
}
void MLPPOutputLayer::set_z_test(const Ref<MLPPVector> &val) {
z_test = val;
_initialized = false;
}
Ref<MLPPVector> MLPPOutputLayer::get_a_test() {
@@ -79,6 +88,7 @@ Ref<MLPPVector> MLPPOutputLayer::get_a_test() {
}
void MLPPOutputLayer::set_a_test(const Ref<MLPPVector> &val) {
a_test = val;
_initialized = false;
}
Ref<MLPPVector> MLPPOutputLayer::get_delta() {
@@ -86,6 +96,7 @@ Ref<MLPPVector> MLPPOutputLayer::get_delta() {
}
void MLPPOutputLayer::set_delta(const Ref<MLPPVector> &val) {
delta = val;
_initialized = false;
}
MLPPReg::RegularizationType MLPPOutputLayer::get_reg() {
@@ -100,6 +111,7 @@ real_t MLPPOutputLayer::get_lambda() {
}
void MLPPOutputLayer::set_lambda(const real_t val) {
lambda = val;
_initialized = false;
}
real_t MLPPOutputLayer::get_alpha() {
@@ -107,6 +119,7 @@ real_t MLPPOutputLayer::get_alpha() {
}
void MLPPOutputLayer::set_alpha(const real_t val) {
alpha = val;
_initialized = false;
}
MLPPUtilities::WeightDistributionType MLPPOutputLayer::get_weight_init() {
@@ -114,9 +127,32 @@ MLPPUtilities::WeightDistributionType MLPPOutputLayer::get_weight_init() {
}
void MLPPOutputLayer::set_weight_init(const MLPPUtilities::WeightDistributionType val) {
weight_init = val;
_initialized = false;
}
bool MLPPOutputLayer::is_initialized() {
return _initialized;
}
void MLPPOutputLayer::initialize() {
if (_initialized) {
return;
}
weights->resize(n_hidden);
MLPPUtilities utils;
utils.weight_initializationv(weights, weight_init);
bias = utils.bias_initializationr();
_initialized = true;
}
void MLPPOutputLayer::forward_pass() {
if (!_initialized) {
initialize();
}
MLPPLinAlg alg;
MLPPActivation avn;
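
The output layer mirrors the hidden layer's dirty-flag scheme, but its weights are a vector and its bias a scalar, so initialize() only needs a one-dimensional resize and the pre-activation is one value per sample: z_i = x_i . w + b. A standalone sketch of that single-output product (plain C++, names illustrative):

    #include <vector>

    using Mat = std::vector<std::vector<double>>;

    // z = X * w + b: a matrix-vector product plus a scalar bias per sample.
    std::vector<double> output_pre_activation(const Mat &X,
            const std::vector<double> &w, double b) {
        std::vector<double> z(X.size(), b);
        for (size_t i = 0; i < X.size(); ++i) {
            for (size_t k = 0; k < w.size(); ++k) {
                z[i] += X[i][k] * w[k];
            }
        }
        return z;
    }

    int main() {
        Mat X = { { 1.0, 2.0 }, { 3.0, 4.0 } }; // 2 samples, 2 features
        std::vector<double> w = { 0.5, -0.25 };
        std::vector<double> z = output_pre_activation(X, w, 0.1);
        // z = { 0.5 - 0.5 + 0.1, 1.5 - 1.0 + 0.1 } = { 0.1, 0.6 }
        return z.size() == 2 ? 0 : 1;
    }
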
@@ -125,6 +161,10 @@ void MLPPOutputLayer::forward_pass() {
}
void MLPPOutputLayer::test(const Ref<MLPPVector> &x) {
if (!_initialized) {
initialize();
}
MLPPLinAlg alg;
MLPPActivation avn;
@@ -162,6 +202,8 @@ MLPPOutputLayer::MLPPOutputLayer(int p_n_hidden, MLPPActivation::ActivationFunct
utils.weight_initializationv(weights, weight_init);
bias = utils.bias_initializationr();
_initialized = true;
}
MLPPOutputLayer::MLPPOutputLayer() {
@@ -185,6 +227,8 @@ MLPPOutputLayer::MLPPOutputLayer() {
weights.instance();
bias = 0;
_initialized = false;
}
MLPPOutputLayer::~MLPPOutputLayer() {
}
@@ -250,6 +294,9 @@ void MLPPOutputLayer::_bind_methods() {
ClassDB::bind_method(D_METHOD("set_weight_init", "val"), &MLPPOutputLayer::set_weight_init);
ADD_PROPERTY(PropertyInfo(Variant::INT, "set_weight_init"), "set_weight_init", "get_weight_init");
ClassDB::bind_method(D_METHOD("is_initialized"), &MLPPOutputLayer::is_initialized);
ClassDB::bind_method(D_METHOD("initialize"), &MLPPOutputLayer::initialize);
ClassDB::bind_method(D_METHOD("forward_pass"), &MLPPOutputLayer::forward_pass);
ClassDB::bind_method(D_METHOD("test", "x"), &MLPPOutputLayer::test);
}

View File

@@ -74,6 +74,9 @@ public:
MLPPUtilities::WeightDistributionType get_weight_init();
void set_weight_init(const MLPPUtilities::WeightDistributionType val);
bool is_initialized();
void initialize();
void forward_pass();
void test(const Ref<MLPPVector> &x);
@@ -108,6 +111,8 @@ protected:
real_t alpha; /* This is the controlling param for Elastic Net*/
MLPPUtilities::WeightDistributionType weight_init;
bool _initialized;
};
class MLPPOldOutputLayer {