mirror of
https://github.com/Relintai/pmlpp.git
synced 2025-01-08 17:29:36 +01:00
New initial OutputLayer implementation.
This commit is contained in:
parent
5e82d4a907
commit
b004a092b7
@ -8,7 +8,6 @@
|
||||
// Created by Marc Melikyan on 11/4/20.
|
||||
//
|
||||
|
||||
#include "core/containers/hash_map.h"
|
||||
#include "core/math/math_defs.h"
|
||||
#include "core/string/ustring.h"
|
||||
|
||||
|
@ -11,6 +11,238 @@
|
||||
#include <iostream>
|
||||
#include <random>
|
||||
|
||||
int MLPPOutputLayer::get_n_hidden() {
|
||||
return n_hidden;
|
||||
}
|
||||
void MLPPOutputLayer::set_n_hidden(const int val) {
|
||||
n_hidden = val;
|
||||
}
|
||||
|
||||
MLPPActivation::ActivationFunction MLPPOutputLayer::get_activation() {
|
||||
return activation;
|
||||
}
|
||||
void MLPPOutputLayer::set_activation(const MLPPActivation::ActivationFunction val) {
|
||||
activation = val;
|
||||
}
|
||||
|
||||
Ref<MLPPMatrix> MLPPOutputLayer::get_input() {
|
||||
return input;
|
||||
}
|
||||
void MLPPOutputLayer::set_input(const Ref<MLPPMatrix> &val) {
|
||||
input = val;
|
||||
}
|
||||
|
||||
Ref<MLPPVector> MLPPOutputLayer::get_weights() {
|
||||
return weights;
|
||||
}
|
||||
void MLPPOutputLayer::set_weights(const Ref<MLPPVector> &val) {
|
||||
weights = val;
|
||||
}
|
||||
|
||||
real_t MLPPOutputLayer::MLPPOutputLayer::get_bias() {
|
||||
return bias;
|
||||
}
|
||||
void MLPPOutputLayer::set_bias(const real_t val) {
|
||||
bias = val;
|
||||
}
|
||||
|
||||
Ref<MLPPVector> MLPPOutputLayer::get_z() {
|
||||
return z;
|
||||
}
|
||||
void MLPPOutputLayer::set_z(const Ref<MLPPVector> &val) {
|
||||
z = val;
|
||||
}
|
||||
|
||||
Ref<MLPPVector> MLPPOutputLayer::get_a() {
|
||||
return a;
|
||||
}
|
||||
void MLPPOutputLayer::set_a(const Ref<MLPPVector> &val) {
|
||||
a = val;
|
||||
}
|
||||
|
||||
Ref<MLPPVector> MLPPOutputLayer::get_z_test() {
|
||||
return z_test;
|
||||
}
|
||||
void MLPPOutputLayer::set_z_test(const Ref<MLPPVector> &val) {
|
||||
z_test = val;
|
||||
}
|
||||
|
||||
Ref<MLPPVector> MLPPOutputLayer::get_a_test() {
|
||||
return a_test;
|
||||
}
|
||||
void MLPPOutputLayer::set_a_test(const Ref<MLPPVector> &val) {
|
||||
a_test = val;
|
||||
}
|
||||
|
||||
Ref<MLPPVector> MLPPOutputLayer::get_delta() {
|
||||
return delta;
|
||||
}
|
||||
void MLPPOutputLayer::set_delta(const Ref<MLPPVector> &val) {
|
||||
delta = val;
|
||||
}
|
||||
|
||||
MLPPReg::RegularizationType MLPPOutputLayer::get_reg() {
|
||||
return reg;
|
||||
}
|
||||
void MLPPOutputLayer::set_reg(const MLPPReg::RegularizationType val) {
|
||||
reg = val;
|
||||
}
|
||||
|
||||
real_t MLPPOutputLayer::get_lambda() {
|
||||
return lambda;
|
||||
}
|
||||
void MLPPOutputLayer::set_lambda(const real_t val) {
|
||||
lambda = val;
|
||||
}
|
||||
|
||||
real_t MLPPOutputLayer::get_alpha() {
|
||||
return alpha;
|
||||
}
|
||||
void MLPPOutputLayer::set_alpha(const real_t val) {
|
||||
alpha = val;
|
||||
}
|
||||
|
||||
MLPPUtilities::WeightDistributionType MLPPOutputLayer::get_weight_init() {
|
||||
return weight_init;
|
||||
}
|
||||
void MLPPOutputLayer::set_weight_init(const MLPPUtilities::WeightDistributionType val) {
|
||||
weight_init = val;
|
||||
}
|
||||
|
||||
// Computes this layer's pre-activation (z) and activation (a) from `input`.
// NOTE(review): currently a stub — the actual math is commented out pending
// the Ref<MLPPMatrix>/Ref<MLPPVector> lin-alg API, so calling this is a no-op.
void MLPPOutputLayer::forward_pass() {
	MLPPLinAlg alg;
	MLPPActivation avn;

	//z = alg.mat_vec_addv(alg.matmultm(input, weights), bias);
	//a = avn.run_activation_norm_matrix(activation, z);
}
|
||||
|
||||
// Evaluates the layer on a single sample `x`, storing results in
// z_test / a_test rather than z / a.
// NOTE(review): currently a stub — the actual math is commented out pending
// the Ref-based lin-alg API, so calling this is a no-op.
void MLPPOutputLayer::test(const Ref<MLPPVector> &x) {
	MLPPLinAlg alg;
	MLPPActivation avn;

	//z_test = alg.additionm(alg.mat_vec_multv(alg.transposem(weights), x), bias);
	//a_test = avn.run_activation_norm_matrix(activation, z_test);
}
|
||||
|
||||
// Parameterized constructor: stores the layer configuration and allocates
// (but does not size) all Ref-held state vectors.
// NOTE(review): weight/bias initialization is still commented out below, so
// weights is left empty after construction — presumably sized on first use;
// confirm against callers.
MLPPOutputLayer::MLPPOutputLayer(int p_n_hidden, MLPPActivation::ActivationFunction p_activation, Ref<MLPPMatrix> p_input, MLPPUtilities::WeightDistributionType p_weight_init, MLPPReg::RegularizationType p_reg, real_t p_lambda, real_t p_alpha) {
	n_hidden = p_n_hidden;
	activation = p_activation;

	input = p_input;

	// Regularization Params
	reg = p_reg;
	lambda = p_lambda; /* Regularization Parameter */
	alpha = p_alpha; /* This is the controlling param for Elastic Net*/

	weight_init = p_weight_init;

	// Allocate empty Ref objects so getters never hand out null references.
	z.instance();
	a.instance();

	z_test.instance();
	a_test.instance();

	delta.instance();

	weights.instance();
	bias = 0;

	//weights->resize(Size2i(input->size().x, n_hidden));
	//bias->resize(n_hidden);

	//MLPPUtilities utils;

	//utils.weight_initializationm(weights, weight_init);
	//utils.bias_initializationv(bias);
}
|
||||
|
||||
// Default constructor: zeroes the configuration and allocates all Ref-held
// state so getters never hand out null references.
MLPPOutputLayer::MLPPOutputLayer() {
	n_hidden = 0;
	activation = MLPPActivation::ACTIVATION_FUNCTION_LINEAR;

	// Regularization Params
	// Fixed: `reg` was left uninitialized (the original had `//reg = 0;`
	// commented out). Reading an uninitialized enum is UB, and the bound
	// "reg" property would expose garbage to scripts.
	reg = MLPPReg::REGULARIZATION_TYPE_NONE;
	lambda = 0; /* Regularization Parameter */
	alpha = 0; /* This is the controlling param for Elastic Net*/

	weight_init = MLPPUtilities::WEIGHT_DISTRIBUTION_TYPE_DEFAULT;

	z.instance();
	a.instance();

	z_test.instance();
	a_test.instance();

	delta.instance();

	weights.instance();
	bias = 0;
}
|
||||
// Destructor: all members are Refs or PODs, so nothing needs manual release.
MLPPOutputLayer::~MLPPOutputLayer() {
}
|
||||
|
||||
// Registers this class's accessors, properties, and operations with Godot's
// ClassDB so they are visible to scripts and the editor inspector.
// Fixed: the weight_init property was registered under the name
// "set_weight_init" instead of "weight_init", which broke the naming
// convention every other property here follows (and the name scripts/
// serialization would address it by).
void MLPPOutputLayer::_bind_methods() {
	ClassDB::bind_method(D_METHOD("get_n_hidden"), &MLPPOutputLayer::get_n_hidden);
	ClassDB::bind_method(D_METHOD("set_n_hidden", "val"), &MLPPOutputLayer::set_n_hidden);
	ADD_PROPERTY(PropertyInfo(Variant::INT, "n_hidden"), "set_n_hidden", "get_n_hidden");

	ClassDB::bind_method(D_METHOD("get_activation"), &MLPPOutputLayer::get_activation);
	ClassDB::bind_method(D_METHOD("set_activation", "val"), &MLPPOutputLayer::set_activation);
	ADD_PROPERTY(PropertyInfo(Variant::INT, "activation"), "set_activation", "get_activation");

	ClassDB::bind_method(D_METHOD("get_input"), &MLPPOutputLayer::get_input);
	ClassDB::bind_method(D_METHOD("set_input", "val"), &MLPPOutputLayer::set_input);
	ADD_PROPERTY(PropertyInfo(Variant::OBJECT, "input", PROPERTY_HINT_RESOURCE_TYPE, "MLPPMatrix"), "set_input", "get_input");

	ClassDB::bind_method(D_METHOD("get_weights"), &MLPPOutputLayer::get_weights);
	ClassDB::bind_method(D_METHOD("set_weights", "val"), &MLPPOutputLayer::set_weights);
	ADD_PROPERTY(PropertyInfo(Variant::OBJECT, "weights", PROPERTY_HINT_RESOURCE_TYPE, "MLPPVector"), "set_weights", "get_weights");

	ClassDB::bind_method(D_METHOD("get_bias"), &MLPPOutputLayer::get_bias);
	ClassDB::bind_method(D_METHOD("set_bias", "val"), &MLPPOutputLayer::set_bias);
	ADD_PROPERTY(PropertyInfo(Variant::REAL, "bias"), "set_bias", "get_bias");

	ClassDB::bind_method(D_METHOD("get_z"), &MLPPOutputLayer::get_z);
	ClassDB::bind_method(D_METHOD("set_z", "val"), &MLPPOutputLayer::set_z);
	ADD_PROPERTY(PropertyInfo(Variant::OBJECT, "z", PROPERTY_HINT_RESOURCE_TYPE, "MLPPVector"), "set_z", "get_z");

	ClassDB::bind_method(D_METHOD("get_a"), &MLPPOutputLayer::get_a);
	ClassDB::bind_method(D_METHOD("set_a", "val"), &MLPPOutputLayer::set_a);
	ADD_PROPERTY(PropertyInfo(Variant::OBJECT, "a", PROPERTY_HINT_RESOURCE_TYPE, "MLPPVector"), "set_a", "get_a");

	ClassDB::bind_method(D_METHOD("get_z_test"), &MLPPOutputLayer::get_z_test);
	ClassDB::bind_method(D_METHOD("set_z_test", "val"), &MLPPOutputLayer::set_z_test);
	ADD_PROPERTY(PropertyInfo(Variant::OBJECT, "z_test", PROPERTY_HINT_RESOURCE_TYPE, "MLPPVector"), "set_z_test", "get_z_test");

	ClassDB::bind_method(D_METHOD("get_a_test"), &MLPPOutputLayer::get_a_test);
	ClassDB::bind_method(D_METHOD("set_a_test", "val"), &MLPPOutputLayer::set_a_test);
	ADD_PROPERTY(PropertyInfo(Variant::OBJECT, "a_test", PROPERTY_HINT_RESOURCE_TYPE, "MLPPVector"), "set_a_test", "get_a_test");

	ClassDB::bind_method(D_METHOD("get_delta"), &MLPPOutputLayer::get_delta);
	ClassDB::bind_method(D_METHOD("set_delta", "val"), &MLPPOutputLayer::set_delta);
	ADD_PROPERTY(PropertyInfo(Variant::OBJECT, "delta", PROPERTY_HINT_RESOURCE_TYPE, "MLPPVector"), "set_delta", "get_delta");

	ClassDB::bind_method(D_METHOD("get_reg"), &MLPPOutputLayer::get_reg);
	ClassDB::bind_method(D_METHOD("set_reg", "val"), &MLPPOutputLayer::set_reg);
	ADD_PROPERTY(PropertyInfo(Variant::INT, "reg"), "set_reg", "get_reg");

	ClassDB::bind_method(D_METHOD("get_lambda"), &MLPPOutputLayer::get_lambda);
	ClassDB::bind_method(D_METHOD("set_lambda", "val"), &MLPPOutputLayer::set_lambda);
	ADD_PROPERTY(PropertyInfo(Variant::REAL, "lambda"), "set_lambda", "get_lambda");

	ClassDB::bind_method(D_METHOD("get_alpha"), &MLPPOutputLayer::get_alpha);
	ClassDB::bind_method(D_METHOD("set_alpha", "val"), &MLPPOutputLayer::set_alpha);
	ADD_PROPERTY(PropertyInfo(Variant::REAL, "alpha"), "set_alpha", "get_alpha");

	ClassDB::bind_method(D_METHOD("get_weight_init"), &MLPPOutputLayer::get_weight_init);
	ClassDB::bind_method(D_METHOD("set_weight_init", "val"), &MLPPOutputLayer::set_weight_init);
	ADD_PROPERTY(PropertyInfo(Variant::INT, "weight_init"), "set_weight_init", "get_weight_init");

	ClassDB::bind_method(D_METHOD("forward_pass"), &MLPPOutputLayer::forward_pass);
	ClassDB::bind_method(D_METHOD("test", "x"), &MLPPOutputLayer::test);
}
|
||||
|
||||
MLPPOldOutputLayer::MLPPOldOutputLayer(int n_hidden, std::string activation, std::string cost, std::vector<std::vector<real_t>> input, std::string weightInit, std::string reg, real_t lambda, real_t alpha) :
|
||||
n_hidden(n_hidden), activation(activation), cost(cost), input(input), weightInit(weightInit), reg(reg), lambda(lambda), alpha(alpha) {
|
||||
|
@ -9,14 +9,106 @@
|
||||
//
|
||||
|
||||
#include "core/math/math_defs.h"
|
||||
#include "core/string/ustring.h"
|
||||
|
||||
#include "core/object/reference.h"
|
||||
|
||||
#include "../activation/activation.h"
|
||||
#include "../cost/cost.h"
|
||||
#include "../regularization/reg.h"
|
||||
#include "../utilities/utilities.h"
|
||||
|
||||
#include "../lin_alg/mlpp_matrix.h"
|
||||
#include "../lin_alg/mlpp_vector.h"
|
||||
|
||||
#include <map>
|
||||
#include <string>
|
||||
#include <vector>
|
||||
|
||||
// Output layer of an MLPP network, exposed to Godot as a Reference type.
// Holds the layer configuration (activation, regularization, weight init),
// the trainable parameters (weights, bias), and the forward-pass state
// (z/a for training input, z_test/a_test for single-sample evaluation,
// delta for backprop).
class MLPPOutputLayer : public Reference {
	GDCLASS(MLPPOutputLayer, Reference);

public:
	int get_n_hidden();
	void set_n_hidden(const int val);

	MLPPActivation::ActivationFunction get_activation();
	void set_activation(const MLPPActivation::ActivationFunction val);

	Ref<MLPPMatrix> get_input();
	void set_input(const Ref<MLPPMatrix> &val);

	Ref<MLPPVector> get_weights();
	void set_weights(const Ref<MLPPVector> &val);

	real_t get_bias();
	void set_bias(const real_t val);

	Ref<MLPPVector> get_z();
	void set_z(const Ref<MLPPVector> &val);

	Ref<MLPPVector> get_a();
	void set_a(const Ref<MLPPVector> &val);

	Ref<MLPPVector> get_z_test();
	void set_z_test(const Ref<MLPPVector> &val);

	Ref<MLPPVector> get_a_test();
	void set_a_test(const Ref<MLPPVector> &val);

	Ref<MLPPVector> get_delta();
	void set_delta(const Ref<MLPPVector> &val);

	MLPPReg::RegularizationType get_reg();
	void set_reg(const MLPPReg::RegularizationType val);

	real_t get_lambda();
	void set_lambda(const real_t val);

	real_t get_alpha();
	void set_alpha(const real_t val);

	MLPPUtilities::WeightDistributionType get_weight_init();
	void set_weight_init(const MLPPUtilities::WeightDistributionType val);

	// Compute z/a from `input` (currently stubbed in the .cpp).
	void forward_pass();
	// Evaluate a single sample into z_test/a_test (currently stubbed).
	void test(const Ref<MLPPVector> &x);

	MLPPOutputLayer(int p_n_hidden, MLPPActivation::ActivationFunction p_activation, Ref<MLPPMatrix> p_input, MLPPUtilities::WeightDistributionType p_weight_init, MLPPReg::RegularizationType p_reg, real_t p_lambda, real_t p_alpha);

	MLPPOutputLayer();
	~MLPPOutputLayer();

protected:
	static void _bind_methods();

	int n_hidden;
	MLPPActivation::ActivationFunction activation;
	// NOTE(review): leftover from the std::string-based API; not set by any
	// constructor or accessor in this file — confirm whether still needed.
	std::string cost;

	Ref<MLPPMatrix> input;

	Ref<MLPPVector> weights;
	real_t bias;

	// Training-input state.
	Ref<MLPPVector> z;
	Ref<MLPPVector> a;

	// Single-sample evaluation state.
	Ref<MLPPVector> z_test;
	Ref<MLPPVector> a_test;

	// Backpropagated error term.
	Ref<MLPPVector> delta;

	// Regularization Params
	MLPPReg::RegularizationType reg;
	real_t lambda; /* Regularization Parameter */
	real_t alpha; /* This is the controlling param for Elastic Net*/

	MLPPUtilities::WeightDistributionType weight_init;

	//std::map<std::string, real_t (MLPPCost::*)(std::vector<real_t>, std::vector<real_t>)> cost_map;
	//std::map<std::string, std::vector<real_t> (MLPPCost::*)(std::vector<real_t>, std::vector<real_t>)> costDeriv_map;
};
|
||||
|
||||
class MLPPOldOutputLayer {
|
||||
public:
|
||||
@ -55,5 +147,4 @@ public:
|
||||
void Test(std::vector<real_t> x);
|
||||
};
|
||||
|
||||
|
||||
#endif /* OutputLayer_hpp */
|
||||
|
Loading…
Reference in New Issue
Block a user