pmlpp/mlpp/hidden_layer/hidden_layer.h

#ifndef MLPP_HIDDEN_LAYER_H
#define MLPP_HIDDEN_LAYER_H
//
//  hidden_layer.h
//
//  Created by Marc Melikyan on 11/4/20.
//
#include "core/math/math_defs.h"

#include "core/string/ustring.h"
#include "core/object/reference.h"

#include "../activation/activation.h"
#include "../regularization/reg.h"
#include "../utilities/utilities.h"

#include "../lin_alg/mlpp_matrix.h"
#include "../lin_alg/mlpp_vector.h"

#include <map>
#include <string>
#include <vector>
class MLPPHiddenLayer : public Reference {
	GDCLASS(MLPPHiddenLayer, Reference);

public:
	// Accessors for the layer's hyperparameters and internal state.
	int get_n_hidden() const;
	void set_n_hidden(const int val);

	MLPPActivation::ActivationFunction get_activation() const;
	void set_activation(const MLPPActivation::ActivationFunction val);

	Ref<MLPPMatrix> get_input();
	void set_input(const Ref<MLPPMatrix> &val);

	Ref<MLPPMatrix> get_weights();
	void set_weights(const Ref<MLPPMatrix> &val);

	Ref<MLPPVector> get_bias();
	void set_bias(const Ref<MLPPVector> &val);

	Ref<MLPPMatrix> get_z();
	void set_z(const Ref<MLPPMatrix> &val);

	Ref<MLPPMatrix> get_a();
	void set_a(const Ref<MLPPMatrix> &val);

	Ref<MLPPVector> get_z_test();
	void set_z_test(const Ref<MLPPVector> &val);

	Ref<MLPPVector> get_a_test();
	void set_a_test(const Ref<MLPPVector> &val);

	Ref<MLPPMatrix> get_delta();
	void set_delta(const Ref<MLPPMatrix> &val);

	MLPPReg::RegularizationType get_reg() const;
	void set_reg(const MLPPReg::RegularizationType val);

	real_t get_lambda() const;
	void set_lambda(const real_t val);

	real_t get_alpha() const;
	void set_alpha(const real_t val);

	MLPPUtilities::WeightDistributionType get_weight_init() const;
	void set_weight_init(const MLPPUtilities::WeightDistributionType val);

	bool is_initialized();
	// Sets up the weight matrix and bias vector using the configured
	// weight distribution.
	void initialize();

	// Computes the pre-activation (z) and activated output (a) for the
	// stored input matrix.
	void forward_pass();
	// Forward pass for a single sample; results are stored in z_test / a_test.
	void test(const Ref<MLPPVector> &x);

	MLPPHiddenLayer(int p_n_hidden, MLPPActivation::ActivationFunction p_activation, Ref<MLPPMatrix> p_input, MLPPUtilities::WeightDistributionType p_weight_init, MLPPReg::RegularizationType p_reg, real_t p_lambda, real_t p_alpha);
	MLPPHiddenLayer();
	~MLPPHiddenLayer();
protected:
	static void _bind_methods();

	int _n_hidden;
	MLPPActivation::ActivationFunction _activation;

	Ref<MLPPMatrix> _input;

	Ref<MLPPMatrix> _weights;
	Ref<MLPPVector> _bias;

	// Pre-activation (z) and activated output (a) for the stored input.
	Ref<MLPPMatrix> _z;
	Ref<MLPPMatrix> _a;

	// The same quantities for a single test sample.
	Ref<MLPPVector> _z_test;
	Ref<MLPPVector> _a_test;

	// Error term for this layer, used during backpropagation.
	Ref<MLPPMatrix> _delta;

	// Regularization params.
	MLPPReg::RegularizationType _reg;
	real_t _lambda; // Regularization strength.
	real_t _alpha; // Controlling param for Elastic Net (L1/L2 mix).

	MLPPUtilities::WeightDistributionType _weight_init;

	bool _initialized;
};
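// A minimal usage sketch (illustrative only): the activation, weight
// distribution, and regularization enum values shown below are assumptions
// about the companion MLPPActivation / MLPPUtilities / MLPPReg headers, not
// something this header guarantees.
//
//	Ref<MLPPMatrix> input;
//	input.instance();
//	// ... fill `input` with one row per training sample ...
//
//	Ref<MLPPHiddenLayer> layer = memnew(MLPPHiddenLayer(
//			16, MLPPActivation::ACTIVATION_FUNCTION_SIGMOID, input,
//			MLPPUtilities::WEIGHT_DISTRIBUTION_TYPE_DEFAULT,
//			MLPPReg::REGULARIZATION_TYPE_NONE, 0.5, 0.5));
//
//	if (!layer->is_initialized()) {
//		layer->initialize();
//	}
//	layer->forward_pass(); // fills get_z() / get_a() for the whole batch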
#endif // MLPP_HIDDEN_LAYER_H