pmlpp/mlpp/hidden_layer/hidden_layer.h

#ifndef MLPP_HIDDEN_LAYER_H
#define MLPP_HIDDEN_LAYER_H
//
// HiddenLayer.hpp
//
// Created by Marc Melikyan on 11/4/20.
//
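// This header declares two variants of the same hidden-layer abstraction for a
// multilayer perceptron: MLPPHiddenLayer, the engine-facing, Reference-counted
// version built on MLPPMatrix/MLPPVector, and MLPPOldHiddenLayer, which appears
// to be the original std::vector-based API kept around while the port is in
// progress. Both hold the layer's weights and bias, the pre-activation z, the
// activation output a, the backpropagation delta, and the regularization settings.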
#include "core/containers/hash_map.h"
#include "core/math/math_defs.h"
#include "core/string/ustring.h"
#include "core/object/reference.h"

#include "../activation/activation.h"
#include "../lin_alg/mlpp_matrix.h"
#include "../lin_alg/mlpp_vector.h"

#include <map>
#include <string>
#include <vector>
class MLPPHiddenLayer : public Reference {
	GDCLASS(MLPPHiddenLayer, Reference);

public:
	int n_hidden; // Number of neurons in this layer.
	int activation; // Id of the activation function to apply.

	Ref<MLPPMatrix> input;
	Ref<MLPPMatrix> weights;
	Ref<MLPPVector> bias;

	Ref<MLPPMatrix> z; // Pre-activation: input * weights + bias.
	Ref<MLPPMatrix> a; // Layer output: activation applied to z.

	// Map an activation id to the corresponding MLPPActivation member function
	// (matrix overload for batched input, vector overload for single-sample tests).
	HashMap<int, Ref<MLPPMatrix> (MLPPActivation::*)(const Ref<MLPPMatrix> &, bool)> activation_map;
	HashMap<int, Ref<MLPPVector> (MLPPActivation::*)(const Ref<MLPPVector> &, bool)> activation_test_map;

	Ref<MLPPVector> z_test;
	Ref<MLPPVector> a_test;

	Ref<MLPPMatrix> delta; // Error term used during backpropagation.

	// Regularization Params
	String reg; // Regularization type.
	real_t lambda; /* Regularization Parameter */
	real_t alpha; /* This is the controlling param for Elastic Net */

	String weight_init; // Weight initialization scheme.

	void forward_pass();
	void test(const Ref<MLPPVector> &x);

	MLPPHiddenLayer(int p_n_hidden, int p_activation, Ref<MLPPMatrix> p_input, String p_weight_init, String p_reg, real_t p_lambda, real_t p_alpha);

	MLPPHiddenLayer();
	~MLPPHiddenLayer();
};
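/*
	Usage sketch (not from the original header): one way the Reference-based layer
	could be driven. The constructor argument order comes from the declaration
	above; the concrete values ("XavierNormal", "Ridge", activation id 0) and the
	use of memnew()/Ref::instance() are illustrative assumptions, not the module's
	documented conventions.

		Ref<MLPPMatrix> input_set;
		input_set.instance();
		// ... fill input_set with one row per training sample ...

		Ref<MLPPHiddenLayer> layer = Ref<MLPPHiddenLayer>(
				memnew(MLPPHiddenLayer(16, 0, input_set, "XavierNormal", "Ridge", 0.5, 0.5)));

		layer->forward_pass(); // computes layer->z and layer->a for the whole batch
		layer->test(x);        // x: Ref<MLPPVector> holding one sample; result lands in layer->a_test
*/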
class MLPPOldHiddenLayer {
public:
	MLPPOldHiddenLayer(int n_hidden, std::string activation, std::vector<std::vector<real_t>> input, std::string weightInit, std::string reg, real_t lambda, real_t alpha);

	int n_hidden;
	std::string activation;

	std::vector<std::vector<real_t>> input;

	std::vector<std::vector<real_t>> weights;
	std::vector<real_t> bias;

	std::vector<std::vector<real_t>> z; // Pre-activation: input * weights + bias.
	std::vector<std::vector<real_t>> a; // Layer output: activation applied to z.

	// Map an activation name to the corresponding MLPPActivation member function
	// (matrix overload for batched input, vector overload for single-sample tests).
	std::map<std::string, std::vector<std::vector<real_t>> (MLPPActivation::*)(std::vector<std::vector<real_t>>, bool)> activation_map;
	std::map<std::string, std::vector<real_t> (MLPPActivation::*)(std::vector<real_t>, bool)> activationTest_map;

	std::vector<real_t> z_test;
	std::vector<real_t> a_test;

	std::vector<std::vector<real_t>> delta; // Error term used during backpropagation.

	// Regularization Params
	std::string reg;
	real_t lambda; /* Regularization Parameter */
	real_t alpha; /* This is the controlling param for Elastic Net */

	std::string weightInit;

	void forwardPass();
	void Test(std::vector<real_t> x);
};
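/*
	Note on the activation maps (both classes use the same pattern): the mapped
	value is a pointer to an MLPPActivation member function, so calling it needs
	an MLPPActivation instance and the .* / ->* operator. A minimal sketch of the
	dispatch, assuming MLPPActivation provides a sigmoid(z, deriv) overload with
	the matching signature (the exact method set is not declared in this header):

		MLPPActivation avn;
		activation_map["Sigmoid"] = &MLPPActivation::sigmoid;    // register once
		a = (avn.*activation_map[activation])(z, false);         // a = f(z)
		// a second argument of true presumably selects the derivative f'(z)
*/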
#endif /* MLPP_HIDDEN_LAYER_H */