#ifndef MLPP_HIDDEN_LAYER_H
#define MLPP_HIDDEN_LAYER_H

//
//  HiddenLayer.hpp
//
//  Created by Marc Melikyan on 11/4/20.
//

#include "core/math/math_defs.h"
#include "core/string/ustring.h"

#include "core/object/reference.h"

#include "../activation/activation.h"
#include "../regularization/reg.h"
#include "../utilities/utilities.h"

#include "../lin_alg/mlpp_matrix.h"
#include "../lin_alg/mlpp_vector.h"

#include <map>
#include <string>
#include <vector>

class MLPPHiddenLayer : public Reference {
	GDCLASS(MLPPHiddenLayer, Reference);

public:
	int get_n_hidden() const;
	void set_n_hidden(const int val);

	MLPPActivation::ActivationFunction get_activation() const;
	void set_activation(const MLPPActivation::ActivationFunction val);

	Ref<MLPPMatrix> get_input();
	void set_input(const Ref<MLPPMatrix> &val);

	Ref<MLPPMatrix> get_weights();
	void set_weights(const Ref<MLPPMatrix> &val);

	Ref<MLPPVector> get_bias();
	void set_bias(const Ref<MLPPVector> &val);

	Ref<MLPPMatrix> get_z();
	void set_z(const Ref<MLPPMatrix> &val);

	Ref<MLPPMatrix> get_a();
	void set_a(const Ref<MLPPMatrix> &val);

	Ref<MLPPVector> get_z_test();
	void set_z_test(const Ref<MLPPVector> &val);

	Ref<MLPPVector> get_a_test();
	void set_a_test(const Ref<MLPPVector> &val);

	Ref<MLPPMatrix> get_delta();
	void set_delta(const Ref<MLPPMatrix> &val);

	MLPPReg::RegularizationType get_reg() const;
	void set_reg(const MLPPReg::RegularizationType val);

	real_t get_lambda() const;
	void set_lambda(const real_t val);

	real_t get_alpha() const;
	void set_alpha(const real_t val);

	MLPPUtilities::WeightDistributionType get_weight_init() const;
	void set_weight_init(const MLPPUtilities::WeightDistributionType val);

	bool is_initialized();
	void initialize();

	void forward_pass();
	void test(const Ref<MLPPVector> &x);

	MLPPHiddenLayer(int p_n_hidden, MLPPActivation::ActivationFunction p_activation, Ref<MLPPMatrix> p_input, MLPPUtilities::WeightDistributionType p_weight_init, MLPPReg::RegularizationType p_reg, real_t p_lambda, real_t p_alpha);

	MLPPHiddenLayer();
	~MLPPHiddenLayer();

protected:
	static void _bind_methods();

	int n_hidden;
	MLPPActivation::ActivationFunction activation;

	Ref<MLPPMatrix> input;

	Ref<MLPPMatrix> weights;
	Ref<MLPPVector> bias;

	Ref<MLPPMatrix> z;
	Ref<MLPPMatrix> a;

	Ref<MLPPVector> z_test;
	Ref<MLPPVector> a_test;

	Ref<MLPPMatrix> delta;

	// Regularization Params
	MLPPReg::RegularizationType reg;
	real_t lambda; /* Regularization Parameter */
	real_t alpha; /* This is the controlling param for Elastic Net */

	MLPPUtilities::WeightDistributionType weight_init;

	bool _initialized;
};

class MLPPOldHiddenLayer {
public:
	MLPPOldHiddenLayer(int n_hidden, std::string activation, std::vector<std::vector<real_t>> input, std::string weightInit, std::string reg, real_t lambda, real_t alpha);

	int n_hidden;
	std::string activation;

	std::vector<std::vector<real_t>> input;

	std::vector<std::vector<real_t>> weights;
	std::vector<real_t> bias;

	std::vector<std::vector<real_t>> z;
	std::vector<std::vector<real_t>> a;

	std::map<std::string, std::vector<std::vector<real_t>> (MLPPActivation::*)(std::vector<std::vector<real_t>>, bool)> activation_map;
	std::map<std::string, std::vector<real_t> (MLPPActivation::*)(std::vector<real_t>, bool)> activationTest_map;

	std::vector<real_t> z_test;
	std::vector<real_t> a_test;

	std::vector<std::vector<real_t>> delta;

	// Regularization Params
	std::string reg;
	real_t lambda; /* Regularization Parameter */
	real_t alpha; /* This is the controlling param for Elastic Net */

	std::string weightInit;

	void forwardPass();
	void Test(std::vector<real_t> x);
};

#endif /* HiddenLayer_hpp */
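
// Usage sketch (comment only, not compiled as part of this header): a minimal example of
// how a hidden layer might be set up and run through a forward pass using the setters
// declared above. The layer width, the specific enum constants, and the MLPPMatrix
// construction helpers shown here are assumptions made for illustration; check the
// MLPPActivation, MLPPReg, MLPPUtilities, and MLPPMatrix headers for the actual names.
//
//	Ref<MLPPMatrix> input_set; // Assumed: one sample per row, one feature per column.
//	input_set.instance();
//
//	Ref<MLPPHiddenLayer> layer;
//	layer.instance();
//	layer->set_n_hidden(3); // Arbitrary example width.
//	layer->set_activation(MLPPActivation::ACTIVATION_FUNCTION_SIGMOID); // Assumed enum name.
//	layer->set_input(input_set);
//	layer->set_weight_init(MLPPUtilities::WEIGHT_DISTRIBUTION_TYPE_DEFAULT); // Assumed enum name.
//	layer->set_reg(MLPPReg::REGULARIZATION_TYPE_NONE); // Assumed enum name.
//	layer->set_lambda(0.5);
//	layer->set_alpha(0.5);
//
//	layer->initialize(); // Allocates weights and bias per weight_init.
//	layer->forward_pass(); // Fills z and a; get_a() then returns the layer's activations.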