Use the new MLPPReg::RegularizationType instead of a String in MLPPHiddenLayer.

This commit is contained in:
Relintai 2023-02-04 00:58:48 +01:00
parent bda7a7aee4
commit bbe334856b
2 changed files with 4 additions and 3 deletions

View File

@@ -27,7 +27,7 @@ void MLPPHiddenLayer::test(const Ref<MLPPVector> &x) {
a_test = avn.run_activation_norm_matrix(activation, z_test);
}
MLPPHiddenLayer::MLPPHiddenLayer(int p_n_hidden, MLPPActivation::ActivationFunction p_activation, Ref<MLPPMatrix> p_input, MLPPUtilities::WeightDistributionType p_weight_init, String p_reg, real_t p_lambda, real_t p_alpha) {
MLPPHiddenLayer::MLPPHiddenLayer(int p_n_hidden, MLPPActivation::ActivationFunction p_activation, Ref<MLPPMatrix> p_input, MLPPUtilities::WeightDistributionType p_weight_init, MLPPReg::RegularizationType p_reg, real_t p_lambda, real_t p_alpha) {
n_hidden = p_n_hidden;
activation = p_activation;

View File

@@ -16,6 +16,7 @@
#include "../activation/activation.h"
#include "../utilities/utilities.h"
#include "../regularization/reg.h"
#include "../lin_alg/mlpp_matrix.h"
#include "../lin_alg/mlpp_vector.h"
@@ -45,7 +46,7 @@ public:
Ref<MLPPMatrix> delta;
// Regularization Params
String reg;
MLPPReg::RegularizationType reg;
real_t lambda; /* Regularization Parameter */
real_t alpha; /* This is the controlling param for Elastic Net*/
@@ -54,7 +55,7 @@ public:
void forward_pass();
void test(const Ref<MLPPVector> &x);
MLPPHiddenLayer(int p_n_hidden, MLPPActivation::ActivationFunction p_activation, Ref<MLPPMatrix> p_input, MLPPUtilities::WeightDistributionType p_weight_init, String p_reg, real_t p_lambda, real_t p_alpha);
MLPPHiddenLayer(int p_n_hidden, MLPPActivation::ActivationFunction p_activation, Ref<MLPPMatrix> p_input, MLPPUtilities::WeightDistributionType p_weight_init, MLPPReg::RegularizationType p_reg, real_t p_lambda, real_t p_alpha);
MLPPHiddenLayer();
~MLPPHiddenLayer();