From a0f6078afe29f4de11ceb4354d2fbc4aff3f60bf Mon Sep 17 00:00:00 2001 From: Relintai Date: Sat, 4 Feb 2023 01:43:19 +0100 Subject: [PATCH] Fix warning. --- mlpp/hidden_layer/hidden_layer.cpp | 11 +++++++++-- 1 file changed, 9 insertions(+), 2 deletions(-) diff --git a/mlpp/hidden_layer/hidden_layer.cpp b/mlpp/hidden_layer/hidden_layer.cpp index 8468377..2aae58b 100644 --- a/mlpp/hidden_layer/hidden_layer.cpp +++ b/mlpp/hidden_layer/hidden_layer.cpp @@ -244,8 +244,15 @@ void MLPPHiddenLayer::_bind_methods() { ClassDB::bind_method(D_METHOD("test", "x"), &MLPPHiddenLayer::test); } -MLPPOldHiddenLayer::MLPPOldHiddenLayer(int n_hidden, std::string activation, std::vector<std::vector<real_t>> input, std::string weightInit, std::string reg, real_t lambda, real_t alpha) : - n_hidden(n_hidden), activation(activation), input(input), weightInit(weightInit), reg(reg), lambda(lambda), alpha(alpha) { +MLPPOldHiddenLayer::MLPPOldHiddenLayer(int p_n_hidden, std::string p_activation, std::vector<std::vector<real_t>> p_input, std::string p_weightInit, std::string p_reg, real_t p_lambda, real_t p_alpha) { + n_hidden = p_n_hidden; + activation = p_activation; + input = p_input; + weightInit = p_weightInit; + reg = p_reg; + lambda = p_lambda; + alpha = p_alpha; + weights = MLPPUtilities::weightInitialization(input[0].size(), n_hidden, weightInit); bias = MLPPUtilities::biasInitialization(n_hidden);