Renamed MultiOutputLayer to OldMultiOutputLayer.

commit a34628e8c4
parent 53ad617681
Author: Relintai
Date:   2023-02-04 13:36:52 +01:00

4 changed files with 16 additions and 8 deletions


@@ -151,9 +151,9 @@ void MLPPMANN::addLayer(int n_hidden, std::string activation, std::string weight

 void MLPPMANN::addOutputLayer(std::string activation, std::string loss, std::string weightInit, std::string reg, real_t lambda, real_t alpha) {
 	if (!network.empty()) {
-		outputLayer = new MLPPMultiOutputLayer(n_output, network[0].n_hidden, activation, loss, network[network.size() - 1].a, weightInit, reg, lambda, alpha);
+		outputLayer = new MLPPOldMultiOutputLayer(n_output, network[0].n_hidden, activation, loss, network[network.size() - 1].a, weightInit, reg, lambda, alpha);
 	} else {
-		outputLayer = new MLPPMultiOutputLayer(n_output, k, activation, loss, inputSet, weightInit, reg, lambda, alpha);
+		outputLayer = new MLPPOldMultiOutputLayer(n_output, k, activation, loss, inputSet, weightInit, reg, lambda, alpha);
 	}
 }

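Both call sites in MLPPMANN::addOutputLayer swap the constructor in place. For larger renames, a temporary type alias can keep untouched call sites compiling while the change propagates; a minimal sketch, assuming the renamed class lives in a header named multi_output_layer_old.h (a hypothetical path, not shown in this commit):

```cpp
// Hypothetical migration shim -- this commit edits the call sites directly
// instead. The alias lets code still using the old name keep compiling.
#include "multi_output_layer_old.h" // assumed header name for the renamed class

using MLPPMultiOutputLayer = MLPPOldMultiOutputLayer; // remove once every call site is migrated
```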

@@ -40,7 +40,7 @@ private:
 	std::vector<std::vector<real_t>> y_hat;

 	std::vector<MLPPOldHiddenLayer> network;
-	MLPPMultiOutputLayer *outputLayer;
+	MLPPOldMultiOutputLayer *outputLayer;

 	int n;
 	int k;

@@ -12,7 +12,7 @@
 #include <random>

-MLPPMultiOutputLayer::MLPPMultiOutputLayer(int n_output, int n_hidden, std::string activation, std::string cost, std::vector<std::vector<real_t>> input, std::string weightInit, std::string reg, real_t lambda, real_t alpha) :
+MLPPOldMultiOutputLayer::MLPPOldMultiOutputLayer(int n_output, int n_hidden, std::string activation, std::string cost, std::vector<std::vector<real_t>> input, std::string weightInit, std::string reg, real_t lambda, real_t alpha) :
 		n_output(n_output), n_hidden(n_hidden), activation(activation), cost(cost), input(input), weightInit(weightInit), reg(reg), lambda(lambda), alpha(alpha) {
 	weights = MLPPUtilities::weightInitialization(n_hidden, n_output, weightInit);
 	bias = MLPPUtilities::biasInitialization(n_output);
@@ -116,14 +116,14 @@ MLPPMultiOutputLayer::MLPPMultiOutputLayer(int n_output, int n_hidden, std::stri
 	cost_map["WassersteinLoss"] = &MLPPCost::HingeLoss;
 }

-void MLPPMultiOutputLayer::forwardPass() {
+void MLPPOldMultiOutputLayer::forwardPass() {
 	MLPPLinAlg alg;
 	MLPPActivation avn;
 	z = alg.mat_vec_add(alg.matmult(input, weights), bias);
 	a = (avn.*activation_map[activation])(z, 0);
 }

-void MLPPMultiOutputLayer::Test(std::vector<real_t> x) {
+void MLPPOldMultiOutputLayer::Test(std::vector<real_t> x) {
 	MLPPLinAlg alg;
 	MLPPActivation avn;
 	z_test = alg.addition(alg.mat_vec_mult(alg.transpose(weights), x), bias);

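forwardPass() computes z = XW + b and then applies whichever activation the layer was configured with, dispatching through a map of pointers to member functions: (avn.*activation_map[activation])(z, 0). A self-contained sketch of that dispatch pattern, with made-up names (Activation, sigmoid, relu) rather than MLPP's real vectorized signatures:

```cpp
// String-keyed dispatch through pointers to member functions, invoked with
// the .* operator on an instance -- the same pattern as activation_map above.
#include <cmath>
#include <cstdio>
#include <map>
#include <string>

struct Activation {
	double sigmoid(double z, bool deriv) {
		double s = 1.0 / (1.0 + std::exp(-z));
		return deriv ? s * (1.0 - s) : s;
	}
	double relu(double z, bool deriv) {
		return deriv ? (z > 0.0 ? 1.0 : 0.0) : (z > 0.0 ? z : 0.0);
	}
};

int main() {
	// Map from activation name to pointer-to-member-function.
	std::map<std::string, double (Activation::*)(double, bool)> activation_map;
	activation_map["Sigmoid"] = &Activation::sigmoid;
	activation_map["RELU"] = &Activation::relu;

	Activation avn;
	// Mirrors: a = (avn.*activation_map[activation])(z, 0);
	double a = (avn.*activation_map["Sigmoid"])(0.5, false);
	std::printf("%f\n", a);
	return 0;
}
```

The trailing 0 in the diff above plays the same role as deriv here, selecting the function value rather than its derivative, so one map entry can serve both the forward pass and backprop.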

@@ -9,18 +9,26 @@
 //

 #include "core/math/math_defs.h"
 #include "core/string/ustring.h"
 #include "core/object/reference.h"

 #include "../activation/activation.h"
 #include "../cost/cost.h"
 #include "../regularization/reg.h"
 #include "../utilities/utilities.h"

 #include "../lin_alg/mlpp_matrix.h"
 #include "../lin_alg/mlpp_vector.h"

 #include <map>
 #include <string>
 #include <vector>

-class MLPPMultiOutputLayer {
+class MLPPOldMultiOutputLayer {
 public:
-	MLPPMultiOutputLayer(int n_output, int n_hidden, std::string activation, std::string cost, std::vector<std::vector<real_t>> input, std::string weightInit, std::string reg, real_t lambda, real_t alpha);
+	MLPPOldMultiOutputLayer(int n_output, int n_hidden, std::string activation, std::string cost, std::vector<std::vector<real_t>> input, std::string weightInit, std::string reg, real_t lambda, real_t alpha);

 	int n_output;
 	int n_hidden;
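For reference, a usage sketch of the renamed constructor. The data is made up, the header path is assumed, and the string keys ("Linear", "MSE", "XavierNormal", "None") follow MLPP's usual naming rather than anything shown in this diff:

```cpp
#include <vector>
#include "multi_output_layer_old.h" // assumed header name for the renamed class

void example() {
	// Two toy samples with two features each -- made-up data.
	std::vector<std::vector<real_t>> X = {
		{ 0.0, 1.0 },
		{ 1.0, 0.0 },
	};

	// 3 outputs, 2 input features; lambda = 0 disables regularization,
	// alpha = 0.5 would be the elastic-net mix if it were enabled.
	MLPPOldMultiOutputLayer layer(3, 2, "Linear", "MSE", X, "XavierNormal", "None", 0.0, 0.5);

	layer.forwardPass(); // computes z = XW + b, then a = activation(z), per the .cpp diff above
}
```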