pmlpp/mlpp/multi_output_layer/multi_output_layer_old.cpp

//
// MultiOutputLayer.cpp
//
// Created by Marc Melikyan on 11/4/20.
//
#include "multi_output_layer_old.h"
#include "../lin_alg/lin_alg.h"
#include "../utilities/utilities.h"
#include <iostream>
#include <random>

MLPPOldMultiOutputLayer::MLPPOldMultiOutputLayer(int p_n_output, int p_n_hidden, std::string p_activation, std::string p_cost, std::vector<std::vector<real_t>> p_input, std::string p_weightInit, std::string p_reg, real_t p_lambda, real_t p_alpha) {
	n_output = p_n_output;
	n_hidden = p_n_hidden;
	activation = p_activation;
	cost = p_cost;
	input = p_input;
	weightInit = p_weightInit;
	reg = p_reg;
	lambda = p_lambda;
	alpha = p_alpha;
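
	// Initialize the n_hidden x n_output weight matrix and the output bias vector
	// using the requested weight-initialization scheme.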
	weights = MLPPUtilities::weightInitialization(n_hidden, n_output, weightInit);
	bias = MLPPUtilities::biasInitialization(n_output);
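
	// Pair each supported activation name with its matrix overload (used by forwardPass())
	// and the corresponding vector overload (used by Test()).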
activation_map["Linear"] = &MLPPActivationOld::linear;
activationTest_map["Linear"] = &MLPPActivationOld::linear;
2023-04-22 14:11:07 +02:00
activation_map["Sigmoid"] = &MLPPActivationOld::sigmoid;
activationTest_map["Sigmoid"] = &MLPPActivationOld::sigmoid;
2023-04-22 14:11:07 +02:00
activation_map["Softmax"] = &MLPPActivationOld::softmax;
activationTest_map["Softmax"] = &MLPPActivationOld::softmax;
2023-04-22 14:11:07 +02:00
activation_map["Swish"] = &MLPPActivationOld::swish;
activationTest_map["Swish"] = &MLPPActivationOld::swish;
2023-04-22 14:11:07 +02:00
activation_map["Mish"] = &MLPPActivationOld::mish;
activationTest_map["Mish"] = &MLPPActivationOld::mish;
2023-04-22 14:11:07 +02:00
activation_map["SinC"] = &MLPPActivationOld::sinc;
activationTest_map["SinC"] = &MLPPActivationOld::sinc;
2023-04-22 14:11:07 +02:00
activation_map["Softplus"] = &MLPPActivationOld::softplus;
activationTest_map["Softplus"] = &MLPPActivationOld::softplus;
2023-04-22 14:11:07 +02:00
activation_map["Softsign"] = &MLPPActivationOld::softsign;
activationTest_map["Softsign"] = &MLPPActivationOld::softsign;
2023-04-22 14:11:07 +02:00
activation_map["CLogLog"] = &MLPPActivationOld::cloglog;
activationTest_map["CLogLog"] = &MLPPActivationOld::cloglog;
2023-04-22 14:11:07 +02:00
activation_map["Logit"] = &MLPPActivationOld::logit;
activationTest_map["Logit"] = &MLPPActivationOld::logit;
2023-04-22 14:11:07 +02:00
activation_map["GaussianCDF"] = &MLPPActivationOld::gaussianCDF;
activationTest_map["GaussianCDF"] = &MLPPActivationOld::gaussianCDF;
2023-04-22 14:11:07 +02:00
activation_map["RELU"] = &MLPPActivationOld::RELU;
activationTest_map["RELU"] = &MLPPActivationOld::RELU;
2023-04-22 14:11:07 +02:00
activation_map["GELU"] = &MLPPActivationOld::GELU;
activationTest_map["GELU"] = &MLPPActivationOld::GELU;
2023-04-22 14:11:07 +02:00
activation_map["Sign"] = &MLPPActivationOld::sign;
activationTest_map["Sign"] = &MLPPActivationOld::sign;
2023-04-22 14:11:07 +02:00
activation_map["UnitStep"] = &MLPPActivationOld::unitStep;
activationTest_map["UnitStep"] = &MLPPActivationOld::unitStep;
2023-04-22 14:11:07 +02:00
activation_map["Sinh"] = &MLPPActivationOld::sinh;
activationTest_map["Sinh"] = &MLPPActivationOld::sinh;
2023-04-22 14:11:07 +02:00
activation_map["Cosh"] = &MLPPActivationOld::cosh;
activationTest_map["Cosh"] = &MLPPActivationOld::cosh;
2023-04-22 14:11:07 +02:00
activation_map["Tanh"] = &MLPPActivationOld::tanh;
activationTest_map["Tanh"] = &MLPPActivationOld::tanh;
2023-04-22 14:11:07 +02:00
activation_map["Csch"] = &MLPPActivationOld::csch;
activationTest_map["Csch"] = &MLPPActivationOld::csch;
2023-04-22 14:11:07 +02:00
activation_map["Sech"] = &MLPPActivationOld::sech;
activationTest_map["Sech"] = &MLPPActivationOld::sech;
2023-04-22 14:11:07 +02:00
activation_map["Coth"] = &MLPPActivationOld::coth;
activationTest_map["Coth"] = &MLPPActivationOld::coth;
2023-04-22 14:11:07 +02:00
activation_map["Arsinh"] = &MLPPActivationOld::arsinh;
activationTest_map["Arsinh"] = &MLPPActivationOld::arsinh;
2023-04-22 14:11:07 +02:00
activation_map["Arcosh"] = &MLPPActivationOld::arcosh;
activationTest_map["Arcosh"] = &MLPPActivationOld::arcosh;
2023-04-22 14:11:07 +02:00
activation_map["Artanh"] = &MLPPActivationOld::artanh;
activationTest_map["Artanh"] = &MLPPActivationOld::artanh;
2023-04-22 14:11:07 +02:00
activation_map["Arcsch"] = &MLPPActivationOld::arcsch;
activationTest_map["Arcsch"] = &MLPPActivationOld::arcsch;
2023-04-22 14:11:07 +02:00
activation_map["Arsech"] = &MLPPActivationOld::arsech;
activationTest_map["Arsech"] = &MLPPActivationOld::arsech;
2023-04-22 14:11:07 +02:00
activation_map["Arcoth"] = &MLPPActivationOld::arcoth;
activationTest_map["Arcoth"] = &MLPPActivationOld::arcoth;
costDeriv_map["MSE"] = &MLPPCost::MSEDeriv;
cost_map["MSE"] = &MLPPCost::MSE;
costDeriv_map["RMSE"] = &MLPPCost::RMSEDeriv;
cost_map["RMSE"] = &MLPPCost::RMSE;
costDeriv_map["MAE"] = &MLPPCost::MAEDeriv;
cost_map["MAE"] = &MLPPCost::MAE;
costDeriv_map["MBE"] = &MLPPCost::MBEDeriv;
cost_map["MBE"] = &MLPPCost::MBE;
costDeriv_map["LogLoss"] = &MLPPCost::LogLossDeriv;
cost_map["LogLoss"] = &MLPPCost::LogLoss;
costDeriv_map["CrossEntropy"] = &MLPPCost::CrossEntropyDeriv;
cost_map["CrossEntropy"] = &MLPPCost::CrossEntropy;
costDeriv_map["HingeLoss"] = &MLPPCost::HingeLossDeriv;
cost_map["HingeLoss"] = &MLPPCost::HingeLoss;
costDeriv_map["WassersteinLoss"] = &MLPPCost::HingeLossDeriv;
cost_map["WassersteinLoss"] = &MLPPCost::HingeLoss;
}
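
// Forward pass over the full input matrix: z = input * weights with the bias vector added
// to each row, then the selected activation produces a.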
void MLPPOldMultiOutputLayer::forwardPass() {
	MLPPLinAlg alg;
	MLPPActivationOld avn;

	z = alg.mat_vec_add(alg.matmult(input, weights), bias);
	a = (avn.*activation_map[activation])(z, false);
}
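
// Single-sample pass used for testing: z_test = weights^T * x + bias, then the vector
// overload of the selected activation produces a_test.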
void MLPPOldMultiOutputLayer::Test(std::vector<real_t> x) {
	MLPPLinAlg alg;
	MLPPActivationOld avn;

	z_test = alg.addition(alg.mat_vec_mult(alg.transpose(weights), x), bias);
	a_test = (avn.*activationTest_map[activation])(z_test, false);
}
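
// Minimal usage sketch (illustrative only; assumes the surrounding pmlpp types are available,
// and the parameter values below are chosen purely for demonstration):
//
//   std::vector<std::vector<real_t>> X = { { 0.0, 1.0 }, { 1.0, 0.0 } };
//   MLPPOldMultiOutputLayer layer(3, 2, "Sigmoid", "MSE", X, "Default", "None", 0.5, 0.5);
//   layer.forwardPass(); // layer.a now holds the activated outputs for each row of X.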