diff --git a/MLPP/Activation/Activation.cpp b/MLPP/Activation/Activation.cpp
index 6b31fa0..c41c7aa 100644
--- a/MLPP/Activation/Activation.cpp
+++ b/MLPP/Activation/Activation.cpp
@@ -389,6 +389,24 @@ namespace MLPP{
         return a;
     }
 
+    std::vector<std::vector<double>> Activation::leakyReLU(std::vector<std::vector<double>> z, double c, bool deriv){
+        if(deriv){
+            std::vector<std::vector<double>> deriv;
+            deriv.resize(z.size());
+            for(int i = 0; i < z.size(); i++){
+                deriv[i] = leakyReLU(z[i], c, 1);
+            }
+            return deriv;
+        }
+        std::vector<std::vector<double>> a;
+        a.resize(z.size());
+
+        for(int i = 0; i < a.size(); i++){
+            a[i] = leakyReLU(z[i], c);
+        }
+        return a;
+    }
+
     double Activation::ELU(double z, double c, bool deriv){
         if (deriv){
             if(z <= 0){
@@ -424,6 +442,24 @@ namespace MLPP{
         return a;
     }
 
+    std::vector<std::vector<double>> Activation::ELU(std::vector<std::vector<double>> z, double c, bool deriv){
+        if(deriv){
+            std::vector<std::vector<double>> deriv;
+            deriv.resize(z.size());
+            for(int i = 0; i < z.size(); i++){
+                deriv[i] = ELU(z[i], c, 1);
+            }
+            return deriv;
+        }
+        std::vector<std::vector<double>> a;
+        a.resize(z.size());
+
+        for(int i = 0; i < a.size(); i++){
+            a[i] = ELU(z[i], c);
+        }
+        return a;
+    }
+
     double Activation::SELU(double z, double lambda, double c, bool deriv){
         if (deriv){
             return ELU(z, c, 1);
diff --git a/MLPP/Activation/Activation.hpp b/MLPP/Activation/Activation.hpp
index 440350f..b3dc87e 100644
--- a/MLPP/Activation/Activation.hpp
+++ b/MLPP/Activation/Activation.hpp
@@ -55,9 +55,11 @@ namespace MLPP{
 
         double leakyReLU(double z, double c, bool deriv = 0);
         std::vector<double> leakyReLU(std::vector<double> z, double c, bool deriv = 0);
+        std::vector<std::vector<double>> leakyReLU(std::vector<std::vector<double>> z, double c, bool deriv = 0);
 
         double ELU(double z, double c, bool deriv = 0);
         std::vector<double> ELU(std::vector<double> z, double c, bool deriv = 0);
+        std::vector<std::vector<double>> ELU(std::vector<std::vector<double>> z, double c, bool deriv = 0);
 
         double SELU(double z, double lambda, double c, bool deriv = 0);
         std::vector<double> SELU(std::vector<double> z, double lambda, double c, bool deriv = 0);
diff --git a/main.cpp b/main.cpp
index 4481611..2b8c238 100644
--- a/main.cpp
+++ b/main.cpp
@@ -9,7 +9,7 @@
 // POLYMORPHIC IMPLEMENTATION OF REGRESSION CLASSES
 // EXTEND SGD/MBGD SUPPORT FOR DYN. SIZED ANN
 // STANDARDIZE ACTIVATIONS/OPTIMIZATIONS
-// FINISH ADDING ALL ACTIVATIONS TO ANN
+// ADD LEAKYRELU, ELU TO ANN
 // HYPOTHESIS TESTING CLASS
 // GAUSS MARKOV CHECKER CLASS
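
For context, a minimal usage sketch of the new matrix overloads follows. It is not part of the patch: the include path, the default-constructed MLPP::Activation object, and the sample values are assumptions modeled on the scalar and vector overloads already declared in Activation.hpp.

// Hypothetical usage sketch (assumed include path and sample data; not part
// of the patch above). Mirrors the existing scalar/vector overload pattern.
#include <iostream>
#include <vector>
#include "MLPP/Activation/Activation.hpp"

int main(){
    MLPP::Activation avn;
    std::vector<std::vector<double>> Z = {{-2.0, -0.5, 0.0},
                                          { 0.5,  1.0, 3.0}};
    double c = 0.01; // leak coefficient, forwarded to the scalar leakyReLU

    // Forward pass: the matrix overload dispatches each row to the vector
    // overload, which applies the scalar leakyReLU element-wise.
    std::vector<std::vector<double>> A = avn.leakyReLU(Z, c);

    // Derivative: deriv = 1 selects the derivative branch (typically c for
    // negative z, 1 otherwise).
    std::vector<std::vector<double>> dA = avn.leakyReLU(Z, c, 1);

    for(int i = 0; i < A.size(); i++){
        for(int j = 0; j < A[i].size(); j++){
            std::cout << A[i][j] << " ";
        }
        std::cout << "\n";
    }
    return 0;
}

Note that the matrix overloads delegate row by row to the existing vector overloads, so any future change to the element-wise rule only needs to touch the scalar functions.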