diff --git a/MLPP/Activation/Activation.cpp b/MLPP/Activation/Activation.cpp
index c2bc5ba..f4a91cc 100644
--- a/MLPP/Activation/Activation.cpp
+++ b/MLPP/Activation/Activation.cpp
@@ -570,78 +570,34 @@ namespace MLPP{
     }
 
     std::vector<double> Activation::csch(std::vector<double> z, bool deriv){
-        if(deriv){
-            std::vector<double> deriv;
-            deriv.resize(z.size());
-            for(int i = 0; i < z.size(); i++){
-                deriv[i] = csch(z[i], 1);
-            }
-            return deriv;
-        }
-        std::vector<double> a;
-        a.resize(z.size());
-        for(int i = 0; i < z.size(); i++){
-            a[i] = csch(z[i]);
-        }
-        return a;
+        LinAlg alg;
+        if(deriv){ return alg.hadamard_product(alg.scalarMultiply(-1, csch(z)), coth(z)); }
+        return alg.elementWiseDivision(alg.onevec(z.size()), sinh(z));
     }
 
     std::vector<std::vector<double>> Activation::csch(std::vector<std::vector<double>> z, bool deriv){
-        if(deriv){
-            std::vector<std::vector<double>> deriv;
-            deriv.resize(z.size());
-            for(int i = 0; i < z.size(); i++){
-                deriv[i] = csch(z[i], 1);
-            }
-            return deriv;
-        }
-        std::vector<std::vector<double>> a;
-        a.resize(z.size());
-        for(int i = 0; i < z.size(); i++){
-            a[i] = csch(z[i]);
-        }
-        return a;
+        LinAlg alg;
+        if(deriv){ return alg.hadamard_product(alg.scalarMultiply(-1, csch(z)), coth(z)); }
+        return alg.elementWiseDivision(alg.onemat(z.size(), z[0].size()), sinh(z));
     }
 
     double Activation::sech(double z, bool deriv){
         if(deriv){ return -sech(z) * tanh(z); }
-        return 2 / (exp(z) + exp(-z));
+        return 1 / cosh(z);
     }
 
     std::vector<double> Activation::sech(std::vector<double> z, bool deriv){
-        if(deriv){
-            std::vector<double> deriv;
-            deriv.resize(z.size());
-            for(int i = 0; i < z.size(); i++){
-                deriv[i] = sech(z[i], 1);
-            }
-            return deriv;
-        }
-        std::vector<double> a;
-        a.resize(z.size());
-        for(int i = 0; i < z.size(); i++){
-            a[i] = sech(z[i]);
-        }
-        return a;
+        LinAlg alg;
+        if(deriv){ return alg.hadamard_product(alg.scalarMultiply(-1, sech(z)), tanh(z)); }
+        return alg.elementWiseDivision(alg.onevec(z.size()), cosh(z));
 
         // return activation(z, deriv, static_cast<double (*)(double, bool)>(&sech));
     }
 
     std::vector<std::vector<double>> Activation::sech(std::vector<std::vector<double>> z, bool deriv){
-        if(deriv){
-            std::vector<std::vector<double>> deriv;
-            deriv.resize(z.size());
-            for(int i = 0; i < z.size(); i++){
-                deriv[i] = sech(z[i], 1);
-            }
-            return deriv;
-        }
-        std::vector<std::vector<double>> a;
-        a.resize(z.size());
-        for(int i = 0; i < z.size(); i++){
-            a[i] = sech(z[i]);
-        }
-        return a;
+        LinAlg alg;
+        if(deriv){ return alg.hadamard_product(alg.scalarMultiply(-1, sech(z)), tanh(z)); }
+        return alg.elementWiseDivision(alg.onemat(z.size(), z[0].size()), cosh(z));
 
         // return activation(z, deriv, static_cast<double (*)(double, bool)>(&sech));
     }
@@ -653,37 +609,15 @@ namespace MLPP{
     }
 
     std::vector<double> Activation::coth(std::vector<double> z, bool deriv){
-        if(deriv){
-            std::vector<double> deriv;
-            deriv.resize(z.size());
-            for(int i = 0; i < z.size(); i++){
-                deriv[i] = coth(z[i], 1);
-            }
-            return deriv;
-        }
-        std::vector<double> a;
-        a.resize(z.size());
-        for(int i = 0; i < z.size(); i++){
-            a[i] = coth(z[i]);
-        }
-        return a;
+        LinAlg alg;
+        if(deriv){ return alg.hadamard_product(alg.scalarMultiply(-1, csch(z)), csch(z)); }
+        return alg.elementWiseDivision(alg.onevec(z.size()), tanh(z));
     }
 
     std::vector<std::vector<double>> Activation::coth(std::vector<std::vector<double>> z, bool deriv){
-        if(deriv){
-            std::vector<std::vector<double>> deriv;
-            deriv.resize(z.size());
-            for(int i = 0; i < z.size(); i++){
-                deriv[i] = coth(z[i], 1);
-            }
-            return deriv;
-        }
-        std::vector<std::vector<double>> a;
-        a.resize(z.size());
-        for(int i = 0; i < z.size(); i++){
-            a[i] = coth(z[i]);
-        }
-        return a;
+        LinAlg alg;
+        if(deriv){ return alg.hadamard_product(alg.scalarMultiply(-1, csch(z)), csch(z)); }
+        return alg.elementWiseDivision(alg.onemat(z.size(), z[0].size()), tanh(z));
     }
 
     double Activation::arsinh(double z, bool deriv){
diff --git a/MLPP/LinAlg/LinAlg.cpp b/MLPP/LinAlg/LinAlg.cpp
index bb0e74b..41296e1 100644
--- a/MLPP/LinAlg/LinAlg.cpp
+++ b/MLPP/LinAlg/LinAlg.cpp
@@ -300,17 +300,21 @@ namespace MLPP{
     }
 
     std::vector<std::vector<double>> LinAlg::onemat(int n, int m){
-        std::vector<std::vector<double>> onemat;
-        onemat.resize(n);
-        for(int i = 0; i < onemat.size(); i++){
-            onemat[i].resize(m);
+        return full(n, m, 1);
+    }
+
+    std::vector<std::vector<double>> LinAlg::full(int n, int m, int k){
+        std::vector<std::vector<double>> full;
+        full.resize(n);
+        for(int i = 0; i < full.size(); i++){
+            full[i].resize(m);
         }
-        for(int i = 0; i < onemat.size(); i++){
-            for(int j = 0; j < onemat[i].size(); j++){
-                onemat[i][j] = 1;
+        for(int i = 0; i < full.size(); i++){
+            for(int j = 0; j < full[i].size(); j++){
+                full[i][j] = k;
             }
         }
-        return onemat;
+        return full;
     }
 
     std::vector<std::vector<double>> LinAlg::round(std::vector<std::vector<double>> A){
@@ -640,13 +644,23 @@ namespace MLPP{
         return c;
     }
 
+    std::vector<double> LinAlg::zerovec(int n){
+        std::vector<double> zerovec;
+        zerovec.resize(n);
+        return zerovec;
+    }
+
     std::vector<double> LinAlg::onevec(int n){
-        std::vector<double> onevec;
-        onevec.resize(n);
-        for(int i = 0; i < onevec.size(); i++){
-            onevec[i] = 1;
+        return full(n, 1);
+    }
+
+    std::vector<double> LinAlg::full(int n, int k){
+        std::vector<double> full;
+        full.resize(n);
+        for(int i = 0; i < full.size(); i++){
+            full[i] = k;
         }
-        return onevec;
+        return full;
     }
 
     double LinAlg::max(std::vector<double> a){
diff --git a/MLPP/LinAlg/LinAlg.hpp b/MLPP/LinAlg/LinAlg.hpp
index f9ec1c6..ff41181 100644
--- a/MLPP/LinAlg/LinAlg.hpp
+++ b/MLPP/LinAlg/LinAlg.hpp
@@ -58,6 +58,8 @@ namespace MLPP{
 
         std::vector<std::vector<double>> onemat(int n, int m);
 
+        std::vector<std::vector<double>> full(int n, int m, int k);
+
         std::vector<std::vector<double>> round(std::vector<std::vector<double>> A);
 
         std::vector<std::vector<double>> identity(double d);
@@ -102,8 +104,12 @@ namespace MLPP{
 
         double dot(std::vector<double> a, std::vector<double> b);
 
+        std::vector<double> zerovec(int n);
+
         std::vector<double> onevec(int n);
 
+        std::vector<double> full(int n, int k);
+
         double max(std::vector<double> a);
 
         double min(std::vector<double> a);
diff --git a/a.out b/a.out
index 0e2d5ba..fb4e7ae 100755
Binary files a/a.out and b/a.out differ
diff --git a/main.cpp b/main.cpp
index 938918d..09ea111 100644
--- a/main.cpp
+++ b/main.cpp
@@ -9,7 +9,7 @@
 // POLYMORPHIC IMPLEMENTATION OF REGRESSION CLASSES
 // EXTEND SGD/MBGD SUPPORT FOR DYN. SIZED ANN
 // STANDARDIZE ACTIVATIONS/OPTIMIZATIONS
-// ADD LEAKYRELU, ELU TO ANN
+// ADD LEAKYRELU, ELU, SELU TO ANN
 // HYPOTHESIS TESTING CLASS
 // GAUSS MARKOV CHECKER CLASS
 
@@ -348,21 +348,20 @@ int main() {
     // OutlierFinder outlierFinder(2); // Any datapoint outside of 2 stds from the mean is marked as an outlier. 
     // alg.printVector(outlierFinder.modelTest(inputSet));
 
-    // // Testing for new Functions
+    // // Testing new Functions
     // double z_s = 4;
-    // std::cout << avn.sigmoid(z_s) << std::endl;
-    // std::cout << avn.sigmoid(z_s, 1) << std::endl;
+    // std::cout << avn.coth(z_s) << std::endl;
+    // std::cout << avn.coth(z_s, 1) << std::endl;
 
     // std::vector<double> z_v = {4, 5};
-    // alg.printVector(avn.sigmoid(z_v));
-    // alg.printVector(avn.sigmoid(z_v, 1));
+    // alg.printVector(avn.coth(z_v));
+    // alg.printVector(avn.coth(z_v, 1));
 
     // std::vector<std::vector<double>> Z_m = {{4, 5}};
-    // alg.printMatrix(avn.sigmoid(Z_m));
-    // alg.printMatrix(avn.sigmoid(Z_m, 1));
+    // alg.printMatrix(avn.coth(Z_m));
+    // alg.printMatrix(avn.coth(Z_m, 1));
 
-    // alg.printMatrix(alg.pinverse({{1,2}, {3,4}}));
-
+    // // alg.printMatrix(alg.pinverse({{1,2}, {3,4}}));
 
     return 0; 
 }
\ No newline at end of file