diff --git a/MLPP/Activation/Activation.cpp b/MLPP/Activation/Activation.cpp
index f8fa201..f868745 100644
--- a/MLPP/Activation/Activation.cpp
+++ b/MLPP/Activation/Activation.cpp
@@ -163,6 +163,23 @@ namespace MLPP{
         return alg.log(alg.addition(alg.onemat(z.size(), z[0].size()), alg.exp(z)));
     }
 
+    double Activation::softsign(double z, bool deriv){
+        if(deriv){ return 1 / ((1 + std::abs(z)) * (1 + std::abs(z))); }
+        return z / (1 + std::abs(z));
+    }
+
+    std::vector<double> Activation::softsign(std::vector<double> z, bool deriv){
+        LinAlg alg;
+        if(deriv) { return alg.elementWiseDivision(alg.onevec(z.size()), alg.exponentiate(alg.addition(alg.onevec(z.size()), alg.abs(z)), 2)); }
+        return alg.elementWiseDivision(z, alg.addition(alg.onevec(z.size()), alg.abs(z)));
+    }
+
+    std::vector<std::vector<double>> Activation::softsign(std::vector<std::vector<double>> z, bool deriv){
+        LinAlg alg;
+        if(deriv) { return alg.elementWiseDivision(alg.onemat(z.size(), z[0].size()), alg.exponentiate(alg.addition(alg.onemat(z.size(), z[0].size()), alg.abs(z)), 2)); }
+        return alg.elementWiseDivision(z, alg.addition(alg.onemat(z.size(), z[0].size()), alg.abs(z)));
+    }
+
     double Activation::gaussianCDF(double z, bool deriv){
         if(deriv) {
             return (1 / sqrt(2 * M_PI)) * exp(-z * z / 2);
diff --git a/MLPP/Activation/Activation.hpp b/MLPP/Activation/Activation.hpp
index b3dc87e..58ede3c 100644
--- a/MLPP/Activation/Activation.hpp
+++ b/MLPP/Activation/Activation.hpp
@@ -33,6 +33,10 @@ namespace MLPP{
         std::vector<double> softplus(std::vector<double> z, bool deriv = 0);
         std::vector<std::vector<double>> softplus(std::vector<std::vector<double>> z, bool deriv = 0);
 
+        double softsign(double z, bool deriv = 0);
+        std::vector<double> softsign(std::vector<double> z, bool deriv = 0);
+        std::vector<std::vector<double>> softsign(std::vector<std::vector<double>> z, bool deriv = 0);
+
         double gaussianCDF(double z, bool deriv = 0);
         std::vector<double> gaussianCDF(std::vector<double> z, bool deriv = 0);
         std::vector<std::vector<double>> gaussianCDF(std::vector<std::vector<double>> z, bool deriv = 0);
diff --git a/MLPP/HiddenLayer/HiddenLayer.cpp b/MLPP/HiddenLayer/HiddenLayer.cpp
index 95fc9f7..6ce42b3 100644
--- a/MLPP/HiddenLayer/HiddenLayer.cpp
+++ b/MLPP/HiddenLayer/HiddenLayer.cpp
@@ -31,6 +31,9 @@ namespace MLPP {
         activation_map["Softplus"] = &Activation::softplus;
         activationTest_map["Softplus"] = &Activation::softplus;
 
+        activation_map["Softsign"] = &Activation::softsign;
+        activationTest_map["Softsign"] = &Activation::softsign;
+
         activation_map["CLogLog"] = &Activation::cloglog;
         activationTest_map["CLogLog"] = &Activation::cloglog;
diff --git a/MLPP/LinAlg/LinAlg.cpp b/MLPP/LinAlg/LinAlg.cpp
index 72a7528..072a2d0 100644
--- a/MLPP/LinAlg/LinAlg.cpp
+++ b/MLPP/LinAlg/LinAlg.cpp
@@ -227,12 +227,26 @@ namespace MLPP{
         else if(n < 0){
             A = inverse(A);
         }
-        for(int i = 0; i < abs(n); i++){
+        for(int i = 0; i < std::abs(n); i++){
             B = matmult(B, A);
         }
         return B;
     }
 
+    std::vector<std::vector<double>> LinAlg::abs(std::vector<std::vector<double>> A){
+        std::vector<std::vector<double>> B;
+        B.resize(A.size());
+        for(int i = 0; i < B.size(); i++){
+            B[i].resize(A[0].size());
+        }
+        for(int i = 0; i < B.size(); i++){
+            for(int j = 0; j < B[i].size(); j++){
+                B[i][j] = std::abs(A[i][j]);
+            }
+        }
+        return B;
+    }
+
     double LinAlg::det(std::vector<std::vector<double>> A, int d){
         double deter = 0;
@@ -452,12 +466,12 @@
         double sub_j = 1;
         for(int i = 0; i < A.size(); i++){
             for(int j = 0; j < A[i].size(); j++){
-                if(i != j && abs(A[i][j]) > a_ij){
+                if(i != j && std::abs(A[i][j]) > a_ij){
                     a_ij = A[i][j];
                     sub_i = i;
                     sub_j = j;
                 }
-                else if(i != j && abs(A[i][j]) == a_ij){
+                else if(i != j && std::abs(A[i][j]) == a_ij){
                     if(i < sub_i){
                         a_ij = A[i][j];
                         sub_i = i;
@@ -732,6 +746,15 @@
         return c;
     }
 
+    std::vector<double> LinAlg::abs(std::vector<double> a){
+        std::vector<double> b;
+        b.resize(a.size());
+        for(int i = 0; i < b.size(); i++){
+            b[i] = std::abs(a[i]);
+        }
+        return b;
+    }
+
     std::vector<double> LinAlg::zerovec(int n){
         std::vector<double> zerovec;
         zerovec.resize(n);
diff --git a/MLPP/LinAlg/LinAlg.hpp b/MLPP/LinAlg/LinAlg.hpp
index 8c5315c..cb1c1ad 100644
--- a/MLPP/LinAlg/LinAlg.hpp
+++ b/MLPP/LinAlg/LinAlg.hpp
@@ -47,6 +47,8 @@ namespace MLPP{
         std::vector<std::vector<double>> sqrt(std::vector<std::vector<double>> A);
 
         std::vector<std::vector<double>> matrixPower(std::vector<std::vector<double>> A, int n);
+
+        std::vector<std::vector<double>> abs(std::vector<std::vector<double>> A);
 
         double det(std::vector<std::vector<double>> A, int d);
@@ -120,6 +122,8 @@
         double dot(std::vector<double> a, std::vector<double> b);
 
+        std::vector<double> abs(std::vector<double> a);
+
         std::vector<double> zerovec(int n);
 
         std::vector<double> onevec(int n);
diff --git a/MLPP/OutputLayer/OutputLayer.cpp b/MLPP/OutputLayer/OutputLayer.cpp
index db3fa17..c234053 100644
--- a/MLPP/OutputLayer/OutputLayer.cpp
+++ b/MLPP/OutputLayer/OutputLayer.cpp
@@ -30,6 +30,9 @@ namespace MLPP {
         activation_map["Softplus"] = &Activation::softplus;
         activationTest_map["Softplus"] = &Activation::softplus;
 
+        activation_map["Softsign"] = &Activation::softsign;
+        activationTest_map["Softsign"] = &Activation::softsign;
+
         activation_map["CLogLog"] = &Activation::cloglog;
         activationTest_map["CLogLog"] = &Activation::cloglog;
diff --git a/a.out b/a.out
new file mode 100755
index 0000000..9129d28
Binary files /dev/null and b/a.out differ
diff --git a/main.cpp b/main.cpp
index b86d81b..f069d95 100644
--- a/main.cpp
+++ b/main.cpp
@@ -224,7 +224,7 @@ int main() {
     // // DYNAMICALLY SIZED ANN
     // // Possible Weight Init Methods: Default, Uniform, HeNormal, HeUniform, XavierNormal, XavierUniform
-    // // Possible Activations: Linear, Sigmoid, Swish, Softplus, CLogLog, Ar{Sinh, Cosh, Tanh, Csch, Sech, Coth}, GaussianCDF, GELU, UnitStep
+    // // Possible Activations: Linear, Sigmoid, Swish, Softplus, Softsign, CLogLog, Ar{Sinh, Cosh, Tanh, Csch, Sech, Coth}, GaussianCDF, GELU, UnitStep
     // // Possible Loss Functions: MSE, RMSE, MBE, LogLoss, CrossEntropy, HingeLoss
     // std::vector<std::vector<double>> inputSet = {{0,0,1,1}, {0,1,0,1}};
     // std::vector<double> outputSet = {0,1,1,0};
@@ -350,16 +350,16 @@ int main() {
     // // Testing new Functions
     // double z_s = 0.001;
-    // std::cout << avn.arcoth(z_s) << std::endl;
-    // std::cout << avn.arcoth(z_s, 1) << std::endl;
+    // std::cout << avn.softsign(z_s) << std::endl;
+    // std::cout << avn.softsign(z_s, 1) << std::endl;
 
     // std::vector<double> z_v = {0.001, 5};
-    // alg.printVector(avn.arcoth(z_v));
-    // alg.printVector(avn.arcoth(z_v, 1));
+    // alg.printVector(avn.softsign(z_v));
+    // alg.printVector(avn.softsign(z_v, 1));
 
     // std::vector<std::vector<double>> Z_m = {{0.001, 5}};
-    // alg.printMatrix(avn.arcoth(Z_m));
-    // alg.printMatrix(avn.arcoth(Z_m, 1));
+    // alg.printMatrix(avn.softsign(Z_m));
+    // alg.printMatrix(avn.softsign(Z_m, 1));
 
     // std::cout << alg.trace({{1,2}, {3,4}}) << std::endl;
     // alg.printMatrix(alg.pinverse({{1,2}, {3,4}}));
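
Reviewer note: for reference, here is a minimal standalone sketch of the softsign math this patch implements. It is not part of the diff and does not depend on the MLPP headers; it mirrors the scalar overload added in Activation.cpp, where softsign(z) = z / (1 + |z|) and the derivative is 1 / (1 + |z|)^2. The printed values in the comments are approximate.

    // Standalone softsign sketch; assumes only the C++ standard library.
    #include <cmath>
    #include <iostream>

    // softsign(z) = z / (1 + |z|); its derivative is 1 / (1 + |z|)^2.
    double softsign(double z, bool deriv = false){
        if(deriv){ return 1 / ((1 + std::abs(z)) * (1 + std::abs(z))); }
        return z / (1 + std::abs(z));
    }

    int main(){
        std::cout << softsign(0.001) << std::endl;       // ~0.000999 (near-linear around 0)
        std::cout << softsign(0.001, true) << std::endl; // ~0.998
        std::cout << softsign(5.0) << std::endl;         // ~0.8333 (saturates toward 1)
        std::cout << softsign(5.0, true) << std::endl;   // ~0.0278
        return 0;
    }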