Added LinAlg.abs, added softsign activation.

novak_99 2021-05-31 22:55:24 -07:00
parent 5a1392bdd1
commit 629f63059c
8 changed files with 64 additions and 10 deletions

View File

@@ -163,6 +163,23 @@ namespace MLPP{
return alg.log(alg.addition(alg.onemat(z.size(), z[0].size()), alg.exp(z)));
}
double Activation::softsign(double z, bool deriv){
if(deriv){ return 1/((1 + std::abs(z)) * (1 + std::abs(z))); }
return z/(1 + std::abs(z));
}
std::vector<double> Activation::softsign(std::vector<double> z, bool deriv){
LinAlg alg;
if(deriv) { return alg.elementWiseDivision(alg.onevec(z.size()), alg.exponentiate(alg.addition(alg.onevec(z.size()), alg.abs(z)), 2)); }
return alg.elementWiseDivision(z, alg.addition(alg.onevec(z.size()), alg.abs(z)));
}
std::vector<std::vector<double>> Activation::softsign(std::vector<std::vector<double>> z, bool deriv){
LinAlg alg;
if(deriv) { return alg.elementWiseDivision(alg.onemat(z.size(), z[0].size()), alg.exponentiate(alg.addition(alg.onemat(z.size(), z[0].size()), alg.abs(z)), 2)); }
return alg.elementWiseDivision(z, alg.addition(alg.onemat(z.size(), z[0].size()), alg.abs(z)));
}
double Activation::gaussianCDF(double z, bool deriv){
if(deriv) {
return (1 / sqrt(2 * M_PI)) * exp(-z * z / 2);
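For reference, softsign is f(z) = z / (1 + |z|), with derivative f'(z) = 1 / (1 + |z|)^2, which is what the three overloads above compute element-wise. A minimal standalone sketch of those two identities (the helper names softsign and softsign_deriv are illustrative, not part of the library's API):

#include <cmath>
#include <iostream>

double softsign(double z) { return z / (1 + std::abs(z)); }
double softsign_deriv(double z) { return 1 / ((1 + std::abs(z)) * (1 + std::abs(z))); }

int main() {
    // Expected output: 0.833333 0.0277778 (i.e. 5/6 and 1/36)
    std::cout << softsign(5.0) << " " << softsign_deriv(5.0) << std::endl;
    return 0;
}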

View File

@@ -33,6 +33,10 @@ namespace MLPP{
std::vector<double> softplus(std::vector<double> z, bool deriv = 0);
std::vector<std::vector<double>> softplus(std::vector<std::vector<double>> z, bool deriv = 0);
double softsign(double z, bool deriv = 0);
std::vector<double> softsign(std::vector<double> z, bool deriv = 0);
std::vector<std::vector<double>> softsign(std::vector<std::vector<double>> z, bool deriv = 0);
double gaussianCDF(double z, bool deriv = 0);
std::vector<double> gaussianCDF(std::vector<double> z, bool deriv = 0);
std::vector<std::vector<double>> gaussianCDF(std::vector<std::vector<double>> z, bool deriv = 0);

View File

@@ -31,6 +31,9 @@ namespace MLPP {
activation_map["Softplus"] = &Activation::softplus;
activationTest_map["Softplus"] = &Activation::softplus;
activation_map["Softsign"] = &Activation::softsign;
activationTest_map["Softsign"] = &Activation::softsign;
activation_map["CLogLog"] = &Activation::cloglog;
activationTest_map["CLogLog"] = &Activation::cloglog;

View File

@@ -227,12 +227,26 @@ namespace MLPP{
else if(n < 0){
A = inverse(A);
}
for(int i = 0; i < abs(n); i++){
for(int i = 0; i < std::abs(n); i++){
B = matmult(B, A);
}
return B;
}
std::vector<std::vector<double>> LinAlg::abs(std::vector<std::vector<double>> A){
std::vector<std::vector<double>> B;
B.resize(A.size());
for(int i = 0; i < B.size(); i++){
B[i].resize(A[0].size());
}
for(int i = 0; i < B.size(); i++){
for(int j = 0; j < B[i].size(); j++){
B[i][j] = std::abs(A[i][j]);
}
}
return B;
}
double LinAlg::det(std::vector<std::vector<double>> A, int d){
double deter = 0;
@@ -452,12 +466,12 @@ namespace MLPP{
double sub_j = 1;
for(int i = 0; i < A.size(); i++){
for(int j = 0; j < A[i].size(); j++){
if(i != j && abs(A[i][j]) > a_ij){
if(i != j && std::abs(A[i][j]) > a_ij){
a_ij = A[i][j];
sub_i = i;
sub_j = j;
}
else if(i != j && abs(A[i][j]) == a_ij){
else if(i != j && std::abs(A[i][j]) == a_ij){
if(i < sub_i){
a_ij = A[i][j];
sub_i = i;
@@ -732,6 +746,15 @@ namespace MLPP{
return c;
}
std::vector<double> LinAlg::abs(std::vector<double> a){
std::vector<double> b;
b.resize(a.size());
for(int i = 0; i < b.size(); i++){
b[i] = std::abs(a[i]);
}
return b;
}
std::vector<double> LinAlg::zerovec(int n){
std::vector<double> zerovec;
zerovec.resize(n);
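A quick usage sketch for the two new element-wise abs overloads, assuming the MLPP headers are on the include path (the include line below is a guess at the repo layout, adjust as needed):

#include <iostream>
#include <vector>
#include "LinAlg/LinAlg.hpp" // path assumed; point this at the actual header

int main() {
    MLPP::LinAlg alg;
    // Vector overload: element-wise absolute value.
    std::vector<double> v = alg.abs(std::vector<double>{-1.5, 2, -3});
    // Matrix overload: same, applied entry by entry.
    std::vector<std::vector<double>> A = {{-1, 2}, {3, -4}};
    std::vector<std::vector<double>> B = alg.abs(A);
    for (double x : v) { std::cout << x << " "; } // 1.5 2 3
    std::cout << std::endl;
    return 0;
}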

View File

@@ -47,6 +47,8 @@ namespace MLPP{
std::vector<std::vector<double>> sqrt(std::vector<std::vector<double>> A);
std::vector<std::vector<double>> matrixPower(std::vector<std::vector<double>> A, int n);
std::vector<std::vector<double>> abs(std::vector<std::vector<double>> A);
double det(std::vector<std::vector<double>> A, int d);
@@ -120,6 +122,8 @@ namespace MLPP{
double dot(std::vector<double> a, std::vector<double> b);
std::vector<double> abs(std::vector<double> a);
std::vector<double> zerovec(int n);
std::vector<double> onevec(int n);

View File

@@ -30,6 +30,9 @@ namespace MLPP {
activation_map["Softplus"] = &Activation::softplus;
activationTest_map["Softplus"] = &Activation::softplus;
activation_map["Softsign"] = &Activation::softsign;
activationTest_map["Softsign"] = &Activation::softsign;
activation_map["CLogLog"] = &Activation::cloglog;
activationTest_map["CLogLog"] = &Activation::cloglog;

BIN
a.out Executable file

Binary file not shown.

View File

@@ -224,7 +224,7 @@ int main() {
// // DYNAMICALLY SIZED ANN
// // Possible Weight Init Methods: Default, Uniform, HeNormal, HeUniform, XavierNormal, XavierUniform
// // Possible Activations: Linear, Sigmoid, Swish, Softplus, CLogLog, Ar{Sinh, Cosh, Tanh, Csch, Sech, Coth}, GaussianCDF, GELU, UnitStep
// // Possible Activations: Linear, Sigmoid, Swish, Softplus, Softsign, CLogLog, Ar{Sinh, Cosh, Tanh, Csch, Sech, Coth}, GaussianCDF, GELU, UnitStep
// // Possible Loss Functions: MSE, RMSE, MBE, LogLoss, CrossEntropy, HingeLoss
// std::vector<std::vector<double>> inputSet = {{0,0,1,1}, {0,1,0,1}};
// std::vector<double> outputSet = {0,1,1,0};
@@ -350,16 +350,16 @@ int main() {
// // Testing new Functions
// double z_s = 0.001;
// std::cout << avn.arcoth(z_s) << std::endl;
// std::cout << avn.arcoth(z_s, 1) << std::endl;
// std::cout << avn.softsign(z_s) << std::endl;
// std::cout << avn.softsign(z_s, 1) << std::endl;
// std::vector<double> z_v = {0.001, 5};
// alg.printVector(avn.arcoth(z_v));
// alg.printVector(avn.arcoth(z_v, 1));
// alg.printVector(avn.softsign(z_v));
// alg.printVector(avn.softsign(z_v, 1));
// std::vector<std::vector<double>> Z_m = {{0.001, 5}};
// alg.printMatrix(avn.arcoth(Z_m));
// alg.printMatrix(avn.arcoth(Z_m, 1));
// alg.printMatrix(avn.softsign(Z_m));
// alg.printMatrix(avn.softsign(Z_m, 1));
// std::cout << alg.trace({{1,2}, {3,4}}) << std::endl;
// alg.printMatrix(alg.pinverse({{1,2}, {3,4}}));
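As a sanity check for the softsign test calls above: softsign(0.001) ≈ 0.000999 and softsign(0.001, 1) ≈ 0.998004; for z_v = {0.001, 5} the expected outputs are approximately {0.000999, 0.833333} with derivatives {0.998004, 0.027778}, and the matrix case Z_m yields the same values.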