"Vectorized" swish

This commit is contained in:
novak_99 2021-05-25 22:46:52 -07:00
parent 6cb5770a26
commit 14251a7dbc
35 changed files with 21 additions and 29 deletions

BIN
.DS_Store vendored Normal file

Binary file not shown.

View File

@ -276,39 +276,19 @@ namespace MLPP{
}
// Swish activation applied element-wise over a vector: swish(z) = z * sigmoid(z).
//
// @param z      input vector.
// @param deriv  when true, return the element-wise derivative instead:
//               swish'(z) = swish(z) + sigmoid(z) * (1 - swish(z))
//                         = swish(z) + sigmoid(z) - sigmoid(z) * swish(z)
// @return vector of activations (or derivatives) with the same length as z.
//
// NOTE(review): fully vectorized via LinAlg; the old per-element loop is gone.
std::vector<double> Activation::swish(std::vector<double> z, bool deriv){
    LinAlg alg;
    if(deriv){
        // Bug fix: the vectorized derivative was computed as a bare expression
        // statement and never returned (dead code behind the old loop's
        // `return deriv;`). It must be the branch's return value.
        return alg.addition(swish(z), alg.subtraction(sigmoid(z), alg.hadamard_product(sigmoid(z), swish(z))));
    }
    // Forward pass: z ⊙ sigmoid(z).
    return alg.hadamard_product(z, sigmoid(z));
}
// Swish activation applied element-wise over a matrix: swish(Z) = Z ⊙ sigmoid(Z).
//
// @param z      input matrix (vector of row vectors).
// @param deriv  when true, return the element-wise derivative instead:
//               swish'(Z) = swish(Z) + sigmoid(Z) - sigmoid(Z) ⊙ swish(Z)
// @return matrix of activations (or derivatives) with the same shape as z.
//
// Kept structurally identical to the vector overload above for consistency.
std::vector<std::vector<double>> Activation::swish(std::vector<std::vector<double>> z, bool deriv){
    LinAlg alg;
    if(deriv){
        // Bug fix: the vectorized derivative expression was a statement with
        // no effect (missing `return`), leaving the deleted loop's result as
        // the only reachable path. Return the vectorized result.
        return alg.addition(swish(z), alg.subtraction(sigmoid(z), alg.hadamard_product(sigmoid(z), swish(z))));
    }
    // Forward pass: Z ⊙ sigmoid(Z).
    return alg.hadamard_product(z, sigmoid(z));
}
double Activation::RELU(double z, bool deriv){

BIN
SharedLib/.DS_Store vendored Normal file

Binary file not shown.

BIN
SharedLib/ANN.o Normal file

Binary file not shown.

BIN
SharedLib/Activation.o Normal file

Binary file not shown.

BIN
SharedLib/AutoEncoder.o Normal file

Binary file not shown.

BIN
SharedLib/BernoulliNB.o Normal file

Binary file not shown.

BIN
SharedLib/CLogLogReg.o Normal file

Binary file not shown.

BIN
SharedLib/Convolutions.o Normal file

Binary file not shown.

BIN
SharedLib/Cost.o Normal file

Binary file not shown.

BIN
SharedLib/Data.o Normal file

Binary file not shown.

BIN
SharedLib/ExpReg.o Normal file

Binary file not shown.

BIN
SharedLib/GaussianNB.o Normal file

Binary file not shown.

BIN
SharedLib/HiddenLayer.o Normal file

Binary file not shown.

BIN
SharedLib/KMeans.o Normal file

Binary file not shown.

BIN
SharedLib/LinAlg.o Normal file

Binary file not shown.

BIN
SharedLib/LinReg.o Normal file

Binary file not shown.

BIN
SharedLib/LogReg.o Normal file

Binary file not shown.

BIN
SharedLib/MLP.o Normal file

Binary file not shown.

BIN
SharedLib/MLPP.so Executable file

Binary file not shown.

BIN
SharedLib/MultinomialNB.o Normal file

Binary file not shown.

BIN
SharedLib/OutlierFinder.o Normal file

Binary file not shown.

BIN
SharedLib/OutputLayer.o Normal file

Binary file not shown.

BIN
SharedLib/PCA.o Normal file

Binary file not shown.

BIN
SharedLib/ProbitReg.o Normal file

Binary file not shown.

BIN
SharedLib/Reg.o Normal file

Binary file not shown.

BIN
SharedLib/SoftmaxNet.o Normal file

Binary file not shown.

BIN
SharedLib/SoftmaxReg.o Normal file

Binary file not shown.

BIN
SharedLib/Stat.o Normal file

Binary file not shown.

BIN
SharedLib/TanhReg.o Normal file

Binary file not shown.

BIN
SharedLib/UniLinReg.o Normal file

Binary file not shown.

BIN
SharedLib/Utilities.o Normal file

Binary file not shown.

BIN
SharedLib/kNN.o Normal file

Binary file not shown.

BIN
a.out Executable file

Binary file not shown.

View File

@ -224,14 +224,14 @@ int main() {
// // DYNAMICALLY SIZED ANN
// // Possible Weight Init Methods: Default, Uniform, HeNormal, HeUniform, XavierNormal, XavierUniform
// // Possible Activations: Linear, Sigmoid, Swish, CLogLog, Ar{Sinh, Cosh, Tanh, Csch, Sech, Coth}, GaussianCDF, GELU, UnitStep
// // Possible Activations: Linear, Sigmoid, Swish, Softplus, CLogLog, Ar{Sinh, Cosh, Tanh, Csch, Sech, Coth}, GaussianCDF, GELU, UnitStep
// // Possible Loss Functions: MSE, RMSE, MBE, LogLoss, CrossEntropy, HingeLoss
// std::vector<std::vector<double>> inputSet = {{0,0,1,1}, {0,1,0,1}};
// std::vector<double> outputSet = {0,1,1,0};
// ANN ann(alg.transpose(inputSet), outputSet);
// ann.addLayer(10, "RELU", "Default", "Ridge", 0.0001);
// ann.addLayer(10, "Sigmoid", "Default");
// ann.addOutputLayer("Sigmoid", "LogLoss", "XavierNormal");
// ann.addOutputLayer("Softplus", "LogLoss", "XavierNormal");
// ann.gradientDescent(0.1, 80000, 0);
// alg.printVector(ann.modelSetTest(alg.transpose(inputSet)));
// std::cout << "ACCURACY: " << 100 * ann.score() << "%" << std::endl;
@ -348,8 +348,20 @@ int main() {
// OutlierFinder outlierFinder(2); // Any datapoint outside of 2 stds from the mean is marked as an outlier.
// alg.printVector(outlierFinder.modelTest(inputSet));
// // Testing for new Functions
// Testing for new Functions
double z_s = 4;
std::cout << avn.swish(z_s) << std::endl;
std::cout << avn.swish(z_s, 1) << std::endl;
std::vector<double> z_v = {4, 5};
alg.printVector(avn.swish(z_v));
alg.printVector(avn.swish(z_v, 1));
std::vector<std::vector<double>> Z_m = {{4, 5}};
alg.printMatrix(avn.swish(Z_m));
alg.printMatrix(avn.swish(Z_m, 1));
// alg.printMatrix(alg.pinverse({{1,2}, {3,4}}));
return 0;
}