Added trace, reordered the activation function maps in the HL and OL classes, created buildSO.sh, recompiled

novak_99 2021-05-28 23:11:02 -07:00
parent e5598185dd
commit 1756c53b1b
8 changed files with 43 additions and 25 deletions

MLPP/HiddenLayer/HiddenLayer.cpp

@@ -34,6 +34,18 @@ namespace MLPP {
     activation_map["CLogLog"] = &Activation::cloglog;
     activationTest_map["CLogLog"] = &Activation::cloglog;
+    activation_map["GaussianCDF"] = &Activation::gaussianCDF;
+    activationTest_map["GaussianCDF"] = &Activation::gaussianCDF;
+    activation_map["RELU"] = &Activation::RELU;
+    activationTest_map["RELU"] = &Activation::RELU;
+    activation_map["GELU"] = &Activation::GELU;
+    activationTest_map["GELU"] = &Activation::GELU;
+    activation_map["UnitStep"] = &Activation::unitStep;
+    activationTest_map["UnitStep"] = &Activation::unitStep;
     activation_map["Sinh"] = &Activation::sinh;
     activationTest_map["Sinh"] = &Activation::sinh;
@@ -69,18 +81,6 @@ namespace MLPP {
     activation_map["Arcoth"] = &Activation::arcoth;
     activationTest_map["Arcoth"] = &Activation::arcoth;
-    activation_map["GaussianCDF"] = &Activation::gaussianCDF;
-    activationTest_map["GaussianCDF"] = &Activation::gaussianCDF;
-    activation_map["RELU"] = &Activation::RELU;
-    activationTest_map["RELU"] = &Activation::RELU;
-    activation_map["GELU"] = &Activation::GELU;
-    activationTest_map["GELU"] = &Activation::GELU;
-    activation_map["UnitStep"] = &Activation::unitStep;
-    activationTest_map["UnitStep"] = &Activation::unitStep;
 }
 void HiddenLayer::forwardPass(){
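Context for the reorder: the layer constructors fill activation_map and activationTest_map with pointers to members of Activation, keyed by the activation's name, and forwardPass dispatches through them; this commit only moves the GaussianCDF/RELU/GELU/UnitStep registrations earlier in the constructor. A minimal standalone sketch of that dispatch pattern (the RELU signature here is a simplified stand-in, not the library's exact overload set):

    #include <iostream>
    #include <map>
    #include <string>

    class Activation {
    public:
        // Simplified scalar RELU; deriv selects the derivative, as in the test maps.
        double RELU(double z, bool deriv = false) {
            return deriv ? (z > 0.0 ? 1.0 : 0.0) : (z > 0.0 ? z : 0.0);
        }
    };

    int main() {
        // Same shape as activationTest_map: name -> pointer-to-member of Activation.
        std::map<std::string, double (Activation::*)(double, bool)> activationTest_map;
        activationTest_map["RELU"] = &Activation::RELU;

        Activation avn;
        // Dispatch through the map, as the layer does after reading its activation string.
        std::cout << (avn.*activationTest_map["RELU"])(3.0, false) << std::endl; // 3
        std::cout << (avn.*activationTest_map["RELU"])(3.0, true) << std::endl;  // 1
        return 0;
    }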

MLPP/LinAlg/LinAlg.cpp

@@ -222,6 +222,14 @@ namespace MLPP{
         return deter;
     }
+    double LinAlg::trace(std::vector<std::vector<double>> A){
+        double trace = 0;
+        for(int i = 0; i < A.size(); i++){
+            trace += A[i][i];
+        }
+        return trace;
+    }
     std::vector<std::vector<double>> LinAlg::cofactor(std::vector<std::vector<double>> A, int n, int i, int j){
         std::vector<std::vector<double>> cof;
         cof.resize(A.size());
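The new LinAlg::trace sums the diagonal, tr(A) = A[0][0] + ... + A[n-1][n-1], and assumes A is square. A quick check using the same matrix as the test line added to main.cpp below (the include path is an assumption, mirroring the -I MLPP layout buildSO.sh uses):

    #include <iostream>
    #include "MLPP/LinAlg/LinAlg.hpp"

    int main() {
        MLPP::LinAlg alg;
        // tr({{1,2},{3,4}}) = 1 + 4 = 5
        std::cout << alg.trace({{1, 2}, {3, 4}}) << std::endl; // prints 5
        return 0;
    }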

MLPP/LinAlg/LinAlg.hpp

@@ -45,6 +45,8 @@ namespace MLPP{
         std::vector<std::vector<double>> exponentiate(std::vector<std::vector<double>> A, double p);
         double det(std::vector<std::vector<double>> A, int d);
+        double trace(std::vector<std::vector<double>> A);
         std::vector<std::vector<double>> cofactor(std::vector<std::vector<double>> A, int n, int i, int j);

MLPP/OutputLayer/OutputLayer.cpp

@@ -33,6 +33,18 @@ namespace MLPP {
     activation_map["CLogLog"] = &Activation::cloglog;
     activationTest_map["CLogLog"] = &Activation::cloglog;
+    activation_map["GaussianCDF"] = &Activation::gaussianCDF;
+    activationTest_map["GaussianCDF"] = &Activation::gaussianCDF;
+    activation_map["RELU"] = &Activation::RELU;
+    activationTest_map["RELU"] = &Activation::RELU;
+    activation_map["GELU"] = &Activation::GELU;
+    activationTest_map["GELU"] = &Activation::GELU;
+    activation_map["UnitStep"] = &Activation::unitStep;
+    activationTest_map["UnitStep"] = &Activation::unitStep;
     activation_map["Sinh"] = &Activation::sinh;
     activationTest_map["Sinh"] = &Activation::sinh;
@@ -69,18 +81,6 @@ namespace MLPP {
     activation_map["Arcoth"] = &Activation::arcoth;
     activationTest_map["Arcoth"] = &Activation::arcoth;
-    activation_map["GaussianCDF"] = &Activation::gaussianCDF;
-    activationTest_map["GaussianCDF"] = &Activation::gaussianCDF;
-    activation_map["RELU"] = &Activation::RELU;
-    activationTest_map["RELU"] = &Activation::RELU;
-    activation_map["GELU"] = &Activation::GELU;
-    activationTest_map["GELU"] = &Activation::GELU;
-    activation_map["UnitStep"] = &Activation::unitStep;
-    activationTest_map["UnitStep"] = &Activation::unitStep;
     costDeriv_map["MSE"] = &Cost::MSEDeriv;
     cost_map["MSE"] = &Cost::MSE;
     costDeriv_map["RMSE"] = &Cost::RMSEDeriv;
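OutputLayer uses the same name-keyed pointer-to-member pattern for costs, pairing each entry in cost_map with its gradient in costDeriv_map so training can look up both halves with a single string. A standalone sketch with a hypothetical MSE whose signatures are simplified from the library's:

    #include <iostream>
    #include <map>
    #include <string>
    #include <vector>

    class Cost {
    public:
        // Hypothetical MSE over a prediction vector and its target.
        double MSE(std::vector<double> y_hat, std::vector<double> y) {
            double sum = 0;
            for (size_t i = 0; i < y_hat.size(); i++) {
                sum += (y_hat[i] - y[i]) * (y_hat[i] - y[i]);
            }
            return sum / (2 * y_hat.size());
        }
        // Its derivative w.r.t. y_hat, used on the backward pass.
        std::vector<double> MSEDeriv(std::vector<double> y_hat, std::vector<double> y) {
            std::vector<double> d(y_hat.size());
            for (size_t i = 0; i < y_hat.size(); i++) { d[i] = y_hat[i] - y[i]; }
            return d;
        }
    };

    int main() {
        std::map<std::string, double (Cost::*)(std::vector<double>, std::vector<double>)> cost_map;
        std::map<std::string, std::vector<double> (Cost::*)(std::vector<double>, std::vector<double>)> costDeriv_map;
        cost_map["MSE"] = &Cost::MSE;
        costDeriv_map["MSE"] = &Cost::MSEDeriv;

        Cost cost;
        std::vector<double> y_hat = {1.0, 2.0}, y = {0.0, 2.0};
        std::cout << (cost.*cost_map["MSE"])(y_hat, y) << std::endl; // 0.25
        // The matching gradient comes from the same key: {1, 0} here.
        std::vector<double> grad = (cost.*costDeriv_map["MSE"])(y_hat, y);
        std::cout << grad[0] << " " << grad[1] << std::endl; // 1 0
        return 0;
    }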

Binary file not shown.

BIN a.out
Binary file not shown.
buildSO.sh (new executable file)

@@ -0,0 +1,7 @@
+g++ -I MLPP -c MLPP/Stat/Stat.cpp MLPP/LinAlg/LinAlg.cpp MLPP/Regularization/Reg.cpp MLPP/Activation/Activation.cpp MLPP/Utilities/Utilities.cpp MLPP/Data/Data.cpp MLPP/Cost/Cost.cpp MLPP/ANN/ANN.cpp MLPP/HiddenLayer/HiddenLayer.cpp MLPP/OutputLayer/OutputLayer.cpp MLPP/MLP/MLP.cpp MLPP/LinReg/LinReg.cpp MLPP/LogReg/LogReg.cpp MLPP/UniLinReg/UniLinReg.cpp MLPP/CLogLogReg/CLogLogReg.cpp MLPP/ExpReg/ExpReg.cpp MLPP/ProbitReg/ProbitReg.cpp MLPP/SoftmaxReg/SoftmaxReg.cpp MLPP/TanhReg/TanhReg.cpp MLPP/SoftmaxNet/SoftmaxNet.cpp MLPP/Convolutions/Convolutions.cpp MLPP/AutoEncoder/AutoEncoder.cpp MLPP/MultinomialNB/MultinomialNB.cpp MLPP/BernoulliNB/BernoulliNB.cpp MLPP/GaussianNB/GaussianNB.cpp MLPP/KMeans/KMeans.cpp MLPP/kNN/kNN.cpp MLPP/PCA/PCA.cpp MLPP/OutlierFinder/OutlierFinder.cpp --std=c++17 -pthread
+g++ -shared -o MLPP.so Reg.o LinAlg.o Stat.o Activation.o LinReg.o Utilities.o Cost.o LogReg.o ProbitReg.o ExpReg.o CLogLogReg.o SoftmaxReg.o TanhReg.o kNN.o KMeans.o UniLinReg.o SoftmaxNet.o MLP.o AutoEncoder.o HiddenLayer.o OutputLayer.o ANN.o BernoulliNB.o GaussianNB.o MultinomialNB.o Convolutions.o OutlierFinder.o Data.o
+mv MLPP.so SharedLib
+rm *.o
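The script compiles every translation unit with -I MLPP, links the objects into MLPP.so, moves the library into SharedLib, and deletes the intermediate .o files. One way to consume the result (the final link line is an assumption about how a.out would be produced, not taken from the repo):

    ./buildSO.sh
    # Link a program against the freshly built shared object; paths are relative to the repo root.
    g++ main.cpp SharedLib/MLPP.so --std=c++17 -pthread -o a.out
    ./a.out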

main.cpp

@@ -297,7 +297,7 @@ int main() {
     // auto [U, S, Vt] = alg.SVD(inputSet);
-    // // PCA done using Jacobi's method to approximate eigenvalues.
+    // // PCA done using Jacobi's method to approximate eigenvalues and eigenvectors.
     // PCA dr(inputSet, 1); // 1 dimensional representation.
     // std::cout << std::endl;
     // std::cout << "Dimensionally reduced representation:" << std::endl;
@@ -361,6 +361,7 @@ int main() {
     // alg.printMatrix(avn.arcoth(Z_m));
     // alg.printMatrix(avn.arcoth(Z_m, 1));
+    // std::cout << alg.trace({{1,2}, {3,4}}) << std::endl;
     // alg.printMatrix(alg.pinverse({{1,2}, {3,4}}));
     return 0;