mirror of
https://github.com/Relintai/MLPP.git
synced 2024-11-12 10:15:01 +01:00
Added logit activation
This commit is contained in:
parent
d47cd7e976
commit
51ec771789
@@ -218,6 +218,27 @@ namespace MLPP{
|
||||
return alg.scalarMultiply(-1, alg.scalarAdd(-1, alg.exp(alg.scalarMultiply(-1, alg.exp(z)))));
|
||||
}
|
||||
|
||||
// Logit (inverse sigmoid / log-odds): log(z / (1 - z)), defined for z in (0, 1).
// @param z     input value, expected in the open interval (0, 1)
// @param deriv when true, return the derivative instead of the function value
// @return logit(z), or its derivative 1/z - 1/(z-1) when deriv is set
double Activation::logit(double z, bool deriv){
    // d/dz log(z/(1-z)) = 1/z + 1/(1-z) = 1/z - 1/(z-1)
    if(deriv) { return 1/z - 1/(z-1); }
    // BUG FIX: was std::log(z / (1 + z)). The logit divides by (1 - z);
    // this also matches the vector/matrix overloads, which compute
    // log(z / (onevec - z)) / log(z / (onemat - z)).
    return std::log(z / (1 - z));
}
|
||||
|
||||
std::vector<double> Activation::logit(std::vector<double> z, bool deriv){
|
||||
LinAlg alg;
|
||||
if(deriv) {
|
||||
return alg.subtraction(alg.elementWiseDivision(alg.onevec(z.size()), z), alg.elementWiseDivision(alg.onevec(z.size()), alg.subtraction(z, alg.onevec(z.size()))));
|
||||
}
|
||||
return alg.log(alg.elementWiseDivision(z, alg.subtraction(alg.onevec(z.size()), z)));
|
||||
}
|
||||
|
||||
std::vector<std::vector<double>> Activation::logit(std::vector<std::vector<double>> z, bool deriv){
|
||||
LinAlg alg;
|
||||
if(deriv) {
|
||||
return alg.subtraction(alg.elementWiseDivision(alg.onemat(z.size(), z[0].size()), z), alg.elementWiseDivision(alg.onemat(z.size(), z[0].size()), alg.subtraction(z, alg.onemat(z.size(), z[0].size()))));
|
||||
}
|
||||
return alg.log(alg.elementWiseDivision(z, alg.subtraction(alg.onemat(z.size(), z[0].size()), z)));
|
||||
}
|
||||
|
||||
double Activation::unitStep(double z, bool deriv){
|
||||
if(deriv) {
|
||||
return 0;
|
||||
|
@@ -45,6 +45,10 @@ namespace MLPP{
|
||||
std::vector<double> cloglog(std::vector<double> z, bool deriv = 0);
|
||||
std::vector<std::vector<double>> cloglog(std::vector<std::vector<double>> z, bool deriv = 0);
|
||||
|
||||
double logit(double z, bool deriv = 0);
|
||||
std::vector<double> logit(std::vector<double> z, bool deriv = 0);
|
||||
std::vector<std::vector<double>> logit(std::vector<std::vector<double>> z, bool deriv = 0);
|
||||
|
||||
double unitStep(double z, bool deriv = 0);
|
||||
std::vector<double> unitStep(std::vector<double> z, bool deriv = 0);
|
||||
std::vector<std::vector<double>> unitStep(std::vector<std::vector<double>> z, bool deriv = 0);
|
||||
|
@ -43,6 +43,9 @@ namespace MLPP {
|
||||
activation_map["CLogLog"] = &Activation::cloglog;
|
||||
activationTest_map["CLogLog"] = &Activation::cloglog;
|
||||
|
||||
activation_map["Logit"] = &Activation::logit;
|
||||
activationTest_map["Logit"] = &Activation::logit;
|
||||
|
||||
activation_map["GaussianCDF"] = &Activation::gaussianCDF;
|
||||
activationTest_map["GaussianCDF"] = &Activation::gaussianCDF;
|
||||
|
||||
@ -52,8 +55,8 @@ namespace MLPP {
|
||||
activation_map["GELU"] = &Activation::GELU;
|
||||
activationTest_map["GELU"] = &Activation::GELU;
|
||||
|
||||
activation_map["Sign"] = &Activation::unitStep;
|
||||
activationTest_map["Sign"] = &Activation::unitStep;
|
||||
activation_map["Sign"] = &Activation::sign;
|
||||
activationTest_map["Sign"] = &Activation::sign;
|
||||
|
||||
activation_map["UnitStep"] = &Activation::unitStep;
|
||||
activationTest_map["UnitStep"] = &Activation::unitStep;
|
||||
|
@ -45,6 +45,9 @@ namespace MLPP {
|
||||
activation_map["CLogLog"] = &Activation::cloglog;
|
||||
activationTest_map["CLogLog"] = &Activation::cloglog;
|
||||
|
||||
activation_map["Logit"] = &Activation::logit;
|
||||
activationTest_map["Logit"] = &Activation::logit;
|
||||
|
||||
activation_map["GaussianCDF"] = &Activation::gaussianCDF;
|
||||
activationTest_map["GaussianCDF"] = &Activation::gaussianCDF;
|
||||
|
||||
@ -54,8 +57,8 @@ namespace MLPP {
|
||||
activation_map["GELU"] = &Activation::GELU;
|
||||
activationTest_map["GELU"] = &Activation::GELU;
|
||||
|
||||
activation_map["Sign"] = &Activation::unitStep;
|
||||
activationTest_map["Sign"] = &Activation::unitStep;
|
||||
activation_map["Sign"] = &Activation::sign;
|
||||
activationTest_map["Sign"] = &Activation::sign;
|
||||
|
||||
activation_map["UnitStep"] = &Activation::unitStep;
|
||||
activationTest_map["UnitStep"] = &Activation::unitStep;
|
||||
|
@ -42,6 +42,9 @@ namespace MLPP {
|
||||
activation_map["CLogLog"] = &Activation::cloglog;
|
||||
activationTest_map["CLogLog"] = &Activation::cloglog;
|
||||
|
||||
activation_map["Logit"] = &Activation::logit;
|
||||
activationTest_map["Logit"] = &Activation::logit;
|
||||
|
||||
activation_map["GaussianCDF"] = &Activation::gaussianCDF;
|
||||
activationTest_map["GaussianCDF"] = &Activation::gaussianCDF;
|
||||
|
||||
@ -51,8 +54,8 @@ namespace MLPP {
|
||||
activation_map["GELU"] = &Activation::GELU;
|
||||
activationTest_map["GELU"] = &Activation::GELU;
|
||||
|
||||
activation_map["Sign"] = &Activation::unitStep;
|
||||
activationTest_map["Sign"] = &Activation::unitStep;
|
||||
activation_map["Sign"] = &Activation::sign;
|
||||
activationTest_map["Sign"] = &Activation::sign;
|
||||
|
||||
activation_map["UnitStep"] = &Activation::unitStep;
|
||||
activationTest_map["UnitStep"] = &Activation::unitStep;
|
||||
|
32
main.cpp
32
main.cpp
@ -197,14 +197,14 @@ int main() {
|
||||
// data.setData(4, "/Users/marcmelikyan/Desktop/Data/Iris.csv", inputSet, tempOutputSet);
|
||||
// std::vector<std::vector<double>> outputSet = data.oneHotRep(tempOutputSet, 3);
|
||||
|
||||
// SUPPORT VECTOR CLASSIFICATION
|
||||
std::vector<std::vector<double>> inputSet;
|
||||
std::vector<double> outputSet;
|
||||
data.setData(30, "/Users/marcmelikyan/Desktop/Data/BreastCancerSVM.csv", inputSet, outputSet);
|
||||
SVC model(inputSet, outputSet, 1);
|
||||
model.SGD(0.00001, 100000, 1);
|
||||
alg.printVector(model.modelSetTest(inputSet));
|
||||
std::cout << "ACCURACY: " << 100 * model.score() << "%" << std::endl;
|
||||
// // SUPPORT VECTOR CLASSIFICATION
|
||||
// std::vector<std::vector<double>> inputSet;
|
||||
// std::vector<double> outputSet;
|
||||
// data.setData(30, "/Users/marcmelikyan/Desktop/Data/BreastCancerSVM.csv", inputSet, outputSet);
|
||||
// SVC model(inputSet, outputSet, 1);
|
||||
// model.SGD(0.00001, 100000, 1);
|
||||
// alg.printVector(model.modelSetTest(inputSet));
|
||||
// std::cout << "ACCURACY: " << 100 * model.score() << "%" << std::endl;
|
||||
|
||||
// SoftmaxReg model(inputSet, outputSet);
|
||||
// model.SGD(0.001, 20000, 0);
|
||||
@ -404,16 +404,16 @@ int main() {
|
||||
|
||||
// // Testing new Functions
|
||||
// double z_s = 0.001;
|
||||
// std::cout << avn.sinc(z_s) << std::endl;
|
||||
// std::cout << avn.sinc(z_s, 1) << std::endl;
|
||||
// std::cout << avn.logit(z_s) << std::endl;
|
||||
// std::cout << avn.logit(z_s, 1) << std::endl;
|
||||
|
||||
// std::vector<double> z_v = {0.001, 5};
|
||||
// alg.printVector(avn.sinc(z_v));
|
||||
// alg.printVector(avn.sinc(z_v, 1));
|
||||
// std::vector<double> z_v = {0.001};
|
||||
// alg.printVector(avn.logit(z_v));
|
||||
// alg.printVector(avn.logit(z_v, 1));
|
||||
|
||||
// std::vector<std::vector<double>> Z_m = {{0.001, 5}};
|
||||
// alg.printMatrix(avn.sinc(Z_m));
|
||||
// alg.printMatrix(avn.sinc(Z_m, 1));
|
||||
// std::vector<std::vector<double>> Z_m = {{0.001}};
|
||||
// alg.printMatrix(avn.logit(Z_m));
|
||||
// alg.printMatrix(avn.logit(Z_m, 1));
|
||||
|
||||
// std::cout << alg.trace({{1,2}, {3,4}}) << std::endl;
|
||||
// alg.printMatrix(alg.pinverse({{1,2}, {3,4}}));
|
||||
|
Loading…
Reference in New Issue
Block a user