From c93400840aac599f86259fca95ef6bd5bf4fda9e Mon Sep 17 00:00:00 2001 From: novak_99 Date: Fri, 21 Jan 2022 17:08:31 -0800 Subject: [PATCH] so --- MLPP/Activation/Activation.cpp | 14 +++++++------- MLPP/BernoulliNB/BernoulliNB.cpp | 12 ++++++------ MLPP/Cost/Cost.cpp | 8 ++++---- MLPP/Data/Data.cpp | 2 +- MLPP/GaussianNB/GaussianNB.cpp | 4 ++-- MLPP/LinAlg/LinAlg.hpp | 4 ++-- MLPP/MultinomialNB/MultinomialNB.cpp | 8 ++++---- MLPP/Stat/Stat.cpp | 2 +- 8 files changed, 27 insertions(+), 27 deletions(-) diff --git a/MLPP/Activation/Activation.cpp b/MLPP/Activation/Activation.cpp index 4444628..2d0f7ec 100644 --- a/MLPP/Activation/Activation.cpp +++ b/MLPP/Activation/Activation.cpp @@ -142,7 +142,7 @@ namespace MLPP{ double Activation::softplus(double z, bool deriv){ if(deriv){ return sigmoid(z); } - return log(1 + exp(z)); + return std::log(1 + exp(z)); } std::vector Activation::softplus(std::vector z, bool deriv){ @@ -752,7 +752,7 @@ namespace MLPP{ double Activation::arsinh(double z, bool deriv){ if(deriv){ return 1 / sqrt(z * z + 1); } - return log(z + sqrt(z * z + 1)); + return std::log(z + sqrt(z * z + 1)); } std::vector Activation::arsinh(std::vector z, bool deriv){ @@ -771,7 +771,7 @@ namespace MLPP{ if(deriv){ return 1/sqrt(z * z - 1); } - return log(z + sqrt(z * z - 1)); + return std::log(z + sqrt(z * z - 1)); } std::vector Activation::arcosh(std::vector z, bool deriv){ @@ -790,7 +790,7 @@ namespace MLPP{ if(deriv){ return 1/(1 - z * z); } - return 0.5 * log((1 + z)/(1 - z)); + return 0.5 * std::log((1 + z)/(1 - z)); } std::vector Activation::artanh(std::vector z, bool deriv){ @@ -809,7 +809,7 @@ namespace MLPP{ if(deriv){ return -1/((z * z) * sqrt(1 + (1/(z * z)))); } - return log(sqrt(1 + (1 / (z * z))) + (1/z)); + return std::log(sqrt(1 + (1 / (z * z))) + (1/z)); } std::vector Activation::arcsch(std::vector z, bool deriv){ @@ -829,7 +829,7 @@ namespace MLPP{ if(deriv){ return -1/(z * sqrt(1 - z * z)); } - return log((1/z) + ((1/z) + 1) * ((1/z) - 1)); + 
return std::log((1/z) + sqrt((1/z) + 1) * sqrt((1/z) - 1)); } std::vector Activation::arsech(std::vector z, bool deriv){ @@ -848,7 +848,7 @@ namespace MLPP{ if(deriv){ return 1/(1 - z * z); } - return 0.5 * log((1 + z)/(z - 1)); + return 0.5 * std::log((1 + z)/(z - 1)); } std::vector Activation::arcoth(std::vector z, bool deriv){ diff --git a/MLPP/BernoulliNB/BernoulliNB.cpp b/MLPP/BernoulliNB/BernoulliNB.cpp index edeadf5..3fd4a4a 100644 --- a/MLPP/BernoulliNB/BernoulliNB.cpp +++ b/MLPP/BernoulliNB/BernoulliNB.cpp @@ -139,8 +139,8 @@ namespace MLPP{ for(int j = 0; j < inputSet.size(); j++){ for(int k = 0; k < vocab.size(); k++){ if(inputSet[i][j] == vocab[k]){ - score_0 += log(theta[0][vocab[k]]); - score_1 += log(theta[1][vocab[k]]); + score_0 += std::log(theta[0][vocab[k]]); + score_1 += std::log(theta[1][vocab[k]]); foundIndices.push_back(k); } @@ -155,13 +155,13 @@ namespace MLPP{ } } if(!found){ - score_0 += log(1 - theta[0][vocab[i]]); - score_1 += log(1 - theta[1][vocab[i]]); + score_0 += std::log(1 - theta[0][vocab[i]]); + score_1 += std::log(1 - theta[1][vocab[i]]); } } - score_0 += log(prior_0); - score_1 += log(prior_1); + score_0 += std::log(prior_0); + score_1 += std::log(prior_1); score_0 = exp(score_0); score_1 = exp(score_1); diff --git a/MLPP/Cost/Cost.cpp b/MLPP/Cost/Cost.cpp index e6b1084..b94a49e 100644 --- a/MLPP/Cost/Cost.cpp +++ b/MLPP/Cost/Cost.cpp @@ -157,7 +157,7 @@ namespace MLPP{ double sum = 0; double eps = 1e-8; for(int i = 0; i < y_hat.size(); i++){ - sum += -(y[i] * log(y_hat[i] + eps) + (1 - y[i]) * log(1 - y_hat[i] + eps)); + sum += -(y[i] * std::log(y_hat[i] + eps) + (1 - y[i]) * std::log(1 - y_hat[i] + eps)); } return sum / y_hat.size(); @@ -168,7 +168,7 @@ namespace MLPP{ double eps = 1e-8; for(int i = 0; i < y_hat.size(); i++){ for(int j = 0; j < y_hat[i].size(); j++){ - sum += -(y[i][j] * log(y_hat[i][j] + eps) + (1 - y[i][j]) * log(1 - y_hat[i][j] + eps)); + sum += -(y[i][j] * std::log(y_hat[i][j] + eps) + (1 - y[i][j]) * std::log(1
- y_hat[i][j] + eps)); } } @@ -188,7 +188,7 @@ namespace MLPP{ double Cost::CrossEntropy(std::vector y_hat, std::vector y){ double sum = 0; for(int i = 0; i < y_hat.size(); i++){ - sum += y[i] * log(y_hat[i]); + sum += y[i] * std::log(y_hat[i]); } return -1 * sum; @@ -198,7 +198,7 @@ namespace MLPP{ double sum = 0; for(int i = 0; i < y_hat.size(); i++){ for(int j = 0; j < y_hat[i].size(); j++){ - sum += y[i][j] * log(y_hat[i][j]); + sum += y[i][j] * std::log(y_hat[i][j]); } } diff --git a/MLPP/Data/Data.cpp b/MLPP/Data/Data.cpp index 3266e27..ec27926 100644 --- a/MLPP/Data/Data.cpp +++ b/MLPP/Data/Data.cpp @@ -385,7 +385,7 @@ namespace MLPP{ IDF.resize(frequency.size()); for(int i = 0; i < IDF.size(); i++){ - IDF[i] = log((double)segmented_sentences.size() / (double)frequency[i]); + IDF[i] = std::log((double)segmented_sentences.size() / (double)frequency[i]); } std::vector> TFIDF; diff --git a/MLPP/GaussianNB/GaussianNB.cpp b/MLPP/GaussianNB/GaussianNB.cpp index cc7ef58..7d036b6 100644 --- a/MLPP/GaussianNB/GaussianNB.cpp +++ b/MLPP/GaussianNB/GaussianNB.cpp @@ -36,7 +36,7 @@ namespace MLPP{ double score[class_num]; double y_hat_i = 1; for(int i = class_num - 1; i >= 0; i--){ - y_hat_i += log(priors[i] * (1 / sqrt(2 * M_PI * sigma[i] * sigma[i])) * exp(-(x[i] * mu[i]) * (x[i] * mu[i]) / (2 * sigma[i] * sigma[i]))); + y_hat_i += std::log(priors[i] * (1 / sqrt(2 * M_PI * sigma[i] * sigma[i])) * exp(-(x[i] * mu[i]) * (x[i] * mu[i]) / (2 * sigma[i] * sigma[i]))); score[i] = exp(y_hat_i); } return std::distance(score, std::max_element(score, score + sizeof(score) / sizeof(double))); @@ -79,7 +79,7 @@ namespace MLPP{ double y_hat_i = 1; for(int j = class_num - 1; j >= 0; j--){ for(int k = 0; k < inputSet[i].size(); k++){ - y_hat_i += log(priors[j] * (1 / sqrt(2 * M_PI * sigma[j] * sigma[j])) * exp(-(inputSet[i][k] * mu[j]) * (inputSet[i][k] * mu[j]) / (2 * sigma[j] * sigma[j]))); + y_hat_i += std::log(priors[j] * (1 / sqrt(2 * M_PI * sigma[j] * sigma[j])) * 
exp(-(inputSet[i][k] * mu[j]) * (inputSet[i][k] * mu[j]) / (2 * sigma[j] * sigma[j]))); } score[j] = exp(y_hat_i); std::cout << score[j] << std::endl; diff --git a/MLPP/LinAlg/LinAlg.hpp b/MLPP/LinAlg/LinAlg.hpp index a81e7ce..de30769 100644 --- a/MLPP/LinAlg/LinAlg.hpp +++ b/MLPP/LinAlg/LinAlg.hpp @@ -34,7 +34,7 @@ namespace MLPP{ std::vector> scalarAdd(double scalar, std::vector> A); - std::vector> log(std::vector> A); + std::vector> log(std::vector> A); std::vector> log10(std::vector> A); @@ -132,7 +132,7 @@ namespace MLPP{ std::vector subtractMatrixRows(std::vector a, std::vector> B); - std::vector log(std::vector a); + std::vector log(std::vector a); std::vector log10(std::vector a); diff --git a/MLPP/MultinomialNB/MultinomialNB.cpp b/MLPP/MultinomialNB/MultinomialNB.cpp index 5b1af86..ab8a54f 100644 --- a/MLPP/MultinomialNB/MultinomialNB.cpp +++ b/MLPP/MultinomialNB/MultinomialNB.cpp @@ -35,14 +35,14 @@ namespace MLPP{ for(int k = 0; k < vocab.size(); k++){ if(x[j] == vocab[k]){ for(int p = class_num - 1; p >= 0; p--){ - score[p] += log(theta[p][vocab[k]]); + score[p] += std::log(theta[p][vocab[k]]); } } } } for(int i = 0; i < priors.size(); i++){ - score[i] += log(priors[i]); + score[i] += std::log(priors[i]); } return std::distance(score, std::max_element(score, score + sizeof(score) / sizeof(double))); @@ -98,14 +98,14 @@ namespace MLPP{ for(int k = 0; k < vocab.size(); k++){ if(inputSet[i][j] == vocab[k]){ for(int p = class_num - 1; p >= 0; p--){ - score[p] += log(theta[i][vocab[k]]); + score[p] += std::log(theta[i][vocab[k]]); } } } } for(int i = 0; i < priors.size(); i++){ - score[i] += log(priors[i]); + score[i] += std::log(priors[i]); score[i] = exp(score[i]); } diff --git a/MLPP/Stat/Stat.cpp b/MLPP/Stat/Stat.cpp index 0bba33f..cdc2b67 100644 --- a/MLPP/Stat/Stat.cpp +++ b/MLPP/Stat/Stat.cpp @@ -214,6 +214,6 @@ namespace MLPP{ if(x == y){ return x; } - return (y - x) / (log(y) - log(x)); + return (y - x) / (std::log(y) - std::log(x)); } } \ No
newline at end of file