Mirror of https://github.com/Relintai/MLPP.git (synced 2025-02-04 15:55:53 +01:00)
Reordered functions in Stat.hpp, added Stat.range, Stat.midrange, cleaned up code for LinAlg.max, LinAlg.min
parent 6dfa1cafe3
commit 0753ffe261
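For context, a minimal standalone sketch of the two statistics this commit introduces (plain C++; the helper names and the use of std::minmax_element are illustrative, not MLPP's own API): range is the spread max(x) - min(x), and midrange as implemented here is range(x)/2, i.e. half the spread (the textbook midrange is usually (max + min)/2).

    #include <algorithm>
    #include <vector>

    // Illustrative stand-ins for Stat::range / Stat::midrange (assumes a non-empty x).
    double range(const std::vector<double>& x){
        auto mm = std::minmax_element(x.begin(), x.end());
        return *mm.second - *mm.first; // max - min
    }

    double midrange(const std::vector<double>& x){
        return range(x) / 2; // matches this commit's formula; the conventional midrange would be (max + min) / 2
    }

For x = {1, 2, ..., 10} this gives range = 9 and, under the commit's formula, midrange = 4.5.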
@@ -391,19 +391,11 @@ namespace MLPP{
     }
 
     double LinAlg::max(std::vector<std::vector<double>> A){
-        std::vector<double> max_elements;
-        for(int i = 0; i < A.size(); i++){
-            max_elements.push_back(max(A[i]));
-        }
-        return max(max_elements);
+        return max(flatten(A));
     }
 
     double LinAlg::min(std::vector<std::vector<double>> A){
-        std::vector<double> max_elements;
-        for(int i = 0; i < A.size(); i++){
-            max_elements.push_back(min(A[i]));
-        }
-        return min(max_elements);
+        return min(flatten(A));
     }
 
     std::vector<std::vector<double>> LinAlg::round(std::vector<std::vector<double>> A){
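The LinAlg change above swaps the per-row scan for a single pass over the flattened matrix. A rough standalone equivalent of the idea, using local stand-ins rather than MLPP's own LinAlg/flatten API:

    #include <algorithm>
    #include <vector>

    // Illustrative helpers only; names and signatures are not MLPP's. Assumes A is non-empty.
    std::vector<double> flatten(const std::vector<std::vector<double>>& A){
        std::vector<double> a;
        for(const auto& row : A) a.insert(a.end(), row.begin(), row.end()); // concatenate rows
        return a;
    }

    double matrix_max(const std::vector<std::vector<double>>& A){
        std::vector<double> flat = flatten(A);
        return *std::max_element(flat.begin(), flat.end()); // largest entry of the whole matrix
    }

    double matrix_min(const std::vector<std::vector<double>>& A){
        std::vector<double> flat = flatten(A);
        return *std::min_element(flat.begin(), flat.end()); // smallest entry of the whole matrix
    }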
@@ -6,6 +6,7 @@
 
 #include "Stat.hpp"
 #include "Activation/Activation.hpp"
+#include "LinAlg/LinAlg.hpp"
 #include "Data/Data.hpp"
 #include <algorithm>
 #include <map>
@@ -66,6 +67,27 @@ namespace MLPP{
         return modes;
     }
 
+    double Stat::range(std::vector<double> x){
+        LinAlg alg;
+        return alg.max(x) - alg.min(x);
+    }
+
+    double Stat::midrange(std::vector<double> x){
+        return range(x)/2;
+    }
+
+    double Stat::absAvgDeviation(std::vector<double> x){
+        double sum = 0;
+        for(int i = 0; i < x.size(); i++){
+            sum += std::abs(x[i] - mean(x));
+        }
+        return sum / x.size();
+    }
+
+    double Stat::standardDeviation(std::vector<double> x){
+        return std::sqrt(variance(x));
+    }
+
     double Stat::variance(std::vector<double> x){
         double sum = 0;
         for(int i = 0; i < x.size(); i++){
@@ -90,6 +112,11 @@ namespace MLPP{
         return correlation(x, y) * correlation(x, y);
     }
 
+    double Stat::chebyshevIneq(double k){
+        // X may or may not belong to a Gaussian Distribution
+        return 1 - 1 / (k * k);
+    }
+
     double Stat::weightedMean(std::vector<double> x, std::vector<double> weights){
         double sum = 0;
         double weights_sum = 0;
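The new chebyshevIneq helper encodes Chebyshev's inequality, Pr(|X - mu| >= k*sigma) <= 1/k^2 (stated in the comment of the removed copy below): at least 1 - 1/k^2 of any distribution's probability mass lies within k standard deviations of the mean, Gaussian or not. For example, chebyshevIneq(2) returns 1 - 1/4 = 0.75, which is what the commented-out "Chebyshev Inequality" printout in main.cpp would show.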
@@ -189,21 +216,4 @@ namespace MLPP{
         }
         return (y - x) / (log(y) - log(x));
     }
-
-    double Stat::standardDeviation(std::vector<double> x){
-        return std::sqrt(variance(x));
-    }
-
-    double Stat::absAvgDeviation(std::vector<double> x){
-        double sum = 0;
-        for(int i = 0; i < x.size(); i++){
-            sum += std::abs(x[i] - mean(x));
-        }
-        return sum / x.size();
-    }
-
-    double Stat::chebyshevIneq(double k){
-        //Pr(|X - mu| >= k * sigma) <= 1/k^2, X may or may not belong to a Gaussian Distribution
-        return 1 - 1 / (k * k);
-    }
 }
@@ -20,10 +20,16 @@ namespace MLPP{
            double mean(std::vector <double> x);
            double median(std::vector<double> x);
            std::vector<double> mode(std::vector<double> x);
+           double range(std::vector<double> x);
+           double midrange(std::vector<double> x);
+           double absAvgDeviation(std::vector <double> x);
+           double standardDeviation(std::vector <double> x);
            double variance(std::vector <double> x);
            double covariance(std::vector <double> x, std::vector <double> y);
            double correlation(std::vector <double> x, std::vector<double> y);
            double R2(std::vector <double> x, std::vector<double> y);
+           double chebyshevIneq(double k);
+
 
            // Extras
            double weightedMean(std::vector<double> x, std::vector<double> weights);
@@ -40,9 +46,6 @@
            double stolarskyMean(double x, double y, double p);
            double identricMean(double x, double y);
            double logMean(double x, double y);
-           double standardDeviation(std::vector <double> x);
-           double absAvgDeviation(std::vector <double> x);
-           double chebyshevIneq(double k);
 
        private:
 
main.cpp
@@ -78,17 +78,23 @@ int main() {
     // std::vector<double> outputSet = {0,1,1,0};
 
     // // STATISTICS
-    // std::vector<double> x = {1,2,3,4,5,6,7,8,9,1};
+    // std::vector<double> x = {1,2,3,4,5,6,7,8,9,10};
     // std::vector<double> y = {10,9,8,7,6,5,4,3,2,1};
     // std::vector<double> w = {0.1, 0.1, 0.1, 0.1, 0.1, 0.1, 0.1, 0.1, 0.1, 0.1};
 
     // std::cout << "Arithmetic Mean: " << stat.mean(x) << std::endl;
     // std::cout << "Median: " << stat.median(x) << std::endl;
     // alg.printVector(stat.mode(x));
+    // std::cout << "Range: " << stat.range(x) << std::endl;
+    // std::cout << "Midrange: " << stat.midrange(x) << std::endl;
+    // std::cout << "Absolute Average Deviation: " << stat.absAvgDeviation(x) << std::endl;
+    // std::cout << "Standard Deviation: " << stat.standardDeviation(x) << std::endl;
     // std::cout << "Variance: " << stat.variance(x) << std::endl;
     // std::cout << "Covariance: " << stat.covariance(x, y) << std::endl;
     // std::cout << "Correlation: " << stat.correlation(x, y) << std::endl;
     // std::cout << "R^2: " << stat.R2(x, y) << std::endl;
+    // // Returns 1 - (1/k^2)
+    // std::cout << "Chebyshev Inequality: " << stat.chebyshevIneq(2) << std::endl;
     // std::cout << "Weighted Mean: " << stat.weightedMean(x, w) << std::endl;
     // std::cout << "Geometric Mean: " << stat.geometricMean(x) << std::endl;
     // std::cout << "Harmonic Mean: " << stat.harmonicMean(x) << std::endl;
@@ -103,10 +109,7 @@ int main() {
     // std::cout << "Stolarsky Mean (p = 5): " << stat.stolarskyMean(1, 10, 5) << std::endl;
     // std::cout << "Identric Mean: " << stat.identricMean(1, 10) << std::endl;
     // std::cout << "Logarithmic Mean: " << stat.logMean(1, 10) << std::endl;
-    // std::cout << "Standard Deviation: " << stat.standardDeviation(x) << std::endl;
     // std::cout << "Absolute Average Deviation: " << stat.absAvgDeviation(x) << std::endl;
-    // // Returns 1 - (1/k^2)
-    // std::cout << "Chebyshev Inequality: " << stat.chebyshevIneq(2) << std::endl;
 
     // // LINEAR ALGEBRA
     // std::vector<std::vector<double>> A = {
@@ -144,12 +147,12 @@ int main() {
     // alg.printVector(model.modelSetTest((alg.transpose(inputSet))));
     // std::cout << "ACCURACY: " << 100 * model.score() << "%" << std::endl;
 
-    // LOGISTIC REGRESSION
+    // // LOGISTIC REGRESSION
     // std::vector<std::vector<double>> inputSet;
     // std::vector<double> outputSet;
     // data.setData(30, "/Users/marcmelikyan/Desktop/Data/BreastCancer.csv", inputSet, outputSet);
     // LogReg model(inputSet, outputSet);
     // model.gradientDescent(0.0001, 100000, 0);
     // model.SGD(0.001, 100000, 0);
     // model.MLE(0.1, 10000, 0);
     // alg.printVector(model.modelSetTest(inputSet));
     // std::cout << "ACCURACY: " << 100 * model.score() << "%" << std::endl;
@@ -188,7 +191,6 @@ int main() {
     // std::cout << "ACCURACY: " << 100 * model.score() << "%" << std::endl;
 
     // // SOFTMAX REGRESSION
-    // time_t start = time(0);
     // std::vector<std::vector<double>> inputSet;
     // std::vector<double> tempOutputSet;
     // data.setData(4, "/Users/marcmelikyan/Desktop/Data/Iris.csv", inputSet, tempOutputSet);
@@ -374,6 +376,8 @@ int main() {
     // std::vector<double> vectorOfCubes = {1,2,64,27};
     // alg.printMatrix(alg.cbrt(matrixOfCubes));
     // alg.printVector(alg.cbrt(vectorOfCubes));
+    // std::cout << alg.max({{1,2,3,4,5}, {6,5,3,4,1}, {9,9,9,9,9}}) << std::endl;
+    // std::cout << alg.min({{1,2,3,4,5}, {6,5,3,4,1}, {9,9,9,9,9}}) << std::endl;
 
     return 0;
 }
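Once uncommented, those two new lines would print 9 and 1 respectively, the largest and smallest entries of the test matrices, exercising the flattened LinAlg::max and LinAlg::min introduced above.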