From 256b7c9c141ee5acce64ea14180356a424e795df Mon Sep 17 00:00:00 2001
From: Relintai
Date: Tue, 7 Feb 2023 21:18:01 +0100
Subject: [PATCH] Added MLPPPCAOld.

---
 SCsub                |  1 +
 mlpp/pca/pca_old.cpp | 54 ++++++++++++++++++++++++++++++++++++++++++++
 mlpp/pca/pca_old.h   | 31 +++++++++++++++++++++++++
 test/mlpp_tests.cpp  |  3 ++-
 4 files changed, 88 insertions(+), 1 deletion(-)
 create mode 100644 mlpp/pca/pca_old.cpp
 create mode 100644 mlpp/pca/pca_old.h

diff --git a/SCsub b/SCsub
index 7d7f19f..2d0362f 100644
--- a/SCsub
+++ b/SCsub
@@ -55,6 +55,7 @@ sources = [
     "mlpp/multi_output_layer/multi_output_layer_old.cpp",
     "mlpp/hidden_layer/hidden_layer_old.cpp",
     "mlpp/mlp/mlp_old.cpp",
+    "mlpp/pca/pca_old.cpp",
 
     "test/mlpp_tests.cpp",
 ]
diff --git a/mlpp/pca/pca_old.cpp b/mlpp/pca/pca_old.cpp
new file mode 100644
index 0000000..3b52b03
--- /dev/null
+++ b/mlpp/pca/pca_old.cpp
@@ -0,0 +1,54 @@
+//
+//  PCA.cpp
+//
+//  Created by Marc Melikyan on 10/2/20.
+//
+
+#include "pca_old.h"
+#include "../data/data.h"
+#include "../lin_alg/lin_alg.h"
+
+#include <iostream>
+#include <random>
+
+
+
+MLPPPCAOld::MLPPPCAOld(std::vector<std::vector<real_t>> inputSet, int k) :
+		inputSet(inputSet), k(k) {
+}
+
+std::vector<std::vector<real_t>> MLPPPCAOld::principalComponents() {
+	MLPPLinAlg alg;
+	MLPPData data;
+
+	auto [U, S, Vt] = alg.SVD(alg.cov(inputSet));
+	X_normalized = data.meanCentering(inputSet);
+	U_reduce.resize(U.size());
+	for (int i = 0; i < k; i++) {
+		for (int j = 0; j < U.size(); j++) {
+			U_reduce[j].push_back(U[j][i]);
+		}
+	}
+	Z = alg.matmult(alg.transpose(U_reduce), X_normalized);
+	return Z;
+}
+// Returns the fraction of variance retained by the reduced representation.
+real_t MLPPPCAOld::score() {
+	MLPPLinAlg alg;
+	std::vector<std::vector<real_t>> X_approx = alg.matmult(U_reduce, Z);
+	real_t num = 0, den = 0;
+	for (int i = 0; i < X_normalized.size(); i++) {
+		num += alg.norm_sq(alg.subtraction(X_normalized[i], X_approx[i]));
+	}
+	num /= X_normalized.size();
+	for (int i = 0; i < X_normalized.size(); i++) {
+		den += alg.norm_sq(X_normalized[i]);
+	}
+
+	den /= X_normalized.size();
+	if (den == 0) {
+		den += 1e-10; // For numerical sanity, so we do not receive a domain error.
+	}
+	return 1 - num / den;
+}
+
diff --git a/mlpp/pca/pca_old.h b/mlpp/pca/pca_old.h
new file mode 100644
index 0000000..03ac4c1
--- /dev/null
+++ b/mlpp/pca/pca_old.h
@@ -0,0 +1,31 @@
+
+#ifndef MLPP_PCA_OLD_H
+#define MLPP_PCA_OLD_H
+
+//
+//  PCA.hpp
+//
+//  Created by Marc Melikyan on 10/2/20.
+//
+
+#include "core/math/math_defs.h"
+
+#include <vector>
+
+
+class MLPPPCAOld {
+public:
+	MLPPPCAOld(std::vector<std::vector<real_t>> inputSet, int k);
+	std::vector<std::vector<real_t>> principalComponents();
+	real_t score();
+
+private:
+	std::vector<std::vector<real_t>> inputSet;
+	std::vector<std::vector<real_t>> X_normalized;
+	std::vector<std::vector<real_t>> U_reduce;
+	std::vector<std::vector<real_t>> Z;
+	int k;
+};
+
+
+#endif /* MLPP_PCA_OLD_H */
diff --git a/test/mlpp_tests.cpp b/test/mlpp_tests.cpp
index 998e63f..c606910 100644
--- a/test/mlpp_tests.cpp
+++ b/test/mlpp_tests.cpp
@@ -49,6 +49,7 @@
 
 #include "../mlpp/mlp/mlp_old.h"
 #include "../mlpp/wgan/wgan_old.h"
+#include "../mlpp/pca/pca_old.h"
 
 Vector<real_t> dstd_vec_to_vec(const std::vector<real_t> &in) {
 	Vector<real_t> r;
@@ -732,7 +733,7 @@ void MLPPTests::test_pca_svd_eigenvalues_eigenvectors(bool ui) {
 	std::cout << "PCA" << std::endl;
 
 	// PCA done using Jacobi's method to approximate eigenvalues and eigenvectors.
-	MLPPPCA dr(inputSet, 1); // 1 dimensional representation.
+	MLPPPCAOld dr(inputSet, 1); // 1 dimensional representation.
 	std::cout << std::endl;
 	std::cout << "Dimensionally reduced representation:" << std::endl;
 	alg.printMatrix(dr.principalComponents());
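
For reviewers, here is a minimal usage sketch of the class this patch adds. It is not part of the commit: the function name pca_old_demo and the toy numbers are made up, the include path assumes a file living next to test/mlpp_tests.cpp, and the rows-as-features / columns-as-samples layout is inferred from how principalComponents() feeds inputSet to alg.cov() and multiplies transpose(U_reduce) by X_normalized.

// Illustrative sketch only -- not part of the patch above.
#include "../mlpp/pca/pca_old.h"

#include <iostream>
#include <vector>

void pca_old_demo() {
	// Two correlated features (rows) observed over five samples (columns),
	// so a single principal component should retain most of the variance.
	std::vector<std::vector<real_t>> inputSet = {
		{ 1, 2, 3, 4, 5 },
		{ 1, 3, 3, 5, 5 }
	};

	MLPPPCAOld dr(inputSet, 1); // keep k = 1 component

	// principalComponents() must run first: it fills U_reduce and Z,
	// which score() then uses to measure the reconstruction error.
	std::vector<std::vector<real_t>> Z = dr.principalComponents(); // 1 x 5 reduced representation
	std::cout << "Variance retained: " << dr.score() << std::endl;
}

If the surrounding test data happens to use samples as rows instead, transpose the matrix before constructing MLPPPCAOld.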