Added MLPPPCAOld.

This commit is contained in:
Relintai 2023-02-07 21:18:01 +01:00
parent 48d979f6b8
commit 256b7c9c14
4 changed files with 88 additions and 1 deletions

1
SCsub
View File

@ -55,6 +55,7 @@ sources = [
"mlpp/multi_output_layer/multi_output_layer_old.cpp", "mlpp/multi_output_layer/multi_output_layer_old.cpp",
"mlpp/hidden_layer/hidden_layer_old.cpp", "mlpp/hidden_layer/hidden_layer_old.cpp",
"mlpp/mlp/mlp_old.cpp", "mlpp/mlp/mlp_old.cpp",
"mlpp/pca/pca_old.cpp",
"test/mlpp_tests.cpp", "test/mlpp_tests.cpp",
] ]

54
mlpp/pca/pca_old.cpp Normal file
View File

@ -0,0 +1,54 @@
//
// PCA.cpp
//
// Created by Marc Melikyan on 10/2/20.
//
#include "pca_old.h"
#include "../data/data.h"
#include "../lin_alg/lin_alg.h"
#include <iostream>
#include <random>
// Stores the dataset and the requested number of principal components.
// No computation happens here; call principalComponents() to run PCA.
MLPPPCAOld::MLPPPCAOld(std::vector<std::vector<real_t>> inputSet, int k) {
	this->inputSet = inputSet;
	this->k = k;
}
std::vector<std::vector<real_t>> MLPPPCAOld::principalComponents() {
MLPPLinAlg alg;
MLPPData data;
auto [U, S, Vt] = alg.SVD(alg.cov(inputSet));
X_normalized = data.meanCentering(inputSet);
U_reduce.resize(U.size());
for (int i = 0; i < k; i++) {
for (int j = 0; j < U.size(); j++) {
U_reduce[j].push_back(U[j][i]);
}
}
Z = alg.matmult(alg.transpose(U_reduce), X_normalized);
return Z;
}
// Simply tells us the percentage of variance maintained.
// Simply tells us the percentage of variance maintained:
// 1 - mean||x_i - x_approx_i||^2 / mean||x_i||^2 over the mean-centered data.
// Must be called after principalComponents(), which fills X_normalized,
// U_reduce and Z.
real_t MLPPPCAOld::score() {
	MLPPLinAlg alg;
	std::vector<std::vector<real_t>> X_approx = alg.matmult(U_reduce, Z);
	// Bug fix: num was previously declared uninitialized ("real_t num, den = 0;"
	// only initializes den), making the accumulation below undefined behavior.
	real_t num = 0;
	real_t den = 0;
	for (size_t i = 0; i < X_normalized.size(); i++) {
		num += alg.norm_sq(alg.subtraction(X_normalized[i], X_approx[i]));
	}
	num /= X_normalized.size();
	for (size_t i = 0; i < X_normalized.size(); i++) {
		den += alg.norm_sq(X_normalized[i]);
	}
	den /= X_normalized.size();
	if (den == 0) {
		den += 1e-10; // For numerical sanity as to not receive a domain error
	}
	return 1 - num / den;
}

31
mlpp/pca/pca_old.h Normal file
View File

@ -0,0 +1,31 @@
#ifndef MLPP_PCA_OLD_H
#define MLPP_PCA_OLD_H
//
// PCA.hpp
//
// Created by Marc Melikyan on 10/2/20.
//
#include "core/math/math_defs.h"
#include <vector>
// Old-style (std::vector-based) PCA via SVD of the covariance matrix.
class MLPPPCAOld {
public:
	// Takes the dataset (rows = samples) and k, the number of components to keep.
	MLPPPCAOld(std::vector<std::vector<real_t>> inputSet, int k);
	// Runs PCA; returns (and caches) the k-dimensional representation Z.
	std::vector<std::vector<real_t>> principalComponents();
	// Fraction of variance retained; call after principalComponents().
	real_t score();
private:
	std::vector<std::vector<real_t>> inputSet; // Original dataset.
	std::vector<std::vector<real_t>> X_normalized; // Mean-centered dataset.
	std::vector<std::vector<real_t>> U_reduce; // First k columns of U from the SVD.
	std::vector<std::vector<real_t>> Z; // Reduced representation.
	int k; // Number of principal components kept.
};
#endif /* MLPP_PCA_OLD_H */

View File

@ -49,6 +49,7 @@
#include "../mlpp/mlp/mlp_old.h" #include "../mlpp/mlp/mlp_old.h"
#include "../mlpp/wgan/wgan_old.h" #include "../mlpp/wgan/wgan_old.h"
#include "../mlpp/pca/pca_old.h"
Vector<real_t> dstd_vec_to_vec(const std::vector<real_t> &in) { Vector<real_t> dstd_vec_to_vec(const std::vector<real_t> &in) {
Vector<real_t> r; Vector<real_t> r;
@ -732,7 +733,7 @@ void MLPPTests::test_pca_svd_eigenvalues_eigenvectors(bool ui) {
std::cout << "PCA" << std::endl; std::cout << "PCA" << std::endl;
// PCA done using Jacobi's method to approximate eigenvalues and eigenvectors. // PCA done using Jacobi's method to approximate eigenvalues and eigenvectors.
MLPPPCA dr(inputSet, 1); // 1 dimensional representation. MLPPPCAOld dr(inputSet, 1); // 1 dimensional representation.
std::cout << std::endl; std::cout << std::endl;
std::cout << "Dimensionally reduced representation:" << std::endl; std::cout << "Dimensionally reduced representation:" << std::endl;
alg.printMatrix(dr.principalComponents()); alg.printMatrix(dr.principalComponents());