#ifndef MLPP_LIN_ALG_H
#define MLPP_LIN_ALG_H
/*************************************************************************/
/* lin_alg.h */
/*************************************************************************/
/* This file is part of: */
/* PMLPP Machine Learning Library */
/* https://github.com/Relintai/pmlpp */
/*************************************************************************/
/* Copyright (c) 2023-present Péter Magyar. */
/* Copyright (c) 2022-2023 Marc Melikyan */
/* */
/* Permission is hereby granted, free of charge, to any person obtaining */
/* a copy of this software and associated documentation files (the */
/* "Software"), to deal in the Software without restriction, including */
/* without limitation the rights to use, copy, modify, merge, publish, */
/* distribute, sublicense, and/or sell copies of the Software, and to */
/* permit persons to whom the Software is furnished to do so, subject to */
/* the following conditions: */
/* */
/* The above copyright notice and this permission notice shall be */
/* included in all copies or substantial portions of the Software. */
/* */
/* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, */
/* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF */
/* MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.*/
/* IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY */
/* CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, */
/* TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE */
/* SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. */
/*************************************************************************/
//TODO Methods here should probably use error macros in a way where they get disabled in non-tools(?) (maybe release?) builds

#ifndef GDNATIVE

#include "core/math/math_defs.h"
#include "core/object/reference.h"

#else

#include "core/defs.h"
#include "core/math_funcs.h"

#include "gen/resource.h"

#endif

#include "../lin_alg/mlpp_matrix.h"
#include "../lin_alg/mlpp_vector.h"

#include <tuple>
#include <vector>
class MLPPLinAlg : public Reference {
GDCLASS(MLPPLinAlg, Reference);
2023-01-24 19:00:54 +01:00
public:
// MATRIX FUNCTIONS
2023-12-29 19:47:16 +01:00
Ref<MLPPMatrix> gram_matrix(const Ref<MLPPMatrix> &A);
bool linear_independence_checker(const Ref<MLPPMatrix> &A);
2023-01-24 19:00:54 +01:00
2023-02-06 02:36:22 +01:00
Ref<MLPPMatrix> gaussian_noise(int n, int m);
2023-01-24 19:00:54 +01:00
2023-04-22 14:23:51 +02:00
Ref<MLPPMatrix> additionnm(const Ref<MLPPMatrix> &A, const Ref<MLPPMatrix> &B);
Ref<MLPPMatrix> subtractionnm(const Ref<MLPPMatrix> &A, const Ref<MLPPMatrix> &B);
Ref<MLPPMatrix> matmultnm(const Ref<MLPPMatrix> &A, const Ref<MLPPMatrix> &B);
2023-01-24 19:00:54 +01:00
2023-04-22 14:23:51 +02:00
Ref<MLPPMatrix> hadamard_productnm(const Ref<MLPPMatrix> &A, const Ref<MLPPMatrix> &B);
Ref<MLPPMatrix> kronecker_productnm(const Ref<MLPPMatrix> &A, const Ref<MLPPMatrix> &B);
2023-04-29 13:50:35 +02:00
Ref<MLPPMatrix> division_element_wisenvnm(const Ref<MLPPMatrix> &A, const Ref<MLPPMatrix> &B);
2023-01-31 03:20:20 +01:00
2023-04-22 14:23:51 +02:00
Ref<MLPPMatrix> transposenm(const Ref<MLPPMatrix> &A);
Ref<MLPPMatrix> scalar_multiplynm(real_t scalar, const Ref<MLPPMatrix> &A);
Ref<MLPPMatrix> scalar_addnm(real_t scalar, const Ref<MLPPMatrix> &A);
2023-01-31 03:20:20 +01:00
2023-04-22 14:39:13 +02:00
Ref<MLPPMatrix> lognm(const Ref<MLPPMatrix> &A);
Ref<MLPPMatrix> log10nm(const Ref<MLPPMatrix> &A);
Ref<MLPPMatrix> expnm(const Ref<MLPPMatrix> &A);
Ref<MLPPMatrix> erfnm(const Ref<MLPPMatrix> &A);
Ref<MLPPMatrix> exponentiatenm(const Ref<MLPPMatrix> &A, real_t p);
Ref<MLPPMatrix> sqrtnm(const Ref<MLPPMatrix> &A);
Ref<MLPPMatrix> cbrtnm(const Ref<MLPPMatrix> &A);
2023-02-02 02:19:16 +01:00
2023-04-22 17:17:58 +02:00
//std::vector<std::vector<real_t>> matrixPower(std::vector<std::vector<real_t>> A, int n);
2023-04-22 14:39:13 +02:00
Ref<MLPPMatrix> absnm(const Ref<MLPPMatrix> &A);
2023-02-02 02:19:16 +01:00
real_t detm(const Ref<MLPPMatrix> &A, int d);
2023-04-22 17:17:58 +02:00
//real_t trace(std::vector<std::vector<real_t>> A);
2023-04-22 14:39:13 +02:00
Ref<MLPPMatrix> cofactornm(const Ref<MLPPMatrix> &A, int n, int i, int j);
Ref<MLPPMatrix> adjointnm(const Ref<MLPPMatrix> &A);
Ref<MLPPMatrix> inversenm(const Ref<MLPPMatrix> &A);
Ref<MLPPMatrix> pinversenm(const Ref<MLPPMatrix> &A);
2023-04-22 14:39:13 +02:00
Ref<MLPPMatrix> zeromatnm(int n, int m);
Ref<MLPPMatrix> onematnm(int n, int m);
Ref<MLPPMatrix> fullnm(int n, int m, int k);
2023-04-22 14:39:13 +02:00
Ref<MLPPMatrix> sinnm(const Ref<MLPPMatrix> &A);
Ref<MLPPMatrix> cosnm(const Ref<MLPPMatrix> &A);
2023-04-22 14:39:13 +02:00
Ref<MLPPMatrix> maxnm(const Ref<MLPPMatrix> &A, const Ref<MLPPMatrix> &B);
2023-02-17 16:55:00 +01:00
2023-04-22 17:17:58 +02:00
//real_t max(std::vector<std::vector<real_t>> A);
//real_t min(std::vector<std::vector<real_t>> A);
2023-04-22 17:17:58 +02:00
//std::vector<std::vector<real_t>> round(std::vector<std::vector<real_t>> A);
2023-04-22 17:17:58 +02:00
//real_t norm_2(std::vector<std::vector<real_t>> A);
Ref<MLPPMatrix> identitym(int d);
2023-04-22 14:39:13 +02:00
Ref<MLPPMatrix> covnm(const Ref<MLPPMatrix> &A);
2023-02-07 22:10:16 +01:00
struct EigenResult {
Ref<MLPPMatrix> eigen_vectors;
Ref<MLPPMatrix> eigen_values;
};
2023-01-26 14:52:49 +01:00
EigenResult eigen(Ref<MLPPMatrix> A);
2023-02-08 01:26:37 +01:00
struct SVDResult {
2023-02-07 22:10:16 +01:00
Ref<MLPPMatrix> U;
Ref<MLPPMatrix> S;
Ref<MLPPMatrix> Vt;
};
2023-02-08 01:26:37 +01:00
SVDResult svd(const Ref<MLPPMatrix> &A);
2023-01-26 14:52:49 +01:00
2023-12-28 23:26:14 +01:00
Ref<MLPPVector> vector_projection(const Ref<MLPPVector> &a, const Ref<MLPPVector> &b);
2023-12-28 23:26:14 +01:00
Ref<MLPPMatrix> gram_schmidt_process(const Ref<MLPPMatrix> &A);
2023-01-26 14:52:49 +01:00
struct QRDResult {
2023-12-28 23:26:14 +01:00
Ref<MLPPMatrix> Q;
Ref<MLPPMatrix> R;
2023-01-26 14:52:49 +01:00
};
2023-12-28 23:26:14 +01:00
QRDResult qrd(const Ref<MLPPMatrix> &A);
2023-01-26 14:52:49 +01:00
struct CholeskyResult {
2023-12-29 10:43:16 +01:00
Ref<MLPPMatrix> L;
Ref<MLPPMatrix> Lt;
2023-01-26 14:52:49 +01:00
};
2023-12-29 10:43:16 +01:00
CholeskyResult cholesky(const Ref<MLPPMatrix> &A);
2023-01-26 14:52:49 +01:00
2023-04-22 17:17:58 +02:00
//real_t sum_elements(std::vector<std::vector<real_t>> A);
2023-04-22 14:39:13 +02:00
Ref<MLPPVector> flattenvvnv(const Ref<MLPPMatrix> &A);
2023-12-29 10:43:16 +01:00
Ref<MLPPVector> solve(const Ref<MLPPMatrix> &A, const Ref<MLPPVector> &b);
2023-12-29 10:43:16 +01:00
bool positive_definite_checker(const Ref<MLPPMatrix> &A);
bool negative_definite_checker(const Ref<MLPPMatrix> &A);
2023-12-29 10:43:16 +01:00
bool zero_eigenvalue(const Ref<MLPPMatrix> &A);
2023-01-24 19:00:54 +01:00
// VECTOR FUNCTIONS
Ref<MLPPVector> flattenmnv(const Vector<Ref<MLPPVector>> &A);
2023-02-02 02:19:16 +01:00
Ref<MLPPVector> hadamard_productnv(const Ref<MLPPVector> &a, const Ref<MLPPVector> &b);
void hadamard_productv(const Ref<MLPPVector> &a, const Ref<MLPPVector> &b, Ref<MLPPVector> out);
2023-04-29 13:50:35 +02:00
Ref<MLPPVector> division_element_wisenv(const Ref<MLPPVector> &a, const Ref<MLPPVector> &b);
2023-01-29 15:46:55 +01:00
Ref<MLPPVector> scalar_multiplynv(real_t scalar, const Ref<MLPPVector> &a);
void scalar_multiplyv(real_t scalar, const Ref<MLPPVector> &a, Ref<MLPPVector> out);
2023-02-02 02:19:16 +01:00
Ref<MLPPVector> scalar_addnv(real_t scalar, const Ref<MLPPVector> &a);
void scalar_addv(real_t scalar, const Ref<MLPPVector> &a, Ref<MLPPVector> out);
2023-01-29 15:46:55 +01:00
Ref<MLPPVector> additionnv(const Ref<MLPPVector> &a, const Ref<MLPPVector> &b);
void additionv(const Ref<MLPPVector> &a, const Ref<MLPPVector> &b, Ref<MLPPVector> out);
2023-01-29 15:46:55 +01:00
Ref<MLPPVector> subtractionnv(const Ref<MLPPVector> &a, const Ref<MLPPVector> &b);
void subtractionv(const Ref<MLPPVector> &a, const Ref<MLPPVector> &b, Ref<MLPPVector> out);
2023-04-22 13:17:54 +02:00
Ref<MLPPVector> lognv(const Ref<MLPPVector> &a);
Ref<MLPPVector> log10nv(const Ref<MLPPVector> &a);
Ref<MLPPVector> expnv(const Ref<MLPPVector> &a);
Ref<MLPPVector> erfnv(const Ref<MLPPVector> &a);
Ref<MLPPVector> exponentiatenv(const Ref<MLPPVector> &a, real_t p);
Ref<MLPPVector> sqrtnv(const Ref<MLPPVector> &a);
Ref<MLPPVector> cbrtnv(const Ref<MLPPVector> &a);
2023-01-31 02:37:20 +01:00
2023-04-22 14:46:25 +02:00
real_t dotnv(const Ref<MLPPVector> &a, const Ref<MLPPVector> &b);
2023-04-22 17:17:58 +02:00
//std::vector<real_t> cross(std::vector<real_t> a, std::vector<real_t> b);
2023-02-02 02:19:16 +01:00
Ref<MLPPVector> absv(const Ref<MLPPVector> &a);
2023-04-22 14:46:25 +02:00
Ref<MLPPVector> zerovecnv(int n);
Ref<MLPPVector> onevecnv(int n);
Ref<MLPPVector> fullnv(int n, int k);
2023-04-22 14:46:25 +02:00
Ref<MLPPVector> sinnv(const Ref<MLPPVector> &a);
Ref<MLPPVector> cosnv(const Ref<MLPPVector> &a);
2023-02-15 00:30:02 +01:00
Ref<MLPPVector> maxnvv(const Ref<MLPPVector> &a, const Ref<MLPPVector> &b);
2023-02-15 00:30:02 +01:00
real_t maxvr(const Ref<MLPPVector> &a);
real_t minvr(const Ref<MLPPVector> &a);
2023-04-22 17:17:58 +02:00
//std::vector<real_t> round(std::vector<real_t> a);
real_t euclidean_distance(const Ref<MLPPVector> &a, const Ref<MLPPVector> &b);
real_t euclidean_distance_squared(const Ref<MLPPVector> &a, const Ref<MLPPVector> &b);
2023-04-22 17:17:58 +02:00
/*
2023-01-27 13:01:16 +01:00
real_t norm_2(std::vector<real_t> a);
2023-04-22 17:17:58 +02:00
*/
2023-01-29 15:46:55 +01:00
real_t norm_sqv(const Ref<MLPPVector> &a);
2023-04-22 17:17:58 +02:00
real_t sum_elementsv(const Ref<MLPPVector> &a);
2023-04-22 17:17:58 +02:00
//real_t cosineSimilarity(std::vector<real_t> a, std::vector<real_t> b);
2023-01-24 19:00:54 +01:00
// MATRIX-VECTOR FUNCTIONS
2023-04-22 14:46:25 +02:00
Ref<MLPPVector> mat_vec_multnv(const Ref<MLPPMatrix> &A, const Ref<MLPPVector> &b);
Ref<MLPPVector> subtract_matrix_rowsnv(const Ref<MLPPVector> &a, const Ref<MLPPMatrix> &B);
Ref<MLPPMatrix> outer_product(const Ref<MLPPVector> &a, const Ref<MLPPVector> &b); // This multiplies a, bT
Ref<MLPPMatrix> mat_vec_addnm(const Ref<MLPPMatrix> &A, const Ref<MLPPVector> &b);
Ref<MLPPMatrix> diagnm(const Ref<MLPPVector> &a);
2023-01-24 19:00:54 +01:00
// TENSOR FUNCTIONS
2023-04-22 17:17:58 +02:00
Vector<Ref<MLPPMatrix>> additionnvt(const Vector<Ref<MLPPMatrix>> &A, const Vector<Ref<MLPPMatrix>> &B);
void division_element_wisevt(const Vector<Ref<MLPPMatrix>> &A, const Vector<Ref<MLPPMatrix>> &B);
2023-04-29 13:50:35 +02:00
Vector<Ref<MLPPMatrix>> division_element_wisenvnvt(const Vector<Ref<MLPPMatrix>> &A, const Vector<Ref<MLPPMatrix>> &B);
2023-04-22 17:17:58 +02:00
Vector<Ref<MLPPMatrix>> sqrtnvt(const Vector<Ref<MLPPMatrix>> &A);
2023-04-22 17:17:58 +02:00
Vector<Ref<MLPPMatrix>> exponentiatenvt(const Vector<Ref<MLPPMatrix>> &A, real_t p);
2023-04-22 17:17:58 +02:00
//std::vector<std::vector<real_t>> tensor_vec_mult(std::vector<std::vector<std::vector<real_t>>> A, std::vector<real_t> b);
2023-02-06 12:20:52 +01:00
2023-04-22 17:17:58 +02:00
//std::vector<real_t> flatten(std::vector<std::vector<std::vector<real_t>>> A);
2023-04-22 17:17:58 +02:00
Vector<Ref<MLPPMatrix>> scalar_multiplynvt(real_t scalar, Vector<Ref<MLPPMatrix>> A);
Vector<Ref<MLPPMatrix>> scalar_addnvt(real_t scalar, Vector<Ref<MLPPMatrix>> A);
2023-02-17 16:55:00 +01:00
void resizevt(Vector<Ref<MLPPMatrix>> &r_target, const Vector<Ref<MLPPMatrix>> &A);
Vector<Ref<MLPPMatrix>> resizencvt(const Vector<Ref<MLPPMatrix>> &A);
2023-04-22 17:17:58 +02:00
//std::vector<std::vector<std::vector<real_t>>> hadamard_product(std::vector<std::vector<std::vector<real_t>>> A, std::vector<std::vector<std::vector<real_t>>> B);
2023-04-22 17:17:58 +02:00
Vector<Ref<MLPPMatrix>> maxnvt(const Vector<Ref<MLPPMatrix>> &A, const Vector<Ref<MLPPMatrix>> &B);
Vector<Ref<MLPPMatrix>> absnvt(const Vector<Ref<MLPPMatrix>> &A);
2023-04-22 17:17:58 +02:00
//real_t norm_2(std::vector<std::vector<std::vector<real_t>>> A);
2023-04-22 17:17:58 +02:00
//std::vector<std::vector<std::vector<real_t>>> vector_wise_tensor_product(std::vector<std::vector<std::vector<real_t>>> A, std::vector<std::vector<real_t>> B);
protected:
static void _bind_methods();
2023-01-24 19:00:54 +01:00
};
#endif /* MLPP_LIN_ALG_H */