pmlpp/mlpp/utilities/utilities.h

#ifndef MLPP_UTILITIES_H
#define MLPP_UTILITIES_H
//
// Utilities.hpp
//
// Created by Marc Melikyan on 1/16/21.
//
#include "core/math/math_defs.h"
#include "core/containers/vector.h"
#include "core/variant/variant.h"
#include "../lin_alg/mlpp_matrix.h"
#include "../lin_alg/mlpp_vector.h"
#include <string>
#include <tuple>
#include <vector>
class MLPPUtilities {
public:
// Weight Init
static std::vector<real_t> weightInitialization(int n, std::string type = "Default");
static real_t biasInitialization();
static std::vector<std::vector<real_t>> weightInitialization(int n, int m, std::string type = "Default");
static std::vector<real_t> biasInitialization(int n);
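
// Usage sketch (illustrative only): the weight/bias initializers are static, and any
// accepted `type` strings beyond "Default" are defined by the implementation, not by
// this header.
//
//   std::vector<real_t> w = MLPPUtilities::weightInitialization(4, "Default");
//   real_t b = MLPPUtilities::biasInitialization();
//
//   std::vector<std::vector<real_t>> W = MLPPUtilities::weightInitialization(4, 3, "Default");
//   std::vector<real_t> bs = MLPPUtilities::biasInitialization(3);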
// Cost/Performance related Functions
real_t performance(std::vector<real_t> y_hat, std::vector<real_t> y);
real_t performance(std::vector<std::vector<real_t>> y_hat, std::vector<std::vector<real_t>> y);
real_t performance_vec(const Ref<MLPPVector> &y_hat, const Ref<MLPPVector> &output_set);
real_t performance_mat(const Ref<MLPPMatrix> &y_hat, const Ref<MLPPMatrix> &y);
real_t performance_pool_int_array_vec(PoolIntArray y_hat, const Ref<MLPPVector> &output_set);
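
// The *_vec / *_mat / *_pool_int_array_vec variants mirror the std::vector overloads
// above, operating on the engine-facing Ref<MLPPVector> / Ref<MLPPMatrix> types instead.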
// Parameter Saving Functions
void saveParameters(std::string fileName, std::vector<real_t> weights, real_t bias, bool app = false, int layer = -1);
void saveParameters(std::string fileName, std::vector<real_t> weights, std::vector<real_t> initial, real_t bias, bool app = false, int layer = -1);
void saveParameters(std::string fileName, std::vector<std::vector<real_t>> weights, std::vector<real_t> bias, bool app = false, int layer = -1);
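
// Usage sketch (illustrative only; the file format and the exact semantics of `app`
// and `layer` are assumptions here and are determined by the implementation):
//
//   MLPPUtilities util;
//   util.saveParameters("model.txt", weights, bias, /*app=*/false, /*layer=*/0);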
// Gradient Descent related
static void UI(std::vector<real_t> weights, real_t bias);
static void UI(std::vector<real_t> weights, std::vector<real_t> initial, real_t bias);
static void UI(std::vector<std::vector<real_t>> weights, std::vector<real_t> bias);
static void CostInfo(int epoch, real_t cost_prev, real_t cost);
static std::vector<std::vector<std::vector<real_t>>> createMiniBatches(std::vector<std::vector<real_t>> inputSet, int n_mini_batch);
static std::tuple<std::vector<std::vector<std::vector<real_t>>>, std::vector<std::vector<real_t>>> createMiniBatches(std::vector<std::vector<real_t>> inputSet, std::vector<real_t> outputSet, int n_mini_batch);
static std::tuple<std::vector<std::vector<std::vector<real_t>>>, std::vector<std::vector<std::vector<real_t>>>> createMiniBatches(std::vector<std::vector<real_t>> inputSet, std::vector<std::vector<real_t>> outputSet, int n_mini_batch);
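
// Usage sketch for the supervised overload (return types as declared above):
//
//   auto batches = MLPPUtilities::createMiniBatches(inputSet, outputSet, /*n_mini_batch=*/8);
//   auto inputMiniBatches = std::get<0>(batches);  // n_mini_batch groups of input rows
//   auto outputMiniBatches = std::get<1>(batches); // the matching groups of outputs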
// F1 score, Precision/Recall, TP, FP, TN, FN, etc.
std::tuple<real_t, real_t, real_t, real_t> TF_PN(std::vector<real_t> y_hat, std::vector<real_t> y); // TF_PN = True/False Positives/Negatives
real_t recall(std::vector<real_t> y_hat, std::vector<real_t> y);
real_t precision(std::vector<real_t> y_hat, std::vector<real_t> y);
real_t accuracy(std::vector<real_t> y_hat, std::vector<real_t> y);
real_t f1_score(std::vector<real_t> y_hat, std::vector<real_t> y);
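
// These are the standard binary-classification metrics; the usual definitions are
//   precision = TP / (TP + FP),  recall = TP / (TP + FN),
//   f1_score  = 2 * precision * recall / (precision + recall).
// The exact conventions (e.g. the tuple ordering returned by TF_PN) are defined by
// the implementation.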
private:
};
#endif /* MLPP_UTILITIES_H */