#ifndef MLPP_SOFTMAX_REG_H
#define MLPP_SOFTMAX_REG_H
//
// SoftmaxReg.hpp
//
// Created by Marc Melikyan on 10/2/20.
//
#include "core/math/math_defs.h"

#include "core/object/resource.h"

#include "../lin_alg/mlpp_matrix.h"
#include "../lin_alg/mlpp_vector.h"
#include "../regularization/reg.h"

class MLPPSoftmaxReg : public Resource {
GDCLASS(MLPPSoftmaxReg, Resource);
2023-01-24 19:00:54 +01:00
public:
2023-04-28 21:23:37 +02:00
Ref<MLPPMatrix> get_input_set() const;
2023-02-10 19:31:54 +01:00
void set_input_set(const Ref<MLPPMatrix> &val);
2023-04-28 21:23:37 +02:00
Ref<MLPPMatrix> get_output_set() const;
2023-02-10 19:31:54 +01:00
void set_output_set(const Ref<MLPPMatrix> &val);
2023-04-28 21:23:37 +02:00
MLPPReg::RegularizationType get_reg() const;
2023-02-10 19:31:54 +01:00
void set_reg(const MLPPReg::RegularizationType val);
2023-04-28 21:23:37 +02:00
real_t get_lambda() const;
2023-02-10 19:31:54 +01:00
void set_lambda(const real_t val);
2023-04-28 21:23:37 +02:00
real_t get_alpha() const;
2023-02-10 19:31:54 +01:00
void set_alpha(const real_t val);
2023-04-28 21:23:37 +02:00
Ref<MLPPMatrix> data_y_hat_get() const;
void data_y_hat_set(const Ref<MLPPMatrix> &val);
Ref<MLPPMatrix> data_weights_get() const;
void data_weights_set(const Ref<MLPPMatrix> &val);
Ref<MLPPVector> data_bias_get() const;
void data_bias_set(const Ref<MLPPVector> &val);
2023-02-10 19:31:54 +01:00
Ref<MLPPVector> model_test(const Ref<MLPPVector> &x);
Ref<MLPPMatrix> model_set_test(const Ref<MLPPMatrix> &X);
2023-04-28 21:23:37 +02:00
void train_gradient_descent(real_t learning_rate, int max_epoch, bool ui = false);
void train_sgd(real_t learning_rate, int max_epoch, bool ui = false);
void train_mbgd(real_t learning_rate, int max_epoch, int mini_batch_size, bool ui = false);
2023-02-10 19:31:54 +01:00
2023-01-27 13:01:16 +01:00
real_t score();
2023-01-24 19:00:54 +01:00
2023-04-28 21:23:37 +02:00
bool needs_init() const;
2023-02-10 19:31:54 +01:00
void initialize();
MLPPSoftmaxReg(const Ref<MLPPMatrix> &p_input_set, const Ref<MLPPMatrix> &p_output_set, MLPPReg::RegularizationType p_reg = MLPPReg::REGULARIZATION_TYPE_NONE, real_t p_lambda = 0.5, real_t p_alpha = 0.5);
MLPPSoftmaxReg();
~MLPPSoftmaxReg();
2023-01-24 19:00:54 +01:00
2023-02-10 19:31:54 +01:00
protected:
real_t cost(const Ref<MLPPMatrix> &y_hat, const Ref<MLPPMatrix> &y);
2023-01-24 19:00:54 +01:00
2023-02-10 19:31:54 +01:00
Ref<MLPPVector> evaluatev(const Ref<MLPPVector> &x);
Ref<MLPPMatrix> evaluatem(const Ref<MLPPMatrix> &X);
2023-01-24 19:00:54 +01:00
2023-02-10 19:31:54 +01:00
void forward_pass();
static void _bind_methods();
Ref<MLPPMatrix> _input_set;
Ref<MLPPMatrix> _output_set;
2023-04-28 21:07:35 +02:00
// Regularization Params
MLPPReg::RegularizationType _reg;
real_t _lambda;
real_t _alpha; /* This is the controlling param for Elastic Net*/
2023-02-10 19:31:54 +01:00
Ref<MLPPMatrix> _y_hat;
Ref<MLPPMatrix> _weights;
Ref<MLPPVector> _bias;
2023-01-24 19:00:54 +01:00
};
#endif /* MLPP_SOFTMAX_REG_H */