#ifndef MLPP_ANN_H
#define MLPP_ANN_H
//
// ANN.hpp
//
// Created by Marc Melikyan on 11/4/20.
//
#include "core/math/math_defs.h"
#include "core/object/reference.h"
#include "../lin_alg/mlpp_matrix.h"
#include "../lin_alg/mlpp_vector.h"
#include "../hidden_layer/hidden_layer.h"
#include "../output_layer/output_layer.h"
#include "../activation/activation.h"
#include "../cost/cost.h"
#include "../regularization/reg.h"
#include "../utilities/utilities.h"
2023-02-12 18:35:53 +01:00
class MLPPANN : public Reference {
GDCLASS(MLPPANN, Reference);
2023-01-24 19:00:54 +01:00
public:
2023-02-17 16:55:00 +01:00
enum SchedulerType {
SCHEDULER_TYPE_NONE = 0,
SCHEDULER_TYPE_TIME,
SCHEDULER_TYPE_EPOCH,
SCHEDULER_TYPE_STEP,
SCHEDULER_TYPE_EXPONENTIAL,
};
public:
Ref<MLPPVector> model_set_test(const Ref<MLPPMatrix> &X);
real_t model_test(const Ref<MLPPVector> &x);
2023-02-12 15:07:26 +01:00
void gradient_descent(real_t learning_rate, int max_epoch, bool ui = false);
void sgd(real_t learning_rate, int max_epoch, bool ui = false);
void mbgd(real_t learning_rate, int max_epoch, int mini_batch_size, bool ui = false);
void momentum(real_t learning_rate, int max_epoch, int mini_batch_size, real_t gamma, bool nag, bool ui = false);
void adagrad(real_t learning_rate, int max_epoch, int mini_batch_size, real_t e, bool ui = false);
void adadelta(real_t learning_rate, int max_epoch, int mini_batch_size, real_t b1, real_t e, bool ui = false);
void adam(real_t learning_rate, int max_epoch, int mini_batch_size, real_t b1, real_t b2, real_t e, bool ui = false);
void adamax(real_t learning_rate, int max_epoch, int mini_batch_size, real_t b1, real_t b2, real_t e, bool ui = false);
void nadam(real_t learning_rate, int max_epoch, int mini_batch_size, real_t b1, real_t b2, real_t e, bool ui = false);
void amsgrad(real_t learning_rate, int max_epoch, int mini_batch_size, real_t b1, real_t b2, real_t e, bool ui = false);
2023-01-27 13:01:16 +01:00
real_t score();
2023-02-17 16:55:00 +01:00
void save(const String &file_name);
2023-02-12 15:07:26 +01:00
2023-02-17 16:55:00 +01:00
void set_learning_rate_scheduler(SchedulerType type, real_t decay_constant);
void set_learning_rate_scheduler_drop(SchedulerType type, real_t decay_constant, real_t drop_rate);
2023-01-24 19:00:54 +01:00
2023-02-17 16:55:00 +01:00
void add_layer(int n_hidden, MLPPActivation::ActivationFunction activation, MLPPUtilities::WeightDistributionType weight_init = MLPPUtilities::WEIGHT_DISTRIBUTION_TYPE_DEFAULT, MLPPReg::RegularizationType reg = MLPPReg::REGULARIZATION_TYPE_NONE, real_t lambda = 0.5, real_t alpha = 0.5);
void add_output_layer(MLPPActivation::ActivationFunction activation, MLPPCost::CostTypes loss, MLPPUtilities::WeightDistributionType weight_init = MLPPUtilities::WEIGHT_DISTRIBUTION_TYPE_DEFAULT, MLPPReg::RegularizationType reg = MLPPReg::REGULARIZATION_TYPE_NONE, real_t lambda = 0.5, real_t alpha = 0.5);
2023-01-24 19:00:54 +01:00
2023-02-17 16:55:00 +01:00
MLPPANN(const Ref<MLPPMatrix> &p_input_set, const Ref<MLPPVector> &p_output_set);
2023-02-12 15:07:26 +01:00
MLPPANN();
~MLPPANN();
2023-01-24 19:00:54 +01:00
2023-02-12 18:35:53 +01:00
protected:
2023-02-13 00:56:09 +01:00
real_t apply_learning_rate_scheduler(real_t learning_rate, real_t decay_constant, real_t epoch, real_t drop_rate);
2023-01-24 19:00:54 +01:00
2023-02-17 16:55:00 +01:00
real_t cost(const Ref<MLPPVector> &y_hat, const Ref<MLPPVector> &y);
2023-01-24 19:00:54 +01:00
2023-02-12 15:07:26 +01:00
void forward_pass();
2023-02-17 16:55:00 +01:00
void update_parameters(const Vector<Ref<MLPPMatrix>> &hidden_layer_updations, const Ref<MLPPVector> &output_layer_updation, real_t learning_rate);
2023-01-24 19:00:54 +01:00
2023-02-17 16:55:00 +01:00
struct ComputeGradientsResult {
Vector<Ref<MLPPMatrix>> cumulative_hidden_layer_w_grad;
Ref<MLPPVector> output_w_grad;
};
ComputeGradientsResult compute_gradients(const Ref<MLPPVector> &y_hat, const Ref<MLPPVector> &_output_set);
void print_ui(int epoch, real_t cost_prev, const Ref<MLPPVector> &y_hat, const Ref<MLPPVector> &p_output_set);
2023-01-24 19:00:54 +01:00
2023-02-12 18:35:53 +01:00
static void _bind_methods();
2023-02-17 16:55:00 +01:00
Ref<MLPPMatrix> _input_set;
Ref<MLPPVector> _output_set;
Ref<MLPPVector> _y_hat;
2023-01-24 19:00:54 +01:00
2023-02-17 16:55:00 +01:00
Vector<Ref<MLPPHiddenLayer>> _network;
Ref<MLPPOutputLayer> _output_layer;
2023-01-24 19:00:54 +01:00
2023-02-13 00:56:09 +01:00
int _n;
int _k;
2023-01-24 19:00:54 +01:00
2023-02-17 16:55:00 +01:00
SchedulerType _lr_scheduler;
2023-02-13 00:56:09 +01:00
real_t _decay_constant;
real_t _drop_rate;
2023-01-24 19:00:54 +01:00
};
VARIANT_ENUM_CAST(MLPPANN::SchedulerType);
#endif /* MLPP_ANN_H */