// pmlpp/mlpp/wgan/wgan.h
#ifndef MLPP_WGAN_H
#define MLPP_WGAN_H
//
// WGAN.hpp
//
// Created by Marc Melikyan on 11/4/20.
//
// 2023-02-05 18:46:12 +01:00
#include "core/containers/vector.h"
#include "core/math/math_defs.h"
#include "core/string/ustring.h"
#include "core/object/reference.h"
#include "../lin_alg/mlpp_matrix.h"
// 2023-05-01 10:37:28 +02:00
#include "../lin_alg/mlpp_tensor3.h"
// 2023-02-05 18:46:12 +01:00
#include "../lin_alg/mlpp_vector.h"
// 2023-01-24 18:12:23 +01:00
#include "../hidden_layer/hidden_layer.h"
#include "../output_layer/output_layer.h"
// 2023-02-06 02:36:22 +01:00
#include "../activation/activation.h"
#include "../cost/cost.h"
#include "../regularization/reg.h"
#include "../utilities/utilities.h"
// 2023-02-05 18:46:12 +01:00
class MLPPWGAN : public Reference {
GDCLASS(MLPPWGAN, Reference);
2023-02-05 18:16:34 +01:00
public:
2023-02-06 12:20:52 +01:00
Ref<MLPPMatrix> get_output_set();
void set_output_set(const Ref<MLPPMatrix> &val);
int get_k() const;
void set_k(const int val);
2023-02-06 02:36:22 +01:00
Ref<MLPPMatrix> generate_example(int n);
void gradient_descent(real_t learning_rate, int max_epoch, bool ui = false);
2023-02-05 18:16:34 +01:00
real_t score();
2023-02-06 02:36:22 +01:00
void save(const String &file_name);
2023-02-05 18:16:34 +01:00
2023-05-01 10:37:28 +02:00
void create_layer(int n_hidden, MLPPActivation::ActivationFunction activation, MLPPUtilities::WeightDistributionType weight_init = MLPPUtilities::WEIGHT_DISTRIBUTION_TYPE_DEFAULT, MLPPReg::RegularizationType reg = MLPPReg::REGULARIZATION_TYPE_NONE, real_t lambda = 0.5, real_t alpha = 0.5);
void add_layer(Ref<MLPPHiddenLayer> layer);
Ref<MLPPHiddenLayer> get_layer(const int index);
void remove_layer(const int index);
int get_layer_count() const;
2023-02-06 02:36:22 +01:00
void add_output_layer(MLPPUtilities::WeightDistributionType weight_init = MLPPUtilities::WEIGHT_DISTRIBUTION_TYPE_DEFAULT, MLPPReg::RegularizationType reg = MLPPReg::REGULARIZATION_TYPE_NONE, real_t lambda = 0.5, real_t alpha = 0.5);
2023-02-05 18:16:34 +01:00
2023-05-01 10:44:40 +02:00
MLPPWGAN(int k, const Ref<MLPPMatrix> &output_set);
2023-02-05 18:46:12 +01:00
MLPPWGAN();
~MLPPWGAN();
protected:
2023-02-06 02:36:22 +01:00
Ref<MLPPMatrix> model_set_test_generator(const Ref<MLPPMatrix> &X); // Evaluator for the generator of the WGAN.
Ref<MLPPVector> model_set_test_discriminator(const Ref<MLPPMatrix> &X); // Evaluator for the discriminator of the WGAN.
2023-02-05 18:46:12 +01:00
2023-02-06 02:36:22 +01:00
real_t cost(const Ref<MLPPVector> &y_hat, const Ref<MLPPVector> &y);
2023-02-05 18:16:34 +01:00
2023-02-05 18:46:12 +01:00
void forward_pass();
2023-05-01 10:37:28 +02:00
void update_discriminator_parameters(Ref<MLPPTensor3> hidden_layer_updations, const Ref<MLPPVector> &output_layer_updation, real_t learning_rate);
void update_generator_parameters(Ref<MLPPTensor3> hidden_layer_updations, real_t learning_rate);
2023-02-06 02:36:22 +01:00
struct DiscriminatorGradientResult {
2023-05-01 10:37:28 +02:00
Ref<MLPPTensor3> cumulative_hidden_layer_w_grad; // Tensor containing ALL hidden grads.
2023-02-06 02:36:22 +01:00
Ref<MLPPVector> output_w_grad;
2023-05-01 10:37:28 +02:00
DiscriminatorGradientResult() {
cumulative_hidden_layer_w_grad.instance();
output_w_grad.instance();
}
2023-02-06 02:36:22 +01:00
};
DiscriminatorGradientResult compute_discriminator_gradients(const Ref<MLPPVector> &y_hat, const Ref<MLPPVector> &output_set);
2023-05-01 10:37:28 +02:00
Ref<MLPPTensor3> compute_generator_gradients(const Ref<MLPPVector> &y_hat, const Ref<MLPPVector> &output_set);
2023-02-05 18:16:34 +01:00
2023-02-06 02:36:22 +01:00
void handle_ui(int epoch, real_t cost_prev, const Ref<MLPPVector> &y_hat, const Ref<MLPPVector> &output_set);
2023-02-05 18:16:34 +01:00
2023-02-05 18:46:12 +01:00
static void _bind_methods();
2023-02-05 18:16:34 +01:00
2023-02-13 00:19:16 +01:00
Ref<MLPPMatrix> _output_set;
2023-05-01 10:44:40 +02:00
int _k;
2023-02-05 18:16:34 +01:00
2023-02-13 00:19:16 +01:00
Vector<Ref<MLPPHiddenLayer>> _network;
Ref<MLPPOutputLayer> _output_layer;
2023-02-05 18:16:34 +01:00
2023-05-01 10:44:40 +02:00
Ref<MLPPVector> _y_hat;
2023-02-05 18:16:34 +01:00
};
#endif /* MLPP_WGAN_H */