#ifndef MLPP_SVC_H
#define MLPP_SVC_H

//
//  SVC.hpp
//
//  Created by Marc Melikyan on 10/2/20.
//

// https://towardsdatascience.com/svm-implementation-from-scratch-python-2db2fc52e5c2
// Illustrates a practical definition of the Hinge Loss function and its gradient when optimizing with SGD.

#include "core/math/math_defs.h"
#include "core/object/resource.h"

#include "../lin_alg/mlpp_matrix.h"
#include "../lin_alg/mlpp_vector.h"

#include "../regularization/reg.h"

class MLPPSVC : public Resource {
	GDCLASS(MLPPSVC, Resource);

public:
	Ref<MLPPMatrix> get_input_set() const;
	void set_input_set(const Ref<MLPPMatrix> &val);

	Ref<MLPPVector> get_output_set() const;
	void set_output_set(const Ref<MLPPVector> &val);

	real_t get_c() const;
	void set_c(const real_t val);

	Ref<MLPPVector> data_z_get() const;
	void data_z_set(const Ref<MLPPVector> &val);

	Ref<MLPPVector> data_y_hat_get() const;
	void data_y_hat_set(const Ref<MLPPVector> &val);

	Ref<MLPPVector> data_weights_get() const;
	void data_weights_set(const Ref<MLPPVector> &val);

	real_t data_bias_get() const;
	void data_bias_set(const real_t val);

	Ref<MLPPVector> model_set_test(const Ref<MLPPMatrix> &X);
	real_t model_test(const Ref<MLPPVector> &x);

	void train_gradient_descent(real_t learning_rate, int max_epoch, bool ui = false);
	void train_sgd(real_t learning_rate, int max_epoch, bool ui = false);
	void train_mbgd(real_t learning_rate, int max_epoch, int mini_batch_size, bool ui = false);

	real_t score();

	bool needs_init() const;
	void initialize();

	MLPPSVC(const Ref<MLPPMatrix> &input_set, const Ref<MLPPVector> &output_set, real_t c);

	MLPPSVC();
	~MLPPSVC();

protected:
	real_t cost(const Ref<MLPPVector> &z, const Ref<MLPPVector> &y, const Ref<MLPPVector> &weights, real_t c);

	Ref<MLPPVector> evaluatem(const Ref<MLPPMatrix> &X);
	Ref<MLPPVector> propagatem(const Ref<MLPPMatrix> &X);

	real_t evaluatev(const Ref<MLPPVector> &x);
	real_t propagatev(const Ref<MLPPVector> &x);

	void forward_pass();

	static void _bind_methods();

	Ref<MLPPMatrix> _input_set;
	Ref<MLPPVector> _output_set;
	real_t _c;

	Ref<MLPPVector> _z;
	Ref<MLPPVector> _y_hat;
	Ref<MLPPVector> _weights;
	real_t _bias;
};

#endif /* MLPP_SVC_H */
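
// A minimal sketch of the soft-margin objective that the train_gradient_descent / train_sgd /
// train_mbgd methods above are presumably minimizing, following the standard hinge-loss
// formulation from the article linked in the header comment. The exact cost() and
// regularization terms used by the implementation may differ; treat this as an illustration
// of the technique, not a description of the actual code path.
//
//   z_i = w . x_i + b                                         (forward_pass / propagate*)
//   J(w, b) = (1/2) * ||w||^2 + c * sum_i max(0, 1 - y_i * z_i)
//
// A single SGD step on a sample (x_i, y_i) with learning rate eta would then use the
// subgradient of J:
//
//   if y_i * z_i >= 1:  w <- w - eta * w                       (only the regularizer pulls on w)
//   else:               w <- w - eta * (w - c * y_i * x_i)     (margin violated: hinge term active)
//                       b <- b + eta * c * y_i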