#ifndef MLPP_SOFTMAX_NET_OLD_H
#define MLPP_SOFTMAX_NET_OLD_H

//
//  SoftmaxNet.hpp
//
//  Created by Marc Melikyan on 10/2/20.
//

#include "core/math/math_defs.h"

#include <string>
#include <tuple>
#include <vector>

class MLPPSoftmaxNetOld {
public:
	MLPPSoftmaxNetOld(std::vector<std::vector<real_t>> inputSet, std::vector<std::vector<real_t>> outputSet, int n_hidden, std::string reg = "None", real_t lambda = 0.5, real_t alpha = 0.5);

	std::vector<real_t> modelTest(std::vector<real_t> x);
	std::vector<std::vector<real_t>> modelSetTest(std::vector<std::vector<real_t>> X);

	void gradientDescent(real_t learning_rate, int max_epoch, bool UI = false);
	void SGD(real_t learning_rate, int max_epoch, bool UI = false);
	void MBGD(real_t learning_rate, int max_epoch, int mini_batch_size, bool UI = false);

	real_t score();

	void save(std::string fileName);

	std::vector<std::vector<real_t>> getEmbeddings(); // This class is used (mostly) for word2Vec; this function returns the learned embeddings.

private:
	real_t Cost(std::vector<std::vector<real_t>> y_hat, std::vector<std::vector<real_t>> y);

	std::vector<std::vector<real_t>> Evaluate(std::vector<std::vector<real_t>> X);
	std::tuple<std::vector<std::vector<real_t>>, std::vector<std::vector<real_t>>> propagate(std::vector<std::vector<real_t>> X);
	std::vector<real_t> Evaluate(std::vector<real_t> x);
	std::tuple<std::vector<real_t>, std::vector<real_t>> propagate(std::vector<real_t> x);

	void forwardPass();

	std::vector<std::vector<real_t>> inputSet;
	std::vector<std::vector<real_t>> outputSet;
	std::vector<std::vector<real_t>> y_hat;

	std::vector<std::vector<real_t>> weights1;
	std::vector<std::vector<real_t>> weights2;

	std::vector<real_t> bias1;
	std::vector<real_t> bias2;

	std::vector<std::vector<real_t>> z2;
	std::vector<std::vector<real_t>> a2;

	int n;
	int k;
	int n_class;
	int n_hidden;

	// Regularization Params
	std::string reg;
	real_t lambda;
	real_t alpha; /* This is the controlling param for Elastic Net */
};

#endif /* MLPP_SOFTMAX_NET_OLD_H */
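
// Usage sketch (illustrative only, not part of the library). It shows how the
// public API above might be driven: construct the network from an input set and
// a one-hot encoded output set, train it with gradientDescent(), and query it
// with modelSetTest() and getEmbeddings(). The header path, the dataset, and the
// hyperparameter values are hypothetical; `real_t` is assumed to be the
// floating-point alias provided by core/math/math_defs.h.
//
//     #include "softmax_net_old.h" // assumed header path
//
//     void softmax_net_old_usage_example() {
//         // Tiny hypothetical dataset: 4 samples, 2 features, 2 one-hot classes.
//         std::vector<std::vector<real_t>> X = { { 0, 0 }, { 0, 1 }, { 1, 0 }, { 1, 1 } };
//         std::vector<std::vector<real_t>> y = { { 1, 0 }, { 0, 1 }, { 0, 1 }, { 1, 0 } };
//
//         MLPPSoftmaxNetOld model(X, y, /*n_hidden=*/4);
//         model.gradientDescent(/*learning_rate=*/0.01, /*max_epoch=*/1000);
//
//         std::vector<std::vector<real_t>> y_pred = model.modelSetTest(X);
//         std::vector<std::vector<real_t>> embeddings = model.getEmbeddings();
//     }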