// pmlpp/mlpp/output_layer/output_layer.h

#ifndef MLPP_OUTPUT_LAYER_H
#define MLPP_OUTPUT_LAYER_H
//
// OutputLayer.hpp
//
// Created by Marc Melikyan on 11/4/20.
//
#include "../activation/activation.h"
#include "../cost/cost.h"
#include <map>
#include <string>
#include <vector>
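
// The final (output) layer of a network: a single output neuron holding its
// weights, bias, cached forward-pass values, and the name-to-function lookup
// tables used to select the activation and cost at run time.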
class OutputLayer {
public:
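// Constructs the layer. n_hidden is the width of the incoming (hidden) layer,
// i.e. the expected length of each row of `input`; the activation, cost,
// weightInit and reg strings select named strategies (see the maps and
// regularization members below). Parameter roles inferred from the declarations.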
OutputLayer(int n_hidden, std::string activation, std::string cost, std::vector<std::vector<double>> input, std::string weightInit, std::string reg, double lambda, double alpha);
int n_hidden;
std::string activation;
std::string cost;
std::vector<std::vector<double>> input;
std::vector<double> weights;
double bias;
std::vector<double> z;
std::vector<double> a;
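// Dispatch tables mapping the activation/cost name strings to Activation/Cost
// member functions: vector and scalar activation variants, the cost itself,
// and its derivative.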
std::map<std::string, std::vector<double> (Activation::*)(std::vector<double>, bool)> activation_map;
std::map<std::string, double (Activation::*)(double, bool)> activationTest_map;
std::map<std::string, double (Cost::*)(std::vector<double>, std::vector<double>)> cost_map;
std::map<std::string, std::vector<double> (Cost::*)(std::vector<double>, std::vector<double>)> costDeriv_map;
double z_test;
double a_test;
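// Error term for this layer, presumably filled in during backpropagation.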
std::vector<double> delta;
// Regularization Params
std::string reg;
double lambda; /* Regularization Parameter */
double alpha; /* This is the controlling param for Elastic Net*/
std::string weightInit;
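
// Recomputes z (weighted sums plus bias) and a (activated outputs) over the
// stored input, using the selected activation.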
void forwardPass();
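// Evaluates the layer on a single input vector x, storing the results in
// z_test and a_test.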
void Test(std::vector<double> x);
};
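
// Example usage (a sketch; the "Sigmoid", "LogLoss", "Default" and "None"
// names are assumptions about what the implementation registers in the maps
// above):
//
//   std::vector<std::vector<double>> X = { { 0.0, 1.0 }, { 1.0, 0.0 } };
//   OutputLayer layer(2, "Sigmoid", "LogLoss", X, "Default", "None", 0.5, 0.5);
//   layer.forwardPass(); // layer.a now holds one activated output per row of X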
#endif /* MLPP_OUTPUT_LAYER_H */