Mirror of https://github.com/Relintai/pmlpp.git (synced 2024-11-08 13:12:09 +01:00)

Commit a30a92171d (parent 0b378a2682): Removed new things from MLPPRegOld.
@@ -14,258 +14,6 @@

#include <iostream>
#include <random>

real_t MLPPRegOld::reg_termv(const Ref<MLPPVector> &weights, real_t lambda, real_t alpha, MLPPRegOld::RegularizationType p_reg) {
    int size = weights->size();
    const real_t *weights_ptr = weights->ptr();

    if (p_reg == REGULARIZATION_TYPE_RIDGE) {
        real_t reg = 0;
        for (int i = 0; i < size; ++i) {
            real_t wi = weights_ptr[i];
            reg += wi * wi;
        }
        return reg * lambda / 2;
    } else if (p_reg == REGULARIZATION_TYPE_LASSO) {
        real_t reg = 0;
        for (int i = 0; i < size; ++i) {
            reg += ABS(weights_ptr[i]);
        }
        return reg * lambda;
    } else if (p_reg == REGULARIZATION_TYPE_ELASTIC_NET) {
        real_t reg = 0;
        for (int i = 0; i < size; ++i) {
            real_t wi = weights_ptr[i];
            reg += alpha * ABS(wi); // Lasso Reg
            reg += ((1 - alpha) / 2) * wi * wi; // Ridge Reg
        }
        return reg * lambda;
    }

    return 0;
}

real_t MLPPRegOld::reg_termm(const Ref<MLPPMatrix> &weights, real_t lambda, real_t alpha, MLPPRegOld::RegularizationType p_reg) {
    int size = weights->data_size();
    const real_t *weights_ptr = weights->ptr();

    if (p_reg == REGULARIZATION_TYPE_RIDGE) {
        real_t reg = 0;
        for (int i = 0; i < size; ++i) {
            real_t wi = weights_ptr[i];
            reg += wi * wi;
        }
        return reg * lambda / 2;
    } else if (p_reg == REGULARIZATION_TYPE_LASSO) {
        real_t reg = 0;
        for (int i = 0; i < size; ++i) {
            reg += ABS(weights_ptr[i]);
        }
        return reg * lambda;
    } else if (p_reg == REGULARIZATION_TYPE_ELASTIC_NET) {
        real_t reg = 0;
        for (int i = 0; i < size; ++i) {
            real_t wi = weights_ptr[i];
            reg += alpha * ABS(wi); // Lasso Reg
            reg += ((1 - alpha) / 2) * wi * wi; // Ridge Reg
        }
        return reg * lambda;
    }

    return 0;
}

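For reference, the penalties accumulated by reg_termv and reg_termm above are the standard ridge, lasso, and elastic-net terms, with lambda as the regularization strength and alpha as the elastic-net mix:

\[
R_{\text{ridge}}(w) = \frac{\lambda}{2} \sum_i w_i^2, \qquad
R_{\text{lasso}}(w) = \lambda \sum_i |w_i|, \qquad
R_{\text{EN}}(w) = \lambda \sum_i \left( \alpha |w_i| + \frac{1 - \alpha}{2} w_i^2 \right)
\]
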
Ref<MLPPVector> MLPPRegOld::reg_weightsv(const Ref<MLPPVector> &weights, real_t lambda, real_t alpha, MLPPRegOld::RegularizationType p_reg) {
    MLPPLinAlg alg;

    if (p_reg == REGULARIZATION_TYPE_WEIGHT_CLIPPING) {
        return reg_deriv_termv(weights, lambda, alpha, p_reg);
    }

    return alg.subtractionnv(weights, reg_deriv_termv(weights, lambda, alpha, p_reg));

    // for(int i = 0; i < weights.size(); i++){
    //     weights[i] -= regDerivTerm(weights, lambda, alpha, reg, i);
    // }
    // return weights;
}

Ref<MLPPMatrix> MLPPRegOld::reg_weightsm(const Ref<MLPPMatrix> &weights, real_t lambda, real_t alpha, MLPPRegOld::RegularizationType reg) {
    MLPPLinAlg alg;

    if (reg == REGULARIZATION_TYPE_WEIGHT_CLIPPING) {
        return reg_deriv_termm(weights, lambda, alpha, reg);
    }

    return alg.subtractionm(weights, reg_deriv_termm(weights, lambda, alpha, reg));

    // for(int i = 0; i < weights.size(); i++){
    //     for(int j = 0; j < weights[i].size(); j++){
    //         weights[i][j] -= regDerivTerm(weights, lambda, alpha, reg, i, j);
    //     }
    // }
    // return weights;
}

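Taken together, reg_weightsv and reg_weightsm return the weights with the elementwise regularization derivative subtracted, except under WEIGHT_CLIPPING, where the clamped weights are returned directly. A minimal usage sketch follows (it is not from the commit itself; the include path, weight values, lambda, and alpha are illustrative assumptions):

// Illustrative sketch only; "reg_old.h" is an assumed include path and all
// numeric values are placeholders.
#include "reg_old.h"

real_t regularized_step_example() {
    // Build a small weight vector using the same resize()/ptrw() pattern
    // seen in reg_deriv_termv above.
    Ref<MLPPVector> weights;
    weights.instance();
    weights->resize(3);

    real_t *w = weights->ptrw();
    w[0] = 0.5;
    w[1] = -1.5;
    w[2] = 2.0;

    MLPPRegOld regularization;
    real_t lambda = 0.01;
    real_t alpha = 0.5; // only consulted by ELASTIC_NET (and as the upper bound for WEIGHT_CLIPPING)

    // Penalty that would be added to the cost function.
    real_t penalty = regularization.reg_termv(weights, lambda, alpha, MLPPRegOld::REGULARIZATION_TYPE_RIDGE);

    // Weights with the ridge derivative (lambda * w_i) subtracted elementwise.
    weights = regularization.reg_weightsv(weights, lambda, alpha, MLPPRegOld::REGULARIZATION_TYPE_RIDGE);

    return penalty;
}
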
Ref<MLPPVector> MLPPRegOld::reg_deriv_termv(const Ref<MLPPVector> &weights, real_t lambda, real_t alpha, MLPPRegOld::RegularizationType reg) {
    Ref<MLPPVector> reg_driv;
    reg_driv.instance();

    int size = weights->size();

    reg_driv->resize(size);

    real_t *reg_driv_ptr = reg_driv->ptrw();

    for (int i = 0; i < size; ++i) {
        reg_driv_ptr[i] = reg_deriv_termvr(weights, lambda, alpha, reg, i);
    }

    return reg_driv;
}

Ref<MLPPMatrix> MLPPRegOld::reg_deriv_termm(const Ref<MLPPMatrix> &weights, real_t lambda, real_t alpha, MLPPRegOld::RegularizationType reg) {
    Ref<MLPPMatrix> reg_driv;
    reg_driv.instance();

    Size2i size = weights->size();

    reg_driv->resize(size);

    real_t *reg_driv_ptr = reg_driv->ptrw();

    for (int i = 0; i < size.y; ++i) {
        for (int j = 0; j < size.x; ++j) {
            reg_driv_ptr[reg_driv->calculate_index(i, j)] = reg_deriv_termmr(weights, lambda, alpha, reg, i, j);
        }
    }

    return reg_driv;
}

MLPPRegOld::MLPPRegOld() {
}

MLPPRegOld::~MLPPRegOld() {
}

void MLPPRegOld::_bind_methods() {
    ClassDB::bind_method(D_METHOD("reg_termv", "weights", "lambda", "alpha", "reg"), &MLPPRegOld::reg_termv);
    ClassDB::bind_method(D_METHOD("reg_termm", "weights", "lambda", "alpha", "reg"), &MLPPRegOld::reg_termm);

    ClassDB::bind_method(D_METHOD("reg_weightsv", "weights", "lambda", "alpha", "reg"), &MLPPRegOld::reg_weightsv);
    ClassDB::bind_method(D_METHOD("reg_weightsm", "weights", "lambda", "alpha", "reg"), &MLPPRegOld::reg_weightsm);

    ClassDB::bind_method(D_METHOD("reg_deriv_termv", "weights", "lambda", "alpha", "reg"), &MLPPRegOld::reg_deriv_termv);
    ClassDB::bind_method(D_METHOD("reg_deriv_termm", "weights", "lambda", "alpha", "reg"), &MLPPRegOld::reg_deriv_termm);

    BIND_ENUM_CONSTANT(REGULARIZATION_TYPE_NONE);
    BIND_ENUM_CONSTANT(REGULARIZATION_TYPE_RIDGE);
    BIND_ENUM_CONSTANT(REGULARIZATION_TYPE_LASSO);
    BIND_ENUM_CONSTANT(REGULARIZATION_TYPE_ELASTIC_NET);
    BIND_ENUM_CONSTANT(REGULARIZATION_TYPE_WEIGHT_CLIPPING);
}

real_t MLPPRegOld::reg_deriv_termvr(const Ref<MLPPVector> &weights, real_t lambda, real_t alpha, MLPPRegOld::RegularizationType reg, int j) {
    MLPPActivation act;

    real_t wj = weights->get_element(j);

    if (reg == REGULARIZATION_TYPE_RIDGE) {
        return lambda * wj;
    } else if (reg == REGULARIZATION_TYPE_LASSO) {
        return lambda * act.sign(wj);
    } else if (reg == REGULARIZATION_TYPE_ELASTIC_NET) {
        return alpha * lambda * act.sign(wj) + (1 - alpha) * lambda * wj;
    } else if (reg == REGULARIZATION_TYPE_WEIGHT_CLIPPING) { // Preparation for Wasserstein GANs.
        // We assume lambda is the lower clipping threshold, while alpha is the higher clipping threshold.
        // alpha > lambda.
        if (wj > alpha) {
            return alpha;
        } else if (wj < lambda) {
            return lambda;
        } else {
            return wj;
        }
    } else {
        return 0;
    }
}

real_t MLPPRegOld::reg_deriv_termmr(const Ref<MLPPMatrix> &weights, real_t lambda, real_t alpha, MLPPRegOld::RegularizationType reg, int i, int j) {
    MLPPActivation act;

    real_t wj = weights->get_element(i, j);

    if (reg == REGULARIZATION_TYPE_RIDGE) {
        return lambda * wj;
    } else if (reg == REGULARIZATION_TYPE_LASSO) {
        return lambda * act.sign(wj);
    } else if (reg == REGULARIZATION_TYPE_ELASTIC_NET) {
        return alpha * lambda * act.sign(wj) + (1 - alpha) * lambda * wj;
    } else if (reg == REGULARIZATION_TYPE_WEIGHT_CLIPPING) { // Preparation for Wasserstein GANs.
        // We assume lambda is the lower clipping threshold, while alpha is the higher clipping threshold.
        // alpha > lambda.
        if (wj > alpha) {
            return alpha;
        } else if (wj < lambda) {
            return lambda;
        } else {
            return wj;
        }
    } else {
        return 0;
    }
}

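The per-element terms returned by reg_deriv_termvr and reg_deriv_termmr above reduce to the following, where the weight-clipping branch (used for Wasserstein GANs, per the comments) clamps each weight into [lambda, alpha] rather than returning a true derivative:

\[
r'(w_j) =
\begin{cases}
\lambda\, w_j & \text{ridge} \\
\lambda\, \operatorname{sign}(w_j) & \text{lasso} \\
\alpha \lambda\, \operatorname{sign}(w_j) + (1 - \alpha)\lambda\, w_j & \text{elastic net} \\
\operatorname{clamp}(w_j, \lambda, \alpha) & \text{weight clipping} \\
0 & \text{otherwise}
\end{cases}
\]
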
real_t MLPPRegOld::regTerm(std::vector<real_t> weights, real_t lambda, real_t alpha, std::string p_reg) {
    if (p_reg == "Ridge") {
        real_t reg = 0;
        for (uint32_t i = 0; i < weights.size(); i++) {
            reg += weights[i] * weights[i];
        }
        return reg * lambda / 2;
    } else if (p_reg == "Lasso") {
        real_t reg = 0;
        for (uint32_t i = 0; i < weights.size(); i++) {
            reg += abs(weights[i]);
        }
        return reg * lambda;
    } else if (p_reg == "ElasticNet") {
        real_t reg = 0;
        for (uint32_t i = 0; i < weights.size(); i++) {
            reg += alpha * abs(weights[i]); // Lasso Reg
            reg += ((1 - alpha) / 2) * weights[i] * weights[i]; // Ridge Reg
        }
        return reg * lambda;
    }
    return 0;
}

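As a quick numeric check of the legacy string-keyed regTerm kept by this commit, a small sketch follows (not from the commit itself; the include path and weight values are illustrative assumptions):

// Illustrative sketch only; "reg_old.h" is an assumed include path.
#include <vector>

#include "reg_old.h"

void reg_term_check() {
    MLPPRegOld regularization;
    std::vector<real_t> w = { 0.5, -1.5, 2.0 };

    // Ridge: 0.1 / 2 * (0.25 + 2.25 + 4.0) == 0.325
    real_t ridge = regularization.regTerm(w, 0.1, 0.0, "Ridge");

    // Lasso: 0.1 * (0.5 + 1.5 + 2.0) == 0.4
    real_t lasso = regularization.regTerm(w, 0.1, 0.0, "Lasso");

    (void)ridge;
    (void)lasso;
}
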
real_t MLPPRegOld::regTerm(std::vector<std::vector<real_t>> weights, real_t lambda, real_t alpha, std::string p_reg) {
    if (p_reg == "Ridge") {
        real_t reg = 0;
        for (uint32_t i = 0; i < weights.size(); i++) {
            for (uint32_t j = 0; j < weights[i].size(); j++) {
                reg += weights[i][j] * weights[i][j];
            }
        }
        return reg * lambda / 2;
    } else if (p_reg == "Lasso") {
        real_t reg = 0;
        for (uint32_t i = 0; i < weights.size(); i++) {
            for (uint32_t j = 0; j < weights[i].size(); j++) {
                reg += abs(weights[i][j]);
            }
        }
        return reg * lambda;
    } else if (p_reg == "ElasticNet") {
        real_t reg = 0;
        for (uint32_t i = 0; i < weights.size(); i++) {
            for (uint32_t j = 0; j < weights[i].size(); j++) {
                reg += alpha * abs(weights[i][j]); // Lasso Reg
                reg += ((1 - alpha) / 2) * weights[i][j] * weights[i][j]; // Ridge Reg
            }
        }
        return reg * lambda;
    }
    return 0;
}

std::vector<real_t> MLPPRegOld::regWeights(std::vector<real_t> weights, real_t lambda, real_t alpha, std::string reg) {
    MLPPLinAlg alg;
    if (reg == "WeightClipping") {
@@ -11,45 +11,10 @@

#include "core/math/math_defs.h"

#include "core/object/reference.h"

#include "../lin_alg/mlpp_matrix.h"
#include "../lin_alg/mlpp_vector.h"

#include <string>
#include <vector>

class MLPPRegOld : public Reference {
    GDCLASS(MLPPRegOld, Reference);

public:
    enum RegularizationType {
        REGULARIZATION_TYPE_NONE = 0,
        REGULARIZATION_TYPE_RIDGE,
        REGULARIZATION_TYPE_LASSO,
        REGULARIZATION_TYPE_ELASTIC_NET,
        REGULARIZATION_TYPE_WEIGHT_CLIPPING,
    };

    real_t reg_termv(const Ref<MLPPVector> &weights, real_t lambda, real_t alpha, RegularizationType reg);
    real_t reg_termm(const Ref<MLPPMatrix> &weights, real_t lambda, real_t alpha, RegularizationType reg);

    Ref<MLPPVector> reg_weightsv(const Ref<MLPPVector> &weights, real_t lambda, real_t alpha, RegularizationType reg);
    Ref<MLPPMatrix> reg_weightsm(const Ref<MLPPMatrix> &weights, real_t lambda, real_t alpha, RegularizationType reg);

    Ref<MLPPVector> reg_deriv_termv(const Ref<MLPPVector> &weights, real_t lambda, real_t alpha, RegularizationType reg);
    Ref<MLPPMatrix> reg_deriv_termm(const Ref<MLPPMatrix> &weights, real_t lambda, real_t alpha, RegularizationType reg);

    MLPPRegOld();
    ~MLPPRegOld();

protected:
    static void _bind_methods();

private:
    real_t reg_deriv_termvr(const Ref<MLPPVector> &weights, real_t lambda, real_t alpha, RegularizationType reg, int j);
    real_t reg_deriv_termmr(const Ref<MLPPMatrix> &weights, real_t lambda, real_t alpha, RegularizationType reg, int i, int j);

class MLPPRegOld {
public:
    // ======== OLD =========

@@ -67,6 +32,4 @@ private:
    real_t regDerivTerm(std::vector<std::vector<real_t>> weights, real_t lambda, real_t alpha, std::string reg, int i, int j);
};

VARIANT_ENUM_CAST(MLPPRegOld::RegularizationType);

#endif /* Reg_hpp */