// pmlpp/regularization/reg.cpp
/*************************************************************************/
/* reg.cpp */
/*************************************************************************/
/* This file is part of: */
/* PMLPP Machine Learning Library */
/* https://github.com/Relintai/pmlpp */
/*************************************************************************/
/*  Copyright (c) 2023-present Péter Magyar.                             */
/*  Copyright (c) 2022-2023 Marc Melikyan                                */
/* */
/* Permission is hereby granted, free of charge, to any person obtaining */
/* a copy of this software and associated documentation files (the */
/* "Software"), to deal in the Software without restriction, including */
/* without limitation the rights to use, copy, modify, merge, publish, */
/* distribute, sublicense, and/or sell copies of the Software, and to */
/* permit persons to whom the Software is furnished to do so, subject to */
/* the following conditions: */
/* */
/* The above copyright notice and this permission notice shall be */
/* included in all copies or substantial portions of the Software. */
/* */
/* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, */
/* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF */
/* MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.*/
/* IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY */
/* CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, */
/* TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE */
/* SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. */
/*************************************************************************/
#include "reg.h"

#ifdef USING_SFW
#include "sfw.h"
#else
#include "core/math/math_defs.h"
#endif

#include "../activation/activation.h"
#include "../lin_alg/lin_alg.h"

#include <iostream>
#include <random>

real_t MLPPReg::reg_termv(const Ref<MLPPVector> &weights, real_t lambda, real_t alpha, MLPPReg::RegularizationType p_reg) {
	// Computes the scalar regularization penalty for a weight vector.
	// lambda scales the whole penalty; alpha is only used for elastic net,
	// where it balances the lasso (alpha) and ridge (1 - alpha) parts.
	int size = weights->size();
	const real_t *weights_ptr = weights->ptr();

	if (p_reg == REGULARIZATION_TYPE_RIDGE) {
		// Ridge (L2): (lambda / 2) * sum(w_i^2).
		real_t reg = 0;
		for (int i = 0; i < size; ++i) {
			real_t wi = weights_ptr[i];
			reg += wi * wi;
		}
		return reg * lambda / 2;
	} else if (p_reg == REGULARIZATION_TYPE_LASSO) {
		// Lasso (L1): lambda * sum(|w_i|).
		real_t reg = 0;
		for (int i = 0; i < size; ++i) {
			reg += ABS(weights_ptr[i]);
		}
		return reg * lambda;
	} else if (p_reg == REGULARIZATION_TYPE_ELASTIC_NET) {
		// Elastic net: lambda * sum(alpha * |w_i| + ((1 - alpha) / 2) * w_i^2).
		real_t reg = 0;
		for (int i = 0; i < size; ++i) {
			real_t wi = weights_ptr[i];
			reg += alpha * ABS(wi); // Lasso part.
			reg += ((1 - alpha) / 2) * wi * wi; // Ridge part.
		}
		return reg * lambda;
	}

	// REGULARIZATION_TYPE_NONE / weight clipping: no penalty term.
	return 0;
}
real_t MLPPReg::reg_termm(const Ref<MLPPMatrix> &weights, real_t lambda, real_t alpha, MLPPReg::RegularizationType p_reg) {
	// Matrix counterpart of reg_termv: computes the scalar regularization
	// penalty over every element of the weight matrix (flat iteration).
	int size = weights->data_size();
	const real_t *weights_ptr = weights->ptr();

	if (p_reg == REGULARIZATION_TYPE_RIDGE) {
		// Ridge (L2): (lambda / 2) * sum(w_ij^2).
		real_t reg = 0;
		for (int i = 0; i < size; ++i) {
			real_t wi = weights_ptr[i];
			reg += wi * wi;
		}
		return reg * lambda / 2;
	} else if (p_reg == REGULARIZATION_TYPE_LASSO) {
		// Lasso (L1): lambda * sum(|w_ij|).
		real_t reg = 0;
		for (int i = 0; i < size; ++i) {
			reg += ABS(weights_ptr[i]);
		}
		return reg * lambda;
	} else if (p_reg == REGULARIZATION_TYPE_ELASTIC_NET) {
		// Elastic net: lambda * sum(alpha * |w_ij| + ((1 - alpha) / 2) * w_ij^2).
		real_t reg = 0;
		for (int i = 0; i < size; ++i) {
			real_t wi = weights_ptr[i];
			reg += alpha * ABS(wi); // Lasso part.
			reg += ((1 - alpha) / 2) * wi * wi; // Ridge part.
		}
		return reg * lambda;
	}

	// REGULARIZATION_TYPE_NONE / weight clipping: no penalty term.
	return 0;
}
Ref<MLPPVector> MLPPReg::reg_weightsv(const Ref<MLPPVector> &weights, real_t lambda, real_t alpha, MLPPReg::RegularizationType p_reg) {
	// Applies one regularization adjustment to a weight vector and returns
	// the adjusted copy (the input vector is not modified).
	MLPPLinAlg alg;

	if (p_reg == REGULARIZATION_TYPE_WEIGHT_CLIPPING) {
		// For weight clipping, reg_deriv_termv already returns the clipped
		// weights themselves, so they replace the weights directly.
		return reg_deriv_termv(weights, lambda, alpha, p_reg);
	}

	// w' = w - d(reg)/dw, element-wise.
	return alg.subtractionnv(weights, reg_deriv_termv(weights, lambda, alpha, p_reg));
}
Ref<MLPPMatrix> MLPPReg::reg_weightsm(const Ref<MLPPMatrix> &weights, real_t lambda, real_t alpha, MLPPReg::RegularizationType reg) {
	// Matrix counterpart of reg_weightsv: applies one regularization
	// adjustment to a weight matrix and returns the adjusted copy.
	MLPPLinAlg alg;

	if (reg == REGULARIZATION_TYPE_WEIGHT_CLIPPING) {
		// For weight clipping, reg_deriv_termm already returns the clipped
		// weights themselves, so they replace the weights directly.
		return reg_deriv_termm(weights, lambda, alpha, reg);
	}

	// W' = W - d(reg)/dW, element-wise.
	return alg.subtractionnm(weights, reg_deriv_termm(weights, lambda, alpha, reg));
}
Ref<MLPPVector> MLPPReg::reg_deriv_termv(const Ref<MLPPVector> &weights, real_t lambda, real_t alpha, MLPPReg::RegularizationType reg) {
	// Builds the element-wise derivative vector of the regularization term
	// by evaluating reg_deriv_termvr for each component.
	int count = weights->size();

	Ref<MLPPVector> out;
	out.instance();
	out->resize(count);

	real_t *out_ptr = out->ptrw();

	for (int index = 0; index < count; ++index) {
		out_ptr[index] = reg_deriv_termvr(weights, lambda, alpha, reg, index);
	}

	return out;
}
Ref<MLPPMatrix> MLPPReg::reg_deriv_termm(const Ref<MLPPMatrix> &weights, real_t lambda, real_t alpha, MLPPReg::RegularizationType reg) {
	// Builds the element-wise derivative matrix of the regularization term
	// by evaluating reg_deriv_termmr for each entry.
	Size2i dims = weights->size();

	Ref<MLPPMatrix> out;
	out.instance();
	out->resize(dims);

	real_t *out_ptr = out->ptrw();

	for (int row = 0; row < dims.y; ++row) {
		for (int col = 0; col < dims.x; ++col) {
			out_ptr[out->calculate_index(row, col)] = reg_deriv_termmr(weights, lambda, alpha, reg, row, col);
		}
	}

	return out;
}
// Default constructor; the class carries no state of its own, so there is
// nothing to initialize here.
MLPPReg::MLPPReg() {
}
// Destructor; nothing to release.
MLPPReg::~MLPPReg() {
}
void MLPPReg::_bind_methods() {
ClassDB::bind_method(D_METHOD("reg_termv", "weights", "lambda", "alpha", "reg"), &MLPPReg::reg_termv);
ClassDB::bind_method(D_METHOD("reg_termm", "weights", "lambda", "alpha", "reg"), &MLPPReg::reg_termm);
ClassDB::bind_method(D_METHOD("reg_weightsv", "weights", "lambda", "alpha", "reg"), &MLPPReg::reg_weightsv);
ClassDB::bind_method(D_METHOD("reg_weightsm", "weights", "lambda", "alpha", "reg"), &MLPPReg::reg_weightsm);
ClassDB::bind_method(D_METHOD("reg_deriv_termv", "weights", "lambda", "alpha", "reg"), &MLPPReg::reg_deriv_termv);
ClassDB::bind_method(D_METHOD("reg_deriv_termm", "weights", "lambda", "alpha", "reg"), &MLPPReg::reg_deriv_termm);
2023-02-05 00:58:00 +01:00
BIND_ENUM_CONSTANT(REGULARIZATION_TYPE_NONE);
2023-02-04 00:54:27 +01:00
BIND_ENUM_CONSTANT(REGULARIZATION_TYPE_RIDGE);
BIND_ENUM_CONSTANT(REGULARIZATION_TYPE_LASSO);
BIND_ENUM_CONSTANT(REGULARIZATION_TYPE_ELASTIC_NET);
BIND_ENUM_CONSTANT(REGULARIZATION_TYPE_WEIGHT_CLIPPING);
}
real_t MLPPReg::reg_deriv_termvr(const Ref<MLPPVector> &weights, real_t lambda, real_t alpha, MLPPReg::RegularizationType reg, int j) {
	// Derivative of the regularization term with respect to weights[j].
	// Exception: for weight clipping this returns the clipped weight value
	// itself rather than a derivative (see reg_weightsv).
	MLPPActivation act;
	real_t wj = weights->element_get(j);

	if (reg == REGULARIZATION_TYPE_RIDGE) {
		// d/dw of (lambda / 2) * w^2.
		return lambda * wj;
	} else if (reg == REGULARIZATION_TYPE_LASSO) {
		// d/dw of lambda * |w| — sign_normr supplies the subgradient sign.
		return lambda * act.sign_normr(wj);
	} else if (reg == REGULARIZATION_TYPE_ELASTIC_NET) {
		// Weighted mix of the lasso and ridge derivatives.
		return alpha * lambda * act.sign_normr(wj) + (1 - alpha) * lambda * wj;
	} else if (reg == REGULARIZATION_TYPE_WEIGHT_CLIPPING) { // Preparation for Wasserstein GANs.
		// lambda is the lower clipping threshold, alpha the upper threshold;
		// alpha > lambda is assumed.
		if (wj > alpha) {
			return alpha;
		} else if (wj < lambda) {
			return lambda;
		} else {
			return wj;
		}
	} else {
		// REGULARIZATION_TYPE_NONE: no contribution.
		return 0;
	}
}
real_t MLPPReg::reg_deriv_termmr(const Ref<MLPPMatrix> &weights, real_t lambda, real_t alpha, MLPPReg::RegularizationType reg, int i, int j) {
	// Derivative of the regularization term with respect to weights[i][j].
	// Exception: for weight clipping this returns the clipped weight value
	// itself rather than a derivative (see reg_weightsm).
	MLPPActivation act;
	real_t wj = weights->element_get(i, j);

	if (reg == REGULARIZATION_TYPE_RIDGE) {
		// d/dw of (lambda / 2) * w^2.
		return lambda * wj;
	} else if (reg == REGULARIZATION_TYPE_LASSO) {
		// d/dw of lambda * |w| — sign_normr supplies the subgradient sign.
		return lambda * act.sign_normr(wj);
	} else if (reg == REGULARIZATION_TYPE_ELASTIC_NET) {
		// Weighted mix of the lasso and ridge derivatives.
		return alpha * lambda * act.sign_normr(wj) + (1 - alpha) * lambda * wj;
	} else if (reg == REGULARIZATION_TYPE_WEIGHT_CLIPPING) { // Preparation for Wasserstein GANs.
		// lambda is the lower clipping threshold, alpha the upper threshold;
		// alpha > lambda is assumed.
		if (wj > alpha) {
			return alpha;
		} else if (wj < lambda) {
			return lambda;
		} else {
			return wj;
		}
	} else {
		// REGULARIZATION_TYPE_NONE: no contribution.
		return 0;
	}
}