Mirror of https://github.com/Relintai/MLPP.git (synced 2025-02-04 15:55:53 +01:00)
Added LinAlg.exponentiate for vectors, “vectorized” arsinh, arcosh
This commit is contained in:
parent 4d400e96d0
commit db159bfe5b
Activation.cpp

@@ -626,37 +626,15 @@ namespace MLPP{
     }
 
     std::vector<double> Activation::arsinh(std::vector<double> z, bool deriv){
-        if(deriv){
-            std::vector<double> deriv;
-            deriv.resize(z.size());
-            for(int i = 0; i < z.size(); i++){
-                deriv[i] = arsinh(z[i], 1);
-            }
-            return deriv;
-        }
-        std::vector<double> a;
-        a.resize(z.size());
-        for(int i = 0; i < z.size(); i++){
-            a[i] = arsinh(z[i]);
-        }
-        return a;
+        LinAlg alg;
+        if(deriv){ return alg.elementWiseDivision(alg.onevec(z.size()), alg.exponentiate(alg.addition(alg.hadamard_product(z, z), alg.onevec(z.size())), 0.5)); }
+        return alg.log(alg.addition(z, alg.exponentiate(alg.addition(alg.hadamard_product(z, z), alg.onevec(z.size())), 0.5)));
     }
 
     std::vector<std::vector<double>> Activation::arsinh(std::vector<std::vector<double>> z, bool deriv){
-        if(deriv){
-            std::vector<std::vector<double>> deriv;
-            deriv.resize(z.size());
-            for(int i = 0; i < z.size(); i++){
-                deriv[i] = arsinh(z[i], 1);
-            }
-            return deriv;
-        }
-        std::vector<std::vector<double>> a;
-        a.resize(z.size());
-        for(int i = 0; i < z.size(); i++){
-            a[i] = arsinh(z[i]);
-        }
-        return a;
+        LinAlg alg;
+        if(deriv){ return alg.elementWiseDivision(alg.onemat(z.size(), z[0].size()), alg.exponentiate(alg.addition(alg.hadamard_product(z, z), alg.onemat(z.size(), z[0].size())), 0.5)); }
+        return alg.log(alg.addition(z, alg.exponentiate(alg.addition(alg.hadamard_product(z, z), alg.onemat(z.size(), z[0].size())), 0.5)));
     }
 
     double Activation::arcosh(double z, bool deriv){
@@ -668,38 +646,14 @@ namespace MLPP{
 
     std::vector<double> Activation::arcosh(std::vector<double> z, bool deriv){
         LinAlg alg;
-        if(deriv){
-            std::vector<double> deriv;
-            deriv.resize(z.size());
-            for(int i = 0; i < z.size(); i++){
-                deriv[i] = arcosh(z[i], 1);
-            }
-            return deriv;
-        }
-        std::vector<double> a;
-        a.resize(z.size());
-        for(int i = 0; i < z.size(); i++){
-            a[i] = arcosh(z[i]);
-        }
-        return a;
+        if(deriv){ return alg.elementWiseDivision(alg.onevec(z.size()), alg.exponentiate(alg.subtraction(alg.hadamard_product(z, z), alg.onevec(z.size())), 0.5)); }
+        return alg.log(alg.addition(z, alg.exponentiate(alg.subtraction(alg.hadamard_product(z, z), alg.onevec(z.size())), 0.5)));
     }
 
     std::vector<std::vector<double>> Activation::arcosh(std::vector<std::vector<double>> z, bool deriv){
         LinAlg alg;
-        if(deriv){
-            std::vector<std::vector<double>> deriv;
-            deriv.resize(z.size());
-            for(int i = 0; i < z.size(); i++){
-                deriv[i] = arcosh(z[i], 1);
-            }
-            return deriv;
-        }
-        std::vector<std::vector<double>> a;
-        a.resize(z.size());
-        for(int i = 0; i < z.size(); i++){
-            a[i] = arcosh(z[i]);
-        }
-        return a;
+        if(deriv){ return alg.elementWiseDivision(alg.onemat(z.size(), z[0].size()), alg.exponentiate(alg.subtraction(alg.hadamard_product(z, z), alg.onemat(z.size(), z[0].size())), 0.5)); }
+        return alg.log(alg.addition(z, alg.exponentiate(alg.subtraction(alg.hadamard_product(z, z), alg.onemat(z.size(), z[0].size())), 0.5)));
     }
 
     double Activation::artanh(double z, bool deriv){
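For reference, the vectorized expressions above implement the usual closed forms arsinh(z) = log(z + sqrt(z^2 + 1)) with derivative 1/sqrt(z^2 + 1), and arcosh swaps the + for a - under the root. Below is a minimal standalone sketch of the same element-wise computation; vec_arsinh is a hypothetical helper name used only for illustration and is not part of the MLPP API.

#include <cmath>
#include <iostream>
#include <vector>

// Hypothetical standalone helper mirroring the vectorized arsinh above:
// value = log(z + sqrt(z*z + 1)), derivative = 1 / sqrt(z*z + 1).
std::vector<double> vec_arsinh(const std::vector<double>& z, bool deriv = false){
    std::vector<double> out(z.size());
    for(size_t i = 0; i < z.size(); i++){
        double s = std::sqrt(z[i] * z[i] + 1.0); // exponentiate(hadamard_product(z, z) + 1, 0.5) in LinAlg terms
        out[i] = deriv ? 1.0 / s : std::log(z[i] + s);
    }
    return out;
}

int main(){
    std::vector<double> z = {4, 5};
    for(double v : vec_arsinh(z))       std::cout << v << " "; // ~2.09471 2.31244
    std::cout << std::endl;
    for(double v : vec_arsinh(z, true)) std::cout << v << " "; // ~0.242536 0.196116
    std::cout << std::endl;
    return 0;
}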
LinAlg.cpp

@@ -636,6 +636,15 @@ namespace MLPP{
         return b;
     }
 
+    std::vector<double> LinAlg::exponentiate(std::vector<double> a, double p){
+        std::vector<double> b;
+        b.resize(a.size());
+        for(int i = 0; i < b.size(); i++){
+            b[i] = pow(a[i], p);
+        }
+        return b;
+    }
+
     double LinAlg::dot(std::vector<double> a, std::vector<double> b){
         double c = 0;
         for(int i = 0; i < a.size(); i++){
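A quick sanity check of the new element-wise power routine: the sketch below re-implements the same loop as a free function (illustrative only, outside the LinAlg class) so it compiles on its own.

#include <cmath>
#include <iostream>
#include <vector>

// Same element-wise pow as the new LinAlg::exponentiate above, written as a
// free function so the snippet needs nothing else from the library.
std::vector<double> exponentiate(const std::vector<double>& a, double p){
    std::vector<double> b(a.size());
    for(size_t i = 0; i < b.size(); i++){
        b[i] = std::pow(a[i], p);
    }
    return b;
}

int main(){
    // Raising each element to the 0.5 power is how the vectorized
    // arsinh/arcosh expressions take their element-wise square roots.
    for(double v : exponentiate({4, 9, 16}, 0.5)){
        std::cout << v << " "; // 2 3 4
    }
    std::cout << std::endl;
    return 0;
}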
LinAlg.hpp

@@ -101,6 +101,8 @@ namespace MLPP{
         std::vector<double> exp(std::vector<double> a);
 
         std::vector<double> erf(std::vector<double> a);
 
+        std::vector<double> exponentiate(std::vector<double> a, double p);
+
         double dot(std::vector<double> a, std::vector<double> b);
 
@@ -131,6 +133,7 @@ namespace MLPP{
 
         // TENSOR FUNCTIONS
         std::vector<double> flatten(std::vector<std::vector<std::vector<double>>> A);
 
         void printTensor(std::vector<std::vector<std::vector<double>>> A);
 
main.cpp (16 changed lines)
@@ -350,18 +350,18 @@ int main() {
 
     // // Testing new Functions
     // double z_s = 4;
-    // std::cout << avn.coth(z_s) << std::endl;
-    // std::cout << avn.coth(z_s, 1) << std::endl;
+    // std::cout << avn.arcosh(z_s) << std::endl;
+    // std::cout << avn.arcosh(z_s, 1) << std::endl;
 
     // std::vector<double> z_v = {4, 5};
-    // alg.printVector(avn.coth(z_v));
-    // alg.printVector(avn.coth(z_v, 1));
+    // alg.printVector(avn.arcosh(z_v));
+    // alg.printVector(avn.arcosh(z_v, 1));
 
     // std::vector<std::vector<double>> Z_m = {{4, 5}};
-    // alg.printMatrix(avn.coth(Z_m));
-    // alg.printMatrix(avn.coth(Z_m, 1));
+    // alg.printMatrix(avn.arcosh(Z_m));
+    // alg.printMatrix(avn.arcosh(Z_m, 1));
 
-    // // alg.printMatrix(alg.pinverse({{1,2}, {3,4}}));
+    // alg.printMatrix(alg.pinverse({{1,2}, {3,4}}));
 
     return 0;
 }
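As a rough cross-check of what the commented-out arcosh test would print for z = {4, 5}, the standard library's std::acosh evaluates the same closed form; a standalone sketch, independent of the MLPP classes:

#include <cmath>
#include <iostream>
#include <vector>

int main(){
    std::vector<double> z_v = {4, 5};
    // std::acosh(z) == log(z + sqrt(z*z - 1)), the same closed form used by
    // the vectorized Activation::arcosh above.
    for(double z : z_v) std::cout << std::acosh(z) << " ";            // ~2.06344 2.29243
    std::cout << std::endl;
    // Derivative: d/dz arcosh(z) = 1 / sqrt(z*z - 1).
    for(double z : z_v) std::cout << 1.0 / std::sqrt(z*z - 1) << " "; // ~0.258199 0.204124
    std::cout << std::endl;
    return 0;
}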