added rgb2gray

novak_99 2021-11-30 15:00:29 -08:00
parent 66883a3f5d
commit 2e935a4d87
3 changed files with 41 additions and 31 deletions


@@ -139,14 +139,18 @@ namespace MLPP{
}
// Images
void Data::getImage(std::string fileName, std::vector<double>& image){
    std::ifstream img(fileName, std::ios::binary);
    if(!img.is_open()){
        std::cout << "The file failed to open." << std::endl;
    }
    std::vector<double> v{std::istreambuf_iterator<char>{img}, {}};
    image = v;
}
std::vector<std::vector<double>> Data::rgb2gray(std::vector<std::vector<std::vector<double>>> input){
    std::vector<std::vector<double>> grayScale;
    grayScale.resize(input[0].size());
    for(int i = 0; i < grayScale.size(); i++){
        grayScale[i].resize(input[0][i].size());
    }
    for(int i = 0; i < grayScale.size(); i++){
        for(int j = 0; j < grayScale[i].size(); j++){
            grayScale[i][j] = 0.299 * input[0][i][j] + 0.587 * input[1][i][j] + 0.114 * input[2][i][j];
        }
    }
    return grayScale;
}
// TEXT-BASED & NLP
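
For context, the weights 0.299, 0.587, and 0.114 are the ITU-R BT.601 luma coefficients, so each output pixel is 0.299 R + 0.587 G + 0.114 B. A minimal standalone sketch of the same conversion, assuming the channels-first layout input[channel][row][col] used above; the free-function form and the 2x2 test image are illustrative, not part of the commit:

#include <iostream>
#include <vector>

// Standalone sketch of the rgb2gray logic in this commit.
std::vector<std::vector<double>> rgb2gray(const std::vector<std::vector<std::vector<double>>>& input){
    std::vector<std::vector<double>> grayScale(input[0].size());
    for(size_t i = 0; i < grayScale.size(); i++){
        grayScale[i].resize(input[0][i].size());
        for(size_t j = 0; j < grayScale[i].size(); j++){
            // ITU-R BT.601 luma weights.
            grayScale[i][j] = 0.299 * input[0][i][j] + 0.587 * input[1][i][j] + 0.114 * input[2][i][j];
        }
    }
    return grayScale;
}

int main(){
    // 3 channels (R, G, B), each 2x2.
    std::vector<std::vector<std::vector<double>>> img = {
        {{255, 0}, {0, 128}},   // R
        {{255, 0}, {255, 128}}, // G
        {{255, 255}, {0, 128}}  // B
    };
    for(const auto& row : rgb2gray(img)){
        for(double p : row) std::cout << p << " ";
        std::cout << std::endl;
    }
    return 0; // Expect 255, 29.07 / 149.685, 128.
}
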
@@ -449,7 +453,7 @@ namespace MLPP{
if(type == "Skipgram"){
model = new SoftmaxNet(outputSet, inputSet, dimension);
}
else { // else = CBOW. We maintain it is a default, however.
else { // else = CBOW. We maintain it is a default.
model = new SoftmaxNet(inputSet, outputSet, dimension);
}
model->gradientDescent(learning_rate, max_epoch, 1);
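
For context on the argument swap above: CBOW predicts a center word from its surrounding context, while Skipgram predicts the context from the center word, so the same two sets can serve both objectives with their roles reversed. A minimal sketch of how such (input, output) pairs mirror each other, assuming a window size of 1; the token list and pair format are illustrative, not the library's Data API:

#include <iostream>
#include <string>
#include <utility>
#include <vector>

int main(){
    std::vector<std::string> tokens = {"the", "cat", "sat", "down"};
    std::vector<std::pair<std::string, std::string>> pairs; // (input, output)
    int window = 1;
    for(int i = 0; i < (int)tokens.size(); i++){
        for(int j = i - window; j <= i + window; j++){
            if(j < 0 || j >= (int)tokens.size() || j == i) continue;
            // Skipgram: the center word predicts each context word.
            pairs.push_back({tokens[i], tokens[j]});
            // CBOW simply swaps the roles: {tokens[j], tokens[i]}.
        }
    }
    for(const auto& [in, out] : pairs)
        std::cout << in << " -> " << out << std::endl;
    return 0;
}
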


@@ -29,7 +29,7 @@ class Data{
void printData(std::string& inputName, std::string& outputName, std::vector <double>& inputSet, std::vector <double>& outputSet);
// Images
void getImage(std::string fileName, std::vector<double>& image);
std::vector<std::vector<double>> rgb2gray(std::vector<std::vector<std::vector<double>>> input);
// Text-Based & NLP
std::string toLower(std::string text);


@@ -121,9 +121,9 @@ int main() {
// // OBJECTS
Stat stat;
LinAlg alg;
// Activation avn;
// Cost cost;
// Data data;
Activation avn;
Cost cost;
Data data;
Convolutions conv;
// DATA SETS
@@ -305,10 +305,12 @@ int main() {
// // MLP
// std::vector<std::vector<double>> inputSet = {{0,0,1,1}, {0,1,0,1}};
// inputSet = alg.transpose(inputSet);
// std::vector<double> outputSet = {0,1,1,0};
// MLP model(alg.transpose(inputSet), outputSet, 2);
// MLP model(inputSet, outputSet, 2);
// model.gradientDescent(0.1, 10000, 0);
// alg.printVector(model.modelSetTest(alg.transpose(inputSet)));
// alg.printVector(model.modelSetTest(inputSet));
// std::cout << "ACCURACY: " << 100 * model.score() << "%" << std::endl;
// // SOFTMAX NETWORK
@@ -343,17 +345,21 @@ int main() {
// alg.printVector(ann.modelSetTest(alg.transpose(inputSet)));
// std::cout << "ACCURACY: " << 100 * ann.score() << "%" << std::endl;
// std::vector<std::vector<double>> inputSet = {{0,0,1,1}, {0,1,0,1}};
// std::vector<double> outputSet = {0,1,1,0};
// ANN ann(alg.transpose(inputSet), outputSet);
// ann.addLayer(10, "Sigmoid");
// ann.addLayer(10, "Sigmoid");
// ann.addLayer(10, "Sigmoid");
// typedef std::vector<std::vector<double>> Matrix;
// typedef std::vector<double> Vector;
// Matrix inputSet = {{0,0}, {0,1}, {1,0}, {1,1}}; // XOR
// Vector outputSet = {0,1,1,0};
// ANN ann(inputSet, outputSet);
// ann.addLayer(10, "Sigmoid");
// ann.addLayer(10, "Sigmoid"); // Add more layers as needed.
// ann.addOutputLayer("Sigmoid", "LogLoss");
// ann.gradientDescent(0.1, 80000, 0);
// alg.printVector(ann.modelSetTest(alg.transpose(inputSet)));
// std::cout << "ACCURACY: " << 100 * ann.score() << "%" << std::endl;
// ann.gradientDescent(0.1, 20000, 0);
// Vector predictions = ann.modelSetTest(inputSet);
// alg.printVector(predictions); // Testing out the model's preds for train set.
// std::cout << "ACCURACY: " << 100 * ann.score() << "%" << std::endl; // Accuracy.
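
As background for the XOR example above: XOR is not linearly separable, which is why hidden Sigmoid layers are added before the output layer. A minimal sketch with hypothetical hand-picked weights (not weights learned by the ANN class) shows how a single hidden layer of two units suffices:

#include <cmath>
#include <iostream>

// Steep sigmoid so the hand-picked weights act nearly like step functions.
double sigmoid(double z){ return 1.0 / (1.0 + std::exp(-z)); }

int main(){
    // Hypothetical weights: h1 ~ OR(x1,x2), h2 ~ AND(x1,x2), y ~ h1 AND NOT h2 = XOR.
    double inputs[4][2] = {{0,0},{0,1},{1,0},{1,1}};
    for(auto& x : inputs){
        double h1 = sigmoid(20*x[0] + 20*x[1] - 10); // OR
        double h2 = sigmoid(20*x[0] + 20*x[1] - 30); // AND
        double y  = sigmoid(20*h1 - 20*h2 - 10);     // h1 AND NOT h2
        std::cout << x[0] << " XOR " << x[1] << " ~ " << y << std::endl;
    }
    return 0; // Outputs near 0, 1, 1, 0.
}
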
// // DYNAMICALLY SIZED MANN (Multidimensional Output ANN)
// std::vector<std::vector<double>> inputSet = {{1,2,3},{2,4,6},{3,6,9},{4,8,12}};
@@ -542,13 +548,13 @@ int main() {
// alg.printMatrix(R);
// // Checking positive-definiteness checker. For Cholesky Decomp.
std::vector<std::vector<double>> A =
{
{1,-1,-1,-1},
{-1,2,2,2},
{-1,2,3,1},
{-1,2,1,4}
};
// std::vector<std::vector<double>> A =
// {
// {1,-1,-1,-1},
// {-1,2,2,2},
// {-1,2,3,1},
// {-1,2,1,4}
// };
// std::cout << std::boolalpha << alg.positiveDefiniteChecker(A) << std::endl;
// auto [L, Lt] = alg.chol(A); // works.
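
For reference on the two commented-out checks: a symmetric matrix is positive definite exactly when the Cholesky factorization A = L * L^T exists with strictly positive diagonal entries in L. A minimal standalone sketch of that test, run on the same A; this is an illustration, not the library's positiveDefiniteChecker or chol:

#include <cmath>
#include <iostream>
#include <vector>

// Attempt A = L * L^T; returns false if a non-positive pivot appears,
// i.e. if A is not positive definite.
bool cholesky(const std::vector<std::vector<double>>& A, std::vector<std::vector<double>>& L){
    int n = A.size();
    L.assign(n, std::vector<double>(n, 0.0));
    for(int i = 0; i < n; i++){
        for(int j = 0; j <= i; j++){
            double sum = A[i][j];
            for(int k = 0; k < j; k++) sum -= L[i][k] * L[j][k];
            if(i == j){
                if(sum <= 0) return false; // pivot must be > 0
                L[i][i] = std::sqrt(sum);
            } else {
                L[i][j] = sum / L[j][j];
            }
        }
    }
    return true;
}

int main(){
    std::vector<std::vector<double>> A = {
        {1,-1,-1,-1},
        {-1,2,2,2},
        {-1,2,3,1},
        {-1,2,1,4}
    };
    std::vector<std::vector<double>> L;
    std::cout << std::boolalpha << cholesky(A, L) << std::endl; // true: A is positive definite
    return 0;
}
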
@@ -604,7 +610,7 @@ int main() {
// alg.printMatrix(conv.dx(A));
// alg.printMatrix(conv.dy(A));
alg.printMatrix(conv.gradOrientation(A));
// alg.printMatrix(conv.gradOrientation(A));
return 0;
}
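
For reference on the gradOrientation call left commented out at the end: the gradient orientation of an image is conventionally atan2(dy, dx) per pixel, with dx and dy taken from derivative convolutions such as Sobel. A minimal sketch under that assumption; the 3x3 ramp image and free-standing code are illustrative, not the Convolutions API:

#include <cmath>
#include <iostream>
#include <vector>

int main(){
    // 3x3 test image with a horizontal intensity ramp (left dark, right bright).
    std::vector<std::vector<double>> I = {
        {0, 1, 2},
        {0, 1, 2},
        {0, 1, 2}
    };
    // Sobel kernels for the x and y derivatives.
    int Kx[3][3] = {{-1,0,1},{-2,0,2},{-1,0,1}};
    int Ky[3][3] = {{-1,-2,-1},{0,0,0},{1,2,1}};
    // Correlate at the single interior pixel (1,1).
    double dx = 0, dy = 0;
    for(int u = 0; u < 3; u++){
        for(int v = 0; v < 3; v++){
            dx += Kx[u][v] * I[u][v];
            dy += Ky[u][v] * I[u][v];
        }
    }
    // Orientation in radians; 0 means intensity increases along +x.
    std::cout << std::atan2(dy, dx) << std::endl; // prints 0 for this ramp
    return 0;
}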