LCOV - code coverage report
Current view: top level - src - NeuralNetworkBuilder.cpp (source / functions)
Test: lcov.info
Date: 2024-12-28 17:36:05

                  Hit    Total   Coverage
Lines:            154      169     91.1 %
Functions:         12       13     92.3 %

          Line data    Source code
       1             : #include "NeuralNetworkBuilder.h"
       2             : #include "Common.h"
       3             : #include "Layer.h"
       4             : #include "LayerHidden.h"
       5             : #include "LayerInput.h"
       6             : #include "LayerOutput.h"
       7             : #include "Manager.h"
       8             : #include "NeuralNetworkImportExportFacade.h"
       9             : #include "SimpleLogger.h"
      10             : #include "exception/NeuralNetworkException.h"
      11             : #include <cstddef>
      12             : #include <filesystem>
      13             : #include <opencv2/core/matx.hpp>
      14             : 
      15             : using namespace sipai;
      16             : 
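                      : // Typical build sequence (a sketch inferred from the builder steps
                      : // below; the actual caller-side code is not part of this file):
                      : //
                      : //   auto network = NeuralNetworkBuilder()
                      : //                      .createOrImport()
                      : //                      .addLayers()
                      : //                      .bindLayers()
                      : //                      .addNeighbors()
                      : //                      .initializeWeights()
                      : //                      .setActivationFunction()
                      : //                      .build();
                      : 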
      17           9 : NeuralNetworkBuilder::NeuralNetworkBuilder()
      18           9 :     : app_params_(Manager::getInstance().app_params),
      19          18 :       network_params_(Manager::getInstance().network_params) {}
      20             : 
      21           0 : NeuralNetworkBuilder::NeuralNetworkBuilder(AppParams &appParams,
      22           0 :                                            NeuralNetworkParams &networkParams)
      23           0 :     : app_params_(appParams), network_params_(networkParams) {}
      24             : 
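                      : // Imports the model from app_params_.network_to_import when that
                      : // file exists; otherwise creates a fresh, empty NeuralNetwork.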
      25             : // TODO: refactor this class by splitting create and import functions
      26           8 : NeuralNetworkBuilder &NeuralNetworkBuilder::createOrImport() {
      27           9 :   if (!app_params_.network_to_import.empty() &&
      28           9 :       std::filesystem::exists(app_params_.network_to_import)) {
      29           1 :     NeuralNetworkImportExportFacade neuralNetworkImportExport;
      30           1 :     SimpleLogger::LOG_INFO("Importing the neural network from ",
      31           1 :                            app_params_.network_to_import, "...");
      32             :     network_ =
      33           1 :         neuralNetworkImportExport.importModel(app_params_, network_params_);
      34           1 :     isImported = true;
      35           1 :   } else {
      36           7 :     SimpleLogger::LOG_INFO("Creating the neural network...");
      37           7 :     network_ = std::make_unique<NeuralNetwork>();
      38           7 :     isImported = false;
      39           7 :     _incrementProgress(10);
      40             :   }
      41           8 :   return *this;
      42             : }
      43             : 
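                      : // Builds the layer stack from network_params_: one input layer,
                      : // hiddens_count hidden layers, then one output layer, each pushed
                      : // onto network_->layers. Skipped for an imported model, whose
                      : // layers already exist.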
      44           8 : NeuralNetworkBuilder &NeuralNetworkBuilder::addLayers() {
      45           8 :   if (isImported) {
      46           1 :     return *this;
      47             :   }
      48             : 
      49           7 :   SimpleLogger::LOG_INFO("Adding layers...");
      50           7 :   if (!network_) {
      51           0 :     throw NeuralNetworkException("neural network null");
      52             :   }
      53           7 :   if (!network_->layers.empty()) {
      54           0 :     throw NeuralNetworkException("layers not empty");
      55             :   }
      56             : 
      57             :   // Add Input Layer
      58           7 :   auto inputLayer = new LayerInput(network_params_.input_size_x,
      59           7 :                                    network_params_.input_size_y);
      60           7 :   network_->layers.push_back(inputLayer);
      61           7 :   _incrementProgress(10);
      62             : 
      63             :   // Add Hidden Layers
      64          17 :   for (size_t i = 0; i < network_params_.hiddens_count; ++i) {
      65          10 :     auto hiddenLayer = new LayerHidden(network_params_.hidden_size_x,
      66          10 :                                        network_params_.hidden_size_y);
      67          10 :     hiddenLayer->eactivationFunction =
      68          10 :         network_params_.hidden_activation_function;
      69          10 :     hiddenLayer->activationFunctionAlpha =
      70          10 :         network_params_.hidden_activation_alpha;
      71          10 :     network_->layers.push_back(hiddenLayer);
      72          10 :     _incrementProgress(10 * ((int)i + 1) / (int)network_params_.hiddens_count);
      73             :   }
      74             : 
      75             :   // Add Output Layer
      76           7 :   auto outputLayer = new LayerOutput(network_params_.output_size_x,
      77           7 :                                      network_params_.output_size_y);
      78           7 :   outputLayer->eactivationFunction = network_params_.output_activation_function;
      79           7 :   outputLayer->activationFunctionAlpha =
      80           7 :       network_params_.output_activation_alpha;
      81           7 :   network_->layers.push_back(outputLayer);
      82           7 :   _incrementProgress(10);
      83           7 :   return *this;
      84             : }
      85             : 
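                      : // Chains the layers together: layers[i] points back to layers[i-1]
                      : // and forward to layers[i+1]; the first layer keeps a null
                      : // previousLayer and the last a null nextLayer.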
      86           8 : NeuralNetworkBuilder &NeuralNetworkBuilder::bindLayers() {
      87           8 :   SimpleLogger::LOG_INFO("Binding layers...");
      88           8 :   if (!network_) {
      89           0 :     throw NeuralNetworkException("neural network null");
      90             :   }
      91           8 :   if (network_->layers.empty()) {
      92           0 :     throw NeuralNetworkException("empty layers");
      93             :   }
      94          35 :   for (size_t i = 0; i < network_->layers.size(); ++i) {
      95          27 :     if (i > 0) {
      96          19 :       network_->layers.at(i)->previousLayer = network_->layers.at(i - 1);
      97             :     }
      98          27 :     if (i < network_->layers.size() - 1) {
      99          19 :       network_->layers.at(i)->nextLayer = network_->layers.at(i + 1);
     100             :     }
     101             :   }
     102           8 :   return *this;
     103             : }
     104             : 
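                      : // Connects every neuron outside the input layer to its 4-connected
                      : // lattice neighbors within the same layer. Neighbor weights are
                      : // zero-filled for an imported model and drawn from a standard
                      : // normal distribution otherwise.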
     105           8 : NeuralNetworkBuilder &NeuralNetworkBuilder::addNeighbors() {
      106           8 :   SimpleLogger::LOG_INFO("Adding neuron neighbor connections...");
     107           8 :   if (!network_) {
     108           0 :     throw NeuralNetworkException("neural network null");
     109             :   }
     110           8 :   if (network_->layers.empty()) {
     111           0 :     throw NeuralNetworkException("empty layers");
     112             :   }
     113             : 
     114             :   // For each possible direction (up, down, left, right), check if there
     115             :   // is a neuron in that direction and, if so, establish a connection
     116             :   std::vector<std::pair<int, int>> directions = {
     117           8 :       {-1, 0}, {1, 0}, {0, -1}, {0, 1}};
     118             : 
     119           8 :   int counter = 0;
     120          35 :   for (auto layer : network_->layers) {
     121          27 :     if (layer->layerType == LayerType::LayerInput) {
     122           8 :       continue;
     123             :     }
     124          65 :     for (auto &rows : layer->neurons) {
     125         184 :       for (auto &neuron : rows) {
     126         138 :         size_t pos_x = neuron.index_x;
     127         138 :         size_t pos_y = neuron.index_y;
     128         690 :         for (auto [dx, dy] : directions) {
     129         552 :           int nx = static_cast<int>(pos_x) + dx;
     130         552 :           int ny = static_cast<int>(pos_y) + dy;
     131             : 
     132         552 :           if (nx >= 0 && nx < static_cast<int>(layer->size_x) && ny >= 0 &&
     133         403 :               ny < static_cast<int>(layer->size_y)) {
     134         346 :             Neuron &neighbor = layer->neurons[ny][nx];
     135             : 
     136             :             cv::Vec4f weight =
     137         346 :                 isImported ? cv::Vec4f::all(0.0) : cv::Vec4f::randn(0.0, 1.0);
     138             : 
     139         346 :             neuron.neighbors.push_back(NeuronConnection(&neighbor, weight));
     140             :           }
     141             :         }
     142             :       }
     143             :     }
     144          19 :     counter++;
     145             :   }
     146             : 
     147           8 :   return *this;
     148           8 : }
     149             : 
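                      : // For an imported model, loads the neuron weights from the
                      : // companion CSV file; otherwise sizes each neuron's weights to the
                      : // previous layer and records the largest previous-layer size in
                      : // network_->max_weights.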
     150           8 : NeuralNetworkBuilder &NeuralNetworkBuilder::initializeWeights() {
     151           8 :   if (isImported) {
     152           1 :     NeuralNetworkImportExportFacade neuralNetworkImportExport;
     153             :     std::string filenameCsv =
     154           1 :         Common::getFilenameCsv(app_params_.network_to_import);
      155           1 :     SimpleLogger::LOG_INFO("Importing layer neuron weights from ",
     156             :                            filenameCsv, "...");
     157           1 :     neuralNetworkImportExport.importWeights(
     158           1 :         network_, app_params_, progressCallback_, progressCallbackValue_);
     159           1 :     return *this;
     160           1 :   }
     161             : 
      162           7 :   SimpleLogger::LOG_INFO("Initializing layer neuron weights...");
     163           7 :   if (!network_) {
     164           0 :     throw NeuralNetworkException("neural network null");
     165             :   }
     166           7 :   if (network_->layers.empty()) {
     167           0 :     throw NeuralNetworkException("empty layers");
     168             :   }
      169             :   // Initialize the weights and track max_weights in the same pass
     170           7 :   network_->max_weights = 0;
     171           7 :   int counter = 0;
     172          31 :   for (auto layer : network_->layers) {
     173          24 :     if (layer->previousLayer != nullptr) {
     174          58 :       for (auto &rows : layer->neurons) {
     175         164 :         for (auto &n : rows) {
     176         123 :           n.initWeights(layer->previousLayer->size_x,
     177         123 :                         layer->previousLayer->size_y);
     178         123 :           size_t new_size = layer->previousLayer->total();
     179         123 :           if (new_size > network_->max_weights) {
     180          14 :             network_->max_weights = new_size;
     181             :           }
     182             :         }
     183             :       }
     184             :     }
     185          24 :     _incrementProgress(60 * (counter + 1) / (int)network_->layers.size());
     186          24 :     counter++;
     187             :   }
     188           7 :   return *this;
     189             : }
     190             : 
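                      : // Installs the activation function / derivative pair on each hidden
                      : // and output layer, using the function and alpha selected in
                      : // network_params_; the input layer gets none.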
     191          15 : NeuralNetworkBuilder &NeuralNetworkBuilder::setActivationFunction() {
     192          15 :   SimpleLogger::LOG_INFO("Setting neurons activation functions...");
     193          15 :   if (!network_) {
     194           0 :     throw NeuralNetworkException("neural network null");
     195             :   }
     196          15 :   if (network_->layers.empty()) {
     197           0 :     throw NeuralNetworkException("empty layers");
     198             :   }
     199             :   EActivationFunction activation_function;
     200          15 :   float activation_alpha = 0.0f;
     201          15 :   int counter = 0;
     202          48 :   for (auto layer : network_->layers) {
     203             :     // Get the parameters
     204          34 :     switch (layer->layerType) {
     205           8 :     case LayerType::LayerInput:
     206           8 :       continue; // no activation function for input layer
     207          18 :     case LayerType::LayerHidden:
     208          18 :       activation_function = network_params_.hidden_activation_function;
     209          18 :       activation_alpha = network_params_.hidden_activation_alpha;
     210          18 :       break;
     211           8 :     case LayerType::LayerOutput:
     212           8 :       activation_function = network_params_.output_activation_function;
     213           8 :       activation_alpha = network_params_.output_activation_alpha;
     214           8 :       break;
     215           0 :     default:
     216           0 :       continue;
     217             :     }
     218             :     // Set the activation functions
     219          26 :     switch (activation_function) {
     220           1 :     case EActivationFunction::ELU:
     221           1 :       layer->setActivationFunction(
     222           5 :           [activation_alpha](auto x) { return elu(x, activation_alpha); },
     223           1 :           [activation_alpha](auto x) {
     224           2 :             return eluDerivative(x, activation_alpha);
     225             :           });
     226           1 :       break;
     227          20 :     case EActivationFunction::LReLU:
     228          20 :       layer->setActivationFunction(leakyRelu, leakyReluDerivative);
     229          20 :       break;
     230           1 :     case EActivationFunction::PReLU:
     231           1 :       layer->setActivationFunction(
     232           2 :           [activation_alpha](auto x) {
     233           2 :             return parametricRelu(x, activation_alpha);
     234             :           },
     235           1 :           [activation_alpha](auto x) {
     236           2 :             return parametricReluDerivative(x, activation_alpha);
     237             :           });
     238           1 :       break;
     239           1 :     case EActivationFunction::ReLU:
     240           1 :       layer->setActivationFunction(relu, reluDerivative);
     241           1 :       break;
     242           1 :     case EActivationFunction::Sigmoid:
     243           1 :       layer->setActivationFunction(sigmoid, sigmoidDerivative);
     244           1 :       break;
     245           1 :     case EActivationFunction::Tanh:
     246           1 :       layer->setActivationFunction(tanhFunc, tanhDerivative);
     247           1 :       break;
     248           1 :     default:
     249           1 :       throw NeuralNetworkException("Unimplemented Activation Function");
     250             :     }
     251          25 :     counter++;
     252             :   }
     253             : 
     254          14 :   return *this;
     255             : }
     256             : 
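                      : // Hands ownership of the finished network to the caller; the
                      : // builder holds no network afterward.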
     257           8 : std::unique_ptr<NeuralNetwork> NeuralNetworkBuilder::build() {
     258           8 :   return std::move(network_);
     259             : }

Generated by: LCOV version 1.16