LCOV - code coverage report
Current view: top level - src - Layer.cpp (source / functions)
Test:         lcov.info
Date:         2024-12-28 17:36:05

              Hit  Total  Coverage
Lines:         42     44    95.5 %
Functions:      3      3   100.0 %

          Line data    Source code
       1             : #include "Layer.h"
       2             : #include "VulkanController.h"
       3             : #include <algorithm>
       4             : #include <cmath>
       5             : #include <opencv2/core/hal/interface.h>
       6             : #include <stdexcept>
       7             : 
       8             : using namespace sipai;
       9             : 
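                     : // Forward pass: each neuron's new value is the activation of the
                     : // sum of the element-wise product between the previous layer's
                     : // values and the neuron's weights, computed per channel (cv::Vec4f).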
      10         123 : void Layer::forwardPropagation() {
      11         123 :   if (previousLayer == nullptr) {
      12           0 :     return;
      13             :   }
      14             : 
      15         430 :   for (size_t y = 0; y < neurons.size(); ++y) {
      16        1228 :     for (size_t x = 0; x < neurons[y].size(); ++x) {
      17         921 :       Neuron &currentNeuron = neurons[y][x];
      18             :       // Compute the element-wise product of the previous layer's
      19             :       // values and the current neuron's weights
      20         921 :       cv::Mat dotProduct = previousLayer->values.mul(currentNeuron.weights);
      21             :       // Convert the result matrix to a single value by summing all elements
      22         921 :       cv::Vec4f result = cv::sum(dotProduct);
      23             :       // Update the neuron value using the activation function
      24         921 :       values.at<cv::Vec4f>((int)y, (int)x) = activationFunction(result);
      25         921 :     }
      26             :   }
      27             : }
      28             : 
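                     : // Backward pass: each neuron's error is accumulated from the next
                     : // layer's errors (weighted by the connections into this neuron) and
                     : // from neighboring neurons, then scaled by the activation derivative
                     : // and clamped to [error_min, error_max].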
      29          84 : void Layer::backwardPropagation(const float &error_min,
      30             :                                 const float &error_max) {
      31          84 :   if (nextLayer == nullptr) {
      32          42 :     return;
      33             :   }
      34             : 
      35         126 :   for (int y = 0; y < (int)neurons.size(); ++y) {
      36         336 :     for (int x = 0; x < (int)neurons[y].size(); ++x) {
      37         252 :       Neuron &currentNeuron = neurons[y][x];
      38         252 :       cv::Vec4f error(0.0f);
      39             : 
      40             :       // Accumulate the next layer's neuron errors, weighted by their connection weights to this neuron
      41        1008 :       for (const auto &nextLayerNeuronRow : nextLayer->neurons) {
      42        3024 :         for (const auto &nextLayerNeuron : nextLayerNeuronRow) {
      43        2268 :           const cv::Vec4f currentError = nextLayer->errors.at<cv::Vec4f>(
      44        2268 :               (int)nextLayerNeuron.index_y, (int)nextLayerNeuron.index_x);
      45        2268 :           const cv::Vec4f weight = nextLayerNeuron.weights.at<cv::Vec4f>(y, x);
      46        2268 :           error += currentError.mul(weight);
      47             :         }
      48             :       }
      49             :       // Add the weighted errors of neighboring neurons in this layer
      50         840 :       for (const NeuronConnection &conn : currentNeuron.neighbors) {
      51        1176 :         error += conn.weight.mul(errors.at<cv::Vec4f>(
      52        1176 :             (int)conn.neuron->index_y, (int)conn.neuron->index_x));
      53             :       }
      54             :       // Use the derivative of the activation function
      55             :       const cv::Vec4f activationDerivative =
      56         252 :           activationFunctionDerivative(values.at<cv::Vec4f>(y, x));
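                     :       // Clamp the derivative-scaled error to [error_min, error_max]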
      57             :       const cv::Vec4f clampedError = Common::clamp4f(
      58         252 :           activationDerivative.mul(error), error_min, error_max);
      59             : 
      60         252 :       errors.at<cv::Vec4f>(y, x) = clampedError;
      61             :     }
      62             :   }
      63             : }
      64             : 
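                     : // Weight update: subtract learningRate * error, scaled element-wise
                     : // by the corresponding input values, from both the previous-layer
                     : // connection weights and the neighbor connection weights.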
      65          85 : void Layer::updateWeights(float learningRate) {
      66          85 :   if (previousLayer == nullptr) {
      67           0 :     return;
      68             :   }
      69             : 
      70         298 :   for (int y = 0; y < (int)neurons.size(); ++y) {
      71         852 :     for (int x = 0; x < (int)neurons[y].size(); ++x) {
      72         639 :       Neuron &neuron = neurons[y][x];
      73             : 
      74             :       // Scale the current neuron's error by the learning rate
      75             :       const cv::Vec4f learningRateError =
      76         639 :           errors.at<cv::Vec4f>(y, x) * cv::Vec4f::all(learningRate);
      77             : 
      78             :       // Broadcast the scaled error into a matrix matching the
      79             :       // dimensions and type of the neuron's weights
      80             :       cv::Mat learningRateErrorMat(neuron.weights.size(), neuron.weights.type(),
      81         639 :                                    learningRateError);
      82             : 
      83             :       // Update the neuron's weights, i.e. its connection weights to the previous layer
      84         639 :       neuron.weights -= previousLayer->values.mul(learningRateErrorMat);
      85             : 
      86             :       // Update the weights of the neighbor connections
      87        2259 :       for (NeuronConnection &conn : neuron.neighbors) {
      88        1620 :         conn.weight -= values
      89        1620 :                            .at<cv::Vec4f>((int)conn.neuron->index_y,
      90        1620 :                                           (int)conn.neuron->index_x)
      91        1620 :                            .mul(learningRateError);
      92             :       }
      93         639 :     }
      94             :   }
      95             : }
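                     :
                     : // A typical training iteration over a stack of these layers might
                     : // look like the sketch below. This is a hypothetical caller:
                     : // `layers`, `errorMin`, `errorMax` and `learningRate` are assumed
                     : // names, not part of this file.
                     : //
                     : //   for (auto &layer : layers)
                     : //     layer->forwardPropagation();
                     : //   for (auto it = layers.rbegin(); it != layers.rend(); ++it)
                     : //     (*it)->backwardPropagation(errorMin, errorMax);
                     : //   for (auto &layer : layers)
                     : //     layer->updateWeights(learningRate);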

Generated by: LCOV version 1.16