/**
 * @file NeuralNetwork.h
 * @author Damien Balima (www.dams-labs.net)
 * @brief NeuralNetwork
 * @date 2024-03-08
 *
 * @copyright Damien Balima (c) CC-BY-NC-SA-4.0 2024
 *
 */
#pragma once
#include "Common.h"
#include "Layer.h"
#include <algorithm>
#include <atomic>
#include <cstddef>
#include <cstdint>
#include <vector>

namespace sipai {

/**
 * @class NeuralNetwork
 * @brief This class represents a neural network for image processing.
 */
class NeuralNetwork {
public:
  /**
   * @brief Default constructor for the NeuralNetwork class.
   */
  NeuralNetwork() = default;
  // Rule of Five:
  NeuralNetwork(const NeuralNetwork &other) = delete; // Copy constructor
  NeuralNetwork &
  operator=(const NeuralNetwork &other) = delete; // Copy assignment operator
  NeuralNetwork(NeuralNetwork &&other) = delete;  // Move constructor
  NeuralNetwork &
  operator=(NeuralNetwork &&other) = delete; // Move assignment operator
  ~NeuralNetwork() {
    // The network owns its layers and releases them on destruction.
    for (auto layer : layers) {
      if (layer != nullptr) {
        delete layer;
      }
    }
  }

  /**
   * @brief A vector of pointers to Layer objects, representing the layers in
   * the network.
   */
  std::vector<Layer *> layers;

  /**
   * @brief Performs forward propagation on the network using the given input
   * values.
   *
   * @param inputValues The input values for forward propagation.
   * @return The output matrix produced by the output layer after forward
   * propagation.
   */
  cv::Mat forwardPropagation(const cv::Mat &inputValues);

  /**
   * @brief Performs backward propagation on the network using the given
   * expected values.
   *
   * @param expectedValues The expected values for backward propagation.
   * @param error_min The minimum error bound.
   * @param error_max The maximum error bound.
   */
  void backwardPropagation(const cv::Mat &expectedValues,
                           const float &error_min, const float &error_max);

  /**
   * @brief Updates the weights of the neurons in the network using the
   * learning rate.
   *
   * @param learning_rate The learning rate.
   */
  void updateWeights(float learning_rate);

  /**
   * @brief Maximum number of weights of any neuron, useful for CSV export.
   * It also corresponds to the maximum number of neurons in a layer.
   * Updated during neural network import or creation.
   */
  size_t max_weights = 0;
};

} // namespace sipai
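
/**
 * Usage sketch (illustrative, not part of the original header): a minimal
 * training step showing the intended call order of the public API, assuming
 * the network has already been populated with layers (e.g. by the project's
 * import or builder code). The helper name trainOneSample, the error_min /
 * error_max values, and the learning rate are placeholder assumptions, not
 * values prescribed by this header.
 *
 * @code
 * #include "NeuralNetwork.h"
 *
 * void trainOneSample(sipai::NeuralNetwork &network, const cv::Mat &input,
 *                     const cv::Mat &expected, float learningRate) {
 *   // Forward pass: compute the network output for this input image.
 *   cv::Mat output = network.forwardPropagation(input);
 *
 *   // Backward pass against the expected image; -1.0f and 1.0f are
 *   // assumed error_min / error_max values.
 *   network.backwardPropagation(expected, -1.0f, 1.0f);
 *
 *   // Apply the weight updates with the given learning rate.
 *   network.updateWeights(learningRate);
 *
 *   (void)output; // a caller would typically compare `output` to `expected`
 * }
 * @endcode
 */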