Advertisement
Trainlover08

start of the passes class

Apr 11th, 2024 (edited)
154
0
Never
Not a member of Pastebin yet? Sign Up, it unlocks many cool features!
C++ 2.13 KB | None | 0 0
#include <cmath>
#include <iostream>
#include <stdexcept>
#include <vector>
  4.  
  5. using namespace std;
  6.  
  7. // Function to apply ReLU activation
  8. float ReLU(float x) {
  9.     return max(0.0f, x);
  10. }
  11.  
// Fully connected (dense) neural-network layer.
// Parameters are supplied externally through initialize(); forward()
// computes ReLU(inputs * weights + biases) for a batch of input rows.
class LayerDense {
public:
    // Stores the given weights/biases. n_inputs/n_neurons describe the
    // intended layer shape (rows/columns of the weight matrix).
    void initialize(size_t n_inputs, size_t n_neurons, const vector<vector<float>>& weights, const vector<float>& biases);
    // Forward pass over a batch: returns one activation row per input row,
    // each of length biases.size().
    vector<vector<float>> forward(const vector<vector<float>>& inputs);
private:
    vector<vector<float>> weights;  // indexed [input_index][neuron_index]
    vector<float> biases;           // one bias per neuron
};
  20.  
  21. void LayerDense::initialize(size_t n_inputs, size_t n_neurons, const vector<vector<float>>& init_weights, const vector<float>& init_biases) {
  22.     weights = init_weights;
  23.     biases = init_biases;
  24. }
  25.  
  26. vector<vector<float>> LayerDense::forward(const vector<vector<float>>& inputs) {
  27.     vector<vector<float>> output(inputs.size(), vector<float>(biases.size(), 0.0));
  28.     for (size_t i = 0; i < inputs.size(); ++i) {
  29.         for (size_t k = 0; k < biases.size(); ++k) {
  30.             float sum = 0.0;
  31.             for (size_t j = 0; j < inputs[i].size(); ++j) {
  32.                 sum += inputs[i][j] * weights[j][k];
  33.             }
  34.             output[i][k] = sum + biases[k];
  35.             // Apply ReLU activation
  36.             output[i][k] = ReLU(output[i][k]);
  37.         }
  38.     }
  39.     return output;
  40. }
  41.  
  42. int main() {
  43.     // Example usage
  44.     LayerDense layer;
  45.     size_t num_layers = 2; // Number of layers
  46.     size_t neurons_per_layer = 2; // Number of neurons per layer
  47.  
  48.     // Specify weights and biases for each neuron
  49.     vector<vector<float>> weights = {{0.1, 0.2}, {0.3, 0.4}, {0.5, 0.6}}; // Example weights
  50.     vector<float> biases = {0.7, 0.8}; // Example biases
  51.  
  52.     layer.initialize(3, neurons_per_layer, weights, biases); // Number of inputs is 3
  53.  
  54.     vector<vector<float>> inputs = {{1.2f, 2.3f, 3.4f}, {4.5f, 5.6f, 6.7f}}; // Example input data
  55.     vector<vector<float>> output = layer.forward(inputs); // Perform forward pass
  56.  
  57.     // Display the output
  58.     cout << "Output after forward pass:" << endl;
  59.     for (const auto& row : output) {
  60.         for (float val : row) {
  61.             cout << val << " ";
  62.         }
  63.         cout << endl;
  64.     }
  65.     return 0;
  66. }
  67.  
Advertisement
Add Comment
Please, Sign In to add comment
Advertisement