#include <iostream>
#include <vector>
#include <algorithm> // std::max

using namespace std;

// Rectified linear unit: returns x if positive, otherwise 0
float ReLU(float x) {
    return max(0.0f, x);
}

// Fully connected (dense) layer with a ReLU activation applied in forward()
class LayerDense {
public:
    void initialize(size_t n_inputs, size_t n_neurons,
                    const vector<vector<float>>& init_weights,
                    const vector<float>& init_biases);
    vector<vector<float>> forward(const vector<vector<float>>& inputs);

private:
    vector<vector<float>> weights; // shape: n_inputs x n_neurons
    vector<float> biases;          // shape: n_neurons
};

void LayerDense::initialize(size_t n_inputs, size_t n_neurons,
                            const vector<vector<float>>& init_weights,
                            const vector<float>& init_biases) {
    // Basic sanity check: weights must be n_inputs x n_neurons, biases must have n_neurons entries
    if (init_weights.size() != n_inputs || init_biases.size() != n_neurons) {
        cerr << "LayerDense::initialize: dimension mismatch" << endl;
    }
    weights = init_weights;
    biases = init_biases;
}

vector<vector<float>> LayerDense::forward(const vector<vector<float>>& inputs) {
    // output[i][k] = ReLU(sum_j inputs[i][j] * weights[j][k] + biases[k])
    vector<vector<float>> output(inputs.size(), vector<float>(biases.size(), 0.0f));
    for (size_t i = 0; i < inputs.size(); ++i) {            // for each sample in the batch
        for (size_t k = 0; k < biases.size(); ++k) {        // for each neuron
            float sum = 0.0f;
            for (size_t j = 0; j < inputs[i].size(); ++j) { // weighted sum over the inputs
                sum += inputs[i][j] * weights[j][k];
            }
            output[i][k] = ReLU(sum + biases[k]);           // add bias, apply ReLU activation
        }
    }
    return output;
}

int main() {
    // Example usage: one dense layer with 3 inputs and 2 neurons
    LayerDense layer;
    size_t n_inputs = 3;
    size_t n_neurons = 2;

    // Weights are laid out as weights[input][neuron]; biases are per neuron
    vector<vector<float>> weights = {{0.1f, 0.2f}, {0.3f, 0.4f}, {0.5f, 0.6f}};
    vector<float> biases = {0.7f, 0.8f};
    layer.initialize(n_inputs, n_neurons, weights, biases);

    // Batch of two samples, each with 3 input features
    vector<vector<float>> inputs = {{1.2f, 2.3f, 3.4f}, {4.5f, 5.6f, 6.7f}};
    vector<vector<float>> output = layer.forward(inputs); // Perform the forward pass

    // Display the output
    cout << "Output after forward pass:" << endl;
    for (const auto& row : output) {
        for (float val : row) {
            cout << val << " ";
        }
        cout << endl;
    }
    return 0;
}