layer.cpp

//
// Created by Denis Ryapolov on 06.01.2024.
//

#include "Layer.h"

#include <cassert> // assert() is used in getNormal() below

namespace network {
using VectorXd = Layer::VectorXd;
using MatrixXd = Layer::MatrixXd;

// Construct a layer with the given activation (threshold) function;
// the weight matrix A_ and the bias b_ start as standard-normal noise.
Layer::Layer(Threshold_Id id, int rows, int columns)
    : threshold_func_(Threshold_Func::create(id)), A_(getNormal(rows, columns)),
      b_(getNormal(rows, 1)) {}

// Forward pass: apply the activation elementwise to A_ * x + b_.
MatrixXd Layer::apply(const MatrixXd &x) const {
  return threshold_func_.apply(A_ * x + b_);
}

// vec holds the pre-activation values y = A_ * x + b_ computed during apply();
// returns the diagonal matrix diag(sigma'(y_1), ..., sigma'(y_n)).
MatrixXd Layer::derive(const MatrixXd &vec) const {
  return threshold_func_.derive(vec).asDiagonal();
}

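// Backpropagation identities behind the grad* functions below (a sketch of
// the math, not part of the original paste). With z = sigma(y), y = A x + b,
// and u = dL/dz the gradient flowing in from the next layer:
//   dL/dA = diag(sigma'(y)) * u * x^T   -> gradA
//   dL/db = diag(sigma'(y)) * u         -> gradb
//   dL/dx = A^T * diag(sigma'(y)) * u   -> gradx
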
// Gradient of the loss w.r.t. A_; x is the layer input, u the incoming
// gradient w.r.t. the layer output, vec the pre-activation values.
MatrixXd Layer::gradA(const VectorXd &x, const VectorXd &u,
                      const VectorXd &vec) const {
  return derive(vec) * u * x.transpose();
}

// Gradient of the loss w.r.t. b_.
MatrixXd Layer::gradb(const VectorXd &u, const VectorXd &vec) const {
  return derive(vec) * u;
}

// Gradient of the loss w.r.t. the input x, handed to the previous layer.
VectorXd Layer::gradx(const VectorXd &u, const VectorXd &vec) const {
  return A_.transpose() * derive(vec) * u;
}

// One gradient-descent step on the weights with learning rate `step`.
void Layer::apply_gradA(const MatrixXd &grad, double step) {
  A_ -= step * grad;
}

// One gradient-descent step on the bias.
void Layer::apply_gradb(const VectorXd &grad, double step) {
  b_ -= step * grad;
}

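// (Plain SGD updates: A <- A - step * dL/dA and b <- b - step * dL/db,
// with dL/dA and dL/db produced by gradA and gradb above.)
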
// Draw a rows-by-columns matrix of i.i.d. standard-normal samples.
// Eigen::Rand::normal comes from the EigenRand library; urng is presumably
// the shared generator declared in Layer.h.
MatrixXd Layer::getNormal(int rows, int columns) {
  assert(rows > 0 && "rows must be positive!");
  assert(columns > 0 && "columns must be positive!");
  return Eigen::Rand::normal<MatrixXd>(rows, columns, urng);
}

} // namespace network
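
// Minimal usage sketch (not part of the original paste; the hypothetical
// Threshold_Id::Sigmoid enumerator stands in for whatever Layer.h actually
// declares). It assumes the caller keeps the input x, the pre-activation
// vec = A * x + b, and the incoming gradient u from the next layer:
//
//   network::Layer layer(network::Threshold_Id::Sigmoid, /*rows=*/4, /*columns=*/3);
//   auto z = layer.apply(x);        // forward pass: sigma(A * x + b)
//   auto dx = layer.gradx(u, vec);  // gradient handed to the previous layer
//   layer.apply_gradA(layer.gradA(x, u, vec), /*step=*/0.01);  // SGD on A_
//   layer.apply_gradb(layer.gradb(u, vec), /*step=*/0.01);     // SGD on b_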