#pragma once

#include <Eigen/Eigen>
#include <EigenRand/EigenRand>

#include <functional>  // std::function, used by Score_Func below
#include <utility>

namespace network {

enum class Score_Id { MSE, MAE, CrossEntropy };

struct Score_Database {
  using MatrixXd = Eigen::MatrixXd;
  using VectorXd = Eigen::VectorXd;

  static MatrixXd SoftMax(const MatrixXd &vec);
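  // SoftMax is only declared here; its definition is presumably in the
  // accompanying source file, which is not part of this paste. A minimal
  // column-wise sketch with max-subtraction for numerical stability
  // (an assumption about the intended behaviour, not the actual definition):
  //
  //   Eigen::MatrixXd Score_Database::SoftMax(const Eigen::MatrixXd &vec) {
  //     Eigen::MatrixXd res(vec.rows(), vec.cols());
  //     for (Eigen::Index i = 0; i < vec.cols(); ++i) {
  //       Eigen::ArrayXd e =
  //           (vec.col(i).array() - vec.col(i).maxCoeff()).exp();
  //       res.col(i) = (e / e.sum()).matrix();
  //     }
  //     return res;
  //   }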

  // score(x, reference): scalar loss for a batch laid out one sample per
  // column; gradient(x, reference): its derivative with respect to x.
  template <Score_Id> static double score(const MatrixXd &, const MatrixXd &);

  template <Score_Id>
  static MatrixXd gradient(const MatrixXd &, const MatrixXd &);

  template <>
  inline double score<Score_Id::MSE>(const MatrixXd &x,
                                     const MatrixXd &reference) {
    return ((x - reference).transpose() * (x - reference)).trace() / x.cols();
  }

  template <>
  inline MatrixXd gradient<Score_Id::MSE>(const MatrixXd &x,
                                          const MatrixXd &reference) {
    // Scaled by 1 / x.cols() so this is the exact derivative of the
    // column-averaged score above.
    return 2.0 * (x - reference) / x.cols();
  }
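
  // Worked example (illustrative): for a single column x = (1, 2)^T and
  // reference = (0, 0)^T, score<MSE> = (1 + 4) / 1 = 5 and
  // gradient<MSE> = (2, 4)^T.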

  template <>
  inline double score<Score_Id::MAE>(const MatrixXd &x,
                                     const MatrixXd &reference) {
    return (x - reference).array().abs().sum();
  }

  template <>
  inline MatrixXd gradient<Score_Id::MAE>(const MatrixXd &x,
                                          const MatrixXd &reference) {
    return (x - reference).unaryExpr(
        [](double el) { return el > 0 ? 1.0 : -1.0; });
  }
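
  // Worked example (illustrative): for x = (1, -3)^T and reference = (0, 0)^T,
  // score<MAE> = |1| + |-3| = 4 and gradient<MAE> = (1, -1)^T.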

  template <>
  inline double score<Score_Id::CrossEntropy>(const MatrixXd &x,
                                              const MatrixXd &reference) {
    // Column-averaged cross entropy:
    // -(1 / x.cols()) * sum_i reference.col(i) . log(SoftMax(x).col(i)).
    const MatrixXd sf_x = SoftMax(x);
    double res = 0;
    for (Eigen::Index i = 0; i < x.cols(); ++i) {
      res -= reference.col(i).dot(sf_x.col(i).array().log().matrix()) /
             x.cols();
    }
    return res;
  }
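
  // Worked example (illustrative): for a single column x = (0, 0)^T, SoftMax
  // yields (0.5, 0.5)^T; with reference = (1, 0)^T the score is
  // -log(0.5) = log(2) ≈ 0.693.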

  template <>
  inline MatrixXd gradient<Score_Id::CrossEntropy>(const MatrixXd &x,
                                                   const MatrixXd &reference) {
    // Derivative of the averaged cross entropy above, assuming each column of
    // `reference` sums to one (a probability distribution or one-hot target).
    return (SoftMax(x) - reference) / x.cols();
  }
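
  // Continuing the example above: for x = (0, 0)^T and reference = (1, 0)^T,
  // gradient<CrossEntropy> = (0.5 - 1, 0.5 - 0)^T = (-0.5, 0.5)^T.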
};

class Score_Func {
public:
  using MatrixXd = Eigen::MatrixXd;
  using VectorXd = Eigen::VectorXd;

private:
  // The wrapped callables operate on MatrixXd batches, matching the
  // Score_Database functions passed in by create<Id>().
  using ScoreFuncType =
      std::function<double(const MatrixXd &, const MatrixXd &)>;
  using GradientFuncType =
      std::function<MatrixXd(const MatrixXd &, const MatrixXd &)>;

public:
  Score_Func(ScoreFuncType score_func, GradientFuncType gradient_func);

  template <Score_Id Id> static Score_Func create() {
    return Score_Func(Score_Database::score<Id>, Score_Database::gradient<Id>);
  }

  static Score_Func create(Score_Id score);

  double score(const MatrixXd &x, const MatrixXd &reference) const;

  MatrixXd gradient(const MatrixXd &x, const MatrixXd &reference) const;

private:
  ScoreFuncType score_func_;
  GradientFuncType gradient_func_;
};
} // namespace network
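
// Usage sketch (illustrative, not part of the original paste). It assumes
// this header is saved as "score_func.h" (hypothetical name) and that the
// Score_Func constructor, Score_Database::SoftMax and the out-of-line methods
// are defined in an accompanying source file that forwards score()/gradient()
// to the stored callables:
//
//   #include "score_func.h"
//
//   int main() {
//     Eigen::MatrixXd x(2, 1), reference(2, 1);
//     x << 1.0, 2.0;
//     reference << 0.0, 0.0;
//
//     auto mse = network::Score_Func::create<network::Score_Id::MSE>();
//     double loss = mse.score(x, reference);              // expected: 5.0
//     Eigen::MatrixXd grad = mse.gradient(x, reference);  // expected: (2, 4)^T
//     return loss > 0 ? 0 : 1;
//   }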