nezvers

Backpropagation functions

Sep 24th, 2020 (edited)
458
0
Never
Not a member of Pastebin yet? Sign Up, it unlocks many cool features!
Python 1.52 KB | None | 0 0
  1. #https://mattmazur.com/2015/03/17/a-step-by-step-backpropagation-example/
  2.  
  3. static log2(x:float)->float:
  4.     return log(x) / log(2)
  5.  
  6. class Neuron:
  7.     var bias:float
  8.     var weights:float
  9.     var output:float    #holds result for backpropogation
  10.     var error:float     #holds error for backpropogation
  11.     func _init(_bias)->void:
  12.         bias = _bias
  13.  
  14. # 1) get total net input for nodes
  15. static func calculate_total_net_input(inst:Object, inputs:Array)->float:
  16.     var total: = 0.0
  17.     for i in inputs.size():
  18.         total += inputs[i] * inst.weights[i]
  19.     return total + bias
  20.  
# worked example (from the article): total_net_input = w1*i1 + w2*i2 + b*1
  22.  
  23.  
  24. # 2) squash it with logistic function
  25. static func squash(total_net_input:float)->float:
  26.     return 1 / (1 + exp(-total_net_input))
  27.  
# worked example (from the article): h1_out = squash(total_net_input)
  29.  
  30. static func calculate_output(inst:Object, inputs:Array)->float:
  31.     inst.output = squash(calculate_total_net_input(inst, inputs))
  32.     return inst.output
  33.  
  34. # 3) do the same for each output node
  35.  
  36.  
  37. # 4) calculate error
  38. static func calculate_error(inst:Object, target_output:float, training_rate:float = 0.5)->float:
  39.     inst.error = training_rate * pow((target_output - inst.output), 2)
  40.     return inst.error
  41.  
  42. # 5) sum all errors together
  43. static func calculate_total_error(instances:Array)->float:
  44.     var total: = 0.0
  45.     for inst in instances:
  46.         total += inst.error
  47.     return total
  48.  
# Cross-entropy between distributions p and q: H(p, q) = -sum_i p[i] * log2(q[i]).
# p : true distribution; q : predicted distribution — assumed same length,
#   and q[i] > 0 so log2 is defined — TODO confirm with callers.
# NOTE(review): the visible chunk is truncated here — the function accumulates
# p[i] * log2(q[i]) but the `return` (and the expected negation of `total`)
# is cut off below this view. Confirm against the full source before use.
static func cross_entropy(p:Array, q:Array)->float:
    var total: = 0.0
    for i in p.size():
        total += p[i] * log2(q[i])
Add Comment
Please, Sign In to add comment