#https://mattmazur.com/2015/03/17/a-step-by-step-backpropagation-example/

static func log2(x:float)->float:
    return log(x) / log(2)

class Neuron:
    var bias:float
    var weights:Array # one weight per input
    var output:float # holds result for backpropagation
    var error:float # holds error for backpropagation

    func _init(_bias)->void:
        bias = _bias

# 1) get total net input for nodes
static func calculate_total_net_input(inst:Object, inputs:Array)->float:
    var total := 0.0
    for i in inputs.size():
        total += inputs[i] * inst.weights[i]
    return total + inst.bias
# total_net_input = w1*i1 + w2*i2 + b*1

# 2) squash it with logistic function
static func squash(total_net_input:float)->float:
    return 1 / (1 + exp(-total_net_input))
# h1_out = squash(total_net_input)

static func calculate_output(inst:Object, inputs:Array)->float:
    inst.output = squash(calculate_total_net_input(inst, inputs))
    return inst.output
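
# Usage sketch: a forward pass for one hidden neuron, assuming the weights,
# bias and inputs used in the linked step-by-step example
# (w1 = 0.15, w2 = 0.20, b1 = 0.35, i1 = 0.05, i2 = 0.10).
func _demo_forward_pass()->void:
    var h1 := Neuron.new(0.35)
    h1.weights = [0.15, 0.20]
    # steps 1 and 2: weighted sum of inputs plus bias, then the logistic squash
    print(calculate_output(h1, [0.05, 0.10])) # roughly 0.5933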

# 3) do the same for each output node
# 4) calculate error: E = 1/2 * (target - output)^2 per output node
#    (the 0.5 default below is that 1/2 factor)
static func calculate_error(inst:Object, target_output:float, training_rate:float = 0.5)->float:
    inst.error = training_rate * pow((target_output - inst.output), 2)
    return inst.error

# 5) sum all errors together
static func calculate_total_error(instances:Array)->float:
    var total := 0.0
    for inst in instances:
        total += inst.error
    return total
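
# Usage sketch: steps 3-5 end to end for the two-output network, assuming the
# hidden/output weights, biases and targets from the linked example
# (w3..w8 = 0.25/0.30/0.40/0.45/0.50/0.55, b1 = 0.35, b2 = 0.60, targets 0.01 and 0.99).
func _demo_total_error()->void:
    # hidden layer forward pass
    var h1 := Neuron.new(0.35)
    h1.weights = [0.15, 0.20]
    var h2 := Neuron.new(0.35)
    h2.weights = [0.25, 0.30]
    var hidden_outputs := [calculate_output(h1, [0.05, 0.10]), calculate_output(h2, [0.05, 0.10])]
    # step 3: same forward pass for each output node
    var o1 := Neuron.new(0.60)
    o1.weights = [0.40, 0.45]
    var o2 := Neuron.new(0.60)
    o2.weights = [0.50, 0.55]
    calculate_output(o1, hidden_outputs)
    calculate_output(o2, hidden_outputs)
    # step 4: squared error per output node against its target
    calculate_error(o1, 0.01)
    calculate_error(o2, 0.99)
    # step 5: total error; roughly 0.2984 with these values
    print(calculate_total_error([o1, o2]))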

static func cross_entropy(p:Array, q:Array)->float:
    var total := 0.0
    for i in p.size():
        total += p[i] * log2(q[i])
    return -total # H(p, q) = -sum(p * log2(q))
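
# Usage sketch: cross-entropy between a one-hot target distribution p and a
# predicted distribution q (illustrative values, not from the linked example).
func _demo_cross_entropy()->void:
    var p := [1.0, 0.0, 0.0]
    var q := [0.7, 0.2, 0.1]
    print(cross_entropy(p, q)) # -log2(0.7), roughly 0.51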