Skip to content

Commit

Permalink
Update surrogate derivative
Browse files Browse the repository at this point in the history
  • Loading branch information
Jinqi Huang committed Feb 28, 2022
1 parent 167d178 commit 4ea5f86
Show file tree
Hide file tree
Showing 4 changed files with 5 additions and 5 deletions.
2 changes: 1 addition & 1 deletion NeuroCores/core_Izhikevich.py
Original file line number Diff line number Diff line change
Expand Up @@ -227,7 +227,7 @@ def plast(net, time):
if abs(net.state.NeurAccum[time][neuron - net.inputNum] - net.params.get('FIRETH', 0.001)) > net.params.get('FIRETH', 0.001):
error[neuron] = 0
else:
error[neuron] = delta * 0.5 * net.state.NeurAccum[time][neuron - net.inputNum] / net.params.get('FIRETH', 0.001)
error[neuron] = delta * 0.5 / net.params.get('FIRETH', 0.001)
print("neuron %d has delta %f and error %f" % (neuron, delta, error[neuron]))
elif neuron < fullNum - net.outputNum and neuron >= net.inputNum: # hidden neurons
if abs(net.state.NeurAccum[time][neuron - net.inputNum] - net.params.get('FIRETH', 0.001)) > net.params.get('FIRETH', 0.001):
Expand Down
4 changes: 2 additions & 2 deletions NeuroCores/core_LIF_supervisedlearning.py
Original file line number Diff line number Diff line change
Expand Up @@ -199,13 +199,13 @@ def plast(net, time):
if abs(net.state.NeurAccum[time][neuron - net.inputNum] - net.params.get('FIRETH', 0.001)) > net.params.get('FIRETH', 0.001):
error[neuron] = 0
else:
error[neuron] = delta * 0.5 * net.state.NeurAccum[time][neuron - net.inputNum] / net.params.get('FIRETH', 0.001)
error[neuron] = delta * 0.5 / net.params.get('FIRETH', 0.001)
print("neuron %d has expected output %d and real output %d, delta %f and error %f" % (neuron, outputLabel[neuron], rawin[neuron], delta, error[neuron]))
elif neuron < fullNum - net.outputNum and neuron >= net.inputNum: # hidden neurons
if abs(net.state.NeurAccum[time][neuron - net.inputNum] - net.params.get('FIRETH', 0.001)) > net.params.get('FIRETH', 0.001):
sur_deriv = 0
else:
sur_deriv = 0.5 * net.state.NeurAccum[time][neuron - net.inputNum] / net.params.get('FIRETH', 0.001)
sur_deriv = 0.5 / net.params.get('FIRETH', 0.001)
for postidx in np.where(net.ConnMat[neuron,:, 0] != 0)[0]: # add up all error back propagated from the next layer
delta_error = error[postidx] * net.state.weights[neuron, postidx - net.inputNum, time] * sur_deriv
error[neuron] += delta_error
Expand Down
2 changes: 1 addition & 1 deletion NeuroCores/core_LIF_supervisedlearning_wta.py
Original file line number Diff line number Diff line change
Expand Up @@ -242,7 +242,7 @@ def plast(net, time):
if abs(net.state.NeurAccum[time][neuron - net.inputNum] - net.params.get('FIRETH', 0.001)) > net.params.get('FIRETH', 0.001):
sur_deriv = 0
else:
sur_deriv = 0.5 * net.state.NeurAccum[time][neuron - net.inputNum] / net.params.get('FIRETH', 0.001)
sur_deriv = 0.5 / net.params.get('FIRETH', 0.001)
for postidx in np.where(net.ConnMat[neuron,:, 0] != 0)[0]: # add up all error back propagated from the next layer
delta_error = error[postidx] * net.state.weights[neuron, postidx - net.inputNum, time] * sur_deriv
error[neuron] += delta_error
Expand Down
2 changes: 1 addition & 1 deletion NeuroCores/core_LIF_supervisedlearning_wta_debugver.py
Original file line number Diff line number Diff line change
Expand Up @@ -231,7 +231,7 @@ def plast(net, time):
if abs(net.state.NeurAccum[time][neuron - net.inputNum] - net.params.get('FIRETH', 0.001)) > net.params.get('FIRETH', 0.001):
sur_deriv = 0
else:
sur_deriv = 0.5 * net.state.NeurAccum[time][neuron - net.inputNum] / net.params.get('FIRETH', 0.001)
sur_deriv = 0.5 / net.params.get('FIRETH', 0.001)
for postidx in np.where(net.ConnMat[neuron,:, 0] != 0)[0]: # add up all error back propagated from the next layer
delta_error = error[postidx] * net.state.weights[neuron, postidx - net.inputNum, time] * sur_deriv
error[neuron] += delta_error
Expand Down

0 comments on commit 4ea5f86

Please sign in to comment.