@@ -168,7 +168,7 @@ def _setTheanoFunctions(self) :
 		propagateTest returns the testOutputs, some decorators might not be applied.
 		This is called after decorating"""
 		self.propagate = MWRAP.TheanoFunction("propagate", self, [("outputs", self.outputs)], allow_input_downcast=True)
-		self.propagateTest = MWRAP.TheanoFunction("propagateTest", self, [("testOutputs", self.testOutputs)], allow_input_downcast=True)
+		self.propagateTest = MWRAP.TheanoFunction("propagateTest", self, [("outputs", self.testOutputs)], allow_input_downcast=True)
 
 	def setCustomTheanoFunctions(self) :
 		"""This is where you should put the definitions of your custom theano functions. Theano functions
@@ -523,11 +523,11 @@ def setCustomTheanoFunctions(self) :
 		clasAcc = tt.mean( tt.eq(self.targets, clas) )
 		predAcc = tt.mean( tt.eq(self.targets, pred) )
 
-		self.classificationAccuracy = MWRAP.TheanoFunction("accuracy", self, [("accuracy", clasAcc)], { "targets" : self.targets }, allow_input_downcast=True)
-		self.predictionAccuracy = MWRAP.TheanoFunction("accuracy", self, [("accuracy", predAcc)], { "targets" : self.targets }, allow_input_downcast=True)
+		self.classificationAccuracy = MWRAP.TheanoFunction("classificationAccuracy", self, [("accuracy", clasAcc)], { "targets" : self.targets }, allow_input_downcast=True)
+		self.predictionAccuracy = MWRAP.TheanoFunction("predictionAccuracy", self, [("accuracy", predAcc)], { "targets" : self.targets }, allow_input_downcast=True)
 
-		self.trainAndAccuracy = MWRAP.TheanoFunction("accuracy", self, [("score", self.cost), ("accuracy", clasAcc)], { "targets" : self.targets }, updates=self.updates, allow_input_downcast=True)
-		self.testAndAccuracy = MWRAP.TheanoFunction("accuracy", self, [("score", self.testCost), ("accuracy", predAcc)], { "targets" : self.targets }, allow_input_downcast=True)
+		self.trainAndAccuracy = MWRAP.TheanoFunction("trainAndAccuracy", self, [("score", self.cost), ("accuracy", clasAcc)], { "targets" : self.targets }, updates=self.updates, allow_input_downcast=True)
+		self.testAndAccuracy = MWRAP.TheanoFunction("testAndAccuracy", self, [("score", self.testCost), ("accuracy", predAcc)], { "targets" : self.targets }, allow_input_downcast=True)
 
 class Regression(Output_ABC) :
 	"""For regressions, works great with a mean squared error cost"""