Skip to content

Commit

Permalink
Final touches.
Browse files Browse the repository at this point in the history
  • Loading branch information
abergeron committed Oct 29, 2014
1 parent ea21204 commit 99aa9f5
Show file tree
Hide file tree
Showing 4 changed files with 54 additions and 0 deletions.
22 changes: 22 additions & 0 deletions 09_opt/01_opt.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,22 @@
from scalmulop import ScalMulV1
from doubleop import DoubleOp

from theano.gof import local_optimizer

from theano.tensor.opt import register_specialize

@register_specialize
@local_optimizer([ScalMulV1])
def local_scalmul_double_v1(node):
    """Graph rewrite: replace ScalMulV1(2)(x) with DoubleOp()(x).

    Returns the replacement output list when the node matches, or
    False to tell the optimizer this node is left unchanged.
    """
    op = node.op
    # Only a scalar multiplication by exactly 2 is equivalent to DoubleOp.
    if isinstance(op, ScalMulV1) and op.scal == 2:
        return [DoubleOp()(node.inputs[0])]
    return False

from theano.gof.opt import OpSub

# Same rewrite as v1, but expressed declaratively: OpSub builds a local
# optimizer that substitutes every ScalMulV1(2) node with a DoubleOp node.
local_scalmul_double_v2 = OpSub(ScalMulV1(2), DoubleOp())

# OpSub instances are not functions, so they cannot use the
# @register_specialize decorator; register explicitly and give the
# optimizer a name so it can be referenced/excluded by name.
register_specialize(local_scalmul_double_v2,
                    name='local_scalmul_double_v2')
15 changes: 15 additions & 0 deletions 09_opt/01_opt_soln.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,15 @@
from doubleop import DoubleOp
from doublec import DoubleC

from theano.gof import local_optimizer

from theano.tensor.opt import register_specialize

@register_specialize
@local_optimizer([DoubleOp])
def local_scalmul_double_v1(node):
    """Graph rewrite: replace DoubleOp on a vector with DoubleC.

    Returns the replacement output list when the node matches, or
    False to leave the node untouched.
    """
    # DoubleC only handles rank-1 inputs, so restrict the match to vectors.
    inp = node.inputs[0]
    if isinstance(node.op, DoubleOp) and inp.ndim == 1:
        return [DoubleC()(inp)]
    return False
1 change: 1 addition & 0 deletions advanced.tex
Original file line number Diff line number Diff line change
Expand Up @@ -383,6 +383,7 @@ \section{Optimizations}
\end{frame}

\begin{frame}{Exercise 4}
Work through the "09\_opt" directory available at \url{https://github.com/abergeron/ccw_tutorial_theano.git}.
\begin{itemize}
\item Make an optimization that replaces DoubleOp with DoubleC (or DoubleCOp)
\item Write tests to make sure your optimization is applied correctly
Expand Down
16 changes: 16 additions & 0 deletions test_opt.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,16 @@
import theano

from scalmulop import ScalMulV1
from doubleop import DoubleOp
import opt

def test_scalmul_double():
    """Compiling ScalMulV1(2) should trigger the rewrite to DoubleOp."""
    x = theano.tensor.matrix()
    f = theano.function([x], ScalMulV1(2)(x))

    # Inspect the optimized graph: ScalMulV1 must be gone, DoubleOp present.
    ops = [n.op for n in f.maker.fgraph.toposort()]
    assert not any(isinstance(op, ScalMulV1) for op in ops)
    assert any(isinstance(op, DoubleOp) for op in ops)

0 comments on commit 99aa9f5

Please sign in to comment.