Commit ea21204
Fix a few bugs and touch up the optimization section.
abergeron committed Oct 29, 2014
1 parent 965e444 commit ea21204
Showing 7 changed files with 14 additions and 19 deletions.
06_scalmulop/01_scalmulop_soln.py (1 addition, 1 deletion)

@@ -3,7 +3,7 @@
 from theano.scalar import as_scalar_variable
 
 class ScalMulV1(Op):
-    __props__ = ('scal')
+    __props__ = ('scal',)
 
     def __init__(self, scal):
        if not isinstance(scal, int):
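The same one-character bug recurs in five files in this commit: without a trailing comma, ('scal') is just a parenthesized string, not a tuple, so code that iterates over __props__ would see individual characters instead of property names. A standalone illustration in plain Python:

props_wrong = ('scal')    # parentheses alone do not make a tuple; this is the string 'scal'
props_right = ('scal',)   # the trailing comma creates a one-element tuple
print(type(props_wrong))  # <class 'str'>
print(type(props_right))  # <class 'tuple'>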
07_scalmulgrad/01_scalmulop.py (1 addition, 1 deletion)

@@ -3,7 +3,7 @@
 from theano.scalar import as_scalar_variable
 
 class ScalMul(Op):
-    __props__ = ('scal')
+    __props__ = ('scal',)
 
     def __init__(self, scal):
        if not isinstance(scal, int):
07_scalmulgrad/01_scalmulop_soln.py (1 addition, 1 deletion)

@@ -3,7 +3,7 @@
 from theano.scalar import as_scalar_variable
 
 class ScalMul(Op):
-    __props__ = ('scal')
+    __props__ = ('scal',)
 
     def __init__(self, scal):
        if not isinstance(scal, int):
08_scalmulc/01_scalmulc_soln.py (1 addition, 1 deletion)

@@ -2,7 +2,7 @@
 from theano.tensor import as_tensor_variable
 
 class ScalMulC(Op):
-    __props__ = ('scal')
+    __props__ = ('scal',)
 
     def __init__(self, scal):
        if not isinstance(scal, int):
advanced.tex (4 additions, 10 deletions)

@@ -50,13 +50,6 @@
 
 \section*{}
 
-\begin{frame}{Setup}
-\begin{enumerate}
-\item Make sure you have Theano installed somewhere
-\item Clone this repository: \url{https://github.com/abergeron/ccw_tutorial_theano.git}
-\end{enumerate}
-\end{frame}
-
 \begin{frame}{Outline}
 \begin{enumerate}
 \item How to Make an Op (Python) (45 min)
@@ -93,6 +86,7 @@ \section{How to Make an Op (Python)}
 \item Optional (although very useful)
 \item Generates \code{__hash__}, \code{__eq__} and \code{__str__} methods if present
 \item Empty tuple signifies no properties that should take part in comparison
+\item If you have only one property, make sure you add a final comma: \code{('property',)}
 \end{itemize}
 \begin{alertblock}{}
 Make sure \code{__hash__}, \code{__eq__} and \code{__str__} are not defined in a superclass if you don't inherit directly from Op since otherwise your methods will get shadowed.
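To make the slide's new bullet concrete, here is a minimal sketch (not part of this commit; the class name is illustrative) of an Op whose one-element __props__ tuple drives the generated methods:

from theano import Op, Apply
from theano.tensor import as_tensor_variable

class ScalMulSketch(Op):
    # The trailing comma makes this a real one-element tuple, so Theano
    # can generate __eq__, __hash__ and __str__ from the 'scal' attribute.
    __props__ = ('scal',)

    def __init__(self, scal):
        self.scal = scal

    def make_node(self, x):
        x = as_tensor_variable(x)
        return Apply(self, [x], [x.type()])

    def perform(self, node, inputs, output_storage):
        output_storage[0][0] = inputs[0] * self.scal

# Instances with equal props compare equal and hash alike, which is
# what lets Theano merge duplicate nodes in a graph.
assert ScalMulSketch(2) == ScalMulSketch(2)
assert hash(ScalMulSketch(3)) == hash(ScalMulSketch(3))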
@@ -369,7 +363,7 @@ \section{Optimizations}
 \end{frame}
 
 \begin{frame}{Replace an Op (V1)}
-Here is code to use \code{DoubleOp} instead of \code{ScalMul(2)}.
+Here is code to use \code{DoubleOp()} instead of \code{ScalMul(2)}.
 \lstinputlisting[linerange={1-5,9-15}]{opt.py}
 \end{frame}
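The one-word slide fix is meaningful: DoubleOp names the Op class, while DoubleOp() is the instance that gets applied to a variable. Assuming the tutorial's DoubleOp and a tensor variable x:

y = DoubleOp()(x)  # instantiate the Op, then apply it to x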

@@ -381,11 +375,11 @@
 \begin{frame}{Registering}
 In any case you need to register your optimization.
 \lstinputlisting[linerange={6-10}]{opt.py}
-\lstinputlisting[linerange={21-21}]{opt.py}
+\lstinputlisting[linerange={21-22}]{opt.py}
 \end{frame}
 
 \begin{frame}{Tests}
 \color{red}Test optimization application (op replacement, correct run)
 \lstinputlisting{test_opt.py}
 \end{frame}
 
 \begin{frame}{Exercise 4}
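The red Tests placeholder asks for a test of both op replacement and a correct run. Here is a hedged sketch of what test_opt.py could verify, assuming ScalMulV1 and DoubleOp from the tutorial and that the optimization is registered in the default specialize set:

import theano
from theano import tensor as T

x = T.vector('x')
f = theano.function([x], ScalMulV1(2)(x))

# Op replacement: after optimization the compiled graph should contain
# a DoubleOp node and no ScalMulV1 node.
ops = [node.op for node in f.maker.fgraph.toposort()]
assert any(isinstance(op, DoubleOp) for op in ops)
assert not any(isinstance(op, ScalMulV1) for op in ops)

# Correct run: the replacement must compute the same result.
assert list(f([1.0, 2.0, 3.0])) == [2.0, 4.0, 6.0]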
opt.py (3 additions, 2 deletions)

@@ -12,10 +12,11 @@ def local_scalmul_double_v1(node):
             and node.op.scal == 2):
         return False
 
-    return DoubleOp()(node.inputs[0])
+    return [DoubleOp()(node.inputs[0])]
 
 from theano.gof.opt import OpSub
 
 local_scalmul_double_v2 = OpSub(ScalMulV1(2), DoubleOp())
 
-register_specialize(local_scalmul_double_v2)
+register_specialize(local_scalmul_double_v2,
+                    name='local_scalmul_double_v2')
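The first opt.py fix is the one that bites most people writing local optimizers: the function must return a list of replacement variables, one per output of the matched node, or False/None to decline; returning a bare variable is an error. A hedged sketch of the whole optimizer as it presumably reads after this commit (the decorator and imports are assumed from the surrounding file, which the slides list as lines 1-5 of opt.py):

from theano.gof.opt import local_optimizer
from theano.tensor.opt import register_specialize

@local_optimizer([ScalMulV1])
def local_scalmul_double_v1(node):
    if not (isinstance(node.op, ScalMulV1)
            and node.op.scal == 2):
        return False  # decline: not a node we can rewrite
    # Return a *list* of replacements, one per output of the node.
    return [DoubleOp()(node.inputs[0])]

register_specialize(local_scalmul_double_v1,
                    name='local_scalmul_double_v1')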
scalmulop.py (3 additions, 3 deletions)

@@ -1,9 +1,9 @@
 from theano import Op, Apply
 from theano.tensor import as_tensor_variable
-from theano.scalar import as_scalar_variable
+from theano.scalar import as_scalar
 
 class ScalMulV1(Op):
-    __props__ = ('scal')
+    __props__ = ('scal',)
 
     def __init__(self, scal):
         if not isinstance(scal, int):

@@ -36,7 +36,7 @@ class ScalMulV2(Op):
 
     def make_node(self, x, scal):
         x = as_tensor_variable(x)
-        scal = as_scalar_variable(scal)
+        scal = as_scalar(scal)
         return Apply(self, [x, scal], [x.type()])
 
     def perform(self, node, inputs, output_storage):
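The import fix pairs with the make_node fix: theano.scalar exposes as_scalar as its converter, and ScalMulV2 takes the multiplier as a symbolic scalar input rather than an Op property. A hedged usage sketch, assuming the rest of ScalMulV2 (perform, etc.) is as in the repository:

import theano
from theano import tensor as T

x = T.vector('x')
y = ScalMulV2()(x, 3)        # 'scal' enters the graph as a scalar input
f = theano.function([x], y)
print(f([1.0, 2.0, 3.0]))    # expected: [3. 6. 9.]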
