
Commit

pep8
nouiz committed Sep 26, 2014
1 parent 2e62eb4 commit cb77254
Showing 14 changed files with 38 additions and 10 deletions.
5 changes: 4 additions & 1 deletion 01_building_expressions/01_scalar.py
@@ -7,20 +7,23 @@
 from theano import function
 raise NotImplementedError("TODO: add any other imports you need")
 
+
 def make_scalar():
     """
     Returns a new Theano scalar.
     """
 
     raise NotImplementedError("TODO: implement this function.")
 
+
 def log(x):
     """
     Returns the logarithm of a Theano scalar x.
     """
 
     raise NotImplementedError("TODO: implement this function.")
 
+
 def add(x, y):
     """
     Adds two theano scalars together and returns the result.
@@ -36,7 +39,7 @@ def add(x, y):
     f = function([a, b], d)
     a = np.cast[a.dtype](1.)
     b = np.cast[b.dtype](2.)
-    actual = f(a,b)
+    actual = f(a, b)
     expected = 1. + np.log(2.)
     assert np.allclose(actual, expected)
     print "SUCCESS!"

5 changes: 4 additions & 1 deletion 01_building_expressions/01_scalar_soln.py
@@ -2,20 +2,23 @@
 from theano import function
 import theano.tensor as T
 
+
 def make_scalar():
     """
     Returns a new Theano scalar.
     """
 
     return T.scalar()
 
+
 def log(x):
     """
     Returns the logarithm of a Theano scalar x.
     """
 
     return T.log(x)
 
+
 def add(x, y):
     """
     Adds two theano scalars together and returns the result.
@@ -31,7 +34,7 @@ def add(x, y):
     f = function([a, b], d)
     a = np.cast[a.dtype](1.)
     b = np.cast[b.dtype](2.)
-    actual = f(a,b)
+    actual = f(a, b)
     expected = 1. + np.log(2.)
     assert np.allclose(actual, expected)
     print "SUCCESS!"

4 changes: 4 additions & 0 deletions 01_building_expressions/02_vector_mat.py
@@ -7,20 +7,23 @@
 from theano import function
 raise NotImplementedError("TODO: add any other imports you need")
 
+
 def make_vector():
     """
     Returns a new Theano vector.
     """
 
     raise NotImplementedError("TODO: implement this function.")
 
+
 def make_matrix():
     """
     Returns a new Theano matrix.
     """
 
     raise NotImplementedError("TODO: implement this function.")
 
+
 def elemwise_mul(a, b):
     """
     a: A theano matrix
@@ -30,6 +33,7 @@ def elemwise_mul(a, b):
 
     raise NotImplementedError("TODO: implement this function.")
 
+
 def matrix_vector_mul(a, b):
     """
     a: A theano matrix

4 changes: 4 additions & 0 deletions 01_building_expressions/02_vector_mat_soln.py
@@ -2,20 +2,23 @@
 from theano import function
 import theano.tensor as T
 
+
 def make_vector():
     """
     Returns a new Theano vector.
     """
 
     return T.vector()
 
+
 def make_matrix():
     """
     Returns a new Theano matrix.
     """
 
     return T.matrix()
 
+
 def elemwise_mul(a, b):
     """
     a: A theano matrix
@@ -25,6 +28,7 @@ def elemwise_mul(a, b):
 
     return a * b
 
+
 def matrix_vector_mul(a, b):
     """
     a: A theano matrix

5 changes: 4 additions & 1 deletion 01_building_expressions/03_tensor.py
@@ -7,6 +7,7 @@
 from theano import function
 raise NotImplementedError("TODO: add any other imports you need")
 
+
 def make_tensor(dim):
     """
     Returns a new Theano tensor with no broadcastable dimensions.
@@ -16,6 +17,7 @@ def make_tensor(dim):
 
     raise NotImplementedError("TODO: implement this function.")
 
+
 def broadcasted_add(a, b):
     """
     a: a 3D theano tensor
@@ -29,6 +31,7 @@ def broadcasted_add(a, b):
 
     raise NotImplementedError("TODO: implement this function.")
 
+
 def partial_max(a):
     """
     a: a 4D theano tensor
@@ -48,7 +51,7 @@ def partial_max(a):
     c = broadcasted_add(a, b)
     d = partial_max(c)
 
-    f = function([a, b,], d)
+    f = function([a, b], d)
 
     rng = np.random.RandomState([1, 2, 3])
     a_value = rng.randn(2, 2, 2).astype(a.dtype)

5 changes: 4 additions & 1 deletion 01_building_expressions/03_tensor_soln.py
@@ -2,6 +2,7 @@
 from theano import function
 import theano.tensor as T
 
+
 def make_tensor(dim):
     """
     Returns a new Theano tensor with no broadcastable dimensions.
@@ -10,6 +11,7 @@ def make_tensor(dim):
 
     return T.TensorType(broadcastable=tuple([False] * dim), dtype='float32')()
 
+
 def broadcasted_add(a, b):
     """
     a: a 3D theano tensor
@@ -23,6 +25,7 @@ def broadcasted_add(a, b):
 
     return a.dimshuffle(2, 'x', 1, 0) + b
 
+
 def partial_max(a):
     """
     a: a 4D theano tensor
@@ -42,7 +45,7 @@ def partial_max(a):
     c = broadcasted_add(a, b)
     d = partial_max(c)
 
-    f = function([a, b,], d)
+    f = function([a, b], d)
 
     rng = np.random.RandomState([1, 2, 3])
     a_value = rng.randn(2, 2, 2).astype(a.dtype)

1 change: 1 addition & 0 deletions 02_compiling_and_running/01_function.py
@@ -6,6 +6,7 @@
 from theano import tensor as T
 raise NotImplementedError("TODO: add any other imports you need")
 
+
 def evaluate(x, y, expr, x_value, y_value):
     """
     x: A theano variable

1 change: 1 addition & 0 deletions 02_compiling_and_running/01_function_soln.py
@@ -1,6 +1,7 @@
 from theano import tensor as T
 from theano import function
 
+
 def evaluate(x, y, expr, x_value, y_value):
     """
     x: A theano variable

4 changes: 3 additions & 1 deletion 02_compiling_and_running/02_shared.py
@@ -6,6 +6,7 @@
 import numpy as np
 raise NotImplementedError("TODO: add any other imports you need")
 
+
 def make_shared(shape):
     """
     Returns a theano shared variable containing a tensor of the specified
@@ -14,6 +15,7 @@ def make_shared(shape):
     """
     raise NotImplementedError("TODO: implement the function")
 
+
 def exchange_shared(a, b):
     """
     a: a theano shared variable
@@ -22,6 +24,7 @@ def exchange_shared(a, b):
     """
     raise NotImplementedError("TODO: implement the function")
 
+
 def make_exchange_func(a, b):
     """
     a: a theano shared variable
@@ -34,7 +37,6 @@ def make_exchange_func(a, b):
     raise NotImplementedError("TODO: implement the function")
 
 
-
 if __name__ == "__main__":
     a = make_shared((5, 4, 3))
     assert a.get_value().shape == (5, 4, 3)

4 changes: 3 additions & 1 deletion 02_compiling_and_running/02_shared_soln.py
@@ -3,6 +3,7 @@
 from theano import function
 from theano import shared
 
+
 def make_shared(shape):
     """
     Returns a theano shared variable containing a tensor of the specified
@@ -11,6 +12,7 @@ def make_shared(shape):
     """
     return shared(np.zeros(shape))
 
+
 def exchange_shared(a, b):
     """
     a: a theano shared variable
@@ -21,6 +23,7 @@ def exchange_shared(a, b):
     a.set_value(b.get_value())
     b.set_value(temp)
 
+
 def make_exchange_func(a, b):
     """
     a: a theano shared variable
@@ -38,7 +41,6 @@ def make_exchange_func(a, b):
     return f
 
 
-
 if __name__ == "__main__":
     a = make_shared((5, 4, 3))
     assert a.get_value().shape == (5, 4, 3)

3 changes: 2 additions & 1 deletion 03_modifying/01_grad.py
@@ -5,6 +5,7 @@
 # compute some derivative.
 from theano import tensor as T
 
+
 def grad_sum(x, y, z):
     """
     x: A theano variable
@@ -22,5 +23,5 @@ def grad_sum(x, y, z):
     y = T.scalar()
     z = x + y
     s = grad_sum(x, y, z)
-    assert s.eval({x: 0, y:0}) == 2
+    assert s.eval({x: 0, y: 0}) == 2
     print "SUCCESS!"

5 changes: 2 additions & 3 deletions 03_modifying/01_grad_soln.py
@@ -3,6 +3,7 @@
 #
 from theano import tensor as T
 
+
 def grad_sum(x, y, z):
     """
     x: A theano variable
@@ -14,12 +15,10 @@ def grad_sum(x, y, z):
 
     return sum(T.grad(z, [x, y]))
 
-
-
 if __name__ == "__main__":
     x = T.scalar()
     y = T.scalar()
     z = x + y
     s = grad_sum(x, y, z)
-    assert s.eval({x: 0, y:0}) == 2
+    assert s.eval({x: 0, y: 0}) == 2
     print "SUCCESS!"

1 change: 1 addition & 0 deletions 03_modifying/02_traverse.py
@@ -8,6 +8,7 @@
 from theano import tensor as T
 raise NotImplementedError("Add any imports you need.")
 
+
 def arg_to_softmax(prob):
     """
     Oh no! Someone has passed you the probability output,

1 change: 1 addition & 0 deletions 03_modifying/02_traverse_soln.py
@@ -2,6 +2,7 @@
 from theano.gof import Variable
 from theano import tensor as T
 
+
 def arg_to_softmax(prob):
     """
     Oh no! Someone has passed you the probability output,

