A Python package for automatic differentiation.

## Project description

This library aims to provide an easy way to perform symbolic operations in Python.
## Operators allowed

- *exp(x)* and *log(x)*

```python
def testOps():
    x = Integer('x')
    y = ops.log(x)
    z = ops.exp(y)
    graph.getOutput({x: 1})
```

- *Sigmoid(x)*

```python
def activ_fns():
    x = Double('x')
    z = ops.sigmoid(x)
    graph.getOutput({x: 110.5})
```

- *Tanh(x)*

```python
def activ_fns():
    x = Double('x')
    z = ops.tanh(x)
    graph.getOutput({x: 110.5})
```


## Testing

```python
a = Integer("a")
b = Integer("b")
c = a + b
d = b + 6
e = c * d
graph.getOutput({a: 32,
                 b: 11})
```

### Here is a more complex example.
```python
x = Integer("Int1x")
y = Integer("Int2y")
z = Integer("Int3z")
p = Integer("Int4p")
k = p * z
t = y * k
m = k + t
n = m * z
graph.getOutput({x: 9,
                 y: 9,
                 z: 9,
                 p: 2})
```

### This is the same example as above, but the commands are not three-operand commands.

```python
x = Integer("Int1x")
y = Integer("Int2y")
z = Integer("Int3z")
p = Integer("Int4p")
k = p * z
n = (k + (y * p * z)) * z
graph.getOutput({x: 9,
                 y: 9,
                 z: 9,
                 p: 2})
```

## Tensor Operations

```python
def dotProduct():
    x = DoubleTensor("Tensor1")
    y = x.dot([3, 4])
    z = y.dot([4, 5])
    output = graph.getOutput({x: [3, 4]})
    assert(np.all(output == [100, 125]))
    assert(np.all(x.gradient == [[12., 16.], [15., 20.]]))
```

```python
def TensorOp():
    x = DoubleTensor("Tensor1")
    y = x - [3, 4]
    z = ops.log(y * x)
    output = graph.getOutput({x: [10]})
    assert(np.all(np.isclose(output, np.log(10 * (10 - np.asarray([3, 4]))))))
    a = 2 * 10 - np.asarray([3, 4])
    b = 1.0 / np.exp(np.asarray(output))
```



## Project details

Uploaded source