Add tanh method to tensor

Aodhnait Étaín 2020-11-15 21:59:25 +00:00
parent f182af0cdd
commit f8101400c2


@@ -92,6 +92,18 @@ class Tensor:
        tensor._back = back
        return tensor

    def tanh(self):
        tensor = Tensor(np.tanh(self.value))
        tensor._save(self)

        def back(upstream):
            # dtanh(x)/dx = 1 - tanh^2(x)
            a, = tensor._parents
            # Chain rule: scale the upstream gradient elementwise by the local derivative.
            return [upstream * (1 - np.tanh(a.value) ** 2)]

        tensor._back = back
        return tensor

    # TODO Compute gradients only for tensors that need it.
    def _backprop(self, upstream):
        # Backprop through the tensor iff it has any parents.
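
As a sanity check on the derivative used in back, here is a minimal standalone sketch (plain NumPy only, independent of the Tensor class; variable names are illustrative) comparing the analytic rule 1 - tanh^2(x) against a central finite difference:

import numpy as np

x = np.linspace(-3.0, 3.0, 7)
eps = 1e-5

# Analytic derivative used in the backward pass above.
analytic = 1 - np.tanh(x) ** 2
# Central finite-difference estimate of d tanh(x)/dx.
numeric = (np.tanh(x + eps) - np.tanh(x - eps)) / (2 * eps)

assert np.allclose(analytic, numeric)
print(analytic)

In the method above, back scales this local derivative elementwise by upstream, which is the chain-rule step for an elementwise op like tanh.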