Just found a package for taking derivatives of functions = = (autograd).
Tested it on the 6th derivative of tanh.
The result is the plot of tanh and its first six derivatives produced by the script below.

Then I wrote a simple and naive neural network with it..
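It looked roughly like this (a minimal sketch rather than the exact code: the one-hidden-layer shape, the toy sine data, and the names init_params / predict / loss are all just for illustration):

from autograd import grad
import autograd.numpy as np    # autograd's numpy wrapper
import numpy.random as npr     # plain numpy is fine for initialization

def init_params(n_hidden=8):
    rng = npr.RandomState(0)
    # one hidden layer: W1, b1, W2, b2, small random weights
    return [rng.randn(1, n_hidden) * 0.1, np.zeros(n_hidden),
            rng.randn(n_hidden, 1) * 0.1, np.zeros(1)]

def predict(params, x):
    W1, b1, W2, b2 = params
    h = np.tanh(np.dot(x, W1) + b1)   # hidden activations
    return np.dot(h, W2) + b2         # linear output

def loss(params, x, y):
    return np.mean((predict(params, x) - y) ** 2)   # mean squared error

loss_grad = grad(loss)   # d(loss)/d(params); autograd handles lists of arrays

x = np.linspace(-3, 3, 50).reshape(-1, 1)
y = np.sin(x)
params = init_params()
for _ in range(500):     # plain gradient descent
    params = [p - 0.1 * g for p, g in zip(params, loss_grad(params, x, y))]

Since the loss is already scalar-valued, grad applies directly here, with no elementwise trick needed.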

Back to the derivative test: the code is below. Computing gradients with this feels really nice~~ The only catch is that grad apparently can't take an array input and broadcast over it (it expects a scalar-valued function), so you have to define an elementwise_grad yourself. Summing the output makes the wrapped function scalar-valued, and its gradient recovers the per-element derivatives:

def elementwise_grad(fun):
    # sum collapses the output to a scalar; its gradient is fun'(x) elementwise
    return grad(lambda x: np.sum(fun(x)))
The full script:

from autograd import grad
import autograd.numpy as np    # thin numpy wrapper that autograd can differentiate through
import matplotlib.pyplot as plt
import seaborn as sns          # unused below; presumably just for nicer plot styling

def tanh(x):
    y = np.exp(-2.0 * x)   # tanh(x) = (1 - e^{-2x}) / (1 + e^{-2x})
    return (1.0 - y) / (1.0 + y)

def elementwise_grad(fun):
    return grad(lambda x: np.sum(fun(x)))

# stack elementwise_grad to get higher derivatives:
# grad_tanh is the 1st derivative, grad_tanh5 the 6th
grad_tanh = elementwise_grad(tanh)
grad_tanh1 = elementwise_grad(grad_tanh)
grad_tanh2 = elementwise_grad(grad_tanh1)
grad_tanh3 = elementwise_grad(grad_tanh2)
grad_tanh4 = elementwise_grad(grad_tanh3)
grad_tanh5 = elementwise_grad(grad_tanh4)

x = np.linspace(-7, 7, 100)

plt.plot(x, tanh(x),
         x, grad_tanh(x),
         x, grad_tanh1(x),
         x, grad_tanh2(x),
         x, grad_tanh3(x),
         x, grad_tanh4(x),
         x, grad_tanh5(x))

plt.show()
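By the way, the first derivative has a closed form, tanh'(x) = 1 - tanh(x)^2, so a quick sanity check can be appended to the script above (using grad_tanh and x as defined there):

# closed form of the 1st derivative: tanh'(x) = 1 - tanh(x)**2
assert np.allclose(grad_tanh(x), 1 - tanh(x) ** 2)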