RNN 的简单的推导演算公式（BPTT）

附上 y = 2x − b（此处 b = 6）拟合的简单代码。

 import numpy as np
x = np.asarray([2,1,3,5,6]);
y = np.zeros((1,5));
learning_rate=0.1;
w=5; for i in range(len(x)):
y[0][i]= func(x[i]); def func(x):
return 2*x -6; def forward(w,x):
return w*x -6; def backward(w,x,y):
pred_y = w*x -6;
loss = (y - pred_y);
delta_w = loss;
w += (learning_rate*loss);
return w; def train(w):
for epoch in range(5):
for i in range(len(x)):
print 'w = {} ,pred_y = {} ,y ={}'.format(w,forward(w,x[i]),y[0][i]);
w = backward(w,x[i],y[0][i]);
if __name__ == '__main__':
train(w);
w = 5 ,pred_y = 4 ,y =-2.0
w = 4.4 ,pred_y = -1.6 ,y =-4.0
w = 4.16 ,pred_y = 6.48 ,y =0.0
w = 3.512 ,pred_y = 11.56 ,y =4.0
w = 2.756 ,pred_y = 10.536 ,y =6.0
w = 2.3024 ,pred_y = -1.3952 ,y =-2.0
w = 2.24192 ,pred_y = -3.75808 ,y =-4.0
w = 2.217728 ,pred_y = 0.653184 ,y =0.0
w = 2.1524096 ,pred_y = 4.762048 ,y =4.0
w = 2.0762048 ,pred_y = 6.4572288 ,y =6.0
w = 2.03048192 ,pred_y = -1.93903616 ,y =-2.0
w = 2.024385536 ,pred_y = -3.975614464 ,y =-4.0
w = 2.0219469824 ,pred_y = 0.0658409472 ,y =0.0
w = 2.01536288768 ,pred_y = 4.0768144384 ,y =4.0
w = 2.00768144384 ,pred_y = 6.04608866304 ,y =6.0
w = 2.00307257754 ,pred_y = -1.99385484493 ,y =-2.0
w = 2.00245806203 ,pred_y = -3.99754193797 ,y =-4.0
w = 2.00221225583 ,pred_y = 0.00663676747776 ,y =0.0
w = 2.00154857908 ,pred_y = 4.00774289539 ,y =4.0
w = 2.00077428954 ,pred_y = 6.00464573723 ,y =6.0
w = 2.00030971582 ,pred_y = -1.99938056837 ,y =-2.0
w = 2.00024777265 ,pred_y = -3.99975222735 ,y =-4.0
w = 2.00022299539 ,pred_y = 0.000668986161758 ,y =0.0
w = 2.00015609677 ,pred_y = 4.00078048386 ,y =4.0
w = 2.00007804839 ,pred_y = 6.00046829031 ,y =6.0

  

04-30 03:53