# 05.4 还原参数值

## 5.4 还原参数值⚓︎

### 5.4.1 对比结果⚓︎

params = HyperParameters(eta=0.1, max_epoch=10, batch_size=1, eps = 1e-5)


9 0 437.5399553941636 [[-35.46926435] [399.01136072]] [[252.69305588]]
9 100 420.78580862641473 [[-36.93198181] [400.03047293]] [[251.26503706]]
......
9 900 413.7210407763991 [[-36.67601742] [406.55322285]] [[246.8067483]]


params = HyperParameters(eta=0.01, max_epoch=500, batch_size=10, eps=1e-5)


• 学习率缩小10倍，变成0.01
• max_epoch扩大50倍，从10变成500，让网络得到充分训练（但实际上不需要这么多次的循环，大家可以自己试验）
• batch_size=10，使用mini-batch批量样本训练，提高精度，减缓个别样本引起的跳跃程度

......
499 89 380.62299460835936 [[-40.2782923 ] [399.34224968]] [[244.14309928]]
499 99 380.5935045560184 [[-40.26440193] [399.39472352]] [[244.3928586]]


### 5.4.2 还原真实的 $W,B$ 值⚓︎

$$y = x_1 w_1 + x_2 w_2 + b \tag{1}$$

$$z = x_1' w_1' + x_2' w_2' + b' \tag{2}$$

其中，式(1)中的 $y$ 是标签值，式(2)中的 $z$ 是预测值。

$$
x_1 w_1 + x_2 w_2 + b = \frac{x_1 - x_{1min}}{xm_1} w_1' + \frac{x_2 - x_{2min}}{xm_2} w_2' + b' \\
= x_1 \frac{w_1'}{xm_1} + x_2 \frac{w_2'}{xm_2} + b' - \frac{w_1' x_{1min}}{xm_1} - \frac{w_2' x_{2min}}{xm_2} \tag{3}
$$

对比式(3)两端同类项的系数，可得：

$$w_1 = \frac{w_1'}{xm_1} \tag{4}$$

$$w_2 = \frac{w_2'}{xm_2} \tag{5}$$

$$b = b' - \frac{w_1' x_{1min}}{xm_1} - \frac{w_2' x_{2min}}{xm_2} \tag{6}$$

代入训练得到的数值（$xm$ 即最大值减最小值）：

$$w_1 = \frac{w_1'}{x_{1max}-x_{1min}} = \frac{-40.2}{21.96-2.02} = -2.016 \tag{7}$$

$$w_2 = \frac{w_2'}{x_{2max}-x_{2min}} = \frac{399.3}{119-40} = 5.054 \tag{8}$$

$$b = 244.5 - (-2.016) \cdot 2.02 - 5.054 \cdot 40 = 46.412 \tag{9}$$

### 5.4.3 代码实现⚓︎

# get real weights and bias
W_real = np.zeros_like(net.W)
for i in range(W_real.shape[0]):
W_real[i,0] = net.W[i,0] / X_Norm[i,1]
B_real = net.B - W_real[0,0]*X_Norm[0,0] - W_real[1,0]*X_Norm[1,0]
return W_real, B_real


`X_Norm` 是我们在做标准化时保留下来的样本的两个特征向量的最小值和数值范围（最大值减去最小值）。

if __name__ == '__main__':
......
# inference
W_real, B_real = DeNormalizeWeightsBias(net, reader)
print("W_real=", W_real)
print("B_real=", B_real)

x1 = 15
x2 = 93
x = np.array([x1,x2]).reshape(1,2)
z = np.dot(x, W_real) + B_real
print("Z=", z)


`net.train()` 方法返回之后，训练好的 $W$、$B$ 的值就保存在 `NeuralNet` 类的属性里了。然后通过调用 `DeNormalizeWeightsBias()` 函数，把它们转换成真实的 `W_real` 和 `B_real` 值，就好比我们不做标准化而能训练出来的权重值一样。

### 5.4.4 运行结果⚓︎

......
499 99 380.5934686827507
[[-40.23261123] [399.36389489]] [[244.39118797]]
W= [[-40.23261123]
[399.36389489]]
B= [[244.39118797]]
W_real= [[-2.01737219]
[ 5.05523918]]
B_real= [[46.26647363]]
Z= [[486.14313417]]


w1 w2 b

ch05, Level4