# Test the deep learning library from my book

## Test the deep learning library from my book

Posted by xuepro on October 19, 2019

Test the dense layer

import numpy as np
import matplotlib.pyplot as plt
%matplotlib inline

from NeuralNetwork import *
import util

np.random.seed(1)

pts = 100

k = 2
b =1
X = np.random.randn(pts,1)*10                 # 随机采样一些x坐标
Y = k*X+b
Y  = Y+ np.random.randn(pts,1)*4              #给 Y随机噪声
plt.plot(X,Y,'o')
plt.xlabel('x')
plt.ylabel('y')
plt.show()

import util
dense = Dense(1,1,('no',0.01))
losses = []
epochs = 100
reg  = 0.
learning_rate = 1e-2

nn = dense
for epoch in range(epochs):
for i in range(len(nn.params)):
#print(nn.params[0],nn.params[1])
F = nn.forward(X)
for i in range(len(nn.params)):
print(loss)
losses.append(loss)
plt.plot(losses)


Test the NeuralNetwork class

 import numpy as np
import matplotlib.pyplot as plt
%matplotlib inline

from NeuralNetwork import *
import util
pts = 1000

k = 2
b =1
X = np.random.randn(pts,1)*10                 # 随机采样一些x坐标
Y = k*X+b
Y  = Y+ np.random.randn(pts,1)*4              #给 Y随机噪声
plt.plot(X,Y,'o')
plt.xlabel('x')
plt.ylabel('y')
plt.show()

m,n = X.shape[0],X.shape[1]
print("n",n)
nn = NeuralNetwork()
dense = Dense(1,1,('no',0.01))

learning_rate = 1e-3
momentum = 0 #0.9
optimizer = SGD(nn.parameters(),learning_rate,momentum)

losses = []
epochs = 100
reg  = 0.

for epoch in range(epochs):
F = nn.forward(X)