The code always throws an error when it runs to this point:
import numpy as np
from dataset.mnist import load_mnist
from Two_layer_net import TwoLayerNet
import matplotlib.pylab as plt

(x_train, t_train), (x_test, t_test) = load_mnist(normalize=True, one_hot_label=True)

train_loss_list = []
iters_num = 10
train_size = x_train.shape[0]
batch_size = 100
learning_rate = 0.1

network = TwoLayerNet(input_size=784, hidden_size=50, output_size=10)

for i in range(iters_num):
    print(f'Iteration {i}')
    # draw a random mini-batch from the training set
    batch_mask = np.random.choice(train_size, batch_size)
    x_batch = x_train[batch_mask]
    t_batch = t_train[batch_mask]
    # numerical gradient of the loss with respect to every parameter
    grads = network.numerical_gradient(x_batch, t_batch)
    # gradient-descent update of each parameter
    for key in ('W1', 'b1', 'W2', 'b2'):
        network.params[key] -= learning_rate * grads[key]
    # record the loss on the current mini-batch
    loss = network.loss(x_batch, t_batch)
    print(loss)
    train_loss_list.append(loss)
Let's see what is inside grads:

print(grads)
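If numerical_gradient returns normally, grads should be a dict with the same keys as network.params, and each gradient array should have exactly the shape of the parameter it belongs to. A quick sanity check you could run right after the print (the expected shapes follow from input_size=784, hidden_size=50, output_size=10):

# each gradient should match the shape of its parameter:
# W1: (784, 50), b1: (50,), W2: (50, 10), b2: (10,)
for key in ('W1', 'b1', 'W2', 'b2'):
    print(key, network.params[key].shape, grads[key].shape)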
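The one piece the snippet does not show is Two_layer_net.py, and network.numerical_gradient(x_batch, t_batch) is where the work (and most likely the error) happens, so that file is the first place to check. For comparison only, here is a minimal sketch of the central-difference helper that this kind of TwoLayerNet typically relies on; the name and placement are assumptions about your project, not a description of your actual file:

def numerical_gradient(f, x):
    # central-difference estimate of the gradient of f with respect to x;
    # each element of x is nudged by +/- h and then restored
    h = 1e-4
    grad = np.zeros_like(x)
    it = np.nditer(x, flags=['multi_index'], op_flags=['readwrite'])
    while not it.finished:
        idx = it.multi_index
        tmp = x[idx]
        x[idx] = tmp + h
        fxh1 = f(x)                # f(x + h)
        x[idx] = tmp - h
        fxh2 = f(x)                # f(x - h)
        grad[idx] = (fxh1 - fxh2) / (2 * h)
        x[idx] = tmp               # restore the original value
        it.iternext()
    return grad

Inside the class, TwoLayerNet.numerical_gradient(self, x, t) typically calls this helper once per entry of self.params with loss_W = lambda W: self.loss(x, t); if your method's signature or return value differs from that pattern, the training loop above will fail at the grads = ... line.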
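Finally, matplotlib.pylab is imported but never used. Once the loop runs through without errors, a small sketch like this would plot train_loss_list to confirm the loss is actually falling:

# visualise the recorded losses, one point per iteration
plt.plot(np.arange(len(train_loss_list)), train_loss_list)
plt.xlabel('iteration')
plt.ylabel('loss')
plt.show()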