param.grad == None: why does this happen?

The exact error:
Traceback (most recent call last):
  File "/home/anthony/桌面/test/test.py", line 90, in <module>
    SGD(params,learning_rate)
  File "/home/anthony/桌面/test/test.py", line 79, in SGD
    param[:] = param - lr * param.grad
TypeError: unsupported operand type(s) for *: 'float' and 'NoneType'
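(Background on the error itself: in MXNet, an NDArray's .grad attribute stays None until attach_grad() has been called on that exact array, which is what the TypeError above is complaining about. A quick standalone check, not part of the original post:

from mxnet import nd

a = nd.ones((2,))
print(a.grad)    # None: no gradient buffer has been attached yet
a.attach_grad()
print(a.grad)    # now an all-zero NDArray with the same shape as a
)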
The full code:
import random
from mxnet import nd, autograd

num_inputs = 2
num_examples = 1000

true_w = [2, -3.4]
true_b = 4.2

X = nd.random_normal(shape=(num_examples, num_inputs))
y = true_w[0] * X[:, 0] + true_w[1] * X[:, 1] + true_b
y += .01 * nd.random_normal(shape=y.shape)

batch_size = 10
def data_iter():
    idx = list(range(num_examples))
    random.shuffle(idx)
    for i in range(0, num_examples, batch_size):
        j = nd.array(idx[i:min(i + batch_size, num_examples)])
        yield nd.take(X, j), nd.take(y, j)
w = nd.random_normal(shape=(num_inputs, 1))
b = nd.zeros((1,))
params = [w, b]

for params in params:
    params.attach_grad()

def net(X):
    return nd.dot(X, w) + b

def square_loss(yhat, y):
    return (yhat - y.reshape(yhat.shape)) ** 2

def SGD(params, lr):
    for param in params:
        param[:] = param - lr * param.grad

epochs = 5
learning_rate = .001
for e in range(epochs):
    total_loss = 0
    for data, label in data_iter():
        with autograd.record():
            output = net(data)
            loss = square_loss(output, label)
        loss.backward()
        SGD(params, learning_rate)
        total_loss += nd.sum(loss).asscalar()
    print("Epoch %d, average loss: %f" % (e, total_loss / num_examples))
Thanks a lot, hoping someone can answer!
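A likely culprit, judging from the code above (my reading, not a confirmed answer from the thread): the loop "for params in params:" rebinds the name params to each element of the list, so after the loop params refers to the last NDArray, b, rather than to the list [w, b]. SGD(params, learning_rate) then iterates over the elements of b, and those element slices never had attach_grad() called on them, so param.grad is None. Using a distinct loop variable keeps params pointing at the list:

# Shadowing bug: after "for params in params:" the name params
# refers to b, the last element, not to the list [w, b].
# A distinct loop variable avoids the rebinding:
for param in params:
    param.attach_grad()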

Did the OP ever solve this? I ran into the same problem, and it is stopping me from doing gradient clipping.
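On the gradient clipping point: once every parameter really has a gradient buffer attached (i.e. after the attach_grad fix above), manual clipping in this nd/autograd style can look roughly like the sketch below. This is my own sketch, not code from the thread; theta is an assumed name for the clipping threshold, and the function would be called between loss.backward() and SGD(params, learning_rate):

from mxnet import nd

def grad_clipping(params, theta):
    # Accumulate the squared L2 norms of all parameter gradients.
    norm = nd.array([0.0])
    for param in params:
        norm += (param.grad ** 2).sum()
    norm = norm.sqrt().asscalar()
    # Rescale every gradient in place so the global norm is at most theta.
    if norm > theta:
        for param in params:
            param.grad[:] *= theta / norm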