import torch
from torch import nn
def corr2d(X, K):
    """Compute the 2-D cross-correlation of input X with kernel K.

    Args:
        X: 2-D input tensor of shape (H, W).
        K: 2-D kernel tensor of shape (h, w), with h <= H and w <= W.

    Returns:
        2-D tensor of shape (H - h + 1, W - w + 1), where each entry is the
        elementwise product of K with the corresponding window of X, summed.
    """
    # Kernel height h and width w.
    h, w = K.shape
    # No padding, stride 1: the output shrinks by (h - 1, w - 1).
    Y = torch.zeros((X.shape[0] - h + 1, X.shape[1] - w + 1))
    for i in range(Y.shape[0]):
        for j in range(Y.shape[1]):
            # Multiply the (h, w) window at (i, j) by K and sum.
            Y[i, j] = (X[i:i + h, j:j + w] * K).sum()
    return Y
# ② Implement a 2-D convolutional layer on top of corr2d.
class Conv2D(nn.Module):
    """Minimal 2-D convolution layer (no padding, no stride) using corr2d."""

    def __init__(self, kernel_size):
        super().__init__()
        # Learnable kernel, randomly initialized.
        self.weight = nn.Parameter(torch.rand(kernel_size))
        # Learnable scalar bias.
        self.bias = nn.Parameter(torch.zeros(1))

    def forward(self, x):
        # Cross-correlate the input with the kernel, then add the bias.
        return corr2d(x, self.weight) + self.bias


if __name__ == '__main__':
    # Built-in Conv2d with a 1x2 kernel and no bias; we learn the kernel below.
    conv2d = nn.Conv2d(1, 1, kernel_size=(1, 2), bias=False)
    # Input: 6x8 image of ones with a band of zeros in columns 2-5.
    X = torch.ones((6, 8))
    X[:, 2:6] = 0
    # Reference edge-detecting kernel: +1 at a 1->0 edge, -1 at a 0->1 edge.
    K = torch.tensor([[1.0, -1.0]])
    # Target output from the reference cross-correlation.
    Y = corr2d(X, K)
    # nn.Conv2d expects (batch, channel, height, width).
    X = X.reshape((1, 1, 6, 8))
    Y = Y.reshape((1, 1, 6, 7))
    # Train for 10 iterations of plain SGD on the squared error.
    for i in range(10):
        Y_hat = conv2d(X)
        # Elementwise squared-error loss.
        l = (Y_hat - Y) ** 2
        # Zero accumulated gradients before backprop.
        conv2d.zero_grad()
        l.sum().backward()
        # Manual gradient-descent step, learning rate 3e-2.
        conv2d.weight.data[:] -= 3e-2 * conv2d.weight.grad
        if (i + 1) % 2 == 0:
            print(f'batch {i + 1}, loss {l.sum():.3f}')
    # The learned weights should be close to [1, -1].
    print(conv2d.weight.data.reshape((1, 2)))
Result:
batch 2, loss 1.463
batch 4, loss 0.358
batch 6, loss 0.106
batch 8, loss 0.037
batch 10, loss 0.014
tensor([[ 1.0066, -0.9830]])

Process finished with exit code 0