The data has shape [N, C, H, W].
1. BatchNorm
Normalization dimensions: [N, H, W], so the mean and variance are computed C times, one pair per channel. For channel c: y = γ_c · (x − μ_c) / √(σ_c² + ε) + β_c, where μ_c and σ_c² are taken over all N·H·W elements of that channel.
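Because the statistics are independent per channel, the whole check can be written with one broadcast expression. The snippet below is a minimal sketch (the tensor x, the layer bn, and the tolerance are illustrative choices, not part of the example that follows); it uses the biased variance, which is what BatchNorm2d itself uses, so the comparison holds exactly. The example after it verifies a single channel step by step.

import torch
from torch import nn

x = torch.randn(8, 3, 32, 32)                 # already [N, C, H, W]
bn = nn.BatchNorm2d(3)
out = bn(x)

# One mean/variance pair per channel, computed over the N, H, W dimensions.
mu = x.mean(dim=(0, 2, 3), keepdim=True)                    # shape [1, 3, 1, 1]
var = x.var(dim=(0, 2, 3), keepdim=True, unbiased=False)    # biased, as BatchNorm uses
manual = (x - mu) / torch.sqrt(var + bn.eps) \
    * bn.weight.view(1, -1, 1, 1) + bn.bias.view(1, -1, 1, 1)

print(torch.allclose(out, manual, atol=1e-6))               # True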
import torch
from torch import nn

# The tensor starts out as [N, H, W, C]; permute it to the [N, C, H, W]
# layout expected by nn.BatchNorm2d.
input = torch.randn((8, 32, 32, 3))
input_ = input.permute(0, 3, 1, 2)
print(input_.shape)

BN = nn.BatchNorm2d(3)          # one (mean, var, gamma, beta) set per channel
output = BN(input_)
print(output[:, 0, :, :])       # BatchNorm result for channel 0

# Reproduce channel 0 by hand: the statistics are taken over the N, H, W dimensions.
X = input_[:, 0, :, :]
print(X.shape)
mean = torch.mean(X)
# Note: torch.var defaults to the unbiased estimator (divides by N*H*W - 1), while
# BatchNorm uses the biased one, so the result below differs from BN's output only in
# the last digits; torch.var(X, unbiased=False) would reproduce it exactly.
var = torch.var(X)
BN_one = (input_[:,0,:,:] - mean) / torch.pow(var + BN.eps, 0.5) * BN.weight[0] + BN.bias[0]
print("BN_one:", BN_one)
Output:

torch.Size([8, 3, 32, 32])
tensor([[[ 1.6338e-01, -2.38e-01, -1.2965e+00, ..., 9.6178e-01,
-4.1027e-01, 1.4072e+00],
[-9.9580e-02, -8.1695e-02, -2.2693e-01, ..., 1.1076e+00,
2.3096e-01, -1.4278e+00],
[ 1.2291e+00, 1.0623e+00, -5.4392e-01, ..., -4.3424e-02,
-2.2262e-01, -5.1729e-01],
...,
[-1.8724e+00, 5.1297e-01, 6.1065e-01, ..., 1.4684e-01,
-8.5345e-02, -1.9820e+00],
[-1.8862e-02, -5.1397e-01, -8.9240e-01, ..., 1.0580e+00,
1.3407e+00, 2.6915e+00],
[ 2.9983e-01, -8.8519e-01, -7.3509e-01, ..., 3.3777e-01,
4.3252e-01, 3.9348e-01]]], grad_fn=<SliceBackward0>)
torch.Size([8, 32, 32])
BN_one: tensor([[[ 1.6337e-01, -2.37e-01, -1.29e+00, ..., 9.6173e-01,
-4.1024e-01, 1.4071e+00],
[-9.9574e-02, -8.1690e-02, -2.2692e-01, ..., 1.1076e+00,
2.3094e-01, -1.4277e+00],
[ 1.2290e+00, 1.0623e+00, -5.4388e-01, ..., -4.3422e-02,
-2.2260e-01, -5.1726e-01],
..