LayerNorm inside nn.Sequential in torch

Posted 2024-04-25 08:01:19


I am trying to use LayerNorm inside nn.Sequential in torch. This is what I am looking for -

import torch.nn as nn

class LayerNormCnn(nn.Module):
    def __init__(self):
        super(LayerNormCnn, self).__init__()
        self.net = nn.Sequential(
                nn.Conv2d(3, 32, kernel_size=3, stride=2, padding=1),
                nn.LayerNorm(),
                nn.ReLU(),
                nn.Conv2d(32, 64, kernel_size=3, stride=2, padding=1),
                nn.LayerNorm(),
                nn.ReLU(),
            )

    def forward(self, x):
        x = self.net(x)
        return x

Unfortunately, this doesn't work, because nn.LayerNorm requires normalized_shape as a constructor argument. The code above raises the following exception -

    nn.LayerNorm(),
TypeError: __init__() missing 1 required positional argument: 'normalized_shape'
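
For reference, nn.LayerNorm normalizes over the trailing dimensions named by normalized_shape, so it has to be constructed with the shape of the tensor it will receive. A minimal sketch of a valid construction (the shapes here are chosen only for illustration) -

import torch
import torch.nn as nn

# normalized_shape lists the trailing dimensions to normalize over;
# this instance expects inputs of shape (N, 32, 64, 64)
ln = nn.LayerNorm([32, 64, 64])
x = torch.randn(8, 32, 64, 64)
print(ln(x).shape)  # torch.Size([8, 32, 64, 64])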

For now, this is how I have implemented it -

import torch
import torch.nn as nn
import torch.nn.functional as F


class LayerNormCnn(nn.Module):
    def __init__(self, state_shape):
        super(LayerNormCnn, self).__init__()
        self.conv1 = nn.Conv2d(state_shape[0], 32, kernel_size=3, stride=2, padding=1)
        self.conv2 = nn.Conv2d(32, 64, kernel_size=3, stride=2, padding=1)

        # compute shape by doing a forward pass
        with torch.no_grad():
            fake_input = torch.randn(1, *state_shape)
            out        = self.conv1(fake_input)
            bn1_size   = out.size()[1:]
            out        = self.conv2(out)
            bn2_size   = out.size()[1:]

        self.bn1 = nn.LayerNorm(bn1_size)
        self.bn2 = nn.LayerNorm(bn2_size)

    def forward(self, x):
        x = F.relu(self.bn1(self.conv1(x)))
        x = F.relu(self.bn2(self.conv2(x)))
        return x

if __name__ == '__main__':
    in_shape   = (3, 128, 128)
    batch_size = 32

    model = LayerNormCnn(in_shape)
    x = torch.randn((batch_size,) + in_shape)
    out = model(x)
    print(out.shape)

Is it possible to use LayerNorm inside nn.Sequential?
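
One way this can work, assuming the input size is fixed up front (here (3, 128, 128), as in the example above): with the two stride-2 convolutions the intermediate feature maps have shapes (32, 64, 64) and (64, 32, 32), so each normalized_shape can be spelled out explicitly -

import torch
import torch.nn as nn

# A minimal sketch assuming a fixed (3, 128, 128) input;
# each LayerNorm is given the (C, H, W) shape of the conv output it follows.
net = nn.Sequential(
    nn.Conv2d(3, 32, kernel_size=3, stride=2, padding=1),
    nn.LayerNorm([32, 64, 64]),
    nn.ReLU(),
    nn.Conv2d(32, 64, kernel_size=3, stride=2, padding=1),
    nn.LayerNorm([64, 32, 32]),
    nn.ReLU(),
)

x = torch.randn(32, 3, 128, 128)
print(net(x).shape)  # torch.Size([32, 64, 32, 32])

The same dummy-forward trick from the workaround above can be used to compute these shapes before building the nn.Sequential, so the model stays generic over state_shape.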


Tags: self, size, init, def, nn, torch, out, kernel