PyTorch: nn.Sequential with layers that take and return multiple values

Posted 2024-04-23 23:36:49


So I've used nn.Sequential plenty of times before, but now I'm running into a strange error where one layer sends three values to the next instead of the expected two.

I've defined my layer as:

import torch
import torch.nn as nn


class BidirectionalGRU(nn.Module):
    def __init__(self, rnn_dim, hidden_size, dropout=0.0, batch_first=False):
        super(BidirectionalGRU, self).__init__()

        self.BiGRU = nn.GRU(
            input_size=rnn_dim,
            hidden_size=hidden_size,
            num_layers=1,
            batch_first=batch_first,
            bidirectional=True,
        )

    def forward(self, x, hidden):
        # run the GRU with the given initial hidden state
        x, hidden = self.BiGRU(x, hidden)
        return x, hidden
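
For reference, here is a quick shape check of this module on dummy data (the dimensions are made-up example values, not from the post). Because the GRU is bidirectional, the two directions are concatenated and the output feature size is hidden_size * 2, which is why the later layers below take rnn_dim=hidden_size_listener * 2:

gru = BidirectionalGRU(rnn_dim=80, hidden_size=256, batch_first=True)
x = torch.randn(8, 100, 80)    # (batch, seq_len, features)
h0 = torch.zeros(2, 8, 256)    # (num_layers * num_directions, batch, hidden_size)
out, hidden = gru(x, h0)
print(out.shape)               # torch.Size([8, 100, 512]), i.e. hidden_size * 2
print(hidden.shape)            # torch.Size([2, 8, 256])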

I use it in the following code:

class Listener(nn.Module):
    def __init__(
        self, input_feature_dim_listener, hidden_size_listener, num_layers_listener
    ):
        super(Listener, self).__init__()
        assert num_layers_listener >= 1, "Listener should have at least 1 layer"
        self.hidden_size = hidden_size_listener
        self.gru_1 = nn.Sequential(
            BidirectionalGRU(
                rnn_dim=input_feature_dim_listener,
                hidden_size=hidden_size_listener,
                batch_first=True,
            ),
            BidirectionalGRU(
                rnn_dim=hidden_size_listener * 2,
                hidden_size=hidden_size_listener,
                batch_first=True,
            ),
            BidirectionalGRU(
                rnn_dim=hidden_size_listener * 2,
                hidden_size=hidden_size_listener,
                batch_first=True,
            ),
            BidirectionalGRU(
                rnn_dim=hidden_size_listener * 2,
                hidden_size=hidden_size_listener,
                batch_first=True,
            ),
        )

    def initHidden(self):
        # shape (num_layers * num_directions, batch, hidden_size),
        # hard-coded for batch=8 and hidden_size=512
        return torch.zeros([2, 8, 512])

    def forward(self, x):
        # (batch, features, time) -> (batch, time, features) for the batch_first GRUs
        x = x.squeeze().permute(0, 2, 1)
        fake_hidden = self.initHidden()
        output, hidden = self.gru_1(x, fake_hidden)
        #output, hidden = self.gru_2(output, hidden)
        #output, hidden = self.gru_3(output, hidden)
        #output, hidden = self.gru_4(output, hidden)
        return output, hidden

This doesn't work and gives me the following error:

    las(spectrograms, spectrograms, 0.5)
  File "/Users/jaime/anaconda3/envs/torch/lib/python3.7/site-packages/torch/nn/modules/module.py", line 727, in _call_impl
    result = self.forward(*input, **kwargs)
  File "caca.py", line 116, in forward
    listener_feature, hidden = self.listener(batch_data)
  File "/Users/jaime/anaconda3/envs/torch/lib/python3.7/site-packages/torch/nn/modules/module.py", line 727, in _call_impl
    result = self.forward(*input, **kwargs)
  File "caca.py", line 84, in forward
    output, hidden = self.gru_1(x, fake_hidden)
  File "/Users/jaime/anaconda3/envs/torch/lib/python3.7/site-packages/torch/nn/modules/module.py", line 727, in _call_impl
    result = self.forward(*input, **kwargs)
TypeError: forward() takes 2 positional arguments but 3 were given
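
For context, the TypeError comes from nn.Sequential itself, not from the GRU layers: Sequential.forward accepts exactly one positional input and passes exactly one value from module to module, so calling self.gru_1(x, fake_hidden) hands it one argument too many. A minimal reproduction with stock PyTorch:

import torch
import torch.nn as nn

seq = nn.Sequential(nn.Linear(4, 4))
x = torch.randn(2, 4)
h = torch.zeros(2, 4)
seq(x)     # fine: Sequential threads a single value through its modules
seq(x, h)  # TypeError: forward() takes 2 positional arguments but 3 were given

Even with a single argument the chain would still break, since each BidirectionalGRU returns the tuple (x, hidden), and Sequential would feed that whole tuple to the next layer as its x.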

However, if I define each layer by hand instead of using nn.Sequential, and pass output and hidden along myself, it works:

class Listener(nn.Module):
    def __init__(
        self, input_feature_dim_listener, hidden_size_listener, num_layers_listener
    ):
        super(Listener, self).__init__()
        assert num_layers_listener >= 1, "Listener should have at least 1 layer"
        self.hidden_size = hidden_size_listener
        self.gru_1 = BidirectionalGRU(
            rnn_dim=input_feature_dim_listener,
            hidden_size=hidden_size_listener,
            batch_first=True,
        )
        self.gru_2 = BidirectionalGRU(
            rnn_dim=hidden_size_listener * 2,
            hidden_size=hidden_size_listener,
            batch_first=True,
        )
        self.gru_3 = BidirectionalGRU(
            rnn_dim=hidden_size_listener * 2,
            hidden_size=hidden_size_listener,
            batch_first=True,
        )
        self.gru_4 = BidirectionalGRU(
            rnn_dim=hidden_size_listener * 2,
            hidden_size=hidden_size_listener,
            batch_first=True,
        )

    def initHidden(self):
        # shape (num_layers * num_directions, batch, hidden_size),
        # hard-coded for batch=8 and hidden_size=512
        return torch.zeros([2, 8, 512])

    def forward(self, x):
        x = x.squeeze().permute(0, 2, 1)
        fake_hidden = self.initHidden()
        output, hidden = self.gru_1(x, fake_hidden)
        output, hidden = self.gru_2(output, hidden)
        output, hidden = self.gru_3(output, hidden)
        output, hidden = self.gru_4(output, hidden)
        return output, hidden

This works fine. I'd like my code to create a variable number of layers based on a parameter, and building an nn.Sequential with a for loop would have allowed that.
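
nn.Sequential can't thread two values between modules, but the parametrised depth is still easy to get with an explicit loop. A minimal sketch of one common pattern, assuming the same BidirectionalGRU and hard-coded hidden shape as above: build the layers in a for loop, hold them in an nn.ModuleList, and pass (output, hidden) along by hand in forward:

import torch
import torch.nn as nn

class Listener(nn.Module):
    def __init__(
        self, input_feature_dim_listener, hidden_size_listener, num_layers_listener
    ):
        super(Listener, self).__init__()
        assert num_layers_listener >= 1, "Listener should have at least 1 layer"
        self.hidden_size = hidden_size_listener
        # the first layer consumes the raw features; every later layer consumes
        # hidden_size * 2 because the GRUs are bidirectional
        dims = [input_feature_dim_listener] + [hidden_size_listener * 2] * (
            num_layers_listener - 1
        )
        self.grus = nn.ModuleList(
            BidirectionalGRU(
                rnn_dim=d, hidden_size=hidden_size_listener, batch_first=True
            )
            for d in dims
        )

    def initHidden(self):
        # same hard-coded shape as the original: (2, batch, hidden_size)
        return torch.zeros([2, 8, 512])

    def forward(self, x):
        x = x.squeeze().permute(0, 2, 1)
        hidden = self.initHidden()
        for gru in self.grus:
            x, hidden = gru(x, hidden)
        return x, hidden

An alternative is to subclass nn.Sequential and override forward to unpack the (x, hidden) tuple, but the explicit loop tends to be easier to read and debug.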

