
Notes on torch.nn.Linear

nn.Linear applies its linear transformation to the last dimension of the input, so it accepts tensors of shape (*, in_features), not only 2D (batch, in_features) input.

I plan to use it in place of a 1×1 convolution kernel.

import torch

x = torch.randn(128, 20)  # input of shape (128, 20): batch size 128, 20 features
m = torch.nn.Linear(20, 50)  # in_features=20, out_features=50
output = m(x)

print('m.weight.shape: ', m.weight.shape)
print('m.bias.shape:', m.bias.shape)

print('output.shape:', output.shape)

# nn.Linear computes x @ weight.T + bias; verify the equivalence by hand:
ans = torch.mm(x, m.weight.t()) + m.bias
print('ans.shape:', ans.shape)

print(torch.equal(ans, output))
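
This also works when the input has more than two dimensions, which is what makes the 1×1-convolution replacement possible: the transform is always applied to the last dimension. A quick check reusing the same m from above (the variable names here are just for illustration):

x3 = torch.randn(4, 7, 20)  # extra leading dimensions are fine
out3 = m(x3)                # the linear map acts on the last dimension (size 20)
print(out3.shape)           # torch.Size([4, 7, 50])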

The classification head below uses exactly such a 1×1 convolution; this is the layer I want to replace with nn.Linear:

import torch.nn as nn

class ClassHead(nn.Module):
    def __init__(self, inchannels=512, num_classes=2):
        super(ClassHead, self).__init__()
        self.num_classes = num_classes
        # 1x1 convolution: a per-position linear map from inchannels to num_classes
        self.conv1x1 = nn.Conv2d(inchannels, self.num_classes, kernel_size=(1, 1), stride=1, padding=0)

    def forward(self, x):
        out = self.conv1x1(x)  # (N, num_classes, H, W)
        out = out.permute(0, 2, 3, 1).contiguous()  # (N, H, W, num_classes)
        # flatten spatial positions: (N, H*W, num_classes)
        return out.view(out.shape[0], -1, self.num_classes)
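
Below is one possible sketch of that replacement (the class name ClassHeadLinear is my own placeholder): a 1×1 convolution over inchannels channels is the same per-position linear map as nn.Linear(inchannels, num_classes), so moving the channel axis to the end before the linear layer reproduces the shape handling above.

import torch.nn as nn

class ClassHeadLinear(nn.Module):
    def __init__(self, inchannels=512, num_classes=2):
        super(ClassHeadLinear, self).__init__()
        self.num_classes = num_classes
        # same per-pixel map as the 1x1 conv, expressed as a fully connected layer
        self.fc = nn.Linear(inchannels, num_classes)

    def forward(self, x):
        # move channels last so nn.Linear acts on them: (N, C, H, W) -> (N, H, W, C)
        out = x.permute(0, 2, 3, 1).contiguous()
        out = self.fc(out)  # (N, H, W, num_classes)
        return out.view(out.shape[0], -1, self.num_classes)

Up to initialization the two heads compute the same thing: copying conv1x1.weight.view(num_classes, inchannels) into fc.weight and conv1x1.bias into fc.bias should make their outputs match exactly.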