天天看點

【PyTorch】SiLU激活函數

問題

方法

SiLU激活函數介紹

SiLU激活函數介紹

import torch
from torch import nn

class Net(nn.Module):
    """Minimal demo network: one 3x3 convolution followed by SiLU.

    The convolution maps 3 input channels to 32 output channels with
    stride 1 and padding 1, so spatial dimensions are preserved.
    """

    def __init__(self) -> None:
        super().__init__()
        # 3x3 kernel, padding=1, stride=1 -> same H/W as the input.
        self.conv = nn.Conv2d(3, 32, 3, padding=1, stride=1)
        # SiLU (a.k.a. swish): SiLU(x) = x * sigmoid(x).
        self.silu = nn.SiLU()

    def forward(self, x):
        """Apply conv then SiLU; returns an (N, 32, H, W) tensor."""
        return self.silu(self.conv(x))

if __name__ == '__main__':

    import netron

    device = 'cuda:0' if torch.cuda.is_available() else 'cpu'
    x = torch.randn(1, 3, 224, 224).to(device)

    # Bug fix: the model must live on the same device as the input —
    # the original left the net on CPU while x was moved to CUDA, which
    # makes torch.onnx.export fail with a device-mismatch error.
    net = Net().to(device)
    # Export in inference mode (relevant for dropout/batch-norm layers).
    net.eval()

    # Bug fix: torch.onnx.export writes an ONNX graph, not a pickled
    # state_dict, so '.pth' was a misleading extension.
    model_file = 'demo.onnx'
    torch.onnx.export(net, x, model_file)
    # Open the exported graph in netron's browser-based viewer.
    netron.start(model_file)

結語

繼續閱讀