普通的交叉熵损失
$$\ell(x, y) = -\bigl[\, y \log \sigma(x) + (1 - y) \log\bigl(1 - \sigma(x)\bigr) \,\bigr]$$
BCEWithLogitsLoss
就是在二元交叉熵损失(BCELoss)外面复合一层 sigmoid 函数:先对输入 logit 做 sigmoid,再计算交叉熵。
import torch

# Batch of 10 samples, each with 64 independent binary labels, all positive.
target = torch.ones((10, 64), dtype=torch.float32)
# Predicted logits: every entry is 1.5 (pre-sigmoid score).
output = torch.full((10, 64), 1.5)
# Per-class positive weights, all 1 — i.e. no reweighting.
pos_weight = torch.ones(64)
criterion = torch.nn.BCEWithLogitsLoss(pos_weight=pos_weight)
criterion(output, target)  # every element contributes -log(sigmoid(1.5))
验证:用 sigmoid + 交叉熵公式手动计算,结果应与上面 criterion 的输出一致。
import numpy as np

def sigmoid(x):
    """Logistic function 1 / (1 + e^-x), elementwise.

    Accepts numpy arrays (and CPU torch tensors, which numpy ufuncs
    convert via ``__array__``).
    """
    return 1 / (1 + np.exp(-x))

# Manual binary cross-entropy: -[y*log(p) + (1-y)*log(1-p)] with p = sigmoid(logit).
# Compute sigmoid(output) once instead of twice.
p = sigmoid(output)
loss = -target * np.log(p) - (1 - target) * np.log(1 - p)
torch.mean(loss)  # should equal criterion(output, target), i.e. -log(sigmoid(1.5))