-
Notifications
You must be signed in to change notification settings - Fork 1
/
Copy pathloss.py
22 lines (16 loc) · 804 Bytes
/
loss.py
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
import torch
from torch import autograd
from torch import nn
class CrossEntropyLoss(nn.Module):
    """
    This criterion (`CrossEntropyLoss`) combines `LogSoftMax` and `NLLLoss` in one single class.
    NOTE: Computes per-element losses for a mini-batch (instead of the average loss over the entire mini-batch).

    Args:
        class_weights: per-class weight factors (sequence of floats, one per class).

    Shape:
        - logits: (N, C) unnormalized scores.
        - target: (N,) integer class indices in [0, C).
        - output: (N,) per-sample weighted losses: -weights[target[i]] * log_softmax(logits)[i, target[i]].
    """

    def __init__(self, class_weights):
        super(CrossEntropyLoss, self).__init__()
        # Normalize over the class dimension; the old no-arg LogSoftmax()
        # relied on deprecated implicit-dim behavior.
        self.log_softmax = nn.LogSoftmax(dim=-1)
        # Register as a buffer (not a hard-coded .cuda() Variable) so the
        # weights follow the module through .to(device)/.cuda()/.cpu() and
        # the class also works on CPU-only machines. autograd.Variable is
        # deprecated since PyTorch 0.4 and is not needed.
        self.register_buffer(
            "class_weights", torch.as_tensor(class_weights, dtype=torch.float32)
        )

    def forward(self, logits, target):
        log_probabilities = self.log_softmax(logits)
        # NLLLoss(x, class) = -weights[class] * x[class]
        # gather picks log_probabilities[i, target[i]] directly (O(N)),
        # instead of the original index_select(...).diag() which built an
        # N x N matrix only to read its diagonal (O(N^2) memory).
        picked = log_probabilities.gather(-1, target.unsqueeze(-1)).squeeze(-1)
        return -self.class_weights[target] * picked