# Copyright (c) Microsoft Corporation.
# Licensed under the MIT license.

import torch
import torch.nn as nn


class CrossEntropyLabelSmooth(nn.Module):
    """Cross-entropy loss with label smoothing.

    The one-hot targets are smoothed towards the uniform distribution:
    ``smoothed = (1 - epsilon) * one_hot + epsilon / num_classes``.
    """

    def __init__(self, num_classes, epsilon):
        super(CrossEntropyLabelSmooth, self).__init__()
        self.num_classes = num_classes
        self.epsilon = epsilon
        self.logsoftmax = nn.LogSoftmax(dim=1)

    def forward(self, inputs, targets):
        log_probs = self.logsoftmax(inputs)
        # zeros_like places the one-hot targets on the same device as the logits
        # (no need to hard-code a device such as "cuda:6").
        targets = torch.zeros_like(log_probs).scatter_(1, targets.unsqueeze(1), 1)
        # Smooth the one-hot targets towards the uniform distribution.
        targets = (1 - self.epsilon) * targets + self.epsilon / self.num_classes
        # Mean over the batch dimension, sum over the class dimension.
        loss = (-targets * log_probs).mean(0).sum()
        return loss
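

# Illustrative usage sketch for CrossEntropyLabelSmooth; the shapes and
# hyperparameters below are assumptions, not part of the original module:
#
#     criterion = CrossEntropyLabelSmooth(num_classes=10, epsilon=0.1)
#     logits = torch.randn(4, 10)          # (batch_size, num_classes) raw scores
#     labels = torch.randint(0, 10, (4,))  # (batch_size,) integer class indices
#     loss = criterion(logits, labels)     # scalar tensor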


def accuracy(output, target, topk=(1, 5)):
    """Computes the precision@k for the specified values of k."""
    maxk = max(topk)
    batch_size = target.size(0)

    _, pred = output.topk(maxk, 1, True, True)
    pred = pred.t()
    # one-hot case
    if target.ndimension() > 1:
        target = target.max(1)[1]

    correct = pred.eq(target.view(1, -1).expand_as(pred))

    res = dict()
    for k in topk:
        # correct_k = correct[:k].view(-1).float().sum(0)  # original version
        correct_k = correct[:k].reshape(-1).float().sum(0)  # .view(-1) is not supported on this non-contiguous slice
- res["acc{}".format(k)] = correct_k.mul_(1.0 / batch_size).item()
- return res
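

if __name__ == "__main__":
    # Minimal smoke test, assuming random data; the shapes and hyperparameters
    # below are illustrative and not part of the original module.
    torch.manual_seed(0)
    num_classes, batch_size = 10, 8
    logits = torch.randn(batch_size, num_classes)
    labels = torch.randint(0, num_classes, (batch_size,))

    criterion = CrossEntropyLabelSmooth(num_classes=num_classes, epsilon=0.1)
    print("label-smoothed loss:", criterion(logits, labels).item())

    # accuracy() takes raw logits and integer (or one-hot) targets and
    # returns a dict such as {"acc1": ..., "acc5": ...}.
    print("top-k accuracy:", accuracy(logits, labels, topk=(1, 5)))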