-
Notifications
You must be signed in to change notification settings - Fork 0
/
Copy pathloss.py
68 lines (53 loc) · 1.81 KB
/
loss.py
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
import torch
import torch.nn as nn
import torch.nn.functional as F
class CharbonnierLoss(nn.Module):
    """Charbonnier (smooth L1) loss: ``mean(sqrt((x - y)^2 + eps))``.

    A differentiable approximation of the L1 loss that is smooth near
    zero, commonly used in image-restoration models.

    Args:
        eps: Small stabilizing constant added under the square root.
            Defaults to 1e-6, matching the original hard-coded value.
    """

    def __init__(self, eps=1e-6):
        super(CharbonnierLoss, self).__init__()
        self.eps = eps

    def forward(self, x, y):
        # `x - y` is equivalent to the original `torch.add(x, -y)`.
        diff = x - y
        error = torch.sqrt(diff * diff + self.eps)
        return torch.mean(error)
class ContentLoss(nn.Module):
    """Transparent layer that records the MSE against a fixed target.

    ``forward`` passes ``input`` through unchanged so the module can be
    inserted into a network; the content loss for the last pass is kept
    on ``self.loss``.
    """

    def __init__(self, target):
        super(ContentLoss, self).__init__()
        # Detach: the target is a constant, not part of the autograd graph.
        self.target = target.detach()

    def forward(self, input):
        mse = F.mse_loss(input, self.target)
        self.loss = mse
        return input
class ContentLossChar(nn.Module):
    """Like ``ContentLoss`` but using the Charbonnier loss instead of MSE.

    Transparent layer: ``forward`` returns ``input`` unchanged and stores
    the loss for the last pass on ``self.loss``.
    """

    def __init__(self, target):
        super(ContentLossChar, self).__init__()
        self.target = target.detach()
        # Build the criterion once here instead of allocating a new
        # CharbonnierLoss module on every forward pass (original behavior).
        self.criterion = CharbonnierLoss()

    def forward(self, input):
        self.loss = self.criterion(input, self.target)
        return input
class StyleLoss(nn.Module):
    """Transparent layer recording the MSE between Gram matrices.

    The target feature map's Gram matrix is precomputed at construction;
    ``forward`` returns ``input`` unchanged and stores the style loss for
    the last pass on ``self.loss``.
    """

    def __init__(self, target_feature):
        super(StyleLoss, self).__init__()
        # Precompute once and detach so the target is a fixed constant.
        self.target = self.gram_matrix(target_feature).detach()

    def forward(self, input):
        gram = self.gram_matrix(input)
        self.loss = F.mse_loss(gram, self.target)
        return input

    def gram_matrix(self, input):
        """Return the normalized Gram matrix of a 4-D feature map."""
        batch, channels, height, width = input.size()
        # Each row is one (batch, channel) feature flattened over space.
        flat = input.view(batch * channels, height * width)
        gram = torch.mm(flat, flat.t())
        # Normalize by the total element count of the feature map.
        return gram.div(batch * channels * height * width)
class StyleLossChar(nn.Module):
    """Like ``StyleLoss`` but using the Charbonnier loss instead of MSE.

    Transparent layer: ``forward`` returns ``input`` unchanged and stores
    the style loss for the last pass on ``self.loss``.
    """

    def __init__(self, target_feature):
        # BUG FIX: the original called super(StyleLoss, self).__init__(),
        # which raises TypeError at construction because StyleLossChar is
        # not a subclass of StyleLoss.
        super(StyleLossChar, self).__init__()
        self.target = self.gram_matrix(target_feature).detach()
        # Build the criterion once instead of allocating a new
        # CharbonnierLoss module on every forward pass.
        self.criterion = CharbonnierLoss()

    def forward(self, input):
        G = self.gram_matrix(input)
        self.loss = self.criterion(G, self.target)
        return input

    def gram_matrix(self, input):
        """Return the normalized Gram matrix of a 4-D feature map."""
        a, b, c, d = input.size()
        features = input.view(a * b, c * d)
        G = torch.mm(features, features.t())
        return G.div(a * b * c * d)