-
Notifications
You must be signed in to change notification settings - Fork 1
/
loss.py
41 lines (32 loc) · 1.36 KB
/
loss.py
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
import torch
import torch.nn as nn
import torch.nn.functional as F
from torch.autograd import Variable
class MyLoss(nn.Module):
    """Mean absolute error (L1) reconstruction loss.

    Thin wrapper around ``nn.L1Loss`` so the criterion can be used
    interchangeably with the other loss modules in this file.
    """

    def __init__(self):
        super(MyLoss, self).__init__()
        # Default reduction is 'mean', i.e. averaged over all elements.
        self.L1_Loss = nn.L1Loss()

    def forward(self, output, label):
        """Return the mean |output - label| over every element."""
        return self.L1_Loss(output, label)
class Spectral_Loss(nn.Module):
    """MSE between an observed RGB image and the RGB rendered from a
    hyperspectral estimate through a spectral response function (SRF).

    ``output`` is NCHW hyperspectral, ``rgb`` is NCHW RGB, and ``srf``
    maps spectral bands to RGB channels (shape (3, bands) — presumed
    from the matmul below; confirm against the caller).
    """

    def __init__(self):
        super(Spectral_Loss, self).__init__()

    def forward(self, output, rgb, srf):
        # Move channels last so each pixel's band vector can be
        # projected through the SRF: (B,H,W,C) @ (C,3) -> (B,H,W,3).
        bands_last = output.permute(0, 2, 3, 1)
        projected = torch.matmul(bands_last, srf.permute(1, 0))
        # Restore NCHW layout for comparison with the target RGB.
        rgb_re = projected.permute(0, 3, 1, 2)
        diff = rgb_re - rgb
        return (diff ** 2).mean()
class Spatial_Loss(nn.Module):
    """MSE between a spatially downsampled estimate and a low-resolution
    reference.

    ``output`` is blurred/decimated with a depthwise (grouped) conv
    using the supplied ``kernel``, then compared against ``hsi_down``.

    Args:
        groups: number of conv groups — one per spectral band for a
            depthwise blur. Default 31 (the original hard-coded value).
        stride: decimation factor of the downsampling conv. Default 8
            (the original hard-coded value).
        padding: conv padding. Default 0 (the original hard-coded value).
    """

    def __init__(self, groups=31, stride=8, padding=0):
        super(Spatial_Loss, self).__init__()
        # Generalized from hard-coded constants; defaults keep the
        # original behavior so existing callers are unaffected.
        self.groups = groups
        self.stride = stride
        self.padding = padding

    def forward(self, output, hsi_down, kernel):
        """Return mean squared error between conv-downsampled ``output``
        and ``hsi_down``. ``kernel`` must match ``self.groups`` (e.g.
        shape (C, C//groups, k, k))."""
        down_output = F.conv2d(output, kernel, groups=self.groups,
                               stride=self.stride, padding=self.padding)
        spatial_loss = torch.pow((down_output - hsi_down), 2).mean()
        return spatial_loss
class SAM_Loss(nn.Module):
    """Spectral Angle Mapper loss: mean angle (radians) between the
    per-pixel spectral vectors of ``output`` and ``label`` along dim 1.

    The 1e-8 terms guard against zero vectors in the cosine ratio.
    """

    def __init__(self):
        super(SAM_Loss, self).__init__()

    def forward(self, output, label):
        """Return the mean spectral angle between ``output`` and ``label``.

        Both tensors are NCHW with spectral bands on dim 1 (presumed
        from the dim=1 reductions — confirm against the caller).
        """
        numerator = torch.sum((output + 1e-8).mul(label + 1e-8), dim=1)
        denominator = torch.sqrt(
            torch.sum((output + 1e-8).mul(output + 1e-8), dim=1)
            * torch.sum((label + 1e-8).mul(label + 1e-8), dim=1))
        ratio = numerator / denominator
        # BUG FIX: float rounding can push the cosine marginally outside
        # [-1, 1] (guaranteed risk when output == label), making acos
        # return NaN and poisoning the whole mean. Clamp just inside the
        # domain; the margin also keeps acos' gradient finite at +/-1.
        ratio = torch.clamp(ratio, -1.0 + 1e-7, 1.0 - 1e-7)
        angle = torch.acos(ratio)
        return torch.mean(angle)