-
Notifications
You must be signed in to change notification settings - Fork 0
Expand file tree
/
Copy pathmodel_components.py
More file actions
33 lines (27 loc) · 1006 Bytes
/
model_components.py
File metadata and controls
33 lines (27 loc) · 1006 Bytes
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
import torch
from torch import nn
class ClampedActivation(nn.Module):
    """Squash input into the interval [a, b] via a scaled, shifted sigmoid.

    Since sigmoid(x) lies in (0, 1), the output a + (b - a) * sigmoid(x)
    lies in the open interval (a, b), approaching the bounds asymptotically.
    """

    def __init__(self, a: float, b: float):
        super().__init__()
        # Lower and upper bounds of the target range.
        self.a = a
        self.b = b

    def forward(self, x):
        squashed = torch.sigmoid(x)
        span = self.b - self.a
        return self.a + span * squashed
class ShiftedReLU(nn.Module):
"""Shift input to range [a, inf] or [-inf, b] using the ReLU activation."""
def __init__(self, a: float = None, b: float = None):
super().__init__()
self.a = a
self.b = b
def forward(self, x):
if self.a is not None:
relu_output = torch.relu(x)
shifted_output = relu_output + self.a
return shifted_output
elif self.b is not None:
relu_output = torch.relu(x)
shifted_output = torch.clamp(relu_output, max=self.b)
return shifted_output
else:
raise ValueError("One of a or b must be specified.")