-
Notifications
You must be signed in to change notification settings - Fork 0
Expand file tree
/
Copy pathinference_notes.txt
More file actions
45 lines (32 loc) · 1.44 KB
/
inference_notes.txt
File metadata and controls
45 lines (32 loc) · 1.44 KB
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
I changed activations.py
C:\Users\pqlet\pass\IQA\test_task_IQA\iqa_env\Lib\site-packages\torch\nn\modules\activations.py
Particularly:
class GELU(Module):
r"""Applies the Gaussian Error Linear Units function:
.. math:: \text{GELU}(x) = x * \Phi(x)
where :math:`\Phi(x)` is the Cumulative Distribution Function for Gaussian Distribution.
When the approximate argument is 'tanh', Gelu is estimated with:
.. math:: \text{GELU}(x) = 0.5 * x * (1 + \text{Tanh}(\sqrt(2 / \pi) * (x + 0.044715 * x^3)))
Args:
approximate (str, optional): the gelu approximation algorithm to use:
``'none'`` | ``'tanh'``. Default: ``'none'``
Shape:
- Input: :math:`(*)`, where :math:`*` means any number of dimensions.
- Output: :math:`(*)`, same shape as the input.
.. image:: ../scripts/activation_images/GELU.png
Examples::
>>> m = nn.GELU()
>>> input = torch.randn(2)
>>> output = m(input)
"""
__constants__ = ['approximate']
approximate: str
def __init__(self, approximate: str = 'none') -> None:
super(GELU, self).__init__()
print('HELLO')
self.approximate = approximate
def forward(self, input: Tensor) -> Tensor:
# WAS LIKE THIS return F.gelu(input, approximate=self.approximate)
return F.gelu(input, approximate='none')
def extra_repr(self) -> str:
return 'approximate={}'.format(repr(self.approximate))