import torch as t
import torch.nn as nn


class ResidualBlock(nn.Module):
    def __init__(self, in_feats: int, out_feats: int, first_stride=1):
        """
        A single residual block as per https://arxiv.org/pdf/1512.03385
        """
        super().__init__()
        # Main (left) branch: two 3x3 convolutions, each followed by batch
        # norm; only the first convolution applies the (possibly strided)
        # downsampling.
        self.left = nn.Sequential(
            nn.Conv2d(in_feats, out_feats, 3, first_stride, 1),
            nn.BatchNorm2d(out_feats),
            nn.ReLU(),
            nn.Conv2d(out_feats, out_feats, 3, 1, 1),
            nn.BatchNorm2d(out_feats),
        )
        # Skip (right) branch: identity when the input already matches the
        # output shape; otherwise a strided 1x1 convolution projects the input
        # to the output's channel count and spatial size.
        if first_stride == 1 and in_feats == out_feats:
            self.right = nn.Identity()
        else:
            self.right = nn.Sequential(
                nn.Conv2d(in_feats, out_feats, 1, first_stride),
                nn.BatchNorm2d(out_feats),
            )
        self.relu = nn.ReLU()
    def forward(self, x: t.Tensor) -> t.Tensor:
        """
        x: shape (batch, in_feats, height, width)
        Return: shape (batch, out_feats, height / first_stride, width / first_stride)

        When no downsampling is needed, the skip (right) branch passes the
        input through unchanged; otherwise it applies a strided 1x1 convolution
        and batch norm so the two branches can be summed elementwise.
        """
        return self.relu(self.left(x) + self.right(x))


class BlockGroup(nn.Module):
    def __init__(self, n_blocks: int, in_feats: int, out_feats: int, first_stride=1, dropout_fraction: float = 0.0):
        """An n_blocks-long sequence of ResidualBlock where only the first block uses the provided stride."""
        super().__init__()
        # Optional dropout in front of the group (a no-op at the default 0.0),
        # then one block that changes the channel count and applies the
        # stride, then n_blocks - 1 shape-preserving blocks.
        blocks = [
            nn.Dropout(p=dropout_fraction),
            ResidualBlock(in_feats, out_feats, first_stride),
            *[ResidualBlock(out_feats, out_feats) for _ in range(n_blocks - 1)],
        ]
        self.blocks = nn.Sequential(*blocks)
    def forward(self, x: t.Tensor) -> t.Tensor:
        """
        Compute the forward pass.

        x: shape (batch, in_feats, height, width)
        Return: shape (batch, out_feats, height / first_stride, width / first_stride)
        """
        return self.blocks(x)


class AveragePool(nn.Module):
    def forward(self, x: t.Tensor) -> t.Tensor:
        """
        x: shape (batch, channels, height, width)
        Return: shape (batch, channels)
        """
        # Global average over the spatial dimensions; equivalent to
        # nn.AdaptiveAvgPool2d(1) followed by flattening.
        return t.mean(x, dim=(2, 3))


class ResNet34(nn.Module):
    def __init__(
        self,
        n_blocks_per_group=(3, 4, 6, 3),
        out_features_per_group=(64, 128, 256, 512),
        first_strides_per_group=(1, 2, 2, 2),
        n_classes=1000,
        dropout_fraction=0.0,
        first_conv_size=7,
    ):
        super().__init__()
        self.comps = nn.Sequential(
            # Stem: 7x7 stride-2 convolution, batch norm, ReLU, then a 3x3
            # stride-2 max pool (padding 1, so 224x224 inputs reach the first
            # block group at 56x56, matching the paper).
            nn.Conv2d(3, out_features_per_group[0], first_conv_size, 2, 3),
            nn.BatchNorm2d(out_features_per_group[0]),
            nn.ReLU(),
            nn.MaxPool2d(3, 2, 1),
            # Four block groups; each group's input width is the previous
            # group's output width, with the stem feeding the first group.
            *[BlockGroup(n, in_features, out_features, first_stride, dropout_fraction)
              for n, in_features, out_features, first_stride in zip(
                  n_blocks_per_group,
                  [out_features_per_group[0], *out_features_per_group],
                  out_features_per_group,
                  first_strides_per_group)],
            # Global average pool over spatial dims, then the linear head.
            AveragePool(),
            nn.Linear(out_features_per_group[-1], n_classes),
        )
    def forward(self, x: t.Tensor) -> t.Tensor:
        """
        x: shape (batch, channels, height, width)
        Return: shape (batch, n_classes)
        """
        return self.comps(x)
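

# A minimal smoke test, assuming a CPU-only run with random inputs: it checks
# only that tensor shapes line up, not that the model computes anything
# meaningful. The sizes below (batch 2, 224x224 images, 10 classes) are
# illustrative choices, not part of the original module.
if __name__ == "__main__":
    model = ResNet34(n_classes=10)
    x = t.randn(2, 3, 224, 224)  # batch of 2 random RGB images
    with t.no_grad():
        out = model(x)
    print(out.shape)  # expected: torch.Size([2, 10])

    # A strided ResidualBlock halves the spatial dims and changes the channel
    # count via its 1x1 projection shortcut.
    block = ResidualBlock(64, 128, first_stride=2)
    y = block(t.randn(2, 64, 56, 56))
    print(y.shape)  # expected: torch.Size([2, 128, 28, 28])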