modules.py
import tensorflow as tf
import sonnet as snt


class EdgeUpdate(snt.Module):
    """Edge-update MLP: leaky-ReLU hidden layers with a configurable output activation."""

    def __init__(self, input_size, output_size, layer_sizes=None, output_activation=False,
                 w_init=None, b_init=None, name=None):
        super(EdgeUpdate, self).__init__(name=name)
        # input_size is stored for reference only; snt.Linear infers its input
        # size at first call.
        self.input_size, self.output_size = input_size, output_size
        self.output_activation = output_activation
        # Default to a single hidden layer as wide as the output.
        layer_sizes = [output_size] if layer_sizes is None else layer_sizes
        self.n_layers = len(layer_sizes)
        self.layers = []
        for i in range(self.n_layers):
            self.layers.append(snt.Linear(layer_sizes[i],
                                          w_init=w_init,
                                          b_init=b_init,
                                          name='linear{0:03d}'.format(i)))
        self.layers.append(snt.Linear(output_size,
                                      w_init=w_init,
                                      b_init=b_init,
                                      name='linear_out'))

    def __call__(self, x):
        # Hidden layers: linear transform followed by a leaky ReLU.
        for layer in self.layers[:-1]:
            x = layer(x)
            x = tf.nn.leaky_relu(x)
        # Final linear layer, optionally followed by a named output activation.
        x = self.layers[-1](x)
        if self.output_activation == 'leaky_relu':
            x = tf.nn.leaky_relu(x)
        elif self.output_activation == 'relu':
            x = tf.nn.relu(x)
        elif self.output_activation == 'softplus':
            x = tf.nn.softplus(x)
        elif self.output_activation == 'sigmoid':
            x = tf.nn.sigmoid(x)
        elif self.output_activation == 'none':
            pass
        else:
            # False (the default) means the output stays linear.
            assert self.output_activation == False
        return x


class NodeUpdate(snt.Module):
    """Node-update MLP; structurally identical to EdgeUpdate."""

    def __init__(self, input_size, output_size, layer_sizes=None, output_activation=False,
                 w_init=None, b_init=None, name=None):
        super(NodeUpdate, self).__init__(name=name)
        self.input_size, self.output_size = input_size, output_size
        self.output_activation = output_activation
        layer_sizes = [output_size] if layer_sizes is None else layer_sizes
        self.n_layers = len(layer_sizes)
        self.layers = []
        for i in range(self.n_layers):
            self.layers.append(snt.Linear(layer_sizes[i],
                                          w_init=w_init,
                                          b_init=b_init,
                                          name='linear{0:03d}'.format(i)))
        self.layers.append(snt.Linear(output_size,
                                      w_init=w_init,
                                      b_init=b_init,
                                      name='linear_out'))

    def __call__(self, x):
        # Hidden layers use leaky ReLU; the final layer's activation is
        # selected by output_activation.
        for layer in self.layers[:-1]:
            x = layer(x)
            x = tf.nn.leaky_relu(x)
        x = self.layers[-1](x)
        if self.output_activation == 'leaky_relu':
            x = tf.nn.leaky_relu(x)
        elif self.output_activation == 'relu':
            x = tf.nn.relu(x)
        elif self.output_activation == 'softplus':
            x = tf.nn.softplus(x)
        elif self.output_activation == 'sigmoid':
            x = tf.nn.sigmoid(x)
        elif self.output_activation == 'none':
            pass
        else:
            assert self.output_activation == False
        return x


class GlobalUpdate(snt.Module):
    """Global-update MLP; structurally identical to EdgeUpdate and NodeUpdate."""

    def __init__(self, input_size, output_size, layer_sizes=None, output_activation=False,
                 w_init=None, b_init=None, name=None):
        super(GlobalUpdate, self).__init__(name=name)
        self.input_size, self.output_size = input_size, output_size
        self.output_activation = output_activation
        layer_sizes = [output_size] if layer_sizes is None else layer_sizes
        self.n_layers = len(layer_sizes)
        self.layers = []
        for i in range(self.n_layers):
            self.layers.append(snt.Linear(layer_sizes[i],
                                          w_init=w_init,
                                          b_init=b_init,
                                          name='linear{0:03d}'.format(i)))
        self.layers.append(snt.Linear(output_size,
                                      w_init=w_init,
                                      b_init=b_init,
                                      name='linear_out'))

    def __call__(self, x):
        # Hidden layers use leaky ReLU; the final layer's activation is
        # selected by output_activation.
        for layer in self.layers[:-1]:
            x = layer(x)
            x = tf.nn.leaky_relu(x)
        x = self.layers[-1](x)
        if self.output_activation == 'leaky_relu':
            x = tf.nn.leaky_relu(x)
        elif self.output_activation == 'relu':
            x = tf.nn.relu(x)
        elif self.output_activation == 'softplus':
            x = tf.nn.softplus(x)
        elif self.output_activation == 'sigmoid':
            x = tf.nn.sigmoid(x)
        elif self.output_activation == 'none':
            pass
        else:
            assert self.output_activation == False
        return x
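

# ---------------------------------------------------------------------------
# Minimal usage sketch (an illustration, not part of the original module):
# the class names suggest these are meant as the edge/node/global update
# functions of a graph-network block, each acting as a small MLP on a 2-D
# [batch, features] tensor. The sizes and tensors below are arbitrary
# assumptions chosen only to show the call pattern.
if __name__ == '__main__':
    edge_update = EdgeUpdate(input_size=16, output_size=8,
                             layer_sizes=[32, 32],
                             output_activation='leaky_relu')
    dummy_edges = tf.random.normal([64, 16])  # 64 edges, 16 features each
    updated_edges = edge_update(dummy_edges)  # -> shape [64, 8]
    print(updated_edges.shape)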