-
Notifications
You must be signed in to change notification settings - Fork 3
/
Copy pathmlp.py
30 lines (25 loc) · 967 Bytes
/
mlp.py
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
""" A simple yet generic MLP """
import torch
import torch.nn as nn
class MLP(nn.Module):
    """A simple yet generic multi-layer perceptron.

    Builds ``n_layers`` hidden ``Linear`` layers (the first maps
    ``in_feats -> n_hidden``, the rest ``n_hidden -> n_hidden``) followed by
    one output layer mapping ``n_hidden -> n_classes``, so the module holds
    ``n_layers + 1`` Linear layers in total.

    Parameters
    ----------
    in_feats : int
        Size of each input feature vector.
    n_hidden : int
        Width of every hidden layer.
    n_classes : int
        Size of the output (number of classes / logits).
    n_layers : int
        Number of hidden Linear layers (must be >= 1 for the loop below to
        produce at least the input layer).
    activation : callable
        Non-linearity applied after each hidden layer (e.g. ``torch.relu``).
    dropout : float
        Dropout probability for hidden representations; a falsy value
        (e.g. 0) disables dropout entirely.

    Raises
    ------
    TypeError
        If ``activation`` is not callable. (An explicit raise instead of
        ``assert`` so validation survives ``python -O``.)
    """

    def __init__(self, in_feats, n_hidden, n_classes,
                 n_layers, activation, dropout):
        super().__init__()
        if not callable(activation):
            raise TypeError(
                f"activation must be callable, got {type(activation).__name__}")
        self.layers = nn.ModuleList()
        # Input layer: in_feats -> n_hidden.
        self.layers.append(nn.Linear(in_feats, n_hidden))
        # Remaining hidden layers: n_hidden -> n_hidden.
        for _ in range(n_layers - 1):
            self.layers.append(nn.Linear(n_hidden, n_hidden))
        # Output layer: n_hidden -> n_classes (no activation/dropout applied).
        self.layers.append(nn.Linear(n_hidden, n_classes))
        # dropout == 0 (or other falsy value) means "no dropout module at all".
        self.dropout = nn.Dropout(p=dropout) if dropout else None
        self.activation = activation

    def forward(self, g, features):
        """Compute class logits for ``features``.

        Parameters
        ----------
        g : object
            Unused; kept for signature compatibility with graph-model
            forward(g, features) callers (presumably a DGL graph — callers
            may pass ``None``).
        features : torch.Tensor
            Input batch of shape ``(..., in_feats)``.

        Returns
        -------
        torch.Tensor
            Raw logits of shape ``(..., n_classes)`` (no softmax applied).
        """
        h = features
        for layer in self.layers[:-1]:
            h = layer(h)
            # NOTE(review): dropout is applied *before* the activation here,
            # preserving the original ordering. The two commute for ReLU but
            # not for all activations — confirm this order is intended.
            if self.dropout is not None:
                h = self.dropout(h)
            h = self.activation(h)
        # Final layer produces logits directly — no activation or dropout.
        return self.layers[-1](h)