-
Notifications
You must be signed in to change notification settings - Fork 4
/
Copy pathmodels.py
120 lines (95 loc) · 3.6 KB
/
models.py
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
import torch.nn as nn
class Generator64(nn.Module):
    """Encoder-decoder for 64x64 inputs.

    Four stride-2 convolutions halve the spatial size down to 4x4, then
    four stride-2 transposed convolutions bring it back to 64x64. The
    final activation is an optional sigmoid (see ``forward``).

    Args:
        nc_in: number of input channels.
        nc_out: number of output channels.
        ndim: base channel width; doubles at each encoder stage.
    """

    def __init__(self, nc_in, nc_out, ndim):
        super(Generator64, self).__init__()

        # NOTE: stages are built in the same order as the attributes are
        # listed so parameter initialization draws from the RNG in a
        # fixed, reproducible sequence.
        def down(cin, cout, norm=True):
            # Stride-2 conv (halves H and W) + optional BN + LeakyReLU.
            stage = [nn.Conv2d(cin, cout, 4, 2, 1, bias=False)]
            if norm:
                stage.append(nn.BatchNorm2d(cout))
            stage.append(nn.LeakyReLU(0.2, inplace=True))
            return nn.Sequential(*stage)

        def up(cin, cout):
            # Stride-2 transposed conv (doubles H and W) + BN + ReLU.
            return nn.Sequential(
                nn.ConvTranspose2d(cin, cout, 4, 2, 1, bias=False),
                nn.BatchNorm2d(cout),
                nn.ReLU(True))

        self.l1 = down(nc_in, ndim, norm=False)   # no BN on the first stage
        self.l2 = down(ndim, ndim * 2)
        self.l3 = down(ndim * 2, ndim * 4)
        self.l4 = down(ndim * 4, ndim * 8)
        self.l5 = up(ndim * 8, ndim * 4)
        self.l6 = up(ndim * 4, ndim * 2)
        self.l7 = up(ndim * 2, ndim)
        # Last stage: plain transposed conv with bias, no norm/activation.
        self.l8 = nn.Sequential(
            nn.ConvTranspose2d(ndim, nc_out, 4, 2, 1, bias=True),
        )
        self.sig = nn.Sigmoid()

    def forward(self, input, use_sigmoid=True):
        """Run all eight stages; squash with sigmoid unless disabled."""
        out = input
        for stage in (self.l1, self.l2, self.l3, self.l4,
                      self.l5, self.l6, self.l7, self.l8):
            out = stage(out)
        return self.sig(out) if use_sigmoid else out
class Generator257(nn.Module):
    """Encoder-decoder for inputs that are 257 rows tall.

    Same eight-stage conv / transposed-conv pipeline as ``Generator64``,
    but the decoder can only reach 256 rows (powers of two), so a final
    reflection pad of one bottom row restores the 257-row height.

    Args:
        nc_in: number of input channels.
        nc_out: number of output channels.
        ndim: base channel width; doubles at each encoder stage.
    """

    def __init__(self, nc_in, nc_out, ndim):
        super(Generator257, self).__init__()

        # NOTE: stages are built in the same order as the attributes are
        # listed so parameter initialization draws from the RNG in a
        # fixed, reproducible sequence.
        def down(cin, cout, norm=True):
            # Stride-2 conv (halves H and W) + optional BN + LeakyReLU.
            stage = [nn.Conv2d(cin, cout, 4, 2, 1, bias=False)]
            if norm:
                stage.append(nn.BatchNorm2d(cout))
            stage.append(nn.LeakyReLU(0.2, inplace=True))
            return nn.Sequential(*stage)

        def up(cin, cout):
            # Stride-2 transposed conv (doubles H and W) + BN + ReLU.
            return nn.Sequential(
                nn.ConvTranspose2d(cin, cout, 4, 2, 1, bias=False),
                nn.BatchNorm2d(cout),
                nn.ReLU(True))

        self.l1 = down(nc_in, ndim, norm=False)   # no BN on the first stage
        self.l2 = down(ndim, ndim * 2)
        self.l3 = down(ndim * 2, ndim * 4)
        self.l4 = down(ndim * 4, ndim * 8)
        self.l5 = up(ndim * 8, ndim * 4)
        self.l6 = up(ndim * 4, ndim * 2)
        self.l7 = up(ndim * 2, ndim)
        # Last stage: plain transposed conv with bias, no norm/activation.
        self.l8 = nn.Sequential(
            nn.ConvTranspose2d(ndim, nc_out, 4, 2, 1, bias=True),
        )
        # (left, right, top, bottom): reflect one row onto the bottom edge.
        self.pad = nn.ReflectionPad2d((0, 0, 0, 1))
        self.sig = nn.Sigmoid()

    def forward(self, input, use_sigmoid=True):
        """Run all eight stages, pad height by one; optional sigmoid."""
        out = input
        for stage in (self.l1, self.l2, self.l3, self.l4,
                      self.l5, self.l6, self.l7, self.l8):
            out = stage(out)
        out = self.pad(out)
        return self.sig(out) if use_sigmoid else out