Abracadabra

WGAN implemented by PyTorch

1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
#!/usr/bin/env python
# Wasserstein Generative Adversarial Networks (WGAN) example in PyTorch.
import numpy as np
import torch
import torch.nn as nn
import torch.nn.functional as F
import torch.optim as optim
from torch.autograd import Variable
# Data params: the target ("real") distribution is Gaussian N(data_mean, data_stddev^2).
data_mean = 4
data_stddev = 1.25
# Model params
g_input_size = 1 # Random noise dimension coming into generator, per output vector
g_hidden_size = 50 # Generator complexity
g_output_size = 1 # size of generated output vector
d_input_size = 100 # Minibatch size - cardinality of distributions
d_hidden_size = 50 # Discriminator complexity
d_output_size = 1 # Single dimension for 'real' vs. 'fake'
minibatch_size = d_input_size
d_learning_rate = 2e-4 # 2e-4
g_learning_rate = 2e-4
# optim_betas = (0.9, 0.999)
num_epochs = 30000
print_interval = 200
# d_steps = 1 # 'k' steps in the original GAN paper. Can put the discriminator on higher training freq than generator
d_steps = 5  # WGAN: train the critic several times per generator step
g_steps = 1
# ### Uncomment only one of these
#(name, preprocess, d_input_func) = ("Raw data", lambda data: data, lambda x: x)
# d_input_func doubles the critic's input width because decorate_with_diffs
# concatenates the per-element deviations onto the raw sample.
(name, preprocess, d_input_func) = ("Data and variances", lambda data: decorate_with_diffs(data, 2.0), lambda x: x * 2)
print("Using data [%s]" % (name))
# ##### DATA: Target data and generator input data
def get_distribution_sampler(mu, sigma):
    """Return a sampler: n -> torch.Tensor of shape (1, n) drawn from N(mu, sigma)."""
    def sample(n):
        return torch.Tensor(np.random.normal(mu, sigma, (1, n)))
    return sample
def get_generator_input_sampler():
    """Return a sampler: (m, n) -> uniform-[0, 1) tensor of shape (m, n).

    Deliberately uniform, NOT Gaussian, so the generator has to learn the transform.
    """
    def sample(m, n):
        return torch.rand(m, n)
    return sample
# ##### MODELS: Generator model and discriminator model
class Generator(nn.Module):
    """Three-layer MLP generator: noise (input_size) -> sample (output_size).

    The final layer is linear so the generator can emit unbounded real values.
    """

    def __init__(self, input_size, hidden_size, output_size):
        super(Generator, self).__init__()
        self.map1 = nn.Linear(input_size, hidden_size)
        self.map2 = nn.Linear(hidden_size, hidden_size)
        self.map3 = nn.Linear(hidden_size, output_size)

    def forward(self, x):
        x = F.elu(self.map1(x))
        # torch.sigmoid: F.sigmoid is deprecated in modern PyTorch.
        x = torch.sigmoid(self.map2(x))
        return self.map3(x)
class Discriminator(nn.Module):
    """MLP critic: maps a (batch, input_size) tensor to an unbounded score.

    The output layer is intentionally linear (no sigmoid): in WGAN the critic
    approximates a Wasserstein score rather than a probability.
    """

    def __init__(self, input_size, hidden_size, output_size):
        super(Discriminator, self).__init__()
        self.map1 = nn.Linear(input_size, hidden_size)
        self.map2 = nn.Linear(hidden_size, hidden_size)
        self.map3 = nn.Linear(hidden_size, output_size)

    def forward(self, x):
        hidden = F.elu(self.map1(x))
        hidden = F.elu(self.map2(hidden))
        return self.map3(hidden)
def extract(v):
    """Return the tensor's values as a flat Python list of floats.

    Uses flatten().tolist() instead of the deprecated Storage.tolist()
    round-trip; identical for the contiguous tensors used in this script.
    """
    return v.data.flatten().tolist()
def stats(d):
    """Return [mean, population std] of the sequence d."""
    arr = np.asarray(d)
    return [arr.mean(), arr.std()]
def decorate_with_diffs(data, exponent):
    """Append per-element deviations from the row mean, raised to `exponent`.

    For a (m, n) tensor, returns (m, 2n): the raw values concatenated with
    (x - row_mean) ** exponent, so the critic also sees spread information.

    The original implementation extracted the mean as a Python scalar via
    `mean.tolist()[0][0]`, which hard-coded a (1, n) input and breaks on
    modern PyTorch where `torch.mean(x, 1)` returns a 1-D tensor; broadcasting
    with keepdim=True handles any row count.
    """
    # .detach() keeps the mean a constant w.r.t. autograd, matching the
    # original code, which computed it from data.data.
    mean = torch.mean(data, dim=1, keepdim=True).detach()
    diffs = torch.pow(data - mean, exponent)
    return torch.cat([data, diffs], dim=1)
# Samplers for real data and for the generator's input noise.
d_sampler = get_distribution_sampler(data_mean, data_stddev)
gi_sampler = get_generator_input_sampler()
G = Generator(input_size=g_input_size, hidden_size=g_hidden_size, output_size=g_output_size)
D = Discriminator(input_size=d_input_func(d_input_size), hidden_size=d_hidden_size, output_size=d_output_size)
# criterion = nn.BCELoss() # Binary cross entropy: http://pytorch.org/docs/nn.html#bceloss
# WGAN: avoid momentum-based optimizers (momentum/Adam); RMSprop is recommended.
# The original used Adam for G, contradicting its own recommendation above.
d_optimizer = optim.RMSprop(D.parameters(), lr=d_learning_rate)
g_optimizer = optim.RMSprop(G.parameters(), lr=g_learning_rate)
# ---- Training loop ----
# WGAN recipe: train the critic D d_steps times per generator step using
# loss = mean(D(fake)) - mean(D(real)), then clip D's weights to [-c, c]
# to (crudely) enforce the Lipschitz constraint.
for epoch in range(num_epochs):
    for d_index in range(d_steps):
        # 1. Train D on real+fake
        D.zero_grad()
        # 1A: Train D on real
        d_real_data = Variable(d_sampler(d_input_size))
        d_real_decision = D(preprocess(d_real_data))
        # d_real_error = criterion(d_real_decision, Variable(torch.ones(1))) # ones = true
        d_real_error = -torch.mean(d_real_decision)  # WGAN: maximize D(real) == minimize -mean
        d_real_error.backward() # compute/store gradients, but don't change params
        # 1B: Train D on fake
        d_gen_input = Variable(gi_sampler(minibatch_size, g_input_size))
        d_fake_data = G(d_gen_input).detach() # detach to avoid training G on these labels
        # .t(): (minibatch, 1) -> (1, minibatch) row vector expected by D
        d_fake_decision = D(preprocess(d_fake_data.t()))
        # d_fake_error = criterion(d_fake_decision, Variable(torch.zeros(1))) # zeros = fake
        d_fake_error = torch.mean(d_fake_decision)  # WGAN: minimize D(fake)
        d_fake_error.backward()
        d_optimizer.step() # Only optimizes D's parameters; changes based on stored gradients from backward()
        # Weight Clipping: clamp every critic parameter to [-0.01, 0.01] after each update
        for p in D.parameters():
            p.data.clamp_(-0.01, 0.01)
    for g_index in range(g_steps):
        # 2. Train G on D's response (but DO NOT train D on these labels)
        G.zero_grad()
        gen_input = Variable(gi_sampler(minibatch_size, g_input_size))
        g_fake_data = G(gen_input)
        dg_fake_decision = D(preprocess(g_fake_data.t()))
        # g_error = criterion(dg_fake_decision, Variable(torch.ones(1))) # we want to fool, so pretend it's all genuine
        g_error = -torch.mean(dg_fake_decision)  # WGAN: maximize D(G(z))
        g_error.backward()
        g_optimizer.step() # Only optimizes G's parameters
    # Periodic progress report: loss values plus mean/std of real vs. generated samples.
    if epoch % print_interval == 0:
        print("%s: D: %s/%s G: %s (Real: %s, Fake: %s) " % (epoch,
                                                            extract(d_real_error)[0],
                                                            extract(d_fake_error)[0],
                                                            extract(g_error)[0],
                                                            stats(extract(d_real_data)),
                                                            stats(extract(d_fake_data))))

相比之前文章中的代码,所做的修改仅有以下几点(理论支持参考我之前转发的一篇博文):

  • 判别模型最后一层直接用线型激活函数,而不是用Sigmoid函数

    1
    2
    3
    4
    5
    6
    7
    8
    9
    10
    11
    12
    class Discriminator(nn.Module):
    def __init__(self, input_size, hidden_size, output_size):
    super(Discriminator, self).__init__()
    self.map1 = nn.Linear(input_size, hidden_size)
    self.map2 = nn.Linear(hidden_size, hidden_size)
    self.map3 = nn.Linear(hidden_size, output_size)
    def forward(self, x):
    x = F.elu(self.map1(x))
    x = F.elu(self.map2(x))
    # return F.sigmoid(self.map3(x))
    return self.map3(x)
  • 生成模型与判别模型的loss函数进行修改

    1
    2
    3
    4
    5
    # 判别模型
    # d_real_error = criterion(d_real_decision, Variable(torch.ones(1))) # ones = true
    d_real_error = -torch.mean(d_real_decision)
    # d_fake_error = criterion(d_fake_decision, Variable(torch.zeros(1))) # zeros = fake
    d_fake_error = torch.mean(d_fake_decision)
    1
    2
    3
    # 生成模型
    # g_error = criterion(dg_fake_decision, Variable(torch.ones(1))) # we want to fool, so pretend it's all genuine
    g_error = -torch.mean(dg_fake_decision)
  • 每次更新判别器的参数之后把它们的绝对值截断到不超过一个固定常数c (这里取的是0.01)

    1
    2
    3
    # Weight Clipping
    for p in D.parameters():
    p.data.clamp_(-0.01, 0.01)
  • 不要用基于动量的优化算法(包括momentum和Adam),推荐RMSProp,SGD也行

    1
    2
    3
    4
    # d_optimizer = optim.Adam(D.parameters(), lr=d_learning_rate, betas=optim_betas)
    # g_optimizer = optim.Adam(G.parameters(), lr=g_learning_rate, betas=optim_betas)
    d_optimizer = optim.RMSprop(D.parameters(), lr=d_learning_rate)
    g_optimizer = optim.RMSprop(G.parameters(), lr=g_learning_rate)

实验结果如下:

1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
ewan@ubuntu:~/Documents/gan/pytorch-generative-adversarial-networks$ python wgan_pytorch.py
Using data [Data and variances]
0: D: -0.00291868206114/-0.0098686888814 G: 0.0101090818644 (Real: [3.9948547959327696, 1.1746644935894675], Fake: [-0.49681734740734101, 0.012067284766516822])
200: D: -0.607654631138/0.150195807219 G: -0.148662015796 (Real: [3.8201908415555952, 1.2529761319208725], Fake: [1.3578049659729003, 0.068574913818859801])
400: D: -0.463035583496/0.187745466828 G: -0.199109002948 (Real: [3.9679448902606964, 1.0966020511088672], Fake: [2.7924281167984009, 0.10128610818888226])
600: D: -0.195529654622/-0.0762325078249 G: 0.0709114596248 (Real: [4.0289887523651124, 1.130490874393266], Fake: [3.2025665378570558, 0.11113662831727719])
800: D: -0.267909675837/-0.0125531600788 G: 0.0149036226794 (Real: [3.8386318933963777, 1.1596351907184081], Fake: [2.9168305301666262, 0.18930262941797507])
1000: D: -0.305421292782/0.0375043526292 G: -0.0430304855108 (Real: [4.036220012307167, 1.2074152140825467], Fake: [2.980299861431122, 0.34328656032877736])
1200: D: -0.52364641428/0.34957420826 G: -0.336933553219 (Real: [4.2644650164060298, 1.3088487291781874], Fake: [3.5564545428752901, 0.93418534418781807])
1400: D: 0.0167735591531/-0.0165516249835 G: 0.0153960846364 (Real: [4.005841153860092, 1.2205788960289556], Fake: [3.6258796131610871, 1.3573166859479273])
1600: D: 0.00350501108915/-0.0680181980133 G: 0.0898797661066 (Real: [4.0096039956808092, 1.3040836884406217], Fake: [4.2868031549453738, 1.1195239069375269])
1800: D: -0.017161777243/-0.0345846936107 G: 0.00348377227783 (Real: [3.8140131759643556, 1.2696980193364791], Fake: [3.6976867783069611, 1.3915195404268279])
2000: D: 0.0342473760247/-0.0408688522875 G: 0.042895399034 (Real: [3.8277990472316743, 1.2935257967493754], Fake: [4.0553032100200657, 1.0920039067237071])
2200: D: -0.0247789677233/-0.0973515734076 G: 0.0561916455626 (Real: [4.0955437314510341, 1.3877739508665123], Fake: [4.2196925377845762, 1.1830430815754616])
2400: D: 0.0279140714556/-0.0485894307494 G: 0.051317743957 (Real: [4.1299532175064089, 1.2504224526907901], Fake: [3.6290897476673125, 1.4143234578612853])
2600: D: -0.0277859847993/0.0174758173525 G: -0.0226532723755 (Real: [4.1205433750152585, 1.1041964193630893], Fake: [4.1067905998229977, 1.1112897398730086])
2800: D: 0.0298485141248/-0.0404594913125 G: 0.0436173528433 (Real: [3.8474615824222567, 1.376119005659207], Fake: [4.1015409564971925, 1.1240560154112995])
3000: D: -0.00891616754234/-0.0320432707667 G: -0.00200085714459 (Real: [4.2869654643535613, 1.2452766642692439], Fake: [4.0315418589115142, 1.1215360762164166])
3200: D: 0.125043600798/-0.141845062375 G: 0.180229827762 (Real: [4.1041129958629607, 1.2669502216408666], Fake: [3.9350157177448271, 1.2041076720740758])
3400: D: 0.00801010616124/-0.0085571501404 G: 0.00837498996407 (Real: [4.1750692510604859, 1.1555020360853467], Fake: [3.7647246885299683, 1.3171958013324914])
3600: D: -0.0108975172043/0.00422720238566 G: 0.0679717883468 (Real: [4.2474800306558613, 1.1525478772018374], Fake: [3.9568253087997438, 1.2016376545965635])
3800: D: 0.174184441566/-0.0896890684962 G: 0.132265836 (Real: [3.6444931725133212, 1.4372372290167961], Fake: [4.1011261808872224, 1.2724649929743026])
4000: D: 0.0152352238074/-0.0211527496576 G: 0.0241769701242 (Real: [4.298748409748077, 1.2334686924805018], Fake: [3.8711180412769317, 1.2375391560481097])
4200: D: 0.00989393051714/-0.00974932964891 G: 0.00978021323681 (Real: [3.8817882406711579, 1.2274675510251392], Fake: [4.4020989084243771, 1.1135816847780859])
4400: D: 0.110887765884/-0.195888444781 G: 0.185447320342 (Real: [4.0501037514209743, 1.3391687317184524], Fake: [3.9222843647003174, 1.0870922014501809])
4600: D: 0.0116609586403/0.0201185699552 G: -0.0251631941646 (Real: [4.097090389728546, 1.190104784646782], Fake: [4.0819661796092985, 1.3105115963188185])
4800: D: 0.00524073652923/-0.00464708916843 G: 0.0057549579069 (Real: [3.8242294645309447, 1.2650652243397946], Fake: [4.1804288566112522, 1.2938617118884317])
5000: D: -0.142288714647/0.0809833407402 G: -0.128578931093 (Real: [3.7870366251468659, 1.1074026548781364], Fake: [3.9050006806850432, 1.298625653396472])
5200: D: 0.00282126059756/-0.000789406709373 G: 0.00220172246918 (Real: [3.8225140625238421, 1.2743034472730719], Fake: [4.1409763026237485, 1.1529764181372026])
5400: D: 0.0688827335835/-0.143126890063 G: 0.177940413356 (Real: [3.9872682169079781, 1.3030584347635661], Fake: [4.1435868382453922, 1.1051301998899086])
5600: D: -0.0711650624871/0.0871955379844 G: -0.134067937732 (Real: [3.9407234787940979, 1.1742557675838305], Fake: [4.2017855679988863, 1.2602829191705458])
5800: D: 0.000587910413742/0.000934307463467 G: 0.00103192776442 (Real: [4.0573597419261933, 1.2623953329979454], Fake: [3.8340791404247283, 1.339685454959999])
6000: D: 0.00821333751082/-0.12042221427 G: 0.0573511943221 (Real: [4.1211176145076749, 1.2369626300361085], Fake: [3.6600258636474607, 1.3520569881721223])
6200: D: 0.00682129478082/0.001195830293 G: 0.00338123179972 (Real: [4.0544225633144375, 1.2749644040623289], Fake: [4.1039247584342959, 1.2693975476155579])
6400: D: -0.00134055688977/0.00293467193842 G: -0.00249383598566 (Real: [4.0987548109889032, 1.4076174670922545], Fake: [3.8387181401252746, 1.0786043697026602])
6600: D: -0.0879130512476/0.00771049968898 G: 0.0105132861063 (Real: [4.0482780200242994, 1.3183274437573238], Fake: [4.1890638065338131, 1.0659647273618436])
6800: D: -0.0613053664565/0.00630968250334 G: 0.00345144513994 (Real: [3.9884191691875457, 1.2496578805847449], Fake: [4.0083020174503323, 1.1951200826269044])
7000: D: -0.00451065413654/0.0126703362912 G: -0.0153036154807 (Real: [4.1685840785503387, 1.0996732796623405], Fake: [3.8199899888038633, 1.3533216043161698])
7200: D: -0.00164794549346/-0.026672417298 G: 0.00926311034709 (Real: [3.9697488701343535, 1.1614389493998623], Fake: [4.0069102811813355, 1.332521020789126])
7400: D: 0.0479753166437/-0.00875021051615 G: 0.0273390654474 (Real: [3.9136831092834474, 1.3941734665017038], Fake: [3.9792356503009798, 1.2934269648663987])
7600: D: 0.0299390181899/-0.0244860406965 G: 0.0235633179545 (Real: [3.9529241484403612, 1.3003400363613378], Fake: [4.1008431494235991, 1.1966721541073959])
7800: D: -0.106096304953/-0.00319136725739 G: 0.0128062078729 (Real: [3.8472019118070602, 1.3776392180901436], Fake: [3.9847766911983489, 1.1441746730859625])
8000: D: -0.0541454330087/0.0360651388764 G: -0.0368629023433 (Real: [4.001156520843506, 1.2686070678293795], Fake: [3.7170648825168611, 1.2630303399418346])
8200: D: 0.0385981723666/-0.0308057032526 G: 0.0258536860347 (Real: [4.0773776215314861, 1.1340129155680212], Fake: [4.025383379459381, 1.327217397616157])
8400: D: 0.0323679596186/-0.0363558754325 G: 0.0379030331969 (Real: [4.068932784795761, 1.1369141540559231], Fake: [3.9889052593708039, 1.292853623065962])
8600: D: -0.00726405344903/-0.0198955982924 G: -0.0463897511363 (Real: [4.1387977415323256, 1.2983278993502099], Fake: [3.9634271264076233, 1.2541944672524785])
8800: D: 0.0214307252318/-0.0323143824935 G: 0.0147992642596 (Real: [3.8878944924473764, 1.2858782523769321], Fake: [3.9738967609405518, 1.2617951400969825])
9000: D: 0.0408670082688/-0.0408971831203 G: 0.0338222235441 (Real: [3.8935359448194502, 1.2102182389881371], Fake: [4.1026345968246458, 1.1619291320679421])
9200: D: 0.0334619283676/-0.0487795248628 G: 0.043896459043 (Real: [4.0024692767858507, 1.3035652548917089], Fake: [4.2494437253475192, 1.1284849306040097])
9400: D: -0.0662252604961/0.0567465648055 G: -0.0975001305342 (Real: [3.9983484780788423, 1.2727864024938771], Fake: [4.1652837800979619, 1.2757452301144367])
9600: D: -0.0437398403883/0.0547546446323 G: -0.0755473896861 (Real: [3.9568819630146028, 1.2089398910557572], Fake: [4.0577589499950406, 1.254854081501209])
9800: D: 0.00763822672889/-0.00536214653403 G: 0.00614025257528 (Real: [4.0391950635612011, 1.3067671354062065], Fake: [3.8441065263748171, 1.3304282270617658])
10000: D: 0.0420219749212/-0.000623900443316 G: 0.0955700650811 (Real: [4.0145307508111001, 1.2332284552616837], Fake: [4.1720886218547824, 1.3184165599194013])
10200: D: -0.0580518990755/-0.0247586201876 G: 0.0602744668722 (Real: [3.9131186211109164, 1.1547087942243295], Fake: [3.8442363095283509, 1.3100046689992075])
10400: D: 0.0350324884057/-0.0446610674262 G: 0.0443669557571 (Real: [3.9732863992452621, 1.0900301299537192], Fake: [4.1616083049774169, 1.1977412391369193])
10600: D: 0.0309124011546/-0.0327286012471 G: 0.0324002951384 (Real: [4.1375643616914752, 1.3491791182650394], Fake: [4.1360740911960603, 1.2026694938475944])
10800: D: 0.0251356009394/-0.0600365921855 G: 0.0182816889137 (Real: [3.9463955080509185, 1.209152327657528], Fake: [4.0492063975334167, 1.1931266255697688])
11000: D: -0.0226037632674/0.0645630285144 G: -0.00730620510876 (Real: [4.0881260240077975, 1.1610880829221104], Fake: [4.1015665113925932, 1.2508656591000114])
11200: D: -0.203874662519/0.129180550575 G: -0.137796327472 (Real: [3.9598375034332274, 1.3812077142172803], Fake: [4.0204527139663693, 1.2581185304639424])
11400: D: -0.0908113643527/0.0762611478567 G: -0.0800914615393 (Real: [4.0449822235107424, 1.3556268019161497], Fake: [3.6170706963539123, 1.2538775159913775])
11600: D: 0.0127945197746/-0.0136474575847 G: 0.0115108992904 (Real: [3.8434849847108126, 1.4191038384690144], Fake: [3.6834572017192841, 1.3749317238019667])
11800: D: -0.0162955205888/0.00703074596822 G: 0.0635928660631 (Real: [4.0656388866901398, 1.1733235519103811], Fake: [4.2119219648838042, 1.2884029757138897])
12000: D: 0.00804834254086/0.0114726442844 G: -0.0416676998138 (Real: [4.0812106788158413, 1.2768383065648503], Fake: [3.8802548873424532, 1.1682818121544778])
12200: D: 0.00880087539554/-0.00853784382343 G: 0.00878115184605 (Real: [3.9501210238039492, 1.2609298922930623], Fake: [4.016851776838303, 1.1958214043365074])
12400: D: -0.0908231809735/0.0565089061856 G: -0.0594271346927 (Real: [4.2189184671640394, 1.2027120432908258], Fake: [4.0232754671573643, 1.0601718488768348])
12600: D: 0.0851941630244/-0.0584048479795 G: 0.0588090792298 (Real: [3.7772543743252753, 1.130624908263915], Fake: [3.9319257283210756, 1.2051865367836399])
12800: D: -0.0560997053981/-0.0248175561428 G: -0.0423211455345 (Real: [4.1257915179431439, 1.3557555020469465], Fake: [3.9178791642189026, 1.1446278900771538])
13000: D: -0.021879715845/-0.0102085536346 G: 0.049164660275 (Real: [3.8891402572393416, 1.340302981622111], Fake: [4.1098264539241791, 1.1973190716986095])
13200: D: 0.00609071925282/0.000411780551076 G: 0.000873317010701 (Real: [4.0079734873771669, 1.0734378076269375], Fake: [4.16044829249382, 1.24589904041035])
13400: D: 0.0619652941823/-0.0918542221189 G: 0.0685269758105 (Real: [4.0059312301874161, 1.2294789910478197], Fake: [3.935395474433899, 1.2204450041984987])
13600: D: -0.0172225553542/0.0116953141987 G: -0.0139160379767 (Real: [3.9669277960062028, 1.2823045137798716], Fake: [3.9422059106826781, 1.1863138013678882])
13800: D: -0.0343380719423/-0.0341883003712 G: 0.0315745696425 (Real: [3.9349321211874484, 1.3515663905606217], Fake: [4.0361522984504701, 1.1889982801815446])
14000: D: -0.0781251713634/0.0379043146968 G: -0.0811991766095 (Real: [3.9622140777111055, 1.3270647840200485], Fake: [3.958692445755005, 1.1882249562538854])
14200: D: -0.00332566350698/0.00831608474255 G: -0.00968919880688 (Real: [4.0868309581279751, 1.2649052154720533], Fake: [3.9996533656120299, 1.2424544463340046])
14400: D: 0.00310544949025/-0.00344840623438 G: 0.002937767189 (Real: [3.9016156983375549, 1.3394072373207904], Fake: [3.8578492951393129, 1.2802578210924642])
14600: D: 0.00954662263393/-0.00955961830914 G: 0.00952168926597 (Real: [3.951248247921467, 1.3720542385537113], Fake: [3.9343765902519228, 1.3196731296807518])
14800: D: -0.118950776756/-0.0234697107226 G: -0.0475859940052 (Real: [4.224924056529999, 1.2198087928062376], Fake: [3.8152624690532684, 1.407979253312801])
15000: D: -0.0943605676293/0.0735622048378 G: -0.104274556041 (Real: [3.8776874673366546, 1.2303474890793162], Fake: [3.8042025637626646, 1.2641632638711853])
15200: D: -0.000172574073076/-0.0136091653258 G: -0.0342488661408 (Real: [3.9725669431686401, 1.3636566655582356], Fake: [3.7739255595207215, 1.286560381931142])
15400: D: 0.0314685925841/-0.0321847423911 G: 0.0224884226918 (Real: [3.9619563330709933, 1.191049295263032], Fake: [3.7949125266075132, 1.144446158701051])
15600: D: 0.00764724984765/-0.00575984269381 G: 0.0064948592335 (Real: [3.7679578655958177, 1.3149928065248815], Fake: [4.2461013138294224, 1.0951171764483221])
15800: D: -0.0777092948556/0.0849689692259 G: -0.0924058929086 (Real: [3.932852659225464, 1.2573061632959293], Fake: [4.1913282787799835, 1.2836186853339466])
16000: D: -0.050300322473/-0.0388206243515 G: 0.0357397347689 (Real: [4.0962446802854542, 1.4029011906591213], Fake: [4.070586755275726, 1.1271350494375147])
16200: D: 0.0753296241164/-0.0198806431144 G: 0.0808434784412 (Real: [3.8760965394973756, 1.1409524988246751], Fake: [3.8057461333274842, 1.2098168757605468])
16400: D: -0.0372299104929/0.0351875349879 G: -0.0454745069146 (Real: [4.0939353704452515, 1.2848196043395506], Fake: [3.9558720147609709, 1.2728235384902225])
16600: D: -0.0101340338588/0.0110626723617 G: -0.0111222248524 (Real: [3.986977145075798, 1.3259823635587689], Fake: [3.9554380464553831, 1.2907862191410846])
16800: D: -0.0494117587805/0.0523075163364 G: -0.0535500720143 (Real: [3.8448826253414152, 1.3117905469567066], Fake: [3.7438095784187317, 1.2535150365672076])
17000: D: 0.0156182665378/-0.0128254238516 G: 0.0146374739707 (Real: [3.9421124708652497, 1.1052540236280552], Fake: [3.8871842885017394, 1.2453511923222738])
17200: D: 0.0429224148393/-0.0480623096228 G: 0.0399292707443 (Real: [3.9799196243286135, 1.2941615666073001], Fake: [4.1375756561756134, 1.2109081564509361])
17400: D: 0.00968278944492/-0.00968171562999 G: 0.00966327264905 (Real: [3.935849468111992, 1.2695645007229639], Fake: [3.8996728241443632, 1.3144268300578967])
17600: D: -0.00301436148584/-0.000785265117884 G: 0.00103102996945 (Real: [3.9284519279003143, 1.2341036313393001], Fake: [3.6972431838512421, 1.3855687155856462])
17800: D: 0.116903491318/-0.0937560945749 G: 0.172590240836 (Real: [4.2645069471001626, 1.3080363040531007], Fake: [3.9567726898193358, 1.2967345311449683])
18000: D: -0.0608675032854/0.0476493611932 G: -0.00500288326293 (Real: [4.0269851100444791, 1.2116770270672328], Fake: [4.1152600276470181, 1.281199668474674])
18200: D: -0.0734401643276/0.0987718477845 G: -0.0819599106908 (Real: [3.8394976514577865, 1.2749873300796422], Fake: [4.0419886147975923, 1.327963817546014])
18400: D: 0.0497582927346/-0.155175164342 G: 0.13303783536 (Real: [3.7719902545213699, 1.0897407967420649], Fake: [3.7615046393871308, 1.3089916470515932])
18600: D: 0.0239700898528/-0.0381186343729 G: 0.0276864990592 (Real: [4.188409751355648, 1.285584105229516], Fake: [4.0233318042755126, 1.2681527004757882])
18800: D: 0.00111512281001/-0.0264507420361 G: 0.0286112166941 (Real: [3.9199141567945479, 1.2738313063627613], Fake: [4.1139781177043915, 1.330488711219485])
19000: D: -0.0473541393876/0.111352369189 G: -0.0523310601711 (Real: [3.7932651308923959, 1.3147127405682739], Fake: [3.7947627007961273, 1.0531299503292175])
19200: D: -0.0304779503495/0.045797213912 G: -0.0440187454224 (Real: [4.0896886540949344, 1.3392233824907658], Fake: [3.8646358847618103, 1.304593284039177])
19400: D: 0.194737583399/-0.192367076874 G: 0.230072781444 (Real: [3.9661449289321897, 1.2822216197459986], Fake: [4.0850893747806545, 1.3070266600721223])
19600: D: -0.195656016469/0.194369539618 G: -0.204969212413 (Real: [3.9445683220028878, 1.2908669424594961], Fake: [4.0273511683940884, 1.3484937484757897])
19800: D: 0.276149004698/-0.262592494488 G: 0.261271834373 (Real: [3.9244625726342202, 1.2138755313418907], Fake: [3.896045311689377, 1.3239168205792633])
20000: D: -0.037402831018/0.0541176348925 G: -0.0254273694009 (Real: [3.7887831997871397, 1.0838328443531984], Fake: [4.1803205323219297, 1.2069399210575202])
20200: D: -0.14391182363/0.154710128903 G: -0.127932995558 (Real: [3.9718186306953429, 1.1938920103826984], Fake: [3.8623993241786958, 1.1992380687067719])
20400: D: 0.277315825224/-0.276595175266 G: 0.280247867107 (Real: [3.9932824140787124, 1.2951435399231526], Fake: [3.9807376277446749, 1.1784780448683547])
20600: D: -0.213297829032/0.245908752084 G: -0.243222758174 (Real: [3.8720276713371278, 1.2542419688526467], Fake: [3.8206098222732545, 1.1661960388796837])
20800: D: 0.114619217813/-0.100926779211 G: 0.0922625884414 (Real: [3.9682870441675187, 1.3188621677189192], Fake: [3.5771069145202636, 1.1369803011602813])
21000: D: -0.303231596947/0.294602781534 G: -0.288874447346 (Real: [3.991482014656067, 1.0697520343686426], Fake: [3.674229063987732, 1.162594834704991])
21200: D: -0.074034973979/0.0798109993339 G: -0.0742214098573 (Real: [3.5809044003486634, 1.1568557007313405], Fake: [4.0297869884967801, 1.262183063172349])
21400: D: 0.262162327766/-0.297971874475 G: 0.296678453684 (Real: [4.0233621561527251, 1.1153293685921177], Fake: [4.3256152606010438, 1.293378983535336])
21600: D: 0.253285288811/-0.265974611044 G: 0.271079391241 (Real: [3.8655495065450669, 1.3046362904478612], Fake: [4.0383575105667111, 1.1593536714254398])
21800: D: -0.668483495712/0.693548798561 G: -0.597621560097 (Real: [4.0561192989349362, 1.3785832256993071], Fake: [4.0196917986869813, 1.1727416034901368])
22000: D: -0.247271433473/0.260498434305 G: -0.254284113646 (Real: [4.0449540507793422, 1.1182831642815363], Fake: [3.9410277414321899, 1.35662918383663])
22200: D: 0.0106530245394/-0.0105826444924 G: 0.010412142612 (Real: [3.9709725368022917, 1.1935909496194108], Fake: [3.6618342864513398, 1.1302755516153604])
22400: D: -0.0474079549313/0.0512998178601 G: -0.0483585894108 (Real: [4.0366528975963591, 1.255590190060166], Fake: [4.4536384451389317, 1.1817009846117434])
22600: D: -0.322408914566/0.294503211975 G: -0.294557034969 (Real: [4.1648625326156612, 1.2910376071493044], Fake: [3.9514351594448089, 1.2428792207747439])
22800: D: -0.0832418426871/0.0778618454933 G: -0.0830294713378 (Real: [4.1286677682399748, 1.2808552112825371], Fake: [4.0503418278694152, 1.2931609764101457])
23000: D: -0.369321852922/0.350715816021 G: -0.379378199577 (Real: [4.0539671546220779, 1.2841527209665038], Fake: [3.7385779893398285, 1.226034767157562])
23200: D: -0.20978730917/0.198253154755 G: -0.20125605166 (Real: [3.8997612628340721, 1.2476609639285596], Fake: [3.9131766259670258, 1.1745094337139723])
23400: D: -0.0713088735938/0.070287771523 G: -0.0685144215822 (Real: [3.8823761761188509, 1.2554855061572396], Fake: [3.916521146297455, 1.1589148704590277])
23600: D: 0.0427192002535/-0.0458992123604 G: 0.0468493178487 (Real: [4.2497683775424955, 1.3534774394799314], Fake: [3.7455072367191313, 1.2035723328660535])
23800: D: 0.0886824280024/-0.089180290699 G: 0.0824339240789 (Real: [4.1368276840448379, 1.3053732424006685], Fake: [3.7440953600406646, 1.3403098424499473])
24000: D: 0.0765529945493/-0.0702198073268 G: 0.067143753171 (Real: [4.1424573111534118, 1.1894154051554844], Fake: [3.9408028304576872, 1.311870950939225])
24200: D: -0.0332999974489/0.0289861243218 G: -0.0238233078271 (Real: [4.0625021523237228, 1.3193496247910601], Fake: [4.0214765596389768, 1.3626613178115112])
24400: D: 0.0116833550856/-0.0433083474636 G: 0.0294151268899 (Real: [4.155729653835297, 1.2443573708805233], Fake: [4.0276014816761014, 1.2064370896635035])
24600: D: -0.143586605787/0.176585748792 G: -0.18224260211 (Real: [4.1486411762237552, 1.1859516848633762], Fake: [4.1132693731784817, 1.1922180729014844])
24800: D: -0.0138712525368/0.0168411824852 G: -0.0119427125901 (Real: [4.1591709744930263, 1.2359258557380455], Fake: [4.1677398359775539, 1.3845231707709731])
25000: D: 0.255919009447/-0.294253230095 G: 0.279962956905 (Real: [3.9463270044326784, 1.1874795319708413], Fake: [4.2903580510616299, 1.3555421660554561])
25200: D: -0.0276325326413/0.0174208488315 G: -0.0236964281648 (Real: [3.9243721216917038, 1.0837602743237815], Fake: [3.6880193889141082, 1.3551960082382857])
25400: D: 0.0133695462719/-0.0217840373516 G: 0.0382910817862 (Real: [3.9248281943798067, 1.3498579423514441], Fake: [3.9377611076831815, 1.3147392264391])
25600: D: 0.0533282607794/-0.0582511797547 G: 0.0426382124424 (Real: [3.9252138528227807, 1.2343049898537437], Fake: [4.1364144349098204, 1.2410536065514364])
25800: D: -0.00288704037666/0.00770187750459 G: -0.0114914979786 (Real: [3.9242496091127395, 1.2788150012319115], Fake: [4.0345127677917478, 1.1882337663095883])
26000: D: -0.0608727261424/0.0541118755937 G: -0.0474198237062 (Real: [4.0897465288639072, 1.3095601996023096], Fake: [4.1400825273990627, 1.2148829163174772])
26200: D: -0.130559697747/0.0733794793487 G: -0.104144588113 (Real: [4.2607862049341199, 1.2942193499055861], Fake: [3.8867506885528567, 1.1942672801186012])
26400: D: -0.0439343079925/0.0573879256845 G: -0.0878697857261 (Real: [3.7808335113525389, 1.0880880845236942], Fake: [3.9782328522205352, 1.1620106342824015])
26600: D: 0.0152015341446/0.00366508681327 G: 0.041159953922 (Real: [3.8900859886407853, 1.1779470629112894], Fake: [3.7596992158889773, 1.2139592079531667])
26800: D: 0.0352714285254/-0.1031877473 G: 0.067874789238 (Real: [4.0695308989286421, 1.1837713697563146], Fake: [4.0929770147800442, 1.0965869589580517])
27000: D: -0.0881021544337/0.0813493356109 G: -0.0242269244045 (Real: [3.9890777540206908, 1.2553969722414431], Fake: [3.7988330614566803, 1.2567013288504758])
27200: D: 0.0763045027852/-0.0917293503881 G: 0.114218316972 (Real: [4.0028738850355152, 1.3423566094628674], Fake: [3.9770897746086122, 1.3219552807466088])
27400: D: 0.0594872310758/-0.0451167076826 G: 0.0368666872382 (Real: [4.0800592017173765, 1.2152901513624952], Fake: [3.9476736617088317, 1.2989705597833583])
27600: D: 0.0153470486403/-0.0201481245458 G: -0.000402322039008 (Real: [4.1604018148779867, 1.3359014716469342], Fake: [3.9977971708774565, 1.2944576179632961])
27800: D: -0.00789823569357/0.00908922962844 G: -0.0111076626927 (Real: [4.0212037134170533, 1.1874018724012747], Fake: [4.1083386635780332, 1.2509297017041943])
28000: D: 0.00757996272296/-0.00654019229114 G: 0.00611820165068 (Real: [3.7911120998859404, 1.1977103659955959], Fake: [4.0841165268421173, 1.1898253993115502])
28200: D: 0.0131957577541/0.00322831980884 G: -0.00111622922122 (Real: [4.1888789300620557, 1.3496568725947327], Fake: [4.0611115002632143, 1.3183184144220856])
28400: D: -0.0306499581784/0.0331647247076 G: -0.0338053703308 (Real: [4.1849153059720994, 1.3391440077022734], Fake: [3.8500063753128053, 1.3092803392722017])
28600: D: -0.0750854164362/0.0745137408376 G: -0.0692436397076 (Real: [4.2219353467226028, 1.3228632865628431], Fake: [3.9156518685817718, 1.322625042830196])
28800: D: 0.0400990955532/-0.0271217841655 G: 0.0072197439149 (Real: [4.1668396210670471, 1.1685380084057959], Fake: [3.8380984902381896, 1.362370341203504])
29000: D: -0.0643707811832/0.0576644167304 G: -0.100686855614 (Real: [3.8912058281898498, 1.1764897014192157], Fake: [4.1498241519927976, 1.2432322677870791])
29200: D: 0.0442187860608/-0.0331076569855 G: 0.0377507209778 (Real: [3.995900819301605, 1.1999502583881319], Fake: [3.9349853229522704, 1.3676764998638458])
29400: D: -0.0614512637258/0.0583380833268 G: -0.059112302959 (Real: [4.1833238875865932, 1.4038158613161691], Fake: [4.1426575899124147, 1.2694314780433735])
29600: D: -0.0337703973055/0.0392336845398 G: -0.0504648312926 (Real: [4.1217511665821078, 1.2264251023812502], Fake: [3.838116307258606, 1.2309841481033876])
29800: D: 0.129453405738/-0.13672092557 G: 0.143395990133 (Real: [3.8660407388210296, 1.2221890139039508], Fake: [4.0156518769264222, 1.3044469158238432])