# William Kavanagh September 2020
# Compare the metagame as it evolved in either season to what we would expect based on idealised choices.
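# helper_fns is assumed to supply `pairs` (the list of 28 two-letter pair
# codes used as the row/column order below) and `db` (the MongoDB handle
# queried in predict_values) via the star import below.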
from helper_fns import *
import matplotlib.pyplot as plt
import math
import numpy as np
# Optimal matchup tables for both seasons: entry [i][j] is the probability
# that pair i (row) beats pair j (column) under idealised play, so mirror
# matches on the diagonal are 0.5.
beta_optimality = [
[0.5, 0.5219, 0.5048, 0.4234, 0.389, 0.4148, 0.4626, 0.6031, 0.6107, 0.3839, 0.4375, 0.4183, 0.5543, 0.4445, 0.4806, 0.3239, 0.4793, 0.4277, 0.4369, 0.3203, 0.4048, 0.472, 0.3372, 0.3535, 0.3614, 0.3009, 0.3509, 0.3416],
[0.4781, 0.5, 0.4926, 0.3857, 0.419, 0.4352, 0.435, 0.5766, 0.585, 0.448, 0.4904, 0.481, 0.5765, 0.492, 0.5216, 0.3419, 0.521, 0.4639, 0.4849, 0.3881, 0.4486, 0.5085, 0.4639, 0.3573, 0.4335, 0.3714, 0.4184, 0.389],
[0.4952, 0.5074, 0.5, 0.3989, 0.4523, 0.3333, 0.4629, 0.583, 0.5927, 0.4697, 0.5273, 0.4045, 0.5766, 0.5145, 0.6118, 0.407, 0.4598, 0.4904, 0.5104, 0.4563, 0.4274, 0.5481, 0.4681, 0.3497, 0.4608, 0.3891, 0.486, 0.3707],
[0.5766, 0.6143, 0.6011, 0.5, 0.4634, 0.4573, 0.5493, 0.6839, 0.6547, 0.5046, 0.5058, 0.4741, 0.6396, 0.6273, 0.6793, 0.3702, 0.5767, 0.4244, 0.6063, 0.408, 0.5557, 0.5428, 0.4776, 0.4201, 0.481, 0.3756, 0.3588, 0.4058],
[0.611, 0.581, 0.5477, 0.5366, 0.5, 0.4793, 0.5361, 0.6494, 0.6175, 0.6109, 0.5717, 0.5377, 0.6256, 0.4582, 0.6269, 0.4527, 0.4747, 0.4906, 0.5409, 0.4519, 0.4949, 0.5458, 0.5083, 0.4933, 0.4969, 0.4312, 0.4741, 0.4366],
[0.5852, 0.5648, 0.6667, 0.5427, 0.5207, 0.5, 0.6392, 0.6544, 0.6741, 0.552, 0.6011, 0.5177, 0.6474, 0.4605, 0.7031, 0.4247, 0.5126, 0.5329, 0.5876, 0.4192, 0.645, 0.5993, 0.5721, 0.4614, 0.5291, 0.4508, 0.5333, 0.474],
[0.5374, 0.565, 0.5371, 0.4507, 0.4639, 0.3608, 0.5, 0.6615, 0.6797, 0.5226, 0.57, 0.4653, 0.6641, 0.5827, 0.5911, 0.4217, 0.507, 0.5585, 0.5697, 0.476, 0.4566, 0.6058, 0.5291, 0.4228, 0.5058, 0.3981, 0.5071, 0.4007],
[0.3969, 0.4234, 0.417, 0.3161, 0.3506, 0.3456, 0.3385, 0.5, 0.4817, 0.2356, 0.3422, 0.3289, 0.4647, 0.38, 0.3087, 0.3018, 0.4147, 0.3136, 0.3044, 0.2759, 0.3065, 0.3531, 0.2635, 0.2556, 0.2671, 0.2576, 0.3178, 0.2844],
[0.3893, 0.415, 0.4073, 0.3453, 0.3825, 0.3259, 0.3203, 0.5183, 0.5, 0.2507, 0.4529, 0.3007, 0.4728, 0.3489, 0.3234, 0.3856, 0.3778, 0.3264, 0.3154, 0.3398, 0.278, 0.3458, 0.289, 0.286, 0.2777, 0.3072, 0.3857, 0.2653],
[0.6161, 0.552, 0.5303, 0.4954, 0.3891, 0.448, 0.4774, 0.7644, 0.7493, 0.5, 0.4476, 0.4814, 0.698, 0.5015, 0.5247, 0.2691, 0.5026, 0.3418, 0.4842, 0.3236, 0.4711, 0.4135, 0.3402, 0.3234, 0.3516, 0.2811, 0.3281, 0.3178],
[0.5625, 0.5096, 0.4727, 0.4942, 0.4283, 0.3989, 0.43, 0.6578, 0.5471, 0.5524, 0.5, 0.493, 0.5549, 0.3535, 0.5006, 0.4248, 0.4291, 0.4329, 0.4019, 0.3532, 0.413, 0.4988, 0.3711, 0.4345, 0.3974, 0.3624, 0.3922, 0.401],
[0.5817, 0.519, 0.5955, 0.5259, 0.4623, 0.4823, 0.5347, 0.6711, 0.6993, 0.5186, 0.507, 0.5, 0.6185, 0.344, 0.5625, 0.3369, 0.4755, 0.4062, 0.51, 0.3633, 0.5589, 0.5397, 0.4083, 0.3891, 0.4562, 0.3427, 0.4567, 0.4091],
[0.4457, 0.4235, 0.4234, 0.3604, 0.3744, 0.3526, 0.3359, 0.5353, 0.5272, 0.302, 0.4451, 0.3815, 0.5, 0.3532, 0.3619, 0.3216, 0.4818, 0.3418, 0.3786, 0.3178, 0.3147, 0.3715, 0.3319, 0.3195, 0.2942, 0.31, 0.3679, 0.2899],
[0.5555, 0.508, 0.4855, 0.3727, 0.5418, 0.5395, 0.4173, 0.62, 0.6511, 0.4985, 0.6465, 0.656, 0.6468, 0.5, 0.5877, 0.5042, 0.6029, 0.4243, 0.5361, 0.5054, 0.5749, 0.4468, 0.5613, 0.3637, 0.4858, 0.6113, 0.5151, 0.5098],
[0.5194, 0.4784, 0.3882, 0.3207, 0.3731, 0.2969, 0.4089, 0.6913, 0.6766, 0.4753, 0.4994, 0.4375, 0.6381, 0.4123, 0.5, 0.2614, 0.423, 0.2169, 0.3953, 0.3043, 0.3666, 0.3378, 0.3799, 0.221, 0.2559, 0.2919, 0.1858, 0.2248],
[0.6761, 0.6581, 0.593, 0.6298, 0.5473, 0.5753, 0.5783, 0.6982, 0.6144, 0.7309, 0.5752, 0.6631, 0.6784, 0.4958, 0.7386, 0.5, 0.6974, 0.5068, 0.6122, 0.3965, 0.6307, 0.531, 0.5718, 0.6232, 0.6201, 0.5578, 0.4463, 0.6029],
[0.5207, 0.479, 0.5402, 0.4233, 0.5253, 0.4874, 0.493, 0.5853, 0.6222, 0.4974, 0.5709, 0.5245, 0.5182, 0.3971, 0.577, 0.3026, 0.5, 0.3868, 0.5758, 0.4408, 0.6441, 0.4749, 0.5145, 0.3803, 0.4454, 0.4845, 0.4631, 0.4553],
[0.5723, 0.5361, 0.5096, 0.5756, 0.5094, 0.4671, 0.4415, 0.6864, 0.6736, 0.6582, 0.5671, 0.5938, 0.6582, 0.5757, 0.7831, 0.4932, 0.6132, 0.5, 0.696, 0.4203, 0.5326, 0.5017, 0.6129, 0.5474, 0.654, 0.4714, 0.4726, 0.4624],
[0.5631, 0.5151, 0.4896, 0.3937, 0.4591, 0.4124, 0.4303, 0.6956, 0.6846, 0.5158, 0.5981, 0.49, 0.6214, 0.4639, 0.6047, 0.3878, 0.4242, 0.304, 0.5, 0.3919, 0.4647, 0.4291, 0.4594, 0.2885, 0.4031, 0.3662, 0.3543, 0.3107],
[0.6797, 0.6119, 0.5437, 0.592, 0.5481, 0.5808, 0.524, 0.7241, 0.6602, 0.6764, 0.6468, 0.6367, 0.6822, 0.4946, 0.6957, 0.6035, 0.5592, 0.5797, 0.6081, 0.5, 0.6238, 0.5425, 0.59, 0.5868, 0.5709, 0.5574, 0.5382, 0.5864],
[0.5952, 0.5514, 0.5726, 0.4443, 0.5051, 0.355, 0.5434, 0.6935, 0.722, 0.5289, 0.587, 0.4411, 0.6853, 0.4251, 0.6334, 0.3693, 0.3559, 0.4674, 0.5353, 0.3762, 0.5, 0.5472, 0.5513, 0.3339, 0.4719, 0.3041, 0.4623, 0.3678],
[0.528, 0.4915, 0.4519, 0.4572, 0.4542, 0.4007, 0.3942, 0.6469, 0.6542, 0.5865, 0.5012, 0.4603, 0.6285, 0.5532, 0.6622, 0.469, 0.5251, 0.4983, 0.5709, 0.4575, 0.4528, 0.5, 0.5065, 0.4366, 0.5063, 0.4057, 0.509, 0.3903],
[0.6628, 0.5361, 0.5319, 0.5224, 0.4917, 0.4279, 0.4709, 0.7365, 0.711, 0.6598, 0.6289, 0.5917, 0.6681, 0.4387, 0.6201, 0.4282, 0.4855, 0.3871, 0.5406, 0.41, 0.4487, 0.4935, 0.5, 0.4818, 0.4817, 0.4188, 0.3806, 0.395],
[0.6465, 0.6427, 0.6503, 0.5799, 0.5067, 0.5386, 0.5772, 0.7444, 0.714, 0.6766, 0.5655, 0.6109, 0.6805, 0.6363, 0.779, 0.3768, 0.6197, 0.4526, 0.7115, 0.4132, 0.6661, 0.5634, 0.5182, 0.5, 0.5794, 0.4298, 0.3813, 0.4166],
[0.6386, 0.5665, 0.5392, 0.519, 0.5031, 0.4709, 0.4942, 0.7329, 0.7223, 0.6484, 0.6026, 0.5438, 0.7058, 0.5142, 0.7441, 0.3799, 0.5546, 0.346, 0.5969, 0.4291, 0.5281, 0.4937, 0.5183, 0.4206, 0.5, 0.4187, 0.3778, 0.39],
[0.6991, 0.6286, 0.6109, 0.6244, 0.5688, 0.5492, 0.6019, 0.7424, 0.6928, 0.7189, 0.6376, 0.6573, 0.69, 0.3887, 0.7081, 0.4422, 0.5155, 0.5286, 0.6338, 0.4426, 0.6959, 0.5943, 0.5812, 0.5702, 0.5813, 0.5, 0.5122, 0.5077],
[0.6491, 0.5816, 0.514, 0.6412, 0.5259, 0.4667, 0.4929, 0.6822, 0.6143, 0.6719, 0.6078, 0.5433, 0.6321, 0.4849, 0.8142, 0.5537, 0.5369, 0.5274, 0.6457, 0.4618, 0.5377, 0.491, 0.6194, 0.6187, 0.6222, 0.4878, 0.5, 0.4511],
[0.6584, 0.611, 0.6293, 0.5942, 0.5634, 0.526, 0.5993, 0.7156, 0.7347, 0.6822, 0.599, 0.5909, 0.7101, 0.4902, 0.7752, 0.3971, 0.5447, 0.5376, 0.6893, 0.4136, 0.6322, 0.6097, 0.605, 0.5834, 0.61, 0.4923, 0.5489, 0.5]
]
tango_optimality = [
[0.5, 0.6552, 0.7224, 0.5131, 0.5547, 0.5311, 0.4846, 0.692, 0.6191, 0.5253, 0.5593, 0.5183, 0.6239, 0.6057, 0.4541, 0.5052, 0.6168, 0.5109, 0.5348, 0.5461, 0.6402, 0.5018, 0.5076, 0.4618, 0.4895, 0.5409, 0.5749, 0.4995],
[0.3448, 0.5, 0.4712, 0.2989, 0.4873, 0.4572, 0.3865, 0.5062, 0.508, 0.3169, 0.5271, 0.5125, 0.4931, 0.4025, 0.4773, 0.3952, 0.6188, 0.3473, 0.4055, 0.4636, 0.5952, 0.4119, 0.5285, 0.4285, 0.36, 0.6223, 0.4232, 0.5291],
[0.2776, 0.5288, 0.5, 0.354, 0.4497, 0.4802, 0.416, 0.505, 0.5065, 0.2991, 0.4827, 0.3892, 0.4814, 0.4891, 0.529, 0.4572, 0.621, 0.4585, 0.4698, 0.4823, 0.594, 0.523, 0.5541, 0.4743, 0.3974, 0.5794, 0.4652, 0.5144],
[0.4869, 0.7011, 0.646, 0.5, 0.5525, 0.6062, 0.4953, 0.6607, 0.6512, 0.4991, 0.6184, 0.5029, 0.5976, 0.6189, 0.6701, 0.4851, 0.6529, 0.4936, 0.6123, 0.5295, 0.6259, 0.5336, 0.581, 0.4859, 0.5128, 0.5498, 0.518, 0.4309],
[0.4453, 0.5127, 0.5503, 0.4475, 0.5, 0.5476, 0.4088, 0.5451, 0.5269, 0.4935, 0.5751, 0.4791, 0.5258, 0.354, 0.5539, 0.4632, 0.5457, 0.4239, 0.4436, 0.4642, 0.5655, 0.4837, 0.5173, 0.4828, 0.4358, 0.5425, 0.4747, 0.4646],
[0.4689, 0.5428, 0.5198, 0.3938, 0.4524, 0.5, 0.4358, 0.5752, 0.5255, 0.4509, 0.4789, 0.4912, 0.5683, 0.5301, 0.6706, 0.4442, 0.6187, 0.5325, 0.4229, 0.425, 0.6339, 0.3681, 0.5363, 0.4634, 0.4595, 0.4892, 0.4651, 0.4329],
[0.5154, 0.6135, 0.584, 0.5047, 0.5912, 0.5642, 0.5, 0.5385, 0.5475, 0.3788, 0.5771, 0.4911, 0.5301, 0.5177, 0.6179, 0.5054, 0.6707, 0.4785, 0.5415, 0.543, 0.615, 0.5484, 0.5831, 0.5627, 0.4924, 0.6219, 0.5527, 0.536],
[0.308, 0.4938, 0.495, 0.3393, 0.4549, 0.4248, 0.4615, 0.5, 0.4643, 0.3304, 0.3831, 0.2866, 0.447, 0.411, 0.2367, 0.3997, 0.4707, 0.3435, 0.386, 0.4509, 0.4476, 0.4113, 0.3525, 0.3279, 0.353, 0.3894, 0.4819, 0.3204],
[0.3809, 0.492, 0.4935, 0.3488, 0.4731, 0.4745, 0.4525, 0.5357, 0.5, 0.3868, 0.5096, 0.4639, 0.5004, 0.373, 0.2705, 0.4527, 0.4871, 0.3421, 0.3813, 0.4759, 0.4528, 0.4172, 0.3817, 0.3265, 0.3547, 0.4785, 0.4925, 0.3435],
[0.4747, 0.6831, 0.7009, 0.5009, 0.5065, 0.5491, 0.6212, 0.6696, 0.6132, 0.5, 0.4824, 0.3874, 0.6028, 0.6284, 0.4401, 0.4483, 0.5899, 0.5263, 0.5235, 0.4895, 0.5799, 0.5985, 0.4554, 0.4438, 0.4627, 0.4298, 0.5109, 0.4471],
[0.4407, 0.4729, 0.5173, 0.3816, 0.4249, 0.5211, 0.4229, 0.6169, 0.4904, 0.5176, 0.5, 0.437, 0.496, 0.3755, 0.4073, 0.4023, 0.4378, 0.4159, 0.341, 0.4176, 0.4784, 0.4581, 0.4015, 0.3744, 0.383, 0.4227, 0.4527, 0.3898],
[0.4817, 0.4875, 0.6108, 0.4971, 0.5209, 0.5088, 0.5089, 0.7134, 0.5361, 0.6126, 0.563, 0.5, 0.6282, 0.5411, 0.541, 0.4617, 0.5585, 0.4473, 0.4316, 0.4613, 0.689, 0.4093, 0.4779, 0.3965, 0.493, 0.5031, 0.5438, 0.472],
[0.3761, 0.5069, 0.5186, 0.4024, 0.4742, 0.4317, 0.4699, 0.553, 0.4996, 0.3972, 0.504, 0.3718, 0.5, 0.4631, 0.3232, 0.4276, 0.5033, 0.3971, 0.4246, 0.4692, 0.4611, 0.4616, 0.4524, 0.3714, 0.3801, 0.4637, 0.4995, 0.3634],
[0.3943, 0.5975, 0.5109, 0.3811, 0.646, 0.4699, 0.4823, 0.589, 0.627, 0.3716, 0.6245, 0.4589, 0.5369, 0.5, 0.5226, 0.5385, 0.6126, 0.4334, 0.5273, 0.5476, 0.548, 0.4615, 0.5913, 0.4549, 0.4835, 0.6551, 0.5443, 0.469],
[0.5459, 0.5227, 0.471, 0.3299, 0.4461, 0.3294, 0.3821, 0.7633, 0.7295, 0.5599, 0.5927, 0.459, 0.6768, 0.4774, 0.5, 0.4067, 0.5415, 0.3228, 0.3744, 0.3973, 0.4827, 0.3893, 0.4602, 0.3308, 0.282, 0.4352, 0.3566, 0.332],
[0.4948, 0.6048, 0.5428, 0.5149, 0.5368, 0.5558, 0.4946, 0.6003, 0.5473, 0.5517, 0.5977, 0.5383, 0.5724, 0.4615, 0.5933, 0.5, 0.6389, 0.4375, 0.4902, 0.4122, 0.5827, 0.4613, 0.5453, 0.5664, 0.5035, 0.5942, 0.4592, 0.5176],
[0.3832, 0.3812, 0.379, 0.3471, 0.4543, 0.3813, 0.3293, 0.5293, 0.5129, 0.4101, 0.5622, 0.4415, 0.4967, 0.3874, 0.4585, 0.3611, 0.5, 0.3334, 0.4948, 0.4431, 0.5101, 0.347, 0.559, 0.3824, 0.4186, 0.5322, 0.428, 0.4406],
[0.4891, 0.6527, 0.5415, 0.5064, 0.5761, 0.4675, 0.5215, 0.6565, 0.6579, 0.4737, 0.5841, 0.5527, 0.6029, 0.5666, 0.6772, 0.5625, 0.6666, 0.5, 0.6336, 0.5197, 0.5964, 0.5106, 0.6546, 0.5428, 0.564, 0.6074, 0.5628, 0.5283],
[0.4652, 0.5945, 0.5302, 0.3877, 0.5564, 0.5771, 0.4585, 0.614, 0.6187, 0.4765, 0.659, 0.5684, 0.5754, 0.4727, 0.6256, 0.5098, 0.5052, 0.3664, 0.5, 0.4764, 0.5172, 0.4136, 0.5349, 0.4166, 0.4242, 0.4802, 0.4764, 0.3686],
[0.4539, 0.5364, 0.5177, 0.4705, 0.5358, 0.575, 0.457, 0.5491, 0.5241, 0.5105, 0.5824, 0.5387, 0.5308, 0.4524, 0.6027, 0.5878, 0.5569, 0.4803, 0.5236, 0.5, 0.5927, 0.4633, 0.5778, 0.5423, 0.5037, 0.5912, 0.5213, 0.5331],
[0.3598, 0.4048, 0.406, 0.3741, 0.4345, 0.3661, 0.385, 0.5524, 0.5472, 0.4201, 0.5216, 0.311, 0.5389, 0.452, 0.5173, 0.4173, 0.4899, 0.4036, 0.4828, 0.4073, 0.5, 0.4412, 0.5948, 0.395, 0.431, 0.4759, 0.4538, 0.416],
[0.4982, 0.5881, 0.477, 0.4664, 0.5163, 0.6319, 0.4516, 0.5887, 0.5828, 0.4015, 0.5419, 0.5907, 0.5384, 0.5385, 0.6107, 0.5387, 0.653, 0.4894, 0.5864, 0.5367, 0.5588, 0.5, 0.5789, 0.5157, 0.5377, 0.5738, 0.5704, 0.5044],
[0.4924, 0.4715, 0.4459, 0.419, 0.4827, 0.4637, 0.4169, 0.6475, 0.6183, 0.5446, 0.5985, 0.5221, 0.5476, 0.4087, 0.5398, 0.4547, 0.441, 0.3454, 0.4651, 0.4222, 0.4052, 0.4211, 0.5, 0.4341, 0.4174, 0.4364, 0.4229, 0.3542],
[0.5382, 0.5715, 0.5257, 0.5141, 0.5172, 0.5366, 0.4373, 0.6721, 0.6735, 0.5562, 0.6256, 0.6035, 0.6286, 0.5451, 0.6692, 0.4336, 0.6176, 0.4572, 0.5834, 0.4577, 0.605, 0.4843, 0.5659, 0.5, 0.5015, 0.5235, 0.4781, 0.4791],
[0.5105, 0.64, 0.6026, 0.4872, 0.5642, 0.5405, 0.5076, 0.647, 0.6453, 0.5373, 0.617, 0.507, 0.6199, 0.5165, 0.718, 0.4965, 0.5814, 0.436, 0.5758, 0.4963, 0.569, 0.4623, 0.5826, 0.4985, 0.5, 0.5101, 0.4836, 0.4086],
[0.4591, 0.3777, 0.4206, 0.4502, 0.4575, 0.5108, 0.3781, 0.6106, 0.5215, 0.5702, 0.5773, 0.4969, 0.5363, 0.3449, 0.5648, 0.4058, 0.4678, 0.3926, 0.5198, 0.4088, 0.5241, 0.4262, 0.5636, 0.4765, 0.4899, 0.5, 0.4669, 0.4561],
[0.4251, 0.5768, 0.5348, 0.482, 0.5253, 0.5349, 0.4473, 0.5181, 0.5075, 0.4891, 0.5473, 0.4562, 0.5005, 0.4557, 0.6434, 0.5408, 0.572, 0.4372, 0.5236, 0.4787, 0.5462, 0.4296, 0.5771, 0.5219, 0.5164, 0.5331, 0.5, 0.4353],
[0.5005, 0.4709, 0.4856, 0.5691, 0.5354, 0.5671, 0.464, 0.6796, 0.6565, 0.5529, 0.6102, 0.528, 0.6366, 0.531, 0.668, 0.4824, 0.5594, 0.4717, 0.6314, 0.4669, 0.584, 0.4956, 0.6458, 0.5209, 0.5914, 0.5439, 0.5647, 0.5]
]
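# Optional sanity check on the tables above: each should be a complete
# win-probability matrix, i.e. entry [i][j] and entry [j][i] sum to 1
# (up to rounding of the stored values) with mirror matches at exactly 0.5.
for table in (beta_optimality, tango_optimality):
    size = len(table)
    assert all(abs(table[i][j] + table[j][i] - 1) < 1e-3
               for i in range(size) for j in range(size))
    assert all(table[i][i] == 0.5 for i in range(size))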
def find_distribution(season, pair):
    # Weight each opposing pair by how strongly it is expected to beat the
    # given pair: an opponent is listed once for every threshold from 0.5
    # down to 0.25 (in steps of 0.05) that the given pair's win rate
    # against it falls at or below.
    opt_table = tango_optimality
    if season == 1:
        opt_table = beta_optimality
    row = opt_table[pairs.index(pair)]
    winning_pairs = []
    for j, p in enumerate(pairs):
        for threshold in (0.5, 0.45, 0.4, 0.35, 0.3, 0.25):
            if row[j] <= threshold:
                winning_pairs.append(p)
    return winning_pairs
def find_distribution_2(season, pair):
    # Flat variant: every opposing pair expected to beat the given pair
    # (win rate at or below 0.5) is listed exactly once, i.e. all winning
    # counter-picks are weighted equally.
    opt_table = tango_optimality
    if season == 1:
        opt_table = beta_optimality
    row = opt_table[pairs.index(pair)]
    return [p for j, p in enumerate(pairs) if row[j] <= 0.5]
# find_distribution(2,"KA")
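# Expanding the example above with a Counter shows the weighting: a strong
# counter-pick can appear up to six times in the returned list, e.g.
# from collections import Counter
# print(Counter(find_distribution(2, "KA")))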
# Expected opponent distributions, stored as nested dictionaries:
# expected[p][q] is the proportion of games in which q is expected to be
# picked against p, for the threshold-weighted and flat models in each season.
s1_expected = {}
s1_expected_flat = {}
s2_expected = {}
s2_expected_flat = {}
for p in pairs:
    s1_expected[p] = {k: 0 for k in pairs}
    s2_expected[p] = {k: 0 for k in pairs}
    s1_expected_flat[p] = {k: 0 for k in pairs}
    s2_expected_flat[p] = {k: 0 for k in pairs}
    dist = find_distribution(1, p)
    for q in dist:
        s1_expected[p][q] += 1 / len(dist)
    dist = find_distribution(2, p)
    for q in dist:
        s2_expected[p][q] += 1 / len(dist)
    dist = find_distribution_2(1, p)
    for q in dist:
        s1_expected_flat[p][q] += 1 / len(dist)
    dist = find_distribution_2(2, p)
    for q in dist:
        s2_expected_flat[p][q] += 1 / len(dist)
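# Optional sanity check: every expected distribution is normalised, so each
# inner dictionary should sum to 1 (up to floating-point error).
for expected in (s1_expected, s2_expected, s1_expected_flat, s2_expected_flat):
    assert all(abs(sum(d.values()) - 1) < 1e-9 for d in expected.values())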
def predict_values(n, m, axis):
    # Split the season-2 games (balance code 1.2) into segments of n games,
    # count pair popularity per segment, then predict segment m+1's
    # popularity from segment m and plot both predictions against the
    # actual counts on the given axis.
    game_count = 0
    segment_count = 0
    segments = math.ceil(db.completed_games.count_documents(
        {"winner": {"$exists": True}, "balance_code": "1.2"}) / n)
    s2_popularity = {}
    for s in range(segments):
        s2_popularity[s] = {k: 0 for k in pairs}
    for g in db.completed_games.find({"winner": {"$exists": True}, "balance_code": "1.2"}):
        if game_count >= n:
            segment_count += 1
            game_count = 0
        p1 = g["p1c1"][0] + g["p1c2"][0]
        p2 = g["p2c1"][0] + g["p2c2"][0]
        for p in [p1, p2]:
            if p in s2_popularity[segment_count]:
                s2_popularity[segment_count][p] += 1
            else:
                # Pair codes can appear in either character order; count
                # the reversed code instead.
                s2_popularity[segment_count][p[1] + p[0]] += 1
        game_count += 1
    x = np.arange(len(pairs))
    width = 0.25
    # Predict segment m+1 from segment m.
    s2_prediction = {p: 0 for p in pairs}
    s2_flat_prediction = {p: 0 for p in pairs}
    for p in pairs:
        for q in pairs:
            # A pair's predicted count is the sum, over every possible
            # opponent q, of the number of times q was played in the
            # previous segment times the proportion of games in which p
            # is expected to be picked against q.
            s2_prediction[p] += s2_popularity[m][q] * s2_expected[q][p]
            s2_flat_prediction[p] += s2_popularity[m][q] * s2_expected_flat[q][p]
    axis.bar(x - width, list(s2_prediction.values()), width, label="dynamic prediction")
    axis.bar(x, list(s2_popularity[m + 1].values()), width, label="actual")
    axis.bar(x + width, list(s2_flat_prediction.values()), width, label="flat prediction")
    # Segment s covers games s*n to (s+1)*n - 1.
    axis.set_title("Predicting popularity in games {0}-{1} based on games {2}-{3}".format(
        (m + 1) * n, (m + 2) * n - 1, m * n, (m + 1) * n - 1))
    axis.set_xticks(range(len(pairs)))
    axis.set_xticklabels(pairs)
    axis.legend()
    flat_error = 0
    dynamic_error = 0
    for p in pairs:
        flat_error += abs(s2_popularity[m + 1][p] - s2_flat_prediction[p])
        dynamic_error += abs(s2_popularity[m + 1][p] - s2_prediction[p])
    # Mean absolute error per pair for each prediction method.
    print("Dynamic error: {0}, flat error: {1}".format(
        dynamic_error / len(pairs), flat_error / len(pairs)))
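# The prediction loop above is just a matrix-vector product. An equivalent
# numpy sketch (illustrative only; `prev_counts` stands for the previous
# segment's popularity counts, listed in `pairs` order):
# E = np.array([[s2_expected[q][p] for p in pairs] for q in pairs])
# prev_counts = np.array([s2_popularity[m][q] for q in pairs])
# dynamic_prediction = prev_counts @ E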
fig, ((ax0, ax1), (ax2, ax3)) = plt.subplots(2,2)
axes = [None, ax0, ax1, ax2, ax3]
for m in range(1, 5):
    predict_values(75, m, axes[m])
plt.show()