-
Notifications
You must be signed in to change notification settings - Fork 8
/
individual.py
80 lines (61 loc) · 2.13 KB
/
individual.py
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
import random
from keras.models import Sequential
from keras.layers.core import Dense, Dropout, Activation
from keras.optimizers import RMSprop
from config import Config
class Layer:
    """Genome for a single dense layer: unit count, dropout rate and activation.

    Attributes (set by randomInit, not by the constructor):
        size: number of units, drawn from [Config.MIN_LAYER_SIZE, Config.MAX_LAYER_SIZE].
        dropout: dropout rate chosen from Config.DROPOUT (0 means no dropout layer).
        activation: activation name chosen from Config.ACTIVATIONS.
    """
    def __init__(self):
        # Attributes are deliberately left unset; callers invoke randomInit().
        pass
    def randomInit(self):
        """Randomize all layer attributes from the Config ranges; returns self for chaining."""
        lo, hi = Config.MIN_LAYER_SIZE, Config.MAX_LAYER_SIZE
        self.size = random.randint(lo, hi)
        self.dropout = random.choice(Config.DROPOUT)
        self.activation = random.choice(Config.ACTIVATIONS)
        return self
    def __str__(self):
        # Compact one-line rendering used by Individual.__str__.
        return " #{} dropout={} activation={}".format(self.size, self.dropout, self.activation)
class Individual:
    """Genome describing a feed-forward network architecture as a list of Layer specs."""
    def __init__(self):
        # Input/output dimensions come from the global experiment Config.
        self.input_shape = Config.input_shape
        self.noutputs = Config.noutputs
    def randomInit(self):
        """Create a random architecture of 1..Config.MAX_LAYERS random layers."""
        depth = random.randint(1, Config.MAX_LAYERS)
        self.layers = [Layer().randomInit() for _ in range(depth)]
    def createNetwork(self):
        """Translate this genome into a compiled keras Sequential model.

        Only the first Dense layer receives input_shape; each layer is followed
        by its activation and, when dropout > 0, a Dropout layer. A softmax head
        is appended for classification tasks.
        """
        model = Sequential()
        for idx, spec in enumerate(self.layers):
            if idx == 0:
                model.add(Dense(spec.size, input_shape=self.input_shape))
            else:
                model.add(Dense(spec.size))
            model.add(Activation(spec.activation))
            if spec.dropout > 0:
                model.add(Dropout(spec.dropout))
        # output layer + compilation
        model.add(Dense(self.noutputs))
        if Config.task_type == "classification":
            model.add(Activation('softmax'))
        model.compile(loss=Config.loss,
                      optimizer=RMSprop())
        return model
    def __str__(self):
        border = "------------------------\n"
        body = "".join(str(spec) + "\n" for spec in self.layers)
        return border + body + border
def initIndividual(indclass):
    """Factory helper: instantiate *indclass* and randomize it.

    Used as the individual-creation hook (e.g. for DEAP toolbox registration).
    Returns the freshly randomized instance.
    """
    instance = indclass()
    instance.randomInit()
    return instance