SHERPA Neural Network
# SHERPA hyperparameter search for a dense Keras classifier that separates
# SUSY signal events from ttbar background events.
from tensorflow.keras.utils import to_categorical
from tensorflow.keras.models import Sequential
from tensorflow.keras.layers import Dense
from tensorflow.keras.optimizers import SGD
import pandas as pd
import math
import numpy as np
import sherpa


def normalize_features(df):
    # Drop bookkeeping columns and scale each feature to roughly [-1, 1]:
    # the angles phi1/phi2 are divided by pi, the remaining kinematic
    # variables by their own maximum absolute value.
    df = df.drop(columns=['index', 'genweight', 'recoweight'])
    for col in ['phi1', 'phi2']:
        df[col] = df[col] / math.pi
    for col in ['eta1', 'eta2', 'diphi', 'deta', 'deltaR']:
        df[col] = df[col] / df[col].abs().max()
    return df


def loaddataset():
    columns = ['index', 'phi1', 'phi2', 'eta1', 'eta2', 'diphi', 'deta',
               'deltaR', 'genweight', 'recoweight']

    # SUSY signal events, labelled 1.
    susydatanormalized = normalize_features(
        pd.read_csv('/Users/ryanrushing/Desktop/SUSY.txt', names=columns))
    susydatanormalized['identifier'] = 1

    # ttbar background events, labelled 0; truncated to 6760 rows so the
    # two classes are balanced.
    standarddatanormalized = normalize_features(
        pd.read_csv('/Users/ryanrushing/Desktop/ttbarsignalplustau_mainSignal.txt',
                    names=columns))
    standarddatanormalized = standarddatanormalized[:6760]
    standarddatanormalized['identifier'] = 0

    # Combine, shuffle, and split 80/20 into training and test sets.
    wholedata = pd.concat([standarddatanormalized, susydatanormalized]).to_numpy()
    np.random.shuffle(wholedata)
    split = round(0.8 * len(wholedata))
    # The test set is the held-out 20%, not a subset of the training rows.
    trainx, testx = wholedata[:split], wholedata[split:]

    # The first seven columns are the input features, the last is the label.
    trainX, trainY = trainx[:, :7], trainx[:, 7]
    testX, testY = testx[:, :7], testx[:, 7]

    print('trainx shape', trainX.shape)
    print('trainy shape', trainY.shape)
    print('testx shape', testX.shape)
    print('testy shape', testY.shape)

    trainY = to_categorical(trainY)
    testY = to_categorical(testY)
    return trainX, trainY, testX, testY
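
# Note: np.random.shuffle in loaddataset() uses NumPy's global random state,
# so the train/test split differs from run to run. A seeded generator, as in
# the sketch below, would make the split reproducible (the seed value 42 is
# an arbitrary choice, not part of the original script):
#     rng = np.random.default_rng(42)
#     rng.shuffle(wholedata)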

def main2():
    trainX, trainY, testX, testY = loaddataset()

    # Random search over the learning rate with SHERPA; validation loss is
    # the objective and lower values are better.
    parameters = [sherpa.Continuous(name='lr', range=[0.005, 1])]
    algorithm = sherpa.algorithms.RandomSearch(max_num_trials=20)
    study = sherpa.Study(parameters=parameters,
                         algorithm=algorithm,
                         lower_is_better=True,
                         dashboard_port=None,
                         disable_dashboard=False)

    for trial in study:
        # Fully connected classifier on the seven kinematic input features.
        model = Sequential()
        model.add(Dense(7, activation='relu', input_dim=7))
        model.add(Dense(14, activation='relu', kernel_initializer='he_uniform'))
        model.add(Dense(28, activation='relu', kernel_initializer='he_uniform'))
        model.add(Dense(14, activation='relu', kernel_initializer='he_uniform'))
        model.add(Dense(2, activation='sigmoid'))

        opt = SGD(learning_rate=trial.parameters['lr'], momentum=0.5)
        model.compile(optimizer=opt, loss='binary_crossentropy', metrics=['accuracy'])

        # Full-batch training: 10816 is the size of the 80% training split.
        model.fit(trainX, trainY, epochs=40, batch_size=10816,
                  validation_data=(testX, testY), verbose=0,
                  callbacks=[study.keras_callback(trial, objective_name='val_loss')])
        study.finalize(trial)

    print(study.get_best_result())
    input("Press Enter to end the program")


if __name__ == '__main__':
    main2()
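
# Illustrative follow-up sketch (not called above): retrain a single model at
# the best learning rate the search reports. The helper name and the usage
# lines are additions for illustration; they assume the dictionary returned by
# study.get_best_result() exposes the tuned value under the 'lr' key defined
# in the parameter list above.
def train_final_model(best_lr, trainX, trainY, testX, testY):
    # Same architecture and training setup as inside the search loop.
    model = Sequential()
    model.add(Dense(7, activation='relu', input_dim=7))
    model.add(Dense(14, activation='relu', kernel_initializer='he_uniform'))
    model.add(Dense(28, activation='relu', kernel_initializer='he_uniform'))
    model.add(Dense(14, activation='relu', kernel_initializer='he_uniform'))
    model.add(Dense(2, activation='sigmoid'))
    model.compile(optimizer=SGD(learning_rate=best_lr, momentum=0.5),
                  loss='binary_crossentropy', metrics=['accuracy'])
    model.fit(trainX, trainY, epochs=40, batch_size=10816, verbose=0)
    _, accuracy = model.evaluate(testX, testY, verbose=0)
    print('test accuracy at lr=%.4f: %.3f' % (best_lr, accuracy))
    return model

# Example use inside main2(), after the search loop finishes:
#     best = study.get_best_result()
#     train_final_model(best['lr'], trainX, trainY, testX, testY)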