-
Notifications
You must be signed in to change notification settings - Fork 13
/
build.gradle
129 lines (94 loc) · 4.06 KB
/
build.gradle
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
// Build plugins for the Spark/MessageHub Scala example.
plugins {
    // SSH tasks for deploying to and running on the remote cluster
    id "org.hidetake.ssh" version "1.5.0"
    // required to build scala example
    id "scala"
    // useful for working on scala example
    id "eclipse"
    // shadow builds the fat ("-all") jar we submit to Spark
    id 'com.github.johnrengelman.shadow' version '1.2.3'
}
// Resolve the compile dependencies declared below from Maven Central.
repositories {
    mavenCentral()
}
// Dependencies needed to compile and bundle the Spark/Kafka example.
dependencies {
    // required to build the spark example
    // FIX: scala-library has no plain '2.10' version published on Maven
    // Central — use a concrete patch release that matches the _2.10
    // Spark 1.6.2 artifacts below.
    compile 'org.scala-lang:scala-library:2.10.6'
    compile 'org.apache.spark:spark-core_2.10:1.6.2'
    compile 'org.apache.spark:spark-streaming_2.10:1.6.2'
    compile 'org.apache.kafka:kafka_2.10:0.9.0.0'
    compile 'org.apache.kafka:kafka-clients:0.9.0.0'
    compile 'org.apache.kafka:kafka-log4j-appender:0.9.0.0'
    // local jar providing the MessageHub login module (not on any repo)
    compile files('libs/messagehub.login-1.0.0.jar')
}
// load some common helper methods
apply from: "${projectDir}/../../shared/common-helpers.gradle"

// get the cluster connection details (e.g. props.username used by the tasks below)
Properties props = new Properties()
// FIX: the original created a FileInputStream and never closed it (a
// file-handle leak); withInputStream closes the stream after the closure runs.
new File("$projectDir/../../connection.properties").withInputStream { props.load(it) }

// read the MessageHub (Kafka) service credentials from vcap.json
// (kafka_brokers_sasl, user, password, api_key, kafka_rest_url, topic)
def jsonFile = file('vcap.json')
def mhProps = new groovy.json.JsonSlurper().parseText(jsonFile.text)
// Builds the fat jar (via shadowJar), uploads it to the cluster over SSH,
// and submits the MessageHubConsumer Spark job there.
task ScalaWordCount {
dependsOn shadowJar
doLast {
// unique remote working directory per run, named from the current epoch millis
def tmpDir = "test-${new Date().getTime()}"
// ssh plugin documentation: https://gradle-ssh-plugin.github.io/docs/
ssh.run {
// remotes.bicluster is defined in shared/common-helpers.gradle
session(remotes.bicluster) {
try {
// initialise kerberos
execute "kinit -k -t ${props.username}.keytab ${props.username}@IBM.COM"
}
catch (Exception e) {
// best-effort: clusters without Kerberos have no kinit/keytab, so
// a failure here is reported but does not abort the task
println "problem running kinit - maybe this is a Basic cluster?"
}
// create temp local dir for holding LICENSE file before uploading it to hdfs
execute "mkdir ${tmpDir}"
println "Uploading SparkMessageHubScala-all.jar to cluster - this may take some time"
// upload spark script and text file to process
put from: "${projectDir}/build/libs/SparkMessageHubScala-all.jar",
into: "${tmpDir}/SparkMessageHubScala-all.jar"
println "Finished Uploading SparkMessageHubScala-all.jar"
def clz = "--class \"biginsights.examples.MessageHubConsumer\""
// FIXME! we are passing passwords as command line arguments which will be visible
// to other users. We could probably bundle the properties in the jar file
// and the app read them from there.
// kafka_brokers_sasl is a JSON list; strip the brackets and all whitespace
// to get a plain comma-separated broker string for the app
def kafka_brokers = "${mhProps.kafka_brokers_sasl}".minus('[').minus(']').replaceAll(/\s*/, "")
// pass all connection details to the app as a single spark.driver.args string
def args = "--conf spark.driver.args=\"${kafka_brokers} ${mhProps.user} ${mhProps.password} ${mhProps.api_key} ${mhProps.kafka_rest_url} ${mhProps.topic} ${props.username}\""
def cmd = "spark-submit ${args} --num-executors 3 --executor-cores 1 --executor-memory 1G ${clz} ${tmpDir}/SparkMessageHubScala-all.jar"
println "Running: ${cmd}"
execute cmd
}
}
}
}
// List any MessageHubConsumer processes currently running on the cluster.
task PsAll {
    doLast {
        // ssh plugin documentation: https://gradle-ssh-plugin.github.io/docs/
        ssh.run {
            // remotes.bicluster is defined in shared/common-helpers.gradle
            session(remotes.bicluster) {
                execute 'ps aux | grep biginsights.examples.MessageHubConsumer'
            }
        }
    }
}
// Terminate any MessageHubConsumer processes running on the cluster.
task KillAll {
    doLast {
        // ssh plugin documentation: https://gradle-ssh-plugin.github.io/docs/
        ssh.run {
            // remotes.bicluster is defined in shared/common-helpers.gradle
            session(remotes.bicluster) {
                try {
                    execute '''kill $(ps aux | grep biginsights.examples.MessageHubConsumer | awk '{print $2}')'''
                }
                catch (Exception e) {
                    // kill exits non-zero when there is no matching process
                    println "Ignoring error from kill command"
                }
            }
        }
    }
}
// Convenience entry point: alias for running the ScalaWordCount task.
task Example {
    dependsOn ScalaWordCount
}