build.sbt
name := "h2o-sparkling-demo"
version := "1.0.0"
scalaVersion := "2.10.4"
organization := "0xdata.com"
// Add dependency graph plugin settings
net.virtualvoid.sbt.graph.Plugin.graphSettings
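// (Assuming the sbt-dependency-graph plugin is registered in project/plugins.sbt.)
// The resolved dependency tree can then be inspected from the sbt shell, e.g.:
//   sbt dependency-tree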
/** Add local .m2 cache */
//resolvers += Resolver.mavenLocal
/* Add Sonatype repo to get H2O */
resolvers += "Sonatype OSS Snapshots" at "https://oss.sonatype.org/content/repositories/snapshots"
/** Add Akka repo for Spark */
resolvers += "Akka Repository" at "http://repo.akka.io/releases/"
/** Add Cloudera repo */
resolvers += "Cloudera Repository" at "https://repository.cloudera.com/artifactory/cloudera-repos"
/* Dependencies - %% appends Scala version to artifactId */
libraryDependencies += "ai.h2o" % "h2o-core" % "2.5-SNAPSHOT" excludeAll( ExclusionRule("stax", "stax-api"), ExclusionRule("org.apache.xmlbeans","xmlbeans"))
libraryDependencies += "ai.h2o" %% "h2o-scala" % "2.5-SNAPSHOT"
libraryDependencies += "org.apache.spark" %% "spark-core" % "1.0.2" % "provided" // Spark-CORE: do not forget %% to select spark-core distribution reflecting Scala version
libraryDependencies += "org.apache.spark" %% "spark-sql" % "1.0.2" % "provided" // Spark-SQL
libraryDependencies += "org.tachyonproject" % "tachyon" % "0.4.1-thrift" % "provided" // To support inhale of data from Tachyon
// Put the compile-time "provided" dependencies back on the run classpath
run in Compile <<= Defaults.runTask(fullClasspath in Compile, mainClass in (Compile, run), runner in (Compile, run))
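// The redefined task mirrors the default `run`, but builds its classpath from
// `fullClasspath in Compile`, which still includes the "provided" Spark jars.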
/* Run setup: fork `run` into a separate JVM, connect stdin, and forward output to stdout. */
fork in run := true
connectInput in run := true
outputStrategy in run := Some(StdoutOutput)
javaOptions in run ++= Seq("-Xmx4g", "-Xms4g", "-Djava.security.krb5.realm=", "-Djava.security.krb5.kdc=", "-Djava.security.krb5.conf=/dev/null")
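// The empty krb5 realm/kdc settings are a common workaround for slow or failing
// Kerberos lookups on some platforms; adjust them if Kerberos is actually required.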
// For debugging from Eclipse
//javaOptions in run += "-Xrunjdwp:transport=dt_socket,server=y,suspend=y,address=1044"
// Provides a dedicated task to launch a plain H2O node without running the Spark demo,
// but with the Spark demo jars on the classpath (so Spark classes and demo methods remain accessible).
lazy val runH2O = taskKey[Unit]("Run H2O node")
fullRunTask(runH2O, Runtime, "water.Boot")
fork in runH2O := true
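// A standalone H2O node can then be started from the sbt shell with:
//   sbt runH2O
// (water.Boot is H2O's bootstrap main class; the forked JVM sees the Spark demo classes on its classpath.)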