diff --git a/project/CassandraSparkBuild.scala b/project/CassandraSparkBuild.scala
index c7712f25f..0b6ba7d17 100644
--- a/project/CassandraSparkBuild.scala
+++ b/project/CassandraSparkBuild.scala
@@ -22,6 +22,7 @@ object CassandraSparkBuild extends Build {
 
   lazy val root = Project(id = "spark-cassandra-connector", base = file("."))
     .settings(Settings.defaultSettings: _*)
+    .settings(Settings.buildSettings: _*)
     .settings(libraryDependencies ++= Dependencies.spark)
     .configs(IntegrationTest)
 }
diff --git a/project/Settings.scala b/project/Settings.scala
index bc25e1e60..6cd6376bc 100644
--- a/project/Settings.scala
+++ b/project/Settings.scala
@@ -28,7 +28,8 @@ import scala.language.postfixOps
 object Settings extends Build {
 
   lazy val buildSettings = Seq(
-    name := "Apache Spark connector for Apache Cassandra from DataStax",
+    name := "DataStax Apache Cassandra connector for Apache Spark",
+    normalizedName := "spark-cassandra-connector",
     description := "A library that exposes Cassandra tables as Spark RDDs, writes Spark RDDs to Cassandra tables, " +
       "and executes CQL queries in Spark applications.",
     organization := "com.datastax.spark",
@@ -39,25 +40,8 @@ object Settings extends Build {
     licenses := Seq(("Apache License, Version 2.0", url("http://www.apache.org/licenses/LICENSE-2.0")))
   )
 
-  /*
-     ToDo: with release plugin: publishTo settings would handle generating versions published from the snapshot,
-     so that any users of these artifacts NEVER use a SNAPSHOT via typical build code such as:
-       if (version endsWith "-SNAPSHOT") ("snapshots" at nexus + "content/repositories/snapshots")
-       else ("releases" at nexus + "service/local/staging/deploy/maven2")
-   */
-  override lazy val settings = super.settings ++ buildSettings ++ Seq(shellPrompt := ShellPrompt.prompt)
-
-  lazy val baseSettings =
-    Defaults.coreDefaultSettings ++ Defaults.itSettings ++
-      IvyPlugin.projectSettings ++ JvmPlugin.projectSettings ++ Publish.settings
-
-  lazy val parentSettings = baseSettings ++ Seq(
-    publishArtifact := false,
-    reportBinaryIssues := () // disable bin comp check
-  )
-  // add ++ formatSettings
-  lazy val defaultSettings = baseSettings ++ testSettings ++ mimaSettings ++ releaseSettings ++ Seq(
+  lazy val defaultSettings = testSettings ++ mimaSettings ++ releaseSettings ++ Seq(
     scalacOptions in (Compile, doc) ++= Seq("-doc-root-content", "rootdoc.txt"),
     scalacOptions ++= Seq("-encoding", "UTF-8", s"-target:jvm-${Versions.JDK}", "-deprecation", "-feature", "-language:_", "-unchecked", "-Xlint"),
     javacOptions ++= Seq("-encoding", "UTF-8", "-source", Versions.JDK, "-target", Versions.JDK, "-Xlint:unchecked", "-Xlint:deprecation"),
@@ -76,22 +60,13 @@ object Settings extends Build {
     Tests.Argument(TestFrameworks.JUnit, "-oDF", "-v", "-a")
   )
 
-  val javaAgent = TaskKey[Seq[String]]("javaagent")
-  val javaAgentTask = javaAgent <<= (fullClasspath in IntegrationTest).map { cp =>
-    val fileNames = cp.map(_.data.getPath)
-    val jamm = fileNames.find(_.matches("^.*jamm-.*\\.jar$"))
-    jamm.map("-javaagent:" + _).toSeq
-  }
-
-  lazy val testSettings = tests ++ Seq(
+  lazy val testSettings = tests ++ Defaults.itSettings ++ Seq(
     parallelExecution in Test := false,
     parallelExecution in IntegrationTest := false,
     testOptions in Test ++= testOptionSettings,
     testOptions in IntegrationTest ++= testOptionSettings,
     fork in Test := true,
-    fork in IntegrationTest := true,
-    javaAgentTask,
-    javaOptions in IntegrationTest ++= Seq("-Xmx2g") ++ javaAgent.value
+    fork in IntegrationTest := true
   )
 
   lazy val formatSettings = SbtScalariform.scalariformSettings ++ Seq(
@@ -106,6 +81,8 @@ object Settings extends Build {
       .setPreference(AlignParameters, true)
      .setPreference(AlignSingleLineCaseStatements, true)
   }
+
+  override lazy val settings = super.settings ++ buildSettings ++ Seq(shellPrompt := ShellPrompt.prompt)
 }
 
 /**
diff --git a/rootdoc.txt b/rootdoc.txt
index 04575b5b8..4549b99ba 100644
--- a/rootdoc.txt
+++ b/rootdoc.txt
@@ -1,2 +1,2 @@
-Cassandra driver for Apache Spark.
+Cassandra connector for Apache Spark.
 See documentation of package [[com.datastax.spark.connector]].
\ No newline at end of file
diff --git a/src/main/scala/com/datastax/spark/connector/package.scala b/src/main/scala/com/datastax/spark/connector/package.scala
index 1431445b4..c79a9c910 100644
--- a/src/main/scala/com/datastax/spark/connector/package.scala
+++ b/src/main/scala/com/datastax/spark/connector/package.scala
@@ -11,7 +11,7 @@ import org.apache.spark.rdd.RDD
 import scala.reflect.ClassTag
 
 /**
- * The root package of Cassandra driver for Apache Spark.
+ * The root package of Cassandra connector for Apache Spark.
  * Offers handy implicit conversions that add Cassandra-specific methods to `SparkContext` and `RDD`.
  *
  * Call [[com.datastax.spark.connector.SparkContextFunctions#cassandraTable cassandraTable]] method on the `SparkContext` object