Build warnings #88

Open · wants to merge 1 commit into base: master
5 changes: 3 additions & 2 deletions build.sbt
@@ -40,7 +40,8 @@ inScope(Global)(Seq(
licenses += "Apache License 2.0" -> url("http://www.apache.org/licenses/LICENSE-2.0"),
scalacOptions ++= Seq(
"-deprecation",
"-feature"
"-feature",
"-Werror"
),
scmInfo := Some(ScmInfo(url("https://github.com/lucidsoftware/relate"), "scm:git:[email protected]:lucidsoftware/relate.git")),
Benchmark / test / tags += benchmarkTag -> 1,
@@ -49,5 +50,5 @@ inScope(Global)(Seq(
version := sys.props.getOrElse("build.version", "0-SNAPSHOT")
))

- skip in publish := true
+ publish / skip := true
publishTo := sonatypePublishToBundle.value
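With `-Werror` added, every compiler warning fails the build, which is what motivates the rest of this diff: explicit result types on implicit definitions, `builder.result()` instead of `builder.result`, and so on. A minimal sketch, not from this repo, of the two warning classes involved:

```scala
object WarningSketch {
  import scala.collection.mutable.ListBuffer

  // Scala 2.13 deprecates "auto-application": calling an empty-paren method
  // such as Builder#result() without its parentheses. With -deprecation and
  // -Werror that deprecation becomes a compile error.
  val builder = ListBuffer.newBuilder[Int]
  val ok: ListBuffer[Int] = builder.result() // `builder.result` would warn

  // Giving implicit definitions an explicit result type avoids inference
  // warnings and keeps the inferred public API from changing silently.
  implicit val defaultLimit: Int = 10
}
```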
@@ -16,8 +16,8 @@ package object macros {
@implicitNotFound("A value of type ${A} is never allowed as an RecordOption")
private trait RecordOptionValue[A]
private object RecordOptionValue {
- implicit val bool = new RecordOptionValue[Boolean] {}
- implicit val map = new RecordOptionValue[Map[String, String]] {}
+ implicit val bool: RecordOptionValue[Boolean] = new RecordOptionValue[Boolean] {}
+ implicit val map: RecordOptionValue[Map[String, String]] = new RecordOptionValue[Map[String, String]] {}
}

case class RecordOption[A: RecordOptionValue] private (key: String, value: A)
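The annotated implicits above are the evidence values consumed by the `RecordOption` constructor below them: only types with a `RecordOptionValue` instance are accepted. A self-contained sketch of the same pattern, with simplified names that are not the library's actual API:

```scala
import scala.annotation.implicitNotFound

object EvidenceSketch {
  @implicitNotFound("A value of type ${A} is never allowed as an OptionValue")
  trait OptionValue[A]
  object OptionValue {
    implicit val bool: OptionValue[Boolean] = new OptionValue[Boolean] {}
    implicit val map: OptionValue[Map[String, String]] = new OptionValue[Map[String, String]] {}
  }

  // The context bound [A: OptionValue] restricts what callers can construct.
  case class Opt[A: OptionValue](key: String, value: A)

  val flag = Opt("annotations", true)             // compiles: OptionValue[Boolean] exists
  val cols = Opt("columns", Map("db" -> "scala")) // compiles: OptionValue[Map[String, String]] exists
  // Opt("limit", 3)                              // rejected with the implicitNotFound message
}
```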
@@ -239,7 +239,7 @@ class RowParserTest extends Specification with Mockito {

"fail to compile for anything besides a case class" in {
illTyped("@Record() class Thing", "@Record must be used on a case class")
illTyped("@Record() def f()", "@Record must be used on a case class")
illTyped("@Record() def f(): Unit", "@Record must be used on a case class")
illTyped("@Record() object Thing {}", "@Record must be used on a case class")
illTyped("@Record() trait Thing", "@Record must be used on a case class")

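The quoted snippet gains an explicit `: Unit`, presumably so that under `-Werror` it fails only for the intended reason (the `@Record` restriction) rather than also tripping the missing-result-type/procedure-syntax deprecation. For reference, a hedged sketch of the single-argument form of `illTyped`, assuming it is shapeless's `shapeless.test.illTyped` (the snippet is illustrative, not from this test):

```scala
import shapeless.test.illTyped

object IllTypedSketch {
  // illTyped type-checks its argument at macro-expansion time and fails the
  // enclosing compilation unless the argument itself fails to compile; the
  // two-argument form used in the test above also checks the error message.
  illTyped("val s: String = 3")
}
```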
@@ -6,10 +6,10 @@ import scala.concurrent.duration.FiniteDuration

package object postgres {

- implicit val pgIntervalParameterizable =
+ implicit val pgIntervalParameterizable: Parameterizable[PGInterval] =
Parameterizable(_.setObject(_, _: PGInterval), _.setNull(_, Types.JAVA_OBJECT))

- implicit val finiteDurationParameterizable =
- Parameterizable.from((value: FiniteDuration) => new PGInterval(0, 0, 0, 0, 0, value.toSeconds))
+ implicit val finiteDurationParameterizable: Parameterizable[FiniteDuration] =
+ Parameterizable.from((value: FiniteDuration) => new PGInterval(0, 0, 0, 0, 0, value.toSeconds.toDouble))

}
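The explicit `.toDouble` matters because `PGInterval`'s seconds argument is a `Double`: Scala 2.13 deprecates the lossy automatic `Long`-to-`Double` widening, and `-Werror` would escalate that warning to an error. A small sketch of the conversion the implicit performs (illustrative, outside the library):

```scala
import org.postgresql.util.PGInterval
import scala.concurrent.duration._

object IntervalSketch {
  // PGInterval(years, months, days, hours, minutes, seconds: Double):
  // the FiniteDuration is flattened into whole seconds before binding.
  val d: FiniteDuration = 90.seconds
  val interval = new PGInterval(0, 0, 0, 0, 0, d.toSeconds.toDouble)
}
```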
1 change: 0 additions & 1 deletion relate/build.sbt
@@ -12,7 +12,6 @@ libraryDependencies ++= Seq(
"com.h2database" % "h2" % "1.4.191" % "test",
"com.storm-enroute" %% "scalameter" % "0.19" % Benchmark,
"com.storm-enroute" %% "scalameter" % "0.19" % Regression,
"org.scala-lang.modules" %% "scala-collection-compat" % "2.1.6"
)

libraryDependencies ++= (CrossVersion.binaryScalaVersion(scalaVersion.value) match {
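Dropping `scala-collection-compat` goes hand in hand with the import changes below: on Scala 2.13 the `Factory` abstraction that the compat shim back-ported is part of the standard library as `scala.collection.Factory`, so the extra dependency is no longer needed (assuming this code no longer cross-builds for 2.12). A minimal sketch of resolving a `Factory` instance from the 2.13 standard library alone:

```scala
import scala.collection.Factory

object FactorySketch {
  // Resolved via the collection companions' implicit Factory instances;
  // no scala.collection.compat import is involved on 2.13.
  val vectorFactory: Factory[Int, Vector[Int]] = implicitly[Factory[Int, Vector[Int]]]
}
```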
@@ -1,6 +1,6 @@
package com.lucidchart.relate

- import scala.collection.compat._
+ import scala.collection.Factory
import scala.language.higherKinds

trait CollectionsParser {
@@ -14,19 +14,19 @@ trait CollectionsParser {
}
}

- builder.result
+ builder.result()
}

- implicit def option[B: RowParser] = RowParser[Option[B]] { result =>
+ implicit def option[B: RowParser]: RowParser[Option[B]] = RowParser[Option[B]] { result =>
limitedCollection[B, List](1).parse(result).headOption
}

- implicit def collection[B: RowParser, Col[_]](implicit factory: Factory[B, Col[B]]) =
+ implicit def collection[B: RowParser, Col[_]](implicit factory: Factory[B, Col[B]]): RowParser[Col[B]] =
limitedCollection[B, Col](Long.MaxValue)

implicit def pairCollection[Key: RowParser, Value: RowParser, PairCol[_, _]](implicit
factory: Factory[(Key, Value), PairCol[Key, Value]]
- ) =
+ ): RowParser[PairCol[Key, Value]] =
RowParser { result =>

val builder = factory.newBuilder
@@ -37,6 +37,6 @@ trait CollectionsParser {
}
}

- builder.result
+ builder.result()
}
}
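The `collection` and `pairCollection` parsers above follow the standard 2.13 builder pattern: the implicit `Factory` chooses the target collection type while a `RowParser` supplies each element. A simplified, self-contained sketch of that pattern, with plain functions standing in for relate's `SqlResult`/`RowParser` machinery:

```scala
import scala.collection.Factory

object CollectSketch {
  type Row = Map[String, Any] // stand-in for relate's SqlRow

  def collect[U, T[_]](rows: Iterator[Row])(parse: Row => U)(implicit factory: Factory[U, T[U]]): T[U] = {
    val builder = factory.newBuilder
    rows.foreach(row => builder += parse(row))
    builder.result()
  }

  // The caller names the element and collection types; the matching
  // Factory[String, Vector[String]] is resolved implicitly.
  val names: Vector[String] =
    collect[String, Vector](Iterator(Map("name" -> "ada"), Map("name" -> "alan")))(_("name").toString)
}
```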
18 changes: 9 additions & 9 deletions relate/src/main/scala/com/lucidchart/relate/CollectionsSql.scala
@@ -1,7 +1,7 @@
package com.lucidchart.relate

import java.sql.{Connection, PreparedStatement, ResultSet}
- import scala.collection.compat._
+ import scala.collection.Factory
import scala.language.higherKinds

/**
@@ -51,9 +51,9 @@ trait CollectionsSql { self: Sql =>
* @return
* the results as an arbitrary collection of records
*/
- def asCollection[U, T[_]](parser: SqlRow => U)(implicit factory: Factory[U, T[U]], connection: Connection): T[U] =
- normalStatement.execute(_.asCollection(parser))
- def asCollection[U: RowParser, T[_]]()(implicit factory: Factory[U, T[U]], connection: Connection): T[U] =
+ // def asCollection[U, T[_]](parser: SqlRow => U)(implicit factory: Factory[U, T[U]], connection: Connection): T[U] =
+ // normalStatement.execute(_.asCollection(parser))
+ def asCollection[U: RowParser, T[_]](implicit factory: Factory[U, T[U]], connection: Connection): T[U] =
normalStatement.execute(_.asCollection[U, T])

/**
@@ -65,11 +65,11 @@
* @return
* the results as an arbitrary collection of key value pairs
*/
- def asPairCollection[U, V, T[_, _]](
- parser: SqlRow => (U, V)
- )(implicit factory: Factory[(U, V), T[U, V]], connection: Connection): T[U, V] =
- normalStatement.execute(_.asPairCollection(parser))
- def asPairCollection[U, V, T[_, _]]()(implicit
+ // def asPairCollection[U, V, T[_, _]](
+ // parser: SqlRow => (U, V)
+ // )(implicit factory: Factory[(U, V), T[U, V]], connection: Connection): T[U, V] =
+ // normalStatement.execute(_.asPairCollection(parser))
+ def asPairCollection[U, V, T[_, _]](implicit
factory: Factory[(U, V), T[U, V]],
connection: Connection,
p: RowParser[(U, V)]
@@ -1,15 +1,15 @@
package com.lucidchart.relate

import java.sql.ResultSetMetaData
- import scala.collection.compat._
+ import scala.collection.Factory
import scala.collection.mutable
import scala.language.higherKinds

trait CollectionsSqlResult { self: SqlResult =>

def asCollection[U, T[_]](parser: SqlRow => U)(implicit factory: Factory[U, T[U]]): T[U] =
asCollection(parser, Long.MaxValue)
- def asCollection[U: RowParser, T[_]]()(implicit factory: Factory[U, T[U]]): T[U] =
+ def asCollection[U: RowParser, T[_]](implicit factory: Factory[U, T[U]]): T[U] =
asCollection(implicitly[RowParser[U]].parse, Long.MaxValue)
protected def asCollection[U: RowParser, T[_]](maxRows: Long)(implicit factory: Factory[U, T[U]]): T[U] =
asCollection(implicitly[RowParser[U]].parse, maxRows)
@@ -22,10 +22,10 @@ trait CollectionsSqlResult { self: SqlResult =>
}
}

- builder.result
+ builder.result()
}

- def asPairCollection[U, V, T[_, _]]()(implicit p: RowParser[(U, V)], factory: Factory[(U, V), T[U, V]]): T[U, V] = {
+ def asPairCollection[U, V, T[_, _]](implicit p: RowParser[(U, V)], factory: Factory[(U, V), T[U, V]]): T[U, V] = {
asPairCollection(p.parse, Long.MaxValue)
}
def asPairCollection[U, V, T[_, _]](parser: SqlRow => (U, V))(implicit factory: Factory[(U, V), T[U, V]]): T[U, V] =
@@ -45,7 +45,7 @@ trait CollectionsSqlResult { self: SqlResult =>
}
}

- builder.result
+ builder.result()
}

}
62 changes: 33 additions & 29 deletions relate/src/main/scala/com/lucidchart/relate/Parameterizable.scala
@@ -12,12 +12,12 @@ trait Parameterizable[-A] {
/**
* Set the parameterized value at index {@code i} in the prepared statement to the {@code value}.
*/
- def set(statement: PreparedStatement, i: Int, value: A)
+ def set(statement: PreparedStatement, i: Int, value: A): Unit

/**
* Set the parameterized value at index {@code i} in the prepared statement to {@code null}.
*/
- def setNull(statement: PreparedStatement, i: Int)
+ def setNull(statement: PreparedStatement, i: Int): Unit
final def setOption(statement: PreparedStatement, i: Int, value: Option[A]) =
value.fold(setNull(statement, i))(set(statement, i, _))
}
@@ -39,38 +39,42 @@ object Parameterizable {

def from[A, B: Parameterizable](f: A => B) = implicitly[Parameterizable[B]].contraMap(f)

- implicit val array = apply(_.setArray(_, _: Array), _.setNull(_, Types.ARRAY))
+ implicit val array: Parameterizable[Array] = apply(_.setArray(_, _: Array), _.setNull(_, Types.ARRAY))
// ideally, this would be named jBigDecimal, but that wouldn't be backwards compatibility
- implicit val bigDecimal = apply(_.setBigDecimal(_, _: java.math.BigDecimal), _.setNull(_, Types.DECIMAL))
- implicit val scalaBigDecimal = apply(
+ implicit val bigDecimal: Parameterizable[java.math.BigDecimal] =
+ apply(_.setBigDecimal(_, _: java.math.BigDecimal), _.setNull(_, Types.DECIMAL))
+ implicit val scalaBigDecimal: Parameterizable[scala.math.BigDecimal] = apply(
(stmt: PreparedStatement, i: Int, v: scala.math.BigDecimal) => stmt.setBigDecimal(i, v.bigDecimal),
_.setNull(_, Types.DECIMAL)
)
- implicit val blob = apply(_.setBlob(_, _: Blob), _.setNull(_, Types.BLOB))
- implicit val boolean = apply(_.setBoolean(_, _: Boolean), _.setNull(_, Types.BOOLEAN))
- implicit val byte = apply(_.setByte(_, _: Byte), _.setNull(_, Types.TINYINT))
- implicit val bytes = apply(_.setBytes(_, _: scala.Array[Byte]), _.setNull(_, Types.VARBINARY))
- implicit val clob = apply(_.setClob(_, _: Clob), _.setNull(_, Types.CLOB))
- implicit val date = apply(_.setDate(_, _: Date), _.setNull(_, Types.DATE))
- implicit val double = apply(_.setDouble(_, _: Double), _.setNull(_, Types.DOUBLE))
- implicit val float = apply(_.setFloat(_, _: Float), _.setNull(_, Types.FLOAT))
- implicit val int = apply(_.setInt(_, _: Int), _.setNull(_, Types.INTEGER))
- implicit val long = apply(_.setLong(_, _: Long), _.setNull(_, Types.BIGINT))
- implicit val nClob = apply(_.setNClob(_, _: NClob), _.setNull(_, Types.NCLOB))
- implicit val ref = apply(_.setRef(_, _: Ref), _.setNull(_, Types.REF))
- implicit val rowId = apply(_.setRowId(_, _: RowId), _.setNull(_, Types.ROWID))
- implicit val short = apply(_.setShort(_, _: Short), _.setNull(_, Types.SMALLINT))
- implicit val sqlXml = apply(_.setSQLXML(_, _: SQLXML), _.setNull(_, Types.SQLXML))
- implicit val string = apply(_.setString(_, _: String), _.setNull(_, Types.VARCHAR))
- implicit val time = apply(_.setTime(_, _: Time), _.setNull(_, Types.TIME))
- implicit val timestamp = apply(_.setTimestamp(_, _: Timestamp), _.setNull(_, Types.TIMESTAMP))
- implicit val url = apply(_.setURL(_, _: URL), _.setNull(_, Types.DATALINK))
+ implicit val blob: Parameterizable[Blob] = apply(_.setBlob(_, _: Blob), _.setNull(_, Types.BLOB))
+ implicit val boolean: Parameterizable[Boolean] = apply(_.setBoolean(_, _: Boolean), _.setNull(_, Types.BOOLEAN))
+ implicit val byte: Parameterizable[Byte] = apply(_.setByte(_, _: Byte), _.setNull(_, Types.TINYINT))
+ implicit val bytes: Parameterizable[scala.Array[Byte]] =
+ apply(_.setBytes(_, _: scala.Array[Byte]), _.setNull(_, Types.VARBINARY))
+ implicit val clob: Parameterizable[Clob] = apply(_.setClob(_, _: Clob), _.setNull(_, Types.CLOB))
+ implicit val date: Parameterizable[Date] = apply(_.setDate(_, _: Date), _.setNull(_, Types.DATE))
+ implicit val double: Parameterizable[Double] = apply(_.setDouble(_, _: Double), _.setNull(_, Types.DOUBLE))
+ implicit val float: Parameterizable[Float] = apply(_.setFloat(_, _: Float), _.setNull(_, Types.FLOAT))
+ implicit val int: Parameterizable[Int] = apply(_.setInt(_, _: Int), _.setNull(_, Types.INTEGER))
+ implicit val long: Parameterizable[Long] = apply(_.setLong(_, _: Long), _.setNull(_, Types.BIGINT))
+ implicit val nClob: Parameterizable[NClob] = apply(_.setNClob(_, _: NClob), _.setNull(_, Types.NCLOB))
+ implicit val ref: Parameterizable[Ref] = apply(_.setRef(_, _: Ref), _.setNull(_, Types.REF))
+ implicit val rowId: Parameterizable[RowId] = apply(_.setRowId(_, _: RowId), _.setNull(_, Types.ROWID))
+ implicit val short: Parameterizable[Short] = apply(_.setShort(_, _: Short), _.setNull(_, Types.SMALLINT))
+ implicit val sqlXml: Parameterizable[SQLXML] = apply(_.setSQLXML(_, _: SQLXML), _.setNull(_, Types.SQLXML))
+ implicit val string: Parameterizable[String] = apply(_.setString(_, _: String), _.setNull(_, Types.VARCHAR))
+ implicit val time: Parameterizable[Time] = apply(_.setTime(_, _: Time), _.setNull(_, Types.TIME))
+ implicit val timestamp: Parameterizable[Timestamp] =
+ apply(_.setTimestamp(_, _: Timestamp), _.setNull(_, Types.TIMESTAMP))
+ implicit val url: Parameterizable[URL] = apply(_.setURL(_, _: URL), _.setNull(_, Types.DATALINK))

- implicit val javaDate = from((date: java.util.Date) => new Timestamp(date.getTime))
- implicit val localDate = from(Date.valueOf(_: LocalDate))
- implicit val localTime = from(Time.valueOf(_: LocalTime))
- implicit val instant = from(Timestamp.from)
- implicit val uuid = from { uuid: UUID =>
+ // val foo = implicitly[Parameterizable[Timestamp]]
+ implicit val javaDate: Parameterizable[java.util.Date] = timestamp.contraMap(d => new Timestamp(d.getTime))
+ implicit val localDate: Parameterizable[LocalDate] = date.contraMap(Date.valueOf)
+ implicit val localTime: Parameterizable[LocalTime] = time.contraMap(Time.valueOf)
+ implicit val instant: Parameterizable[java.time.Instant] = timestamp.contraMap(Timestamp.from)
+ implicit val uuid: Parameterizable[UUID] = from { uuid: UUID =>
val bb = ByteBuffer.wrap(new scala.Array[Byte](16))
bb.putLong(uuid.getMostSignificantBits)
bb.putLong(uuid.getLeastSignificantBits)
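Besides the type annotations, the last hunk switches most of the derived instances from `from(...)` to `contraMap` on the base instance they build on. `contraMap` is the usual contravariant-functor trick: to bind a `B`, first convert it to an `A` that already has a setter. A self-contained sketch of that pattern with a trimmed-down setter type (illustrative, not the library's actual trait):

```scala
import java.sql.{PreparedStatement, Timestamp}

object ContraMapSketch {
  // A single-abstract-method version of a parameter setter.
  trait Setter[-A] {
    def set(stmt: PreparedStatement, i: Int, value: A): Unit
    // contraMap: adapt a Setter[A] into a Setter[B] given B => A.
    def contraMap[B](f: B => A): Setter[B] = (stmt, i, value) => set(stmt, i, f(value))
  }

  val timestampSetter: Setter[Timestamp] = (stmt, i, v) => stmt.setTimestamp(i, v)

  // A java.util.Date is bound by first converting it to a java.sql.Timestamp,
  // mirroring the javaDate/localDate/localTime/instant definitions above.
  val javaDateSetter: Setter[java.util.Date] =
    timestampSetter.contraMap(d => new Timestamp(d.getTime))
}
```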