Upgrade to Pro — share decks privately, control downloads, hide ads and more …

Anorm 2.6

Anorm 2.6

What's new in release 2.6 of Anorm, the simple SQL data access library for Scala.

Avatar for Cédric Chantepie

Cédric Chantepie

December 22, 2017
Tweet

More Decks by Cédric Chantepie

Other Decks in Programming

Transcript

  1. Streaming results Reactively processing row per row (not all in

    memory) • Akka Stream • Play Iteratee
  2. Akka Stream "org.playframework.anorm" %% "anorm-akka" % "2.6.0" • Query results

    as Source : anorm.AkkaStream.source
 
 AkkaStream.source(
 SQL"SELECT * FROM Test", // Query statement
 SqlParser.scalar[String], // Row parser
 ColumnAliaser.empty) // Column aliasing
  3. Akka Stream import java.sql.Connection import scala.concurrent.Future import akka.stream.Materializer import akka.stream.scaladsl.Source

    import anorm._ def resultSource(
 implicit m: Materializer, con: Connection
 ): Source[String, Future[Int]] =
 AkkaStream.source(
 SQL"SELECT * FROM Test", SqlParser.scalar[String], ColumnAliaser.empty)
  4. Akka Stream import java.sql.Connection import scala.concurrent.Future import akka.stream.Materializer import akka.stream.scaladsl.{

    Sink, Source } import anorm._ def resultSource(
 implicit m: Materializer, con: Connection
 ): Source[String, Future[Int]] =
 AkkaStream.source(
 SQL"SELECT * FROM Test", SqlParser.scalar[String], ColumnAliaser.empty) def countStrings()(implicit m: Materializer, con: Connection): Future[Int] = resultSource.runWith( Sink.fold[Int, String](0) { (count, str) => count + str.length }) Consume the result source
  5. Play Iteratee "org.playframework.anorm" %% "anorm-iteratee" % "2.6.0" • Query results

    as Enumerator : anorm.Iteratee.from
 
 Iteratee.from(
 SQL"SELECT * FROM Test", // Query statement
 SqlParser.scalar[String]) // Row parser
  6. Play Iteratee import java.sql.Connection import scala.concurrent.ExecutionContext.Implicits.global import anorm._ import play.api.libs.iteratee._

    def resultAsEnumerator(implicit con: Connection): Enumerator[String] = Iteratee.from(SQL"SELECT * FROM Test", SqlParser.scalar[String])
  7. Before case class Bar(v: Int, w: String, x: Float) val

    bar1 = Bar(1, "Foo", 0.2F) // Manually bind each class properties SQL"INSERT INTO bar(v, w, x) VALUES (${bar1.v}, ${bar1.w}, ${bar1.x})" SQL("INSERT INTO bar(v, w, x) VALUES ({v}, {w}, {x})").
 on("v" -> bar1.v, "w"-> bar1.w, "x"-> bar1.x)
  8. ToParameterList case class Bar(v: Int, w: String, x: Float) val

    bar1 = Bar(1, "Foo", 0.2F) // Convert all supported properties as parameters implicit val toParams1: ToParameterList[Bar] = Macro.toParameters[Bar] SQL("INSERT INTO bar(v, w, x) VALUES ({v}, {w}, {x})").
 bind(bar1) // Automatic binding using implicit toParams1
  9. ToParameterList case class Bar(v: Int, w: String, x: Float) val

    bar1 = Bar(1, "Foo", 0.2F) // Convert all supported properties as parameters implicit val toParams1: ToParameterList[Bar] = Macro.toParameters[Bar] val params1: List[NamedParameter] = toParams1(bar1) // Prepared parameters val names1: List[String] = params1.map(_.name) // --> List(v, w, x) val placeholders = names1.map { n => s"{$n}" } mkString ", " // --> « {v}, {w}, {x} » val generatedStmt = 
 s"""INSERT INTO bar(${names1 mkString ", "}) VALUES ($placeholders)""" 
 val generatedSql1 = SQL(generatedStmt).on(params1: _*) Generate the query statement according to the ToParameterList instance
  10. PostgreSQL conversions • Support Play JSON with PostgreSQL JSONB
 


    "org.playframework.anorm" %% "anorm-postgres" % "2.6.0"
  11. PostgreSQL JSON import java.sql._ import anorm._, postgresql._ import play.api.libs.json._ def

    jsObjectParameter(implicit con: Connection) = { val obj = Json.obj("bar" -> 1) SQL"""INSERT INTO test(id, json) VALUES (${"foo"}, $obj)""".executeUpdate() }
  12. PostgreSQL conversions • Support Play JSON with PostgreSQL JSONB •

    Passing JsValue or JsObject as parameters • Passing as parameters any type,
 that can be written as JSON using Writes :
 anorm.postgresql.asJson[T: Writes]
  13. PostgreSQL JSON import java.sql._ import anorm._, postgresql._ import play.api.libs.json._ sealed

    trait MyEnum case object Bar extends MyEnum case object Lorem extends MyEnum // Define the typeclass instance implicit val w: Writes[MyEnum] = Writes[MyEnum] { case Bar => Json.obj("bar" -> 1) case Lorem => Json.obj("lorem" -> 2) } def usingWrites(implicit con: Connection) = { val jsonParam = asJson[MyEnum](Lorem) SQL"INSERT INTO test(id, json) VALUES(${"x"}, ${jsonParam})".executeUpdate() }
  14. PostgreSQL conversions • Support Play JSON with PostgreSQL JSONB •

    Passing JsValue or JsObject as parameters • Passing as parameters any type,
 that can be written as JSON using Writes • Parsing textual or JSONB column as JsValue
  15. PostgreSQL JSON import java.sql._ import anorm._, postgresql._ import play.api.libs.json._ def

    selectJsValue(implicit con: Connection) = SQL"""SELECT json FROM test WHERE id = ${"foo"}""". as(SqlParser.scalar[JsValue].single) def selectJsObject(implicit con: Connection) = SQL"""SELECT json FROM test WHERE id = ${"foo"}""". as(SqlParser.scalar[JsObject].single)
  16. PostgreSQL conversions • Support Play JSON with PostgreSQL JSONB •

    Passing JsValue or JsObject as parameters • Passing as parameters any type,
 that can be written as JSON using Writes • Parsing textual or JSONB column as JsValue • Parsing any type stored as JSON column, 
 provided appropriate Reads: 
 anorm.postgresql.fromJson[T: Reads]
  17. PostgreSQL JSON import java.sql._ import anorm._, postgresql._ import play.api.libs.json._ sealed

    trait MyEnum case object Bar extends MyEnum case object Lorem extends MyEnum // Define the typeclass instance implicit val r: Reads[MyEnum] = Reads[MyEnum] { js => (js \ "bar").validate[Int].map { case 1 => Bar case _ => Lorem } } def selectFromJson(implicit con: Connection) = SQL"""SELECT json FROM test WHERE id = ${"foo"}""". as(SqlParser.scalar(fromJson[MyEnum]).single)
  18. PostgreSQL conversions • Support Play JSON with PostgreSQL JSONB •

    UUID: Passing as parameter & Parsing from results
  19. PostgreSQL UUID import java.util.UUID import java.sql._ import anorm._, postgresql._ def

    insertUUID(implicit con: Connection) = { SQL"INSERT INTO test_seq VALUES(${UUID.randomUUID()})".executeUpdate() // UUID is passed as PostgreSQL UUID type } def selectUUID(implicit con: Connection) = SQL"SELECT * FROM test_seq".as(SqlParser.scalar[UUID].*)
  20. offsetParser case class Foo(name: String, age: Int) import anorm._ val

    findAll = SQL"SELECT uninteresting_col, skip_col, name, age FROM foo" val fooParser = Macro.offsetParser[Foo](2) // ignore uninteresting_col & skip_col
  21. namedParser import anorm.{ Macro, RowParser }, Macro.ColumnNaming case class Info(name:

    String, lastModified: Long) val parser: RowParser[Info] = Macro.namedParser[Info](ColumnNaming.SnakeCase) /* Generated as: get[String]("name") ~ get[Long]("last_modified") map { case name ~ year => Info(name, year) } */
  22. sealedParser import anorm._ sealed trait Family case class Bar(v: Int)

    extends Family case object Lorem extends Family // First, RowParser instances for all the subtypes must be provided, // either by macros or by custom parsers implicit val barParser = Macro.namedParser[Bar] implicit val loremParser = RowParser[Lorem.type] { anyRowDiscriminatedAsLorem => Success(Lorem) } val familyParser = Macro.sealedParser[Family]
  23. sealedParser // Generate a parser as following... val generated: RowParser[Family]

    = SqlParser.str("classname").flatMap { case "scalaguide.sql.MacroFixtures.Bar" => implicitly[RowParser[Bar]]
 case "scalaguide.sql.MacroFixtures.Lorem" => implicitly[RowParser[Lorem.type]] 
 case _ => RowParser.failed[Family](Error(SqlMappingError(".."))) }
  24. ColumnAliaser => SELECT * FROM test t1 JOIN (SELECT *

    FROM test WHERE parent_id ISNULL) t2 ON t1.parent_id=t2.id WHERE t1.id='bar'; id | value | parent_id | id | value | parent_id -----+--------+-----------+-----+--------+----------- bar | value2 | foo | foo | value1 | (1 row) Consider the following query results:
  25. ColumnAliaser import anorm._ val parser: RowParser[(String, String, String, Option[String])] =

    
 SqlParser.str("id") ~ SqlParser.str("value") ~ 
 SqlParser.str("parent.value") ~ 
 SqlParser.str("parent.parent_id").? map(SqlParser.flatten) val aliaser: ColumnAliaser = 
 ColumnAliaser.withPattern((3 to 6).toSet, "parent.") val res: Try[(String, String, String, Option[String])] = 
 SQL"""SELECT * FROM test t1 
 JOIN (SELECT * FROM test WHERE parent_id ISNULL) 
 t2 ON t1.parent_id=t2.id WHERE t1.id=${"bar"}""".
 asTry(parser.single, aliaser)
  26. executeInsert1 // Choose 'generatedCol' and 'colB' from the generatedKeys val

    keys1 = SQL("INSERT INTO Test(x) VALUES ({x})"). on("x" -> "y").executeInsert1("generatedCol", "colB")() val keys2 = SQL("INSERT INTO Test(x) VALUES ({x})"). on("x" -> "y").executeInsert1("generatedCol")(scalar[String].singleOpt)
  27. Compatibility • Java 8 & 9 • Scala 2.11 &

    2.12 • Akka 2.4.x (2.4.10)