Skip to content

Commit

Permalink
[tests] Add unit test for FeaturesTableRepositoryDAO
Browse files Browse the repository at this point in the history
  • Loading branch information
Ludovic Claude committed Dec 31, 2018
1 parent d08ec99 commit 51a6650
Show file tree
Hide file tree
Showing 8 changed files with 358 additions and 68 deletions.
7 changes: 5 additions & 2 deletions build.sbt
Original file line number Diff line number Diff line change
Expand Up @@ -69,6 +69,7 @@ lazy val `woken` =
library.akkaStreamTestkit % Test,
library.doobieScalaTest % Test,
library.catsScalaTest % Test,
library.acolyte % Test,
library.dockerTestKitScalaTest % IntegrationTest,
library.dockerTestKitSpotify % IntegrationTest,
library.diff % Test
Expand Down Expand Up @@ -118,6 +119,7 @@ lazy val library =
val treelog = "1.4.6"
val config = "1.3.3"
val doobie = "0.6.0"
val acolyte = "1.0.50"
val snakeyaml = "1.23"
val scalaCache = "0.21.0"
val dockerTestKit = "0.9.8"
Expand Down Expand Up @@ -150,7 +152,7 @@ lazy val library =
val akkaHttpSwagger: ModuleID = "com.github.swagger-akka-http" %% "swagger-akka-http" % Version.akkaHttpSwagger
val akkaManagementBase: ModuleID = "com.lightbend.akka.management" %% "akka-management" % Version.akkaManagement
val akkaManagementClusterHttp: ModuleID = "com.lightbend.akka.management" %% "akka-management-cluster-http" % Version.akkaManagement excludeAll ExclusionRules.excludeAkkaClusterSharding


// Kamon
val kamon: ModuleID = "io.kamon" %% "kamon-core" % Version.kamon excludeAll ExclusionRules.excludeLogback
Expand Down Expand Up @@ -178,7 +180,8 @@ lazy val library =
val doobieCore: ModuleID = "org.tpolecat" %% "doobie-core" % Version.doobie
val doobiePostgres: ModuleID = "org.tpolecat" %% "doobie-postgres" % Version.doobie
val doobieHikari: ModuleID = "org.tpolecat" %% "doobie-hikari" % Version.doobie
val doobieScalaTest: ModuleID = "org.tpolecat" %% "doobie-scalatest" % Version.doobie
val doobieScalaTest: ModuleID = "org.tpolecat" %% "doobie-scalatest" % Version.doobie
val acolyte: ModuleID = "org.eu.acolyte" %% "jdbc-scala" % Version.acolyte
val yaml: ModuleID = "org.yaml" % "snakeyaml" % Version.snakeyaml
val scalaCache: ModuleID = "com.github.cb372" %% "scalacache-core" % Version.scalaCache
val dockerTestKitScalaTest: ModuleID = "com.whisk" %% "docker-testkit-scalatest" % Version.dockerTestKit excludeAll ExclusionRules.excludeLogback
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -32,6 +32,7 @@ object sqlUtils {
def frType(col: TableColumn): Fragment = Fragment.const(toSql(col.sqlType))
def frConst(d: Int): Fragment = Fragment.const(d.toString)
def frConst(d: Double): Fragment = Fragment.const(d.toString)
def frConst(s: String): Fragment = Fragment.const(s""""$s"""")

def frNames(cols: List[TableColumn]): Fragment =
Fragment.const(cols.map(_.quotedName).mkString(","))
Expand Down Expand Up @@ -76,6 +77,6 @@ object sqlUtils {
}

def frWhereFilter(filter: Option[FilterRule]): Fragment =
filter.fold(fr"")(f => Fragment.const(" WHERE " + f.withAdaptedFieldName.toSqlWhere))
filter.fold(fr"")(f => Fragment.const("WHERE " + f.withAdaptedFieldName.toSqlWhere))

}
26 changes: 14 additions & 12 deletions src/main/scala/ch/chuv/lren/woken/dao/FeaturesRepositoryDAO.scala
Original file line number Diff line number Diff line change
Expand Up @@ -129,21 +129,23 @@ abstract class BaseFeaturesTableRepositoryDAO[F[_]: Effect] extends FeaturesTabl
protected def defaultDataset: String = table.table.name

override def count: F[Int] = {
val q: Fragment = fr"SELECT count(*) FROM " ++ frName(table)
val q: Fragment = fr"SELECT count(*) FROM" ++ frName(table)
q.query[Int]
.unique
.transact(xa)
}

override def count(dataset: DatasetId): F[Int] =
override def count(datasetId: DatasetId): F[Int] =
table.datasetColumn.fold {
if (dataset.code == table.quotedName || dataset.code == defaultDataset) count
if (datasetId.code == table.quotedName || datasetId.code == defaultDataset) count
else 0.pure[F]
} { datasetColumn =>
val q: Fragment = sql"SELECT count(*) FROM " ++ frName(table) ++ fr"WHERE " ++ frName(
datasetColumn
) ++ fr" = ${dataset.code}"
q.query[Int]
def countDataset(dataset: String): Fragment =
fr"SELECT count(*) FROM" ++ frName(table) ++ fr"WHERE" ++ frName(
datasetColumn
) ++ fr"= $dataset"
countDataset(datasetId.code)
.query[Int]
.unique
.transact(xa)
}
Expand All @@ -155,7 +157,7 @@ abstract class BaseFeaturesTableRepositoryDAO[F[_]: Effect] extends FeaturesTabl
* @return the number of rows in the dataset matching the filters, or the total number of rows if there are no filters
*/
override def count(filters: Option[FilterRule]): F[Int] = {
val q: Fragment = fr"SELECT count(*) FROM " ++ frName(table) ++ frWhereFilter(filters)
val q: Fragment = fr"SELECT count(*) FROM" ++ frName(table) ++ frWhereFilter(filters)
q.query[Int]
.unique
.transact(xa)
Expand All @@ -168,8 +170,8 @@ abstract class BaseFeaturesTableRepositoryDAO[F[_]: Effect] extends FeaturesTabl
*/
override def countGroupBy(groupByColumn: TableColumn,
filters: Option[FilterRule]): F[Map[String, Int]] = {
val q: Fragment = fr"SELECT " ++ frName(groupByColumn) ++ fr", count(*) FROM " ++
frName(table) ++ frWhereFilter(filters) ++ fr" GROUP BY " ++ frName(groupByColumn)
val q: Fragment = fr"SELECT" ++ frName(groupByColumn) ++ fr", count(*) FROM" ++
frName(table) ++ frWhereFilter(filters) ++ fr"GROUP BY" ++ frName(groupByColumn)
q.query[(String, Int)]
.to[List]
.transact(xa)
Expand Down Expand Up @@ -216,7 +218,7 @@ abstract class BaseFeaturesTableRepositoryDAO[F[_]: Effect] extends FeaturesTabl

}

class FeaturesTableRepositoryDAO[F[_]: Effect] private (
class FeaturesTableRepositoryDAO[F[_]: Effect] private[dao] (
override val xa: Transactor[F],
override val table: FeaturesTableDescription,
override val columns: FeaturesTableRepository.Headers,
Expand Down Expand Up @@ -245,7 +247,7 @@ object FeaturesTableRepositoryDAO {
wokenRepository: WokenRepository[F]): F[FeaturesTableRepository[F]] = {
implicit val han: LogHandler = LogHandler.jdkLogHandler

HC.prepareStatement(s"SELECT * FROM ${table.quotedName}")(prepareHeaders)
HC.prepareStatement(s"SELECT * FROM ${table.quotedName} LIMIT 1")(prepareHeaders)
.transact(xa)
.map { headers =>
new FeaturesTableRepositoryDAO(xa, table, headers, wokenRepository)
Expand Down
50 changes: 50 additions & 0 deletions src/test/scala/ch/chuv/lren/woken/dao/DAOTest.scala
Original file line number Diff line number Diff line change
@@ -0,0 +1,50 @@
/*
* Copyright (C) 2017 LREN CHUV for Human Brain Project
*
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU Affero General Public License as
* published by the Free Software Foundation, either version 3 of the
* License, or (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU Affero General Public License for more details.
*
* You should have received a copy of the GNU Affero General Public License
* along with this program. If not, see <http://www.gnu.org/licenses/>.
*/

package ch.chuv.lren.woken.dao
import java.sql.Connection

import acolyte.jdbc.{ AbstractCompositeHandler, AcolyteDSL, ConnectionHandler }
import cats.effect.{ ContextShift, IO, Resource }
import cats.effect.internals.IOContextShift
import doobie.util.ExecutionContexts
import doobie.util.transactor.Transactor

trait DAOTest[DAO <: Repository] {

  /** Runs `testCode` against a DAO wired to an Acolyte mock JDBC connection.
    *
    * @param sqlHandler Acolyte handler intercepting the SQL issued by the DAO
    * @param mkDAO      factory building the DAO under test from a doobie transactor
    * @param testCode   assertions to run against the constructed DAO
    */
  def withRepository(sqlHandler: AbstractCompositeHandler[_],
                     mkDAO: Transactor[IO] => DAO)(testCode: DAO => Any): Unit = {

    implicit val contextShift: ContextShift[IO] = IOContextShift.global

    val mockConnection: Connection = AcolyteDSL.connection(sqlHandler)

    // Acquire a cached thread pool and wrap the mock connection in a doobie
    // transactor; both are released when the Resource is closed.
    val transactorResource: Resource[IO, Transactor[IO]] =
      for {
        pool <- ExecutionContexts.cachedThreadPool[IO]
        xa = Transactor.fromConnection[IO](mockConnection, pool)
        configured <- Resource.liftF(xa.configure(_ => IO.pure(xa)))
      } yield configured

    transactorResource
      .use(xa => IO.delay(testCode(mkDAO(xa))))
      .unsafeRunSync()
  }

}
Original file line number Diff line number Diff line change
@@ -0,0 +1,147 @@
/*
* Copyright (C) 2017 LREN CHUV for Human Brain Project
*
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU Affero General Public License as
* published by the Free Software Foundation, either version 3 of the
* License, or (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU Affero General Public License for more details.
*
* You should have received a copy of the GNU Affero General Public License
* along with this program. If not, see <http://www.gnu.org/licenses/>.
*/

package ch.chuv.lren.woken.dao

import acolyte.jdbc._
import acolyte.jdbc.RowLists.{ rowList1, rowList2 }
import acolyte.jdbc.Implicits._
import cats.effect.IO
import ch.chuv.lren.woken.core.model.database.TableColumn
import ch.chuv.lren.woken.messages.datasets.DatasetId
import ch.chuv.lren.woken.messages.query.filters._
import ch.chuv.lren.woken.messages.variables.SqlType
import org.scalatest.{ Matchers, WordSpec }

/** Unit tests for `FeaturesTableRepositoryDAO`, backed by Acolyte mock JDBC
  * connections: each handler below intercepts the exact SQL text emitted by the
  * DAO and returns canned result rows, so no real database is required.
  */
class FeaturesTableRepositoryDAOTest
    extends WordSpec
    with Matchers
    with DAOTest[FeaturesTableRepositoryDAO[IO]]
    with FeaturesTableTestSupport {

  // In-memory implementation suffices: these tests exercise only the
  // features-table queries, not the woken metadata store.
  val wokenRepository = new WokenInMemoryRepository[IO]()

  // Handler for the "Sample" table (no dataset column). Each match arm must be
  // byte-identical to the SQL the DAO generates — including spacing quirks such
  // as the space before ',' that presumably comes from doobie Fragment
  // concatenation (TODO confirm against frName/frWhereFilter output).
  val sampleTableHandler: ScalaCompositeHandler = AcolyteDSL.handleStatement
    .withQueryDetection("^SELECT ") // regex test from beginning
    .withQueryHandler { e: QueryExecution =>
      e.sql.trim match {

        // unfiltered row count
        case """SELECT count(*) FROM "Sample"""" =>
          rowList1(classOf[Int]) :+ 99

        // row count with a compound (AND) filter
        case """SELECT count(*) FROM "Sample" WHERE "score_test1" >= 2 AND "cognitive_task2" < 9""" =>
          rowList1(classOf[Int]) :+ 5

        // unfiltered group-by count
        case """SELECT "college_math" , count(*) FROM "Sample" GROUP BY "college_math"""" =>
          (rowList2(classOf[String], classOf[Int])
          :+ ("0", 47) // tuple as row
          :+ ("1", 52))

        // filtered group-by count
        case """SELECT "college_math" , count(*) FROM "Sample" WHERE "score_test1" >= 2 GROUP BY "college_math"""" =>
          (rowList2(classOf[String], classOf[Int])
          :+ ("0", 12) // tuple as row
          :+ ("1", 22))

        // any other query means the DAO changed its SQL (or this expectation is stale)
        case _ => throw new IllegalArgumentException(s"Unhandled $e")
      }
    }

  // Handler for the CDE table, which has a "dataset" column; the dataset value
  // arrives as a bound statement parameter, checked via the pattern guard.
  val cdeTableHandler: ScalaCompositeHandler = AcolyteDSL.handleStatement
    .withQueryDetection("^SELECT ") // regex test from beginning
    .withQueryHandler { e: QueryExecution =>
      e.sql.trim match {

        case """SELECT count(*) FROM "cde_features_a" WHERE "dataset" = ?"""
            if e.parameters == List(DefinedParameter("datasetA", ParameterMetaData.Str)) =>
          rowList1(classOf[Int]) :+ 5

        case _ => throw new IllegalArgumentException(s"Unhandled $e")
      }
    }

  "FeaturesTableRepositoryDAO" should {

    "count all records in the table" in withRepository(
      sampleTableHandler,
      xa => new FeaturesTableRepositoryDAO[IO](xa, sampleTable, sampleHeaders, wokenRepository)
    ) { dao =>
      dao.count.unsafeRunSync() shouldBe 99
    }

    // Without a dataset column, the table's own name acts as its only dataset:
    // a matching id returns the full count, anything else returns 0.
    "count all records matching a dataset for a table without a dataset column" in withRepository(
      sampleTableHandler,
      xa => new FeaturesTableRepositoryDAO[IO](xa, sampleTable, sampleHeaders, wokenRepository)
    ) { dao =>
      dao.count(DatasetId(sampleTable.table.name)).unsafeRunSync() shouldBe 99
      dao.count(DatasetId("other")).unsafeRunSync() shouldBe 0
    }

    "count all records matching a dataset for a table with a dataset column" in withRepository(
      cdeTableHandler,
      xa => new FeaturesTableRepositoryDAO[IO](xa, cdeTable, cdeHeaders, wokenRepository)
    ) { dao =>
      dao.count(DatasetId("datasetA")).unsafeRunSync() shouldBe 5
    }

    "count all records matching a filter" in withRepository(
      sampleTableHandler,
      xa => new FeaturesTableRepositoryDAO[IO](xa, sampleTable, sampleHeaders, wokenRepository)
    ) { dao =>
      // Builds: score_test1 >= 2 AND cognitive_task2 < 9
      val filter = CompoundFilterRule(
        Condition.and,
        rules = List(
          SingleFilterRule("score_test1",
                           "score_test1",
                           "number",
                           InputType.number,
                           Operator.greaterOrEqual,
                           List("2")),
          SingleFilterRule("cognitive_task2",
                           "cognitive_task2",
                           "number",
                           InputType.number,
                           Operator.less,
                           List("9"))
        )
      )
      // No filter falls back to the plain count
      dao.count(None).unsafeRunSync() shouldBe 99
      dao.count(Some(filter)).unsafeRunSync() shouldBe 5
    }

    "count records grouped by a field" in withRepository(
      sampleTableHandler,
      xa => new FeaturesTableRepositoryDAO[IO](xa, sampleTable, sampleHeaders, wokenRepository)
    ) { dao =>
      dao.countGroupBy(TableColumn("college_math", SqlType.int), None).unsafeRunSync() shouldBe Map(
        "0" -> 47,
        "1" -> 52
      )

      // Builds: score_test1 >= 2
      val filter = SingleFilterRule("score_test1",
                                    "score_test1",
                                    "number",
                                    InputType.number,
                                    Operator.greaterOrEqual,
                                    List("2"))

      dao
        .countGroupBy(TableColumn("college_math", SqlType.int), Some(filter))
        .unsafeRunSync() shouldBe Map("0" -> 12, "1" -> 22)
    }

  }
}
Original file line number Diff line number Diff line change
@@ -0,0 +1,73 @@
/*
* Copyright (C) 2017 LREN CHUV for Human Brain Project
*
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU Affero General Public License as
* published by the Free Software Foundation, either version 3 of the
* License, or (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU Affero General Public License for more details.
*
* You should have received a copy of the GNU Affero General Public License
* along with this program. If not, see <http://www.gnu.org/licenses/>.
*/

package ch.chuv.lren.woken.dao
import ch.chuv.lren.woken.core.model.database.{ FeaturesTableDescription, TableColumn, TableId }
import ch.chuv.lren.woken.messages.variables.SqlType
import spray.json.{ JsNumber, JsObject, JsString }

trait FeaturesTableTestSupport {

  /** Name of the features database used by the fixtures below. */
  val database = "features_db"

  /** "Sample" table: no primary key, no dataset column. */
  val sampleTable = FeaturesTableDescription(
    TableId(database, None, "Sample"),
    Nil,
    None,
    validateSchema = false,
    None,
    0.67
  )

  // Every sample column except the integer identifier is a numeric score.
  val sampleHeaders: List[TableColumn] =
    TableColumn("ID", SqlType.int) :: List(
      "stress_before_test1",
      "score_test1",
      "IQ",
      "cognitive_task2",
      "practice_task2",
      "response_time_task2",
      "college_math",
      "score_math_course1",
      "score_math_course2"
    ).map(name => TableColumn(name, SqlType.numeric))

  /** A single partial row of the Sample table, as JSON. */
  val sampleData = List(
    JsObject(
      "ID"                  -> JsNumber(1),
      "stress_before_test1" -> JsNumber(2.0),
      "score_test1"         -> JsNumber(1.0)
    )
  )

  /** CDE table: keyed by subjectcode, with a "dataset" discriminator column. */
  val cdeTable = FeaturesTableDescription(
    TableId(database, None, "cde_features_a"),
    List(TableColumn("subjectcode", SqlType.varchar)),
    Some(TableColumn("dataset", SqlType.varchar)),
    validateSchema = false,
    None,
    0.67
  )

  val cdeHeaders: List[TableColumn] = List(
    TableColumn("subjectcode", SqlType.varchar),
    TableColumn("apoe4", SqlType.int),
    TableColumn("lefthippocampus", SqlType.numeric),
    TableColumn("dataset", SqlType.varchar)
  )

  /** A single row of the CDE table, as JSON. */
  val cdeData = List(
    JsObject(
      "subjectcode"     -> JsString("p001"),
      "apoe4"           -> JsNumber(2),
      "lefthippocampus" -> JsNumber(1.37),
      "dataset"         -> JsString("desd-synthdata")
    )
  )

}
Loading

0 comments on commit 51a6650

Please sign in to comment.