From 51a6650e3acd87bc1c4865912907601a01526ca8 Mon Sep 17 00:00:00 2001
From: Ludovic Claude
Date: Mon, 31 Dec 2018 01:18:51 +0100
Subject: [PATCH] [tests] Add unit test for FeaturesTableRepositoryDAO

---
 build.sbt                                     |   7 +-
 .../woken/core/model/database/sqlUtils.scala  |   3 +-
 .../woken/dao/FeaturesRepositoryDAO.scala     |  26 ++--
 .../ch/chuv/lren/woken/dao/DAOTest.scala      |  50 ++++++
 .../dao/FeaturesTableRepositoryDAOTest.scala  | 147 ++++++++++++++++++
 .../woken/dao/FeaturesTableTestSupport.scala  |  73 +++++++++
 .../woken/dao/MetadataRepositoryDAOTest.scala |  67 +++++++-
 .../lren/woken/service/TestServices.scala     |  53 +------
 8 files changed, 358 insertions(+), 68 deletions(-)
 create mode 100644 src/test/scala/ch/chuv/lren/woken/dao/DAOTest.scala
 create mode 100644 src/test/scala/ch/chuv/lren/woken/dao/FeaturesTableRepositoryDAOTest.scala
 create mode 100644 src/test/scala/ch/chuv/lren/woken/dao/FeaturesTableTestSupport.scala
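
Note on the testing approach: these tests run the Doobie-based DAOs against Acolyte, a mock
JDBC driver. A statement handler pattern-matches the SQL text produced by the DAO fragments
and answers with canned row lists, so both the generated SQL and the result mapping can be
checked without a PostgreSQL instance. The snippet below is a minimal, self-contained sketch
of that pattern, not code from this patch: the `features` table, the canned count of 99 and
the use of ExecutionContext.global as the blocking pool are illustrative placeholders
(DAOTest.withRepository uses a cached thread pool wrapped in a Resource instead).

    import java.sql.Connection

    import acolyte.jdbc.{ AcolyteDSL, QueryExecution }
    import acolyte.jdbc.RowLists.rowList1
    import acolyte.jdbc.Implicits._
    import cats.effect.{ ContextShift, IO }
    import cats.effect.internals.IOContextShift
    import doobie.implicits._
    import doobie.util.transactor.Transactor

    import scala.concurrent.ExecutionContext

    object AcolyteDoobieSketch extends App {

      // Intercept SQL at the JDBC level: any statement matching the query detection
      // pattern is routed to the handler and answered with a canned row list.
      val handler = AcolyteDSL.handleStatement
        .withQueryDetection("^SELECT ")
        .withQueryHandler { e: QueryExecution =>
          e.sql.trim match {
            case "SELECT count(*) FROM features" => rowList1(classOf[Int]) :+ 99
            case _                               => throw new IllegalArgumentException(s"Unhandled $e")
          }
        }

      // Wrap the mock connection in a Doobie transactor, as DAOTest.withRepository does.
      val conn: Connection = AcolyteDSL.connection(handler)
      implicit val cs: ContextShift[IO] = IOContextShift.global
      val xa: Transactor[IO] = Transactor.fromConnection[IO](conn, ExecutionContext.global)

      // The query goes through the regular Doobie machinery and is served by the handler.
      val count = sql"SELECT count(*) FROM features".query[Int].unique.transact(xa).unsafeRunSync()
      assert(count == 99)
    }

DAOTest.withRepository wraps these same steps in a cats-effect Resource so that every test
case gets a fresh transactor over the mock connection, and the handlers in
FeaturesTableRepositoryDAOTest throw on any SQL they do not expect, so unintended query
changes fail fast.
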
diff --git a/build.sbt b/build.sbt
index 4b676e8a..16d0921e 100644
--- a/build.sbt
+++ b/build.sbt
@@ -69,6 +69,7 @@ lazy val `woken` =
       library.akkaStreamTestkit      % Test,
       library.doobieScalaTest        % Test,
       library.catsScalaTest          % Test,
+      library.acolyte                % Test,
       library.dockerTestKitScalaTest % IntegrationTest,
       library.dockerTestKitSpotify   % IntegrationTest,
       library.diff                   % Test
@@ -118,6 +119,7 @@ lazy val library =
       val treelog       = "1.4.6"
       val config        = "1.3.3"
       val doobie        = "0.6.0"
+      val acolyte       = "1.0.50"
       val snakeyaml     = "1.23"
       val scalaCache    = "0.21.0"
       val dockerTestKit = "0.9.8"
@@ -150,7 +152,7 @@ lazy val library =
     val akkaHttpSwagger: ModuleID = "com.github.swagger-akka-http" %% "swagger-akka-http" % Version.akkaHttpSwagger
     val akkaManagementBase: ModuleID = "com.lightbend.akka.management" %% "akka-management" % Version.akkaManagement
     val akkaManagementClusterHttp: ModuleID = "com.lightbend.akka.management" %% "akka-management-cluster-http" % Version.akkaManagement excludeAll ExclusionRules.excludeAkkaClusterSharding
-
+
     // Kamon
     val kamon: ModuleID = "io.kamon" %% "kamon-core" % Version.kamon excludeAll ExclusionRules.excludeLogback
 
@@ -178,7 +180,8 @@ lazy val library =
     val doobieCore: ModuleID = "org.tpolecat" %% "doobie-core" % Version.doobie
    val doobiePostgres: ModuleID = "org.tpolecat" %% "doobie-postgres" % Version.doobie
     val doobieHikari: ModuleID = "org.tpolecat" %% "doobie-hikari" % Version.doobie
-    val doobieScalaTest: ModuleID = "org.tpolecat" %% "doobie-scalatest" % Version.doobie
+    val doobieScalaTest: ModuleID = "org.tpolecat" %% "doobie-scalatest" % Version.doobie
+    val acolyte: ModuleID = "org.eu.acolyte" %% "jdbc-scala" % Version.acolyte
     val yaml: ModuleID = "org.yaml" % "snakeyaml" % Version.snakeyaml
     val scalaCache: ModuleID = "com.github.cb372" %% "scalacache-core" % Version.scalaCache
     val dockerTestKitScalaTest: ModuleID = "com.whisk" %% "docker-testkit-scalatest" % Version.dockerTestKit excludeAll ExclusionRules.excludeLogback
diff --git a/src/main/scala/ch/chuv/lren/woken/core/model/database/sqlUtils.scala b/src/main/scala/ch/chuv/lren/woken/core/model/database/sqlUtils.scala
index 793763a1..2096c19b 100644
--- a/src/main/scala/ch/chuv/lren/woken/core/model/database/sqlUtils.scala
+++ b/src/main/scala/ch/chuv/lren/woken/core/model/database/sqlUtils.scala
@@ -32,6 +32,7 @@ object sqlUtils {
   def frType(col: TableColumn): Fragment = Fragment.const(toSql(col.sqlType))
   def frConst(d: Int): Fragment    = Fragment.const(d.toString)
   def frConst(d: Double): Fragment = Fragment.const(d.toString)
+  def frConst(s: String): Fragment = Fragment.const(s""""$s"""")
   def frNames(cols: List[TableColumn]): Fragment =
     Fragment.const(cols.map(_.quotedName).mkString(","))
 
@@ -76,6 +77,6 @@ object sqlUtils {
   }
 
   def frWhereFilter(filter: Option[FilterRule]): Fragment =
-    filter.fold(fr"")(f => Fragment.const(" WHERE " + f.withAdaptedFieldName.toSqlWhere))
+    filter.fold(fr"")(f => Fragment.const("WHERE " + f.withAdaptedFieldName.toSqlWhere))
 
 }
diff --git a/src/main/scala/ch/chuv/lren/woken/dao/FeaturesRepositoryDAO.scala b/src/main/scala/ch/chuv/lren/woken/dao/FeaturesRepositoryDAO.scala
index 2eeb4755..1834e3c4 100644
--- a/src/main/scala/ch/chuv/lren/woken/dao/FeaturesRepositoryDAO.scala
+++ b/src/main/scala/ch/chuv/lren/woken/dao/FeaturesRepositoryDAO.scala
@@ -129,21 +129,23 @@ abstract class BaseFeaturesTableRepositoryDAO[F[_]: Effect] extends FeaturesTabl
   protected def defaultDataset: String = table.table.name
 
   override def count: F[Int] = {
-    val q: Fragment = fr"SELECT count(*) FROM " ++ frName(table)
+    val q: Fragment = fr"SELECT count(*) FROM" ++ frName(table)
     q.query[Int]
       .unique
       .transact(xa)
   }
 
-  override def count(dataset: DatasetId): F[Int] =
+  override def count(datasetId: DatasetId): F[Int] =
     table.datasetColumn.fold {
-      if (dataset.code == table.quotedName || dataset.code == defaultDataset) count
+      if (datasetId.code == table.quotedName || datasetId.code == defaultDataset) count
       else 0.pure[F]
     } { datasetColumn =>
-      val q: Fragment = sql"SELECT count(*) FROM " ++ frName(table) ++ fr"WHERE " ++ frName(
-        datasetColumn
-      ) ++ fr" = ${dataset.code}"
-      q.query[Int]
+      def countDataset(dataset: String): Fragment =
+        fr"SELECT count(*) FROM" ++ frName(table) ++ fr"WHERE" ++ frName(
+          datasetColumn
+        ) ++ fr"= $dataset"
+      countDataset(datasetId.code)
+        .query[Int]
         .unique
         .transact(xa)
     }
@@ -155,7 +157,7 @@ abstract class BaseFeaturesTableRepositoryDAO[F[_]: Effect] extends FeaturesTabl
     * @return the number of rows in the dataset matching the filters, or the total number of rows if there are no filters
     */
   override def count(filters: Option[FilterRule]): F[Int] = {
-    val q: Fragment = fr"SELECT count(*) FROM " ++ frName(table) ++ frWhereFilter(filters)
+    val q: Fragment = fr"SELECT count(*) FROM" ++ frName(table) ++ frWhereFilter(filters)
     q.query[Int]
      .unique
       .transact(xa)
@@ -168,8 +170,8 @@ abstract class BaseFeaturesTableRepositoryDAO[F[_]: Effect] extends FeaturesTabl
     */
   override def countGroupBy(groupByColumn: TableColumn,
                             filters: Option[FilterRule]): F[Map[String, Int]] = {
-    val q: Fragment = fr"SELECT " ++ frName(groupByColumn) ++ fr", count(*) FROM " ++
-      frName(table) ++ frWhereFilter(filters) ++ fr" GROUP BY " ++ frName(groupByColumn)
+    val q: Fragment = fr"SELECT" ++ frName(groupByColumn) ++ fr", count(*) FROM" ++
+      frName(table) ++ frWhereFilter(filters) ++ fr"GROUP BY" ++ frName(groupByColumn)
     q.query[(String, Int)]
       .to[List]
       .transact(xa)
@@ -216,7 +218,7 @@ abstract class BaseFeaturesTableRepositoryDAO[F[_]: Effect] extends FeaturesTabl
 
 }
 
-class FeaturesTableRepositoryDAO[F[_]: Effect] private (
+class FeaturesTableRepositoryDAO[F[_]: Effect] private[dao] (
     override val xa: Transactor[F],
     override val table: FeaturesTableDescription,
     override val columns: FeaturesTableRepository.Headers,
@@ -245,7 +247,7 @@ object FeaturesTableRepositoryDAO {
                           wokenRepository: WokenRepository[F]): F[FeaturesTableRepository[F]] = {
     implicit val han: LogHandler = LogHandler.jdkLogHandler
 
-    HC.prepareStatement(s"SELECT * FROM ${table.quotedName}")(prepareHeaders)
+    HC.prepareStatement(s"SELECT * FROM ${table.quotedName} LIMIT 1")(prepareHeaders)
       .transact(xa)
      .map { headers =>
        new FeaturesTableRepositoryDAO(xa, table, headers, wokenRepository)
diff --git a/src/test/scala/ch/chuv/lren/woken/dao/DAOTest.scala b/src/test/scala/ch/chuv/lren/woken/dao/DAOTest.scala
new file mode 100644
index 00000000..4c8ba084
--- /dev/null
+++ b/src/test/scala/ch/chuv/lren/woken/dao/DAOTest.scala
@@ -0,0 +1,50 @@
+/*
+ * Copyright (C) 2017 LREN CHUV for Human Brain Project
+ *
+ * This program is free software: you can redistribute it and/or modify
+ * it under the terms of the GNU Affero General Public License as
+ * published by the Free Software Foundation, either version 3 of the
+ * License, or (at your option) any later version.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+ * GNU Affero General Public License for more details.
+ *
+ * You should have received a copy of the GNU Affero General Public License
+ * along with this program. If not, see <http://www.gnu.org/licenses/>.
+ */
+
+package ch.chuv.lren.woken.dao
+import java.sql.Connection
+
+import acolyte.jdbc.{ AbstractCompositeHandler, AcolyteDSL, ConnectionHandler }
+import cats.effect.{ ContextShift, IO, Resource }
+import cats.effect.internals.IOContextShift
+import doobie.util.ExecutionContexts
+import doobie.util.transactor.Transactor
+
+trait DAOTest[DAO <: Repository] {
+
+  def withRepository(sqlHandler: AbstractCompositeHandler[_],
+                     mkDAO: Transactor[IO] => DAO)(testCode: DAO => Any): Unit = {
+
+    val conn: Connection = AcolyteDSL.connection(sqlHandler)
+    implicit val cs: ContextShift[IO] = IOContextShift.global
+
+    // Resource yielding a Transactor[IO] wrapping the given `Connection`
+    def transactor(c: Connection): Resource[IO, Transactor[IO]] =
+      ExecutionContexts.cachedThreadPool[IO].flatMap { te =>
+        val t: Transactor[IO] = Transactor.fromConnection[IO](c, te)
+        Resource.liftF(t.configure(_ => IO.pure(t)))
+      }
+
+    transactor(conn)
+      .use { tr =>
+        val dao = mkDAO(tr)
+        IO.delay(testCode(dao))
+      }
+      .unsafeRunSync()
+  }
+
+}
diff --git a/src/test/scala/ch/chuv/lren/woken/dao/FeaturesTableRepositoryDAOTest.scala b/src/test/scala/ch/chuv/lren/woken/dao/FeaturesTableRepositoryDAOTest.scala
new file mode 100644
index 00000000..c3bf6327
--- /dev/null
+++ b/src/test/scala/ch/chuv/lren/woken/dao/FeaturesTableRepositoryDAOTest.scala
@@ -0,0 +1,147 @@
+/*
+ * Copyright (C) 2017 LREN CHUV for Human Brain Project
+ *
+ * This program is free software: you can redistribute it and/or modify
+ * it under the terms of the GNU Affero General Public License as
+ * published by the Free Software Foundation, either version 3 of the
+ * License, or (at your option) any later version.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+ * GNU Affero General Public License for more details.
+ *
+ * You should have received a copy of the GNU Affero General Public License
+ * along with this program. If not, see <http://www.gnu.org/licenses/>.
+ */
+
+package ch.chuv.lren.woken.dao
+
+import acolyte.jdbc._
+import acolyte.jdbc.RowLists.{ rowList1, rowList2 }
+import acolyte.jdbc.Implicits._
+import cats.effect.IO
+import ch.chuv.lren.woken.core.model.database.TableColumn
+import ch.chuv.lren.woken.messages.datasets.DatasetId
+import ch.chuv.lren.woken.messages.query.filters._
+import ch.chuv.lren.woken.messages.variables.SqlType
+import org.scalatest.{ Matchers, WordSpec }
+
+class FeaturesTableRepositoryDAOTest
+    extends WordSpec
+    with Matchers
+    with DAOTest[FeaturesTableRepositoryDAO[IO]]
+    with FeaturesTableTestSupport {
+
+  val wokenRepository = new WokenInMemoryRepository[IO]()
+
+  val sampleTableHandler: ScalaCompositeHandler = AcolyteDSL.handleStatement
+    .withQueryDetection("^SELECT ") // regex test from beginning
+    .withQueryHandler { e: QueryExecution =>
+      e.sql.trim match {
+
+        case """SELECT count(*) FROM "Sample"""" =>
+          rowList1(classOf[Int]) :+ 99
+
+        case """SELECT count(*) FROM "Sample" WHERE "score_test1" >= 2 AND "cognitive_task2" < 9""" =>
+          rowList1(classOf[Int]) :+ 5
+
+        case """SELECT "college_math" , count(*) FROM "Sample" GROUP BY "college_math"""" =>
+          (rowList2(classOf[String], classOf[Int])
+            :+ ("0", 47) // tuple as row
+            :+ ("1", 52))
+
+        case """SELECT "college_math" , count(*) FROM "Sample" WHERE "score_test1" >= 2 GROUP BY "college_math"""" =>
+          (rowList2(classOf[String], classOf[Int])
+            :+ ("0", 12) // tuple as row
+            :+ ("1", 22))
+
+        case _ => throw new IllegalArgumentException(s"Unhandled $e")
+      }
+    }
+
+  val cdeTableHandler: ScalaCompositeHandler = AcolyteDSL.handleStatement
+    .withQueryDetection("^SELECT ") // regex test from beginning
+    .withQueryHandler { e: QueryExecution =>
+      e.sql.trim match {
+
+        case """SELECT count(*) FROM "cde_features_a" WHERE "dataset" = ?"""
+            if e.parameters == List(DefinedParameter("datasetA", ParameterMetaData.Str)) =>
+          rowList1(classOf[Int]) :+ 5
+
+        case _ => throw new IllegalArgumentException(s"Unhandled $e")
+      }
+    }
+
+  "FeaturesTableRepositoryDAO" should {
+
+    "count all records in the table" in withRepository(
+      sampleTableHandler,
+      xa => new FeaturesTableRepositoryDAO[IO](xa, sampleTable, sampleHeaders, wokenRepository)
+    ) { dao =>
+      dao.count.unsafeRunSync() shouldBe 99
+    }
+
+    "count all records matching a dataset for a table without a dataset column" in withRepository(
+      sampleTableHandler,
+      xa => new FeaturesTableRepositoryDAO[IO](xa, sampleTable, sampleHeaders, wokenRepository)
+    ) { dao =>
+      dao.count(DatasetId(sampleTable.table.name)).unsafeRunSync() shouldBe 99
+      dao.count(DatasetId("other")).unsafeRunSync() shouldBe 0
+    }
+
+    "count all records matching a dataset for a table with a dataset column" in withRepository(
+      cdeTableHandler,
+      xa => new FeaturesTableRepositoryDAO[IO](xa, cdeTable, cdeHeaders, wokenRepository)
+    ) { dao =>
+      dao.count(DatasetId("datasetA")).unsafeRunSync() shouldBe 5
+    }
+
+    "count all records matching a filter" in withRepository(
+      sampleTableHandler,
+      xa => new FeaturesTableRepositoryDAO[IO](xa, sampleTable, sampleHeaders, wokenRepository)
+    ) { dao =>
+      val filter = CompoundFilterRule(
+        Condition.and,
+        rules = List(
+          SingleFilterRule("score_test1",
+                           "score_test1",
+                           "number",
+                           InputType.number,
+                           Operator.greaterOrEqual,
+                           List("2")),
+          SingleFilterRule("cognitive_task2",
+                           "cognitive_task2",
+                           "number",
+                           InputType.number,
+                           Operator.less,
+                           List("9"))
+        )
+      )
+      dao.count(None).unsafeRunSync() shouldBe 99
+      dao.count(Some(filter)).unsafeRunSync() shouldBe 5
+    }
+
+    "count records grouped by a field" in withRepository(
+      sampleTableHandler,
+      xa => new FeaturesTableRepositoryDAO[IO](xa, sampleTable, sampleHeaders, wokenRepository)
+    ) { dao =>
+      dao.countGroupBy(TableColumn("college_math", SqlType.int), None).unsafeRunSync() shouldBe Map(
+        "0" -> 47,
+        "1" -> 52
+      )
+
+      val filter = SingleFilterRule("score_test1",
+                                    "score_test1",
+                                    "number",
+                                    InputType.number,
+                                    Operator.greaterOrEqual,
+                                    List("2"))
+
+      dao
+        .countGroupBy(TableColumn("college_math", SqlType.int), Some(filter))
+        .unsafeRunSync() shouldBe Map("0" -> 12, "1" -> 22)
+    }
+
+  }
+}
diff --git a/src/test/scala/ch/chuv/lren/woken/dao/FeaturesTableTestSupport.scala b/src/test/scala/ch/chuv/lren/woken/dao/FeaturesTableTestSupport.scala
new file mode 100644
index 00000000..f89f3ed2
--- /dev/null
+++ b/src/test/scala/ch/chuv/lren/woken/dao/FeaturesTableTestSupport.scala
@@ -0,0 +1,73 @@
+/*
+ * Copyright (C) 2017 LREN CHUV for Human Brain Project
+ *
+ * This program is free software: you can redistribute it and/or modify
+ * it under the terms of the GNU Affero General Public License as
+ * published by the Free Software Foundation, either version 3 of the
+ * License, or (at your option) any later version.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+ * GNU Affero General Public License for more details.
+ *
+ * You should have received a copy of the GNU Affero General Public License
+ * along with this program. If not, see <http://www.gnu.org/licenses/>.
+ */
+
+package ch.chuv.lren.woken.dao
+import ch.chuv.lren.woken.core.model.database.{ FeaturesTableDescription, TableColumn, TableId }
+import ch.chuv.lren.woken.messages.variables.SqlType
+import spray.json.{ JsNumber, JsObject, JsString }
+
+trait FeaturesTableTestSupport {
+
+  val database = "features_db"
+  val sampleTable = FeaturesTableDescription(TableId(database, None, "Sample"),
+                                             Nil,
+                                             None,
+                                             validateSchema = false,
+                                             None,
+                                             0.67)
+  val sampleHeaders = List(
+    TableColumn("ID", SqlType.int),
+    TableColumn("stress_before_test1", SqlType.numeric),
+    TableColumn("score_test1", SqlType.numeric),
+    TableColumn("IQ", SqlType.numeric),
+    TableColumn("cognitive_task2", SqlType.numeric),
+    TableColumn("practice_task2", SqlType.numeric),
+    TableColumn("response_time_task2", SqlType.numeric),
+    TableColumn("college_math", SqlType.numeric),
+    TableColumn("score_math_course1", SqlType.numeric),
+    TableColumn("score_math_course2", SqlType.numeric)
+  )
+
+  val sampleData = List(
+    JsObject("ID" -> JsNumber(1),
+             "stress_before_test1" -> JsNumber(2.0),
+             "score_test1" -> JsNumber(1.0))
+  )
+
+  val cdeTable = FeaturesTableDescription(
+    TableId(database, None, "cde_features_a"),
+    List(TableColumn("subjectcode", SqlType.varchar)),
+    Some(TableColumn("dataset", SqlType.varchar)),
+    validateSchema = false,
+    None,
+    0.67
+  )
+  val cdeHeaders = List(
+    TableColumn("subjectcode", SqlType.varchar),
+    TableColumn("apoe4", SqlType.int),
+    TableColumn("lefthippocampus", SqlType.numeric),
+    TableColumn("dataset", SqlType.varchar)
+  )
+
+  val cdeData = List(
+    JsObject("subjectcode" -> JsString("p001"),
+             "apoe4" -> JsNumber(2),
+             "lefthippocampus" -> JsNumber(1.37),
+             "dataset" -> JsString("desd-synthdata"))
+  )
+
+}
diff --git a/src/test/scala/ch/chuv/lren/woken/dao/MetadataRepositoryDAOTest.scala b/src/test/scala/ch/chuv/lren/woken/dao/MetadataRepositoryDAOTest.scala
index 6c8b4bab..5b582966 100644
--- a/src/test/scala/ch/chuv/lren/woken/dao/MetadataRepositoryDAOTest.scala
+++ b/src/test/scala/ch/chuv/lren/woken/dao/MetadataRepositoryDAOTest.scala
@@ -17,7 +17,70 @@
 
 package ch.chuv.lren.woken.dao
 
+import java.sql.Connection
+
 import org.scalamock.scalatest.MockFactory
-import org.scalatest.{ Matchers, WordSpec }
+import org.scalatest.{ Matchers, WordSpec, fixture }
+import acolyte.jdbc.{ AcolyteDSL, QueryExecution, UpdateExecution, Driver => AcolyteDriver }
+import acolyte.jdbc.RowLists.{ rowList1, rowList3 }
+import acolyte.jdbc.Implicits._
+import cats.effect.{ Async, ContextShift, IO, Resource }
+import doobie.util.ExecutionContexts
+import doobie.util.transactor.Transactor
+import doobie.implicits._
+import cats.implicits._
+import cats.data._
+import cats._
+import cats.effect.internals.IOContextShift
+import ch.chuv.lren.woken.JsonUtils
+import ch.chuv.lren.woken.core.model.VariablesMeta
+import ch.chuv.lren.woken.messages.variables.GroupMetaData
+import ch.chuv.lren.woken.messages.variables.variablesProtocol._
+
+class MetadataRepositoryDAOTest extends WordSpec with Matchers with MockFactory with JsonUtils {
+
+  "VariablesMetaRepository" should {
+    // TODO: Acolyte should support pgObject and pgJsonb types
+    "put and get variables" ignore withVariablesMetaRepository { dao =>
+      val churnHierarchy = loadJson("/metadata/churn_variables.json").convertTo[GroupMetaData]
+      val churnVariablesMeta =
+        VariablesMeta(1, "churn", churnHierarchy, "CHURN", List("state", "custserv_calls", "churn"))
+
+      val updated = dao.put(churnVariablesMeta).unsafeRunSync()
+
+      updated shouldBe churnVariablesMeta
+
+      val retrieved = dao.get("CHURN").unsafeRunSync()
+
+      retrieved shouldBe churnVariablesMeta
+    }
+  }
+
+  def withVariablesMetaRepository(testCode: VariablesMetaRepositoryDAO[IO] => Any): Unit = {
+
+    val handlerA = AcolyteDSL.handleQuery { q =>
+      println(q.sql)
+
+      1
+
+    }
+
+    val conn: Connection = AcolyteDSL.connection(handlerA)
+    implicit val cs: ContextShift[IO] = IOContextShift.global
+
+    // Resource yielding a Transactor[IO] wrapping the given `Connection`
+    def transactor(c: Connection): Resource[IO, Transactor[IO]] =
+      ExecutionContexts.cachedThreadPool[IO].flatMap { te =>
+        val t: Transactor[IO] = Transactor.fromConnection[IO](c, te)
+        Resource.liftF(t.configure(_ => IO.pure(t)))
+      }
+
+    transactor(conn)
+      .use { tr =>
+        val dao = new VariablesMetaRepositoryDAO[IO](tr)
+        IO.delay(testCode(dao))
+      }
+      .unsafeRunSync()
+  }
 
-class MetadataRepositoryDAOTest extends WordSpec with Matchers with MockFactory {}
+}
diff --git a/src/test/scala/ch/chuv/lren/woken/service/TestServices.scala b/src/test/scala/ch/chuv/lren/woken/service/TestServices.scala
index 16ab57e3..fc564bdd 100644
--- a/src/test/scala/ch/chuv/lren/woken/service/TestServices.scala
+++ b/src/test/scala/ch/chuv/lren/woken/service/TestServices.scala
@@ -34,17 +34,16 @@ import ch.chuv.lren.woken.messages.query.{ ExperimentQuery, MiningQuery, QueryRe
 import ch.chuv.lren.woken.messages.remoting.RemoteLocation
 import ch.chuv.lren.woken.messages.variables.{
   GroupMetaData,
-  SqlType,
   VariablesForDatasetsQuery,
   VariablesForDatasetsResponse
 }
 import ch.chuv.lren.woken.messages.variables.variablesProtocol._
-import spray.json.{ JsNumber, JsObject, JsString }
+import spray.json.JsObject
 
 import scala.concurrent.ExecutionContext
 import scala.language.higherKinds
 
-object TestServices extends JsonUtils {
+object TestServices extends JsonUtils with FeaturesTableTestSupport {
 
   lazy val jobResultService: JobResultService[IO] = JobResultService(
     new WokenInMemoryRepository[IO]().jobResults
@@ -91,54 +90,6 @@ object TestServices extends JsonUtils {
 
   lazy val algorithmLibraryService: AlgorithmLibraryService = AlgorithmLibraryService()
 
-  val database = "features_db"
-  val sampleTable = FeaturesTableDescription(TableId(database, None, "Sample"),
-                                             Nil,
-                                             None,
-                                             validateSchema = false,
-                                             None,
-                                             0.67)
-  val sampleHeaders = List(
-    TableColumn("ID", SqlType.int),
-    TableColumn("stress_before_test1", SqlType.numeric),
-    TableColumn("score_test1", SqlType.numeric),
-    TableColumn("IQ", SqlType.numeric),
-    TableColumn("cognitive_task2", SqlType.numeric),
-    TableColumn("practice_task2", SqlType.numeric),
-    TableColumn("response_time_task2", SqlType.numeric),
-    TableColumn("college_math", SqlType.numeric),
-    TableColumn("score_math_course1", SqlType.numeric),
-    TableColumn("score_math_course2", SqlType.numeric)
-  )
-
-  val sampleData = List(
-    JsObject("ID" -> JsNumber(1),
-             "stress_before_test1" -> JsNumber(2.0),
-             "score_test1" -> JsNumber(1.0))
-  )
-
-  val cdeTable = FeaturesTableDescription(
-    TableId(database, None, "cde_features_a"),
-    List(TableColumn("subjectcode", SqlType.varchar)),
-    Some(TableColumn("dataset", SqlType.varchar)),
-    validateSchema = false,
-    None,
-    0.67
-  )
-  val cdeHeaders = List(
-    TableColumn("subjectcode", SqlType.varchar),
-    TableColumn("apoe4", SqlType.int),
-    TableColumn("lefthippocampus", SqlType.numeric),
-    TableColumn("dataset", SqlType.varchar)
-  )
-
-  val cdeData = List(
-    JsObject("subjectcode" -> JsString("p001"),
-             "apoe4" -> JsNumber(2),
-             "lefthippocampus" -> JsNumber(1.37),
-             "dataset" -> JsString("desd-synthdata"))
-  )
-
   val tables: Set[FeaturesTableDescription] = Set(sampleTable, cdeTable)
   val tablesContent: Map[TableId, (Headers, List[JsObject])] = Map(
     sampleTable.table -> (sampleHeaders -> sampleData),