diff --git a/parquet/src/main/scala/magnolify/parquet/MagnolifyParquetProperties.scala b/parquet/src/main/scala/magnolify/parquet/MagnolifyParquetProperties.scala
index e3c1824e..7f576816 100644
--- a/parquet/src/main/scala/magnolify/parquet/MagnolifyParquetProperties.scala
+++ b/parquet/src/main/scala/magnolify/parquet/MagnolifyParquetProperties.scala
@@ -18,8 +18,10 @@ package magnolify.parquet
 
 import org.apache.hadoop.conf.Configuration
 
-import java.util.Objects
-
+/**
+ * Properties for reading and writing Magnolify ParquetType classes,
+ * configurable via a Hadoop [[Configuration]] instance.
+ */
 object MagnolifyParquetProperties {
   val WriteGroupedArrays: String = "magnolify.parquet.write-grouped-arrays"
   val WriteGroupedArraysDefault: Boolean = false
@@ -32,8 +34,6 @@ object MagnolifyParquetProperties {
 
   // Hash any Configuration values that might affect schema creation to use as part of Schema cache key
   private[parquet] def hashValues(conf: Configuration): Int = {
-    Objects.hash(
-      Option(conf.get(WriteGroupedArrays)).map(_.toBoolean).getOrElse(WriteGroupedArraysDefault)
-    )
+    Option(conf.get(WriteGroupedArrays)).map(_.toBoolean).getOrElse(WriteGroupedArraysDefault).hashCode()
   }
 }
diff --git a/parquet/src/main/scala/magnolify/parquet/ParquetField.scala b/parquet/src/main/scala/magnolify/parquet/ParquetField.scala
index e9585bd8..670b21ac 100644
--- a/parquet/src/main/scala/magnolify/parquet/ParquetField.scala
+++ b/parquet/src/main/scala/magnolify/parquet/ParquetField.scala
@@ -29,9 +29,9 @@ import org.apache.parquet.io.api._
 import org.apache.parquet.schema.PrimitiveType.PrimitiveTypeName
 import org.apache.parquet.schema.Type.Repetition
 import org.apache.parquet.schema.{LogicalTypeAnnotation, Type, Types}
-import org.typelevel.scalaccompat.annotation.nowarn
 
-import scala.annotation.implicitNotFound
+import scala.annotation.{implicitNotFound, nowarn}
+import scala.collection.compat._
 import scala.collection.concurrent
 
 sealed trait ParquetField[T] extends Serializable {