diff --git a/.github/workflows/build.yml b/.github/workflows/build.yml
index 3b6029dc..e4732753 100644
--- a/.github/workflows/build.yml
+++ b/.github/workflows/build.yml
@@ -39,8 +39,9 @@ jobs:
       - name: Publish plugins to maven local
         run: >
           ./gradlew
-          clean
+          compiler-plugin:clean
           compiler-plugin:publishToMavenLocal
+          gradle-plugin:clean
           gradle-plugin:publishToMavenLocal
 
       - name: Build with Gradle
diff --git a/kotlin-spark-api/src/main/kotlin/org/jetbrains/kotlinx/spark/api/Dataset.kt b/kotlin-spark-api/src/main/kotlin/org/jetbrains/kotlinx/spark/api/Dataset.kt
index 3f04e166..6b7b0af1 100644
--- a/kotlin-spark-api/src/main/kotlin/org/jetbrains/kotlinx/spark/api/Dataset.kt
+++ b/kotlin-spark-api/src/main/kotlin/org/jetbrains/kotlinx/spark/api/Dataset.kt
@@ -92,7 +92,7 @@ inline fun <reified T> List<T>.toDF(spark: SparkSession, vararg colNames: String
  * Utility method to create dataset from list
  */
 inline fun <reified T> Array<T>.toDS(spark: SparkSession): Dataset<T> =
-    toList().toDS(spark)
+    asList().toDS(spark)
 
 /**
  * Utility method to create dataframe from list
diff --git a/kotlin-spark-api/src/main/kotlin/org/jetbrains/kotlinx/spark/api/Rdd.kt b/kotlin-spark-api/src/main/kotlin/org/jetbrains/kotlinx/spark/api/Rdd.kt
index 8ba90d25..0ab701b4 100644
--- a/kotlin-spark-api/src/main/kotlin/org/jetbrains/kotlinx/spark/api/Rdd.kt
+++ b/kotlin-spark-api/src/main/kotlin/org/jetbrains/kotlinx/spark/api/Rdd.kt
@@ -11,7 +11,7 @@ import java.io.Serializable
 fun <T> JavaSparkContext.rddOf(
     vararg elements: T,
     numSlices: Int = defaultParallelism(),
-): JavaRDD<T> = parallelize(elements.toList(), numSlices)
+): JavaRDD<T> = parallelize(elements.asList(), numSlices)
 
 /**
  * Utility method to create an RDD from a list.
diff --git a/kotlin-spark-api/src/main/kotlin/org/jetbrains/kotlinx/spark/api/SparkSession.kt b/kotlin-spark-api/src/main/kotlin/org/jetbrains/kotlinx/spark/api/SparkSession.kt
index 393f945f..00655de0 100644
--- a/kotlin-spark-api/src/main/kotlin/org/jetbrains/kotlinx/spark/api/SparkSession.kt
+++ b/kotlin-spark-api/src/main/kotlin/org/jetbrains/kotlinx/spark/api/SparkSession.kt
@@ -113,7 +113,7 @@ class KSparkSession(val spark: SparkSession) {
      * NOTE: [T] must be [Serializable].
      */
     fun <T> rddOf(vararg elements: T, numSlices: Int = sc.defaultParallelism()): JavaRDD<T> =
-        sc.toRDD(elements.toList(), numSlices)
+        sc.toRDD(elements.asList(), numSlices)
 
     /**
      * A collection of methods for registering user-defined functions (UDF).
diff --git a/kotlin-spark-api/src/test/kotlin/org/jetbrains/kotlinx/spark/api/TypeInferenceTest.kt b/kotlin-spark-api/src/test/kotlin/org/jetbrains/kotlinx/spark/api/TypeInferenceTest.kt
index 3cd8aa9f..332db122 100644
--- a/kotlin-spark-api/src/test/kotlin/org/jetbrains/kotlinx/spark/api/TypeInferenceTest.kt
+++ b/kotlin-spark-api/src/test/kotlin/org/jetbrains/kotlinx/spark/api/TypeInferenceTest.kt
@@ -226,7 +226,7 @@ class TypeInferenceTest : ShouldSpec({
         should("generate valid serializer schema") {
             expect(schemaFor() as org.apache.spark.sql.types.StructType) {
                 this
-                    .feature("data type", { this.fields()?.toList() }) {
+                    .feature("data type", { this.fields()?.asList() }) {
                         this.notToEqualNull().toContain.inOrder.only.entry {
                             this
                                 .feature("element name", { name() }) { toEqual("optionList") }
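
Aside (not part of the diff): a minimal, self-contained Kotlin sketch of the difference behind the repeated `toList()` to `asList()` swap above. On an array, `toList()` allocates a new list and copies every element, while `asList()` returns a fixed-size `List` view backed by the original array, so no copy is made before the elements are handed to Spark.

```kotlin
fun main() {
    val elements = arrayOf(1, 2, 3)

    // toList() allocates a fresh list and copies the elements.
    val copied: List<Int> = elements.toList()

    // asList() wraps the existing array in a fixed-size List view (no copy).
    val viewed: List<Int> = elements.asList()

    elements[0] = 42
    println(copied[0]) // 1  -> the copy is independent of the array
    println(viewed[0]) // 42 -> the view reflects the underlying array
}
```

Since the vararg arrays at these call sites are not mutated after the call, the cheaper view is presumably sufficient and saves one copy per invocation.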