diff --git a/build.sbt b/build.sbt index ecd19ee..65cf055 100644 --- a/build.sbt +++ b/build.sbt @@ -33,18 +33,23 @@ ThisBuild / developers := List( ThisBuild / scalaVersion := scala211 val commonDependencies = Seq( - "io.tarantool" % "cartridge-driver" % "0.9.0", - "junit" % "junit" % "4.12" % Test, - "com.github.sbt" % "junit-interface" % "0.12" % Test, - "org.testcontainers" % "testcontainers" % "1.17.3" % Test, - "io.tarantool" % "testcontainers-java-tarantool" % "0.5.0" % Test, - "org.scalatest" %% "scalatest" % "3.2.14" % Test, - "org.scalamock" %% "scalamock" % "5.1.0" % Test, - "com.dimafeng" %% "testcontainers-scala-scalatest" % "0.39.5" % Test, - "org.slf4j" % "slf4j-api" % "1.7.36" % Test, - "ch.qos.logback" % "logback-core" % "1.2.5" % Test, - "ch.qos.logback" % "logback-classic" % "1.2.5" % Test, - "org.apache.derby" % "derby" % "10.11.1.1" % Test + "io.tarantool" % "cartridge-driver" % "0.10.0", + "junit" % "junit" % "4.12" % Test, + "com.github.sbt" % "junit-interface" % "0.12" % Test, + "org.scalatest" %% "scalatest" % "3.2.14" % Test, + "org.scalamock" %% "scalamock" % "5.2.0" % Test, + "com.dimafeng" %% "testcontainers-scala-scalatest" % "0.40.12" % Test, + "org.slf4j" % "slf4j-api" % "1.7.36" % Test, + "ch.qos.logback" % "logback-core" % "1.2.5" % Test, + "ch.qos.logback" % "logback-classic" % "1.2.5" % Test, + "org.apache.derby" % "derby" % "10.11.1.1" % Test, + "io.tarantool" % "testcontainers-java-tarantool" % "0.5.3" % Test +).map( + _.exclude("io.netty", "netty-all") + .exclude("io.netty", "netty-transport") + .exclude("io.netty", "netty-handler") + .exclude("io.netty", "netty-codec") + .exclude("io.netty", "netty-codec-http") ) lazy val root = (project in file(".")) diff --git a/src/main/scala/io/tarantool/spark/connector/rdd/TarantoolReadRDD.scala b/src/main/scala/io/tarantool/spark/connector/rdd/TarantoolReadRDD.scala index 89d95b4..968f27e 100644 --- a/src/main/scala/io/tarantool/spark/connector/rdd/TarantoolReadRDD.scala +++ 
b/src/main/scala/io/tarantool/spark/connector/rdd/TarantoolReadRDD.scala @@ -3,7 +3,8 @@ package io.tarantool.spark.connector.rdd import io.tarantool.driver.api.conditions.Conditions import io.tarantool.driver.api.tuple.TarantoolTuple import io.tarantool.driver.api.{TarantoolClient, TarantoolResult} -import io.tarantool.driver.mappers.{DefaultMessagePackMapperFactory, MessagePackMapper} +import io.tarantool.driver.mappers.MessagePackMapper +import io.tarantool.driver.mappers.factories.DefaultMessagePackMapperFactory import io.tarantool.spark.connector.config.{ReadConfig, TarantoolConfig} import io.tarantool.spark.connector.connection.TarantoolConnection import io.tarantool.spark.connector.partition.TarantoolPartition diff --git a/src/main/scala/io/tarantool/spark/connector/rdd/TarantoolWriteRDD.scala b/src/main/scala/io/tarantool/spark/connector/rdd/TarantoolWriteRDD.scala index f3fa025..b597ac5 100644 --- a/src/main/scala/io/tarantool/spark/connector/rdd/TarantoolWriteRDD.scala +++ b/src/main/scala/io/tarantool/spark/connector/rdd/TarantoolWriteRDD.scala @@ -5,7 +5,8 @@ import io.tarantool.driver.api.tuple.{DefaultTarantoolTupleFactory, TarantoolTup import io.tarantool.driver.api.TarantoolResult import io.tarantool.driver.api.space.options.proxy.ProxyInsertManyOptions import io.tarantool.driver.api.space.options.proxy.ProxyReplaceManyOptions -import io.tarantool.driver.mappers.{DefaultMessagePackMapperFactory, MessagePackMapper} +import io.tarantool.driver.mappers.MessagePackMapper +import io.tarantool.driver.mappers.factories.DefaultMessagePackMapperFactory import io.tarantool.spark.connector.{Logging, TarantoolSparkException} import io.tarantool.spark.connector.config.{TarantoolConfig, WriteConfig} import io.tarantool.spark.connector.connection.TarantoolConnection diff --git a/src/main/scala/org/apache/spark/sql/tarantool/DefaultSource.scala b/src/main/scala/org/apache/spark/sql/tarantool/DefaultSource.scala index ae68a1f..0c3f3a0 100644 --- 
a/src/main/scala/org/apache/spark/sql/tarantool/DefaultSource.scala +++ b/src/main/scala/org/apache/spark/sql/tarantool/DefaultSource.scala @@ -51,7 +51,7 @@ class DefaultSource case SaveMode.Append => relation.rdd.write(connection, data, overwrite = true) case SaveMode.Overwrite => { relation.rdd.truncate(connection) - relation.rdd.write(connection, data, overwrite = false) + relation.rdd.write(connection, data, overwrite = true) } case SaveMode.ErrorIfExists => { if (relation.rdd.nonEmpty(connection)) { @@ -60,11 +60,11 @@ class DefaultSource "already exists in Tarantool and contains data." ) } - relation.rdd.write(connection, data, overwrite = false) + relation.rdd.write(connection, data, overwrite = true) } case SaveMode.Ignore => if (relation.rdd.isEmpty(connection)) { - relation.rdd.write(connection, data, overwrite = false) + relation.rdd.write(connection, data, overwrite = true) } } diff --git a/src/main/scala/org/apache/spark/sql/tarantool/TarantoolReadRelation.scala b/src/main/scala/org/apache/spark/sql/tarantool/TarantoolReadRelation.scala index 3d5fa9e..b10a770 100644 --- a/src/main/scala/org/apache/spark/sql/tarantool/TarantoolReadRelation.scala +++ b/src/main/scala/org/apache/spark/sql/tarantool/TarantoolReadRelation.scala @@ -1,7 +1,8 @@ package org.apache.spark.sql.tarantool import io.tarantool.driver.api.tuple.TarantoolTuple -import io.tarantool.driver.mappers.{DefaultMessagePackMapperFactory, MessagePackMapper} +import io.tarantool.driver.mappers.MessagePackMapper +import io.tarantool.driver.mappers.factories.DefaultMessagePackMapperFactory import io.tarantool.spark.connector.rdd.TarantoolReadRDD import org.apache.spark.rdd.RDD import org.apache.spark.sql.sources.TableScan diff --git a/src/test/scala/io/tarantool/spark/connector/integration/TarantoolSparkReadClusterTest.scala b/src/test/scala/io/tarantool/spark/connector/integration/TarantoolSparkReadClusterTest.scala index 5ba635a..1739968 100644 --- 
a/src/test/scala/io/tarantool/spark/connector/integration/TarantoolSparkReadClusterTest.scala +++ b/src/test/scala/io/tarantool/spark/connector/integration/TarantoolSparkReadClusterTest.scala @@ -2,7 +2,7 @@ package io.tarantool.spark.connector.integration import io.tarantool.driver.api.conditions.Conditions import io.tarantool.driver.api.tuple.{DefaultTarantoolTupleFactory, TarantoolTuple} -import io.tarantool.driver.mappers.DefaultMessagePackMapperFactory +import io.tarantool.driver.mappers.factories.DefaultMessagePackMapperFactory import io.tarantool.spark.connector.config.TarantoolConfig import io.tarantool.spark.connector.connection.TarantoolConnection import io.tarantool.spark.connector.rdd.converter.FunctionBasedTupleConverterFactory diff --git a/src/test/scala/io/tarantool/spark/connector/integration/TarantoolSparkWriteClusterTest.scala b/src/test/scala/io/tarantool/spark/connector/integration/TarantoolSparkWriteClusterTest.scala index 4756037..02c3ed5 100644 --- a/src/test/scala/io/tarantool/spark/connector/integration/TarantoolSparkWriteClusterTest.scala +++ b/src/test/scala/io/tarantool/spark/connector/integration/TarantoolSparkWriteClusterTest.scala @@ -2,7 +2,7 @@ package io.tarantool.spark.connector.integration import io.tarantool.driver.api.conditions.Conditions import io.tarantool.driver.api.tuple.{DefaultTarantoolTupleFactory, TarantoolTuple} -import io.tarantool.driver.mappers.DefaultMessagePackMapperFactory +import io.tarantool.driver.mappers.factories.DefaultMessagePackMapperFactory import io.tarantool.spark.connector.toSparkContextFunctions import org.apache.spark.SparkException import org.apache.spark.sql.{Encoders, Row, SaveMode} @@ -311,7 +311,7 @@ class TarantoolSparkWriteClusterTest extends AnyFunSuite with Matchers with Tara ) ).toDS - intercept[SparkException] { + val ex = intercept[SparkException] { ds.write .format("org.apache.spark.sql.tarantool") .mode(SaveMode.Overwrite) @@ -321,6 +321,7 @@ class TarantoolSparkWriteClusterTest 
extends AnyFunSuite with Matchers with Tara .option("tarantool.batchSize", 2) .save() } + ex.getMessage should not(include("Not all tuples of the batch")) val actual = spark.sparkContext.tarantoolSpace(space, Conditions.any()).collect() actual.length should equal(1) // only one tuple from the first batch should be successful diff --git a/src/test/scala/org/apache/spark/sql/tarantool/MapFunctionsSpec.scala b/src/test/scala/org/apache/spark/sql/tarantool/MapFunctionsSpec.scala index 9b3f1f1..d142598 100644 --- a/src/test/scala/org/apache/spark/sql/tarantool/MapFunctionsSpec.scala +++ b/src/test/scala/org/apache/spark/sql/tarantool/MapFunctionsSpec.scala @@ -3,7 +3,7 @@ package org.apache.spark.sql.tarantool import io.tarantool.driver.api.metadata.{TestSpaceMetadata, TestSpaceWithArrayMetadata, TestSpaceWithMapMetadata} import io.tarantool.driver.api.tuple.DefaultTarantoolTupleFactory import io.tarantool.driver.core.tuple.TarantoolTupleImpl -import io.tarantool.driver.mappers.DefaultMessagePackMapperFactory +import io.tarantool.driver.mappers.factories.DefaultMessagePackMapperFactory import org.apache.spark.sql.Row import org.apache.spark.sql.types._ import org.scalatest.Inspectors.forAll