diff --git a/hail/src/main/scala/is/hail/backend/local/LocalBackend.scala b/hail/src/main/scala/is/hail/backend/local/LocalBackend.scala
index 9f8c83195ec..9fe854132f3 100644
--- a/hail/src/main/scala/is/hail/backend/local/LocalBackend.scala
+++ b/hail/src/main/scala/is/hail/backend/local/LocalBackend.scala
@@ -24,6 +24,7 @@ import scala.reflect.ClassTag
 
 import java.io.PrintWriter
 
+import com.fasterxml.jackson.core.StreamReadConstraints
 import com.google.common.util.concurrent.MoreExecutors
 import org.apache.hadoop
 import org.json4s._
@@ -46,6 +47,9 @@ object LocalBackend {
     skipLoggingConfiguration: Boolean = false,
   ): LocalBackend = synchronized {
     require(theLocalBackend == null)
+    StreamReadConstraints.overrideDefaultStreamReadConstraints(
+      StreamReadConstraints.builder().maxStringLength(Integer.MAX_VALUE).build()
+    )
 
     if (!skipLoggingConfiguration)
       HailContext.configureLogging(logFile, quiet, append)
diff --git a/hail/src/main/scala/is/hail/backend/spark/SparkBackend.scala b/hail/src/main/scala/is/hail/backend/spark/SparkBackend.scala
index ce02756bdc9..890ef46bbb8 100644
--- a/hail/src/main/scala/is/hail/backend/spark/SparkBackend.scala
+++ b/hail/src/main/scala/is/hail/backend/spark/SparkBackend.scala
@@ -31,6 +31,7 @@ import scala.util.control.NonFatal
 
 import java.io.{Closeable, PrintWriter}
 
+import com.fasterxml.jackson.core.StreamReadConstraints
 import org.apache.hadoop
 import org.apache.hadoop.conf.Configuration
 import org.apache.spark._
@@ -257,6 +258,9 @@ object SparkBackend {
     gcsRequesterPaysBuckets: String = null,
   ): SparkBackend = synchronized {
     require(theSparkBackend == null)
+    StreamReadConstraints.overrideDefaultStreamReadConstraints(
+      StreamReadConstraints.builder().maxStringLength(Integer.MAX_VALUE).build()
+    )
 
     if (!skipLoggingConfiguration)
       HailContext.configureLogging(logFile, quiet, append)