Console Output

[... skipping 33,447 KB of earlier log output ...]
[info] - cat with LazySimpleSerDe (92 milliseconds)
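The test above drives TRANSFORM through Hive's LazySimpleSerDe with /bin/cat as the script. A minimal sketch of that shape, typed into a spark-shell built with Hive support (where spark is predefined); the column list is illustrative rather than the suite's own:

    spark.sql("""
      SELECT TRANSFORM(id)
        ROW FORMAT SERDE 'org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe'
        USING 'cat'
        AS (id)
        ROW FORMAT SERDE 'org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe'
      FROM range(3)
    """).show()
    // 'cat' echoes its input, so the three rows of range(3) come back unchanged.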
03:01:23.995 ERROR org.apache.spark.sql.hive.execution.HiveScriptTransformationWriterThread: Thread-ScriptTransformation-Feed exited, caused by:
java.lang.IllegalArgumentException: intentional exception
	at org.apache.spark.sql.execution.ExceptionInjectingOperator.$anonfun$doExecute$1(BaseScriptTransformationSuite.scala:430)
	at scala.collection.Iterator$$anon$10.next(Iterator.scala:459)
	at scala.collection.Iterator$$anon$10.next(Iterator.scala:459)
	at scala.collection.Iterator.foreach(Iterator.scala:941)
	at scala.collection.Iterator.foreach$(Iterator.scala:941)
	at scala.collection.AbstractIterator.foreach(Iterator.scala:1429)
	at org.apache.spark.sql.hive.execution.HiveScriptTransformationWriterThread.processRows(HiveScriptTransformationExec.scala:213)
	at org.apache.spark.sql.execution.BaseScriptTransformationWriterThread.$anonfun$run$1(BaseScriptTransformationExec.scala:287)
	at scala.runtime.java8.JFunction0$mcV$sp.apply(JFunction0$mcV$sp.java:23)
	at org.apache.spark.util.Utils$.logUncaughtExceptions(Utils.scala:1996)
	at org.apache.spark.sql.execution.BaseScriptTransformationWriterThread.run(BaseScriptTransformationExec.scala:280)
03:01:23.996 ERROR org.apache.spark.executor.Executor: Exception in task 0.0 in stage 12655.0 (TID 27553)
java.lang.IllegalArgumentException: intentional exception
	at org.apache.spark.sql.execution.ExceptionInjectingOperator.$anonfun$doExecute$1(BaseScriptTransformationSuite.scala:430)
	at scala.collection.Iterator$$anon$10.next(Iterator.scala:459)
	at scala.collection.Iterator$$anon$10.next(Iterator.scala:459)
	at scala.collection.Iterator.foreach(Iterator.scala:941)
	at scala.collection.Iterator.foreach$(Iterator.scala:941)
	at scala.collection.AbstractIterator.foreach(Iterator.scala:1429)
	at org.apache.spark.sql.hive.execution.HiveScriptTransformationWriterThread.processRows(HiveScriptTransformationExec.scala:213)
	at org.apache.spark.sql.execution.BaseScriptTransformationWriterThread.$anonfun$run$1(BaseScriptTransformationExec.scala:287)
	at scala.runtime.java8.JFunction0$mcV$sp.apply(JFunction0$mcV$sp.java:23)
	at org.apache.spark.util.Utils$.logUncaughtExceptions(Utils.scala:1996)
	at org.apache.spark.sql.execution.BaseScriptTransformationWriterThread.run(BaseScriptTransformationExec.scala:280)
03:01:23.996 ERROR org.apache.spark.sql.hive.execution.HiveScriptTransformationWriterThread: 
03:01:23.999 WARN org.apache.spark.scheduler.TaskSetManager: Lost task 0.0 in stage 12655.0 (TID 27553) (172.17.0.1 executor driver): java.lang.IllegalArgumentException: intentional exception
	at org.apache.spark.sql.execution.ExceptionInjectingOperator.$anonfun$doExecute$1(BaseScriptTransformationSuite.scala:430)
	at scala.collection.Iterator$$anon$10.next(Iterator.scala:459)
	at scala.collection.Iterator$$anon$10.next(Iterator.scala:459)
	at scala.collection.Iterator.foreach(Iterator.scala:941)
	at scala.collection.Iterator.foreach$(Iterator.scala:941)
	at scala.collection.AbstractIterator.foreach(Iterator.scala:1429)
	at org.apache.spark.sql.hive.execution.HiveScriptTransformationWriterThread.processRows(HiveScriptTransformationExec.scala:213)
	at org.apache.spark.sql.execution.BaseScriptTransformationWriterThread.$anonfun$run$1(BaseScriptTransformationExec.scala:287)
	at scala.runtime.java8.JFunction0$mcV$sp.apply(JFunction0$mcV$sp.java:23)
	at org.apache.spark.util.Utils$.logUncaughtExceptions(Utils.scala:1996)
	at org.apache.spark.sql.execution.BaseScriptTransformationWriterThread.run(BaseScriptTransformationExec.scala:280)

03:01:23.999 ERROR org.apache.spark.scheduler.TaskSetManager: Task 0 in stage 12655.0 failed 1 times; aborting job
[info] - script transformation should not swallow errors from upstream operators (hive serde) (1 second, 100 milliseconds)
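The "intentional exception" traces above are thrown by an operator the suite injects upstream of the script; the test asserts that this original error, not a secondary broken-pipe failure from the feed thread, is what reaches the caller. A rough spark-shell probe of the same path (the throwing UDF stands in for the suite's ExceptionInjectingOperator and is purely illustrative):

    // A UDF that always fails, mimicking a failing upstream operator.
    spark.udf.register("boom", (i: Long) => {
      if (i >= 0) throw new IllegalArgumentException("intentional exception")
      i
    })
    val thrown =
      try {
        spark.sql("SELECT TRANSFORM(boom(id)) USING 'cat' FROM range(3)").collect()
        None
      } catch { case e: Throwable => Some(e) }
    // The failure chain should still name the injected exception rather than a
    // write-side error from the script's stdin closing early.
    assert(thrown.exists(_.toString.contains("intentional exception")))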
03:01:24.071 ERROR org.apache.spark.sql.hive.execution.HiveScriptTransformationWriterThread: /bin/bash: some_non_existent_command: command not found

03:01:24.071 ERROR org.apache.spark.sql.hive.execution.HiveScriptTransformationExec: /bin/bash: some_non_existent_command: command not found

03:01:24.072 ERROR org.apache.spark.sql.hive.execution.HiveScriptTransformationExec: /bin/bash: some_non_existent_command: command not found

03:01:24.072 ERROR org.apache.spark.executor.Executor: Exception in task 0.0 in stage 12656.0 (TID 27554)
org.apache.spark.SparkException: Subprocess exited with status 127. Error: /bin/bash: some_non_existent_command: command not found

	at org.apache.spark.sql.execution.BaseScriptTransformationExec.checkFailureAndPropagate(BaseScriptTransformationExec.scala:179)
	at org.apache.spark.sql.execution.BaseScriptTransformationExec.checkFailureAndPropagate$(BaseScriptTransformationExec.scala:156)
	at org.apache.spark.sql.hive.execution.HiveScriptTransformationExec.checkFailureAndPropagate(HiveScriptTransformationExec.scala:49)
	at org.apache.spark.sql.hive.execution.HiveScriptTransformationExec$$anon$1.hasNext(HiveScriptTransformationExec.scala:110)
	at scala.collection.Iterator$$anon$10.hasNext(Iterator.scala:458)
	at org.apache.spark.sql.execution.SparkPlan.$anonfun$getByteArrayRdd$1(SparkPlan.scala:345)
	at org.apache.spark.rdd.RDD.$anonfun$mapPartitionsInternal$2(RDD.scala:898)
	at org.apache.spark.rdd.RDD.$anonfun$mapPartitionsInternal$2$adapted(RDD.scala:898)
	at org.apache.spark.rdd.MapPartitionsRDD.compute(MapPartitionsRDD.scala:52)
	at org.apache.spark.rdd.RDD.computeOrReadCheckpoint(RDD.scala:373)
	at org.apache.spark.rdd.RDD.iterator(RDD.scala:337)
	at org.apache.spark.scheduler.ResultTask.runTask(ResultTask.scala:90)
	at org.apache.spark.scheduler.Task.run(Task.scala:131)
	at org.apache.spark.executor.Executor$TaskRunner.$anonfun$run$3(Executor.scala:498)
	at org.apache.spark.util.Utils$.tryWithSafeFinally(Utils.scala:1439)
	at org.apache.spark.executor.Executor$TaskRunner.run(Executor.scala:501)
	at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1149)
	at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:624)
	at java.lang.Thread.run(Thread.java:748)
Caused by: org.apache.spark.SparkException: Subprocess exited with status 127. Error: /bin/bash: some_non_existent_command: command not found

	at org.apache.spark.sql.execution.BaseScriptTransformationExec.checkFailureAndPropagate(BaseScriptTransformationExec.scala:179)
	at org.apache.spark.sql.execution.BaseScriptTransformationExec.checkFailureAndPropagate$(BaseScriptTransformationExec.scala:156)
	at org.apache.spark.sql.hive.execution.HiveScriptTransformationExec.checkFailureAndPropagate(HiveScriptTransformationExec.scala:49)
	at org.apache.spark.sql.hive.execution.HiveScriptTransformationExec$$anon$1.hasNext(HiveScriptTransformationExec.scala:99)
	... 15 more
03:01:24.074 WARN org.apache.spark.scheduler.TaskSetManager: Lost task 0.0 in stage 12656.0 (TID 27554) (172.17.0.1 executor driver): org.apache.spark.SparkException: Subprocess exited with status 127. Error: /bin/bash: some_non_existent_command: command not found

	at org.apache.spark.sql.execution.BaseScriptTransformationExec.checkFailureAndPropagate(BaseScriptTransformationExec.scala:179)
	at org.apache.spark.sql.execution.BaseScriptTransformationExec.checkFailureAndPropagate$(BaseScriptTransformationExec.scala:156)
	at org.apache.spark.sql.hive.execution.HiveScriptTransformationExec.checkFailureAndPropagate(HiveScriptTransformationExec.scala:49)
	at org.apache.spark.sql.hive.execution.HiveScriptTransformationExec$$anon$1.hasNext(HiveScriptTransformationExec.scala:110)
	at scala.collection.Iterator$$anon$10.hasNext(Iterator.scala:458)
	at org.apache.spark.sql.execution.SparkPlan.$anonfun$getByteArrayRdd$1(SparkPlan.scala:345)
	at org.apache.spark.rdd.RDD.$anonfun$mapPartitionsInternal$2(RDD.scala:898)
	at org.apache.spark.rdd.RDD.$anonfun$mapPartitionsInternal$2$adapted(RDD.scala:898)
	at org.apache.spark.rdd.MapPartitionsRDD.compute(MapPartitionsRDD.scala:52)
	at org.apache.spark.rdd.RDD.computeOrReadCheckpoint(RDD.scala:373)
	at org.apache.spark.rdd.RDD.iterator(RDD.scala:337)
	at org.apache.spark.scheduler.ResultTask.runTask(ResultTask.scala:90)
	at org.apache.spark.scheduler.Task.run(Task.scala:131)
	at org.apache.spark.executor.Executor$TaskRunner.$anonfun$run$3(Executor.scala:498)
	at org.apache.spark.util.Utils$.tryWithSafeFinally(Utils.scala:1439)
	at org.apache.spark.executor.Executor$TaskRunner.run(Executor.scala:501)
	at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1149)
	at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:624)
	at java.lang.Thread.run(Thread.java:748)
Caused by: org.apache.spark.SparkException: Subprocess exited with status 127. Error: /bin/bash: some_non_existent_command: command not found

	at org.apache.spark.sql.execution.BaseScriptTransformationExec.checkFailureAndPropagate(BaseScriptTransformationExec.scala:179)
	at org.apache.spark.sql.execution.BaseScriptTransformationExec.checkFailureAndPropagate$(BaseScriptTransformationExec.scala:156)
	at org.apache.spark.sql.hive.execution.HiveScriptTransformationExec.checkFailureAndPropagate(HiveScriptTransformationExec.scala:49)
	at org.apache.spark.sql.hive.execution.HiveScriptTransformationExec$$anon$1.hasNext(HiveScriptTransformationExec.scala:99)
	... 15 more

03:01:24.074 ERROR org.apache.spark.scheduler.TaskSetManager: Task 0 in stage 12656.0 failed 1 times; aborting job
[info] - SPARK-14400 script transformation should fail for bad script command (hive serde) (75 milliseconds)
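Status 127 is the shell's "command not found" exit code; SPARK-14400 asserts that checkFailureAndPropagate surfaces it as a SparkException instead of swallowing it. A spark-shell repro of the logged failure:

    val failure =
      try {
        spark.sql("SELECT TRANSFORM(id) USING 'some_non_existent_command' FROM range(3)").collect()
        None
      } catch { case e: Exception => Some(e) }
    // The wrapped message carries the script's exit status and stderr output.
    assert(failure.exists(_.getMessage.contains("status 127")))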
[info] - SPARK-24339 verify the result after pruning the unused columns (hive serde) (132 milliseconds)
03:01:24.289 ERROR org.apache.spark.sql.hive.execution.HiveScriptTransformationExec: /bin/bash: some_non_existent_command: command not found

03:01:24.289 ERROR org.apache.spark.sql.hive.execution.HiveScriptTransformationWriterThread: /bin/bash: some_non_existent_command: command not found

03:01:24.289 ERROR org.apache.spark.sql.hive.execution.HiveScriptTransformationExec: /bin/bash: some_non_existent_command: command not found

03:01:24.289 ERROR org.apache.spark.executor.Executor: Exception in task 0.0 in stage 12659.0 (TID 27557)
org.apache.spark.SparkException: Subprocess exited with status 127. Error: /bin/bash: some_non_existent_command: command not found

	at org.apache.spark.sql.execution.BaseScriptTransformationExec.checkFailureAndPropagate(BaseScriptTransformationExec.scala:179)
	at org.apache.spark.sql.execution.BaseScriptTransformationExec.checkFailureAndPropagate$(BaseScriptTransformationExec.scala:156)
	at org.apache.spark.sql.hive.execution.HiveScriptTransformationExec.checkFailureAndPropagate(HiveScriptTransformationExec.scala:49)
	at org.apache.spark.sql.hive.execution.HiveScriptTransformationExec$$anon$1.hasNext(HiveScriptTransformationExec.scala:110)
	at scala.collection.Iterator$$anon$10.hasNext(Iterator.scala:458)
	at org.apache.spark.sql.execution.SparkPlan.$anonfun$getByteArrayRdd$1(SparkPlan.scala:345)
	at org.apache.spark.rdd.RDD.$anonfun$mapPartitionsInternal$2(RDD.scala:898)
	at org.apache.spark.rdd.RDD.$anonfun$mapPartitionsInternal$2$adapted(RDD.scala:898)
	at org.apache.spark.rdd.MapPartitionsRDD.compute(MapPartitionsRDD.scala:52)
	at org.apache.spark.rdd.RDD.computeOrReadCheckpoint(RDD.scala:373)
	at org.apache.spark.rdd.RDD.iterator(RDD.scala:337)
	at org.apache.spark.scheduler.ResultTask.runTask(ResultTask.scala:90)
	at org.apache.spark.scheduler.Task.run(Task.scala:131)
	at org.apache.spark.executor.Executor$TaskRunner.$anonfun$run$3(Executor.scala:498)
	at org.apache.spark.util.Utils$.tryWithSafeFinally(Utils.scala:1439)
	at org.apache.spark.executor.Executor$TaskRunner.run(Executor.scala:501)
	at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1149)
	at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:624)
	at java.lang.Thread.run(Thread.java:748)
Caused by: org.apache.spark.SparkException: Subprocess exited with status 127. Error: /bin/bash: some_non_existent_command: command not found

	at org.apache.spark.sql.execution.BaseScriptTransformationExec.checkFailureAndPropagate(BaseScriptTransformationExec.scala:179)
	at org.apache.spark.sql.execution.BaseScriptTransformationExec.checkFailureAndPropagate$(BaseScriptTransformationExec.scala:156)
	at org.apache.spark.sql.hive.execution.HiveScriptTransformationExec.checkFailureAndPropagate(HiveScriptTransformationExec.scala:49)
	at org.apache.spark.sql.hive.execution.HiveScriptTransformationExec$$anon$1.hasNext(HiveScriptTransformationExec.scala:99)
	... 15 more
03:01:24.292 WARN org.apache.spark.scheduler.TaskSetManager: Lost task 0.0 in stage 12659.0 (TID 27557) (172.17.0.1 executor driver): org.apache.spark.SparkException: Subprocess exited with status 127. Error: /bin/bash: some_non_existent_command: command not found

	at org.apache.spark.sql.execution.BaseScriptTransformationExec.checkFailureAndPropagate(BaseScriptTransformationExec.scala:179)
	at org.apache.spark.sql.execution.BaseScriptTransformationExec.checkFailureAndPropagate$(BaseScriptTransformationExec.scala:156)
	at org.apache.spark.sql.hive.execution.HiveScriptTransformationExec.checkFailureAndPropagate(HiveScriptTransformationExec.scala:49)
	at org.apache.spark.sql.hive.execution.HiveScriptTransformationExec$$anon$1.hasNext(HiveScriptTransformationExec.scala:110)
	at scala.collection.Iterator$$anon$10.hasNext(Iterator.scala:458)
	at org.apache.spark.sql.execution.SparkPlan.$anonfun$getByteArrayRdd$1(SparkPlan.scala:345)
	at org.apache.spark.rdd.RDD.$anonfun$mapPartitionsInternal$2(RDD.scala:898)
	at org.apache.spark.rdd.RDD.$anonfun$mapPartitionsInternal$2$adapted(RDD.scala:898)
	at org.apache.spark.rdd.MapPartitionsRDD.compute(MapPartitionsRDD.scala:52)
	at org.apache.spark.rdd.RDD.computeOrReadCheckpoint(RDD.scala:373)
	at org.apache.spark.rdd.RDD.iterator(RDD.scala:337)
	at org.apache.spark.scheduler.ResultTask.runTask(ResultTask.scala:90)
	at org.apache.spark.scheduler.Task.run(Task.scala:131)
	at org.apache.spark.executor.Executor$TaskRunner.$anonfun$run$3(Executor.scala:498)
	at org.apache.spark.util.Utils$.tryWithSafeFinally(Utils.scala:1439)
	at org.apache.spark.executor.Executor$TaskRunner.run(Executor.scala:501)
	at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1149)
	at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:624)
	at java.lang.Thread.run(Thread.java:748)
Caused by: org.apache.spark.SparkException: Subprocess exited with status 127. Error: /bin/bash: some_non_existent_command: command not found

	at org.apache.spark.sql.execution.BaseScriptTransformationExec.checkFailureAndPropagate(BaseScriptTransformationExec.scala:179)
	at org.apache.spark.sql.execution.BaseScriptTransformationExec.checkFailureAndPropagate$(BaseScriptTransformationExec.scala:156)
	at org.apache.spark.sql.hive.execution.HiveScriptTransformationExec.checkFailureAndPropagate(HiveScriptTransformationExec.scala:49)
	at org.apache.spark.sql.hive.execution.HiveScriptTransformationExec$$anon$1.hasNext(HiveScriptTransformationExec.scala:99)
	... 15 more

03:01:24.292 ERROR org.apache.spark.scheduler.TaskSetManager: Task 0 in stage 12659.0 failed 1 times; aborting job
[info] - SPARK-30973: TRANSFORM should wait for the termination of the script (hive serde) (84 milliseconds)
[info] - SPARK-32388: TRANSFORM should handle schema-less output correctly (hive serde) (646 milliseconds)
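With no AS clause, TRANSFORM falls back to Hive's schema-less convention: the script's output is split at the first tab into two string columns, key and value, which is the contract SPARK-32388 pins down. A quick spark-shell check:

    spark.sql("SELECT TRANSFORM(id, id + 1) USING 'cat' FROM range(2)").printSchema()
    // root
    //  |-- key: string (nullable = true)
    //  |-- value: string (nullable = true)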
[info] - SPARK-32400: TRANSFORM should support basic data types as input (hive serde) (134 milliseconds)
[info] - SPARK-32400: TRANSFORM supports complex data types (hive serde) (244 milliseconds)
[info] - SPARK-32400: TRANSFORM supports complex data types end to end (hive serde) (181 milliseconds)
03:01:25.676 ERROR org.apache.spark.executor.Executor: Exception in task 0.0 in stage 12678.0 (TID 27576)
org.apache.spark.sql.AnalysisException: interval cannot be converted to Hive TypeInfo
	at org.apache.spark.sql.hive.HiveInspectors$typeInfoConversions.toTypeInfo(HiveInspectors.scala:1068)
	at org.apache.spark.sql.hive.execution.HiveScriptIOSchema$.$anonfun$initSerDe$1(HiveScriptTransformationExec.scala:272)
	at scala.collection.TraversableLike.$anonfun$map$1(TraversableLike.scala:238)
	at scala.collection.immutable.List.foreach(List.scala:392)
	at scala.collection.TraversableLike.map(TraversableLike.scala:238)
	at scala.collection.TraversableLike.map$(TraversableLike.scala:231)
	at scala.collection.immutable.List.map(List.scala:298)
	at org.apache.spark.sql.hive.execution.HiveScriptIOSchema$.initSerDe(HiveScriptTransformationExec.scala:272)
	at org.apache.spark.sql.hive.execution.HiveScriptIOSchema$.$anonfun$initInputSerDe$1(HiveScriptTransformationExec.scala:238)
	at scala.Option.map(Option.scala:230)
	at org.apache.spark.sql.hive.execution.HiveScriptIOSchema$.initInputSerDe(HiveScriptTransformationExec.scala:236)
	at org.apache.spark.sql.hive.execution.HiveScriptTransformationExec.processIterator(HiveScriptTransformationExec.scala:143)
	at org.apache.spark.sql.execution.BaseScriptTransformationExec.$anonfun$doExecute$1(BaseScriptTransformationExec.scala:64)
	at org.apache.spark.rdd.RDD.$anonfun$mapPartitions$2(RDD.scala:863)
	at org.apache.spark.rdd.RDD.$anonfun$mapPartitions$2$adapted(RDD.scala:863)
	at org.apache.spark.rdd.MapPartitionsRDD.compute(MapPartitionsRDD.scala:52)
	at org.apache.spark.rdd.RDD.computeOrReadCheckpoint(RDD.scala:373)
	at org.apache.spark.rdd.RDD.iterator(RDD.scala:337)
	at org.apache.spark.rdd.MapPartitionsRDD.compute(MapPartitionsRDD.scala:52)
	at org.apache.spark.rdd.RDD.computeOrReadCheckpoint(RDD.scala:373)
	at org.apache.spark.rdd.RDD.iterator(RDD.scala:337)
	at org.apache.spark.scheduler.ResultTask.runTask(ResultTask.scala:90)
	at org.apache.spark.scheduler.Task.run(Task.scala:131)
	at org.apache.spark.executor.Executor$TaskRunner.$anonfun$run$3(Executor.scala:498)
	at org.apache.spark.util.Utils$.tryWithSafeFinally(Utils.scala:1439)
	at org.apache.spark.executor.Executor$TaskRunner.run(Executor.scala:501)
	at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1149)
	at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:624)
	at java.lang.Thread.run(Thread.java:748)
03:01:25.680 WARN org.apache.spark.scheduler.TaskSetManager: Lost task 0.0 in stage 12678.0 (TID 27576) (172.17.0.1 executor driver): org.apache.spark.sql.AnalysisException: interval cannot be converted to Hive TypeInfo
	at org.apache.spark.sql.hive.HiveInspectors$typeInfoConversions.toTypeInfo(HiveInspectors.scala:1068)
	at org.apache.spark.sql.hive.execution.HiveScriptIOSchema$.$anonfun$initSerDe$1(HiveScriptTransformationExec.scala:272)
	at scala.collection.TraversableLike.$anonfun$map$1(TraversableLike.scala:238)
	at scala.collection.immutable.List.foreach(List.scala:392)
	at scala.collection.TraversableLike.map(TraversableLike.scala:238)
	at scala.collection.TraversableLike.map$(TraversableLike.scala:231)
	at scala.collection.immutable.List.map(List.scala:298)
	at org.apache.spark.sql.hive.execution.HiveScriptIOSchema$.initSerDe(HiveScriptTransformationExec.scala:272)
	at org.apache.spark.sql.hive.execution.HiveScriptIOSchema$.$anonfun$initInputSerDe$1(HiveScriptTransformationExec.scala:238)
	at scala.Option.map(Option.scala:230)
	at org.apache.spark.sql.hive.execution.HiveScriptIOSchema$.initInputSerDe(HiveScriptTransformationExec.scala:236)
	at org.apache.spark.sql.hive.execution.HiveScriptTransformationExec.processIterator(HiveScriptTransformationExec.scala:143)
	at org.apache.spark.sql.execution.BaseScriptTransformationExec.$anonfun$doExecute$1(BaseScriptTransformationExec.scala:64)
	at org.apache.spark.rdd.RDD.$anonfun$mapPartitions$2(RDD.scala:863)
	at org.apache.spark.rdd.RDD.$anonfun$mapPartitions$2$adapted(RDD.scala:863)
	at org.apache.spark.rdd.MapPartitionsRDD.compute(MapPartitionsRDD.scala:52)
	at org.apache.spark.rdd.RDD.computeOrReadCheckpoint(RDD.scala:373)
	at org.apache.spark.rdd.RDD.iterator(RDD.scala:337)
	at org.apache.spark.rdd.MapPartitionsRDD.compute(MapPartitionsRDD.scala:52)
	at org.apache.spark.rdd.RDD.computeOrReadCheckpoint(RDD.scala:373)
	at org.apache.spark.rdd.RDD.iterator(RDD.scala:337)
	at org.apache.spark.scheduler.ResultTask.runTask(ResultTask.scala:90)
	at org.apache.spark.scheduler.Task.run(Task.scala:131)
	at org.apache.spark.executor.Executor$TaskRunner.$anonfun$run$3(Executor.scala:498)
	at org.apache.spark.util.Utils$.tryWithSafeFinally(Utils.scala:1439)
	at org.apache.spark.executor.Executor$TaskRunner.run(Executor.scala:501)
	at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1149)
	at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:624)
	at java.lang.Thread.run(Thread.java:748)

03:01:25.680 ERROR org.apache.spark.scheduler.TaskSetManager: Task 0 in stage 12678.0 failed 1 times; aborting job
03:01:25.773 ERROR org.apache.spark.executor.Executor: Exception in task 0.0 in stage 12679.0 (TID 27577)
org.apache.spark.sql.AnalysisException: array<double> cannot be converted to Hive TypeInfo
	at org.apache.spark.sql.hive.HiveInspectors$typeInfoConversions.toTypeInfo(HiveInspectors.scala:1068)
	at org.apache.spark.sql.hive.execution.HiveScriptIOSchema$.$anonfun$initSerDe$1(HiveScriptTransformationExec.scala:272)
	at scala.collection.TraversableLike.$anonfun$map$1(TraversableLike.scala:238)
	at scala.collection.immutable.List.foreach(List.scala:392)
	at scala.collection.TraversableLike.map(TraversableLike.scala:238)
	at scala.collection.TraversableLike.map$(TraversableLike.scala:231)
	at scala.collection.immutable.List.map(List.scala:298)
	at org.apache.spark.sql.hive.execution.HiveScriptIOSchema$.initSerDe(HiveScriptTransformationExec.scala:272)
	at org.apache.spark.sql.hive.execution.HiveScriptIOSchema$.$anonfun$initInputSerDe$1(HiveScriptTransformationExec.scala:238)
	at scala.Option.map(Option.scala:230)
	at org.apache.spark.sql.hive.execution.HiveScriptIOSchema$.initInputSerDe(HiveScriptTransformationExec.scala:236)
	at org.apache.spark.sql.hive.execution.HiveScriptTransformationExec.processIterator(HiveScriptTransformationExec.scala:143)
	at org.apache.spark.sql.execution.BaseScriptTransformationExec.$anonfun$doExecute$1(BaseScriptTransformationExec.scala:64)
	at org.apache.spark.rdd.RDD.$anonfun$mapPartitions$2(RDD.scala:863)
	at org.apache.spark.rdd.RDD.$anonfun$mapPartitions$2$adapted(RDD.scala:863)
	at org.apache.spark.rdd.MapPartitionsRDD.compute(MapPartitionsRDD.scala:52)
	at org.apache.spark.rdd.RDD.computeOrReadCheckpoint(RDD.scala:373)
	at org.apache.spark.rdd.RDD.iterator(RDD.scala:337)
	at org.apache.spark.rdd.MapPartitionsRDD.compute(MapPartitionsRDD.scala:52)
	at org.apache.spark.rdd.RDD.computeOrReadCheckpoint(RDD.scala:373)
	at org.apache.spark.rdd.RDD.iterator(RDD.scala:337)
	at org.apache.spark.scheduler.ResultTask.runTask(ResultTask.scala:90)
	at org.apache.spark.scheduler.Task.run(Task.scala:131)
	at org.apache.spark.executor.Executor$TaskRunner.$anonfun$run$3(Executor.scala:498)
	at org.apache.spark.util.Utils$.tryWithSafeFinally(Utils.scala:1439)
	at org.apache.spark.executor.Executor$TaskRunner.run(Executor.scala:501)
	at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1149)
	at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:624)
	at java.lang.Thread.run(Thread.java:748)
03:01:25.777 WARN org.apache.spark.scheduler.TaskSetManager: Lost task 0.0 in stage 12679.0 (TID 27577) (172.17.0.1 executor driver): org.apache.spark.sql.AnalysisException: array<double> cannot be converted to Hive TypeInfo
	at org.apache.spark.sql.hive.HiveInspectors$typeInfoConversions.toTypeInfo(HiveInspectors.scala:1068)
	at org.apache.spark.sql.hive.execution.HiveScriptIOSchema$.$anonfun$initSerDe$1(HiveScriptTransformationExec.scala:272)
	at scala.collection.TraversableLike.$anonfun$map$1(TraversableLike.scala:238)
	at scala.collection.immutable.List.foreach(List.scala:392)
	at scala.collection.TraversableLike.map(TraversableLike.scala:238)
	at scala.collection.TraversableLike.map$(TraversableLike.scala:231)
	at scala.collection.immutable.List.map(List.scala:298)
	at org.apache.spark.sql.hive.execution.HiveScriptIOSchema$.initSerDe(HiveScriptTransformationExec.scala:272)
	at org.apache.spark.sql.hive.execution.HiveScriptIOSchema$.$anonfun$initInputSerDe$1(HiveScriptTransformationExec.scala:238)
	at scala.Option.map(Option.scala:230)
	at org.apache.spark.sql.hive.execution.HiveScriptIOSchema$.initInputSerDe(HiveScriptTransformationExec.scala:236)
	at org.apache.spark.sql.hive.execution.HiveScriptTransformationExec.processIterator(HiveScriptTransformationExec.scala:143)
	at org.apache.spark.sql.execution.BaseScriptTransformationExec.$anonfun$doExecute$1(BaseScriptTransformationExec.scala:64)
	at org.apache.spark.rdd.RDD.$anonfun$mapPartitions$2(RDD.scala:863)
	at org.apache.spark.rdd.RDD.$anonfun$mapPartitions$2$adapted(RDD.scala:863)
	at org.apache.spark.rdd.MapPartitionsRDD.compute(MapPartitionsRDD.scala:52)
	at org.apache.spark.rdd.RDD.computeOrReadCheckpoint(RDD.scala:373)
	at org.apache.spark.rdd.RDD.iterator(RDD.scala:337)
	at org.apache.spark.rdd.MapPartitionsRDD.compute(MapPartitionsRDD.scala:52)
	at org.apache.spark.rdd.RDD.computeOrReadCheckpoint(RDD.scala:373)
	at org.apache.spark.rdd.RDD.iterator(RDD.scala:337)
	at org.apache.spark.scheduler.ResultTask.runTask(ResultTask.scala:90)
	at org.apache.spark.scheduler.Task.run(Task.scala:131)
	at org.apache.spark.executor.Executor$TaskRunner.$anonfun$run$3(Executor.scala:498)
	at org.apache.spark.util.Utils$.tryWithSafeFinally(Utils.scala:1439)
	at org.apache.spark.executor.Executor$TaskRunner.run(Executor.scala:501)
	at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1149)
	at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:624)
	at java.lang.Thread.run(Thread.java:748)

03:01:25.777 ERROR org.apache.spark.scheduler.TaskSetManager: Task 0 in stage 12679.0 failed 1 times; aborting job
[info] - SPARK-32400: TRANSFORM doesn't support CalendarIntervalType/UserDefinedType (hive serde) (278 milliseconds)
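The AnalysisException pairs above come from HiveInspectors.toTypeInfo rejecting Catalyst types that have no Hive TypeInfo counterpart. A one-liner that should hit the same interval error in a Hive-enabled spark-shell (on branch-3.1, INTERVAL literals are still CalendarIntervalType):

    spark.sql("SELECT TRANSFORM(INTERVAL 1 DAY) USING 'cat' FROM range(1)").collect()
    // => org.apache.spark.sql.AnalysisException: interval cannot be converted to Hive TypeInfo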
03:01:25.953 ERROR org.apache.spark.executor.Executor: Exception in task 0.0 in stage 12680.0 (TID 27578)
org.apache.spark.sql.AnalysisException: interval cannot be converted to Hive TypeInfo
	at org.apache.spark.sql.hive.HiveInspectors$typeInfoConversions.toTypeInfo(HiveInspectors.scala:1068)
	at org.apache.spark.sql.hive.execution.HiveScriptIOSchema$.$anonfun$initSerDe$1(HiveScriptTransformationExec.scala:272)
	at scala.collection.TraversableLike.$anonfun$map$1(TraversableLike.scala:238)
	at scala.collection.immutable.List.foreach(List.scala:392)
	at scala.collection.TraversableLike.map(TraversableLike.scala:238)
	at scala.collection.TraversableLike.map$(TraversableLike.scala:231)
	at scala.collection.immutable.List.map(List.scala:298)
	at org.apache.spark.sql.hive.execution.HiveScriptIOSchema$.initSerDe(HiveScriptTransformationExec.scala:272)
	at org.apache.spark.sql.hive.execution.HiveScriptIOSchema$.$anonfun$initInputSerDe$1(HiveScriptTransformationExec.scala:238)
	at scala.Option.map(Option.scala:230)
	at org.apache.spark.sql.hive.execution.HiveScriptIOSchema$.initInputSerDe(HiveScriptTransformationExec.scala:236)
	at org.apache.spark.sql.hive.execution.HiveScriptTransformationExec.processIterator(HiveScriptTransformationExec.scala:143)
	at org.apache.spark.sql.execution.BaseScriptTransformationExec.$anonfun$doExecute$1(BaseScriptTransformationExec.scala:64)
	at org.apache.spark.rdd.RDD.$anonfun$mapPartitions$2(RDD.scala:863)
	at org.apache.spark.rdd.RDD.$anonfun$mapPartitions$2$adapted(RDD.scala:863)
	at org.apache.spark.rdd.MapPartitionsRDD.compute(MapPartitionsRDD.scala:52)
	at org.apache.spark.rdd.RDD.computeOrReadCheckpoint(RDD.scala:373)
	at org.apache.spark.rdd.RDD.iterator(RDD.scala:337)
	at org.apache.spark.rdd.MapPartitionsRDD.compute(MapPartitionsRDD.scala:52)
	at org.apache.spark.rdd.RDD.computeOrReadCheckpoint(RDD.scala:373)
	at org.apache.spark.rdd.RDD.iterator(RDD.scala:337)
	at org.apache.spark.scheduler.ResultTask.runTask(ResultTask.scala:90)
	at org.apache.spark.scheduler.Task.run(Task.scala:131)
	at org.apache.spark.executor.Executor$TaskRunner.$anonfun$run$3(Executor.scala:498)
	at org.apache.spark.util.Utils$.tryWithSafeFinally(Utils.scala:1439)
	at org.apache.spark.executor.Executor$TaskRunner.run(Executor.scala:501)
	at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1149)
	at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:624)
	at java.lang.Thread.run(Thread.java:748)
03:01:25.956 WARN org.apache.spark.scheduler.TaskSetManager: Lost task 0.0 in stage 12680.0 (TID 27578) (172.17.0.1 executor driver): org.apache.spark.sql.AnalysisException: interval cannot be converted to Hive TypeInfo
	at org.apache.spark.sql.hive.HiveInspectors$typeInfoConversions.toTypeInfo(HiveInspectors.scala:1068)
	at org.apache.spark.sql.hive.execution.HiveScriptIOSchema$.$anonfun$initSerDe$1(HiveScriptTransformationExec.scala:272)
	at scala.collection.TraversableLike.$anonfun$map$1(TraversableLike.scala:238)
	at scala.collection.immutable.List.foreach(List.scala:392)
	at scala.collection.TraversableLike.map(TraversableLike.scala:238)
	at scala.collection.TraversableLike.map$(TraversableLike.scala:231)
	at scala.collection.immutable.List.map(List.scala:298)
	at org.apache.spark.sql.hive.execution.HiveScriptIOSchema$.initSerDe(HiveScriptTransformationExec.scala:272)
	at org.apache.spark.sql.hive.execution.HiveScriptIOSchema$.$anonfun$initInputSerDe$1(HiveScriptTransformationExec.scala:238)
	at scala.Option.map(Option.scala:230)
	at org.apache.spark.sql.hive.execution.HiveScriptIOSchema$.initInputSerDe(HiveScriptTransformationExec.scala:236)
	at org.apache.spark.sql.hive.execution.HiveScriptTransformationExec.processIterator(HiveScriptTransformationExec.scala:143)
	at org.apache.spark.sql.execution.BaseScriptTransformationExec.$anonfun$doExecute$1(BaseScriptTransformationExec.scala:64)
	at org.apache.spark.rdd.RDD.$anonfun$mapPartitions$2(RDD.scala:863)
	at org.apache.spark.rdd.RDD.$anonfun$mapPartitions$2$adapted(RDD.scala:863)
	at org.apache.spark.rdd.MapPartitionsRDD.compute(MapPartitionsRDD.scala:52)
	at org.apache.spark.rdd.RDD.computeOrReadCheckpoint(RDD.scala:373)
	at org.apache.spark.rdd.RDD.iterator(RDD.scala:337)
	at org.apache.spark.rdd.MapPartitionsRDD.compute(MapPartitionsRDD.scala:52)
	at org.apache.spark.rdd.RDD.computeOrReadCheckpoint(RDD.scala:373)
	at org.apache.spark.rdd.RDD.iterator(RDD.scala:337)
	at org.apache.spark.scheduler.ResultTask.runTask(ResultTask.scala:90)
	at org.apache.spark.scheduler.Task.run(Task.scala:131)
	at org.apache.spark.executor.Executor$TaskRunner.$anonfun$run$3(Executor.scala:498)
	at org.apache.spark.util.Utils$.tryWithSafeFinally(Utils.scala:1439)
	at org.apache.spark.executor.Executor$TaskRunner.run(Executor.scala:501)
	at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1149)
	at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:624)
	at java.lang.Thread.run(Thread.java:748)

03:01:25.956 ERROR org.apache.spark.scheduler.TaskSetManager: Task 0 in stage 12680.0 failed 1 times; aborting job
03:01:26.038 ERROR org.apache.spark.executor.Executor: Exception in task 0.0 in stage 12681.0 (TID 27579)
org.apache.spark.sql.AnalysisException: array<double> cannot be converted to Hive TypeInfo
	at org.apache.spark.sql.hive.HiveInspectors$typeInfoConversions.toTypeInfo(HiveInspectors.scala:1068)
	at org.apache.spark.sql.hive.execution.HiveScriptIOSchema$.$anonfun$initSerDe$1(HiveScriptTransformationExec.scala:272)
	at scala.collection.TraversableLike.$anonfun$map$1(TraversableLike.scala:238)
	at scala.collection.immutable.List.foreach(List.scala:392)
	at scala.collection.TraversableLike.map(TraversableLike.scala:238)
	at scala.collection.TraversableLike.map$(TraversableLike.scala:231)
	at scala.collection.immutable.List.map(List.scala:298)
	at org.apache.spark.sql.hive.execution.HiveScriptIOSchema$.initSerDe(HiveScriptTransformationExec.scala:272)
	at org.apache.spark.sql.hive.execution.HiveScriptIOSchema$.$anonfun$initInputSerDe$1(HiveScriptTransformationExec.scala:238)
	at scala.Option.map(Option.scala:230)
	at org.apache.spark.sql.hive.execution.HiveScriptIOSchema$.initInputSerDe(HiveScriptTransformationExec.scala:236)
	at org.apache.spark.sql.hive.execution.HiveScriptTransformationExec.processIterator(HiveScriptTransformationExec.scala:143)
	at org.apache.spark.sql.execution.BaseScriptTransformationExec.$anonfun$doExecute$1(BaseScriptTransformationExec.scala:64)
	at org.apache.spark.rdd.RDD.$anonfun$mapPartitions$2(RDD.scala:863)
	at org.apache.spark.rdd.RDD.$anonfun$mapPartitions$2$adapted(RDD.scala:863)
	at org.apache.spark.rdd.MapPartitionsRDD.compute(MapPartitionsRDD.scala:52)
	at org.apache.spark.rdd.RDD.computeOrReadCheckpoint(RDD.scala:373)
	at org.apache.spark.rdd.RDD.iterator(RDD.scala:337)
	at org.apache.spark.rdd.MapPartitionsRDD.compute(MapPartitionsRDD.scala:52)
	at org.apache.spark.rdd.RDD.computeOrReadCheckpoint(RDD.scala:373)
	at org.apache.spark.rdd.RDD.iterator(RDD.scala:337)
	at org.apache.spark.scheduler.ResultTask.runTask(ResultTask.scala:90)
	at org.apache.spark.scheduler.Task.run(Task.scala:131)
	at org.apache.spark.executor.Executor$TaskRunner.$anonfun$run$3(Executor.scala:498)
	at org.apache.spark.util.Utils$.tryWithSafeFinally(Utils.scala:1439)
	at org.apache.spark.executor.Executor$TaskRunner.run(Executor.scala:501)
	at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1149)
	at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:624)
	at java.lang.Thread.run(Thread.java:748)
03:01:26.040 WARN org.apache.spark.scheduler.TaskSetManager: Lost task 0.0 in stage 12681.0 (TID 27579) (172.17.0.1 executor driver): org.apache.spark.sql.AnalysisException: array<double> cannot be converted to Hive TypeInfo
	at org.apache.spark.sql.hive.HiveInspectors$typeInfoConversions.toTypeInfo(HiveInspectors.scala:1068)
	at org.apache.spark.sql.hive.execution.HiveScriptIOSchema$.$anonfun$initSerDe$1(HiveScriptTransformationExec.scala:272)
	at scala.collection.TraversableLike.$anonfun$map$1(TraversableLike.scala:238)
	at scala.collection.immutable.List.foreach(List.scala:392)
	at scala.collection.TraversableLike.map(TraversableLike.scala:238)
	at scala.collection.TraversableLike.map$(TraversableLike.scala:231)
	at scala.collection.immutable.List.map(List.scala:298)
	at org.apache.spark.sql.hive.execution.HiveScriptIOSchema$.initSerDe(HiveScriptTransformationExec.scala:272)
	at org.apache.spark.sql.hive.execution.HiveScriptIOSchema$.$anonfun$initInputSerDe$1(HiveScriptTransformationExec.scala:238)
	at scala.Option.map(Option.scala:230)
	at org.apache.spark.sql.hive.execution.HiveScriptIOSchema$.initInputSerDe(HiveScriptTransformationExec.scala:236)
	at org.apache.spark.sql.hive.execution.HiveScriptTransformationExec.processIterator(HiveScriptTransformationExec.scala:143)
	at org.apache.spark.sql.execution.BaseScriptTransformationExec.$anonfun$doExecute$1(BaseScriptTransformationExec.scala:64)
	at org.apache.spark.rdd.RDD.$anonfun$mapPartitions$2(RDD.scala:863)
	at org.apache.spark.rdd.RDD.$anonfun$mapPartitions$2$adapted(RDD.scala:863)
	at org.apache.spark.rdd.MapPartitionsRDD.compute(MapPartitionsRDD.scala:52)
	at org.apache.spark.rdd.RDD.computeOrReadCheckpoint(RDD.scala:373)
	at org.apache.spark.rdd.RDD.iterator(RDD.scala:337)
	at org.apache.spark.rdd.MapPartitionsRDD.compute(MapPartitionsRDD.scala:52)
	at org.apache.spark.rdd.RDD.computeOrReadCheckpoint(RDD.scala:373)
	at org.apache.spark.rdd.RDD.iterator(RDD.scala:337)
	at org.apache.spark.scheduler.ResultTask.runTask(ResultTask.scala:90)
	at org.apache.spark.scheduler.Task.run(Task.scala:131)
	at org.apache.spark.executor.Executor$TaskRunner.$anonfun$run$3(Executor.scala:498)
	at org.apache.spark.util.Utils$.tryWithSafeFinally(Utils.scala:1439)
	at org.apache.spark.executor.Executor$TaskRunner.run(Executor.scala:501)
	at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1149)
	at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:624)
	at java.lang.Thread.run(Thread.java:748)

03:01:26.040 ERROR org.apache.spark.scheduler.TaskSetManager: Task 0 in stage 12681.0 failed 1 times; aborting job
[info] - SPARK-32400: TRANSFORM doesn't support CalendarIntervalType/UserDefinedType end to end (hive serde) (264 milliseconds)
03:01:26.128 WARN org.apache.hadoop.hive.conf.HiveConf: HiveConf of name hive.internal.ss.authz.settings.applied.marker does not exist
03:01:26.129 WARN org.apache.hadoop.hive.conf.HiveConf: HiveConf of name hive.stats.jdbc.timeout does not exist
03:01:26.129 WARN org.apache.hadoop.hive.conf.HiveConf: HiveConf of name hive.stats.retries.wait does not exist
03:01:26.224 WARN org.apache.hadoop.hive.conf.HiveConf: HiveConf of name hive.internal.ss.authz.settings.applied.marker does not exist
03:01:26.224 WARN org.apache.hadoop.hive.conf.HiveConf: HiveConf of name hive.stats.jdbc.timeout does not exist
03:01:26.224 WARN org.apache.hadoop.hive.conf.HiveConf: HiveConf of name hive.stats.retries.wait does not exist
[info] HashAggregationQuerySuite:
[info] - group by function (515 milliseconds)
[info] - empty table (1 second, 631 milliseconds)
[info] - null literal (133 milliseconds)
[info] - only do grouping (2 seconds, 315 milliseconds)
[info] - case in-sensitive resolution (2 seconds, 61 milliseconds)
[info] - test average no key in output (481 milliseconds)
[info] - test average (1 second, 973 milliseconds)
[info] - first_value and last_value (517 milliseconds)
[info] - udaf (704 milliseconds)
[info] - non-deterministic children expressions of UDAF (24 milliseconds)
[info] - interpreted aggregate function (816 milliseconds)
[info] - interpreted and expression-based aggregation functions (1 second, 18 milliseconds)
[info] - single distinct column set (4 seconds, 110 milliseconds)
[info] - single distinct multiple columns set (978 milliseconds)
[info] - multiple distinct multiple columns sets (1 second, 683 milliseconds)
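The distinct-aggregation tests above cover plans where several DISTINCT aggregates over different column sets have to be expanded and merged into one pass. The flavor of query involved, runnable in any spark-shell (the inline table is illustrative):

    spark.sql("""
      SELECT count(DISTINCT a), count(DISTINCT b), sum(DISTINCT a + b), avg(c)
      FROM VALUES (1, 2, 10.0), (1, 3, 20.0), (2, 3, 30.0) AS t(a, b, c)
    """).show()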
[info] - test count (1 second, 706 milliseconds)
[info] - pearson correlation (2 seconds, 154 milliseconds)
[info] - covariance: covar_pop and covar_samp (493 milliseconds)
[info] - no aggregation function (SPARK-11486) (205 milliseconds)
03:01:50.381 WARN org.apache.spark.scheduler.TaskSetManager: Stage 12907 contains a task of very large size (3820 KiB). The maximum recommended task size is 1000 KiB.
03:01:50.796 WARN org.apache.spark.scheduler.TaskSetManager: Stage 12909 contains a task of very large size (3820 KiB). The maximum recommended task size is 1000 KiB.
[info] - udaf with all data types (1 second, 100 milliseconds)
[info] - udaf without specifying inputSchema (716 milliseconds)
[info] - SPARK-15206: single distinct aggregate function in having clause (1 second, 37 milliseconds)
[info] - SPARK-15206: multiple distinct aggregate function in having clause (1 second, 140 milliseconds)
[info] - SPARK-24957: average with decimal followed by aggregation returning wrong result (540 milliseconds)
[info] - SPARK-29122: hash-based aggregates for unfixed-length decimals in the interpreter mode (165 milliseconds)
[info] - SPARK-29140: HashAggregateExec aggregating binary type doesn't break codegen compilation (596 milliseconds)
03:01:55.782 WARN org.apache.hadoop.hive.conf.HiveConf: HiveConf of name hive.internal.ss.authz.settings.applied.marker does not exist
03:01:55.782 WARN org.apache.hadoop.hive.conf.HiveConf: HiveConf of name hive.stats.jdbc.timeout does not exist
03:01:55.782 WARN org.apache.hadoop.hive.conf.HiveConf: HiveConf of name hive.stats.retries.wait does not exist
03:01:55.866 WARN org.apache.hadoop.hive.conf.HiveConf: HiveConf of name hive.internal.ss.authz.settings.applied.marker does not exist
03:01:55.866 WARN org.apache.hadoop.hive.conf.HiveConf: HiveConf of name hive.stats.jdbc.timeout does not exist
03:01:55.866 WARN org.apache.hadoop.hive.conf.HiveConf: HiveConf of name hive.stats.retries.wait does not exist
[info] HiveExplainSuite:
03:01:55.885 WARN org.apache.hadoop.hive.metastore.HiveMetaStore: Location: file:/home/jenkins/workspace/spark-branch-3.1-test-sbt-hadoop-2.7/sql/hive/target/tmp/hive_execution_test_group/warehouse-0da5d255-53c3-4629-bef1-e801e1df669e/src specified for non-external table:src
[info] - show cost in explain command (499 milliseconds)
[info] - explain extended command (99 milliseconds)
[info] - explain create table command (96 milliseconds)
[info] - explain output of physical plan should contain proper codegen stage ID (21 milliseconds)
[info] - EXPLAIN CODEGEN command (59 milliseconds)
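EXPLAIN CODEGEN prints the Java source that whole-stage code generation produces for each collapsed plan subtree, and the codegen-stage-ID test above checks the *(n) markers embedded in such plans. For instance, in a spark-shell:

    spark.sql("EXPLAIN CODEGEN SELECT id + 1 FROM range(10)").show(truncate = false)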
03:01:56.665 WARN org.apache.hadoop.hive.metastore.HiveMetaStore: Location: file:/home/jenkins/workspace/spark-branch-3.1-test-sbt-hadoop-2.7/sql/hive/target/tmp/hive_execution_test_group/warehouse-0da5d255-53c3-4629-bef1-e801e1df669e/tab specified for non-external table:tab
[info] - SPARK-23034 show relation names in Hive table scan nodes (119 milliseconds)
[info] - SPARK-26661: Show actual class name of the writing command in CTAS explain (19 milliseconds)
03:01:56.970 WARN org.apache.spark.sql.hive.test.TestHiveExternalCatalog: Couldn't find corresponding Hive SerDe for data source provider json. Persisting data source table `default`.`t` into Hive metastore in Spark SQL specific format, which is NOT compatible with Hive.
== Physical Plan ==
*(1) Sort [i#266328 ASC NULLS FIRST], true, 0
+- Exchange rangepartitioning(i#266328 ASC NULLS FIRST, 5), ENSURE_REQUIREMENTS, [id=#201180]
   +- FileScan json default.t[i#266328,j#266329] Batched: false, DataFilters: [], Format: JSON, Location: CatalogFileIndex[file:/home/jenkins/workspace/spark-branch-3.1-test-sbt-hadoop-2.7/sql/hive/targe..., PartitionFilters: [], PushedFilters: [], ReadSchema: struct<i:int>


03:01:57.471 WARN org.apache.spark.sql.hive.test.TestHiveExternalCatalog: Couldn't find corresponding Hive SerDe for data source provider json. Persisting data source table `default`.`t` into Hive metastore in Spark SQL specific format, which is NOT compatible with Hive.
== Physical Plan ==
*(1) Sort [i#266340 ASC NULLS FIRST], true, 0
+- Exchange rangepartitioning(i#266340 ASC NULLS FIRST, 5), ENSURE_REQUIREMENTS, [id=#201215]
   +- FileScan json default.t[i#266340,j#266341] Batched: false, DataFilters: [], Format: JSON, Location: CatalogFileIndex[file:/home/jenkins/workspace/spark-branch-3.1-test-sbt-hadoop-2.7/sql/hive/targe..., PartitionFilters: [], PushedFilters: [], ReadSchema: struct<i:int>


[info] - SPARK-28595: explain should not trigger partition listing (949 milliseconds)
03:01:57.773 WARN org.apache.hadoop.hive.common.FileUtils: File file:/home/jenkins/workspace/spark-branch-3.1-test-sbt-hadoop-2.7/sql/hive/target/tmp/hive_execution_test_group/warehouse-0da5d255-53c3-4629-bef1-e801e1df669e/src does not exist; Force to delete it.
03:01:57.773 ERROR org.apache.hadoop.hive.common.FileUtils: Failed to delete file:/home/jenkins/workspace/spark-branch-3.1-test-sbt-hadoop-2.7/sql/hive/target/tmp/hive_execution_test_group/warehouse-0da5d255-53c3-4629-bef1-e801e1df669e/src
03:01:57.834 WARN org.apache.hadoop.hive.conf.HiveConf: HiveConf of name hive.internal.ss.authz.settings.applied.marker does not exist
03:01:57.834 WARN org.apache.hadoop.hive.conf.HiveConf: HiveConf of name hive.stats.jdbc.timeout does not exist
03:01:57.835 WARN org.apache.hadoop.hive.conf.HiveConf: HiveConf of name hive.stats.retries.wait does not exist
03:01:57.935 WARN org.apache.hadoop.hive.conf.HiveConf: HiveConf of name hive.internal.ss.authz.settings.applied.marker does not exist
03:01:57.935 WARN org.apache.hadoop.hive.conf.HiveConf: HiveConf of name hive.stats.jdbc.timeout does not exist
03:01:57.936 WARN org.apache.hadoop.hive.conf.HiveConf: HiveConf of name hive.stats.retries.wait does not exist
[info] PruningSuite:
03:01:58.011 WARN org.apache.hadoop.hive.conf.HiveConf: HiveConf of name hive.internal.ss.authz.settings.applied.marker does not exist
03:01:58.011 WARN org.apache.hadoop.hive.conf.HiveConf: HiveConf of name hive.stats.jdbc.timeout does not exist
03:01:58.012 WARN org.apache.hadoop.hive.conf.HiveConf: HiveConf of name hive.stats.retries.wait does not exist
03:01:58.096 WARN org.apache.hadoop.hive.conf.HiveConf: HiveConf of name hive.internal.ss.authz.settings.applied.marker does not exist
03:01:58.096 WARN org.apache.hadoop.hive.conf.HiveConf: HiveConf of name hive.stats.jdbc.timeout does not exist
03:01:58.097 WARN org.apache.hadoop.hive.conf.HiveConf: HiveConf of name hive.stats.retries.wait does not exist
03:01:58.107 WARN org.apache.hadoop.hive.metastore.HiveMetaStore: Location: file:/home/jenkins/workspace/spark-branch-3.1-test-sbt-hadoop-2.7/sql/hive/target/tmp/hive_execution_test_group/warehouse-0da5d255-53c3-4629-bef1-e801e1df669e/srcpart specified for non-external table:srcpart
[info] - Column pruning - with partitioned table - pruning test (797 milliseconds)
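Column pruning means the scan reads only the columns a query actually needs; PruningSuite verifies both the pruned attribute lists and the query results. The effect is visible in any file-based plan's ReadSchema, e.g. in a spark-shell (the path is illustrative):

    spark.range(10).selectExpr("id AS key", "id * 2 AS value")
      .write.mode("overwrite").parquet("/tmp/pruning_demo")
    spark.read.parquet("/tmp/pruning_demo").select("key").explain()
    // FileScan parquet ... ReadSchema: struct<key:bigint>  <- only the selected column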
03:01:59.042 WARN org.apache.hadoop.hive.common.FileUtils: File file:/home/jenkins/workspace/spark-branch-3.1-test-sbt-hadoop-2.7/sql/hive/target/tmp/hive_execution_test_group/warehouse-0da5d255-53c3-4629-bef1-e801e1df669e/srcpart does not exist; Force to delete it.
03:01:59.042 ERROR org.apache.hadoop.hive.common.FileUtils: Failed to delete file:/home/jenkins/workspace/spark-branch-3.1-test-sbt-hadoop-2.7/sql/hive/target/tmp/hive_execution_test_group/warehouse-0da5d255-53c3-4629-bef1-e801e1df669e/srcpart
03:01:59.101 WARN org.apache.hadoop.hive.conf.HiveConf: HiveConf of name hive.internal.ss.authz.settings.applied.marker does not exist
03:01:59.101 WARN org.apache.hadoop.hive.conf.HiveConf: HiveConf of name hive.stats.jdbc.timeout does not exist
03:01:59.102 WARN org.apache.hadoop.hive.conf.HiveConf: HiveConf of name hive.stats.retries.wait does not exist
03:01:59.200 WARN org.apache.hadoop.hive.conf.HiveConf: HiveConf of name hive.internal.ss.authz.settings.applied.marker does not exist
03:01:59.200 WARN org.apache.hadoop.hive.conf.HiveConf: HiveConf of name hive.stats.jdbc.timeout does not exist
03:01:59.200 WARN org.apache.hadoop.hive.conf.HiveConf: HiveConf of name hive.stats.retries.wait does not exist
03:01:59.211 WARN org.apache.hadoop.hive.metastore.HiveMetaStore: Location: file:/home/jenkins/workspace/spark-branch-3.1-test-sbt-hadoop-2.7/sql/hive/target/tmp/hive_execution_test_group/warehouse-0da5d255-53c3-4629-bef1-e801e1df669e/src specified for non-external table:src
03:01:59.442 WARN org.apache.hadoop.hive.metastore.HiveMetaStore: Location: file:/home/jenkins/workspace/spark-branch-3.1-test-sbt-hadoop-2.7/sql/hive/target/tmp/hive_execution_test_group/warehouse-0da5d255-53c3-4629-bef1-e801e1df669e/srcpart specified for non-external table:srcpart
[info] - Column pruning - with partitioned table - query test (1 second, 376 milliseconds)
[info] - Column pruning - with non-partitioned table - pruning test (20 milliseconds)
03:02:00.338 WARN org.apache.hadoop.hive.common.FileUtils: File file:/home/jenkins/workspace/spark-branch-3.1-test-sbt-hadoop-2.7/sql/hive/target/tmp/hive_execution_test_group/warehouse-0da5d255-53c3-4629-bef1-e801e1df669e/src does not exist; Force to delete it.
03:02:00.338 ERROR org.apache.hadoop.hive.common.FileUtils: Failed to delete file:/home/jenkins/workspace/spark-branch-3.1-test-sbt-hadoop-2.7/sql/hive/target/tmp/hive_execution_test_group/warehouse-0da5d255-53c3-4629-bef1-e801e1df669e/src
03:02:00.432 WARN org.apache.hadoop.hive.common.FileUtils: File file:/home/jenkins/workspace/spark-branch-3.1-test-sbt-hadoop-2.7/sql/hive/target/tmp/hive_execution_test_group/warehouse-0da5d255-53c3-4629-bef1-e801e1df669e/srcpart does not exist; Force to delete it.
03:02:00.432 ERROR org.apache.hadoop.hive.common.FileUtils: Failed to delete file:/home/jenkins/workspace/spark-branch-3.1-test-sbt-hadoop-2.7/sql/hive/target/tmp/hive_execution_test_group/warehouse-0da5d255-53c3-4629-bef1-e801e1df669e/srcpart
03:02:00.497 WARN org.apache.hadoop.hive.conf.HiveConf: HiveConf of name hive.internal.ss.authz.settings.applied.marker does not exist
03:02:00.497 WARN org.apache.hadoop.hive.conf.HiveConf: HiveConf of name hive.stats.jdbc.timeout does not exist
03:02:00.497 WARN org.apache.hadoop.hive.conf.HiveConf: HiveConf of name hive.stats.retries.wait does not exist
03:02:00.600 WARN org.apache.hadoop.hive.conf.HiveConf: HiveConf of name hive.internal.ss.authz.settings.applied.marker does not exist
03:02:00.600 WARN org.apache.hadoop.hive.conf.HiveConf: HiveConf of name hive.stats.jdbc.timeout does not exist
03:02:00.600 WARN org.apache.hadoop.hive.conf.HiveConf: HiveConf of name hive.stats.retries.wait does not exist
03:02:00.610 WARN org.apache.hadoop.hive.metastore.HiveMetaStore: Location: file:/home/jenkins/workspace/spark-branch-3.1-test-sbt-hadoop-2.7/sql/hive/target/tmp/hive_execution_test_group/warehouse-0da5d255-53c3-4629-bef1-e801e1df669e/src specified for non-external table:src
[info] - Column pruning - with non-partitioned table - query test (536 milliseconds)
[info] - Column pruning - with multiple projects - pruning test (22 milliseconds)
03:02:00.881 WARN org.apache.hadoop.hive.common.FileUtils: File file:/home/jenkins/workspace/spark-branch-3.1-test-sbt-hadoop-2.7/sql/hive/target/tmp/hive_execution_test_group/warehouse-0da5d255-53c3-4629-bef1-e801e1df669e/src does not exist; Force to delete it.
03:02:00.881 ERROR org.apache.hadoop.hive.common.FileUtils: Failed to delete file:/home/jenkins/workspace/spark-branch-3.1-test-sbt-hadoop-2.7/sql/hive/target/tmp/hive_execution_test_group/warehouse-0da5d255-53c3-4629-bef1-e801e1df669e/src
03:02:00.941 WARN org.apache.hadoop.hive.conf.HiveConf: HiveConf of name hive.internal.ss.authz.settings.applied.marker does not exist
03:02:00.941 WARN org.apache.hadoop.hive.conf.HiveConf: HiveConf of name hive.stats.jdbc.timeout does not exist
03:02:00.942 WARN org.apache.hadoop.hive.conf.HiveConf: HiveConf of name hive.stats.retries.wait does not exist
03:02:01.028 WARN org.apache.hadoop.hive.conf.HiveConf: HiveConf of name hive.internal.ss.authz.settings.applied.marker does not exist
03:02:01.028 WARN org.apache.hadoop.hive.conf.HiveConf: HiveConf of name hive.stats.jdbc.timeout does not exist
03:02:01.029 WARN org.apache.hadoop.hive.conf.HiveConf: HiveConf of name hive.stats.retries.wait does not exist
03:02:01.038 WARN org.apache.hadoop.hive.metastore.HiveMetaStore: Location: file:/home/jenkins/workspace/spark-branch-3.1-test-sbt-hadoop-2.7/sql/hive/target/tmp/hive_execution_test_group/warehouse-0da5d255-53c3-4629-bef1-e801e1df669e/src specified for non-external table:src
[info] - Column pruning - with multiple projects - query test (399 milliseconds)
[info] - Column pruning - projects alias substituting - pruning test (22 milliseconds)
03:02:01.302 WARN org.apache.hadoop.hive.common.FileUtils: File file:/home/jenkins/workspace/spark-branch-3.1-test-sbt-hadoop-2.7/sql/hive/target/tmp/hive_execution_test_group/warehouse-0da5d255-53c3-4629-bef1-e801e1df669e/src does not exist; Force to delete it.
03:02:01.302 ERROR org.apache.hadoop.hive.common.FileUtils: Failed to delete file:/home/jenkins/workspace/spark-branch-3.1-test-sbt-hadoop-2.7/sql/hive/target/tmp/hive_execution_test_group/warehouse-0da5d255-53c3-4629-bef1-e801e1df669e/src
03:02:01.361 WARN org.apache.hadoop.hive.conf.HiveConf: HiveConf of name hive.internal.ss.authz.settings.applied.marker does not exist
03:02:01.361 WARN org.apache.hadoop.hive.conf.HiveConf: HiveConf of name hive.stats.jdbc.timeout does not exist
03:02:01.361 WARN org.apache.hadoop.hive.conf.HiveConf: HiveConf of name hive.stats.retries.wait does not exist
03:02:01.451 WARN org.apache.hadoop.hive.conf.HiveConf: HiveConf of name hive.internal.ss.authz.settings.applied.marker does not exist
03:02:01.452 WARN org.apache.hadoop.hive.conf.HiveConf: HiveConf of name hive.stats.jdbc.timeout does not exist
03:02:01.452 WARN org.apache.hadoop.hive.conf.HiveConf: HiveConf of name hive.stats.retries.wait does not exist
03:02:01.462 WARN org.apache.hadoop.hive.metastore.HiveMetaStore: Location: file:/home/jenkins/workspace/spark-branch-3.1-test-sbt-hadoop-2.7/sql/hive/target/tmp/hive_execution_test_group/warehouse-0da5d255-53c3-4629-bef1-e801e1df669e/src specified for non-external table:src
[info] - Column pruning - projects alias substituting - query test (375 milliseconds)
[info] - Column pruning - filter alias in-lining - pruning test (22 milliseconds)
03:02:01.700 WARN org.apache.hadoop.hive.common.FileUtils: File file:/home/jenkins/workspace/spark-branch-3.1-test-sbt-hadoop-2.7/sql/hive/target/tmp/hive_execution_test_group/warehouse-0da5d255-53c3-4629-bef1-e801e1df669e/src does not exist; Force to delete it.
03:02:01.700 ERROR org.apache.hadoop.hive.common.FileUtils: Failed to delete file:/home/jenkins/workspace/spark-branch-3.1-test-sbt-hadoop-2.7/sql/hive/target/tmp/hive_execution_test_group/warehouse-0da5d255-53c3-4629-bef1-e801e1df669e/src
03:02:01.757 WARN org.apache.hadoop.hive.conf.HiveConf: HiveConf of name hive.internal.ss.authz.settings.applied.marker does not exist
03:02:01.757 WARN org.apache.hadoop.hive.conf.HiveConf: HiveConf of name hive.stats.jdbc.timeout does not exist
03:02:01.757 WARN org.apache.hadoop.hive.conf.HiveConf: HiveConf of name hive.stats.retries.wait does not exist
03:02:01.845 WARN org.apache.hadoop.hive.conf.HiveConf: HiveConf of name hive.internal.ss.authz.settings.applied.marker does not exist
03:02:01.845 WARN org.apache.hadoop.hive.conf.HiveConf: HiveConf of name hive.stats.jdbc.timeout does not exist
03:02:01.845 WARN org.apache.hadoop.hive.conf.HiveConf: HiveConf of name hive.stats.retries.wait does not exist
03:02:01.856 WARN org.apache.hadoop.hive.metastore.HiveMetaStore: Location: file:/home/jenkins/workspace/spark-branch-3.1-test-sbt-hadoop-2.7/sql/hive/target/tmp/hive_execution_test_group/warehouse-0da5d255-53c3-4629-bef1-e801e1df669e/src specified for non-external table:src
[info] - Column pruning - filter alias in-lining - query test (438 milliseconds)
[info] - Column pruning - without filters - pruning test (22 milliseconds)
03:02:02.164 WARN org.apache.hadoop.hive.common.FileUtils: File file:/home/jenkins/workspace/spark-branch-3.1-test-sbt-hadoop-2.7/sql/hive/target/tmp/hive_execution_test_group/warehouse-0da5d255-53c3-4629-bef1-e801e1df669e/src does not exist; Force to delete it.
03:02:02.164 ERROR org.apache.hadoop.hive.common.FileUtils: Failed to delete file:/home/jenkins/workspace/spark-branch-3.1-test-sbt-hadoop-2.7/sql/hive/target/tmp/hive_execution_test_group/warehouse-0da5d255-53c3-4629-bef1-e801e1df669e/src
03:02:02.225 WARN org.apache.hadoop.hive.conf.HiveConf: HiveConf of name hive.internal.ss.authz.settings.applied.marker does not exist
03:02:02.225 WARN org.apache.hadoop.hive.conf.HiveConf: HiveConf of name hive.stats.jdbc.timeout does not exist
03:02:02.225 WARN org.apache.hadoop.hive.conf.HiveConf: HiveConf of name hive.stats.retries.wait does not exist
03:02:02.312 WARN org.apache.hadoop.hive.conf.HiveConf: HiveConf of name hive.internal.ss.authz.settings.applied.marker does not exist
03:02:02.312 WARN org.apache.hadoop.hive.conf.HiveConf: HiveConf of name hive.stats.jdbc.timeout does not exist
03:02:02.313 WARN org.apache.hadoop.hive.conf.HiveConf: HiveConf of name hive.stats.retries.wait does not exist
03:02:02.323 WARN org.apache.hadoop.hive.metastore.HiveMetaStore: Location: file:/home/jenkins/workspace/spark-branch-3.1-test-sbt-hadoop-2.7/sql/hive/target/tmp/hive_execution_test_group/warehouse-0da5d255-53c3-4629-bef1-e801e1df669e/src specified for non-external table:src
[info] - Column pruning - without filters - query test (400 milliseconds)
[info] - Column pruning - simple top project without aliases - pruning test (26 milliseconds)
03:02:02.592 WARN org.apache.hadoop.hive.common.FileUtils: File file:/home/jenkins/workspace/spark-branch-3.1-test-sbt-hadoop-2.7/sql/hive/target/tmp/hive_execution_test_group/warehouse-0da5d255-53c3-4629-bef1-e801e1df669e/src does not exist; Force to delete it.
03:02:02.592 ERROR org.apache.hadoop.hive.common.FileUtils: Failed to delete file:/home/jenkins/workspace/spark-branch-3.1-test-sbt-hadoop-2.7/sql/hive/target/tmp/hive_execution_test_group/warehouse-0da5d255-53c3-4629-bef1-e801e1df669e/src
03:02:02.651 WARN org.apache.hadoop.hive.conf.HiveConf: HiveConf of name hive.internal.ss.authz.settings.applied.marker does not exist
03:02:02.651 WARN org.apache.hadoop.hive.conf.HiveConf: HiveConf of name hive.stats.jdbc.timeout does not exist
03:02:02.651 WARN org.apache.hadoop.hive.conf.HiveConf: HiveConf of name hive.stats.retries.wait does not exist
03:02:02.777 WARN org.apache.hadoop.hive.conf.HiveConf: HiveConf of name hive.internal.ss.authz.settings.applied.marker does not exist
03:02:02.777 WARN org.apache.hadoop.hive.conf.HiveConf: HiveConf of name hive.stats.jdbc.timeout does not exist
03:02:02.777 WARN org.apache.hadoop.hive.conf.HiveConf: HiveConf of name hive.stats.retries.wait does not exist
03:02:02.788 WARN org.apache.hadoop.hive.metastore.HiveMetaStore: Location: file:/home/jenkins/workspace/spark-branch-3.1-test-sbt-hadoop-2.7/sql/hive/target/tmp/hive_execution_test_group/warehouse-0da5d255-53c3-4629-bef1-e801e1df669e/src specified for non-external table:src
[info] - Column pruning - simple top project without aliases - query test (453 milliseconds)
[info] - Column pruning - non-trivial top project with aliases - pruning test (47 milliseconds)
03:02:03.100 WARN org.apache.hadoop.hive.common.FileUtils: File file:/home/jenkins/workspace/spark-branch-3.1-test-sbt-hadoop-2.7/sql/hive/target/tmp/hive_execution_test_group/warehouse-0da5d255-53c3-4629-bef1-e801e1df669e/src does not exist; Force to delete it.
03:02:03.100 ERROR org.apache.hadoop.hive.common.FileUtils: Failed to delete file:/home/jenkins/workspace/spark-branch-3.1-test-sbt-hadoop-2.7/sql/hive/target/tmp/hive_execution_test_group/warehouse-0da5d255-53c3-4629-bef1-e801e1df669e/src
03:02:03.173 WARN org.apache.hadoop.hive.conf.HiveConf: HiveConf of name hive.internal.ss.authz.settings.applied.marker does not exist
03:02:03.173 WARN org.apache.hadoop.hive.conf.HiveConf: HiveConf of name hive.stats.jdbc.timeout does not exist
03:02:03.173 WARN org.apache.hadoop.hive.conf.HiveConf: HiveConf of name hive.stats.retries.wait does not exist
03:02:03.276 WARN org.apache.hadoop.hive.conf.HiveConf: HiveConf of name hive.internal.ss.authz.settings.applied.marker does not exist
03:02:03.276 WARN org.apache.hadoop.hive.conf.HiveConf: HiveConf of name hive.stats.jdbc.timeout does not exist
03:02:03.276 WARN org.apache.hadoop.hive.conf.HiveConf: HiveConf of name hive.stats.retries.wait does not exist
03:02:03.288 WARN org.apache.hadoop.hive.metastore.HiveMetaStore: Location: file:/home/jenkins/workspace/spark-branch-3.1-test-sbt-hadoop-2.7/sql/hive/target/tmp/hive_execution_test_group/warehouse-0da5d255-53c3-4629-bef1-e801e1df669e/src specified for non-external table:src
[info] - Column pruning - non-trivial top project with aliases - query test (474 milliseconds)
[info] - Partition pruning - non-partitioned, non-trivial project - pruning test (24 milliseconds)
03:02:03.596 WARN org.apache.hadoop.hive.common.FileUtils: File file:/home/jenkins/workspace/spark-branch-3.1-test-sbt-hadoop-2.7/sql/hive/target/tmp/hive_execution_test_group/warehouse-0da5d255-53c3-4629-bef1-e801e1df669e/src does not exist; Force to delete it.
03:02:03.596 ERROR org.apache.hadoop.hive.common.FileUtils: Failed to delete file:/home/jenkins/workspace/spark-branch-3.1-test-sbt-hadoop-2.7/sql/hive/target/tmp/hive_execution_test_group/warehouse-0da5d255-53c3-4629-bef1-e801e1df669e/src
03:02:03.677 WARN org.apache.hadoop.hive.conf.HiveConf: HiveConf of name hive.internal.ss.authz.settings.applied.marker does not exist
03:02:03.677 WARN org.apache.hadoop.hive.conf.HiveConf: HiveConf of name hive.stats.jdbc.timeout does not exist
03:02:03.677 WARN org.apache.hadoop.hive.conf.HiveConf: HiveConf of name hive.stats.retries.wait does not exist
03:02:03.774 WARN org.apache.hadoop.hive.conf.HiveConf: HiveConf of name hive.internal.ss.authz.settings.applied.marker does not exist
03:02:03.774 WARN org.apache.hadoop.hive.conf.HiveConf: HiveConf of name hive.stats.jdbc.timeout does not exist
03:02:03.774 WARN org.apache.hadoop.hive.conf.HiveConf: HiveConf of name hive.stats.retries.wait does not exist
03:02:03.791 WARN org.apache.hadoop.hive.metastore.HiveMetaStore: Location: file:/home/jenkins/workspace/spark-branch-3.1-test-sbt-hadoop-2.7/sql/hive/target/tmp/hive_execution_test_group/warehouse-0da5d255-53c3-4629-bef1-e801e1df669e/src specified for non-external table:src
[info] - Partition pruning - non-partitioned, non-trivial project - query test (509 milliseconds)
[info] - Partition pruning - non-partitioned table - pruning test (20 milliseconds)
03:02:04.127 WARN org.apache.hadoop.hive.common.FileUtils: File file:/home/jenkins/workspace/spark-branch-3.1-test-sbt-hadoop-2.7/sql/hive/target/tmp/hive_execution_test_group/warehouse-0da5d255-53c3-4629-bef1-e801e1df669e/src does not exist; Force to delete it.
03:02:04.127 ERROR org.apache.hadoop.hive.common.FileUtils: Failed to delete file:/home/jenkins/workspace/spark-branch-3.1-test-sbt-hadoop-2.7/sql/hive/target/tmp/hive_execution_test_group/warehouse-0da5d255-53c3-4629-bef1-e801e1df669e/src
03:02:04.191 WARN org.apache.hadoop.hive.conf.HiveConf: HiveConf of name hive.internal.ss.authz.settings.applied.marker does not exist
03:02:04.191 WARN org.apache.hadoop.hive.conf.HiveConf: HiveConf of name hive.stats.jdbc.timeout does not exist
03:02:04.191 WARN org.apache.hadoop.hive.conf.HiveConf: HiveConf of name hive.stats.retries.wait does not exist
03:02:04.292 WARN org.apache.hadoop.hive.conf.HiveConf: HiveConf of name hive.internal.ss.authz.settings.applied.marker does not exist
03:02:04.292 WARN org.apache.hadoop.hive.conf.HiveConf: HiveConf of name hive.stats.jdbc.timeout does not exist
03:02:04.292 WARN org.apache.hadoop.hive.conf.HiveConf: HiveConf of name hive.stats.retries.wait does not exist
03:02:04.306 WARN org.apache.hadoop.hive.metastore.HiveMetaStore: Location: file:/home/jenkins/workspace/spark-branch-3.1-test-sbt-hadoop-2.7/sql/hive/target/tmp/hive_execution_test_group/warehouse-0da5d255-53c3-4629-bef1-e801e1df669e/src specified for non-external table:src
[info] - Partition pruning - non-partitioned table - query test (502 milliseconds)
03:02:04.603 WARN org.apache.hadoop.hive.metastore.HiveMetaStore: Location: file:/home/jenkins/workspace/spark-branch-3.1-test-sbt-hadoop-2.7/sql/hive/target/tmp/hive_execution_test_group/warehouse-0da5d255-53c3-4629-bef1-e801e1df669e/srcpart1 specified for non-external table:srcpart1
[info] - Partition pruning - with filter on string partition key - pruning test (992 milliseconds)
03:02:05.619 WARN org.apache.hadoop.hive.common.FileUtils: File file:/home/jenkins/workspace/spark-branch-3.1-test-sbt-hadoop-2.7/sql/hive/target/tmp/hive_execution_test_group/warehouse-0da5d255-53c3-4629-bef1-e801e1df669e/src does not exist; Force to delete it.
03:02:05.619 ERROR org.apache.hadoop.hive.common.FileUtils: Failed to delete file:/home/jenkins/workspace/spark-branch-3.1-test-sbt-hadoop-2.7/sql/hive/target/tmp/hive_execution_test_group/warehouse-0da5d255-53c3-4629-bef1-e801e1df669e/src
03:02:05.755 WARN org.apache.hadoop.hive.common.FileUtils: File file:/home/jenkins/workspace/spark-branch-3.1-test-sbt-hadoop-2.7/sql/hive/target/tmp/hive_execution_test_group/warehouse-0da5d255-53c3-4629-bef1-e801e1df669e/srcpart1 does not exist; Force to delete it.
03:02:05.755 ERROR org.apache.hadoop.hive.common.FileUtils: Failed to delete file:/home/jenkins/workspace/spark-branch-3.1-test-sbt-hadoop-2.7/sql/hive/target/tmp/hive_execution_test_group/warehouse-0da5d255-53c3-4629-bef1-e801e1df669e/srcpart1
03:02:05.834 WARN org.apache.hadoop.hive.conf.HiveConf: HiveConf of name hive.internal.ss.authz.settings.applied.marker does not exist
03:02:05.835 WARN org.apache.hadoop.hive.conf.HiveConf: HiveConf of name hive.stats.jdbc.timeout does not exist
03:02:05.835 WARN org.apache.hadoop.hive.conf.HiveConf: HiveConf of name hive.stats.retries.wait does not exist
03:02:05.938 WARN org.apache.hadoop.hive.conf.HiveConf: HiveConf of name hive.internal.ss.authz.settings.applied.marker does not exist
03:02:05.939 WARN org.apache.hadoop.hive.conf.HiveConf: HiveConf of name hive.stats.jdbc.timeout does not exist
03:02:05.939 WARN org.apache.hadoop.hive.conf.HiveConf: HiveConf of name hive.stats.retries.wait does not exist
03:02:05.948 WARN org.apache.hadoop.hive.metastore.HiveMetaStore: Location: file:/home/jenkins/workspace/spark-branch-3.1-test-sbt-hadoop-2.7/sql/hive/target/tmp/hive_execution_test_group/warehouse-0da5d255-53c3-4629-bef1-e801e1df669e/src specified for non-external table:src
03:02:06.166 WARN org.apache.hadoop.hive.metastore.HiveMetaStore: Location: file:/home/jenkins/workspace/spark-branch-3.1-test-sbt-hadoop-2.7/sql/hive/target/tmp/hive_execution_test_group/warehouse-0da5d255-53c3-4629-bef1-e801e1df669e/srcpart specified for non-external table:srcpart
03:02:06.886 WARN org.apache.hadoop.hive.metastore.HiveMetaStore: Location: file:/home/jenkins/workspace/spark-branch-3.1-test-sbt-hadoop-2.7/sql/hive/target/tmp/hive_execution_test_group/warehouse-0da5d255-53c3-4629-bef1-e801e1df669e/srcpart1 specified for non-external table:srcpart1
[info] - Partition pruning - with filter on string partition key - query test (2 seconds, 210 milliseconds)
[info] - Partition pruning - with filter on int partition key - pruning test (96 milliseconds)
03:02:08.005 WARN org.apache.hadoop.hive.common.FileUtils: File file:/home/jenkins/workspace/spark-branch-3.1-test-sbt-hadoop-2.7/sql/hive/target/tmp/hive_execution_test_group/warehouse-0da5d255-53c3-4629-bef1-e801e1df669e/src does not exist; Force to delete it.
03:02:08.005 ERROR org.apache.hadoop.hive.common.FileUtils: Failed to delete file:/home/jenkins/workspace/spark-branch-3.1-test-sbt-hadoop-2.7/sql/hive/target/tmp/hive_execution_test_group/warehouse-0da5d255-53c3-4629-bef1-e801e1df669e/src
03:02:08.115 WARN org.apache.hadoop.hive.common.FileUtils: File file:/home/jenkins/workspace/spark-branch-3.1-test-sbt-hadoop-2.7/sql/hive/target/tmp/hive_execution_test_group/warehouse-0da5d255-53c3-4629-bef1-e801e1df669e/srcpart does not exist; Force to delete it.
03:02:08.115 ERROR org.apache.hadoop.hive.common.FileUtils: Failed to delete file:/home/jenkins/workspace/spark-branch-3.1-test-sbt-hadoop-2.7/sql/hive/target/tmp/hive_execution_test_group/warehouse-0da5d255-53c3-4629-bef1-e801e1df669e/srcpart
03:02:08.162 WARN org.apache.hadoop.hive.common.FileUtils: File file:/home/jenkins/workspace/spark-branch-3.1-test-sbt-hadoop-2.7/sql/hive/target/tmp/hive_execution_test_group/warehouse-0da5d255-53c3-4629-bef1-e801e1df669e/srcpart1 does not exist; Force to delete it.
03:02:08.162 ERROR org.apache.hadoop.hive.common.FileUtils: Failed to delete file:/home/jenkins/workspace/spark-branch-3.1-test-sbt-hadoop-2.7/sql/hive/target/tmp/hive_execution_test_group/warehouse-0da5d255-53c3-4629-bef1-e801e1df669e/srcpart1
03:02:08.222 WARN org.apache.hadoop.hive.conf.HiveConf: HiveConf of name hive.internal.ss.authz.settings.applied.marker does not exist
03:02:08.223 WARN org.apache.hadoop.hive.conf.HiveConf: HiveConf of name hive.stats.jdbc.timeout does not exist
03:02:08.223 WARN org.apache.hadoop.hive.conf.HiveConf: HiveConf of name hive.stats.retries.wait does not exist
03:02:08.322 WARN org.apache.hadoop.hive.conf.HiveConf: HiveConf of name hive.internal.ss.authz.settings.applied.marker does not exist
03:02:08.322 WARN org.apache.hadoop.hive.conf.HiveConf: HiveConf of name hive.stats.jdbc.timeout does not exist
03:02:08.322 WARN org.apache.hadoop.hive.conf.HiveConf: HiveConf of name hive.stats.retries.wait does not exist
03:02:08.332 WARN org.apache.hadoop.hive.metastore.HiveMetaStore: Location: file:/home/jenkins/workspace/spark-branch-3.1-test-sbt-hadoop-2.7/sql/hive/target/tmp/hive_execution_test_group/warehouse-0da5d255-53c3-4629-bef1-e801e1df669e/src specified for non-external table:src
03:02:08.510 WARN org.apache.hadoop.hive.metastore.HiveMetaStore: Location: file:/home/jenkins/workspace/spark-branch-3.1-test-sbt-hadoop-2.7/sql/hive/target/tmp/hive_execution_test_group/warehouse-0da5d255-53c3-4629-bef1-e801e1df669e/srcpart specified for non-external table:srcpart
03:02:09.181 WARN org.apache.hadoop.hive.metastore.HiveMetaStore: Location: file:/home/jenkins/workspace/spark-branch-3.1-test-sbt-hadoop-2.7/sql/hive/target/tmp/hive_execution_test_group/warehouse-0da5d255-53c3-4629-bef1-e801e1df669e/srcpart1 specified for non-external table:srcpart1
[info] - Partition pruning - with filter on int partition key - query test (2 seconds, 104 milliseconds)
[info] - Partition pruning - left only 1 partition - pruning test (89 milliseconds)
03:02:10.167 WARN org.apache.hadoop.hive.common.FileUtils: File file:/home/jenkins/workspace/spark-branch-3.1-test-sbt-hadoop-2.7/sql/hive/target/tmp/hive_execution_test_group/warehouse-0da5d255-53c3-4629-bef1-e801e1df669e/src does not exist; Force to delete it.
03:02:10.168 ERROR org.apache.hadoop.hive.common.FileUtils: Failed to delete file:/home/jenkins/workspace/spark-branch-3.1-test-sbt-hadoop-2.7/sql/hive/target/tmp/hive_execution_test_group/warehouse-0da5d255-53c3-4629-bef1-e801e1df669e/src
03:02:10.265 WARN org.apache.hadoop.hive.common.FileUtils: File file:/home/jenkins/workspace/spark-branch-3.1-test-sbt-hadoop-2.7/sql/hive/target/tmp/hive_execution_test_group/warehouse-0da5d255-53c3-4629-bef1-e801e1df669e/srcpart does not exist; Force to delete it.
03:02:10.265 ERROR org.apache.hadoop.hive.common.FileUtils: Failed to delete file:/home/jenkins/workspace/spark-branch-3.1-test-sbt-hadoop-2.7/sql/hive/target/tmp/hive_execution_test_group/warehouse-0da5d255-53c3-4629-bef1-e801e1df669e/srcpart
03:02:10.315 WARN org.apache.hadoop.hive.common.FileUtils: File file:/home/jenkins/workspace/spark-branch-3.1-test-sbt-hadoop-2.7/sql/hive/target/tmp/hive_execution_test_group/warehouse-0da5d255-53c3-4629-bef1-e801e1df669e/srcpart1 does not exist; Force to delete it.
03:02:10.315 ERROR org.apache.hadoop.hive.common.FileUtils: Failed to delete file:/home/jenkins/workspace/spark-branch-3.1-test-sbt-hadoop-2.7/sql/hive/target/tmp/hive_execution_test_group/warehouse-0da5d255-53c3-4629-bef1-e801e1df669e/srcpart1
03:02:10.375 WARN org.apache.hadoop.hive.conf.HiveConf: HiveConf of name hive.internal.ss.authz.settings.applied.marker does not exist
03:02:10.375 WARN org.apache.hadoop.hive.conf.HiveConf: HiveConf of name hive.stats.jdbc.timeout does not exist
03:02:10.375 WARN org.apache.hadoop.hive.conf.HiveConf: HiveConf of name hive.stats.retries.wait does not exist
03:02:10.502 WARN org.apache.hadoop.hive.conf.HiveConf: HiveConf of name hive.internal.ss.authz.settings.applied.marker does not exist
03:02:10.502 WARN org.apache.hadoop.hive.conf.HiveConf: HiveConf of name hive.stats.jdbc.timeout does not exist
03:02:10.502 WARN org.apache.hadoop.hive.conf.HiveConf: HiveConf of name hive.stats.retries.wait does not exist
03:02:10.517 WARN org.apache.hadoop.hive.metastore.HiveMetaStore: Location: file:/home/jenkins/workspace/spark-branch-3.1-test-sbt-hadoop-2.7/sql/hive/target/tmp/hive_execution_test_group/warehouse-0da5d255-53c3-4629-bef1-e801e1df669e/src specified for non-external table:src
03:02:10.742 WARN org.apache.hadoop.hive.metastore.HiveMetaStore: Location: file:/home/jenkins/workspace/spark-branch-3.1-test-sbt-hadoop-2.7/sql/hive/target/tmp/hive_execution_test_group/warehouse-0da5d255-53c3-4629-bef1-e801e1df669e/srcpart specified for non-external table:srcpart
03:02:11.475 WARN org.apache.hadoop.hive.metastore.HiveMetaStore: Location: file:/home/jenkins/workspace/spark-branch-3.1-test-sbt-hadoop-2.7/sql/hive/target/tmp/hive_execution_test_group/warehouse-0da5d255-53c3-4629-bef1-e801e1df669e/srcpart1 specified for non-external table:srcpart1
[info] - Partition pruning - left only 1 partition - query test (2 seconds, 254 milliseconds)
[info] - Partition pruning - all partitions pruned - pruning test (60 milliseconds)
03:02:12.435 WARN org.apache.hadoop.hive.common.FileUtils: File file:/home/jenkins/workspace/spark-branch-3.1-test-sbt-hadoop-2.7/sql/hive/target/tmp/hive_execution_test_group/warehouse-0da5d255-53c3-4629-bef1-e801e1df669e/src does not exist; Force to delete it.
03:02:12.435 ERROR org.apache.hadoop.hive.common.FileUtils: Failed to delete file:/home/jenkins/workspace/spark-branch-3.1-test-sbt-hadoop-2.7/sql/hive/target/tmp/hive_execution_test_group/warehouse-0da5d255-53c3-4629-bef1-e801e1df669e/src
03:02:12.513 WARN org.apache.hadoop.hive.common.FileUtils: File file:/home/jenkins/workspace/spark-branch-3.1-test-sbt-hadoop-2.7/sql/hive/target/tmp/hive_execution_test_group/warehouse-0da5d255-53c3-4629-bef1-e801e1df669e/srcpart does not exist; Force to delete it.
03:02:12.513 ERROR org.apache.hadoop.hive.common.FileUtils: Failed to delete file:/home/jenkins/workspace/spark-branch-3.1-test-sbt-hadoop-2.7/sql/hive/target/tmp/hive_execution_test_group/warehouse-0da5d255-53c3-4629-bef1-e801e1df669e/srcpart
03:02:12.560 WARN org.apache.hadoop.hive.common.FileUtils: File file:/home/jenkins/workspace/spark-branch-3.1-test-sbt-hadoop-2.7/sql/hive/target/tmp/hive_execution_test_group/warehouse-0da5d255-53c3-4629-bef1-e801e1df669e/srcpart1 does not exist; Force to delete it.
03:02:12.561 ERROR org.apache.hadoop.hive.common.FileUtils: Failed to delete file:/home/jenkins/workspace/spark-branch-3.1-test-sbt-hadoop-2.7/sql/hive/target/tmp/hive_execution_test_group/warehouse-0da5d255-53c3-4629-bef1-e801e1df669e/srcpart1
03:02:12.622 WARN org.apache.hadoop.hive.conf.HiveConf: HiveConf of name hive.internal.ss.authz.settings.applied.marker does not exist
03:02:12.622 WARN org.apache.hadoop.hive.conf.HiveConf: HiveConf of name hive.stats.jdbc.timeout does not exist
03:02:12.622 WARN org.apache.hadoop.hive.conf.HiveConf: HiveConf of name hive.stats.retries.wait does not exist
03:02:12.739 WARN org.apache.hadoop.hive.conf.HiveConf: HiveConf of name hive.internal.ss.authz.settings.applied.marker does not exist
03:02:12.739 WARN org.apache.hadoop.hive.conf.HiveConf: HiveConf of name hive.stats.jdbc.timeout does not exist
03:02:12.739 WARN org.apache.hadoop.hive.conf.HiveConf: HiveConf of name hive.stats.retries.wait does not exist
03:02:12.749 WARN org.apache.hadoop.hive.metastore.HiveMetaStore: Location: file:/home/jenkins/workspace/spark-branch-3.1-test-sbt-hadoop-2.7/sql/hive/target/tmp/hive_execution_test_group/warehouse-0da5d255-53c3-4629-bef1-e801e1df669e/src specified for non-external table:src
03:02:12.959 WARN org.apache.hadoop.hive.metastore.HiveMetaStore: Location: file:/home/jenkins/workspace/spark-branch-3.1-test-sbt-hadoop-2.7/sql/hive/target/tmp/hive_execution_test_group/warehouse-0da5d255-53c3-4629-bef1-e801e1df669e/srcpart specified for non-external table:srcpart
03:02:13.749 WARN org.apache.hadoop.hive.metastore.HiveMetaStore: Location: file:/home/jenkins/workspace/spark-branch-3.1-test-sbt-hadoop-2.7/sql/hive/target/tmp/hive_execution_test_group/warehouse-0da5d255-53c3-4629-bef1-e801e1df669e/srcpart1 specified for non-external table:srcpart1
[info] - Partition pruning - all partitions pruned - query test (2 seconds, 119 milliseconds)
[info] - Partition pruning - pruning with both column key and partition key - pruning test (77 milliseconds)
03:02:14.702 WARN org.apache.hadoop.hive.common.FileUtils: File file:/home/jenkins/workspace/spark-branch-3.1-test-sbt-hadoop-2.7/sql/hive/target/tmp/hive_execution_test_group/warehouse-0da5d255-53c3-4629-bef1-e801e1df669e/src does not exist; Force to delete it.
03:02:14.702 ERROR org.apache.hadoop.hive.common.FileUtils: Failed to delete file:/home/jenkins/workspace/spark-branch-3.1-test-sbt-hadoop-2.7/sql/hive/target/tmp/hive_execution_test_group/warehouse-0da5d255-53c3-4629-bef1-e801e1df669e/src
03:02:14.783 WARN org.apache.hadoop.hive.common.FileUtils: File file:/home/jenkins/workspace/spark-branch-3.1-test-sbt-hadoop-2.7/sql/hive/target/tmp/hive_execution_test_group/warehouse-0da5d255-53c3-4629-bef1-e801e1df669e/srcpart does not exist; Force to delete it.
03:02:14.783 ERROR org.apache.hadoop.hive.common.FileUtils: Failed to delete file:/home/jenkins/workspace/spark-branch-3.1-test-sbt-hadoop-2.7/sql/hive/target/tmp/hive_execution_test_group/warehouse-0da5d255-53c3-4629-bef1-e801e1df669e/srcpart
03:02:14.831 WARN org.apache.hadoop.hive.common.FileUtils: File file:/home/jenkins/workspace/spark-branch-3.1-test-sbt-hadoop-2.7/sql/hive/target/tmp/hive_execution_test_group/warehouse-0da5d255-53c3-4629-bef1-e801e1df669e/srcpart1 does not exist; Force to delete it.
03:02:14.831 ERROR org.apache.hadoop.hive.common.FileUtils: Failed to delete file:/home/jenkins/workspace/spark-branch-3.1-test-sbt-hadoop-2.7/sql/hive/target/tmp/hive_execution_test_group/warehouse-0da5d255-53c3-4629-bef1-e801e1df669e/srcpart1
03:02:14.895 WARN org.apache.hadoop.hive.conf.HiveConf: HiveConf of name hive.internal.ss.authz.settings.applied.marker does not exist
03:02:14.896 WARN org.apache.hadoop.hive.conf.HiveConf: HiveConf of name hive.stats.jdbc.timeout does not exist
03:02:14.896 WARN org.apache.hadoop.hive.conf.HiveConf: HiveConf of name hive.stats.retries.wait does not exist
03:02:14.993 WARN org.apache.hadoop.hive.conf.HiveConf: HiveConf of name hive.internal.ss.authz.settings.applied.marker does not exist
03:02:14.993 WARN org.apache.hadoop.hive.conf.HiveConf: HiveConf of name hive.stats.jdbc.timeout does not exist
03:02:14.993 WARN org.apache.hadoop.hive.conf.HiveConf: HiveConf of name hive.stats.retries.wait does not exist
03:02:15.003 WARN org.apache.hadoop.hive.metastore.HiveMetaStore: Location: file:/home/jenkins/workspace/spark-branch-3.1-test-sbt-hadoop-2.7/sql/hive/target/tmp/hive_execution_test_group/warehouse-0da5d255-53c3-4629-bef1-e801e1df669e/src specified for non-external table:src
03:02:15.200 WARN org.apache.hadoop.hive.metastore.HiveMetaStore: Location: file:/home/jenkins/workspace/spark-branch-3.1-test-sbt-hadoop-2.7/sql/hive/target/tmp/hive_execution_test_group/warehouse-0da5d255-53c3-4629-bef1-e801e1df669e/srcpart specified for non-external table:srcpart
03:02:16.002 WARN org.apache.hadoop.hive.metastore.HiveMetaStore: Location: file:/home/jenkins/workspace/spark-branch-3.1-test-sbt-hadoop-2.7/sql/hive/target/tmp/hive_execution_test_group/warehouse-0da5d255-53c3-4629-bef1-e801e1df669e/srcpart1 specified for non-external table:srcpart1
[info] - Partition pruning - pruning with both column key and partition key - query test (2 seconds, 265 milliseconds)
[info] - Partition pruning - with filter containing non-deterministic condition - pruning test (91 milliseconds)
03:02:17.059 WARN org.apache.hadoop.hive.common.FileUtils: File file:/home/jenkins/workspace/spark-branch-3.1-test-sbt-hadoop-2.7/sql/hive/target/tmp/hive_execution_test_group/warehouse-0da5d255-53c3-4629-bef1-e801e1df669e/src does not exist; Force to delete it.
03:02:17.059 ERROR org.apache.hadoop.hive.common.FileUtils: Failed to delete file:/home/jenkins/workspace/spark-branch-3.1-test-sbt-hadoop-2.7/sql/hive/target/tmp/hive_execution_test_group/warehouse-0da5d255-53c3-4629-bef1-e801e1df669e/src
03:02:17.139 WARN org.apache.hadoop.hive.common.FileUtils: File file:/home/jenkins/workspace/spark-branch-3.1-test-sbt-hadoop-2.7/sql/hive/target/tmp/hive_execution_test_group/warehouse-0da5d255-53c3-4629-bef1-e801e1df669e/srcpart does not exist; Force to delete it.
03:02:17.140 ERROR org.apache.hadoop.hive.common.FileUtils: Failed to delete file:/home/jenkins/workspace/spark-branch-3.1-test-sbt-hadoop-2.7/sql/hive/target/tmp/hive_execution_test_group/warehouse-0da5d255-53c3-4629-bef1-e801e1df669e/srcpart
03:02:17.185 WARN org.apache.hadoop.hive.common.FileUtils: File file:/home/jenkins/workspace/spark-branch-3.1-test-sbt-hadoop-2.7/sql/hive/target/tmp/hive_execution_test_group/warehouse-0da5d255-53c3-4629-bef1-e801e1df669e/srcpart1 does not exist; Force to delete it.
03:02:17.185 ERROR org.apache.hadoop.hive.common.FileUtils: Failed to delete file:/home/jenkins/workspace/spark-branch-3.1-test-sbt-hadoop-2.7/sql/hive/target/tmp/hive_execution_test_group/warehouse-0da5d255-53c3-4629-bef1-e801e1df669e/srcpart1
03:02:17.250 WARN org.apache.hadoop.hive.conf.HiveConf: HiveConf of name hive.internal.ss.authz.settings.applied.marker does not exist
03:02:17.250 WARN org.apache.hadoop.hive.conf.HiveConf: HiveConf of name hive.stats.jdbc.timeout does not exist
03:02:17.250 WARN org.apache.hadoop.hive.conf.HiveConf: HiveConf of name hive.stats.retries.wait does not exist
03:02:17.382 WARN org.apache.hadoop.hive.conf.HiveConf: HiveConf of name hive.internal.ss.authz.settings.applied.marker does not exist
03:02:17.383 WARN org.apache.hadoop.hive.conf.HiveConf: HiveConf of name hive.stats.jdbc.timeout does not exist
03:02:17.383 WARN org.apache.hadoop.hive.conf.HiveConf: HiveConf of name hive.stats.retries.wait does not exist
03:02:17.393 WARN org.apache.hadoop.hive.metastore.HiveMetaStore: Location: file:/home/jenkins/workspace/spark-branch-3.1-test-sbt-hadoop-2.7/sql/hive/target/tmp/hive_execution_test_group/warehouse-0da5d255-53c3-4629-bef1-e801e1df669e/src specified for non-external table:src
03:02:17.554 WARN org.apache.hadoop.hive.metastore.HiveMetaStore: Location: file:/home/jenkins/workspace/spark-branch-3.1-test-sbt-hadoop-2.7/sql/hive/target/tmp/hive_execution_test_group/warehouse-0da5d255-53c3-4629-bef1-e801e1df669e/srcpart specified for non-external table:srcpart
03:02:18.309 WARN org.apache.hadoop.hive.metastore.HiveMetaStore: Location: file:/home/jenkins/workspace/spark-branch-3.1-test-sbt-hadoop-2.7/sql/hive/target/tmp/hive_execution_test_group/warehouse-0da5d255-53c3-4629-bef1-e801e1df669e/srcpart1 specified for non-external table:srcpart1
[info] - Partition pruning - with filter containing non-deterministic condition - query test (2 seconds, 237 milliseconds)
03:02:19.238 WARN org.apache.hadoop.hive.common.FileUtils: File file:/home/jenkins/workspace/spark-branch-3.1-test-sbt-hadoop-2.7/sql/hive/target/tmp/hive_execution_test_group/warehouse-0da5d255-53c3-4629-bef1-e801e1df669e/src does not exist; Force to delete it.
03:02:19.238 ERROR org.apache.hadoop.hive.common.FileUtils: Failed to delete file:/home/jenkins/workspace/spark-branch-3.1-test-sbt-hadoop-2.7/sql/hive/target/tmp/hive_execution_test_group/warehouse-0da5d255-53c3-4629-bef1-e801e1df669e/src
03:02:19.331 WARN org.apache.hadoop.hive.common.FileUtils: File file:/home/jenkins/workspace/spark-branch-3.1-test-sbt-hadoop-2.7/sql/hive/target/tmp/hive_execution_test_group/warehouse-0da5d255-53c3-4629-bef1-e801e1df669e/srcpart does not exist; Force to delete it.
03:02:19.331 ERROR org.apache.hadoop.hive.common.FileUtils: Failed to delete file:/home/jenkins/workspace/spark-branch-3.1-test-sbt-hadoop-2.7/sql/hive/target/tmp/hive_execution_test_group/warehouse-0da5d255-53c3-4629-bef1-e801e1df669e/srcpart
03:02:19.381 WARN org.apache.hadoop.hive.common.FileUtils: File file:/home/jenkins/workspace/spark-branch-3.1-test-sbt-hadoop-2.7/sql/hive/target/tmp/hive_execution_test_group/warehouse-0da5d255-53c3-4629-bef1-e801e1df669e/srcpart1 does not exist; Force to delete it.
03:02:19.381 ERROR org.apache.hadoop.hive.common.FileUtils: Failed to delete file:/home/jenkins/workspace/spark-branch-3.1-test-sbt-hadoop-2.7/sql/hive/target/tmp/hive_execution_test_group/warehouse-0da5d255-53c3-4629-bef1-e801e1df669e/srcpart1
03:02:19.450 WARN org.apache.hadoop.hive.conf.HiveConf: HiveConf of name hive.internal.ss.authz.settings.applied.marker does not exist
03:02:19.450 WARN org.apache.hadoop.hive.conf.HiveConf: HiveConf of name hive.stats.jdbc.timeout does not exist
03:02:19.450 WARN org.apache.hadoop.hive.conf.HiveConf: HiveConf of name hive.stats.retries.wait does not exist
03:02:19.588 WARN org.apache.hadoop.hive.conf.HiveConf: HiveConf of name hive.internal.ss.authz.settings.applied.marker does not exist
03:02:19.588 WARN org.apache.hadoop.hive.conf.HiveConf: HiveConf of name hive.stats.jdbc.timeout does not exist
03:02:19.588 WARN org.apache.hadoop.hive.conf.HiveConf: HiveConf of name hive.stats.retries.wait does not exist
[info] ScalaTest
[info] Run completed in 1 hour, 46 minutes, 23 seconds.
[info] Total number of tests run: 3976
[info] Suites: completed 137, aborted 0
[info] Tests: succeeded 3976, failed 0, canceled 0, ignored 601, pending 0
[info] All tests passed.
[info] Passed: Total 3979, Failed 0, Errors 0, Passed 3979, Ignored 601
[success] Total time: 6454 s (01:47:34), completed Nov 22, 2021 3:02:28 AM

========================================================================
Running PySpark tests
========================================================================
Running PySpark tests. Output is in /home/jenkins/workspace/spark-branch-3.1-test-sbt-hadoop-2.7/python/unit-tests.log
Will test against the following Python executables: ['python3.6', 'pypy3']
Will test the following Python modules: ['pyspark-core', 'pyspark-ml', 'pyspark-mllib', 'pyspark-resource', 'pyspark-sql', 'pyspark-streaming']
python3.6 python_implementation is CPython
python3.6 version is: Python 3.6.8 :: Anaconda, Inc.
pypy3 python_implementation is PyPy
pypy3 version is: Python 3.6.9 (7.3.1+dfsg-4, Apr 22 2020, 05:15:29)
[PyPy 7.3.1 with GCC 9.3.0]
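Before any module runs, the harness probes each executable for its implementation and version, producing the banner lines above. A minimal, assumed sketch of such a probe (run once under each interpreter; the real logic lives in python/run-tests.py and the exact formatting here is illustrative):

    import platform
    import sys

    # Report which Python implementation and version this interpreter is,
    # mirroring the "python_implementation is ..." / "version is: ..." banners.
    print("{0} python_implementation is {1}".format(
        sys.executable, platform.python_implementation()))
    print("{0} version is: {1}".format(sys.executable, sys.version))
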
Starting test(pypy3): pyspark.sql.tests.test_catalog
Starting test(pypy3): pyspark.sql.tests.test_column
Starting test(pypy3): pyspark.sql.tests.test_conf
Starting test(pypy3): pyspark.sql.tests.test_context
Starting test(pypy3): pyspark.sql.tests.test_datasources
Starting test(pypy3): pyspark.sql.tests.test_arrow
Starting test(pypy3): pyspark.sql.tests.test_dataframe
Starting test(pypy3): pyspark.sql.tests.test_functions
Finished test(pypy3): pyspark.sql.tests.test_arrow (0s) ... 67 tests were skipped
Starting test(pypy3): pyspark.sql.tests.test_group
Finished test(pypy3): pyspark.sql.tests.test_conf (13s)
Starting test(pypy3): pyspark.sql.tests.test_pandas_cogrouped_map
Finished test(pypy3): pyspark.sql.tests.test_pandas_cogrouped_map (0s) ... 16 tests were skipped
Starting test(pypy3): pyspark.sql.tests.test_pandas_grouped_map
Finished test(pypy3): pyspark.sql.tests.test_pandas_grouped_map (0s) ... 21 tests were skipped
Starting test(pypy3): pyspark.sql.tests.test_pandas_map
Finished test(pypy3): pyspark.sql.tests.test_pandas_map (0s) ... 7 tests were skipped
Starting test(pypy3): pyspark.sql.tests.test_pandas_udf
Finished test(pypy3): pyspark.sql.tests.test_pandas_udf (0s) ... 6 tests were skipped
Starting test(pypy3): pyspark.sql.tests.test_pandas_udf_grouped_agg
Finished test(pypy3): pyspark.sql.tests.test_pandas_udf_grouped_agg (0s) ... 16 tests were skipped
Starting test(pypy3): pyspark.sql.tests.test_pandas_udf_scalar
Finished test(pypy3): pyspark.sql.tests.test_pandas_udf_scalar (1s) ... 51 tests were skipped
Starting test(pypy3): pyspark.sql.tests.test_pandas_udf_typehints
Finished test(pypy3): pyspark.sql.tests.test_pandas_udf_typehints (1s) ... 10 tests were skipped
Starting test(pypy3): pyspark.sql.tests.test_pandas_udf_window
Finished test(pypy3): pyspark.sql.tests.test_pandas_udf_window (1s) ... 14 tests were skipped
Starting test(pypy3): pyspark.sql.tests.test_readwriter
Finished test(pypy3): pyspark.sql.tests.test_catalog (25s)
Starting test(pypy3): pyspark.sql.tests.test_serde
Finished test(pypy3): pyspark.sql.tests.test_group (24s)
Starting test(pypy3): pyspark.sql.tests.test_session
Finished test(pypy3): pyspark.sql.tests.test_column (28s)
Starting test(pypy3): pyspark.sql.tests.test_streaming
Finished test(pypy3): pyspark.sql.tests.test_datasources (28s)
Starting test(pypy3): pyspark.sql.tests.test_types
Finished test(pypy3): pyspark.sql.tests.test_context (28s)
Starting test(pypy3): pyspark.sql.tests.test_udf
Finished test(pypy3): pyspark.sql.tests.test_dataframe (50s) ... 10 tests were skipped
Starting test(pypy3): pyspark.sql.tests.test_utils
Finished test(pypy3): pyspark.sql.tests.test_functions (53s)
Starting test(pypy3): pyspark.streaming.tests.test_context
Finished test(pypy3): pyspark.sql.tests.test_serde (34s)
Starting test(pypy3): pyspark.streaming.tests.test_dstream
Finished test(pypy3): pyspark.streaming.tests.test_dstream (1s) ... 21 tests were skipped
Starting test(pypy3): pyspark.streaming.tests.test_kinesis
Finished test(pypy3): pyspark.streaming.tests.test_kinesis (1s) ... 2 tests were skipped
Starting test(pypy3): pyspark.streaming.tests.test_listener
Finished test(pypy3): pyspark.sql.tests.test_readwriter (41s)
Starting test(pypy3): pyspark.tests.test_appsubmit
Finished test(pypy3): pyspark.sql.tests.test_utils (18s)
Starting test(pypy3): pyspark.tests.test_broadcast
Finished test(pypy3): pyspark.sql.tests.test_session (48s)
Starting test(pypy3): pyspark.tests.test_conf
Finished test(pypy3): pyspark.streaming.tests.test_listener (15s)
Starting test(pypy3): pyspark.tests.test_context
Finished test(pypy3): pyspark.sql.tests.test_streaming (50s)
Starting test(pypy3): pyspark.tests.test_daemon
Finished test(pypy3): pyspark.tests.test_daemon (6s)
Starting test(pypy3): pyspark.tests.test_install_spark
Finished test(pypy3): pyspark.streaming.tests.test_context (34s)
Starting test(pypy3): pyspark.tests.test_join
Finished test(pypy3): pyspark.sql.tests.test_types (59s)
Starting test(pypy3): pyspark.tests.test_profiler
Finished test(pypy3): pyspark.sql.tests.test_udf (65s)
Starting test(pypy3): pyspark.tests.test_rdd
Finished test(pypy3): pyspark.tests.test_conf (27s)
Starting test(pypy3): pyspark.tests.test_rddbarrier
Finished test(pypy3): pyspark.tests.test_join (17s)
Starting test(pypy3): pyspark.tests.test_readwrite
Finished test(pypy3): pyspark.tests.test_profiler (17s)
Starting test(pypy3): pyspark.tests.test_serializers
Finished test(pypy3): pyspark.tests.test_serializers (1s) ... 2 tests were skipped
Starting test(pypy3): pyspark.tests.test_shuffle
Finished test(pypy3): pyspark.tests.test_rddbarrier (13s)
Starting test(pypy3): pyspark.tests.test_taskcontext
Finished test(pypy3): pyspark.tests.test_install_spark (30s)
Starting test(pypy3): pyspark.tests.test_util
Finished test(pypy3): pyspark.tests.test_broadcast (54s)
Starting test(pypy3): pyspark.tests.test_worker
Finished test(pypy3): pyspark.tests.test_util (10s)
Starting test(python3.6): pyspark.ml.tests.test_algorithms
Finished test(pypy3): pyspark.tests.test_shuffle (20s)
Starting test(python3.6): pyspark.ml.tests.test_base
Finished test(pypy3): pyspark.tests.test_readwrite (28s)
Starting test(python3.6): pyspark.ml.tests.test_evaluation
Finished test(python3.6): pyspark.ml.tests.test_base (21s)
Starting test(python3.6): pyspark.ml.tests.test_feature
Finished test(pypy3): pyspark.tests.test_worker (27s)
Starting test(python3.6): pyspark.ml.tests.test_image
Finished test(pypy3): pyspark.tests.test_context (73s)
Starting test(python3.6): pyspark.ml.tests.test_linalg
Finished test(python3.6): pyspark.ml.tests.test_evaluation (22s)
Starting test(python3.6): pyspark.ml.tests.test_param
Finished test(python3.6): pyspark.ml.tests.test_image (20s)
Starting test(python3.6): pyspark.ml.tests.test_persistence
Finished test(python3.6): pyspark.ml.tests.test_param (28s)
Starting test(python3.6): pyspark.ml.tests.test_pipeline
Finished test(python3.6): pyspark.ml.tests.test_feature (40s)
Starting test(python3.6): pyspark.ml.tests.test_stat
Finished test(pypy3): pyspark.tests.test_taskcontext (76s)
Starting test(python3.6): pyspark.ml.tests.test_training_summary
Finished test(python3.6): pyspark.ml.tests.test_pipeline (8s)
Starting test(python3.6): pyspark.ml.tests.test_tuning
Finished test(python3.6): pyspark.ml.tests.test_linalg (41s)
Starting test(python3.6): pyspark.ml.tests.test_util
Finished test(pypy3): pyspark.tests.test_appsubmit (139s)
Starting test(python3.6): pyspark.ml.tests.test_wrapper
Finished test(python3.6): pyspark.ml.tests.test_stat (22s)
Starting test(python3.6): pyspark.mllib.tests.test_algorithms
Finished test(pypy3): pyspark.tests.test_rdd (127s)
Starting test(python3.6): pyspark.mllib.tests.test_feature
Finished test(python3.6): pyspark.ml.tests.test_util (31s)
Starting test(python3.6): pyspark.mllib.tests.test_linalg
Finished test(python3.6): pyspark.ml.tests.test_wrapper (26s)
Starting test(python3.6): pyspark.mllib.tests.test_stat
Finished test(python3.6): pyspark.ml.tests.test_algorithms (116s)
Starting test(python3.6): pyspark.mllib.tests.test_streaming_algorithms
Finished test(python3.6): pyspark.ml.tests.test_training_summary (54s)
Starting test(python3.6): pyspark.mllib.tests.test_util
Finished test(python3.6): pyspark.ml.tests.test_persistence (76s)
Starting test(python3.6): pyspark.sql.tests.test_arrow
Finished test(python3.6): pyspark.sql.tests.test_arrow (0s) ... 67 tests were skipped
Starting test(python3.6): pyspark.sql.tests.test_catalog
Finished test(python3.6): pyspark.mllib.tests.test_feature (40s)
Starting test(python3.6): pyspark.sql.tests.test_column
Finished test(python3.6): pyspark.mllib.tests.test_util (17s)
Starting test(python3.6): pyspark.sql.tests.test_conf
Finished test(python3.6): pyspark.mllib.tests.test_stat (35s)
Starting test(python3.6): pyspark.sql.tests.test_context
Finished test(python3.6): pyspark.sql.tests.test_catalog (24s)
Starting test(python3.6): pyspark.sql.tests.test_dataframe
Finished test(python3.6): pyspark.sql.tests.test_conf (14s)
Starting test(python3.6): pyspark.sql.tests.test_datasources
Finished test(python3.6): pyspark.sql.tests.test_column (25s)
Starting test(python3.6): pyspark.sql.tests.test_functions
Finished test(python3.6): pyspark.sql.tests.test_context (26s)
Starting test(python3.6): pyspark.sql.tests.test_group
Finished test(python3.6): pyspark.mllib.tests.test_algorithms (89s)
Starting test(python3.6): pyspark.sql.tests.test_pandas_cogrouped_map
Finished test(python3.6): pyspark.sql.tests.test_pandas_cogrouped_map (1s) ... 16 tests were skipped
Starting test(python3.6): pyspark.sql.tests.test_pandas_grouped_map
Finished test(python3.6): pyspark.sql.tests.test_pandas_grouped_map (0s) ... 21 tests were skipped
Starting test(python3.6): pyspark.sql.tests.test_pandas_map
Finished test(python3.6): pyspark.sql.tests.test_datasources (26s)
Starting test(python3.6): pyspark.sql.tests.test_pandas_udf
Finished test(python3.6): pyspark.mllib.tests.test_linalg (79s)
Starting test(python3.6): pyspark.sql.tests.test_pandas_udf_grouped_agg
Finished test(python3.6): pyspark.sql.tests.test_pandas_map (0s) ... 7 tests were skipped
Starting test(python3.6): pyspark.sql.tests.test_pandas_udf_scalar
Finished test(python3.6): pyspark.sql.tests.test_pandas_udf (0s) ... 6 tests were skipped
Starting test(python3.6): pyspark.sql.tests.test_pandas_udf_typehints
Finished test(python3.6): pyspark.sql.tests.test_pandas_udf_grouped_agg (1s) ... 16 tests were skipped
Starting test(python3.6): pyspark.sql.tests.test_pandas_udf_window
Finished test(python3.6): pyspark.sql.tests.test_pandas_udf_scalar (1s) ... 51 tests were skipped
Starting test(python3.6): pyspark.sql.tests.test_readwriter
Finished test(python3.6): pyspark.sql.tests.test_pandas_udf_typehints (1s) ... 10 tests were skipped
Starting test(python3.6): pyspark.sql.tests.test_serde
Finished test(python3.6): pyspark.sql.tests.test_pandas_udf_window (1s) ... 14 tests were skipped
Starting test(python3.6): pyspark.sql.tests.test_session
Finished test(python3.6): pyspark.sql.tests.test_group (22s)
Starting test(python3.6): pyspark.sql.tests.test_streaming
Finished test(python3.6): pyspark.sql.tests.test_dataframe (51s) ... 3 tests were skipped
Starting test(python3.6): pyspark.sql.tests.test_types
Finished test(python3.6): pyspark.sql.tests.test_functions (48s)
Starting test(python3.6): pyspark.sql.tests.test_udf
Finished test(python3.6): pyspark.sql.tests.test_serde (31s)
Starting test(python3.6): pyspark.sql.tests.test_utils
Finished test(python3.6): pyspark.sql.tests.test_readwriter (35s)
Starting test(python3.6): pyspark.streaming.tests.test_context
Finished test(python3.6): pyspark.sql.tests.test_session (44s)
Starting test(python3.6): pyspark.streaming.tests.test_dstream
Finished test(python3.6): pyspark.sql.tests.test_utils (14s)
Starting test(python3.6): pyspark.streaming.tests.test_kinesis
Finished test(python3.6): pyspark.streaming.tests.test_kinesis (0s) ... 2 tests were skipped
Starting test(python3.6): pyspark.streaming.tests.test_listener
Finished test(python3.6): pyspark.sql.tests.test_streaming (47s)
Starting test(python3.6): pyspark.tests.test_appsubmit
Finished test(python3.6): pyspark.streaming.tests.test_listener (13s)
Starting test(python3.6): pyspark.tests.test_broadcast
Finished test(python3.6): pyspark.streaming.tests.test_context (26s)
Starting test(python3.6): pyspark.tests.test_conf
Finished test(python3.6): pyspark.sql.tests.test_types (50s)
Starting test(python3.6): pyspark.tests.test_context
Finished test(python3.6): pyspark.tests.test_conf (21s)
Starting test(python3.6): pyspark.tests.test_daemon
Finished test(python3.6): pyspark.sql.tests.test_udf (57s)
Starting test(python3.6): pyspark.tests.test_install_spark
Finished test(python3.6): pyspark.tests.test_daemon (5s)
Starting test(python3.6): pyspark.tests.test_join
Finished test(python3.6): pyspark.tests.test_join (13s)
Starting test(python3.6): pyspark.tests.test_profiler
Finished test(python3.6): pyspark.tests.test_broadcast (47s)
Starting test(python3.6): pyspark.tests.test_rdd
Finished test(python3.6): pyspark.tests.test_profiler (15s)
Starting test(python3.6): pyspark.tests.test_rddbarrier
Finished test(python3.6): pyspark.mllib.tests.test_streaming_algorithms (182s)
Starting test(python3.6): pyspark.tests.test_readwrite
Finished test(python3.6): pyspark.tests.test_install_spark (32s)
Starting test(python3.6): pyspark.tests.test_serializers
Finished test(python3.6): pyspark.tests.test_rddbarrier (12s)
Starting test(python3.6): pyspark.tests.test_shuffle
Finished test(python3.6): pyspark.tests.test_serializers (15s)
Starting test(python3.6): pyspark.tests.test_taskcontext
Finished test(python3.6): pyspark.tests.test_context (68s)
Starting test(python3.6): pyspark.tests.test_util
Finished test(python3.6): pyspark.tests.test_readwrite (24s)
Starting test(python3.6): pyspark.tests.test_worker
Finished test(python3.6): pyspark.tests.test_shuffle (13s)
Starting test(pypy3): pyspark.accumulators
Finished test(python3.6): pyspark.tests.test_util (8s)
Starting test(pypy3): pyspark.broadcast
Finished test(pypy3): pyspark.accumulators (11s)
Starting test(pypy3): pyspark.conf
Finished test(pypy3): pyspark.broadcast (9s)
Starting test(pypy3): pyspark.context
Finished test(pypy3): pyspark.conf (6s)
Starting test(pypy3): pyspark.profiler
Setting default log level to "WARN".
To adjust logging level use sc.setLogLevel(newLevel). For SparkR, use setLogLevel(newLevel).

[Stage 0:>                                                          (0 + 1) / 1]
[Stage 0:>                                                          (0 + 4) / 8]
[Stage 0:>                                                         (0 + 4) / 20]
[Stage 9:>                                                          (0 + 1) / 1]
Current mem limits: -1 of max -1
Setting mem limits to 2147483648 of max 2147483648
Current mem limits: 2147483648 of max 2147483648
Skipping repeated "Current mem limits" / "Setting mem limits" lines (one pair per worker and task) and progress-bar redraws..

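The "Current mem limits" / "Setting mem limits" pairs above are emitted by each Python worker as it caps its own address space; the 2147483648-byte figure corresponds to a 2 GiB spark.executor.pyspark.memory setting. A minimal sketch of that rlimit logic, assuming a POSIX platform (the helper name is illustrative; the real code lives in pyspark/worker.py):

    import resource

    def set_worker_memory_limit(memory_limit_mb):
        # RLIM_INFINITY is -1 on Linux, which is why a fresh worker prints
        # "Current mem limits: -1 of max -1" before any cap is applied.
        soft_limit, hard_limit = resource.getrlimit(resource.RLIMIT_AS)
        print("Current mem limits: {0} of max {1}".format(soft_limit, hard_limit))

        new_limit = memory_limit_mb * 1024 * 1024
        if soft_limit == resource.RLIM_INFINITY or new_limit < soft_limit:
            # 2048 MiB == 2147483648 bytes, the value seen throughout the log.
            print("Setting mem limits to {0} of max {1}".format(new_limit, new_limit))
            resource.setrlimit(resource.RLIMIT_AS, (new_limit, new_limit))

    set_worker_memory_limit(2048)

When the soft limit already matches the requested value, the guard skips setrlimit, which is why reused workers log only the "Current mem limits" line on later tasks.
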
Running tests...
----------------------------------------------------------------------
  test_memory_limit (pyspark.tests.test_worker.WorkerMemoryTest) ... OK (9.414s)
  test_reuse_worker_of_parallelize_range (pyspark.tests.test_worker.WorkerReuseTest) ... FAIL (2.599s)
  test_accumulator_when_reuse_worker (pyspark.tests.test_worker.WorkerTests) ... OK (1.219s)
  test_after_exception (pyspark.tests.test_worker.WorkerTests) ... OK (0.302s)
  test_after_jvm_exception (pyspark.tests.test_worker.WorkerTests) ... OK (0.887s)
  test_after_non_exception_error (pyspark.tests.test_worker.WorkerTests) ... OK (0.124s)
  test_cancel_task (pyspark.tests.test_worker.WorkerTests) ... OK (4.356s)
  test_python_exception_non_hanging (pyspark.tests.test_worker.WorkerTests) ... OK (0.189s)
  test_reuse_worker_after_take (pyspark.tests.test_worker.WorkerTests) ... OK (0.254s)
  test_with_different_versions_of_python (pyspark.tests.test_worker.WorkerTests) ... OK (0.160s)

======================================================================
ERROR [2.599s]: test_reuse_worker_of_parallelize_range (pyspark.tests.test_worker.WorkerReuseTest)
----------------------------------------------------------------------
Traceback (most recent call last):
  File "/home/jenkins/workspace/spark-branch-3.1-test-sbt-hadoop-2.7/python/pyspark/tests/test_worker.py", line 179, in test_reuse_worker_of_parallelize_range
    self.assertTrue(pid in previous_pids)
AssertionError: False is not true

----------------------------------------------------------------------
Ran 10 tests in 20.707s

FAILED (errors=1)

Generating XML reports...

Had test failures in pyspark.tests.test_worker with python3.6; see logs.
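The assertion at python/pyspark/tests/test_worker.py:179 verifies Python worker reuse: every PID observed by a second job over the same RDD should already appear among the PIDs from the first job. A hedged reconstruction of that check (the master URL, partitioning, and explicit config are illustrative, not lifted from the suite):

    import os

    from pyspark import SparkConf, SparkContext

    # Worker reuse is on by default; set it explicitly here for clarity.
    conf = SparkConf().set("spark.python.worker.reuse", "true")
    sc = SparkContext("local[4]", "worker-reuse-sketch", conf=conf)

    rdd = sc.parallelize(range(20), 8)
    previous_pids = rdd.map(lambda x: os.getpid()).collect()
    current_pids = rdd.map(lambda x: os.getpid()).collect()

    for pid in current_pids:
        # The failing line: a PID absent from previous_pids means the daemon
        # forked a fresh worker instead of reusing one, producing the
        # "AssertionError: False is not true" seen above.
        assert pid in previous_pids, "worker was not reused"

    sc.stop()

Because reuse depends on workers returning to the daemon's idle pool before the next task is scheduled, this check can be timing-sensitive on a loaded CI host, which would explain a one-off failure in an otherwise green run.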
[error] running /home/jenkins/workspace/spark-branch-3.1-test-sbt-hadoop-2.7/python/run-tests --parallelism=8 ; received return code 255
Process leaked file descriptors. See https://www.jenkins.io/redirect/troubleshooting/process-leaked-file-descriptors for more information
Build step 'Execute shell' marked build as failure
Archiving artifacts
Recording test results
[Checks API] No suitable checks publisher found.
Finished: FAILURE