 
 package org.apache.spark.sql.hive.thriftserver
 
-import java.io.File
+import java.io.{File, FilenameFilter}
 import java.net.URL
 import java.nio.charset.StandardCharsets
 import java.sql.{Date, DriverManager, SQLException, Statement}
+import java.util.UUID
 
 import scala.collection.mutable
 import scala.collection.mutable.ArrayBuffer
@@ -613,6 +614,28 @@ class HiveThriftBinaryServerSuite extends HiveThriftJdbcTest { |
       bufferSrc.close()
     }
   }
+
+  test("SPARK-23547 Cleanup the .pipeout file when the Hive Session closed") {
+    def pipeoutFileList(sessionID: UUID): Array[File] = {
+      lScratchDir.listFiles(new FilenameFilter {
+        override def accept(dir: File, name: String): Boolean = {
+          name.startsWith(sessionID.toString) && name.endsWith(".pipeout")
+        }
+      })
+    }
+
+    withCLIServiceClient { client =>
+      val user = System.getProperty("user.name")
+      val sessionHandle = client.openSession(user, "")
+      val sessionID = sessionHandle.getSessionId
+
+      assert(pipeoutFileList(sessionID).length == 1)
+
+      client.closeSession(sessionHandle)
+
+      assert(pipeoutFileList(sessionID).length == 0)
+    }
+  }
 }
 
 class SingleSessionSuite extends HiveThriftJdbcTest {
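
For reference, here is a minimal standalone sketch (not part of the patch) of the lookup the new test performs: it lists files in the local scratch directory whose names start with the session UUID and end with `.pipeout`; the test expects exactly one such file while the session is open and none after it is closed. The object name, directory, and session id below are made up for illustration.

```scala
import java.io.{File, FilenameFilter}
import java.util.UUID

object PipeoutLookupSketch {
  // Return the session's .pipeout files, or an empty array if the directory is missing.
  def pipeoutFiles(scratchDir: File, sessionId: UUID): Array[File] = {
    val matched = scratchDir.listFiles(new FilenameFilter {
      override def accept(dir: File, name: String): Boolean =
        name.startsWith(sessionId.toString) && name.endsWith(".pipeout")
    })
    Option(matched).getOrElse(Array.empty[File]) // listFiles returns null for a non-directory
  }

  def main(args: Array[String]): Unit = {
    val scratchDir = new File(System.getProperty("java.io.tmpdir")) // hypothetical scratch dir
    val sessionId = UUID.randomUUID()                               // hypothetical session id
    println(s"Found ${pipeoutFiles(scratchDir, sessionId).length} .pipeout file(s) for $sessionId")
  }
}
```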
@@ -807,6 +830,7 @@ abstract class HiveThriftServer2Test extends SparkFunSuite with BeforeAndAfterAl |
   private val pidDir: File = Utils.createTempDir(namePrefix = "thriftserver-pid")
   protected var logPath: File = _
   protected var operationLogPath: File = _
+  protected var lScratchDir: File = _
   private var logTailingProcess: Process = _
   private var diagnosisBuffer: ArrayBuffer[String] = ArrayBuffer.empty[String]
 
@@ -844,6 +868,7 @@ abstract class HiveThriftServer2Test extends SparkFunSuite with BeforeAndAfterAl |
        | --hiveconf ${ConfVars.HIVE_SERVER2_THRIFT_BIND_HOST}=localhost
        | --hiveconf ${ConfVars.HIVE_SERVER2_TRANSPORT_MODE}=$mode
        | --hiveconf ${ConfVars.HIVE_SERVER2_LOGGING_OPERATION_LOG_LOCATION}=$operationLogPath
+       | --hiveconf ${ConfVars.LOCALSCRATCHDIR}=$lScratchDir
        | --hiveconf $portConf=$port
        | --driver-class-path $driverClassPath
        | --driver-java-options -Dlog4j.debug
@@ -873,6 +898,8 @@ abstract class HiveThriftServer2Test extends SparkFunSuite with BeforeAndAfterAl |
     metastorePath.delete()
     operationLogPath = Utils.createTempDir()
     operationLogPath.delete()
+    lScratchDir = Utils.createTempDir()
+    lScratchDir.delete()
     logPath = null
     logTailingProcess = null
 
@@ -956,6 +983,9 @@ abstract class HiveThriftServer2Test extends SparkFunSuite with BeforeAndAfterAl |
     operationLogPath.delete()
     operationLogPath = null
 
+    lScratchDir.delete()
+    lScratchDir = null
+
     Option(logPath).foreach(_.delete())
     logPath = null
 
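
The scratch directory is wired in with the same create-then-delete temp-dir pattern the suite already uses for `operationLogPath`: a unique path is created and immediately deleted so that HiveServer2 recreates the directory itself on startup, and teardown deletes it again. A minimal standalone sketch of that lifecycle, substituting `Files.createTempDirectory` for Spark's internal `Utils.createTempDir` (the object and variable names here are illustrative only):

```scala
import java.io.File
import java.nio.file.Files

object ScratchDirLifecycleSketch {
  def main(args: Array[String]): Unit = {
    // Stand-in for Utils.createTempDir(): a unique directory under java.io.tmpdir.
    var lScratchDir: File = Files.createTempDirectory("thriftserver-scratch").toFile
    lScratchDir.delete() // keep only the path; the server creates the directory on startup

    // The suite passes this path via --hiveconf ${ConfVars.LOCALSCRATCHDIR}=$lScratchDir.
    println(s"local scratch dir: ${lScratchDir.getAbsolutePath}")

    // ... start HiveServer2, run tests, stop it ...

    lScratchDir.delete() // teardown: remove the directory (if empty) and drop the reference
    lScratchDir = null
  }
}
```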