Skip to content

Commit f479221

Browse files
committed
last fix
1 parent bc7608a commit f479221

File tree

3 files changed

+18
-7
lines changed

3 files changed

+18
-7
lines changed

R/pkg/R/DataFrame.R

Lines changed: 3 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -213,11 +213,12 @@ setMethod("showDF",
213213
signature(x = "SparkDataFrame"),
214214
function(x, numRows = 20, truncate = TRUE, vertical = FALSE) {
215215
if (is.logical(truncate) && truncate) {
216-
s <- callJMethod(x@sdf, "showString", numToInt(numRows), numToInt(20), vertical)
216+
s <- callJMethod(x@sdf, "showString", numToInt(numRows), numToInt(20),
217+
vertical, FALSE)
217218
} else {
218219
truncate2 <- as.numeric(truncate)
219220
s <- callJMethod(x@sdf, "showString", numToInt(numRows), numToInt(truncate2),
220-
vertical)
221+
vertical, FALSE)
221222
}
222223
cat(s)
223224
})

python/pyspark/sql/dataframe.py

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -325,9 +325,9 @@ def show(self, n=20, truncate=True, vertical=False):
325325
name | Bob
326326
"""
327327
if isinstance(truncate, bool) and truncate:
328-
print(self._jdf.showString(n, 20, vertical))
328+
print(self._jdf.showString(n, 20, vertical, False))
329329
else:
330-
print(self._jdf.showString(n, int(truncate), vertical))
330+
print(self._jdf.showString(n, int(truncate), vertical, False))
331331

332332
def __repr__(self):
333333
return "DataFrame[%s]" % (", ".join("%s: %s" % c for c in self.dtypes))

sql/hive/src/main/scala/org/apache/spark/sql/hive/test/TestHive.scala

Lines changed: 13 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -34,8 +34,8 @@ import org.apache.spark.{SparkConf, SparkContext}
3434
import org.apache.spark.internal.Logging
3535
import org.apache.spark.sql.{SparkSession, SQLContext}
3636
import org.apache.spark.sql.catalyst.analysis.UnresolvedRelation
37-
import org.apache.spark.sql.catalyst.plans.logical.LogicalPlan
38-
import org.apache.spark.sql.execution.QueryExecution
37+
import org.apache.spark.sql.catalyst.plans.logical.{LogicalPlan, OneRowRelation}
38+
import org.apache.spark.sql.execution.{QueryExecution, SQLExecution}
3939
import org.apache.spark.sql.execution.command.CacheTableCommand
4040
import org.apache.spark.sql.hive._
4141
import org.apache.spark.sql.hive.client.HiveClient
@@ -456,7 +456,17 @@ private[hive] class TestHiveSparkSession(
456456
logDebug(s"Loading test table $name")
457457
val createCmds =
458458
testTables.get(name).map(_.commands).getOrElse(sys.error(s"Unknown test table $name"))
459-
createCmds.foreach(_())
459+
460+
// test tables are loaded lazily, so they may be loaded in the middle of a query execution which
461+
// has already set the execution id.
462+
if (sparkContext.getLocalProperty(SQLExecution.EXECUTION_ID_KEY) == null) {
463+
// We don't actually have a `QueryExecution` here; use a fake one instead.
464+
SQLExecution.withNewExecutionId(this, new QueryExecution(this, OneRowRelation)) {
465+
createCmds.foreach(_())
466+
}
467+
} else {
468+
createCmds.foreach(_())
469+
}
460470

461471
if (cacheTables) {
462472
new SQLContext(self).cacheTable(name)

0 commit comments

Comments
 (0)