6 changes: 3 additions & 3 deletions core/src/main/java/org/apache/spark/ExecutorPlugin.java
@@ -20,18 +20,18 @@
 import org.apache.spark.annotation.DeveloperApi;
 
 /**
- * A plugin which can be automaticaly instantiated within each Spark executor. Users can specify
+ * A plugin which can be automatically instantiated within each Spark executor. Users can specify
  * plugins which should be created with the "spark.executor.plugins" configuration. An instance
  * of each plugin will be created for every executor, including those created by dynamic allocation,
  * before the executor starts running any tasks.
  *
  * The specific api exposed to the end users still considered to be very unstable. We will
- * hopefully be able to keep compatability by providing default implementations for any methods
+ * hopefully be able to keep compatibility by providing default implementations for any methods
  * added, but make no guarantees this will always be possible across all Spark releases.
  *
  * Spark does nothing to verify the plugin is doing legitimate things, or to manage the resources
  * it uses. A plugin acquires the same privileges as the user running the task. A bad plugin
- * could also intefere with task execution and make the executor fail in unexpected ways.
+ * could also interfere with task execution and make the executor fail in unexpected ways.
  */
 @DeveloperApi
 public interface ExecutorPlugin {
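As a rough illustration of the javadoc above, here is a minimal sketch of a plugin implementation in Scala and the configuration that loads it. It assumes the interface exposes no-arg `init()` and `shutdown()` default hooks (as in Spark 2.4); the package, class name, and log messages are purely illustrative.

```scala
package com.example

import org.apache.spark.ExecutorPlugin

// Hypothetical plugin: one instance is created per executor, before any task runs on it.
class MyExecutorPlugin extends ExecutorPlugin {
  override def init(): Unit = {
    // e.g. start a metrics poller or attach a JVM agent; runs with the task user's privileges
    println(s"MyExecutorPlugin initialized on ${java.net.InetAddress.getLocalHost.getHostName}")
  }

  override def shutdown(): Unit = {
    // release anything acquired in init(); Spark does not clean up on the plugin's behalf
    println("MyExecutorPlugin shutting down")
  }
}
```

The plugin would then be enabled with something like `--conf spark.executor.plugins=com.example.MyExecutorPlugin`, with its jar on the executor classpath; every executor, including dynamically allocated ones, instantiates it before running tasks.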
4 changes: 2 additions & 2 deletions core/src/test/java/org/apache/spark/ExecutorPluginSuite.java
@@ -63,10 +63,10 @@ private SparkConf initializeSparkConf(String pluginNames) {
 
   @Test
   public void testPluginClassDoesNotExist() {
-    SparkConf conf = initializeSparkConf("nonexistant.plugin");
+    SparkConf conf = initializeSparkConf("nonexistent.plugin");
     try {
       sc = new JavaSparkContext(conf);
-      fail("No exception thrown for nonexistant plugin");
+      fail("No exception thrown for nonexistent plugin");
     } catch (Exception e) {
       // We cannot catch ClassNotFoundException directly because Java doesn't think it'll be thrown
       assertTrue(e.toString().startsWith("java.lang.ClassNotFoundException"));
2 changes: 1 addition & 1 deletion docs/sql-migration-guide-upgrade.md
@@ -117,7 +117,7 @@ displayTitle: Spark SQL Upgrading Guide
 
   - Since Spark 2.4, Metadata files (e.g. Parquet summary files) and temporary files are not counted as data files when calculating table size during Statistics computation.
 
-  - Since Spark 2.4, empty strings are saved as quoted empty strings `""`. In version 2.3 and earlier, empty strings are equal to `null` values and do not reflect to any characters in saved CSV files. For example, the row of `"a", null, "", 1` was writted as `a,,,1`. Since Spark 2.4, the same row is saved as `a,,"",1`. To restore the previous behavior, set the CSV option `emptyValue` to empty (not quoted) string.
+  - Since Spark 2.4, empty strings are saved as quoted empty strings `""`. In version 2.3 and earlier, empty strings are equal to `null` values and do not reflect to any characters in saved CSV files. For example, the row of `"a", null, "", 1` was written as `a,,,1`. Since Spark 2.4, the same row is saved as `a,,"",1`. To restore the previous behavior, set the CSV option `emptyValue` to empty (not quoted) string.
 
   - Since Spark 2.4, The LOAD DATA command supports wildcard `?` and `*`, which match any one character, and zero or more characters, respectively. Example: `LOAD DATA INPATH '/tmp/folder*/'` or `LOAD DATA INPATH '/tmp/part-?'`. Special Characters like `space` also now work in paths. Example: `LOAD DATA INPATH '/tmp/folder name/'`.
 
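To make the CSV note above concrete, here is a small sketch of writing the example row in Spark 2.4 and opting back into the pre-2.4 output via the `emptyValue` option; the output paths and DataFrame contents are illustrative only.

```scala
import org.apache.spark.sql.SparkSession

val spark = SparkSession.builder().master("local[*]").appName("csv-empty-string").getOrCreate()
import spark.implicits._

// The row from the migration note: ("a", null, "", 1)
val df = Seq(("a", null.asInstanceOf[String], "", 1)).toDF("c1", "c2", "c3", "c4")

// Spark 2.4 default: the empty string is written quoted -> a,,"",1
df.write.mode("overwrite").csv("/tmp/csv-quoted")

// Restore the 2.3-and-earlier behavior: empty string written as nothing -> a,,,1
df.write.mode("overwrite").option("emptyValue", "").csv("/tmp/csv-legacy")
```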
@@ -62,7 +62,7 @@ private[r] object AFTSurvivalRegressionWrapper extends MLReadable[AFTSurvivalReg
   private val FORMULA_REGEXP = """Surv\(([^,]+), ([^,]+)\) ~ (.+)""".r
 
   private def formulaRewrite(formula: String): (String, String) = {
-    var rewritedFormula: String = null
+    var rewrittenFormula: String = null
     var censorCol: String = null
     try {
       val FORMULA_REGEXP(label, censor, features) = formula
@@ -71,14 +71,14 @@ private[r] object AFTSurvivalRegressionWrapper extends MLReadable[AFTSurvivalReg
         throw new UnsupportedOperationException(
           "Terms of survreg formula can not support dot operator.")
       }
-      rewritedFormula = label.trim + "~" + features.trim
+      rewrittenFormula = label.trim + "~" + features.trim
       censorCol = censor.trim
     } catch {
       case e: MatchError =>
         throw new SparkException(s"Could not parse formula: $formula")
     }
 
-    (rewritedFormula, censorCol)
+    (rewrittenFormula, censorCol)
   }
 
 
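For reference, the regex above turns an R survival formula like `Surv(time, status) ~ x1 + x2` into a rewritten label formula plus a censor column. A minimal sketch of the same matching logic, with a made-up formula string:

```scala
// Same pattern as FORMULA_REGEXP above: Surv(<time>, <censor>) ~ <features>
val formulaRegexp = """Surv\(([^,]+), ([^,]+)\) ~ (.+)""".r

"Surv(time, status) ~ x1 + x2" match {
  case formulaRegexp(label, censor, features) =>
    // Mirrors formulaRewrite: yields ("time~x1 + x2", "status")
    println((label.trim + "~" + features.trim, censor.trim))
  case _ =>
    println("Could not parse formula")
}
```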
@@ -96,7 +96,7 @@ object Summarizer extends Logging {
    *  - numNonzeros: a vector with the number of non-zeros for each coefficients
    *  - max: the maximum for each coefficient.
    *  - min: the minimum for each coefficient.
-   *  - normL2: the Euclidian norm for each coefficient.
+   *  - normL2: the Euclidean norm for each coefficient.
    *  - normL1: the L1 norm of each coefficient (sum of the absolute values).
    * @param metrics metrics that can be provided.
    * @return a builder.
@@ -536,7 +536,7 @@ private[ml] object SummaryBuilderImpl extends Logging {
   }
 
   /**
-   * L2 (Euclidian) norm of each dimension.
+   * L2 (Euclidean) norm of each dimension.
    */
   def normL2: Vector = {
     require(requestedMetrics.contains(NormL2))
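The metric list documented above is exposed through `Summarizer.metrics`; a short sketch of requesting the Euclidean norm alongside the mean on a made-up DataFrame of feature vectors (column names and data are illustrative):

```scala
import org.apache.spark.ml.linalg.Vectors
import org.apache.spark.ml.stat.Summarizer
import org.apache.spark.sql.SparkSession
import org.apache.spark.sql.functions.col

val spark = SparkSession.builder().master("local[*]").appName("summarizer-demo").getOrCreate()
import spark.implicits._

// Illustrative feature vectors only
val df = Seq(
  (Vectors.dense(1.0, 2.0), 1.0),
  (Vectors.dense(3.0, 4.0), 1.0)
).toDF("features", "weight")

// Build a summarizer for the mean and the L2 (Euclidean) norm, then unpack the struct result
val stats = df.select(Summarizer.metrics("mean", "normL2").summary(col("features")).as("stats"))
stats.select("stats.mean", "stats.normL2").show(truncate = false)
```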
@@ -273,7 +273,7 @@ class MultivariateOnlineSummarizer extends MultivariateStatisticalSummary with S
   }
 
   /**
-   * L2 (Euclidian) norm of each dimension.
+   * L2 (Euclidean) norm of each dimension.
    *
    */
   @Since("1.2.0")
2 changes: 1 addition & 1 deletion python/pyspark/ml/stat.py
@@ -336,7 +336,7 @@ def metrics(*metrics):
          - numNonzeros: a vector with the number of non-zeros for each coefficients
          - max: the maximum for each coefficient.
          - min: the minimum for each coefficient.
-         - normL2: the Euclidian norm for each coefficient.
+         - normL2: the Euclidean norm for each coefficient.
          - normL1: the L1 norm of each coefficient (sum of the absolute values).
 
         :param metrics:
@@ -20,7 +20,6 @@ package org.apache.spark.sql.hive
 import java.io.File
 
 import org.apache.spark.sql.{AnalysisException, Dataset, QueryTest, SaveMode}
-import org.apache.spark.sql.catalyst.analysis.NoSuchTableException
 import org.apache.spark.sql.catalyst.parser.ParseException
 import org.apache.spark.sql.execution.columnar.InMemoryTableScanExec
 import org.apache.spark.sql.execution.datasources.{CatalogFileIndex, HadoopFsRelation, LogicalRelation}
@@ -97,24 +96,24 @@ class CachedTableSuite extends QueryTest with SQLTestUtils with TestHiveSingleto
     }
   }
 
-  test("DROP nonexistant table") {
-    sql("DROP TABLE IF EXISTS nonexistantTable")
+  test("DROP nonexistent table") {
+    sql("DROP TABLE IF EXISTS nonexistentTable")
   }
 
-  test("uncache of nonexistant tables") {
-    val expectedErrorMsg = "Table or view not found: nonexistantTable"
+  test("uncache of nonexistent tables") {
+    val expectedErrorMsg = "Table or view not found: nonexistentTable"
     // make sure table doesn't exist
-    var e = intercept[AnalysisException](spark.table("nonexistantTable")).getMessage
+    var e = intercept[AnalysisException](spark.table("nonexistentTable")).getMessage
     assert(e.contains(expectedErrorMsg))
     e = intercept[AnalysisException] {
-      spark.catalog.uncacheTable("nonexistantTable")
+      spark.catalog.uncacheTable("nonexistentTable")
     }.getMessage
     assert(e.contains(expectedErrorMsg))
     e = intercept[AnalysisException] {
-      sql("UNCACHE TABLE nonexistantTable")
+      sql("UNCACHE TABLE nonexistentTable")
    }.getMessage
     assert(e.contains(expectedErrorMsg))
-    sql("UNCACHE TABLE IF EXISTS nonexistantTable")
+    sql("UNCACHE TABLE IF EXISTS nonexistentTable")
   }
 
   test("no error on uncache of non-cached table") {