
Commit 6606446

cloud-fan authored and yaooqinn committed
[SPARK-52010] Do not generate API docs for internal classes
### What changes were proposed in this pull request?

While reviewing the API doc of 4.0 RC4, I found that some newly added internal classes appear in the API doc. This PR fixes them.

### Why are the changes needed?

API doc should not include internal classes.

### Does this PR introduce _any_ user-facing change?

No.

### How was this patch tested?

N/A

### Was this patch authored or co-authored using generative AI tooling?

No.

Closes #50797 from cloud-fan/api.

Authored-by: Wenchen Fan <[email protected]>
Signed-off-by: Kent Yao <[email protected]>
(cherry picked from commit 9f5ae88)
Signed-off-by: Kent Yao <[email protected]>
1 parent 457e266 commit 6606446
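For context, a minimal sketch of the pattern applied throughout this commit (the trait, object, and package member names below are illustrative, not taken from the diff): a top-level definition with no access modifier is public and gets picked up by the API doc generation, while a definition qualified as private to an enclosing package stays usable across Spark's own code but is left out of the published docs.

package org.apache.spark.util

// Public: would show up in the generated API documentation.
trait ExampleUtils {
  def trimmed(s: String): String = s.trim
}

// Package-qualified private: still callable from any code under the
// org.apache.spark package, but skipped when the API docs are generated.
private[spark] object ExampleUtils extends ExampleUtils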

File tree

13 files changed (+33, -25 lines)


common/utils/src/main/scala/org/apache/spark/util/SparkStringUtils.scala (2 additions, 2 deletions)

@@ -16,11 +16,11 @@
  */
 package org.apache.spark.util
 
-trait SparkStringUtils {
+private[spark] trait SparkStringUtils {
   def stringToSeq(str: String): Seq[String] = {
     import org.apache.spark.util.ArrayImplicits._
     str.split(",").map(_.trim()).filter(_.nonEmpty).toImmutableArraySeq
   }
 }
 
-object SparkStringUtils extends SparkStringUtils
+private[spark] object SparkStringUtils extends SparkStringUtils
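For reference, the helper whose visibility changes above splits on commas, trims each entry, and drops empty ones; a small usage sketch (the caller package and object name are made up, and the call now compiles only from code inside the org.apache.spark namespace because the trait is private[spark]):

package org.apache.spark.example

import org.apache.spark.util.SparkStringUtils

object StringToSeqExample {
  // " a, b, , c " becomes Seq("a", "b", "c"): whitespace trimmed, empty entries dropped.
  val parts: Seq[String] = SparkStringUtils.stringToSeq(" a, b, , c ")
}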

common/utils/src/main/scala/org/apache/spark/util/SparkTestUtils.scala (2 additions, 2 deletions)

@@ -25,7 +25,7 @@ import javax.tools.{JavaFileObject, SimpleJavaFileObject, ToolProvider}
 
 import scala.jdk.CollectionConverters._
 
-trait SparkTestUtils {
+private[spark] trait SparkTestUtils {
   // Adapted from the JavaCompiler.java doc examples
   private val SOURCE = JavaFileObject.Kind.SOURCE
 
@@ -103,4 +103,4 @@ trait SparkTestUtils {
 
 }
 
-object SparkTestUtils extends SparkTestUtils
+private[spark] object SparkTestUtils extends SparkTestUtils

connector/avro/src/main/scala/org/apache/spark/sql/avro/AvroDataToCatalyst.scala (1 addition, 1 deletion)

@@ -30,7 +30,7 @@ import org.apache.spark.sql.catalyst.util.{FailFastMode, ParseMode, PermissiveMode}
 import org.apache.spark.sql.errors.QueryCompilationErrors
 import org.apache.spark.sql.types._
 
-private[sql] case class AvroDataToCatalyst(
+case class AvroDataToCatalyst(
     child: Expression,
     jsonFormatSchema: String,
     options: Map[String, String])

connector/avro/src/main/scala/org/apache/spark/sql/avro/CatalystDataToAvro.scala (1 addition, 1 deletion)

@@ -27,7 +27,7 @@ import org.apache.spark.sql.catalyst.expressions.{Expression, UnaryExpression}
 import org.apache.spark.sql.catalyst.expressions.codegen.{CodegenContext, ExprCode}
 import org.apache.spark.sql.types.{BinaryType, DataType}
 
-private[sql] case class CatalystDataToAvro(
+case class CatalystDataToAvro(
     child: Expression,
     jsonFormatSchema: Option[String]) extends UnaryExpression {
 
connector/avro/src/main/scala/org/apache/spark/sql/avro/SchemaOfAvro.scala (1 addition, 1 deletion)

@@ -25,7 +25,7 @@ import org.apache.spark.sql.catalyst.util.{FailFastMode, ParseMode, PermissiveMode}
 import org.apache.spark.sql.errors.QueryCompilationErrors
 import org.apache.spark.sql.types.{DataType, ObjectType, StringType}
 
-private[sql] case class SchemaOfAvro(
+case class SchemaOfAvro(
     jsonFormatSchema: String,
     options: Map[String, String])
   extends LeafExpression with RuntimeReplaceable {

connector/kafka-0-10-token-provider/src/main/scala/org/apache/spark/kafka010/KafkaConfigUpdater.scala (1 addition, 1 deletion)

@@ -30,7 +30,7 @@ import org.apache.spark.internal.Logging
 /**
  * Class to conveniently update Kafka config params, while logging the changes
  */
-private[spark] case class KafkaConfigUpdater(module: String, kafkaParams: Map[String, Object])
+case class KafkaConfigUpdater(module: String, kafkaParams: Map[String, Object])
   extends Logging {
   private val map = new ju.HashMap[String, Object](kafkaParams.asJava)
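Only part of the class body is visible in this hunk; as context, a standalone sketch of the pattern its doc comment describes (copy the params into a mutable map and record each update). ExampleConfigUpdater and its set/build methods are illustrative names, not the real API of the class above:

import java.{util => ju}
import scala.jdk.CollectionConverters._

// Illustrative only: wraps the initial params in a mutable map and notes each change.
case class ExampleConfigUpdater(module: String, params: Map[String, Object]) {
  private val map = new ju.HashMap[String, Object](params.asJava)

  def set(key: String, value: Object): ExampleConfigUpdater = {
    val previous = map.put(key, value)
    println(s"$module: set $key (replaced existing value: ${previous != null})")
    this
  }

  def build(): ju.Map[String, Object] = map
}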

connector/kafka-0-10-token-provider/src/main/scala/org/apache/spark/kafka010/KafkaDelegationTokenProvider.scala (1 addition, 1 deletion)

@@ -28,7 +28,7 @@ import org.apache.spark.internal.{Logging, MDC}
 import org.apache.spark.internal.LogKeys.{CLUSTER_ID, SERVICE_NAME}
 import org.apache.spark.security.HadoopDelegationTokenProvider
 
-private[spark] class KafkaDelegationTokenProvider
+class KafkaDelegationTokenProvider
   extends HadoopDelegationTokenProvider with Logging {
 
   override def serviceName: String = "kafka"

connector/kafka-0-10-token-provider/src/main/scala/org/apache/spark/kafka010/KafkaRedactionUtil.scala (3 additions, 3 deletions)

@@ -24,8 +24,8 @@ import org.apache.spark.internal.Logging
 import org.apache.spark.internal.config.SECRET_REDACTION_PATTERN
 import org.apache.spark.util.Utils.{redact, REDACTION_REPLACEMENT_TEXT}
 
-private[spark] object KafkaRedactionUtil extends Logging {
-  private[spark] def redactParams(params: Seq[(String, Object)]): Seq[(String, String)] = {
+object KafkaRedactionUtil extends Logging {
+  def redactParams(params: Seq[(String, Object)]): Seq[(String, String)] = {
     val redactionPattern = Some(Option(SparkEnv.get).map(_.conf)
       .getOrElse(new SparkConf()).get(SECRET_REDACTION_PATTERN))
     params.map { case (key, value) =>
@@ -42,7 +42,7 @@ private[spark] object KafkaRedactionUtil extends Logging {
     }
   }
 
-  private[kafka010] def redactJaasParam(param: String): String = {
+  def redactJaasParam(param: String): String = {
     if (param != null && !param.isEmpty) {
       param.replaceAll("password=\".*\"", s"""password="$REDACTION_REPLACEMENT_TEXT"""")
     } else {
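Based on the replaceAll call shown above, redactJaasParam swaps the password value in a JAAS config string for the redaction placeholder; a small sketch (the sample JAAS string is invented, and the exact REDACTION_REPLACEMENT_TEXT value is assumed):

object RedactJaasExample {
  // Mirrors the regex used in redactJaasParam above.
  val jaas = "PlainLoginModule required username=\"bob\" password=\"secret\";"
  val redacted = jaas.replaceAll("password=\".*\"", "password=\"*********(redacted)\"")
  // redacted: PlainLoginModule required username="bob" password="*********(redacted)";
}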

connector/kafka-0-10-token-provider/src/main/scala/org/apache/spark/kafka010/KafkaTokenSparkConf.scala (2 additions, 2 deletions)

@@ -25,7 +25,7 @@ import org.apache.spark.SparkConf
 import org.apache.spark.internal.Logging
 import org.apache.spark.util.Utils.REDACTION_REPLACEMENT_TEXT
 
-private[spark] case class KafkaTokenClusterConf(
+case class KafkaTokenClusterConf(
     identifier: String,
     authBootstrapServers: String,
     targetServersRegex: String,
@@ -57,7 +57,7 @@ private[spark] case class KafkaTokenClusterConf(
     s"specifiedKafkaParams=${KafkaRedactionUtil.redactParams(specifiedKafkaParams.toSeq)}}"
 }
 
-private [kafka010] object KafkaTokenSparkConf extends Logging {
+object KafkaTokenSparkConf extends Logging {
   val CLUSTERS_CONFIG_PREFIX = "spark.kafka.clusters."
   val DEFAULT_TARGET_SERVERS_REGEX = ".*"
   val DEFAULT_SASL_KERBEROS_SERVICE_NAME = "kafka"
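The CLUSTERS_CONFIG_PREFIX above is the namespace under which per-cluster delegation-token settings are grouped; as an illustration (the cluster identifier "cluster1", the key suffix, and the broker address are assumptions for this sketch):

import org.apache.spark.SparkConf

object KafkaClusterConfExample {
  // Hypothetical per-cluster entry grouped under the spark.kafka.clusters. prefix.
  val conf = new SparkConf()
    .set("spark.kafka.clusters.cluster1.auth.bootstrap.servers", "broker1:9093")
}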

connector/kafka-0-10-token-provider/src/main/scala/org/apache/spark/kafka010/KafkaTokenUtil.scala (6 additions, 6 deletions)

@@ -44,25 +44,25 @@ import org.apache.spark.internal.config._
 import org.apache.spark.util.{SecurityUtils, Utils}
 import org.apache.spark.util.Utils.REDACTION_REPLACEMENT_TEXT
 
-private[spark] object KafkaTokenUtil extends Logging {
+object KafkaTokenUtil extends Logging {
   val TOKEN_KIND = new Text("KAFKA_DELEGATION_TOKEN")
   private val TOKEN_SERVICE_PREFIX = "kafka.server.delegation.token"
   private val DATE_TIME_FORMATTER =
     DateTimeFormatter
       .ofPattern("yyyy-MM-dd'T'HH:mm")
       .withZone(ZoneId.systemDefault())
 
-  private[kafka010] def getTokenService(identifier: String): Text =
+  def getTokenService(identifier: String): Text =
     new Text(s"$TOKEN_SERVICE_PREFIX.$identifier")
 
   private def getClusterIdentifier(service: Text): String =
     service.toString().replace(s"$TOKEN_SERVICE_PREFIX.", "")
 
-  private[spark] class KafkaDelegationTokenIdentifier extends AbstractDelegationTokenIdentifier {
+  class KafkaDelegationTokenIdentifier extends AbstractDelegationTokenIdentifier {
     override def getKind: Text = TOKEN_KIND
   }
 
-  private[kafka010] def obtainToken(
+  def obtainToken(
       sparkConf: SparkConf,
       clusterConf: KafkaTokenClusterConf): (Token[KafkaDelegationTokenIdentifier], Long) = {
     checkProxyUser()
@@ -81,15 +81,15 @@ private[spark] object KafkaTokenUtil extends Logging {
     ), token.tokenInfo.expiryTimestamp)
   }
 
-  private[kafka010] def checkProxyUser(): Unit = {
+  def checkProxyUser(): Unit = {
     val currentUser = UserGroupInformation.getCurrentUser()
     // Obtaining delegation token for proxy user is planned but not yet implemented
     // See https://issues.apache.org/jira/browse/KAFKA-6945
     require(!SparkHadoopUtil.get.isProxyUser(currentUser), "Obtaining delegation token for proxy " +
       "user is not yet supported.")
   }
 
-  private[kafka010] def createAdminClientProperties(
+  def createAdminClientProperties(
       sparkConf: SparkConf,
       clusterConf: KafkaTokenClusterConf): ju.Properties = {
     val adminClientProperties = new ju.Properties
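From the getTokenService and getClusterIdentifier pair shown above, the token service name is just the fixed prefix joined with the cluster identifier; a small sketch of the round trip (the identifier "cluster1" is made up):

object TokenServiceNameExample {
  private val prefix = "kafka.server.delegation.token"

  // What getTokenService builds for a cluster identifier...
  val service = s"$prefix.cluster1"                  // "kafka.server.delegation.token.cluster1"
  // ...and what getClusterIdentifier recovers by stripping the prefix again.
  val identifier = service.replace(s"$prefix.", "")  // "cluster1"
}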
